2012-11-12 16:51:13 +01:00
|
|
|
/* AddressSanitizer, a fast memory error detector.
|
2017-01-01 13:07:43 +01:00
|
|
|
Copyright (C) 2012-2017 Free Software Foundation, Inc.
|
2012-11-12 16:51:13 +01:00
|
|
|
Contributed by Kostya Serebryany <kcc@google.com>
|
|
|
|
|
|
|
|
This file is part of GCC.
|
|
|
|
|
|
|
|
GCC is free software; you can redistribute it and/or modify it under
|
|
|
|
the terms of the GNU General Public License as published by the Free
|
|
|
|
Software Foundation; either version 3, or (at your option) any later
|
|
|
|
version.
|
|
|
|
|
|
|
|
GCC is distributed in the hope that it will be useful, but WITHOUT ANY
|
|
|
|
WARRANTY; without even the implied warranty of MERCHANTABILITY or
|
|
|
|
FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
|
|
|
|
for more details.
|
|
|
|
|
|
|
|
You should have received a copy of the GNU General Public License
|
|
|
|
along with GCC; see the file COPYING3. If not see
|
|
|
|
<http://www.gnu.org/licenses/>. */
|
|
|
|
|
|
|
|
|
|
|
|
#include "config.h"
|
|
|
|
#include "system.h"
|
|
|
|
#include "coretypes.h"
|
2015-07-08 02:53:03 +02:00
|
|
|
#include "backend.h"
|
2015-10-29 14:57:32 +01:00
|
|
|
#include "target.h"
|
|
|
|
#include "rtl.h"
|
2013-10-21 21:36:37 +02:00
|
|
|
#include "tree.h"
|
2015-07-08 02:53:03 +02:00
|
|
|
#include "gimple.h"
|
2015-10-29 14:57:32 +01:00
|
|
|
#include "cfghooks.h"
|
|
|
|
#include "alloc-pool.h"
|
|
|
|
#include "tree-pass.h"
|
Move MEMMODEL_* from coretypes.h to memmodel.h
2016-10-13 Thomas Preud'homme <thomas.preudhomme@arm.com>
gcc/
* coretypes.h: Move MEMMODEL_* macros and enum memmodel definition
into ...
* memmodel.h: This file.
* alias.c, asan.c, auto-inc-dec.c, bb-reorder.c, bt-load.c,
caller-save.c, calls.c, ccmp.c, cfgbuild.c, cfgcleanup.c,
cfgexpand.c, cfgloopanal.c, cfgrtl.c, cilk-common.c, combine.c,
combine-stack-adj.c, common/config/aarch64/aarch64-common.c,
common/config/arm/arm-common.c, common/config/bfin/bfin-common.c,
common/config/c6x/c6x-common.c, common/config/i386/i386-common.c,
common/config/ia64/ia64-common.c, common/config/nvptx/nvptx-common.c,
compare-elim.c, config/aarch64/aarch64-builtins.c,
config/aarch64/aarch64-c.c, config/aarch64/cortex-a57-fma-steering.c,
config/arc/arc.c, config/arc/arc-c.c, config/arm/arm-builtins.c,
config/arm/arm-c.c, config/avr/avr.c, config/avr/avr-c.c,
config/avr/avr-log.c, config/bfin/bfin.c, config/c6x/c6x.c,
config/cr16/cr16.c, config/cris/cris.c, config/darwin-c.c,
config/darwin.c, config/epiphany/epiphany.c,
config/epiphany/mode-switch-use.c,
config/epiphany/resolve-sw-modes.c, config/fr30/fr30.c,
config/frv/frv.c, config/ft32/ft32.c, config/h8300/h8300.c,
config/i386/i386-c.c, config/i386/winnt.c, config/iq2000/iq2000.c,
config/lm32/lm32.c, config/m32c/m32c.c, config/m32r/m32r.c,
config/m68k/m68k.c, config/mcore/mcore.c,
config/microblaze/microblaze.c, config/mmix/mmix.c,
config/mn10300/mn10300.c, config/moxie/moxie.c,
config/msp430/msp430.c, config/nds32/nds32-cost.c,
config/nds32/nds32-intrinsic.c, config/nds32/nds32-md-auxiliary.c,
config/nds32/nds32-memory-manipulation.c,
config/nds32/nds32-predicates.c, config/nds32/nds32.c,
config/nios2/nios2.c, config/nvptx/nvptx.c, config/pa/pa.c,
config/pdp11/pdp11.c, config/rl78/rl78.c, config/rs6000/rs6000-c.c,
config/rx/rx.c, config/s390/s390-c.c, config/s390/s390.c,
config/sh/sh.c, config/sh/sh-c.c, config/sh/sh-mem.cc,
config/sh/sh_treg_combine.cc, config/sol2.c, config/spu/spu.c,
config/stormy16/stormy16.c, config/tilegx/tilegx.c,
config/tilepro/tilepro.c, config/v850/v850.c, config/vax/vax.c,
config/visium/visium.c, config/vms/vms-c.c, config/xtensa/xtensa.c,
coverage.c, cppbuiltin.c, cprop.c, cse.c, cselib.c, dbxout.c, dce.c,
df-core.c, df-problems.c, df-scan.c, dojump.c, dse.c, dwarf2asm.c,
dwarf2cfi.c, dwarf2out.c, emit-rtl.c, except.c, explow.c, expmed.c,
expr.c, final.c, fold-const.c, function.c, fwprop.c, gcse.c,
ggc-page.c, haifa-sched.c, hsa-brig.c, hsa-gen.c, hw-doloop.c,
ifcvt.c, init-regs.c, internal-fn.c, ira-build.c, ira-color.c,
ira-conflicts.c, ira-costs.c, ira-emit.c, ira-lives.c, ira.c, jump.c,
loop-doloop.c, loop-invariant.c, loop-iv.c, loop-unroll.c,
lower-subreg.c, lra.c, lra-assigns.c, lra-coalesce.c,
lra-constraints.c, lra-eliminations.c, lra-lives.c, lra-remat.c,
lra-spills.c, mode-switching.c, modulo-sched.c, omp-low.c, passes.c,
postreload-gcse.c, postreload.c, predict.c, print-rtl-function.c,
recog.c, ree.c, reg-stack.c, regcprop.c, reginfo.c, regrename.c,
reload.c, reload1.c, reorg.c, resource.c, rtl-chkp.c, rtl-tests.c,
rtlanal.c, rtlhooks.c, sched-deps.c, sched-rgn.c, sdbout.c,
sel-sched-ir.c, sel-sched.c, shrink-wrap.c, simplify-rtx.c,
stack-ptr-mod.c, stmt.c, stor-layout.c, target-globals.c,
targhooks.c, toplev.c, tree-nested.c, tree-outof-ssa.c,
tree-profile.c, tree-ssa-coalesce.c, tree-ssa-ifcombine.c,
tree-ssa-loop-ivopts.c, tree-ssa-loop.c, tree-ssa-reassoc.c,
tree-ssa-sccvn.c, tree-vect-data-refs.c, ubsan.c, valtrack.c,
var-tracking.c, varasm.c: Include memmodel.h.
* genattrtab.c (write_header): Include memmodel.h in generated file.
* genautomata.c (main): Likewise.
* gengtype.c (open_base_files): Likewise.
* genopinit.c (main): Likewise.
* genconditions.c (write_header): Include memmodel.h earlier in
generated file.
* genemit.c (main): Likewise.
* genoutput.c (output_prologue): Likewise.
* genpeep.c (main): Likewise.
* genpreds.c (write_insn_preds_c): Likewise.
* genrecog.c (write_header): Likewise.
* Makefile.in (PLUGIN_HEADERS): Include memmodel.h
gcc/ada/
* gcc-interface/utils2.c: Include memmodel.h.
gcc/c-family/
* c-cppbuiltin.c: Include memmodel.h.
* c-opts.c: Likewise.
* c-pragma.c: Likewise.
* c-warn.c: Likewise.
gcc/c/
* c-typeck.c: Include memmodel.h.
gcc/cp/
* decl2.c: Include memmodel.h.
* rtti.c: Likewise.
gcc/fortran/
* trans-intrinsic.c: Include memmodel.h.
gcc/go/
* go-backend.c: Include memmodel.h.
libgcc/
* libgcov-profiler.c: Replace MEMMODEL_* macros by their __ATOMIC_*
equivalent.
* config/tilepro/atomic.c: Likewise and stop casting model to
enum memmodel.
From-SVN: r241121
2016-10-13 16:17:52 +02:00
|
|
|
#include "memmodel.h"
|
2015-10-29 14:57:32 +01:00
|
|
|
#include "tm_p.h"
|
2017-01-23 13:02:13 +01:00
|
|
|
#include "ssa.h"
|
2015-10-29 14:57:32 +01:00
|
|
|
#include "stringpool.h"
|
|
|
|
#include "tree-ssanames.h"
|
|
|
|
#include "optabs.h"
|
|
|
|
#include "emit-rtl.h"
|
|
|
|
#include "cgraph.h"
|
|
|
|
#include "gimple-pretty-print.h"
|
|
|
|
#include "alias.h"
|
genattrtab.c (write_header): Include hash-set.h...
2015-01-09 Michael Collison <michael.collison@linaro.org>
* genattrtab.c (write_header): Include hash-set.h, machmode.h,
vec.h, double-int.h, input.h, alias.h, symtab.h, options.h
fold-const.h, wide-int.h, and inchash.h when generating
insn-attrtab.c.
* genautomata.c (main): Include hash-set.h, machmode.h,
vec.h, double-int.h, input.h, alias.h, symtab.h, options.h
fold-const.h, wide-int.h, and inchash.h when generating
insn-automata.c.
* genemit.c (main): Include hash-set.h, machmode.h,
vec.h, double-int.h, input.h, alias.h, symtab.h, options.h
fold-const.h, wide-int.h, and inchash.h when generating
insn-emit.c.
* gengtype.c (open_base_files): Include hash-set.h, machmode.h,
vec.h, double-int.h, input.h, alias.h, symtab.h, options.h
fold-const.h, wide-int.h, and inchash.h when generating
gtype-desc.c.
* genopinit.c (main): Include hash-set.h, machmode.h,
vec.h, double-int.h, input.h, alias.h, symtab.h, options.h
fold-const.h, wide-int.h, and inchash.h when generating
insn-opinit.c.
* genoutput.c (output_prologue): Include hash-set.h, machmode.h,
vec.h, double-int.h, input.h, alias.h, symtab.h, options.h
fold-const.h, wide-int.h, and inchash.h when generating
insn-output.c.
* genpeep.c (main): Include hash-set.h, machmode.h,
vec.h, double-int.h, input.h, alias.h, symtab.h, options.h
fold-const.h, wide-int.h, and inchash.h when generating
insn-peep.c.
* genpreds.c (write_insn_preds_c): Include hash-set.h, machmode.h,
vec.h, double-int.h, input.h, alias.h, symtab.h, options.h
fold-const.h, wide-int.h, and inchash.h when generating
insn-preds.c.
* optc-save-gen.awk: Include hash-set.h, machmode.h,
vec.h, double-int.h, input.h, alias.h, symtab.h, options.h
fold-const.h, wide-int.h, and inchash.h when generating
options-save.c.
* opth-gen.awk: Change include guard from GCC_C_COMMON_H to GCC_C_COMMON_C
when generating options.h.
* ada/gcc-interface/cuintp.c: Include hash-set.h, machmode.h,
vec.h, double-int.h, input.h, alias.h, symtab.h,
fold-const.h, wide-int.h, and inchash.h due to
flattening of tree.h.
* ada/gcc-interface/decl.c: ditto.
* ada/gcc-interface/misc.c: ditto.
* ada/gcc-interface/targtyps.c: Include hash-set.h, machmode.h,
vec.h, double-int.h, input.h, alias.h, symtab.h, options.h,
fold-const.h, wide-int.h, and inchash.h due to
flattening of tree.h.
* ada/gcc-interface/trans.c: Include hash-set.h, machmode.h,
vec.h, double-int.h, input.h, alias.h, symtab.h, real.h,
fold-const.h, wide-int.h, inchash.h due to
flattening of tree.h.
* ada/gcc-interface/utils.c: Include hash-set.h, machmode.h,
vec.h, double-int.h, input.h, alias.h, symtab.h,
fold-const.h, wide-int.h, and inchash.h due to
flattening of tree.h.
* ada/gcc-interface/utils2.c: ditto.
* alias.c: Include hash-set.h, machmode.h,
vec.h, double-int.h, input.h, alias.h, symtab.h, options.h
fold-const.h, wide-int.h, and inchash.h due to
flattening of tree.h.
* asan.c: ditto.
* attribs.c: ditto.
* auto-inc-dec.c: ditto.
* auto-profile.c: ditto
* bb-reorder.c: ditto.
* bt-load.c: Include symtab.h due to flattening of tree.h.
* builtins.c: Include hash-set.h, machmode.h,
vec.h, double-int.h, input.h, alias.h, symtab.h, options.h
fold-const.h, wide-int.h, and inchash.h due to
flattening of tree.h.
* c/c-array-notation.c: ditto.
* c/c-aux-info.c: ditto.
* c/c-convert.c: ditto.
* c/c-decl.c: ditto.
* c/c-errors.c: ditto.
* c/c-lang.c: ditto.
* c/c-objc-common.c: ditto.
* c/c-parser.c: ditto.
* c/c-typeck.c: Include hash-set.h, machmode.h,
vec.h, double-int.h, input.h, alias.h, symtab.h, options.h
fold-const.h, wide-int.h, inchash.h, real.h and
fixed-value.h due to flattening of tree.h.
* calls.c: Include hash-set.h, machmode.h,
vec.h, double-int.h, input.h, alias.h, symtab.h, options.h
fold-const.h, wide-int.h, and inchash.h due to
flattening of tree.h.
* ccmp.c: ditto.
* c-family/array-notation-common.c: ditto.
* c-family/c-ada-spec.c: ditto.
* c-family/c-cilkplus.c: ditto.
* c-family/c-common.c: Include input.h due to flattening of tree.h.
Define macro GCC_C_COMMON_C.
* c-family/c-common.h: Flatten tree.h header files into c-common.h.
Remove include of tree-core.h.
* c-family/c-cppbuiltin.c: Include hash-set.h, machmode.h,
vec.h, double-int.h, input.h, alias.h, symtab.h, options.h
fold-const.h, wide-int.h, and inchash.h due to
flattening of tree.h.
* c-family/c-dump.c: ditto.
* c-family/c-format.c: Flatten tree.h header files into c-common.h.
* c-family/c-cppbuiltin.c: Include hash-set.h, machmode.h,
vec.h, double-int.h, input.h, alias.h, symtab.h, options.h
fold-const.h, wide-int.h, and inchash.h due to
flattening of tree.h.
* c-family/c-dump.c: Include hash-set.h, machmode.h,
vec.h, double-int.h, input.h, alias.h, symtab.h, options.h
fold-const.h, wide-int.h, and inchash.h due to
flattening of tree.h.
* c-family/c-format.c: Include hash-set.h, machmode.h,
vec.h, double-int.h, input.h, alias.h, symtab.h, options.h
fold-const.h, wide-int.h, inchash.h and real.h due to
flattening of tree.h.
* c-family/c-gimplify.c: Include hash-set.h, machmode.h,
vec.h, double-int.h, input.h, alias.h, symtab.h, options.h
fold-const.h, wide-int.h, and inchash.h due to
flattening of tree.h.
* c-family/cilk.c: ditto.
* c-family/c-lex.c: ditto.
* c-family/c-omp.c: ditto.
* c-family/c-opts.c: ditto.
* c-family/c-pch.c: ditto.
* c-family/c-ppoutput.c: ditto.
* c-family/c-pragma.c: ditto.
* c-family/c-pretty-print.c: ditto.
* c-family/c-semantics.c: ditto.
* c-family/c-ubsan.c: ditto.
* c-family/stub-objc.c: ditto.
* cfgbuild.c: ditto.
* cfg.c: ditto.
* cfgcleanup.c: ditto.
* cfgexpand.c: ditto.
* cfghooks.c: ditto.
* cfgloop.c: Include symtab.h, fold-const.h, and
inchash.h due to flattening of tree.h.
* cfgloopmanip.c: ditto.
* cfgrtl.c: Include hash-set.h, machmode.h,
vec.h, double-int.h, input.h, alias.h, symtab.h, options.h
fold-const.h, wide-int.h, and inchash.h due to
flattening of tree.h.
* cgraphbuild.c: ditto.
* cgraph.c: ditto.
* cgraphclones.c: ditto.
* cgraphunit.c: ditto.
* cilk-common.c: ditto.
* combine.c: ditto.
* combine-stack-adj.c: Include symtab.h due to flattening of tree.h.
* config/aarch64/aarch64-builtins.c: Include hash-set.h, machmode.h,
vec.h, double-int.h, input.h, alias.h, symtab.h, options.h
fold-const.h, wide-int.h, and inchash.h due to
flattening of tree.h.
* config/aarch64/aarch64.c: ditto.
* config/alpha/alpha.c: ditto.
* config/arc/arc.c: ditto.
* config/arm/aarch-common.c: ditto.
* config/arm/arm-builtins.c: ditto.
* config/arm/arm.c: ditto.
* config/arm/arm-c.c: ditto.
* config/avr/avr.c: ditto.
* config/avr/avr-c.c: ditto.
* config/avr/avr-log.c: ditto.
* config/bfin/bfin.c: ditto.
* config/c6x/c6x.c: ditto.
* config/cr16/cr16.c: ditto.
* config/cris/cris.c: ditto.
* config/darwin.c: ditto.
* config/darwin-c.c: ditto.
* config/default-c.c: ditto.
* config/epiphany/epiphany.c: ditto.
* config/fr30/fr30.c: ditto.
* config/frv/frv.c: ditto.
* config/glibc-c.c: ditto.
* config/h8300/h8300.c: ditto.
* config/i386/i386.c: ditto.
* config/i386/i386-c.c: ditto.
* config/i386/msformat.c: ditto.
* config/i386/winnt.c: ditto.
* config/i386/winnt-cxx.c: ditto.
* config/i386/winnt-stubs.c: ditto.
* config/ia64/ia64.c: ditto.
* config/ia64/ia64-c.c: ditto.
* config/iq2000/iq2000.c: ditto.
* config/lm32/lm32.c: Include symtab.h, fold-const.h,
wide-int.h, and inchash.h due to flattening of tree.h.
* config/m32c/m32c.c: Include hash-set.h, machmode.h,
vec.h, double-int.h, input.h, alias.h, symtab.h, options.h
fold-const.h, wide-int.h, and inchash.h due to
flattening of tree.h.
* config/m32c/m32c-pragma.c: ditto.
* config/m32c/m32cr.c: ditto.
* config/m68k/m68k.c: ditto.
* config/mcore/mcore.c: ditto.
* config/mep/mep.c: ditto.
* config/mep/mep-pragma.c: ditto.
* config/microblaze/microblaze.c: ditto.
* config/microblaze/microblaze-c.c: ditto.
* config/mips/mips.c: ditto.
* config/mmix/mmix.c: Include symtab.h, fold-const.h,
wide-int.h, and inchash.h due to flattening of tree.h.
* config/mn10300/mn10300.c: Include hash-set.h, machmode.h,
vec.h, double-int.h, input.h, alias.h, symtab.h, options.h
fold-const.h, wide-int.h, and inchash.h due to
flattening of tree.h.
* config/moxie/moxie.c: ditto.
* config/msp430/msp430.c: ditto.
* config/msp430/msp430-c.c: ditto.
* config/nds32/nds32.c: ditto.
* config/nds32/nds32-cost.c: ditto.
* config/nds32/nds32-fp-as-gp.c: ditto.
* config/nds32/nds32-intrinsic.c: ditto.
* config/nds32/nds32-isr.c: ditto.
* config/nds32/nds32-md-auxiliary.c: ditto.
* config/nds32/nds32-memory-manipulation.c: ditto.
* config/nds32/nds32-pipelines-auxiliary.c: ditto.
* config/nds32/nds32-predicates.c: ditto.
* config/nios2/nios2.c: ditto.
* config/nvptx/nvptx.c: ditto.
* config/pa/pa.c: ditto.
* config/pdp11/pdp11.c: Include symtab.h, fold-const.h,
wide-int.h, and inchash.h due to flattening of tree.h.
* config/rl78/rl78.c: Include hash-set.h, machmode.h,
vec.h, double-int.h, input.h, alias.h, symtab.h, options.h
fold-const.h, wide-int.h, and inchash.h due to
flattening of tree.h.
* config/rl78/rl78-cx.c: ditto.
* config/rs6000/rs6000.c: ditto.
* config/rs6000/rs6000-c.c: ditto.
* config/rx/rx.c: ditto.
* config/s390/s390.c: ditto.
* config/sh/sh.c: ditto.
* config/sh/sh-c.c: ditto.
* config/sh/sh-mem.cc: ditto.
* config/sh/sh_treg_combine.cc: Include symtab.h, inchash.h and tree.h
due to flattening of tree.h.
Remove include of tree-core.h.
* config/sol2.c: Include hash-set.h, machmode.h,
vec.h, double-int.h, input.h, alias.h, symtab.h, options.h
fold-const.h, wide-int.h, and inchash.h due to
flattening of tree.h.
* config/sol2-c.c: ditto.
* config/sol2-cxx.c: ditto.
* config/sol2-stubs.c: ditto.
* config/sparc/sparc.c: ditto.
* config/sparc/sparc-cx.c: ditto.
* config/spu/spu.c: ditto.
* config/spu/spu-c.c: ditto.
* config/stormy16/stormy16.c: ditto.
* config/tilegx/tilegx.c: Include symtab.h, fold-const.h,
wide-int.h, and inchash.h due to flattening of tree.h.
* config/tilepro/gen-mul-tables.cc: Include symtab.h in generated file.
* config/tilegx/tilegx-c.c: Include hash-set.h, machmode.h,
vec.h, double-int.h, input.h, alias.h, symtab.h, options.h
fold-const.h, wide-int.h, and inchash.h due to
flattening of tree.h.
* config/tilepro/tilepro.c: Include symtab.h, fold-const.h,
wide-int.h, and inchash.h due to flattening of tree.h.
* config/tilepro/tilepro-c.c: Include hash-set.h, machmode.h,
vec.h, double-int.h, input.h, alias.h, symtab.h, options.h
fold-const.h, wide-int.h, and inchash.h due to
flattening of tree.h.
* config/v850/v850.c: ditto.
* config/v850/v850-c.c: ditto.
* config/vax/vax.c: ditto.
* config/vms/vms.c: ditto.
* config/vms/vms-c.c: ditto.
* config/vxworks.c: ditto.
* config/winnt-c.c: ditto.
* config/xtensa/xtensa.c: Include symtab.h, fold-const.h,
wide-int.h, and inchash.h due to flattening of tree.h.
* convert.c: Include hash-set.h, machmode.h, vec.h, double-int.h,
input.h, alias.h, symtab.h, options.h, fold-const.h,
wide-int.h, and inchash.h due to flattening of tree.h.
* coverage.c: ditto.
* cp/call.c: ditto.
* cp/class.c: ditto.
* cp/constexpr.c: ditto.
* cp/cp-array-notation.c: ditto.
* cp/cp-gimplify.c: ditto.
* cp/cp-lang.c: ditto.
* cp/cp-objcp-common.c: ditto.
* cp/cvt.c: ditto.
* cp/decl2.c: ditto.
* cp/decl.c: ditto.
* cp/dump.c: ditto.
* cp/error.c: ditto.
* cp/except.c: ditto.
* cp/expr.c: ditto.
* cp/friend.c: ditto.
* cp/init.c: ditto.
* cp/lambda.c: ditto.
* cp/lex.c: ditto.
* cp/mangle.c: ditto.
* cp/name-lookup.c: ditto.
* cp/optimize.c: ditto.
* cp/parser.c: ditto.
* cp/pt.c: ditto.
* cp/ptree.c: ditto.
* cp/repo.c: ditto.
* cp/rtti.c: ditto.
* cp/search.c: ditto.
* cp/semantics.c: ditto.
* cp/tree.c: ditto.
* cp/typeck2.c: ditto.
* cp/typeck.c: ditto.
* cppbuiltin.c: ditto.
* cprop.c: ditto.
* cse.c: Add include of symtab.h due to flattening of tree.h.
* cselib.c: Include hash-set.h, machmode.h, vec.h, double-int.h,
input.h, alias.h, symtab.h, options.h, fold-const.h,
wide-int.h, and inchash.h due to flattening of tree.h.
* data-streamer.c: ditto.
* data-streamer-in.c: ditto.
* data-streamer-out.c: ditto.
* dbxout.c: ditto.
* dce.c: ditto.
* ddg.c: Add include of symtab.h due to flattening of tree.h.
* debug.c: Include hash-set.h, machmode.h, vec.h, double-int.h,
input.h, alias.h, symtab.h, options.h, fold-const.h,
wide-int.h, and inchash.h due to flattening of tree.h.
* dfp.c: ditto.
* df-scan.c: ditto.
* dojump.c: Include hash-set.h, machmode.h, vec.h, double-int.h,
input.h, alias.h, symtab.h, options.h, fold-const.h,
wide-int.h, inchash.h and real.h due to flattening of tree.h.
* double-int.c: ditto.
* dse.c: ditto.
* dumpfile.c: ditto.
* dwarf2asm.c: Include hash-set.h, machmode.h, vec.h, double-int.h,
input.h, alias.h, symtab.h, fold-const.h,
wide-int.h, inchash.h and real.h due to flattening of tree.h.
* dwarf2cfi.c: ditto.
* dwarf2out.c: ditto.
* emit-rtl.c: ditto.
* except.c: ditto.
* explow.c: ditto.
* expmed.c: Include hash-set.h, machmode.h, vec.h, double-int.h,
input.h, alias.h, symtab.h, fold-const.h,
wide-int.h, and inchash.h due to flattening of tree.h.
* expr.c: ditto.
* final.c: ditto.
* fixed-value.c: Include hash-set.h, machmode.h, vec.h, double-int.h,
input.h, alias.h, symtab.h, fold-const.h,
wide-int.h, inchash.h and fixed-value.h due to flattening of tree.h.
* fold-const.c: Include hash-set.h, machmode.h, vec.h, double-int.h,
input.h, alias.h, symtab.h, fold-const.h,
wide-int.h, and inchash.h due to flattening of tree.h.
Relocate inline function convert_to_ptrofftype_loc from tree.h.
Relocate inline function fold_build_pointer_plus_loc from tree.h.
Relocate inline function fold_build_pointer_plus_hwi_loc from tree.h.
* fold-const.h: Relocate macro convert_to_ptrofftype from tree.h.
Relocate macro fold_build_pointer_plus from tree.h.
Relocate macro fold_build_pointer_plus_hwi from tree.h.
Add prototype for convert_to_ptrofftype_loc relocated from tree.h.
Add prototype for fold_build_pointer_plus_loc relocated from tree.h.
Add prototype for fold_build_pointer_plus_hwi_loc relocated from tree.h.
* fortran/convert.c: Include hash-set.h, machmode.h, vec.h, double-int.h,
input.h, alias.h, symtab.h, options.h, fold-const.h,
wide-int.h, and inchash.h due to flattening of tree.h.
* fortran/cpp.c: Include hash-set.h, machmode.h, vec.h, double-int.h,
input.h, alias.h, symtab.h, fold-const.h,
wide-int.h, and inchash.h due to flattening of tree.h.
* fortran/decl.c: ditto.
* fortran/f95.c: Include hash-set.h, machmode.h, vec.h, double-int.h,
input.h, alias.h, symtab.h, options.h, fold-const.h,
wide-int.h, and inchash.h due to flattening of tree.h.
* fortran/iresolve.c: ditto.
* fortran/match.c: Include hash-set.h, machmode.h, vec.h, double-int.h,
input.h, alias.h, symtab.h, fold-const.h,
wide-int.h, and inchash.h due to flattening of tree.h.
* fortran/module.c: Include hash-set.h, machmode.h, vec.h, double-int.h,
input.h, alias.h, symtab.h, options.h, fold-const.h,
wide-int.h, and inchash.h due to flattening of tree.h.
* fortran/options.c: ditto.
* fortran/target-memory.c: Include hash-set.h, vec.h,
double-int.h, input.h, alias.h, symtab.h, fold-const.h,
wide-int.h, and inchash.h due to flattening of tree.h.
* fortran/trans-array.c: Include hash-set.h, machmode.h, vec.h, double-int.h,
input.h, alias.h, symtab.h, options.h, fold-const.h,
wide-int.h, and inchash.h due to flattening of tree.h.
* fortran/trans.c: Include hash-set.h, machmode.h, vec.h, double-int.h,
input.h, alias.h, symtab.h, options.h, fold-const.h,
wide-int.h, and inchash.h due to flattening of tree.h.
* fortran/trans-common.c: Include hash-set.h, machmode.h, vec.h, double-int.h,
input.h, alias.h, symtab.h, fold-const.h,
wide-int.h, and inchash.h due to flattening of tree.h.
* fortran/trans-const.c: ditto.
* fortran/trans-decl.c: ditto.
* fortran/trans-expr.c: Include hash-set.h, machmode.h, vec.h, double-int.h,
input.h, alias.h, symtab.h, options.h, fold-const.h,
wide-int.h, and inchash.h due to flattening of tree.h.
* fortran/trans-intrinsic.c: Include hash-set.h, machmode.h, vec.h, double-int.h,
input.h, alias.h, symtab.h, fold-const.h,
wide-int.h, inchash.h and real.h due to flattening of tree.h.
* fortran/trans-io.c: Include hash-set.h, machmode.h, vec.h, double-int.h,
input.h, alias.h, symtab.h, options.h, fold-const.h,
wide-int.h, and inchash.h due to flattening of tree.h.
* fortran/trans-openmp.c: ditto.
* fortran/trans-stmt.c: ditto.
* fortran/trans-types.c: Include hash-set.h, machmode.h, vec.h, double-int.h,
input.h, alias.h, symtab.h, fold-const.h,
wide-int.h, inchash.h and real.h due to flattening of tree.h.
* function.c: Include hash-set.h, machmode.h, vec.h, double-int.h,
input.h, alias.h, symtab.h, fold-const.h,
wide-int.h, and inchash.h due to flattening of tree.h.
* gcc-plugin.h: Include statistics.h, double-int.h, real.h, fixed-value.h,
alias.h, flags.h, and symtab.h due to flattening of tree.h
* gcse.c: ditto.
* generic-match-head.c: Include hash-set.h, machmode.h, vec.h, double-int.h,
input.h, alias.h, symtab.h, options.h, fold-const.h,
wide-int.h, and inchash.h due to flattening of tree.h.
* ggc-page.c: Include hash-set.h, machmode.h, vec.h, double-int.h,
input.h, alias.h, symtab.h, fold-const.h,
wide-int.h, and inchash.h due to flattening of tree.h.
* gimple-builder.c: ditto.
* gimple.c: ditto.
* gimple-expr.c: ditto.
* gimple-fold.c: ditto.
* gimple-iterator.c: ditto.
* gimple-low.c: ditto.
* gimple-match-head.c: ditto.
* gimple-pretty-print.c: ditto.
* gimple-ssa-isolate-paths.c: Include hash-set.h, machmode.h, vec.h, double-int.h,
input.h, alias.h, symtab.h, options.h, fold-const.h,
wide-int.h, and inchash.h due to flattening of tree.h.
* gimple-ssa-strength-reduction.c: ditto.
* gimple-streamer-in.c: ditto.
* gimple-streamer-out.c: ditto.
* gimple-walk.c: Include hash-set.h, machmode.h, vec.h, double-int.h,
input.h, alias.h, symtab.h, fold-const.h,
wide-int.h, and inchash.h due to flattening of tree.h.
* gimplify.c: Include hash-set.h, machmode.h, vec.h, double-int.h,
input.h, alias.h, symtab.h, options.h, fold-const.h,
wide-int.h, and inchash.h due to flattening of tree.h.
* gimplify-me.c: ditto.
* go/go-gcc.cc: ditto.
* go/go-lang.c: ditto.
* go/gdump.c: ditto.
* graphite-blocking.c: ditto.
* graphite.c: ditto.
* graphite-dependencies.c: ditto.
* graphite-interchange.c: ditto.
* graphite-isl-ast-to-gimple.c: ditto.
* graphite-optimize-isl.c: ditto.
* graphite-poly.c: ditto.
* graphite-scop-detection.c: ditto.
* graphite-sese-to-poly.c: ditto.
* hw-doloop.c: Include symtab.h due to flattening of tree.h.
* ifcvt.c: ditto.
* init-regs.c: Include hash-set.h, machmode.h, vec.h, double-int.h,
input.h, alias.h, symtab.h, fold-const.h,
wide-int.h, and inchash.h due to flattening of tree.h.
* internal-fn.c: Include hash-set.h, machmode.h, vec.h, double-int.h,
input.h, alias.h, symtab.h, options.h, fold-const.h,
wide-int.h, and inchash.h due to flattening of tree.h.
* ipa.c: ditto.
* ipa-chkp.c: ditto.
* ipa-comdats.c: Include hash-set.h, machmode.h, vec.h, double-int.h,
input.h, alias.h, symtab.h, fold-const.h,
wide-int.h, and inchash.h due to flattening of tree.h.
* ipa-cp.c: Include hash-set.h, machmode.h, vec.h, double-int.h,
input.h, alias.h, symtab.h,options.h, fold-const.h,
wide-int.h, and inchash.h due to flattening of tree.h.
* ipa-devirt.c: Include hash-set.h, machmode.h, vec.h, double-int.h,
input.h, alias.h, symtab.h, fold-const.h,
wide-int.h, and inchash.h due to flattening of tree.h.
* ipa-icf.c: Include hash-set.h, machmode.h, vec.h, double-int.h,
input.h, alias.h, symtab.h,options.h, fold-const.h,
wide-int.h, and inchash.h due to flattening of tree.h.
* ipa-icf-gimple.c: ditto.
* ipa-inline-analysis.c: Include hash-set.h, machmode.h, vec.h, double-int.h,
input.h, alias.h, symtab.h, fold-const.h,
wide-int.h, and inchash.h due to flattening of tree.h.
* ipa-inline.c: ditto.
* ipa-inline-transform.c: ditto.
* ipa-polymorphic-call.c: ditto.
* ipa-profile.c: ditto.
* ipa-prop.c: Include hash-set.h, machmode.h, vec.h, double-int.h,
input.h, alias.h, symtab.h, options.h, fold-const.h,
wide-int.h, and inchash.h due to flattening of tree.h.
* ipa-pure-const.c: Include hash-set.h, machmode.h, vec.h, double-int.h,
input.h, alias.h, symtab.h, fold-const.h,
wide-int.h, and inchash.h due to flattening of tree.h.
* ipa-ref.c: Include hash-set.h, machmode.h, vec.h, double-int.h,
input.h, alias.h, symtab.h, options.h, fold-const.h,
wide-int.h, and inchash.h due to flattening of tree.h.
* ipa-reference.c: ditto.
* ipa-split.c: ditto.
* ipa-utils.c: ditto.
* ipa-visibility.c: Include hash-set.h, machmode.h, vec.h, double-int.h,
input.h, alias.h, symtab.h, fold-const.h,
wide-int.h, and inchash.h due to flattening of tree.h.
* ira.c: ditto.
* ira-color.c: Include hash-set.h due to flattening of tree.h.
* ira-costs.c: ditto.
* ira-emit.c: ditto.
* java/boehm.c: Include hash-set.h, machmode.h, vec.h, double-int.h,
input.h, alias.h, symtab.h, options.h, fold-const.h,
wide-int.h, and inchash.h due to flattening of tree.h.
* java/builtins.c: ditto.
* java/class.c: ditto.
* java/constants.c: Include hash-set.h, machmode.h, vec.h, double-int.h,
input.h, alias.h, symtab.h, options.h, fold-const.h,
wide-int.h, and inchash.h due to flattening of tree.h.
* java/decl.c: ditto.
* java/except.c: Include hash-set.h, machmode.h, vec.h, double-int.h,
input.h, alias.h, symtab.h, fold-const.h,
wide-int.h, and inchash.h due to flattening of tree.h.
* java/expr.c: Include hash-set.h, machmode.h, vec.h, double-int.h,
input.h, alias.h, symtab.h, fold-const.h,
wide-int.h,inchash.h and real.h due to flattening of tree.h.
* java/gimplify.c: Include hash-set.h, machmode.h, vec.h, double-int.h,
input.h, alias.h, symtab.h, options.h, fold-const.h,
wide-int.h, and inchash.h due to flattening of tree.h.
* java/jcf-dump.c: ditto.
* java/jcf-io.c: ditto.
* java/jcf-parse.c: ditto.
* java/jvgenmain.c: ditto.
* java/lang.c: Include hash-set.h, machmode.h, vec.h, double-int.h,
input.h, alias.h, symtab.h, fold-const.h,
wide-int.h, and inchash.h due to flattening of tree.h.
* java/mangle.c: Include hash-set.h, machmode.h, vec.h, double-int.h,
input.h, alias.h, symtab.h, options.h, fold-const.h,
wide-int.h, and inchash.h due to flattening of tree.h.
* java/mangle_name.c: ditto.
* java/resource.c: ditto.
* java/typeck.c: ditto.
* java/verify-glue.c: ditto.
* java/verify-impl.c: ditto.
* jump.c: Include symtab.h due to flattening of tree.h.
* langhooks.c: Include hash-set.h, machmode.h, vec.h, double-int.h,
input.h, alias.h, symtab.h, fold-const.h,
wide-int.h, and inchash.h due to flattening of tree.h.
* loop-doloop.c: Include symtab.h due to flattening of tree.h.
* loop-init.c: Include hash-set.h, machmode.h, vec.h, double-int.h,
input.h, alias.h, symtab.h, fold-const.h,
wide-int.h, and inchash.h due to flattening of tree.h.
* loop-invariant.c: Include symtab.h due to flattening of tree.h.
* loop-iv.c: ditto.
* loop-unroll.c: Include hash-set.h, machmode.h, vec.h, double-int.h,
input.h, alias.h, symtab.h, fold-const.h,
wide-int.h, and inchash.h due to flattening of tree.h.
* lower-subreg.c: ditto.
* lra-assigns.c: Include symtab.h due to flattening of tree.h.
* lra.c: Include symtab.h, fold-const.h, wide-int.h and inchash.h
due to flattening of tree.h.
* lra-coalesce.c: Include symtab.h due to flattening of tree.h.
* lra-constraints.c: ditto.
* lra-eliminations.c: ditto.
* lra-lives.c: ditto.
* lra-remat.c: ditto.
* lra-spills.c: ditto.
* lto/lto.c: Include hash-set.h, machmode.h, vec.h, double-int.h,
input.h, alias.h, symtab.h, options, fold-const.h,
wide-int.h, and inchash.h due to flattening of tree.h.
* lto/lto-lang.c: Include hash-set.h, machmode.h, vec.h, double-int.h,
input.h, alias.h, symtab.h, fold-const.h,
wide-int.h, and inchash.h due to flattening of tree.h.
* lto/lto-object.c: Include hash-set.h, machmode.h, vec.h, double-int.h,
input.h, alias.h, symtab.h, options, fold-const.h,
wide-int.h, and inchash.h due to flattening of tree.h.
* lto/lto-partition.c: ditto.
* lto/lto-symtab.c: ditto.
* lto-cgraph.c: Include hash-set.h, machmode.h, vec.h, double-int.h,
input.h, alias.h, symtab.h, fold-const.h,
wide-int.h, and inchash.h due to flattening of tree.h.
* lto-compress.c: Include hash-set.h, machmode.h, vec.h, double-int.h,
input.h, alias.h, symtab.h, options, fold-const.h,
wide-int.h, and inchash.h due to flattening of tree.h.
* lto-opts.c: ditto.
* lto-section-in.c: Include hash-set.h, machmode.h, vec.h, double-int.h,
input.h, alias.h, symtab.h, fold-const.h,
wide-int.h, and inchash.h due to flattening of tree.h.
* lto-section-out.c: ditto.
* lto-streamer.c: ditto.
* lto-streamer-in.c: ditto.
* lto-streamer-out.c: ditto.
* modulo-sched.c: Include hash-set.h, machmode.h, vec.h, double-int.h,
input.h, alias.h, symtab.h, fold-const.h, wide-int.h, and
inchash.h due to flattening of tree.h.
* objc/objc-act.c: Include hash-set.h, machmode.h, vec.h, double-int.h,
input.h, alias.h, symtab.h, options, fold-const.h,
wide-int.h, and inchash.h due to flattening of tree.h.
* objc/objc-encoding.c: ditto.
* objc/objc-gnu-runtime-abi-01.c: ditto.
* objc/objc-lang.c: ditto.
* objc/objc-map.c: ditto.
* objc/objc-next-runtime-abi-01.c: ditto.
* objc/objc-next-runtime-abi-02.c: ditto.
* objc/objc-runtime-shared-support.c: ditto.
* objcp/objcp-decl.c: ditto.
* objcp/objcp-lang.c: ditto.
* omega.c: ditto.
* omp-low.c: Include hash-set.h, machmode.h, vec.h, double-int.h,
input.h, alias.h, symtab.h, fold-const.h, wide-int.h, and
inchash.h due to flattening of tree.h.
* optabs.c: ditto.
* opts-global.c: ditto.
* passes.c: ditto.
* plugin.c: Include hash-set.h, machmode.h, vec.h, double-int.h,
input.h, alias.h, symtab.h, options.h, fold-const.h, wide-int.h, and
inchash.h due to flattening of tree.h.
* postreload.c: Include symtab.h due to flattening of tree.h.
* postreload-gcse.c: Include hash-set.h, machmode.h, vec.h, double-int.h,
input.h, alias.h, symtab.h, fold-const.h, wide-int.h, and
inchash.h due to flattening of tree.h.
* predict.c: ditto.
* print-rtl.c: ditto.
* print-tree.c: ditto.
* profile.c: Include symtab.h, fold-const.h
and inchash.h due to flattening of tree.h.
* real.c: Include hash-set.h, machmode.h, vec.h, double-int.h,
input.h, alias.h, symtab.h, fold-const.h, wide-int.h, and
inchash.h due to flattening of tree.h.
* realmpfr.c: Include hash-set.h, machmode.h, vec.h, double-int.h,
input.h, alias.h, symtab.h, options.h, fold-const.h, wide-int.h, and
inchash.h due to flattening of tree.h.
* recog.c: Include hash-set.h, machmode.h, vec.h, double-int.h,
input.h, alias.h, symtab.h, fold-const.h, wide-int.h, and
inchash.h due to flattening of tree.h.
* ree.c: ditto.
* reginfo.c: ditto.
* reg-stack.c: ditto.
* reload1.c: Include symtab.h, fold-const.h, wide-int.h
and inchash.h due to flattening of tree.h.
* reload.c: Include symtab.h due to flattening of tree.h.
* reorg.c: ditto.
* rtlanal.c: Include symtab.h, fold-const.h, wide-int.h
and inchash.h due to flattening of tree.h.
* rtl-chkp.c: Include hash-set.h, machmode.h, vec.h, double-int.h,
input.h, alias.h, symtab.h, fold-const.h, wide-int.h, and
inchash.h due to flattening of tree.h.
* rtlhooks.c: Include symtab.h due to flattening of tree.h.
* sanopt.c: Include hash-set.h, machmode.h, vec.h, double-int.h,
input.h, alias.h, symtab.h, options.h, fold-const.h, wide-int.h, and
inchash.h due to flattening of tree.h.
* sched-deps.c: Include hash-set.h, machmode.h, vec.h, double-int.h,
input.h, alias.h, symtab.h, options.h, fold-const.h, wide-int.h, and
inchash.h due to flattening of tree.h.
* sched-vis.c: ditto.
* sdbout.c: ditto.
* sel-sched.c: Include symtab.h, fold-const.h, wide-int.h
and inchash.h due to flattening of tree.h.
* sel-sched-ir.c: ditto.
* sese.c: Include hash-set.h, machmode.h, vec.h, double-int.h,
input.h, alias.h, symtab.h, options.h, fold-const.h, wide-int.h, and
inchash.h due to flattening of tree.h.
* shrink-wrap.c: Include hash-set.h, machmode.h, vec.h, double-int.h,
input.h, alias.h, symtab.h, options.h, fold-const.h, wide-int.h, and
inchash.h due to flattening of tree.h.
* simplify-rtx.c: Include hash-set.h, machmode.h, vec.h, double-int.h,
input.h, alias.h, symtab.h, options.h, fold-const.h, wide-int.h, and
inchash.h due to flattening of tree.h.
* stack-ptr-mod.c: ditto.
* stmt.c: ditto.
* store-motion.c: ditto.
	* stor-layout.c: ditto.
* stringpool.c: ditto.
* symtab.c: ditto.
* target-globals.c: ditto.
* targhooks.c: ditto.
* toplev.c: ditto.
* tracer.c: ditto.
* trans-mem.c: Include hash-set.h, machmode.h, vec.h, double-int.h,
input.h, alias.h, symtab.h, options.h, fold-const.h, wide-int.h, and
inchash.h due to flattening of tree.h.
* tree-affine.c: ditto.
* tree-browser.c: ditto.
* tree.c: Include hash-set.h, machmode.h, vec.h, double-int.h,
input.h, alias.h, symtab.h, options.h, fold-const.h, wide-int.h, and
inchash.h due to flattening of tree.h.
* tree-call-cdce.c: Include symtab.h, alias.h, double-int.h,
fold-const.h, wide-int.h, inchash.h and real.h due to
flattening of tree.h.
* tree-cfg.c: Include hash-set.h, machmode.h, vec.h, double-int.h,
input.h, alias.h, symtab.h, fold-const.h, wide-int.h, and
inchash.h due to flattening of tree.h.
* tree-cfgcleanup.c: ditto.
* tree-chkp.c: Include hash-set.h, machmode.h, vec.h, double-int.h,
input.h, alias.h, symtab.h, options.h, fold-const.h, wide-int.h, and
inchash.h due to flattening of tree.h.
* tree-chkp-opt.c: ditto.
* tree-chrec.c: ditto.
* tree-chkp-opt.c: Include hash-set.h, machmode.h, vec.h, double-int.h,
input.h, alias.h, symtab.h, fold-const.h, wide-int.h, inchash.h and
real.h due to flattening of tree.h.
* tree-core.h: Flatten header file by removing all #include statements.
* tree-data-ref.c: Include hash-set.h, machmode.h, vec.h, double-int.h,
input.h, alias.h, symtab.h, options.h, fold-const.h, wide-int.h, and
inchash.h due to flattening of tree.h.
* tree-dfa.c: Include hash-set.h, machmode.h, vec.h, double-int.h,
input.h, alias.h, symtab.h, fold-const.h, wide-int.h, inchash.h and
real.h due to flattening of tree.h.
* tree-diagnostic.c: ditto.
* tree-dump.c: Include hash-set.h, machmode.h, vec.h, double-int.h,
input.h, alias.h, symtab.h, fold-const.h, wide-int.h, inchash.h, real.h and
fixed-value.h due to flattening of tree.h.
* tree-dfa.c: ditto.
* tree-eh.c: Include hash-set.h, machmode.h, vec.h, double-int.h,
input.h, alias.h, symtab.h, fold-const.h, wide-int.h, inchash.h and
real.h due to flattening of tree.h.
* tree-emutls.c: Include hash-set.h, machmode.h, vec.h, double-int.h,
input.h, alias.h, symtab.h, options.h, fold-const.h, wide-int.h, and
inchash.h due to flattening of tree.h.
* tree.h: Flatten header files by removing all includes except tree-core.h.
Remove inline function convert_to_ptrofftype_loc to relocate to fold-const.c.
Remove macro convert_to_ptrofftype to relocate to fold-const.h.
Remove inline function fold_build_pointer_plus_loc to relocate to fold-const.c.
Remove macro fold_build_pointer_plus to relocate to fold-const.h.
Remove inline function fold_build_pointer_plus_hwi_loc to relocate to fold-const.c.
Remove macro fold_build_pointer_plus_hwi to relocate to fold-const.h.
* tree-if-conv.c: Include hash-set.h, machmode.h, vec.h, double-int.h,
input.h, alias.h, symtab.h, fold-const.h, wide-int.h, inchash.h, real.h and
fixed-value.h due to flattening of tree.h.
* tree-inline.c: ditto.
* tree-into-ssa.c: ditto.
* tree-iterator.c: Include hash-set.h, machmode.h, vec.h, double-int.h,
input.h, alias.h, symtab.h, options.h, fold-const.h, wide-int.h, and
inchash.h due to flattening of tree.h.
* tree-loop-distribution.c: ditto.
* tree-nested.c: Include hash-set.h, machmode.h, vec.h, double-int.h,
input.h, alias.h, symtab.h, fold-const.h, wide-int.h and inchash.h
due to flattening of tree.h.
* tree-nrv.c: ditto.
* tree-object-size.c: ditto.
* tree-outof-ssa.c: ditto.
* tree-parloops.c: Include hash-set.h, machmode.h, vec.h, double-int.h,
input.h, alias.h, symtab.h, options.h, fold-const.h, wide-int.h, and
inchash.h due to flattening of tree.h.
* tree-phinodes.c: ditto.
* tree-predcom.c: Include hash-set.h, machmode.h, vec.h, double-int.h,
input.h, alias.h, symtab.h, fold-const.h, wide-int.h and inchash.h
due to flattening of tree.h.
* tree-pretty-print.c: ditto.
	* tree-profile.c: Include double-int.h, input.h, alias.h, symtab.h,
fold-const.h, wide-int.h and inchash.h due to flattening of tree.h.
* tree-scalar-evolution.c: Include hash-set.h, machmode.h, vec.h,
double-int.h, input.h, alias.h, symtab.h, options.h, fold-const.h,
wide-int.h and inchash.h due to flattening of tree.h.
* tree-sra.c: Include vec.h, double-int.h,
input.h, alias.h, symtab.h, fold-const.h, wide-int.h, and
inchash.h due to flattening of tree.h.
* tree-ssa-alias.c: Include hash-set.h, machmode.h, vec.h, double-int.h,
input.h, alias.h, symtab.h, fold-const.h, wide-int.h and inchash.h
due to flattening of tree.h.
* tree-ssa.c: ditto.
* tree-ssa-ccp.c: Include hash-set.h, machmode.h, vec.h, double-int.h,
input.h, alias.h, symtab.h, fold-const.h, wide-int.h, inchash.h
and real.h due to flattening of tree.h.
* tree-ssa-coalesce.c: Include hash-set.h, machmode.h, vec.h, double-int.h,
input.h, alias.h, symtab.h, fold-const.h, wide-int.h and inchash.h
due to flattening of tree.h.
* tree-ssa-copy.c: ditto.
* tree-ssa-copyrename.c: ditto.
* tree-ssa-dce.c: ditto.
* tree-ssa-dom.c: Include hash-set.h, machmode.h, vec.h, double-int.h,
input.h, alias.h, symtab.h, fold-const.h, wide-int.h, inchash.h
and real.h due to flattening of tree.h.
* tree-ssa-dse.c: Include hash-set.h, machmode.h, vec.h, double-int.h,
input.h, alias.h, symtab.h, fold-const.h, wide-int.h and inchash.h
due to flattening of tree.h.
* tree-ssa-forwprop.c: ditto.
* tree-ssa-ifcombine.c: ditto.
* tree-ssa-live.c: ditto.
* tree-ssa-loop.c: ditto.
* tree-ssa-loop-ch.c: ditto.
* tree-ssa-loop-im.c: ditto.
* tree-ssa-loop-ivcanon.c: ditto.
* tree-ssa-loop-ivopts.c: ditto.
* tree-ssa-loop-manip.c: ditto.
* tree-ssa-loop-niter.c: ditto.
* tree-ssa-loop-prefetch.c: ditto.
* tree-ssa-loop-unswitch.c: ditto.
	* tree-ssa-math-opts.c: ditto.
* tree-ssanames.c: ditto.
* tree-ssa-operands.c: ditto.
* tree-ssa-phiopt.c: ditto.
* tree-ssa-phiprop.c: ditto.
* tree-ssa-pre.c: ditto.
* tree-ssa-propagate.c: ditto.
* tree-ssa-reassoc.c: ditto.
* tree-ssa-sccvn.c: ditto.
* tree-ssa-sink.c: ditto.
* tree-ssa-strlen.c: Include hash-set.h, machmode.h, vec.h,
double-int.h, input.h, alias.h, symtab.h, options.h, fold-const.h,
wide-int.h and inchash.h due to flattening of tree.h.
	* tree-ssa-structalias.c: Include double-int.h, input.h, alias.h, symtab.h,
fold-const.h, wide-int.h and inchash.h due to flattening of tree.h.
* tree-ssa-tail-merge.c: Include hash-set.h, machmode.h, vec.h,
double-int.h, input.h, alias.h, symtab.h, fold-const.h,
wide-int.h and inchash.h due to flattening of tree.h.
* tree-ssa-ter.c: ditto.
* tree-ssa-threadedge.c: ditto.
* tree-ssa-threadupdate.c: Include hash-set.h, machmode.h, vec.h,
double-int.h, input.h, alias.h, symtab.h, options.h, fold-const.h,
wide-int.h and inchash.h due to flattening of tree.h.
* tree-ssa-uncprop.c: Include hash-set.h, machmode.h, vec.h,
double-int.h, input.h, alias.h, symtab.h, fold-const.h,
wide-int.h and inchash.h due to flattening of tree.h.
* tree-ssa-uninit.c: ditto.
* tree-stdarg.c: Include vec.h, double-int.h, input.h, alias.h,
symtab.h, fold-const.h, wide-int.h and inchash.h due to flattening
of tree.h.
* tree-streamer.c: Include vec.h, double-int.h, input.h, alias.h,
symtab.h, options.h, fold-const.h, wide-int.h and
inchash.h due to flattening of tree.h.
* tree-streamer-in.c: Include hash-set.h, machmode.h, vec.h,
double-int.h, input.h, alias.h, symtab.h, options.h, fold-const.h,
wide-int.h, inchash.h, real.h and fixed-value.h due to flattening
of tree.h.
	* tree-streamer-out.c: ditto.
* tree-switch-conversion.c: Include hash-set.h, machmode.h, vec.h, double-int.h,
input.h, alias.h, symtab.h, fold-const.h, wide-int.h and inchash.h
due to flattening of tree.h.
* tree-tailcall.c: ditto.
* tree-vect-data-refs.c: ditto.
* tree-vect-generic.c: Include hash-set.h, machmode.h, vec.h, double-int.h, input.h,
alias.h, symtab.h, options.h, fold-const.h, wide-int.h and inchash.h
due to flattening of tree.h.
* tree-vect-loop.c: Include hash-set.h, machmode.h, vec.h, double-int.h,
input.h, alias.h, symtab.h, fold-const.h, wide-int.h and inchash.h
due to flattening of tree.h.
* tree-vect-loop-manip.c: ditto.
* tree-vectorizer.c: ditto.
* tree-vect-patterns.c: ditto.
* tree-vect-slp.c: ditto.
* tree-vect-stmts.c: ditto.
* tree-vrp.c: ditto.
* tsan.c: Include hash-set.h, machmode.h, vec.h, double-int.h,
input.h, alias.h, symtab.h, options.h, fold-const.h, wide-int.h and inchash.h
due to flattening of tree.h.
* ubsan.c: ditto.
	* value-prof.c: Include hash-set.h, machmode.h, vec.h, double-int.h,
input.h, alias.h, symtab.h, fold-const.h, wide-int.h and inchash.h
due to flattening of tree.h.
* varasm.c: ditto.
* varpool.c: ditto.
* var-tracking.c: ditto.
* vmsdbgout.c: ditto.
* vtable-verify.c: Include hash-set.h, machmode.h, vec.h, double-int.h,
input.h, alias.h, symtab.h, options.h, fold-const.h, wide-int.h and inchash.h
due to flattening of tree.h.
* wide-int.cc: Include hash-set.h, machmode.h, vec.h, double-int.h,
input.h, alias.h, symtab.h, fold-const.h, wide-int.h and inchash.h
due to flattening of tree.h.
* xcoffout.c: ditto.
* libcc1/plugin.cc: Include hash-set.h, machmode.h, vec.h, double-int.h,
input.h, alias.h, symtab.h, options.h, fold-const.h, wide-int.h and inchash.h
due to flattening of tree.h.
From-SVN: r219402
2015-01-09 21:18:42 +01:00
|
|
|
#include "fold-const.h"
|
2014-10-27 13:41:01 +01:00
|
|
|
#include "cfganal.h"
|
gimple-expr.h (create_tmp_var_name, [...]): Relocate prototypes from gimple.h.
2013-11-12 Andrew MacLeod <amacleod@redhat.com>
* gimple-expr.h (create_tmp_var_name, create_tmp_var_raw,
create_tmp_var, create_tmp_reg, mark_addressable, is_gimple_reg_rhs):
Relocate prototypes from gimple.h.
* gimplify.h: New File. Relocate some prototypes from gimple.h here.
(gimple_predicate, enum fallback, enum gimplify_status): Relocate
from gimple.h.
* gimple.h: Move some prototypes to gimplify.h.
(gimple_predicate, enum fallback, enum gimplify_status): Move to
gimplify.h.
(gimple_do_not_emit_location_p, gimple_set_do_not_emit_location):
	Relocate from gimplify.c.
* gimple-expr.c (remove_suffix, tmp_var_id_num, create_tmp_var_name,
create_tmp_var_raw, create_tmp_var, create_tmp_reg, mark_addressable,
	is_gimple_reg_rhs): Relocate from gimplify.c.
* gimplify.c (mark_addressable): Move to gimple-expr.c.
(gimple_seq_add_stmt_without_update): Move to gimple.c.
(remove_suffix, tmp_var_id_num, create_tmp_var_name, create_tmp_var_raw,
create_tmp_var, create_tmp_reg, is_gimple_reg_rhs): Move to
gimple-expr.c.
(should_carry_location_p): Move to gimple.c.
(gimple_do_not_emit_location_p, gimple_set_do_not_emit_location): Move
to gimple.h.
(annotate_one_with_location, annotate_all_with_location_after,
annotate_all_with_location): Move to gimple.c.
(compare_case_labels, sort_case_labels,
preprocess_case_label_vec_for_gimple): Move to gimple.c.
(rhs_predicate_for): Make static.
(gimplify_assign): Relocate from gimple.c.
* gimple.c (gimplify_assign): Move to gimplify.c.
(gimple_seq_add_stmt_without_update, should_carry_location_p,
annotate_one_with_location, annotate_all_with_location_after,
annotate_all_with_location, compare_case_labels, sort_case_labels,
preprocess_case_label_vec_for_gimple): Relocate from gimplify.c.
* tree.h (unshare_expr, unshare_expr_without_location,
mark_addressable): Move prototypes to gimplify.h.
* Makefile.in (GTFILES): gimple-expr.c now has the GTY tag for
tmp_var_id_num
* asan.c: Include gimplify.h rather than gimple.h.
* cfgloopmanip.c: Likewise.
* cgraphunit.c: Likewise.
* cilk-common.c: Likewise.
	* dwarf2out.c: Don't include gimple.h.
* fold-const.c: Include gimplify.h rather than gimple.h.
* function.c: Likewise.
* gimple-fold.c: Likewise.
* gimple-ssa-strength-reduction.c: Likewise.
* graphite-clast-to-gimple.c: Likewise.
* graphite-sese-to-poly.c: Likewise.
* ipa-prop.c: Likewise.
* ipa-split.c: Likewise.
* ipa.c: Likewise.
	* langhooks.c: Don't include gimple.h.
* loop-init.c: Include gimplify.h rather than gimple.h.
* omp-low.c: Likewise.
* sese.c: Likewise.
* stor-layout.c: Likewise.
* targhooks.c: Likewise.
* trans-mem.c: Likewise.
* tree-affine.c: Likewise.
* tree-cfg.c: Likewise.
* tree-cfgcleanup.c: Likewise.
* tree-complex.c: Likewise.
* tree-if-conv.c: Likewise.
* tree-inline.c: Likewise.
* tree-iterator.c: Likewise.
* tree-loop-distribution.c: Likewise.
* tree-nested.c: Likewise.
* tree-parloops.c: Likewise.
* tree-predcom.c: Likewise.
* tree-profile.c: Likewise.
* tree-scalar-evolution.c: Likewise.
* tree-sra.c: Likewise.
* tree-ssa-address.c: Likewise.
* tree-ssa-ccp.c: Likewise.
* tree-ssa-dce.c: Likewise.
* tree-ssa-forwprop.c: Likewise.
* tree-ssa-ifcombine.c: Likewise.
* tree-ssa-loop-im.c: Likewise.
* tree-ssa-loop-ivopts.c: Likewise.
* tree-ssa-loop-manip.c: Likewise.
* tree-ssa-loop-niter.c: Likewise.
* tree-ssa-loop-prefetch.c: Likewise.
* tree-ssa-loop-unswitch.c: Likewise.
* tree-ssa-math-opts.c: Likewise.
* tree-ssa-phiopt.c: Likewise.
* tree-ssa-phiprop.c: Likewise.
* tree-ssa-pre.c: Likewise.
* tree-ssa-propagate.c: Likewise.
* tree-ssa-reassoc.c: Likewise.
* tree-ssa-sccvn.c: Likewise.
* tree-ssa-strlen.c: Likewise.
* tree-ssa.c: Likewise.
	* tree-switch-conversion.c: Likewise.
* tree-tailcall.c: Likewise.
* tree-vect-data-refs.c: Likewise.
* tree-vect-generic.c: Likewise.
* tree-vect-loop-manip.c: Likewise.
* tree-vect-loop.c: Likewise.
* tree-vect-patterns.c: Likewise.
* tree-vect-stmts.c: Likewise.
* tsan.c: Likewise.
* value-prof.c: Likewise.
* config/aarch64/aarch64.c: Include gimplify.h instead of gimple.h.
* config/alpha/alpha.c: Likewise.
* config/darwin.c: Likewise.
* config/i386/i386.c: Likewise.
* config/ia64/ia64.c: Likewise.
* config/mep/mep.c: Likewise.
* config/mips/mips.c: Likewise.
* config/rs6000/rs6000.c: Likewise.
* config/s390/s390.c: Likewise.
* config/sh/sh.c: Likewise.
* config/sparc/sparc.c: Likewise.
* config/spu/spu.c: Likewise.
* config/stormy16/stormy16.c: Likewise.
* config/tilegx/tilegx.c: Likewise.
* config/tilepro/tilepro.c: Likewise.
* config/xtensa/xtensa.c: Likewise.
* ada/gcc-interface/trans.c: Include gimplify.h.
* c/c-typeck.c: Include gimplify.h.
* c-family/c-common.c: Include gimplify.h.
* c-family/c-gimplify.c: Likewise.
* c-family/cilk.c: Likewise.
* c-family/c-omp.c: Include gimple-expr.h instead of gimple.h.
* c-family/c-ubsan.c: Don't include gimple.h.
* cp/class.c: Include gimplify.h.
* cp/cp-gimplify.c: Likewise.
* cp/error.c: Likewise.
* cp/init.c: Likewise.
* cp/optimize.c: Likewise.
* cp/pt.c: Likewise.
* cp/semantics.c: Likewise.
* cp/tree.c: Likewise.
* cp/vtable-class-hierarchy.c: Likewise.
* cp/decl2.c: Don't include gimple.h.
* cp/except.c: Likewise.
* cp/method.c: Include pointer-set.h instead of gimple.h.
* fortran/f95-lang.c: Don't include gimple.h.
* fortran/trans-array.c: Include gimple-expr.h instead of gimple.h.
* fortran/trans.c: Likewise.
* fortran/trans-decl.c: Likewise.
* fortran/trans-expr.c: Include gimplify.h.
* fortran/trans-openmp.c: Likewise.
* go/go-lang.c: Include gimplify.h.
* java/java-gimplify.c: Include gimplify.h.
* objc/objc-act.c: Include gimplify.h.
From-SVN: r204717
2013-11-12 21:26:43 +01:00
|
|
|
#include "gimplify.h"
|
gimple-walk.h: New File.
* gimple-walk.h: New File. Relocate prototypes from gimple.h.
(struct walk_stmt_info): Relocate here from gimple.h.
* gimple-iterator.h: New File. Relocate prototypes from gimple.h.
(struct gimple_stmt_iterator_d): Relocate here from gimple.h.
(gsi_start_1, gsi_none, gsi_start_bb, gsi_last_1, gsi_last_bb,
gsi_end_p, gsi_one_before_end_p, gsi_next, gsi_prev, gsi_stmt,
gsi_after_labels, gsi_next_nondebug, gsi_prev_nondebug,
gsi_start_nondebug_bb, gsi_start_nondebug_after_labels_bb,
gsi_last_nondebug_bb, gsi_bb, gsi_seq): Relocate here from gimple.h.
* gimple.h (struct gimple_stmt_iterator_d): Move to gimple-iterator.h.
(gsi_start_1, gsi_none, gsi_start_bb, gsi_last_1, gsi_last_bb,
gsi_end_p, gsi_one_before_end_p, gsi_next, gsi_prev, gsi_stmt,
gsi_after_labels, gsi_next_nondebug, gsi_prev_nondebug,
gsi_start_nondebug_bb, gsi_start_nondebug_after_labels_bb,
gsi_last_nondebug_bb, gsi_bb, gsi_seq): Move to gimple-iterator.h.
(struct walk_stmt_info): Move to gimple-walk.h.
(gimple_seq_set_location): Move to gimple.c
* gimple-walk.c: New File.
(walk_gimple_seq_mod, walk_gimple_seq, walk_gimple_asm, walk_gimple_op,
walk_gimple_stmt, get_base_loadstore, walk_stmt_load_store_addr_ops,
walk_stmt_load_store_ops): Relocate here from gimple.c.
* gimple-iterator.c: Include gimple-iterator.h.
* gimple.c (walk_gimple_seq_mod, walk_gimple_seq, walk_gimple_asm,
walk_gimple_op, walk_gimple_stmt, get_base_loadstore,
walk_stmt_load_store_addr_ops, walk_stmt_load_store_ops): Move to
gimple-walk.c.
(gimple_seq_set_location): Relocate from gimple.h.
* tree-phinodes.h (set_phi_nodes): Move to tree-phinodes.c.
* tree-phinodes.c (set_phi_nodes): Relocate from tree-phinodes.h.
* gengtype.c (open_base_files): Add gimple-iterator.h to include list.
* Makefile.in (OBJS): Add gimple-walk.o
* asan.c: Update Include list as required for gimple-iterator.h and
gimple-walk.h.
* cfgexpand.c: Likewise.
* cfgloop.c: Likewise.
* cfgloopmanip.c: Likewise.
* cgraph.c: Likewise.
* cgraphbuild.c: Likewise.
* cgraphunit.c: Likewise.
* gimple-fold.c: Likewise.
* gimple-low.c: Likewise.
* gimple-pretty-print.c: Likewise.
* gimple-ssa-isolate-paths.c: Likewise.
* gimple-ssa-strength-reduction.c: Likewise.
* gimple-streamer-in.c: Likewise.
* gimple-streamer-out.c: Likewise.
* gimplify.c: Likewise.
* graphite-blocking.c: Likewise.
* graphite-clast-to-gimple.c: Likewise.
* graphite-dependences.c: Likewise.
* graphite-interchange.c: Likewise.
* graphite-optimize-isl.c: Likewise.
* graphite-poly.c: Likewise.
* graphite-scop-detection.c: Likewise.
* graphite-sese-to-poly.c: Likewise.
* graphite.c: Likewise.
* ipa-inline-analysis.c: Likewise.
* ipa-profile.c: Likewise.
* ipa-prop.c: Likewise.
* ipa-pure-const.c: Likewise.
* ipa-split.c: Likewise.
* lto-streamer-in.c: Likewise.
* lto-streamer-out.c: Likewise.
* omp-low.c: Likewise.
* predict.c: Likewise.
* profile.c: Likewise.
* sese.c: Likewise.
* tracer.c: Likewise.
* trans-mem.c: Likewise.
* tree-call-cdce.c: Likewise.
* tree-cfg.c: Likewise.
* tree-cfgcleanup.c: Likewise.
* tree-complex.c: Likewise.
* tree-data-ref.c: Likewise.
* tree-dfa.c: Likewise.
* tree-eh.c: Likewise.
* tree-emutls.c: Likewise.
* tree-if-conv.c: Likewise.
* tree-inline.c: Likewise.
* tree-into-ssa.c: Likewise.
* tree-loop-distribution.c: Likewise.
* tree-nested.c: Likewise.
* tree-nrv.c: Likewise.
* tree-object-size.c: Likewise.
* tree-outof-ssa.c: Likewise.
* tree-parloops.c: Likewise.
* tree-predcom.c: Likewise.
* tree-profile.c: Likewise.
* tree-scalar-evolution.c: Likewise.
* tree-sra.c: Likewise.
* tree-ssa-ccp.c: Likewise.
* tree-ssa-coalesce.c: Likewise.
* tree-ssa-copy.c: Likewise.
* tree-ssa-copyrename.c: Likewise.
* tree-ssa-dce.c: Likewise.
* tree-ssa-dom.c: Likewise.
* tree-ssa-dse.c: Likewise.
* tree-ssa-forwprop.c: Likewise.
* tree-ssa-ifcombine.c: Likewise.
* tree-ssa-live.c: Likewise.
* tree-ssa-loop-ch.c: Likewise.
* tree-ssa-loop-im.c: Likewise.
* tree-ssa-loop-ivcanon.c: Likewise.
* tree-ssa-loop-ivopts.c: Likewise.
* tree-ssa-loop-manip.c: Likewise.
* tree-ssa-loop-niter.c: Likewise.
* tree-ssa-loop-prefetch.c: Likewise.
* tree-ssa-loop.c: Likewise.
* tree-ssa-math-opts.c: Likewise.
* tree-ssa-phiopt.c: Likewise.
* tree-ssa-phiprop.c: Likewise.
* tree-ssa-pre.c: Likewise.
* tree-ssa-propagate.c: Likewise.
* tree-ssa-reassoc.c: Likewise.
* tree-ssa-sink.c: Likewise.
* tree-ssa-strlen.c: Likewise.
* tree-ssa-structalias.c: Likewise.
* tree-ssa-tail-merge.c: Likewise.
* tree-ssa-ter.c: Likewise.
* tree-ssa-threadedge.c: Likewise.
* tree-ssa-threadupdate.c: Likewise.
* tree-ssa-uncprop.c: Likewise.
* tree-ssa-uninit.c: Likewise.
* tree-ssa.c: Likewise.
* tree-stdarg.c: Likewise.
* tree-switch-conversion.c: Likewise.
* tree-tailcall.c: Likewise.
* tree-vect-data-refs.c: Likewise.
* tree-vect-generic.c: Likewise.
* tree-vect-loop-manip.c: Likewise.
* tree-vect-loop.c: Likewise.
* tree-vect-patterns.c: Likewise.
* tree-vect-slp.c: Likewise.
* tree-vect-stmts.c: Likewise.
* tree-vectorizer.c: Likewise.
* tree-vrp.c: Likewise.
* tree.c: Likewise.
* tsan.c: Likewise.
* value-prof.c: Likewise.
* vtable-verify.c: Likewise.
* config/aarch64/aarch64-builtins.c: Include gimple-iterator.h.
* config/rs6000/rs6000.c: Include gimple-iterator.h and gimple-walk.h.
* testsuite/g++.dg/plugin/selfassign.c: Include gimple-iterator.h.
* testsuite/gcc.dg/plugin/selfassign.c: Likewise.
From-SVN: r204763
2013-11-14 00:54:17 +01:00
|
|
|
#include "gimple-iterator.h"
|
2013-11-19 13:31:09 +01:00
|
|
|
#include "varasm.h"
|
|
|
|
#include "stor-layout.h"
|
2012-11-12 16:51:13 +01:00
|
|
|
#include "tree-iterator.h"
|
|
|
|
#include "asan.h"
|
dojump.h: New header file.
2015-10-15 Prathamesh Kulkarni <prathamesh.kulkarni@linaro.org>
* dojump.h: New header file.
* explow.h: Likewise.
* expr.h: Remove includes.
Move expmed.c prototypes to expmed.h.
Move dojump.c prototypes to dojump.h.
Move alias.c prototypes to alias.h.
Move explow.c prototypes to explow.h.
Move calls.c prototypes to calls.h.
Move emit-rtl.c prototypes to emit-rtl.h.
Move varasm.c prototypes to varasm.h.
Move stmt.c prototypes to stmt.h.
(saved_pending_stack_adjust): Move to dojump.h.
(adjust_address): Move to explow.h.
(adjust_address_nv): Move to emit-rtl.h.
(adjust_bitfield_address): Likewise.
(adjust_bitfield_address_size): Likewise.
(adjust_bitfield_address_nv): Likewise.
(adjust_automodify_address_nv): Likewise.
* explow.c (expr_size): Move to expr.c.
(int_expr_size): Likewise.
(tree_expr_size): Likewise.
Include calls.h dojump.h emit-rtl.h explow.h expmed.h
fixed-value.h hashtab.h statistics.h stmt.h varasm.h.
* genemit.c (main): Generate includes statistics.h, real.h, fixed-value.h,
insn-config.h, expmed.h, dojump.h, explow.h, emit-rtl.h, stmt.h.
* genopinit.c (main): Generate includes hashtab.h, hard-reg-set.h, function.h,
statistics.h, real.h, fixed-value.h, expmed.h, dojump.h, explow.h, emit-rtl.h,
stmt.h.
* genoutput.c (main): Generate includes hashtab.h, statistics.h, real.h,
fixed-value.h, expmed.h, dojump.h, explow.h, emit-rtl.h, stmt.h.
* genemit.c (open_base_files): Generate includes flags.h, statistics.h, real.h,
fixed-value.h, tree.h, expmed.h, dojump.h, explow.h, calls.h, emit-rtl.h, varasm.h,
stmt.h.
* config/tilepro/gen-mul-tables.cc: Generate includes hashtab.h, hash-set.h, vec.h,
machmode.h, tm.h, hard-reg-set.h, input.h, function.h, rtl.h, flags.h, statistics.h,
double-int.h, real.h, fixed-value.h, alias.h, wide-int.h, inchash.h, tree.h,
insn-config.h, expmed.h, dojump.h, explow.h, calls.h, emit-rtl.h, varasm.h, stmt.h.
* config/tilegx/mul-tables.c: Include alias.h calls.h dojump.h
double-int.h emit-rtl.h explow.h expmed.h fixed-value.h flags.h
function.h hard-reg-set.h hash-set.h hashtab.h inchash.h input.h
insn-config.h machmode.h real.h rtl.h statistics.h stmt.h symtab.h
tm.h tree.h varasm.h vec.h wide-int.h.
* rtlhooks.c: Include alias.h calls.h dojump.h double-int.h emit-rtl.h
explow.h expmed.h fixed-value.h flags.h function.h hard-reg-set.h
hash-set.h hashtab.h inchash.h input.h insn-config.h machmode.h
real.h statistics.h stmt.h tree.h varasm.h vec.h wide-int.h.
* cfgloopanal.c: Include alias.h calls.h dojump.h double-int.h emit-rtl.h
explow.h expmed.h fixed-value.h flags.h inchash.h insn-config.h
real.h statistics.h stmt.h tree.h varasm.h wide-int.h.
* loop-iv.c: Likewise.
* lra-assigns.c: Include alias.h calls.h dojump.h double-int.h emit-rtl.h
explow.h expmed.h fixed-value.h flags.h inchash.h real.h
statistics.h stmt.h tree.h varasm.h wide-int.h.
* lra-constraints.c: Likewise.
* lra-eliminations.c: Likewise.
* lra-lives.c: Likewise.
* lra-remat.c: Likewise.
* bt-load.c: Include alias.h calls.h dojump.h double-int.h emit-rtl.h
explow.h expmed.h fixed-value.h inchash.h insn-config.h real.h
statistics.h stmt.h tree.h varasm.h wide-int.h.
* hw-doloop.c: Likewise.
* ira-color.c: Likewise.
* ira-emit.c: Likewise.
* loop-doloop.c: Likewise.
* loop-invariant.c: Likewise.
* reload.c: Include alias.h calls.h dojump.h double-int.h emit-rtl.h
explow.h expmed.h fixed-value.h inchash.h real.h rtl.h
statistics.h stmt.h tree.h varasm.h wide-int.h.
* caller-save.c: Include alias.h calls.h dojump.h double-int.h emit-rtl.h
explow.h expmed.h fixed-value.h inchash.h real.h statistics.h
stmt.h tree.h varasm.h wide-int.h.
* combine-stack-adj.c: Likewise.
* cse.c: Likewise.
* ddg.c: Likewise.
* ifcvt.c: Likewise.
* ira-costs.c: Likewise.
* jump.c: Likewise.
* lra-coalesce.c: Likewise.
* lra-spills.c: Likewise.
* profile.c: Include alias.h calls.h dojump.h double-int.h emit-rtl.h
explow.h expmed.h fixed-value.h insn-config.h real.h statistics.h
stmt.h varasm.h wide-int.h.
* lra.c: Include alias.h calls.h dojump.h double-int.h emit-rtl.h
explow.h expmed.h fixed-value.h real.h statistics.h stmt.h
varasm.h.
* config/sh/sh_treg_combine.cc: Include alias.h calls.h dojump.h
double-int.h explow.h expmed.h fixed-value.h flags.h real.h
statistics.h stmt.h varasm.h wide-int.h.
* reorg.c: Include alias.h calls.h dojump.h double-int.h explow.h
expmed.h fixed-value.h inchash.h real.h statistics.h stmt.h tree.h
varasm.h wide-int.h.
* reload1.c: Include alias.h calls.h dojump.h double-int.h explow.h
expmed.h fixed-value.h real.h rtl.h statistics.h stmt.h varasm.h.
* config/tilegx/tilegx.c: Include alias.h dojump.h double-int.h
emit-rtl.h explow.h expmed.h fixed-value.h flags.h real.h
statistics.h stmt.h.
* config/tilepro/tilepro.c: Likewise.
* config/mmix/mmix.c: Include alias.h dojump.h double-int.h emit-rtl.h
explow.h expmed.h fixed-value.h real.h statistics.h stmt.h.
* config/pdp11/pdp11.c: Likewise.
* config/xtensa/xtensa.c: Likewise.
* config/lm32/lm32.c: Include alias.h dojump.h double-int.h emit-rtl.h
explow.h expmed.h fixed-value.h real.h statistics.h stmt.h
varasm.h.
* tree-chkp.c: Include calls.h dojump.h emit-rtl.h explow.h expmed.h
fixed-value.h flags.h function.h hard-reg-set.h hashtab.h
insn-config.h real.h rtl.h statistics.h stmt.h tm.h.
* cilk-common.c: Include calls.h dojump.h emit-rtl.h explow.h expmed.h
fixed-value.h flags.h function.h hard-reg-set.h hashtab.h
insn-config.h real.h rtl.h statistics.h stmt.h tm.h varasm.h.
* rtl-chkp.c: Likewise.
* tree-chkp-opt.c: Likewise.
* config/arm/arm-builtins.c: Include calls.h dojump.h emit-rtl.h explow.h
expmed.h fixed-value.h flags.h function.h hard-reg-set.h hashtab.h
insn-config.h real.h statistics.h stmt.h varasm.h.
* ipa-icf.c: Include calls.h dojump.h emit-rtl.h explow.h expmed.h
fixed-value.h flags.h hashtab.h insn-config.h real.h rtl.h
statistics.h stmt.h.
* tree-vect-data-refs.c: Likewise.
* graphite-sese-to-poly.c: Include calls.h dojump.h emit-rtl.h explow.h
expmed.h fixed-value.h flags.h hashtab.h insn-config.h real.h
rtl.h statistics.h stmt.h varasm.h.
* internal-fn.c: Likewise.
* ipa-icf-gimple.c: Likewise.
* lto-section-out.c: Likewise.
* tree-data-ref.c: Likewise.
* tree-nested.c: Likewise.
* tree-outof-ssa.c: Likewise.
* tree-predcom.c: Likewise.
* tree-pretty-print.c: Likewise.
* tree-scalar-evolution.c: Likewise.
* tree-ssa-strlen.c: Likewise.
* tree-vect-loop.c: Likewise.
* tree-vect-patterns.c: Likewise.
* tree-vect-slp.c: Likewise.
* tree-vect-stmts.c: Likewise.
* tsan.c: Likewise.
* targhooks.c: Include calls.h dojump.h emit-rtl.h explow.h expmed.h
fixed-value.h flags.h hashtab.h insn-config.h real.h statistics.h
stmt.h.
* config/sh/sh-mem.cc: Include calls.h dojump.h emit-rtl.h explow.h
expmed.h fixed-value.h flags.h hashtab.h insn-config.h real.h
statistics.h stmt.h varasm.h.
* loop-unroll.c: Likewise.
* ubsan.c: Likewise.
* tree-ssa-loop-prefetch.c: Include calls.h dojump.h emit-rtl.h explow.h
expmed.h fixed-value.h flags.h hashtab.h real.h rtl.h statistics.h
stmt.h varasm.h.
* dse.c: Include calls.h dojump.h emit-rtl.h explow.h expmed.h
fixed-value.h function.h hashtab.h statistics.h stmt.h varasm.h.
* tree-switch-conversion.c: Include calls.h dojump.h emit-rtl.h explow.h
expmed.h fixed-value.h hashtab.h insn-config.h real.h rtl.h
statistics.h stmt.h.
* generic-match-head.c: Include calls.h dojump.h emit-rtl.h explow.h
expmed.h fixed-value.h hashtab.h insn-config.h real.h rtl.h
statistics.h stmt.h varasm.h.
* gimple-match-head.c: Likewise.
* lto-cgraph.c: Likewise.
* lto-section-in.c: Likewise.
* lto-streamer-in.c: Likewise.
* lto-streamer-out.c: Likewise.
* tree-affine.c: Likewise.
* tree-cfg.c: Likewise.
* tree-cfgcleanup.c: Likewise.
* tree-if-conv.c: Likewise.
* tree-into-ssa.c: Likewise.
* tree-ssa-alias.c: Likewise.
* tree-ssa-copyrename.c: Likewise.
* tree-ssa-dse.c: Likewise.
* tree-ssa-forwprop.c: Likewise.
* tree-ssa-live.c: Likewise.
* tree-ssa-math-opts.c: Likewise.
* tree-ssa-pre.c: Likewise.
* tree-ssa-sccvn.c: Likewise.
* tree-tailcall.c: Likewise.
* tree-vect-generic.c: Likewise.
* tree-sra.c: Include calls.h dojump.h emit-rtl.h explow.h expmed.h
fixed-value.h hashtab.h insn-config.h real.h rtl.h stmt.h varasm.h.
* stor-layout.c: Include calls.h dojump.h emit-rtl.h explow.h expmed.h
fixed-value.h hashtab.h insn-config.h real.h statistics.h stmt.h.
* varasm.c: Likewise.
* coverage.c: Include calls.h dojump.h emit-rtl.h explow.h expmed.h
fixed-value.h hashtab.h insn-config.h real.h statistics.h stmt.h
varasm.h.
* init-regs.c: Likewise.
* ira.c: Likewise.
* omp-low.c: Likewise.
* stack-ptr-mod.c: Likewise.
* tree-ssa-reassoc.c: Likewise.
* tree-complex.c: Include calls.h dojump.h emit-rtl.h explow.h expmed.h
fixed-value.h hashtab.h insn-config.h rtl.h statistics.h stmt.h
varasm.h.
* dwarf2cfi.c: Include calls.h dojump.h emit-rtl.h explow.h expmed.h
fixed-value.h hashtab.h insn-config.h statistics.h stmt.h varasm.h.
* shrink-wrap.c: Include calls.h dojump.h emit-rtl.h explow.h expmed.h
fixed-value.h hashtab.h real.h rtl.h statistics.h stmt.h.
* recog.c: Include calls.h dojump.h emit-rtl.h explow.h expmed.h
fixed-value.h hashtab.h real.h rtl.h statistics.h stmt.h varasm.h.
* tree-ssa-phiopt.c: Likewise.
* config/darwin.c: Include calls.h dojump.h emit-rtl.h explow.h expmed.h
fixed-value.h hashtab.h real.h statistics.h stmt.h.
* config/fr30/fr30.c: Likewise.
* config/frv/frv.c: Likewise.
* expr.c: Likewise.
* final.c: Likewise.
* optabs.c: Likewise.
* passes.c: Likewise.
* simplify-rtx.c: Likewise.
* stmt.c: Likewise.
* toplev.c: Likewise.
* var-tracking.c: Likewise.
* gcse.c: Include calls.h dojump.h emit-rtl.h explow.h expmed.h
fixed-value.h hashtab.h real.h statistics.h stmt.h varasm.h.
* lower-subreg.c: Likewise.
* postreload-gcse.c: Likewise.
* ree.c: Likewise.
* reginfo.c: Likewise.
* store-motion.c: Likewise.
* combine.c: Include calls.h dojump.h emit-rtl.h explow.h expmed.h
fixed-value.h hashtab.h real.h stmt.h varasm.h.
* emit-rtl.c: Include calls.h dojump.h emit-rtl.h explow.h expmed.h
fixed-value.h hashtab.h statistics.h stmt.h.
* dojump.c: Include calls.h dojump.h emit-rtl.h explow.h expmed.h
fixed-value.h hashtab.h statistics.h stmt.h varasm.h.
* except.c: Likewise.
* explow.c: Likewise.
* tree-dfa.c: Include calls.h dojump.h emit-rtl.h explow.h expmed.h
fixed-value.h insn-config.h real.h rtl.h statistics.h stmt.h
varasm.h.
* gimple-fold.c: Include calls.h dojump.h emit-rtl.h explow.h expmed.h
fixed-value.h insn-config.h real.h rtl.h statistics.h varasm.h.
* tree-ssa-structalias.c: Likewise.
* cfgexpand.c: Include calls.h dojump.h emit-rtl.h explow.h expmed.h
fixed-value.h insn-config.h real.h statistics.h.
* calls.c: Include calls.h dojump.h emit-rtl.h explow.h expmed.h
fixed-value.h insn-config.h real.h statistics.h stmt.h.
* bb-reorder.c: Include calls.h dojump.h emit-rtl.h explow.h expmed.h
fixed-value.h insn-config.h real.h statistics.h stmt.h varasm.h.
* cfgbuild.c: Likewise.
* function.c: Include calls.h dojump.h emit-rtl.h explow.h expmed.h
fixed-value.h real.h rtl.h statistics.h stmt.h.
* cfgrtl.c: Include calls.h dojump.h emit-rtl.h explow.h expmed.h
fixed-value.h real.h rtl.h statistics.h stmt.h varasm.h.
* dbxout.c: Include calls.h dojump.h emit-rtl.h explow.h expmed.h
fixed-value.h real.h statistics.h stmt.h.
* auto-inc-dec.c: Include calls.h dojump.h emit-rtl.h explow.h expmed.h
fixed-value.h real.h statistics.h stmt.h varasm.h.
* cprop.c: Likewise.
* modulo-sched.c: Likewise.
* postreload.c: Likewise.
* ccmp.c: Include calls.h dojump.h emit-rtl.h explow.h fixed-value.h
flags.h function.h hard-reg-set.h hashtab.h insn-config.h real.h
statistics.h stmt.h varasm.h.
* gimple-ssa-strength-reduction.c: Include calls.h dojump.h emit-rtl.h
explow.h fixed-value.h flags.h hashtab.h insn-config.h real.h
rtl.h statistics.h stmt.h varasm.h.
* tree-ssa-loop-ivopts.c: Include calls.h dojump.h emit-rtl.h explow.h
fixed-value.h flags.h hashtab.h real.h rtl.h statistics.h stmt.h
varasm.h.
* expmed.c: Include calls.h dojump.h emit-rtl.h explow.h fixed-value.h
function.h hard-reg-set.h hashtab.h real.h statistics.h stmt.h
varasm.h.
* target-globals.c: Include calls.h dojump.h emit-rtl.h explow.h
fixed-value.h function.h hashtab.h real.h statistics.h stmt.h
varasm.h.
* tree-ssa-address.c: Include calls.h dojump.h emit-rtl.h explow.h
fixed-value.h hashtab.h real.h statistics.h stmt.h varasm.h.
* cfgcleanup.c: Include calls.h dojump.h explow.h expmed.h fixed-value.h
function.h real.h statistics.h stmt.h varasm.h.
* alias.c: Include calls.h dojump.h explow.h expmed.h fixed-value.h
insn-config.h real.h statistics.h stmt.h.
* dwarf2out.c: Include calls.h dojump.h explow.h expmed.h fixed-value.h
statistics.h stmt.h.
* config/nvptx/nvptx.c: Include dojump.h emit-rtl.h explow.h expmed.h
fixed-value.h flags.h hard-reg-set.h insn-config.h real.h
statistics.h stmt.h varasm.h.
* gimplify.c: Include dojump.h emit-rtl.h explow.h expmed.h fixed-value.h
flags.h hashtab.h insn-config.h real.h rtl.h statistics.h.
* asan.c: Include dojump.h emit-rtl.h explow.h expmed.h fixed-value.h
flags.h hashtab.h insn-config.h real.h rtl.h statistics.h stmt.h.
* ipa-devirt.c: Include dojump.h emit-rtl.h explow.h expmed.h
fixed-value.h flags.h hashtab.h insn-config.h real.h rtl.h
statistics.h stmt.h varasm.h.
* ipa-polymorphic-call.c: Likewise.
* config/aarch64/aarch64.c: Include dojump.h emit-rtl.h explow.h expmed.h
fixed-value.h flags.h hashtab.h insn-config.h real.h statistics.h
stmt.h.
* config/c6x/c6x.c: Likewise.
* config/aarch64/aarch64-builtins.c: Include dojump.h emit-rtl.h explow.h
expmed.h fixed-value.h flags.h hashtab.h insn-config.h real.h
statistics.h stmt.h varasm.h.
* ipa-prop.c: Include dojump.h emit-rtl.h explow.h expmed.h fixed-value.h
hashtab.h insn-config.h real.h rtl.h statistics.h stmt.h varasm.h.
* ipa-split.c: Likewise.
* tree-eh.c: Likewise.
* tree-ssa-dce.c: Likewise.
* tree-ssa-loop-niter.c: Likewise.
* tree-vrp.c: Likewise.
* config/nds32/nds32-cost.c: Include dojump.h emit-rtl.h explow.h
expmed.h fixed-value.h hashtab.h insn-config.h real.h statistics.h
stmt.h.
* config/nds32/nds32-fp-as-gp.c: Likewise.
* config/nds32/nds32-intrinsic.c: Likewise.
* config/nds32/nds32-isr.c: Likewise.
* config/nds32/nds32-md-auxiliary.c: Likewise.
* config/nds32/nds32-memory-manipulation.c: Likewise.
* config/nds32/nds32-pipelines-auxiliary.c: Likewise.
* config/nds32/nds32-predicates.c: Likewise.
* config/nds32/nds32.c: Likewise.
* config/cris/cris.c: Include dojump.h emit-rtl.h explow.h expmed.h
fixed-value.h hashtab.h real.h statistics.h.
* config/alpha/alpha.c: Include dojump.h emit-rtl.h explow.h expmed.h
fixed-value.h hashtab.h real.h statistics.h stmt.h.
* config/arm/arm.c: Likewise.
* config/avr/avr.c: Likewise.
* config/bfin/bfin.c: Likewise.
* config/h8300/h8300.c: Likewise.
* config/i386/i386.c: Likewise.
* config/ia64/ia64.c: Likewise.
* config/iq2000/iq2000.c: Likewise.
* config/m32c/m32c.c: Likewise.
* config/m32r/m32r.c: Likewise.
* config/m68k/m68k.c: Likewise.
* config/mcore/mcore.c: Likewise.
* config/mep/mep.c: Likewise.
* config/mips/mips.c: Likewise.
* config/mn10300/mn10300.c: Likewise.
* config/moxie/moxie.c: Likewise.
* config/pa/pa.c: Likewise.
* config/rl78/rl78.c: Likewise.
* config/rx/rx.c: Likewise.
* config/s390/s390.c: Likewise.
* config/sh/sh.c: Likewise.
* config/sparc/sparc.c: Likewise.
* config/spu/spu.c: Likewise.
* config/stormy16/stormy16.c: Likewise.
* config/v850/v850.c: Likewise.
* config/vax/vax.c: Likewise.
* config/cr16/cr16.c: Include dojump.h emit-rtl.h explow.h expmed.h
fixed-value.h hashtab.h real.h statistics.h stmt.h varasm.h.
* config/msp430/msp430.c: Likewise.
* predict.c: Likewise.
* value-prof.c: Likewise.
* config/epiphany/epiphany.c: Include dojump.h emit-rtl.h explow.h
expmed.h fixed-value.h hashtab.h statistics.h stmt.h.
* config/microblaze/microblaze.c: Likewise.
* config/nios2/nios2.c: Likewise.
* config/rs6000/rs6000.c: Likewise.
* tree.c: Include dojump.h emit-rtl.h explow.h expmed.h fixed-value.h
insn-config.h real.h rtl.h statistics.h stmt.h.
* cgraph.c: Include dojump.h emit-rtl.h explow.h expmed.h fixed-value.h
insn-config.h real.h statistics.h stmt.h.
* fold-const.c: Include dojump.h emit-rtl.h explow.h expmed.h
fixed-value.h insn-config.h real.h statistics.h stmt.h varasm.h.
* tree-inline.c: Include dojump.h emit-rtl.h explow.h expmed.h
fixed-value.h real.h rtl.h statistics.h stmt.h varasm.h.
* builtins.c: Include dojump.h emit-rtl.h explow.h expmed.h fixed-value.h
real.h statistics.h stmt.h.
* config/arc/arc.c: Include dojump.h emit-rtl.h explow.h expmed.h
fixed-value.h statistics.h stmt.h.
* config/visium/visium.c: Include dojump.h emit-rtl.h explow.h expmed.h
stmt.h.
java/
* builtins.c: Include calls.h dojump.h emit-rtl.h explow.h expmed.h
fixed-value.h function.h hard-reg-set.h hashtab.h insn-config.h
real.h statistics.h stmt.h varasm.h.
From-SVN: r219655
2015-01-15 14:28:42 +01:00
|
|
|
#include "dojump.h"
|
|
|
|
#include "explow.h"
|
Implement protection of stack variables
This patch implements the protection of stack variables.
It lays out stack variables as well as the different red zones,
emits some prologue code to populate the shadow memory as to poison
(mark as non-accessible) the regions of the red zones and mark the
regions of stack variables as accessible, and emit some epilogue code
to un-poison (mark as accessible) the regions of red zones right
before the function exits.
* Makefile.in (asan.o): Depend on $(EXPR_H) $(OPTABS_H).
(cfgexpand.o): Depend on asan.h.
* asan.c: Include expr.h and optabs.h.
(asan_shadow_set): New variable.
(asan_shadow_cst, asan_emit_stack_protection): New functions.
(asan_init_shadow_ptr_types): Initialize also asan_shadow_set.
* cfgexpand.c: Include asan.h. Define HOST_WIDE_INT heap vector.
(partition_stack_vars): If i is large alignment and j small
alignment or vice versa, break out of the loop instead of continue,
and put the test earlier. If flag_asan, break out of the loop
if for small alignment size is different.
(struct stack_vars_data): New type.
(expand_stack_vars): Add DATA argument. Change PRED type to
function taking size_t argument instead of tree. Adjust pred
calls. Fill DATA in and add needed padding in between variables
if -faddress-sanitizer.
(defer_stack_allocation): Defer everything for flag_asan.
(stack_protect_decl_phase_1, stack_protect_decl_phase_2): Take
size_t index into stack_vars array instead of the decl directly.
(asan_decl_phase_3): New function.
(expand_used_vars): Return var destruction sequence. Adjust
expand_stack_vars calls, add another one for flag_asan. Call
asan_emit_stack_protection if expand_stack_vars added anything
to the vectors.
(expand_gimple_basic_block): Add disable_tail_calls argument.
(gimple_expand_cfg): Pass true to it if expand_used_vars returned
non-NULL. Emit the sequence returned by expand_used_vars after
return_label.
* asan.h (asan_emit_stack_protection): New prototype.
(asan_shadow_set): New decl.
(ASAN_RED_ZONE_SIZE, ASAN_STACK_MAGIC_LEFT, ASAN_STACK_MAGIC_MIDDLE,
ASAN_STACK_MAGIC_RIGHT, ASAN_STACK_FRAME_MAGIC): Define.
(asan_protect_stack_decl): New inline.
* toplev.c (process_options): Also disable -faddress-sanitizer on
!FRAME_GROWS_DOWNWARDS targets.
From-SVN: r193436
2012-11-12 16:52:26 +01:00
|
|
|
#include "expr.h"
|
Implement protection of global variables
This patch implements the protection of global variables. See the
comments appended to the beginning of the asan.c file.
* varasm.c: Include asan.h.
(assemble_noswitch_variable): Grow size by asan_red_zone_size
if decl is asan protected.
(place_block_symbol): Likewise.
(assemble_variable): If decl is asan protected, increase
DECL_ALIGN if needed, and for decls emitted using
assemble_variable_contents append padding zeros after it.
* Makefile.in (varasm.o): Depend on asan.h.
* asan.c: Include output.h.
(asan_pp, asan_pp_initialized, asan_ctor_statements): New variables.
(asan_pp_initialize, asan_pp_string): New functions.
(asan_emit_stack_protection): Use asan_pp{,_initialized}
instead of local pp{,_initialized} vars, use asan_pp_initialize
and asan_pp_string helpers.
(asan_needs_local_alias, asan_protect_global,
asan_global_struct, asan_add_global): New functions.
(asan_finish_file): Protect global vars that can be protected. Use
asan_ctor_statements instead of ctor_statements
* asan.h (asan_protect_global): New prototype.
(asan_red_zone_size): New inline function.
Co-Authored-By: Wei Mi <wmi@google.com>
From-SVN: r193437
2012-11-12 16:52:42 +01:00
|
|
|
#include "output.h"
|
2012-12-03 18:28:10 +01:00
|
|
|
#include "langhooks.h"
|
2013-04-26 10:01:19 +02:00
|
|
|
#include "cfgloop.h"
|
2013-09-26 15:23:31 +02:00
|
|
|
#include "gimple-builder.h"
|
2013-11-19 12:45:15 +01:00
|
|
|
#include "ubsan.h"
|
2014-01-09 08:31:05 +01:00
|
|
|
#include "params.h"
|
expr.h: Remove prototypes of functions defined in builtins.c.
* expr.h: Remove prototypes of functions defined in builtins.c.
* tree.h: (build_call_expr_*, build_string_literal): Add prototypes.
Remove prototypes of functions defined in builtins.c.
* builtins.h: Update prototype list to include all exported functions.
* builtins.c: (default_libc_has_function, gnu_libc_has_function,
no_c99_libc_has_function): Move to targhooks.c
(build_string_literal, build_call_expr_loc_array,
build_call_expr_loc_vec, build_call_expr_loc, build_call_expr): Move
to tree.c.
(expand_builtin_object_size, fold_builtin_object_size): Make static.
* targhooks.c (default_libc_has_function, gnu_libc_has_function,
no_c99_libc_has_function): Relocate from builtins.c.
* tree.c: Include builtins.h.
(build_call_expr_loc_array, build_call_expr_loc_vec,
build_call_expr_loc, build_call_expr, build_string_literal): Relocate
from builtins.c.
* fold-const.h (fold_fma): Move prototype to builtins.h.
* realmpfr.h (do_mpc_arg2): Move prototype to builtins.h.
* fortran/trans.c (trans_runtime_error_vararg): Call
fold_build_call_array_loc instead of fold_builtin_call_array.
* asan.c: Include builtins.h.
* cfgexpand.c: Likewise.
* convert.c: Likewise.
* emit-rtl.c: Likewise.
* except.c: Likewise.
* expr.c: Likewise.
* fold-const.c: Likewise.
* gimple-fold.c: Likewise.
* gimple-ssa-strength-reduction.c: Likewise.
* gimplify.c: Likewise.
* ipa-inline.c: Likewise.
* ipa-prop.c: Likewise.
* lto-streamer-out.c: Likewise.
* stmt.c: Likewise.
* tree-inline.c: Likewise.
* tree-object-size.c: Likewise.
* tree-sra.c: Likewise.
* tree-ssa-ccp.c: Likewise.
* tree-ssa-forwprop.c: Likewise.
* tree-ssa-loop-ivcanon.c: Likewise.
* tree-ssa-loop-ivopts.c: Likewise.
* tree-ssa-math-opts.c: Likewise.
* tree-ssa-reassoc.c: Likewise.
* tree-ssa-threadedge.c: Likewise.
* tree-streamer-in.c: Likewise.
* tree-vect-data-refs.c: Likewise.
* tree-vect-patterns.c: Likewise.
* tree-vect-stmts.c: Likewise.
c
* c-decl.c: Include builtins.h.
* c-parser.c: Likewise.
cp
* decl.c: Include builtins.h.
* semantics.c: Likewise.
go
* go-gcc.cc: Include builtins.h.
lto
* lto-symtab.c: Include builtins.h.
config
* aarch64/aarch64.c: Include builtins.h.
* alpha/alpha.c: Likewise.
* arc/arc.c: Likewise.
* arm/arm.c: Likewise.
* avr/avr.c: Likewise.
* bfin/bfin.c: Likewise.
* c6x/c6x.c: Likewise.
* cr16/cr16.c: Likewise.
* cris/cris.c: Likewise.
* epiphany/epiphany.c: Likewise.
* fr30/fr30.c: Likewise.
* frv/frv.c: Likewise.
* h8300/h8300.c: Likewise.
* i386/i386.c: Likewise.
* i386/winnt.c: Likewise.
* ia64/ia64.c: Likewise.
* iq2000/iq2000.c: Likewise.
* lm32/lm32.c: Likewise.
* m32c/m32c.c: Likewise.
* m32r/m32r.c: Likewise.
* m68k/m68k.c: Likewise.
* mcore/mcore.c: Likewise.
* mep/mep.c: Likewise.
* microblaze/microblaze.c: Likewise.
* mips/mips.c: Likewise.
* mmix/mmix.c: Likewise.
* mn10300/mn10300.c: Likewise.
* moxie/moxie.c: Likewise.
* msp430/msp430.c: Likewise.
* nds32/nds32.c: Likewise.
* pa/pa.c: Likewise.
* pdp11/pdp11.c: Likewise.
* picochip/picochip.c: Likewise.
* rl78/rl78.c: Likewise.
* rs6000/rs6000.c: Likewise.
* rx/rx.c: Likewise.
* s390/s390.c: Likewise.
* score/score.c: Likewise.
* sh/sh.c: Likewise.
* sparc/sparc.c: Likewise.
* spu/spu.c: Likewise.
* stormy16/stormy16.c: Likewise.
* tilegx/tilegx.c: Likewise.
* tilepro/tilepro.c: Likewise.
* v850/v850.c: Likewise.
* vax/vax.c: Likewise.
* xtensa/xtensa.c: Likewise.
From-SVN: r211145
2014-06-02 22:13:44 +02:00
|
|
|
#include "builtins.h"
|
2015-04-22 13:44:26 +02:00
|
|
|
#include "fnmatch.h"
|
2017-01-23 13:02:13 +01:00
|
|
|
#include "tree-inline.h"
|
2012-11-12 16:51:13 +01:00
|
|
|
|
2012-11-12 17:18:59 +01:00
|
|
|
/* AddressSanitizer finds out-of-bounds and use-after-free bugs
|
|
|
|
with <2x slowdown on average.
|
|
|
|
|
|
|
|
The tool consists of two parts:
|
|
|
|
instrumentation module (this file) and a run-time library.
|
|
|
|
The instrumentation module adds a run-time check before every memory insn.
|
|
|
|
   For an 8- or 16-byte load accessing address X:
|
|
|
|
ShadowAddr = (X >> 3) + Offset
|
|
|
|
ShadowValue = *(char*)ShadowAddr; // *(short*) for 16-byte access.
|
|
|
|
if (ShadowValue)
|
|
|
|
__asan_report_load8(X);
|
|
|
|
For a load of N bytes (N=1, 2 or 4) from address X:
|
|
|
|
ShadowAddr = (X >> 3) + Offset
|
|
|
|
ShadowValue = *(char*)ShadowAddr;
|
|
|
|
if (ShadowValue)
|
|
|
|
       if ((X & 7) + N - 1 >= ShadowValue)
|
|
|
|
__asan_report_loadN(X);
|
|
|
|
Stores are instrumented similarly, but using __asan_report_storeN functions.
|
2013-11-04 22:33:31 +01:00
|
|
|
   A call to __asan_init_vN() is inserted into the list of module CTORs.
|
|
|
|
N is the version number of the AddressSanitizer API. The changes between the
|
|
|
|
API versions are listed in libsanitizer/asan/asan_interface_internal.h.
|
2012-11-12 17:18:59 +01:00
|
|
|
|
|
|
|
   The run-time library redefines malloc (so that redzones are inserted around
|
|
|
|
   the allocated memory) and free (so that reuse of freed memory is delayed),
|
2013-11-04 22:33:31 +01:00
|
|
|
provides __asan_report* and __asan_init_vN functions.
|
2012-11-12 17:18:59 +01:00
|
|
|
|
|
|
|
Read more:
|
|
|
|
http://code.google.com/p/address-sanitizer/wiki/AddressSanitizerAlgorithm
|
|
|
|
|
|
|
|
The current implementation supports detection of out-of-bounds and
|
|
|
|
use-after-free in the heap, on the stack and for global variables.
|
|
|
|
|
|
|
|
[Protection of stack variables]
|
|
|
|
|
|
|
|
To understand how detection of out-of-bounds and use-after-free works
|
|
|
|
   for stack variables, let's look at this example on x86_64 where the
|
|
|
|
stack grows downward:
|
Implement protection of stack variables
This patch implements the protection of stack variables.
It lays out stack variables as well as the different red zones,
emits some prologue code to populate the shadow memory as to poison
(mark as non-accessible) the regions of the red zones and mark the
regions of stack variables as accessible, and emit some epilogue code
to un-poison (mark as accessible) the regions of red zones right
before the function exits.
* Makefile.in (asan.o): Depend on $(EXPR_H) $(OPTABS_H).
(cfgexpand.o): Depend on asan.h.
* asan.c: Include expr.h and optabs.h.
(asan_shadow_set): New variable.
(asan_shadow_cst, asan_emit_stack_protection): New functions.
(asan_init_shadow_ptr_types): Initialize also asan_shadow_set.
* cfgexpand.c: Include asan.h. Define HOST_WIDE_INT heap vector.
(partition_stack_vars): If i is large alignment and j small
alignment or vice versa, break out of the loop instead of continue,
and put the test earlier. If flag_asan, break out of the loop
if for small alignment size is different.
(struct stack_vars_data): New type.
(expand_stack_vars): Add DATA argument. Change PRED type to
function taking size_t argument instead of tree. Adjust pred
calls. Fill DATA in and add needed padding in between variables
if -faddress-sanitizer.
(defer_stack_allocation): Defer everything for flag_asan.
(stack_protect_decl_phase_1, stack_protect_decl_phase_2): Take
size_t index into stack_vars array instead of the decl directly.
(asan_decl_phase_3): New function.
(expand_used_vars): Return var destruction sequence. Adjust
expand_stack_vars calls, add another one for flag_asan. Call
asan_emit_stack_protection if expand_stack_vars added anything
to the vectors.
(expand_gimple_basic_block): Add disable_tail_calls argument.
(gimple_expand_cfg): Pass true to it if expand_used_vars returned
non-NULL. Emit the sequence returned by expand_used_vars after
return_label.
* asan.h (asan_emit_stack_protection): New prototype.
(asan_shadow_set): New decl.
(ASAN_RED_ZONE_SIZE, ASAN_STACK_MAGIC_LEFT, ASAN_STACK_MAGIC_MIDDLE,
ASAN_STACK_MAGIC_RIGHT, ASAN_STACK_FRAME_MAGIC): Define.
(asan_protect_stack_decl): New inline.
* toplev.c (process_options): Also disable -faddress-sanitizer on
!FRAME_GROWS_DOWNWARDS targets.
From-SVN: r193436
2012-11-12 16:52:26 +01:00
|
|
|
|
|
|
|
int
|
|
|
|
foo ()
|
|
|
|
{
|
|
|
|
char a[23] = {0};
|
|
|
|
int b[2] = {0};
|
|
|
|
|
|
|
|
a[5] = 1;
|
|
|
|
b[1] = 2;
|
|
|
|
|
|
|
|
return a[5] + b[1];
|
|
|
|
}
|
|
|
|
|
2012-11-12 17:18:59 +01:00
|
|
|
For this function, the stack protected by asan will be organized as
|
|
|
|
follows, from the top of the stack to the bottom:
|
Implement protection of stack variables
This patch implements the protection of stack variables.
It lays out stack variables as well as the different red zones,
emits some prologue code to populate the shadow memory as to poison
(mark as non-accessible) the regions of the red zones and mark the
regions of stack variables as accessible, and emit some epilogue code
to un-poison (mark as accessible) the regions of red zones right
before the function exits.
* Makefile.in (asan.o): Depend on $(EXPR_H) $(OPTABS_H).
(cfgexpand.o): Depend on asan.h.
* asan.c: Include expr.h and optabs.h.
(asan_shadow_set): New variable.
(asan_shadow_cst, asan_emit_stack_protection): New functions.
(asan_init_shadow_ptr_types): Initialize also asan_shadow_set.
* cfgexpand.c: Include asan.h. Define HOST_WIDE_INT heap vector.
(partition_stack_vars): If i is large alignment and j small
alignment or vice versa, break out of the loop instead of continue,
and put the test earlier. If flag_asan, break out of the loop
if for small alignment size is different.
(struct stack_vars_data): New type.
(expand_stack_vars): Add DATA argument. Change PRED type to
function taking size_t argument instead of tree. Adjust pred
calls. Fill DATA in and add needed padding in between variables
if -faddress-sanitizer.
(defer_stack_allocation): Defer everything for flag_asan.
(stack_protect_decl_phase_1, stack_protect_decl_phase_2): Take
size_t index into stack_vars array instead of the decl directly.
(asan_decl_phase_3): New function.
(expand_used_vars): Return var destruction sequence. Adjust
expand_stack_vars calls, add another one for flag_asan. Call
asan_emit_stack_protection if expand_stack_vars added anything
to the vectors.
(expand_gimple_basic_block): Add disable_tail_calls argument.
(gimple_expand_cfg): Pass true to it if expand_used_vars returned
non-NULL. Emit the sequence returned by expand_used_vars after
return_label.
* asan.h (asan_emit_stack_protection): New prototype.
(asan_shadow_set): New decl.
(ASAN_RED_ZONE_SIZE, ASAN_STACK_MAGIC_LEFT, ASAN_STACK_MAGIC_MIDDLE,
ASAN_STACK_MAGIC_RIGHT, ASAN_STACK_FRAME_MAGIC): Define.
(asan_protect_stack_decl): New inline.
* toplev.c (process_options): Also disable -faddress-sanitizer on
!FRAME_GROWS_DOWNWARDS targets.
From-SVN: r193436
2012-11-12 16:52:26 +01:00
|
|
|
|
2012-11-12 17:18:59 +01:00
|
|
|
Slot 1/ [red zone of 32 bytes called 'RIGHT RedZone']
|
Implement protection of stack variables
This patch implements the protection of stack variables.
It lays out stack variables as well as the different red zones,
emits some prologue code to populate the shadow memory as to poison
(mark as non-accessible) the regions of the red zones and mark the
regions of stack variables as accessible, and emit some epilogue code
to un-poison (mark as accessible) the regions of red zones right
before the function exits.
* Makefile.in (asan.o): Depend on $(EXPR_H) $(OPTABS_H).
(cfgexpand.o): Depend on asan.h.
* asan.c: Include expr.h and optabs.h.
(asan_shadow_set): New variable.
(asan_shadow_cst, asan_emit_stack_protection): New functions.
(asan_init_shadow_ptr_types): Initialize also asan_shadow_set.
* cfgexpand.c: Include asan.h. Define HOST_WIDE_INT heap vector.
(partition_stack_vars): If i is large alignment and j small
alignment or vice versa, break out of the loop instead of continue,
and put the test earlier. If flag_asan, break out of the loop
if for small alignment size is different.
(struct stack_vars_data): New type.
(expand_stack_vars): Add DATA argument. Change PRED type to
function taking size_t argument instead of tree. Adjust pred
calls. Fill DATA in and add needed padding in between variables
if -faddress-sanitizer.
(defer_stack_allocation): Defer everything for flag_asan.
(stack_protect_decl_phase_1, stack_protect_decl_phase_2): Take
size_t index into stack_vars array instead of the decl directly.
(asan_decl_phase_3): New function.
(expand_used_vars): Return var destruction sequence. Adjust
expand_stack_vars calls, add another one for flag_asan. Call
asan_emit_stack_protection if expand_stack_vars added anything
to the vectors.
(expand_gimple_basic_block): Add disable_tail_calls argument.
(gimple_expand_cfg): Pass true to it if expand_used_vars returned
non-NULL. Emit the sequence returned by expand_used_vars after
return_label.
* asan.h (asan_emit_stack_protection): New prototype.
(asan_shadow_set): New decl.
(ASAN_RED_ZONE_SIZE, ASAN_STACK_MAGIC_LEFT, ASAN_STACK_MAGIC_MIDDLE,
ASAN_STACK_MAGIC_RIGHT, ASAN_STACK_FRAME_MAGIC): Define.
(asan_protect_stack_decl): New inline.
* toplev.c (process_options): Also disable -faddress-sanitizer on
!FRAME_GROWS_DOWNWARDS targets.
From-SVN: r193436
2012-11-12 16:52:26 +01:00
|
|
|
|
2012-11-12 17:18:59 +01:00
|
|
|
Slot 2/ [8 bytes of red zone, that adds up to the space of 'a' to make
|
|
|
|
	    the next slot 32-byte aligned; this one is called the Partial
|
|
|
|
	    Redzone; this 32-byte alignment is an asan constraint]
|
Implement protection of stack variables
This patch implements the protection of stack variables.
It lays out stack variables as well as the different red zones,
emits some prologue code to populate the shadow memory as to poison
(mark as non-accessible) the regions of the red zones and mark the
regions of stack variables as accessible, and emit some epilogue code
to un-poison (mark as accessible) the regions of red zones right
before the function exits.
* Makefile.in (asan.o): Depend on $(EXPR_H) $(OPTABS_H).
(cfgexpand.o): Depend on asan.h.
* asan.c: Include expr.h and optabs.h.
(asan_shadow_set): New variable.
(asan_shadow_cst, asan_emit_stack_protection): New functions.
(asan_init_shadow_ptr_types): Initialize also asan_shadow_set.
* cfgexpand.c: Include asan.h. Define HOST_WIDE_INT heap vector.
(partition_stack_vars): If i is large alignment and j small
alignment or vice versa, break out of the loop instead of continue,
and put the test earlier. If flag_asan, break out of the loop
if for small alignment size is different.
(struct stack_vars_data): New type.
(expand_stack_vars): Add DATA argument. Change PRED type to
function taking size_t argument instead of tree. Adjust pred
calls. Fill DATA in and add needed padding in between variables
if -faddress-sanitizer.
(defer_stack_allocation): Defer everything for flag_asan.
(stack_protect_decl_phase_1, stack_protect_decl_phase_2): Take
size_t index into stack_vars array instead of the decl directly.
(asan_decl_phase_3): New function.
(expand_used_vars): Return var destruction sequence. Adjust
expand_stack_vars calls, add another one for flag_asan. Call
asan_emit_stack_protection if expand_stack_vars added anything
to the vectors.
(expand_gimple_basic_block): Add disable_tail_calls argument.
(gimple_expand_cfg): Pass true to it if expand_used_vars returned
non-NULL. Emit the sequence returned by expand_used_vars after
return_label.
* asan.h (asan_emit_stack_protection): New prototype.
(asan_shadow_set): New decl.
(ASAN_RED_ZONE_SIZE, ASAN_STACK_MAGIC_LEFT, ASAN_STACK_MAGIC_MIDDLE,
ASAN_STACK_MAGIC_RIGHT, ASAN_STACK_FRAME_MAGIC): Define.
(asan_protect_stack_decl): New inline.
* toplev.c (process_options): Also disable -faddress-sanitizer on
!FRAME_GROWS_DOWNWARDS targets.
From-SVN: r193436
2012-11-12 16:52:26 +01:00
|
|
|
|
2012-11-12 17:18:59 +01:00
|
|
|
Slot 3/ [24 bytes for variable 'a']
|
Implement protection of stack variables
This patch implements the protection of stack variables.
It lays out stack variables as well as the different red zones,
emits some prologue code to populate the shadow memory as to poison
(mark as non-accessible) the regions of the red zones and mark the
regions of stack variables as accessible, and emit some epilogue code
to un-poison (mark as accessible) the regions of red zones right
before the function exits.
* Makefile.in (asan.o): Depend on $(EXPR_H) $(OPTABS_H).
(cfgexpand.o): Depend on asan.h.
* asan.c: Include expr.h and optabs.h.
(asan_shadow_set): New variable.
(asan_shadow_cst, asan_emit_stack_protection): New functions.
(asan_init_shadow_ptr_types): Initialize also asan_shadow_set.
* cfgexpand.c: Include asan.h. Define HOST_WIDE_INT heap vector.
(partition_stack_vars): If i is large alignment and j small
alignment or vice versa, break out of the loop instead of continue,
and put the test earlier. If flag_asan, break out of the loop
if for small alignment size is different.
(struct stack_vars_data): New type.
(expand_stack_vars): Add DATA argument. Change PRED type to
function taking size_t argument instead of tree. Adjust pred
calls. Fill DATA in and add needed padding in between variables
if -faddress-sanitizer.
(defer_stack_allocation): Defer everything for flag_asan.
(stack_protect_decl_phase_1, stack_protect_decl_phase_2): Take
size_t index into stack_vars array instead of the decl directly.
(asan_decl_phase_3): New function.
(expand_used_vars): Return var destruction sequence. Adjust
expand_stack_vars calls, add another one for flag_asan. Call
asan_emit_stack_protection if expand_stack_vars added anything
to the vectors.
(expand_gimple_basic_block): Add disable_tail_calls argument.
(gimple_expand_cfg): Pass true to it if expand_used_vars returned
non-NULL. Emit the sequence returned by expand_used_vars after
return_label.
* asan.h (asan_emit_stack_protection): New prototype.
(asan_shadow_set): New decl.
(ASAN_RED_ZONE_SIZE, ASAN_STACK_MAGIC_LEFT, ASAN_STACK_MAGIC_MIDDLE,
ASAN_STACK_MAGIC_RIGHT, ASAN_STACK_FRAME_MAGIC): Define.
(asan_protect_stack_decl): New inline.
* toplev.c (process_options): Also disable -faddress-sanitizer on
!FRAME_GROWS_DOWNWARDS targets.
From-SVN: r193436
2012-11-12 16:52:26 +01:00
|
|
|
|
2012-11-12 17:18:59 +01:00
|
|
|
Slot 4/ [red zone of 32 bytes called 'Middle RedZone']
|
Implement protection of stack variables
This patch implements the protection of stack variables.
It lays out stack variables as well as the different red zones,
emits some prologue code to populate the shadow memory as to poison
(mark as non-accessible) the regions of the red zones and mark the
regions of stack variables as accessible, and emit some epilogue code
to un-poison (mark as accessible) the regions of red zones right
before the function exits.
* Makefile.in (asan.o): Depend on $(EXPR_H) $(OPTABS_H).
(cfgexpand.o): Depend on asan.h.
* asan.c: Include expr.h and optabs.h.
(asan_shadow_set): New variable.
(asan_shadow_cst, asan_emit_stack_protection): New functions.
(asan_init_shadow_ptr_types): Initialize also asan_shadow_set.
* cfgexpand.c: Include asan.h. Define HOST_WIDE_INT heap vector.
(partition_stack_vars): If i is large alignment and j small
alignment or vice versa, break out of the loop instead of continue,
and put the test earlier. If flag_asan, break out of the loop
if for small alignment size is different.
(struct stack_vars_data): New type.
(expand_stack_vars): Add DATA argument. Change PRED type to
function taking size_t argument instead of tree. Adjust pred
calls. Fill DATA in and add needed padding in between variables
if -faddress-sanitizer.
(defer_stack_allocation): Defer everything for flag_asan.
(stack_protect_decl_phase_1, stack_protect_decl_phase_2): Take
size_t index into stack_vars array instead of the decl directly.
(asan_decl_phase_3): New function.
(expand_used_vars): Return var destruction sequence. Adjust
expand_stack_vars calls, add another one for flag_asan. Call
asan_emit_stack_protection if expand_stack_vars added anything
to the vectors.
(expand_gimple_basic_block): Add disable_tail_calls argument.
(gimple_expand_cfg): Pass true to it if expand_used_vars returned
non-NULL. Emit the sequence returned by expand_used_vars after
return_label.
* asan.h (asan_emit_stack_protection): New prototype.
(asan_shadow_set): New decl.
(ASAN_RED_ZONE_SIZE, ASAN_STACK_MAGIC_LEFT, ASAN_STACK_MAGIC_MIDDLE,
ASAN_STACK_MAGIC_RIGHT, ASAN_STACK_FRAME_MAGIC): Define.
(asan_protect_stack_decl): New inline.
* toplev.c (process_options): Also disable -faddress-sanitizer on
!FRAME_GROWS_DOWNWARDS targets.
From-SVN: r193436
2012-11-12 16:52:26 +01:00
|
|
|
|
2012-11-12 17:18:59 +01:00
|
|
|
Slot 5/ [24 bytes of Partial Red Zone (similar to slot 2]
|
Implement protection of stack variables
This patch implements the protection of stack variables.
It lays out stack variables as well as the different red zones,
emits some prologue code to populate the shadow memory as to poison
(mark as non-accessible) the regions of the red zones and mark the
regions of stack variables as accessible, and emit some epilogue code
to un-poison (mark as accessible) the regions of red zones right
before the function exits.
* Makefile.in (asan.o): Depend on $(EXPR_H) $(OPTABS_H).
(cfgexpand.o): Depend on asan.h.
* asan.c: Include expr.h and optabs.h.
(asan_shadow_set): New variable.
(asan_shadow_cst, asan_emit_stack_protection): New functions.
(asan_init_shadow_ptr_types): Initialize also asan_shadow_set.
* cfgexpand.c: Include asan.h. Define HOST_WIDE_INT heap vector.
(partition_stack_vars): If i is large alignment and j small
alignment or vice versa, break out of the loop instead of continue,
and put the test earlier. If flag_asan, break out of the loop
if for small alignment size is different.
(struct stack_vars_data): New type.
(expand_stack_vars): Add DATA argument. Change PRED type to
function taking size_t argument instead of tree. Adjust pred
calls. Fill DATA in and add needed padding in between variables
if -faddress-sanitizer.
(defer_stack_allocation): Defer everything for flag_asan.
(stack_protect_decl_phase_1, stack_protect_decl_phase_2): Take
size_t index into stack_vars array instead of the decl directly.
(asan_decl_phase_3): New function.
(expand_used_vars): Return var destruction sequence. Adjust
expand_stack_vars calls, add another one for flag_asan. Call
asan_emit_stack_protection if expand_stack_vars added anything
to the vectors.
(expand_gimple_basic_block): Add disable_tail_calls argument.
(gimple_expand_cfg): Pass true to it if expand_used_vars returned
non-NULL. Emit the sequence returned by expand_used_vars after
return_label.
* asan.h (asan_emit_stack_protection): New prototype.
(asan_shadow_set): New decl.
(ASAN_RED_ZONE_SIZE, ASAN_STACK_MAGIC_LEFT, ASAN_STACK_MAGIC_MIDDLE,
ASAN_STACK_MAGIC_RIGHT, ASAN_STACK_FRAME_MAGIC): Define.
(asan_protect_stack_decl): New inline.
* toplev.c (process_options): Also disable -faddress-sanitizer on
!FRAME_GROWS_DOWNWARDS targets.
From-SVN: r193436
2012-11-12 16:52:26 +01:00
|
|
|
|
2012-11-12 17:18:59 +01:00
|
|
|
Slot 6/ [8 bytes for variable 'b']
|
Implement protection of stack variables
This patch implements the protection of stack variables.
It lays out stack variables as well as the different red zones,
emits some prologue code to populate the shadow memory as to poison
(mark as non-accessible) the regions of the red zones and mark the
regions of stack variables as accessible, and emit some epilogue code
to un-poison (mark as accessible) the regions of red zones right
before the function exits.
* Makefile.in (asan.o): Depend on $(EXPR_H) $(OPTABS_H).
(cfgexpand.o): Depend on asan.h.
* asan.c: Include expr.h and optabs.h.
(asan_shadow_set): New variable.
(asan_shadow_cst, asan_emit_stack_protection): New functions.
(asan_init_shadow_ptr_types): Initialize also asan_shadow_set.
* cfgexpand.c: Include asan.h. Define HOST_WIDE_INT heap vector.
(partition_stack_vars): If i is large alignment and j small
alignment or vice versa, break out of the loop instead of continue,
and put the test earlier. If flag_asan, break out of the loop
if for small alignment size is different.
(struct stack_vars_data): New type.
(expand_stack_vars): Add DATA argument. Change PRED type to
function taking size_t argument instead of tree. Adjust pred
calls. Fill DATA in and add needed padding in between variables
if -faddress-sanitizer.
(defer_stack_allocation): Defer everything for flag_asan.
(stack_protect_decl_phase_1, stack_protect_decl_phase_2): Take
size_t index into stack_vars array instead of the decl directly.
(asan_decl_phase_3): New function.
(expand_used_vars): Return var destruction sequence. Adjust
expand_stack_vars calls, add another one for flag_asan. Call
asan_emit_stack_protection if expand_stack_vars added anything
to the vectors.
(expand_gimple_basic_block): Add disable_tail_calls argument.
(gimple_expand_cfg): Pass true to it if expand_used_vars returned
non-NULL. Emit the sequence returned by expand_used_vars after
return_label.
* asan.h (asan_emit_stack_protection): New prototype.
(asan_shadow_set): New decl.
(ASAN_RED_ZONE_SIZE, ASAN_STACK_MAGIC_LEFT, ASAN_STACK_MAGIC_MIDDLE,
ASAN_STACK_MAGIC_RIGHT, ASAN_STACK_FRAME_MAGIC): Define.
(asan_protect_stack_decl): New inline.
* toplev.c (process_options): Also disable -faddress-sanitizer on
!FRAME_GROWS_DOWNWARDS targets.
From-SVN: r193436
2012-11-12 16:52:26 +01:00
|
|
|
|
2012-11-12 17:18:59 +01:00
|
|
|
Slot 7/ [32 bytes of Red Zone at the bottom of the stack, called
|
|
|
|
'LEFT RedZone']
|
Implement protection of stack variables
This patch implements the protection of stack variables.
It lays out stack variables as well as the different red zones,
emits some prologue code to populate the shadow memory as to poison
(mark as non-accessible) the regions of the red zones and mark the
regions of stack variables as accessible, and emit some epilogue code
to un-poison (mark as accessible) the regions of red zones right
before the function exits.
* Makefile.in (asan.o): Depend on $(EXPR_H) $(OPTABS_H).
(cfgexpand.o): Depend on asan.h.
* asan.c: Include expr.h and optabs.h.
(asan_shadow_set): New variable.
(asan_shadow_cst, asan_emit_stack_protection): New functions.
(asan_init_shadow_ptr_types): Initialize also asan_shadow_set.
* cfgexpand.c: Include asan.h. Define HOST_WIDE_INT heap vector.
(partition_stack_vars): If i is large alignment and j small
alignment or vice versa, break out of the loop instead of continue,
and put the test earlier. If flag_asan, break out of the loop
if for small alignment size is different.
(struct stack_vars_data): New type.
(expand_stack_vars): Add DATA argument. Change PRED type to
function taking size_t argument instead of tree. Adjust pred
calls. Fill DATA in and add needed padding in between variables
if -faddress-sanitizer.
(defer_stack_allocation): Defer everything for flag_asan.
(stack_protect_decl_phase_1, stack_protect_decl_phase_2): Take
size_t index into stack_vars array instead of the decl directly.
(asan_decl_phase_3): New function.
(expand_used_vars): Return var destruction sequence. Adjust
expand_stack_vars calls, add another one for flag_asan. Call
asan_emit_stack_protection if expand_stack_vars added anything
to the vectors.
(expand_gimple_basic_block): Add disable_tail_calls argument.
(gimple_expand_cfg): Pass true to it if expand_used_vars returned
non-NULL. Emit the sequence returned by expand_used_vars after
return_label.
* asan.h (asan_emit_stack_protection): New prototype.
(asan_shadow_set): New decl.
(ASAN_RED_ZONE_SIZE, ASAN_STACK_MAGIC_LEFT, ASAN_STACK_MAGIC_MIDDLE,
ASAN_STACK_MAGIC_RIGHT, ASAN_STACK_FRAME_MAGIC): Define.
(asan_protect_stack_decl): New inline.
* toplev.c (process_options): Also disable -faddress-sanitizer on
!FRAME_GROWS_DOWNWARDS targets.
From-SVN: r193436
2012-11-12 16:52:26 +01:00
|
|
|
|
2012-11-12 17:18:59 +01:00
|
|
|
The 32 bytes of LEFT red zone at the bottom of the stack can be
|
|
|
|
decomposed as such:
|
Implement protection of stack variables
This patch implements the protection of stack variables.
It lays out stack variables as well as the different red zones,
emits some prologue code to populate the shadow memory as to poison
(mark as non-accessible) the regions of the red zones and mark the
regions of stack variables as accessible, and emit some epilogue code
to un-poison (mark as accessible) the regions of red zones right
before the function exits.
* Makefile.in (asan.o): Depend on $(EXPR_H) $(OPTABS_H).
(cfgexpand.o): Depend on asan.h.
* asan.c: Include expr.h and optabs.h.
(asan_shadow_set): New variable.
(asan_shadow_cst, asan_emit_stack_protection): New functions.
(asan_init_shadow_ptr_types): Initialize also asan_shadow_set.
* cfgexpand.c: Include asan.h. Define HOST_WIDE_INT heap vector.
(partition_stack_vars): If i is large alignment and j small
alignment or vice versa, break out of the loop instead of continue,
and put the test earlier. If flag_asan, break out of the loop
if for small alignment size is different.
(struct stack_vars_data): New type.
(expand_stack_vars): Add DATA argument. Change PRED type to
function taking size_t argument instead of tree. Adjust pred
calls. Fill DATA in and add needed padding in between variables
if -faddress-sanitizer.
(defer_stack_allocation): Defer everything for flag_asan.
(stack_protect_decl_phase_1, stack_protect_decl_phase_2): Take
size_t index into stack_vars array instead of the decl directly.
(asan_decl_phase_3): New function.
(expand_used_vars): Return var destruction sequence. Adjust
expand_stack_vars calls, add another one for flag_asan. Call
asan_emit_stack_protection if expand_stack_vars added anything
to the vectors.
(expand_gimple_basic_block): Add disable_tail_calls argument.
(gimple_expand_cfg): Pass true to it if expand_used_vars returned
non-NULL. Emit the sequence returned by expand_used_vars after
return_label.
* asan.h (asan_emit_stack_protection): New prototype.
(asan_shadow_set): New decl.
(ASAN_RED_ZONE_SIZE, ASAN_STACK_MAGIC_LEFT, ASAN_STACK_MAGIC_MIDDLE,
ASAN_STACK_MAGIC_RIGHT, ASAN_STACK_FRAME_MAGIC): Define.
(asan_protect_stack_decl): New inline.
* toplev.c (process_options): Also disable -faddress-sanitizer on
!FRAME_GROWS_DOWNWARDS targets.
From-SVN: r193436
2012-11-12 16:52:26 +01:00
|
|
|
|
|
|
|
1/ The first 8 bytes contain a magical asan number that is always
|
|
|
|
0x41B58AB3.
|
|
|
|
|
|
|
|
2/ The following 8 bytes contains a pointer to a string (to be
|
|
|
|
parsed at runtime by the runtime asan library), which format is
|
|
|
|
the following:
|
|
|
|
|
|
|
|
"<function-name> <space> <num-of-variables-on-the-stack>
|
|
|
|
(<32-bytes-aligned-offset-in-bytes-of-variable> <space>
|
|
|
|
<length-of-var-in-bytes> ){n} "
|
|
|
|
|
|
|
|
where '(...){n}' means the content inside the parenthesis occurs 'n'
|
|
|
|
times, with 'n' being the number of variables on the stack.
|
2015-06-01 14:37:26 +02:00
|
|
|
|
2013-11-04 22:33:31 +01:00
|
|
|
3/ The following 8 bytes contain the PC of the current function which
|
|
|
|
will be used by the run-time library to print an error message.
|
Implement protection of stack variables
This patch implements the protection of stack variables.
It lays out stack variables as well as the different red zones,
emits some prologue code to populate the shadow memory as to poison
(mark as non-accessible) the regions of the red zones and mark the
regions of stack variables as accessible, and emit some epilogue code
to un-poison (mark as accessible) the regions of red zones right
before the function exits.
* Makefile.in (asan.o): Depend on $(EXPR_H) $(OPTABS_H).
(cfgexpand.o): Depend on asan.h.
* asan.c: Include expr.h and optabs.h.
(asan_shadow_set): New variable.
(asan_shadow_cst, asan_emit_stack_protection): New functions.
(asan_init_shadow_ptr_types): Initialize also asan_shadow_set.
* cfgexpand.c: Include asan.h. Define HOST_WIDE_INT heap vector.
(partition_stack_vars): If i is large alignment and j small
alignment or vice versa, break out of the loop instead of continue,
and put the test earlier. If flag_asan, break out of the loop
if for small alignment size is different.
(struct stack_vars_data): New type.
(expand_stack_vars): Add DATA argument. Change PRED type to
function taking size_t argument instead of tree. Adjust pred
calls. Fill DATA in and add needed padding in between variables
if -faddress-sanitizer.
(defer_stack_allocation): Defer everything for flag_asan.
(stack_protect_decl_phase_1, stack_protect_decl_phase_2): Take
size_t index into stack_vars array instead of the decl directly.
(asan_decl_phase_3): New function.
(expand_used_vars): Return var destruction sequence. Adjust
expand_stack_vars calls, add another one for flag_asan. Call
asan_emit_stack_protection if expand_stack_vars added anything
to the vectors.
(expand_gimple_basic_block): Add disable_tail_calls argument.
(gimple_expand_cfg): Pass true to it if expand_used_vars returned
non-NULL. Emit the sequence returned by expand_used_vars after
return_label.
* asan.h (asan_emit_stack_protection): New prototype.
(asan_shadow_set): New decl.
(ASAN_RED_ZONE_SIZE, ASAN_STACK_MAGIC_LEFT, ASAN_STACK_MAGIC_MIDDLE,
ASAN_STACK_MAGIC_RIGHT, ASAN_STACK_FRAME_MAGIC): Define.
(asan_protect_stack_decl): New inline.
* toplev.c (process_options): Also disable -faddress-sanitizer on
!FRAME_GROWS_DOWNWARDS targets.
From-SVN: r193436
2012-11-12 16:52:26 +01:00
|
|
|
|
2013-11-04 22:33:31 +01:00
|
|
|
4/ The following 8 bytes are reserved for internal use by the run-time.
|
Implement protection of stack variables
This patch implements the protection of stack variables.
It lays out stack variables as well as the different red zones,
emits some prologue code to populate the shadow memory as to poison
(mark as non-accessible) the regions of the red zones and mark the
regions of stack variables as accessible, and emit some epilogue code
to un-poison (mark as accessible) the regions of red zones right
before the function exits.
* Makefile.in (asan.o): Depend on $(EXPR_H) $(OPTABS_H).
(cfgexpand.o): Depend on asan.h.
* asan.c: Include expr.h and optabs.h.
(asan_shadow_set): New variable.
(asan_shadow_cst, asan_emit_stack_protection): New functions.
(asan_init_shadow_ptr_types): Initialize also asan_shadow_set.
* cfgexpand.c: Include asan.h. Define HOST_WIDE_INT heap vector.
(partition_stack_vars): If i is large alignment and j small
alignment or vice versa, break out of the loop instead of continue,
and put the test earlier. If flag_asan, break out of the loop
if for small alignment size is different.
(struct stack_vars_data): New type.
(expand_stack_vars): Add DATA argument. Change PRED type to
function taking size_t argument instead of tree. Adjust pred
calls. Fill DATA in and add needed padding in between variables
if -faddress-sanitizer.
(defer_stack_allocation): Defer everything for flag_asan.
(stack_protect_decl_phase_1, stack_protect_decl_phase_2): Take
size_t index into stack_vars array instead of the decl directly.
(asan_decl_phase_3): New function.
(expand_used_vars): Return var destruction sequence. Adjust
expand_stack_vars calls, add another one for flag_asan. Call
asan_emit_stack_protection if expand_stack_vars added anything
to the vectors.
(expand_gimple_basic_block): Add disable_tail_calls argument.
(gimple_expand_cfg): Pass true to it if expand_used_vars returned
non-NULL. Emit the sequence returned by expand_used_vars after
return_label.
* asan.h (asan_emit_stack_protection): New prototype.
(asan_shadow_set): New decl.
(ASAN_RED_ZONE_SIZE, ASAN_STACK_MAGIC_LEFT, ASAN_STACK_MAGIC_MIDDLE,
ASAN_STACK_MAGIC_RIGHT, ASAN_STACK_FRAME_MAGIC): Define.
(asan_protect_stack_decl): New inline.
* toplev.c (process_options): Also disable -faddress-sanitizer on
!FRAME_GROWS_DOWNWARDS targets.
From-SVN: r193436
2012-11-12 16:52:26 +01:00
|
|
|
|
2012-11-12 17:18:59 +01:00
|
|
|
The shadow memory for that stack layout is going to look like this:
|
Implement protection of stack variables
This patch implements the protection of stack variables.
It lays out stack variables as well as the different red zones,
emits some prologue code to populate the shadow memory as to poison
(mark as non-accessible) the regions of the red zones and mark the
regions of stack variables as accessible, and emit some epilogue code
to un-poison (mark as accessible) the regions of red zones right
before the function exits.
* Makefile.in (asan.o): Depend on $(EXPR_H) $(OPTABS_H).
(cfgexpand.o): Depend on asan.h.
* asan.c: Include expr.h and optabs.h.
(asan_shadow_set): New variable.
(asan_shadow_cst, asan_emit_stack_protection): New functions.
(asan_init_shadow_ptr_types): Initialize also asan_shadow_set.
* cfgexpand.c: Include asan.h. Define HOST_WIDE_INT heap vector.
(partition_stack_vars): If i is large alignment and j small
alignment or vice versa, break out of the loop instead of continue,
and put the test earlier. If flag_asan, break out of the loop
if for small alignment size is different.
(struct stack_vars_data): New type.
(expand_stack_vars): Add DATA argument. Change PRED type to
function taking size_t argument instead of tree. Adjust pred
calls. Fill DATA in and add needed padding in between variables
if -faddress-sanitizer.
(defer_stack_allocation): Defer everything for flag_asan.
(stack_protect_decl_phase_1, stack_protect_decl_phase_2): Take
size_t index into stack_vars array instead of the decl directly.
(asan_decl_phase_3): New function.
(expand_used_vars): Return var destruction sequence. Adjust
expand_stack_vars calls, add another one for flag_asan. Call
asan_emit_stack_protection if expand_stack_vars added anything
to the vectors.
(expand_gimple_basic_block): Add disable_tail_calls argument.
(gimple_expand_cfg): Pass true to it if expand_used_vars returned
non-NULL. Emit the sequence returned by expand_used_vars after
return_label.
* asan.h (asan_emit_stack_protection): New prototype.
(asan_shadow_set): New decl.
(ASAN_RED_ZONE_SIZE, ASAN_STACK_MAGIC_LEFT, ASAN_STACK_MAGIC_MIDDLE,
ASAN_STACK_MAGIC_RIGHT, ASAN_STACK_FRAME_MAGIC): Define.
(asan_protect_stack_decl): New inline.
* toplev.c (process_options): Also disable -faddress-sanitizer on
!FRAME_GROWS_DOWNWARDS targets.
From-SVN: r193436
2012-11-12 16:52:26 +01:00
|
|
|
|
|
|
|
- content of shadow memory 8 bytes for slot 7: 0xF1F1F1F1.
|
|
|
|
The F1 byte pattern is a magic number called
|
|
|
|
ASAN_STACK_MAGIC_LEFT and is a way for the runtime to know that
|
|
|
|
the memory for that shadow byte is part of a the LEFT red zone
|
|
|
|
intended to seat at the bottom of the variables on the stack.
|
|
|
|
|
|
|
|
- content of shadow memory 8 bytes for slots 6 and 5:
|
|
|
|
0xF4F4F400. The F4 byte pattern is a magic number
|
|
|
|
called ASAN_STACK_MAGIC_PARTIAL. It flags the fact that the
|
|
|
|
memory region for this shadow byte is a PARTIAL red zone
|
|
|
|
intended to pad a variable A, so that the slot following
|
|
|
|
{A,padding} is 32 bytes aligned.
|
|
|
|
|
|
|
|
Note that the fact that the least significant byte of this
|
|
|
|
shadow memory content is 00 means that 8 bytes of its
|
|
|
|
corresponding memory (which corresponds to the memory of
|
|
|
|
variable 'b') is addressable.
|
|
|
|
|
|
|
|
- content of shadow memory 8 bytes for slot 4: 0xF2F2F2F2.
|
|
|
|
The F2 byte pattern is a magic number called
|
|
|
|
ASAN_STACK_MAGIC_MIDDLE. It flags the fact that the memory
|
|
|
|
region for this shadow byte is a MIDDLE red zone intended to
|
|
|
|
seat between two 32 aligned slots of {variable,padding}.
|
|
|
|
|
|
|
|
- content of shadow memory 8 bytes for slot 3 and 2:
|
2012-11-12 17:18:59 +01:00
|
|
|
0xF4000000. This represents is the concatenation of
|
Implement protection of stack variables
This patch implements the protection of stack variables.
It lays out stack variables as well as the different red zones,
emits some prologue code to populate the shadow memory as to poison
(mark as non-accessible) the regions of the red zones and mark the
regions of stack variables as accessible, and emit some epilogue code
to un-poison (mark as accessible) the regions of red zones right
before the function exits.
* Makefile.in (asan.o): Depend on $(EXPR_H) $(OPTABS_H).
(cfgexpand.o): Depend on asan.h.
* asan.c: Include expr.h and optabs.h.
(asan_shadow_set): New variable.
(asan_shadow_cst, asan_emit_stack_protection): New functions.
(asan_init_shadow_ptr_types): Initialize also asan_shadow_set.
* cfgexpand.c: Include asan.h. Define HOST_WIDE_INT heap vector.
(partition_stack_vars): If i is large alignment and j small
alignment or vice versa, break out of the loop instead of continue,
and put the test earlier. If flag_asan, break out of the loop
if for small alignment size is different.
(struct stack_vars_data): New type.
(expand_stack_vars): Add DATA argument. Change PRED type to
function taking size_t argument instead of tree. Adjust pred
calls. Fill DATA in and add needed padding in between variables
if -faddress-sanitizer.
(defer_stack_allocation): Defer everything for flag_asan.
(stack_protect_decl_phase_1, stack_protect_decl_phase_2): Take
size_t index into stack_vars array instead of the decl directly.
(asan_decl_phase_3): New function.
(expand_used_vars): Return var destruction sequence. Adjust
expand_stack_vars calls, add another one for flag_asan. Call
asan_emit_stack_protection if expand_stack_vars added anything
to the vectors.
(expand_gimple_basic_block): Add disable_tail_calls argument.
(gimple_expand_cfg): Pass true to it if expand_used_vars returned
non-NULL. Emit the sequence returned by expand_used_vars after
return_label.
* asan.h (asan_emit_stack_protection): New prototype.
(asan_shadow_set): New decl.
(ASAN_RED_ZONE_SIZE, ASAN_STACK_MAGIC_LEFT, ASAN_STACK_MAGIC_MIDDLE,
ASAN_STACK_MAGIC_RIGHT, ASAN_STACK_FRAME_MAGIC): Define.
(asan_protect_stack_decl): New inline.
* toplev.c (process_options): Also disable -faddress-sanitizer on
!FRAME_GROWS_DOWNWARDS targets.
From-SVN: r193436
2012-11-12 16:52:26 +01:00
|
|
|
variable 'a' and the partial red zone following it, like what we
|
|
|
|
had for variable 'b'. The least significant 3 bytes being 00
|
|
|
|
means that the 3 bytes of variable 'a' are addressable.
|
|
|
|
|
2012-11-12 17:18:59 +01:00
|
|
|
- content of shadow memory 8 bytes for slot 1: 0xF3F3F3F3.
|
Implement protection of stack variables
This patch implements the protection of stack variables.
It lays out stack variables as well as the different red zones,
emits some prologue code to populate the shadow memory as to poison
(mark as non-accessible) the regions of the red zones and mark the
regions of stack variables as accessible, and emit some epilogue code
to un-poison (mark as accessible) the regions of red zones right
before the function exits.
* Makefile.in (asan.o): Depend on $(EXPR_H) $(OPTABS_H).
(cfgexpand.o): Depend on asan.h.
* asan.c: Include expr.h and optabs.h.
(asan_shadow_set): New variable.
(asan_shadow_cst, asan_emit_stack_protection): New functions.
(asan_init_shadow_ptr_types): Initialize also asan_shadow_set.
* cfgexpand.c: Include asan.h. Define HOST_WIDE_INT heap vector.
(partition_stack_vars): If i is large alignment and j small
alignment or vice versa, break out of the loop instead of continue,
and put the test earlier. If flag_asan, break out of the loop
if for small alignment size is different.
(struct stack_vars_data): New type.
(expand_stack_vars): Add DATA argument. Change PRED type to
function taking size_t argument instead of tree. Adjust pred
calls. Fill DATA in and add needed padding in between variables
if -faddress-sanitizer.
(defer_stack_allocation): Defer everything for flag_asan.
(stack_protect_decl_phase_1, stack_protect_decl_phase_2): Take
size_t index into stack_vars array instead of the decl directly.
(asan_decl_phase_3): New function.
(expand_used_vars): Return var destruction sequence. Adjust
expand_stack_vars calls, add another one for flag_asan. Call
asan_emit_stack_protection if expand_stack_vars added anything
to the vectors.
(expand_gimple_basic_block): Add disable_tail_calls argument.
(gimple_expand_cfg): Pass true to it if expand_used_vars returned
non-NULL. Emit the sequence returned by expand_used_vars after
return_label.
* asan.h (asan_emit_stack_protection): New prototype.
(asan_shadow_set): New decl.
(ASAN_RED_ZONE_SIZE, ASAN_STACK_MAGIC_LEFT, ASAN_STACK_MAGIC_MIDDLE,
ASAN_STACK_MAGIC_RIGHT, ASAN_STACK_FRAME_MAGIC): Define.
(asan_protect_stack_decl): New inline.
* toplev.c (process_options): Also disable -faddress-sanitizer on
!FRAME_GROWS_DOWNWARDS targets.
From-SVN: r193436
2012-11-12 16:52:26 +01:00
|
|
|
The F3 byte pattern is a magic number called
|
|
|
|
ASAN_STACK_MAGIC_RIGHT. It flags the fact that the memory
|
|
|
|
region for this shadow byte is a RIGHT red zone intended to seat
|
|
|
|
at the top of the variables of the stack.
|
|
|
|
|
2012-11-12 17:18:59 +01:00
|
|
|
Note that the real variable layout is done in expand_used_vars in
|
|
|
|
cfgexpand.c. As far as Address Sanitizer is concerned, it lays out
|
|
|
|
stack variables as well as the different red zones, emits some
|
|
|
|
prologue code to populate the shadow memory as to poison (mark as
|
|
|
|
non-accessible) the regions of the red zones and mark the regions of
|
|
|
|
stack variables as accessible, and emit some epilogue code to
|
|
|
|
un-poison (mark as accessible) the regions of red zones right before
|
|
|
|
the function exits.
|
Implement protection of global variables
This patch implements the protection of global variables. See the
comments appended to the beginning of the asan.c file.
* varasm.c: Include asan.h.
(assemble_noswitch_variable): Grow size by asan_red_zone_size
if decl is asan protected.
(place_block_symbol): Likewise.
(assemble_variable): If decl is asan protected, increase
DECL_ALIGN if needed, and for decls emitted using
assemble_variable_contents append padding zeros after it.
* Makefile.in (varasm.o): Depend on asan.h.
* asan.c: Include output.h.
(asan_pp, asan_pp_initialized, asan_ctor_statements): New variables.
(asan_pp_initialize, asan_pp_string): New functions.
(asan_emit_stack_protection): Use asan_pp{,_initialized}
instead of local pp{,_initialized} vars, use asan_pp_initialize
and asan_pp_string helpers.
(asan_needs_local_alias, asan_protect_global,
asan_global_struct, asan_add_global): New functions.
(asan_finish_file): Protect global vars that can be protected. Use
asan_ctor_statements instead of ctor_statements
* asan.h (asan_protect_global): New prototype.
(asan_red_zone_size): New inline function.
Co-Authored-By: Wei Mi <wmi@google.com>
From-SVN: r193437
2012-11-12 16:52:42 +01:00
|
|
|
|
2012-11-12 17:18:59 +01:00
|
|
|
[Protection of global variables]
|
Implement protection of global variables
This patch implements the protection of global variables. See the
comments appended to the beginning of the asan.c file.
* varasm.c: Include asan.h.
(assemble_noswitch_variable): Grow size by asan_red_zone_size
if decl is asan protected.
(place_block_symbol): Likewise.
(assemble_variable): If decl is asan protected, increase
DECL_ALIGN if needed, and for decls emitted using
assemble_variable_contents append padding zeros after it.
* Makefile.in (varasm.o): Depend on asan.h.
* asan.c: Include output.h.
(asan_pp, asan_pp_initialized, asan_ctor_statements): New variables.
(asan_pp_initialize, asan_pp_string): New functions.
(asan_emit_stack_protection): Use asan_pp{,_initialized}
instead of local pp{,_initialized} vars, use asan_pp_initialize
and asan_pp_string helpers.
(asan_needs_local_alias, asan_protect_global,
asan_global_struct, asan_add_global): New functions.
(asan_finish_file): Protect global vars that can be protected. Use
asan_ctor_statements instead of ctor_statements
* asan.h (asan_protect_global): New prototype.
(asan_red_zone_size): New inline function.
Co-Authored-By: Wei Mi <wmi@google.com>
From-SVN: r193437
2012-11-12 16:52:42 +01:00
|
|
|
|
2012-11-12 17:18:59 +01:00
|
|
|
The basic idea is to insert a red zone between two global variables
|
|
|
|
and install a constructor function that calls the asan runtime to do
|
|
|
|
the populating of the relevant shadow memory regions at load time.
|
Implement protection of global variables
This patch implements the protection of global variables. See the
comments appended to the beginning of the asan.c file.
* varasm.c: Include asan.h.
(assemble_noswitch_variable): Grow size by asan_red_zone_size
if decl is asan protected.
(place_block_symbol): Likewise.
(assemble_variable): If decl is asan protected, increase
DECL_ALIGN if needed, and for decls emitted using
assemble_variable_contents append padding zeros after it.
* Makefile.in (varasm.o): Depend on asan.h.
* asan.c: Include output.h.
(asan_pp, asan_pp_initialized, asan_ctor_statements): New variables.
(asan_pp_initialize, asan_pp_string): New functions.
(asan_emit_stack_protection): Use asan_pp{,_initialized}
instead of local pp{,_initialized} vars, use asan_pp_initialize
and asan_pp_string helpers.
(asan_needs_local_alias, asan_protect_global,
asan_global_struct, asan_add_global): New functions.
(asan_finish_file): Protect global vars that can be protected. Use
asan_ctor_statements instead of ctor_statements
* asan.h (asan_protect_global): New prototype.
(asan_red_zone_size): New inline function.
Co-Authored-By: Wei Mi <wmi@google.com>
From-SVN: r193437
2012-11-12 16:52:42 +01:00
|
|
|
|
2012-11-12 17:18:59 +01:00
|
|
|
So the global variables are laid out as to insert a red zone between
|
|
|
|
them. The size of the red zones is so that each variable starts on a
|
|
|
|
32 bytes boundary.
|
Implement protection of global variables
This patch implements the protection of global variables. See the
comments appended to the beginning of the asan.c file.
* varasm.c: Include asan.h.
(assemble_noswitch_variable): Grow size by asan_red_zone_size
if decl is asan protected.
(place_block_symbol): Likewise.
(assemble_variable): If decl is asan protected, increase
DECL_ALIGN if needed, and for decls emitted using
assemble_variable_contents append padding zeros after it.
* Makefile.in (varasm.o): Depend on asan.h.
* asan.c: Include output.h.
(asan_pp, asan_pp_initialized, asan_ctor_statements): New variables.
(asan_pp_initialize, asan_pp_string): New functions.
(asan_emit_stack_protection): Use asan_pp{,_initialized}
instead of local pp{,_initialized} vars, use asan_pp_initialize
and asan_pp_string helpers.
(asan_needs_local_alias, asan_protect_global,
asan_global_struct, asan_add_global): New functions.
(asan_finish_file): Protect global vars that can be protected. Use
asan_ctor_statements instead of ctor_statements
* asan.h (asan_protect_global): New prototype.
(asan_red_zone_size): New inline function.
Co-Authored-By: Wei Mi <wmi@google.com>
From-SVN: r193437
2012-11-12 16:52:42 +01:00
|
|
|
|
2012-11-12 17:18:59 +01:00
|
|
|
Then a constructor function is installed so that, for each global
|
|
|
|
variable, it calls the runtime asan library function
|
|
|
|
__asan_register_globals_with an instance of this type:
|
Implement protection of global variables
This patch implements the protection of global variables. See the
comments appended to the beginning of the asan.c file.
* varasm.c: Include asan.h.
(assemble_noswitch_variable): Grow size by asan_red_zone_size
if decl is asan protected.
(place_block_symbol): Likewise.
(assemble_variable): If decl is asan protected, increase
DECL_ALIGN if needed, and for decls emitted using
assemble_variable_contents append padding zeros after it.
* Makefile.in (varasm.o): Depend on asan.h.
* asan.c: Include output.h.
(asan_pp, asan_pp_initialized, asan_ctor_statements): New variables.
(asan_pp_initialize, asan_pp_string): New functions.
(asan_emit_stack_protection): Use asan_pp{,_initialized}
instead of local pp{,_initialized} vars, use asan_pp_initialize
and asan_pp_string helpers.
(asan_needs_local_alias, asan_protect_global,
asan_global_struct, asan_add_global): New functions.
(asan_finish_file): Protect global vars that can be protected. Use
asan_ctor_statements instead of ctor_statements
* asan.h (asan_protect_global): New prototype.
(asan_red_zone_size): New inline function.
Co-Authored-By: Wei Mi <wmi@google.com>
From-SVN: r193437
2012-11-12 16:52:42 +01:00
|
|
|
|
|
|
|
struct __asan_global
|
|
|
|
{
|
|
|
|
// Address of the beginning of the global variable.
|
|
|
|
const void *__beg;
|
|
|
|
|
|
|
|
// Initial size of the global variable.
|
|
|
|
uptr __size;
|
|
|
|
|
|
|
|
// Size of the global variable + size of the red zone. This
|
|
|
|
// size is 32 bytes aligned.
|
|
|
|
uptr __size_with_redzone;
|
|
|
|
|
|
|
|
// Name of the global variable.
|
|
|
|
const void *__name;
|
|
|
|
|
2013-11-04 22:33:31 +01:00
|
|
|
// Name of the module where the global variable is declared.
|
|
|
|
const void *__module_name;
|
|
|
|
|
2013-11-22 21:04:45 +01:00
|
|
|
// 1 if it has dynamic initialization, 0 otherwise.
|
Implement protection of global variables
This patch implements the protection of global variables. See the
comments appended to the beginning of the asan.c file.
* varasm.c: Include asan.h.
(assemble_noswitch_variable): Grow size by asan_red_zone_size
if decl is asan protected.
(place_block_symbol): Likewise.
(assemble_variable): If decl is asan protected, increase
DECL_ALIGN if needed, and for decls emitted using
assemble_variable_contents append padding zeros after it.
* Makefile.in (varasm.o): Depend on asan.h.
* asan.c: Include output.h.
(asan_pp, asan_pp_initialized, asan_ctor_statements): New variables.
(asan_pp_initialize, asan_pp_string): New functions.
(asan_emit_stack_protection): Use asan_pp{,_initialized}
instead of local pp{,_initialized} vars, use asan_pp_initialize
and asan_pp_string helpers.
(asan_needs_local_alias, asan_protect_global,
asan_global_struct, asan_add_global): New functions.
(asan_finish_file): Protect global vars that can be protected. Use
asan_ctor_statements instead of ctor_statements
* asan.h (asan_protect_global): New prototype.
(asan_red_zone_size): New inline function.
Co-Authored-By: Wei Mi <wmi@google.com>
From-SVN: r193437
2012-11-12 16:52:42 +01:00
|
|
|
uptr __has_dynamic_init;
|
2014-09-23 19:59:53 +02:00
|
|
|
|
|
|
|
// A pointer to struct that contains source location, could be NULL.
|
|
|
|
__asan_global_source_location *__location;
|
Implement protection of global variables
This patch implements the protection of global variables. See the
comments appended to the beginning of the asan.c file.
* varasm.c: Include asan.h.
(assemble_noswitch_variable): Grow size by asan_red_zone_size
if decl is asan protected.
(place_block_symbol): Likewise.
(assemble_variable): If decl is asan protected, increase
DECL_ALIGN if needed, and for decls emitted using
assemble_variable_contents append padding zeros after it.
* Makefile.in (varasm.o): Depend on asan.h.
* asan.c: Include output.h.
(asan_pp, asan_pp_initialized, asan_ctor_statements): New variables.
(asan_pp_initialize, asan_pp_string): New functions.
(asan_emit_stack_protection): Use asan_pp{,_initialized}
instead of local pp{,_initialized} vars, use asan_pp_initialize
and asan_pp_string helpers.
(asan_needs_local_alias, asan_protect_global,
asan_global_struct, asan_add_global): New functions.
(asan_finish_file): Protect global vars that can be protected. Use
asan_ctor_statements instead of ctor_statements
* asan.h (asan_protect_global): New prototype.
(asan_red_zone_size): New inline function.
Co-Authored-By: Wei Mi <wmi@google.com>
From-SVN: r193437
2012-11-12 16:52:42 +01:00
|
|
|
}
|
|
|
|
|
2012-11-12 17:18:59 +01:00
|
|
|
A destructor function that calls the runtime asan library function
|
|
|
|
_asan_unregister_globals is also installed. */
|
Implement protection of stack variables
This patch implements the protection of stack variables.
It lays out stack variables as well as the different red zones,
emits some prologue code to populate the shadow memory as to poison
(mark as non-accessible) the regions of the red zones and mark the
regions of stack variables as accessible, and emit some epilogue code
to un-poison (mark as accessible) the regions of red zones right
before the function exits.
* Makefile.in (asan.o): Depend on $(EXPR_H) $(OPTABS_H).
(cfgexpand.o): Depend on asan.h.
* asan.c: Include expr.h and optabs.h.
(asan_shadow_set): New variable.
(asan_shadow_cst, asan_emit_stack_protection): New functions.
(asan_init_shadow_ptr_types): Initialize also asan_shadow_set.
* cfgexpand.c: Include asan.h. Define HOST_WIDE_INT heap vector.
(partition_stack_vars): If i is large alignment and j small
alignment or vice versa, break out of the loop instead of continue,
and put the test earlier. If flag_asan, break out of the loop
if for small alignment size is different.
(struct stack_vars_data): New type.
(expand_stack_vars): Add DATA argument. Change PRED type to
function taking size_t argument instead of tree. Adjust pred
calls. Fill DATA in and add needed padding in between variables
if -faddress-sanitizer.
(defer_stack_allocation): Defer everything for flag_asan.
(stack_protect_decl_phase_1, stack_protect_decl_phase_2): Take
size_t index into stack_vars array instead of the decl directly.
(asan_decl_phase_3): New function.
(expand_used_vars): Return var destruction sequence. Adjust
expand_stack_vars calls, add another one for flag_asan. Call
asan_emit_stack_protection if expand_stack_vars added anything
to the vectors.
(expand_gimple_basic_block): Add disable_tail_calls argument.
(gimple_expand_cfg): Pass true to it if expand_used_vars returned
non-NULL. Emit the sequence returned by expand_used_vars after
return_label.
* asan.h (asan_emit_stack_protection): New prototype.
(asan_shadow_set): New decl.
(ASAN_RED_ZONE_SIZE, ASAN_STACK_MAGIC_LEFT, ASAN_STACK_MAGIC_MIDDLE,
ASAN_STACK_MAGIC_RIGHT, ASAN_STACK_FRAME_MAGIC): Define.
(asan_protect_stack_decl): New inline.
* toplev.c (process_options): Also disable -faddress-sanitizer on
!FRAME_GROWS_DOWNWARDS targets.
From-SVN: r193436
2012-11-12 16:52:26 +01:00
|
|
|
|
2014-10-28 10:46:29 +01:00
|
|
|
/* Shadow memory offset; valid only once asan_shadow_offset_computed
   is set (filled in by set_asan_shadow_offset or asan_shadow_offset).  */
static unsigned HOST_WIDE_INT asan_shadow_offset_value;

/* True once asan_shadow_offset_value holds a usable value, whether
   user-supplied or queried from the target.  */
static bool asan_shadow_offset_computed;

/* User-specified section name glob patterns to sanitize; the strings
   are heap-allocated and owned by this vector (see
   set_sanitized_sections / section_sanitized_p).  */
static vec<char *> sanitized_sections;

/* Set of variable declarations that are going to be guarded by
   use-after-scope sanitizer.  */

static hash_set<tree> *asan_handled_variables = NULL;

/* NOTE(review): exported (non-static) label set; presumably shared with
   other files that record labels used by asan — confirm at callers.  */
hash_set <tree> *asan_used_labels = NULL;
|
|
|
|
|
2014-10-28 10:46:29 +01:00
|
|
|
/* Sets shadow offset to value in string VAL. */
|
|
|
|
|
|
|
|
bool
|
|
|
|
set_asan_shadow_offset (const char *val)
|
|
|
|
{
|
|
|
|
char *endp;
|
2015-06-01 14:37:26 +02:00
|
|
|
|
2014-10-28 10:46:29 +01:00
|
|
|
errno = 0;
|
|
|
|
#ifdef HAVE_LONG_LONG
|
|
|
|
asan_shadow_offset_value = strtoull (val, &endp, 0);
|
|
|
|
#else
|
|
|
|
asan_shadow_offset_value = strtoul (val, &endp, 0);
|
|
|
|
#endif
|
|
|
|
if (!(*val != '\0' && *endp == '\0' && errno == 0))
|
|
|
|
return false;
|
|
|
|
|
|
|
|
asan_shadow_offset_computed = true;
|
|
|
|
|
|
|
|
return true;
|
|
|
|
}
|
|
|
|
|
2015-04-17 09:51:02 +02:00
|
|
|
/* Set list of user-defined sections that need to be sanitized. */
|
|
|
|
|
|
|
|
void
|
2015-04-22 13:44:26 +02:00
|
|
|
set_sanitized_sections (const char *sections)
|
2015-04-17 09:51:02 +02:00
|
|
|
{
|
2015-04-22 13:44:26 +02:00
|
|
|
char *pat;
|
|
|
|
unsigned i;
|
|
|
|
FOR_EACH_VEC_ELT (sanitized_sections, i, pat)
|
|
|
|
free (pat);
|
|
|
|
sanitized_sections.truncate (0);
|
|
|
|
|
|
|
|
for (const char *s = sections; *s; )
|
|
|
|
{
|
|
|
|
const char *end;
|
|
|
|
for (end = s; *end && *end != ','; ++end);
|
|
|
|
size_t len = end - s;
|
|
|
|
sanitized_sections.safe_push (xstrndup (s, len));
|
|
|
|
s = *end ? end + 1 : end;
|
|
|
|
}
|
2015-04-17 09:51:02 +02:00
|
|
|
}
|
|
|
|
|
2016-12-13 10:14:47 +01:00
|
|
|
bool
|
|
|
|
asan_mark_p (gimple *stmt, enum asan_mark_flags flag)
|
|
|
|
{
|
|
|
|
return (gimple_call_internal_p (stmt, IFN_ASAN_MARK)
|
|
|
|
&& tree_to_uhwi (gimple_call_arg (stmt, 0)) == flag);
|
|
|
|
}
|
|
|
|
|
2016-11-07 11:23:38 +01:00
|
|
|
bool
|
|
|
|
asan_sanitize_stack_p (void)
|
|
|
|
{
|
|
|
|
return ((flag_sanitize & SANITIZE_ADDRESS)
|
|
|
|
&& ASAN_STACK
|
|
|
|
&& !asan_no_sanitize_address_p ());
|
|
|
|
}
|
|
|
|
|
2015-04-17 09:51:02 +02:00
|
|
|
/* Checks whether section SEC should be sanitized. */
|
|
|
|
|
|
|
|
static bool
|
|
|
|
section_sanitized_p (const char *sec)
|
|
|
|
{
|
2015-04-22 13:44:26 +02:00
|
|
|
char *pat;
|
|
|
|
unsigned i;
|
|
|
|
FOR_EACH_VEC_ELT (sanitized_sections, i, pat)
|
|
|
|
if (fnmatch (pat, sec, FNM_PERIOD) == 0)
|
|
|
|
return true;
|
2015-04-17 09:51:02 +02:00
|
|
|
return false;
|
|
|
|
}
|
|
|
|
|
2014-10-28 10:46:29 +01:00
|
|
|
/* Returns Asan shadow offset. */
|
|
|
|
|
|
|
|
static unsigned HOST_WIDE_INT
|
|
|
|
asan_shadow_offset ()
|
|
|
|
{
|
|
|
|
if (!asan_shadow_offset_computed)
|
|
|
|
{
|
|
|
|
asan_shadow_offset_computed = true;
|
|
|
|
asan_shadow_offset_value = targetm.asan_shadow_offset ();
|
|
|
|
}
|
|
|
|
return asan_shadow_offset_value;
|
|
|
|
}
|
|
|
|
|
Implement protection of stack variables
This patch implements the protection of stack variables.
It lays out stack variables as well as the different red zones,
emits some prologue code to populate the shadow memory as to poison
(mark as non-accessible) the regions of the red zones and mark the
regions of stack variables as accessible, and emit some epilogue code
to un-poison (mark as accessible) the regions of red zones right
before the function exits.
* Makefile.in (asan.o): Depend on $(EXPR_H) $(OPTABS_H).
(cfgexpand.o): Depend on asan.h.
* asan.c: Include expr.h and optabs.h.
(asan_shadow_set): New variable.
(asan_shadow_cst, asan_emit_stack_protection): New functions.
(asan_init_shadow_ptr_types): Initialize also asan_shadow_set.
* cfgexpand.c: Include asan.h. Define HOST_WIDE_INT heap vector.
(partition_stack_vars): If i is large alignment and j small
alignment or vice versa, break out of the loop instead of continue,
and put the test earlier. If flag_asan, break out of the loop
if for small alignment size is different.
(struct stack_vars_data): New type.
(expand_stack_vars): Add DATA argument. Change PRED type to
function taking size_t argument instead of tree. Adjust pred
calls. Fill DATA in and add needed padding in between variables
if -faddress-sanitizer.
(defer_stack_allocation): Defer everything for flag_asan.
(stack_protect_decl_phase_1, stack_protect_decl_phase_2): Take
size_t index into stack_vars array instead of the decl directly.
(asan_decl_phase_3): New function.
(expand_used_vars): Return var destruction sequence. Adjust
expand_stack_vars calls, add another one for flag_asan. Call
asan_emit_stack_protection if expand_stack_vars added anything
to the vectors.
(expand_gimple_basic_block): Add disable_tail_calls argument.
(gimple_expand_cfg): Pass true to it if expand_used_vars returned
non-NULL. Emit the sequence returned by expand_used_vars after
return_label.
* asan.h (asan_emit_stack_protection): New prototype.
(asan_shadow_set): New decl.
(ASAN_RED_ZONE_SIZE, ASAN_STACK_MAGIC_LEFT, ASAN_STACK_MAGIC_MIDDLE,
ASAN_STACK_MAGIC_RIGHT, ASAN_STACK_FRAME_MAGIC): Define.
(asan_protect_stack_decl): New inline.
* toplev.c (process_options): Also disable -faddress-sanitizer on
!FRAME_GROWS_DOWNWARDS targets.
From-SVN: r193436
2012-11-12 16:52:26 +01:00
|
|
|
/* Alias set used for all shadow memory accesses; -1 until initialized
   (per the ChangeLog, set up in asan_init_shadow_ptr_types).  */
alias_set_type asan_shadow_set = -1;
|
2012-11-12 16:51:13 +01:00
|
|
|
|
2016-11-07 11:23:38 +01:00
|
|
|
/* Pointer types to 1, 2 or 4 byte integers in shadow memory.  A separate
   alias set is used for all shadow memory accesses.  */
static GTY(()) tree shadow_ptr_types[3];
|
Emit GIMPLE directly instead of gimplifying GENERIC.
This patch cleanups the instrumentation code generation by emitting
GIMPLE directly, as opposed to emitting GENERIC tree and then
gimplifying them. It also does some cleanups here and there
* Makefile.in (GTFILES): Add $(srcdir)/asan.c.
(asan.o): Update the dependencies of asan.o.
* asan.c (tm.h, tree.h, tm_p.h, basic-block.h, flags.h
function.h, tree-inline.h, tree-dump.h, diagnostic.h, demangle.h,
langhooks.h, ggc.h, cgraph.h, gimple.h): Remove these unused but
included headers.
(shadow_ptr_types): New variable.
(report_error_func): Change is_store argument to bool, don't append
newline to function name.
(PROB_VERY_UNLIKELY, PROB_ALWAYS): Define.
(build_check_stmt): Change is_store argument to bool. Emit GIMPLE
directly instead of creating trees and gimplifying them. Mark
the error reporting function as very unlikely.
(instrument_derefs): Change is_store argument to bool. Use
int_size_in_bytes to compute size_in_bytes, simplify size check.
Use build_fold_addr_expr instead of build_addr.
(transform_statements): Adjust instrument_derefs caller.
Use gimple_assign_single_p as stmt test. Don't look at MEM refs
in rhs2.
(asan_init_shadow_ptr_types): New function.
(asan_instrument): Don't push/pop gimplify context.
Call asan_init_shadow_ptr_types if not yet initialized.
* asan.h (ASAN_SHADOW_SHIFT): Adjust comment.
Co-Authored-By: Dodji Seketeli <dodji@redhat.com>
Co-Authored-By: Xinliang David Li <davidxl@google.com>
From-SVN: r193434
2012-11-12 16:51:53 +01:00
|
|
|
|
2013-11-28 09:18:59 +01:00
|
|
|
/* Decl for __asan_option_detect_stack_use_after_return.  */
static GTY(()) tree asan_detect_stack_use_after_return;
|
|
|
|
|
[asan] Avoid instrumenting duplicated memory access in the same basic block
Like what Address Sanitizer does in LLVM, this patch avoids instrumented
duplicated memory accesses in the same basic blocks.
The approach taken is very conservative, to keep the pass simple, for
a start.
A memory access is considered to be a pair made of an expression tree
representing the beginning of the memory region that is accessed and
a the size of the access, in byte. For now that size is either 1, 2,
4, 8 or 16 bytes.
The patch builds a hash table of the memory accesses that have been
instrumented in the current basic block. Then it walks the gimple
statements of the current basic block. For each statement, it tests
if the memory regions it references have already been instrumented.
If not, the statement is instrumented and each memory references that
are actually instrumented are added to the hash table. When a memory
region is accessed (usually through builtin functions like memset),
then what gets added to the hash table is actually two memory
accesses: one for the beginning of the region, and the other for the
its end.
When the patch crosses a function call that is not a built-in function
that we ought to instrument, the hash table is cleared, because that
function call can possibly e.g free some memory that was instrumented.
Likewise, when a new basic block is visited, the hash table is
cleared. I guess we could be smarter than just unconditionally
clearing the hash table in this later case, but this is what asan@llvm
does, and for now, I thought starting in a conservative manner might
have some value.
The hash table is destroyed at the end of the pass.
Bootstrapped and tested against trunk on x86-64-unknown-linux-gnu.
gcc/
* Makefile.in (asan.o): Add new dependency on hash-table.h
* asan.c (struct asan_mem_ref, struct mem_ref_hasher): New types.
(asan_mem_ref_init, asan_mem_ref_get_end, get_mem_ref_hash_table)
(has_stmt_been_instrumented_p, empty_mem_ref_hash_table)
(free_mem_ref_resources, has_mem_ref_been_instrumented)
(has_stmt_been_instrumented_p, update_mem_ref_hash_table)
(get_mem_ref_of_assignment): New functions.
(get_mem_refs_of_builtin_call): Extract from
instrument_builtin_call and tweak a little bit to make it fit with
the new signature.
(instrument_builtin_call): Use the new
get_mem_refs_of_builtin_call. Use gimple_call_builtin_p instead
of is_gimple_builtin_call.
(instrument_derefs, instrument_mem_region_access): Insert the
instrumented memory reference into the hash table.
(maybe_instrument_assignment): Renamed instrument_assignment into
this, and change it to advance the iterator when instrumentation
actually happened and return true in that case. This makes it
homogeneous with maybe_instrument_assignment, and thus give a
chance to callers to be more 'regular'.
(transform_statements): Clear the memory reference hash table
whenever we enter a new BB, when we cross a function call, or when
we are done transforming statements. Use
maybe_instrument_assignment instead of instrumentation. No more
need to special case maybe_instrument_assignment and advance the
iterator after calling it; it's now handled just like
maybe_instrument_call. Update comment.
gcc/testsuite/
* c-c++-common/asan/no-redundant-instrumentation-1.c: New test.
* testsuite/c-c++-common/asan/no-redundant-instrumentation-2.c: Likewise.
* testsuite/c-c++-common/asan/no-redundant-instrumentation-3.c: Likewise.
* testsuite/c-c++-common/asan/inc.c: Likewise.
From-SVN: r196008
2013-02-13 11:32:26 +01:00
|
|
|
/* Hashtable support for memory references used by gimple
|
|
|
|
statements. */
|
|
|
|
|
|
|
|
/* This type represents a reference to a memory region. */
|
|
|
|
/* This type represents a reference to a memory region.  It is the key
   stored in the per-basic-block hash table of already-instrumented
   accesses (see the ChangeLog notes above).  */
struct asan_mem_ref
{
  /* The expression of the beginning of the memory region.  */
  tree start;

  /* The size of the access, in bytes.  */
  HOST_WIDE_INT access_size;
};
|
|
|
|
|
Share memory blocks between pool allocators
gcc/
* Makefile.in: Add memory-block.cc
(pool_allocator::initialize): Use fixed block size.
(pool_allocator::release): Use memory_block_pool.
(pool_allocator::allocate): Likewise.
* asan.c (asan_mem_ref_pool): Adjust to use common block size in all
object pools.
* cfg.c (initialize_original_copy_tables): Likewise.
* cselib.c (elt_list_pool, elt_loc_list_pool,
cselib_val_pool): Likewise.
* df-problems.c (df_chain_alloc): Likewise.
* df-scan.c (df_scan_alloc): Likewise.
* dse.c (cse_store_info_pool, rtx_store_info_pool,
read_info_type_pool, insn_info_type_pool, bb_info_pool,
group_info_pool, deferred_change_pool): Likewise.
* et-forest.c (et_nodes, et_occurrences): Likewise.
* ipa-cp.c (ipcp_cst_values_pool, ipcp_sources_pool,
ipcp_agg_lattice_pool): Likewise.
* ipa-inline-analysis.c (edge_predicate_pool): Likewise.
* ipa-profile.c (histogram_pool): Likewise.
* ipa-prop.c (ipa_refdesc_pool): Likewise.
* ira-build.c (live_range_pool, allocno_pool, object_pool,
initiate_cost_vectors, pref_pool, copy_pool): Likewise.
* ira-color.c (update_cost_record_pool): Likewise.
* lra-lives.c (lra_live_range_pool): Likewise.
* lra.c (lra_insn_reg_pool, lra_copy_pool): Likewise.
* memory-block.cc: New file.
* memory-block.h: New file.
* regcprop.c (queued_debug_insn_change_pool): Use common block size.
* sched-deps.c (sched_deps_init): Likewise.
* sel-sched-ir.c (sched_lists_pool): Likewise.
* stmt.c (expand_case, expand_sjlj_dispatch_table): Likewise.
* tree-sra.c (access_pool): Likewise.
* tree-ssa-math-opts.c (pass_cse_reciprocals::execute): Likewise.
* tree-ssa-pre.c (pre_expr_pool, bitmap_set_pool): Likewise.
* tree-ssa-reassoc.c (operand_entry_pool): Likewise.
* tree-ssa-sccvn.c (allocate_vn_table): Likewise.
* tree-ssa-strlen.c (strinfo_pool): Likewise.
* tree-ssa-structalias.c (variable_info_pool): Likewise.
* var-tracking.c (attrs_def_pool, var_pool, valvar_pool,
location_chain_pool, shared_hash_pool, loc_exp_dep_pool): Likewise.
gcc/c-family/
* c-format.c (check_format_arg): Adjust to use common block size in all
object pools.
From-SVN: r227817
2015-09-16 02:56:54 +02:00
|
|
|
/* Pool from which asan_mem_ref objects are allocated.  */
object_allocator <asan_mem_ref> asan_mem_ref_pool ("asan_mem_ref");
|
[asan] Avoid instrumenting duplicated memory access in the same basic block
Like what Address Sanitizer does in LLVM, this patch avoids instrumented
duplicated memory accesses in the same basic blocks.
The approach taken is very conservative, to keep the pass simple, for
a start.
A memory access is considered to be a pair made of an expression tree
representing the beginning of the memory region that is accessed and
a the size of the access, in byte. For now that size is either 1, 2,
4, 8 or 16 bytes.
The patch builds a hash table of the memory accesses that have been
instrumented in the current basic block. Then it walks the gimple
statements of the current basic block. For each statement, it tests
if the memory regions it references have already been instrumented.
If not, the statement is instrumented and each memory references that
are actually instrumented are added to the hash table. When a memory
region is accessed (usually through builtin functions like memset),
then what gets added to the hash table is actually two memory
accesses: one for the beginning of the region, and the other for the
its end.
When the patch crosses a function call that is not a built-in function
that we ought to instrument, the hash table is cleared, because that
function call can possibly e.g free some memory that was instrumented.
Likewise, when a new basic block is visited, the hash table is
cleared. I guess we could be smarter than just unconditionally
clearing the hash table in this later case, but this is what asan@llvm
does, and for now, I thought starting in a conservative manner might
have some value.
The hash table is destroyed at the end of the pass.
Bootstrapped and tested against trunk on x86-64-unknown-linux-gnu.
gcc/
* Makefile.in (asan.o): Add new dependency on hash-table.h
* asan.c (struct asan_mem_ref, struct mem_ref_hasher): New types.
(asan_mem_ref_init, asan_mem_ref_get_end, get_mem_ref_hash_table)
(has_stmt_been_instrumented_p, empty_mem_ref_hash_table)
(free_mem_ref_resources, has_mem_ref_been_instrumented)
(has_stmt_been_instrumented_p, update_mem_ref_hash_table)
(get_mem_ref_of_assignment): New functions.
(get_mem_refs_of_builtin_call): Extract from
instrument_builtin_call and tweak a little bit to make it fit with
the new signature.
(instrument_builtin_call): Use the new
get_mem_refs_of_builtin_call. Use gimple_call_builtin_p instead
of is_gimple_builtin_call.
(instrument_derefs, instrument_mem_region_access): Insert the
instrumented memory reference into the hash table.
(maybe_instrument_assignment): Renamed instrument_assignment into
this, and change it to advance the iterator when instrumentation
actually happened and return true in that case. This makes it
homogeneous with maybe_instrument_assignment, and thus give a
chance to callers to be more 'regular'.
(transform_statements): Clear the memory reference hash table
whenever we enter a new BB, when we cross a function call, or when
we are done transforming statements. Use
maybe_instrument_assignment instead of instrumentation. No more
need to special case maybe_instrument_assignment and advance the
iterator after calling it; it's now handled just like
maybe_instrument_call. Update comment.
gcc/testsuite/
* c-c++-common/asan/no-redundant-instrumentation-1.c: New test.
* testsuite/c-c++-common/asan/no-redundant-instrumentation-2.c: Likewise.
* testsuite/c-c++-common/asan/no-redundant-instrumentation-3.c: Likewise.
* testsuite/c-c++-common/asan/inc.c: Likewise.
From-SVN: r196008
2013-02-13 11:32:26 +01:00
|
|
|
|
|
|
|
/* Initializes an instance of asan_mem_ref. */
|
|
|
|
|
|
|
|
static void
|
2014-05-30 20:37:05 +02:00
|
|
|
asan_mem_ref_init (asan_mem_ref *ref, tree start, HOST_WIDE_INT access_size)
|
[asan] Avoid instrumenting duplicated memory access in the same basic block
Like what Address Sanitizer does in LLVM, this patch avoids instrumented
duplicated memory accesses in the same basic blocks.
The approach taken is very conservative, to keep the pass simple, for
a start.
A memory access is considered to be a pair made of an expression tree
representing the beginning of the memory region that is accessed and
a the size of the access, in byte. For now that size is either 1, 2,
4, 8 or 16 bytes.
The patch builds a hash table of the memory accesses that have been
instrumented in the current basic block. Then it walks the gimple
statements of the current basic block. For each statement, it tests
if the memory regions it references have already been instrumented.
If not, the statement is instrumented and each memory references that
are actually instrumented are added to the hash table. When a memory
region is accessed (usually through builtin functions like memset),
then what gets added to the hash table is actually two memory
accesses: one for the beginning of the region, and the other for the
its end.
When the patch crosses a function call that is not a built-in function
that we ought to instrument, the hash table is cleared, because that
function call can possibly e.g free some memory that was instrumented.
Likewise, when a new basic block is visited, the hash table is
cleared. I guess we could be smarter than just unconditionally
clearing the hash table in this later case, but this is what asan@llvm
does, and for now, I thought starting in a conservative manner might
have some value.
The hash table is destroyed at the end of the pass.
Bootstrapped and tested against trunk on x86-64-unknown-linux-gnu.
gcc/
* Makefile.in (asan.o): Add new dependency on hash-table.h
* asan.c (struct asan_mem_ref, struct mem_ref_hasher): New types.
(asan_mem_ref_init, asan_mem_ref_get_end, get_mem_ref_hash_table)
(has_stmt_been_instrumented_p, empty_mem_ref_hash_table)
(free_mem_ref_resources, has_mem_ref_been_instrumented)
(has_stmt_been_instrumented_p, update_mem_ref_hash_table)
(get_mem_ref_of_assignment): New functions.
(get_mem_refs_of_builtin_call): Extract from
instrument_builtin_call and tweak a little bit to make it fit with
the new signature.
(instrument_builtin_call): Use the new
get_mem_refs_of_builtin_call. Use gimple_call_builtin_p instead
of is_gimple_builtin_call.
(instrument_derefs, instrument_mem_region_access): Insert the
instrumented memory reference into the hash table.
(maybe_instrument_assignment): Renamed instrument_assignment into
this, and change it to advance the iterator when instrumentation
actually happened and return true in that case. This makes it
homogeneous with maybe_instrument_assignment, and thus give a
chance to callers to be more 'regular'.
(transform_statements): Clear the memory reference hash table
whenever we enter a new BB, when we cross a function call, or when
we are done transforming statements. Use
maybe_instrument_assignment instead of instrumentation. No more
need to special case maybe_instrument_assignment and advance the
iterator after calling it; it's now handled just like
maybe_instrument_call. Update comment.
gcc/testsuite/
* c-c++-common/asan/no-redundant-instrumentation-1.c: New test.
* testsuite/c-c++-common/asan/no-redundant-instrumentation-2.c: Likewise.
* testsuite/c-c++-common/asan/no-redundant-instrumentation-3.c: Likewise.
* testsuite/c-c++-common/asan/inc.c: Likewise.
From-SVN: r196008
2013-02-13 11:32:26 +01:00
|
|
|
{
|
|
|
|
ref->start = start;
|
|
|
|
ref->access_size = access_size;
|
|
|
|
}
|
|
|
|
|
|
|
|
/* Allocates memory for an instance of asan_mem_ref into the memory
|
|
|
|
pool returned by asan_mem_ref_get_alloc_pool and initialize it.
|
|
|
|
START is the address of (or the expression pointing to) the
|
|
|
|
beginning of memory reference. ACCESS_SIZE is the size of the
|
|
|
|
access to the referenced memory. */
|
|
|
|
|
|
|
|
static asan_mem_ref*
|
2014-05-30 20:37:05 +02:00
|
|
|
asan_mem_ref_new (tree start, HOST_WIDE_INT access_size)
|
[asan] Avoid instrumenting duplicated memory access in the same basic block
Like what Address Sanitizer does in LLVM, this patch avoids instrumented
duplicated memory accesses in the same basic blocks.
The approach taken is very conservative, to keep the pass simple, for
a start.
A memory access is considered to be a pair made of an expression tree
representing the beginning of the memory region that is accessed and
a the size of the access, in byte. For now that size is either 1, 2,
4, 8 or 16 bytes.
The patch builds a hash table of the memory accesses that have been
instrumented in the current basic block. Then it walks the gimple
statements of the current basic block. For each statement, it tests
if the memory regions it references have already been instrumented.
If not, the statement is instrumented and each memory references that
are actually instrumented are added to the hash table. When a memory
region is accessed (usually through builtin functions like memset),
then what gets added to the hash table is actually two memory
accesses: one for the beginning of the region, and the other for the
its end.
When the patch crosses a function call that is not a built-in function
that we ought to instrument, the hash table is cleared, because that
function call can possibly e.g free some memory that was instrumented.
Likewise, when a new basic block is visited, the hash table is
cleared. I guess we could be smarter than just unconditionally
clearing the hash table in this later case, but this is what asan@llvm
does, and for now, I thought starting in a conservative manner might
have some value.
The hash table is destroyed at the end of the pass.
Bootstrapped and tested against trunk on x86-64-unknown-linux-gnu.
gcc/
* Makefile.in (asan.o): Add new dependency on hash-table.h
* asan.c (struct asan_mem_ref, struct mem_ref_hasher): New types.
(asan_mem_ref_init, asan_mem_ref_get_end, get_mem_ref_hash_table)
(has_stmt_been_instrumented_p, empty_mem_ref_hash_table)
(free_mem_ref_resources, has_mem_ref_been_instrumented)
(has_stmt_been_instrumented_p, update_mem_ref_hash_table)
(get_mem_ref_of_assignment): New functions.
(get_mem_refs_of_builtin_call): Extract from
instrument_builtin_call and tweak a little bit to make it fit with
the new signature.
(instrument_builtin_call): Use the new
get_mem_refs_of_builtin_call. Use gimple_call_builtin_p instead
of is_gimple_builtin_call.
(instrument_derefs, instrument_mem_region_access): Insert the
instrumented memory reference into the hash table.
(maybe_instrument_assignment): Renamed instrument_assignment into
this, and change it to advance the iterator when instrumentation
actually happened and return true in that case. This makes it
homogeneous with maybe_instrument_assignment, and thus give a
chance to callers to be more 'regular'.
(transform_statements): Clear the memory reference hash table
whenever we enter a new BB, when we cross a function call, or when
we are done transforming statements. Use
maybe_instrument_assignment instead of instrumentation. No more
need to special case maybe_instrument_assignment and advance the
iterator after calling it; it's now handled just like
maybe_instrument_call. Update comment.
gcc/testsuite/
* c-c++-common/asan/no-redundant-instrumentation-1.c: New test.
* testsuite/c-c++-common/asan/no-redundant-instrumentation-2.c: Likewise.
* testsuite/c-c++-common/asan/no-redundant-instrumentation-3.c: Likewise.
* testsuite/c-c++-common/asan/inc.c: Likewise.
From-SVN: r196008
2013-02-13 11:32:26 +01:00
|
|
|
{
|
2015-07-16 13:26:05 +02:00
|
|
|
asan_mem_ref *ref = asan_mem_ref_pool.allocate ();
|
[asan] Avoid instrumenting duplicated memory access in the same basic block
Like what Address Sanitizer does in LLVM, this patch avoids instrumented
duplicated memory accesses in the same basic blocks.
The approach taken is very conservative, to keep the pass simple, for
a start.
A memory access is considered to be a pair made of an expression tree
representing the beginning of the memory region that is accessed and
a the size of the access, in byte. For now that size is either 1, 2,
4, 8 or 16 bytes.
The patch builds a hash table of the memory accesses that have been
instrumented in the current basic block. Then it walks the gimple
statements of the current basic block. For each statement, it tests
if the memory regions it references have already been instrumented.
If not, the statement is instrumented and each memory references that
are actually instrumented are added to the hash table. When a memory
region is accessed (usually through builtin functions like memset),
then what gets added to the hash table is actually two memory
accesses: one for the beginning of the region, and the other for the
its end.
When the patch crosses a function call that is not a built-in function
that we ought to instrument, the hash table is cleared, because that
function call can possibly e.g free some memory that was instrumented.
Likewise, when a new basic block is visited, the hash table is
cleared. I guess we could be smarter than just unconditionally
clearing the hash table in this later case, but this is what asan@llvm
does, and for now, I thought starting in a conservative manner might
have some value.
The hash table is destroyed at the end of the pass.
Bootstrapped and tested against trunk on x86-64-unknown-linux-gnu.
gcc/
* Makefile.in (asan.o): Add new dependency on hash-table.h
* asan.c (struct asan_mem_ref, struct mem_ref_hasher): New types.
(asan_mem_ref_init, asan_mem_ref_get_end, get_mem_ref_hash_table)
(has_stmt_been_instrumented_p, empty_mem_ref_hash_table)
(free_mem_ref_resources, has_mem_ref_been_instrumented)
(has_stmt_been_instrumented_p, update_mem_ref_hash_table)
(get_mem_ref_of_assignment): New functions.
(get_mem_refs_of_builtin_call): Extract from
instrument_builtin_call and tweak a little bit to make it fit with
the new signature.
(instrument_builtin_call): Use the new
get_mem_refs_of_builtin_call. Use gimple_call_builtin_p instead
of is_gimple_builtin_call.
(instrument_derefs, instrument_mem_region_access): Insert the
instrumented memory reference into the hash table.
(maybe_instrument_assignment): Renamed instrument_assignment into
this, and change it to advance the iterator when instrumentation
actually happened and return true in that case. This makes it
homogeneous with maybe_instrument_assignment, and thus give a
chance to callers to be more 'regular'.
(transform_statements): Clear the memory reference hash table
whenever we enter a new BB, when we cross a function call, or when
we are done transforming statements. Use
maybe_instrument_assignment instead of instrumentation. No more
need to special case maybe_instrument_assignment and advance the
iterator after calling it; it's now handled just like
maybe_instrument_call. Update comment.
gcc/testsuite/
* c-c++-common/asan/no-redundant-instrumentation-1.c: New test.
* testsuite/c-c++-common/asan/no-redundant-instrumentation-2.c: Likewise.
* testsuite/c-c++-common/asan/no-redundant-instrumentation-3.c: Likewise.
* testsuite/c-c++-common/asan/inc.c: Likewise.
From-SVN: r196008
2013-02-13 11:32:26 +01:00
|
|
|
|
|
|
|
asan_mem_ref_init (ref, start, access_size);
|
|
|
|
return ref;
|
|
|
|
}
|
|
|
|
|
|
|
|
/* This builds and returns a pointer to the end of the memory region
|
|
|
|
that starts at START and of length LEN. */
|
|
|
|
|
|
|
|
tree
|
|
|
|
asan_mem_ref_get_end (tree start, tree len)
|
|
|
|
{
|
|
|
|
if (len == NULL_TREE || integer_zerop (len))
|
|
|
|
return start;
|
|
|
|
|
2014-09-01 09:47:37 +02:00
|
|
|
if (!ptrofftype_p (len))
|
|
|
|
len = convert_to_ptrofftype (len);
|
|
|
|
|
[asan] Avoid instrumenting duplicated memory access in the same basic block
Like what Address Sanitizer does in LLVM, this patch avoids instrumented
duplicated memory accesses in the same basic blocks.
The approach taken is very conservative, to keep the pass simple, for
a start.
A memory access is considered to be a pair made of an expression tree
representing the beginning of the memory region that is accessed and
a the size of the access, in byte. For now that size is either 1, 2,
4, 8 or 16 bytes.
The patch builds a hash table of the memory accesses that have been
instrumented in the current basic block. Then it walks the gimple
statements of the current basic block. For each statement, it tests
if the memory regions it references have already been instrumented.
If not, the statement is instrumented and each memory references that
are actually instrumented are added to the hash table. When a memory
region is accessed (usually through builtin functions like memset),
then what gets added to the hash table is actually two memory
accesses: one for the beginning of the region, and the other for the
its end.
When the patch crosses a function call that is not a built-in function
that we ought to instrument, the hash table is cleared, because that
function call can possibly e.g free some memory that was instrumented.
Likewise, when a new basic block is visited, the hash table is
cleared. I guess we could be smarter than just unconditionally
clearing the hash table in this later case, but this is what asan@llvm
does, and for now, I thought starting in a conservative manner might
have some value.
The hash table is destroyed at the end of the pass.
Bootstrapped and tested against trunk on x86-64-unknown-linux-gnu.
gcc/
* Makefile.in (asan.o): Add new dependency on hash-table.h
* asan.c (struct asan_mem_ref, struct mem_ref_hasher): New types.
(asan_mem_ref_init, asan_mem_ref_get_end, get_mem_ref_hash_table)
(has_stmt_been_instrumented_p, empty_mem_ref_hash_table)
(free_mem_ref_resources, has_mem_ref_been_instrumented)
(has_stmt_been_instrumented_p, update_mem_ref_hash_table)
(get_mem_ref_of_assignment): New functions.
(get_mem_refs_of_builtin_call): Extract from
instrument_builtin_call and tweak a little bit to make it fit with
the new signature.
(instrument_builtin_call): Use the new
get_mem_refs_of_builtin_call. Use gimple_call_builtin_p instead
of is_gimple_builtin_call.
(instrument_derefs, instrument_mem_region_access): Insert the
instrumented memory reference into the hash table.
(maybe_instrument_assignment): Renamed instrument_assignment into
this, and change it to advance the iterator when instrumentation
actually happened and return true in that case. This makes it
homogeneous with maybe_instrument_assignment, and thus give a
chance to callers to be more 'regular'.
(transform_statements): Clear the memory reference hash table
whenever we enter a new BB, when we cross a function call, or when
we are done transforming statements. Use
maybe_instrument_assignment instead of instrumentation. No more
need to special case maybe_instrument_assignment and advance the
iterator after calling it; it's now handled just like
maybe_instrument_call. Update comment.
gcc/testsuite/
* c-c++-common/asan/no-redundant-instrumentation-1.c: New test.
* testsuite/c-c++-common/asan/no-redundant-instrumentation-2.c: Likewise.
* testsuite/c-c++-common/asan/no-redundant-instrumentation-3.c: Likewise.
* testsuite/c-c++-common/asan/inc.c: Likewise.
From-SVN: r196008
2013-02-13 11:32:26 +01:00
|
|
|
return fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (start), start, len);
|
|
|
|
}
|
|
|
|
|
|
|
|
/* Return a tree expression that represents the end of the referenced
|
|
|
|
memory region. Beware that this function can actually build a new
|
|
|
|
tree expression. */
|
|
|
|
|
|
|
|
tree
|
|
|
|
asan_mem_ref_get_end (const asan_mem_ref *ref, tree len)
|
|
|
|
{
|
|
|
|
return asan_mem_ref_get_end (ref->start, len);
|
|
|
|
}
|
|
|
|
|
2015-06-25 19:06:13 +02:00
|
|
|
/* Hash-table traits for asan_mem_ref entries.  The stored pointers
   are owned by asan_mem_ref_pool, so the table must not free them
   itself — hence the nofree_ptr_hash base.  */

struct asan_mem_ref_hasher : nofree_ptr_hash <asan_mem_ref>
{
  static inline hashval_t hash (const asan_mem_ref *);
  static inline bool equal (const asan_mem_ref *, const asan_mem_ref *);
};
|
|
|
|
|
|
|
|
/* Hash a memory reference. */
|
|
|
|
|
|
|
|
inline hashval_t
|
|
|
|
asan_mem_ref_hasher::hash (const asan_mem_ref *mem_ref)
|
|
|
|
{
|
2014-10-28 13:36:54 +01:00
|
|
|
return iterative_hash_expr (mem_ref->start, 0);
|
[asan] Avoid instrumenting duplicated memory access in the same basic block
Like what Address Sanitizer does in LLVM, this patch avoids instrumented
duplicated memory accesses in the same basic blocks.
The approach taken is very conservative, to keep the pass simple, for
a start.
A memory access is considered to be a pair made of an expression tree
representing the beginning of the memory region that is accessed and
a the size of the access, in byte. For now that size is either 1, 2,
4, 8 or 16 bytes.
The patch builds a hash table of the memory accesses that have been
instrumented in the current basic block. Then it walks the gimple
statements of the current basic block. For each statement, it tests
if the memory regions it references have already been instrumented.
If not, the statement is instrumented and each memory references that
are actually instrumented are added to the hash table. When a memory
region is accessed (usually through builtin functions like memset),
then what gets added to the hash table is actually two memory
accesses: one for the beginning of the region, and the other for the
its end.
When the patch crosses a function call that is not a built-in function
that we ought to instrument, the hash table is cleared, because that
function call can possibly e.g free some memory that was instrumented.
Likewise, when a new basic block is visited, the hash table is
cleared. I guess we could be smarter than just unconditionally
clearing the hash table in this later case, but this is what asan@llvm
does, and for now, I thought starting in a conservative manner might
have some value.
The hash table is destroyed at the end of the pass.
Bootstrapped and tested against trunk on x86-64-unknown-linux-gnu.
gcc/
* Makefile.in (asan.o): Add new dependency on hash-table.h
* asan.c (struct asan_mem_ref, struct mem_ref_hasher): New types.
(asan_mem_ref_init, asan_mem_ref_get_end, get_mem_ref_hash_table)
(has_stmt_been_instrumented_p, empty_mem_ref_hash_table)
(free_mem_ref_resources, has_mem_ref_been_instrumented)
(has_stmt_been_instrumented_p, update_mem_ref_hash_table)
(get_mem_ref_of_assignment): New functions.
(get_mem_refs_of_builtin_call): Extract from
instrument_builtin_call and tweak a little bit to make it fit with
the new signature.
(instrument_builtin_call): Use the new
get_mem_refs_of_builtin_call. Use gimple_call_builtin_p instead
of is_gimple_builtin_call.
(instrument_derefs, instrument_mem_region_access): Insert the
instrumented memory reference into the hash table.
(maybe_instrument_assignment): Renamed instrument_assignment into
this, and change it to advance the iterator when instrumentation
actually happened and return true in that case. This makes it
homogeneous with maybe_instrument_assignment, and thus give a
chance to callers to be more 'regular'.
(transform_statements): Clear the memory reference hash table
whenever we enter a new BB, when we cross a function call, or when
we are done transforming statements. Use
maybe_instrument_assignment instead of instrumentation. No more
need to special case maybe_instrument_assignment and advance the
iterator after calling it; it's now handled just like
maybe_instrument_call. Update comment.
gcc/testsuite/
* c-c++-common/asan/no-redundant-instrumentation-1.c: New test.
* testsuite/c-c++-common/asan/no-redundant-instrumentation-2.c: Likewise.
* testsuite/c-c++-common/asan/no-redundant-instrumentation-3.c: Likewise.
* testsuite/c-c++-common/asan/inc.c: Likewise.
From-SVN: r196008
2013-02-13 11:32:26 +01:00
|
|
|
}
|
|
|
|
|
|
|
|
/* Compare two memory references. We accept the length of either
|
|
|
|
memory references to be NULL_TREE. */
|
|
|
|
|
|
|
|
inline bool
|
|
|
|
asan_mem_ref_hasher::equal (const asan_mem_ref *m1,
|
|
|
|
const asan_mem_ref *m2)
|
|
|
|
{
|
2014-10-28 13:36:54 +01:00
|
|
|
return operand_equal_p (m1->start, m2->start, 0);
|
[asan] Avoid instrumenting duplicated memory access in the same basic block
Like what Address Sanitizer does in LLVM, this patch avoids instrumented
duplicated memory accesses in the same basic blocks.
The approach taken is very conservative, to keep the pass simple, for
a start.
A memory access is considered to be a pair made of an expression tree
representing the beginning of the memory region that is accessed and
a the size of the access, in byte. For now that size is either 1, 2,
4, 8 or 16 bytes.
The patch builds a hash table of the memory accesses that have been
instrumented in the current basic block. Then it walks the gimple
statements of the current basic block. For each statement, it tests
if the memory regions it references have already been instrumented.
If not, the statement is instrumented and each memory references that
are actually instrumented are added to the hash table. When a memory
region is accessed (usually through builtin functions like memset),
then what gets added to the hash table is actually two memory
accesses: one for the beginning of the region, and the other for the
its end.
When the patch crosses a function call that is not a built-in function
that we ought to instrument, the hash table is cleared, because that
function call can possibly e.g free some memory that was instrumented.
Likewise, when a new basic block is visited, the hash table is
cleared. I guess we could be smarter than just unconditionally
clearing the hash table in this later case, but this is what asan@llvm
does, and for now, I thought starting in a conservative manner might
have some value.
The hash table is destroyed at the end of the pass.
Bootstrapped and tested against trunk on x86-64-unknown-linux-gnu.
gcc/
* Makefile.in (asan.o): Add new dependency on hash-table.h
* asan.c (struct asan_mem_ref, struct mem_ref_hasher): New types.
(asan_mem_ref_init, asan_mem_ref_get_end, get_mem_ref_hash_table)
(has_stmt_been_instrumented_p, empty_mem_ref_hash_table)
(free_mem_ref_resources, has_mem_ref_been_instrumented)
(has_stmt_been_instrumented_p, update_mem_ref_hash_table)
(get_mem_ref_of_assignment): New functions.
(get_mem_refs_of_builtin_call): Extract from
instrument_builtin_call and tweak a little bit to make it fit with
the new signature.
(instrument_builtin_call): Use the new
get_mem_refs_of_builtin_call. Use gimple_call_builtin_p instead
of is_gimple_builtin_call.
(instrument_derefs, instrument_mem_region_access): Insert the
instrumented memory reference into the hash table.
(maybe_instrument_assignment): Renamed instrument_assignment into
this, and change it to advance the iterator when instrumentation
actually happened and return true in that case. This makes it
homogeneous with maybe_instrument_assignment, and thus give a
chance to callers to be more 'regular'.
(transform_statements): Clear the memory reference hash table
whenever we enter a new BB, when we cross a function call, or when
we are done transforming statements. Use
maybe_instrument_assignment instead of instrumentation. No more
need to special case maybe_instrument_assignment and advance the
iterator after calling it; it's now handled just like
maybe_instrument_call. Update comment.
gcc/testsuite/
* c-c++-common/asan/no-redundant-instrumentation-1.c: New test.
* testsuite/c-c++-common/asan/no-redundant-instrumentation-2.c: Likewise.
* testsuite/c-c++-common/asan/no-redundant-instrumentation-3.c: Likewise.
* testsuite/c-c++-common/asan/inc.c: Likewise.
From-SVN: r196008
2013-02-13 11:32:26 +01:00
|
|
|
}
|
|
|
|
|
Remove a layer of indirection from hash_table
gcc/
* hash-table.h: Remove a layer of indirection from hash_table so that
it contains the hash table's data instead of a pointer to the data.
* alloc-pool.c, asan.c, attribs.c, bitmap.c, cfg.c,
config/arm/arm.c, config/i386/winnt.c, config/ia64/ia64.c,
config/mips/mips.c, config/sol2.c, coverage.c, cselib.c,
data-streamer-out.c, dse.c, dwarf2cfi.c, dwarf2out.c, except.c,
fold-const.c, gcse.c, ggc-common.c,
gimple-ssa-strength-reduction.c, gimplify.c,
graphite-clast-to-gimple.c, graphite-dependences.c,
graphite-htab.h, graphite.c, haifa-sched.c, ipa-devirt.c,
ipa-profile.c, ira-color.c, ira-costs.c, loop-invariant.c,
loop-iv.c, loop-unroll.c, lto-streamer-in.c, lto-streamer-out.c,
lto-streamer.c, lto-streamer.h, passes.c, plugin.c,
postreload-gcse.c, sese.c, statistics.c, store-motion.c,
trans-mem.c, tree-browser.c, tree-cfg.c, tree-complex.c,
tree-eh.c, tree-into-ssa.c, tree-parloops.c, tree-sra.c,
tree-ssa-ccp.c, tree-ssa-coalesce.c, tree-ssa-dom.c,
tree-ssa-live.c, tree-ssa-loop-im.c,
tree-ssa-loop-ivopts.c, tree-ssa-phiopt.c, tree-ssa-pre.c,
tree-ssa-reassoc.c, tree-ssa-sccvn.c, tree-ssa-strlen.c,
tree-ssa-structalias.c, tree-ssa-tail-merge.c,
tree-ssa-threadupdate.c, tree-ssa-uncprop.c,
tree-vect-data-refs.c, tree-vect-loop.c, tree-vectorizer.c,
tree-vectorizer.h, valtrack.c, valtrack.h, var-tracking.c,
vtable-verify.c, vtable-verify.h: Adjust.
gcc/c/
* c-decl.c: Adjust.
gcc/cp/
* class.c, semantics.c, tree.c, vtable-class-hierarchy.c:
Adjust.
gcc/java/
* jcf-io.c: Adjust.
gcc/lto/
* lto.c: Adjust.
gcc/objc/
* objc-act.c: Adjust.
From-SVN: r211936
2014-06-24 15:21:35 +02:00
|
|
|
/* Hash table of the memory references that have already been
   instrumented in the current basic block; consulted to avoid
   emitting redundant instrumentation for the same access.  */
static hash_table<asan_mem_ref_hasher> *asan_mem_ref_ht;
|
[asan] Avoid instrumenting duplicated memory access in the same basic block
Like what Address Sanitizer does in LLVM, this patch avoids instrumented
duplicated memory accesses in the same basic blocks.
The approach taken is very conservative, to keep the pass simple, for
a start.
A memory access is considered to be a pair made of an expression tree
representing the beginning of the memory region that is accessed and
a the size of the access, in byte. For now that size is either 1, 2,
4, 8 or 16 bytes.
The patch builds a hash table of the memory accesses that have been
instrumented in the current basic block. Then it walks the gimple
statements of the current basic block. For each statement, it tests
if the memory regions it references have already been instrumented.
If not, the statement is instrumented and each memory references that
are actually instrumented are added to the hash table. When a memory
region is accessed (usually through builtin functions like memset),
then what gets added to the hash table is actually two memory
accesses: one for the beginning of the region, and the other for the
its end.
When the patch crosses a function call that is not a built-in function
that we ought to instrument, the hash table is cleared, because that
function call can possibly e.g free some memory that was instrumented.
Likewise, when a new basic block is visited, the hash table is
cleared. I guess we could be smarter than just unconditionally
clearing the hash table in this later case, but this is what asan@llvm
does, and for now, I thought starting in a conservative manner might
have some value.
The hash table is destroyed at the end of the pass.
Bootstrapped and tested against trunk on x86-64-unknown-linux-gnu.
gcc/
* Makefile.in (asan.o): Add new dependency on hash-table.h
* asan.c (struct asan_mem_ref, struct mem_ref_hasher): New types.
(asan_mem_ref_init, asan_mem_ref_get_end, get_mem_ref_hash_table)
(has_stmt_been_instrumented_p, empty_mem_ref_hash_table)
(free_mem_ref_resources, has_mem_ref_been_instrumented)
(has_stmt_been_instrumented_p, update_mem_ref_hash_table)
(get_mem_ref_of_assignment): New functions.
(get_mem_refs_of_builtin_call): Extract from
instrument_builtin_call and tweak a little bit to make it fit with
the new signature.
(instrument_builtin_call): Use the new
get_mem_refs_of_builtin_call. Use gimple_call_builtin_p instead
of is_gimple_builtin_call.
(instrument_derefs, instrument_mem_region_access): Insert the
instrumented memory reference into the hash table.
(maybe_instrument_assignment): Renamed instrument_assignment into
this, and change it to advance the iterator when instrumentation
actually happened and return true in that case. This makes it
homogeneous with maybe_instrument_assignment, and thus give a
chance to callers to be more 'regular'.
(transform_statements): Clear the memory reference hash table
whenever we enter a new BB, when we cross a function call, or when
we are done transforming statements. Use
maybe_instrument_assignment instead of instrumentation. No more
need to special case maybe_instrument_assignment and advance the
iterator after calling it; it's now handled just like
maybe_instrument_call. Update comment.
gcc/testsuite/
* c-c++-common/asan/no-redundant-instrumentation-1.c: New test.
* testsuite/c-c++-common/asan/no-redundant-instrumentation-2.c: Likewise.
* testsuite/c-c++-common/asan/no-redundant-instrumentation-3.c: Likewise.
* testsuite/c-c++-common/asan/inc.c: Likewise.
From-SVN: r196008
2013-02-13 11:32:26 +01:00
|
|
|
|
|
|
|
/* Returns a reference to the hash table containing memory references.
|
|
|
|
This function ensures that the hash table is created. Note that
|
|
|
|
this hash table is updated by the function
|
|
|
|
update_mem_ref_hash_table. */
|
|
|
|
|
Remove a layer of indirection from hash_table
gcc/
* hash-table.h: Remove a layer of indirection from hash_table so that
it contains the hash table's data instead of a pointer to the data.
* alloc-pool.c, asan.c, attribs.c, bitmap.c, cfg.c,
config/arm/arm.c, config/i386/winnt.c, config/ia64/ia64.c,
config/mips/mips.c, config/sol2.c, coverage.c, cselib.c,
data-streamer-out.c, dse.c, dwarf2cfi.c, dwarf2out.c, except.c,
fold-const.c, gcse.c, ggc-common.c,
gimple-ssa-strength-reduction.c, gimplify.c,
graphite-clast-to-gimple.c, graphite-dependences.c,
graphite-htab.h, graphite.c, haifa-sched.c, ipa-devirt.c,
ipa-profile.c, ira-color.c, ira-costs.c, loop-invariant.c,
loop-iv.c, loop-unroll.c, lto-streamer-in.c, lto-streamer-out.c,
lto-streamer.c, lto-streamer.h, passes.c, plugin.c,
postreload-gcse.c, sese.c, statistics.c, store-motion.c,
trans-mem.c, tree-browser.c, tree-cfg.c, tree-complex.c,
tree-eh.c, tree-into-ssa.c, tree-parloops.c, tree-sra.c,
tree-ssa-ccp.c, tree-ssa-coalesce.c, tree-ssa-dom.c,
tree-ssa-live.c, tree-ssa-loop-im.c,
tree-ssa-loop-ivopts.c, tree-ssa-phiopt.c, tree-ssa-pre.c,
tree-ssa-reassoc.c, tree-ssa-sccvn.c, tree-ssa-strlen.c,
tree-ssa-structalias.c, tree-ssa-tail-merge.c,
tree-ssa-threadupdate.c, tree-ssa-uncprop.c,
tree-vect-data-refs.c, tree-vect-loop.c, tree-vectorizer.c,
tree-vectorizer.h, valtrack.c, valtrack.h, var-tracking.c,
vtable-verify.c, vtable-verify.h: Adjust.
gcc/c/
* c-decl.c: Adjust.
gcc/cp/
* class.c, semantics.c, tree.c, vtable-class-hierarchy.c:
Adjust.
gcc/java/
* jcf-io.c: Adjust.
gcc/lto/
* lto.c: Adjust.
gcc/objc/
* objc-act.c: Adjust.
From-SVN: r211936
2014-06-24 15:21:35 +02:00
|
|
|
static hash_table<asan_mem_ref_hasher> *
|
[asan] Avoid instrumenting duplicated memory access in the same basic block
Like what Address Sanitizer does in LLVM, this patch avoids instrumented
duplicated memory accesses in the same basic blocks.
The approach taken is very conservative, to keep the pass simple, for
a start.
A memory access is considered to be a pair made of an expression tree
representing the beginning of the memory region that is accessed and
a the size of the access, in byte. For now that size is either 1, 2,
4, 8 or 16 bytes.
The patch builds a hash table of the memory accesses that have been
instrumented in the current basic block. Then it walks the gimple
statements of the current basic block. For each statement, it tests
if the memory regions it references have already been instrumented.
If not, the statement is instrumented and each memory references that
are actually instrumented are added to the hash table. When a memory
region is accessed (usually through builtin functions like memset),
then what gets added to the hash table is actually two memory
accesses: one for the beginning of the region, and the other for the
its end.
When the patch crosses a function call that is not a built-in function
that we ought to instrument, the hash table is cleared, because that
function call can possibly e.g free some memory that was instrumented.
Likewise, when a new basic block is visited, the hash table is
cleared. I guess we could be smarter than just unconditionally
clearing the hash table in this later case, but this is what asan@llvm
does, and for now, I thought starting in a conservative manner might
have some value.
The hash table is destroyed at the end of the pass.
Bootstrapped and tested against trunk on x86-64-unknown-linux-gnu.
gcc/
* Makefile.in (asan.o): Add new dependency on hash-table.h
* asan.c (struct asan_mem_ref, struct mem_ref_hasher): New types.
(asan_mem_ref_init, asan_mem_ref_get_end, get_mem_ref_hash_table)
(has_stmt_been_instrumented_p, empty_mem_ref_hash_table)
(free_mem_ref_resources, has_mem_ref_been_instrumented)
(has_stmt_been_instrumented_p, update_mem_ref_hash_table)
(get_mem_ref_of_assignment): New functions.
(get_mem_refs_of_builtin_call): Extract from
instrument_builtin_call and tweak a little bit to make it fit with
the new signature.
(instrument_builtin_call): Use the new
get_mem_refs_of_builtin_call. Use gimple_call_builtin_p instead
of is_gimple_builtin_call.
(instrument_derefs, instrument_mem_region_access): Insert the
instrumented memory reference into the hash table.
(maybe_instrument_assignment): Renamed instrument_assignment into
this, and change it to advance the iterator when instrumentation
actually happened and return true in that case. This makes it
homogeneous with maybe_instrument_assignment, and thus give a
chance to callers to be more 'regular'.
(transform_statements): Clear the memory reference hash table
whenever we enter a new BB, when we cross a function call, or when
we are done transforming statements. Use
maybe_instrument_assignment instead of instrumentation. No more
need to special case maybe_instrument_assignment and advance the
iterator after calling it; it's now handled just like
maybe_instrument_call. Update comment.
gcc/testsuite/
* c-c++-common/asan/no-redundant-instrumentation-1.c: New test.
* testsuite/c-c++-common/asan/no-redundant-instrumentation-2.c: Likewise.
* testsuite/c-c++-common/asan/no-redundant-instrumentation-3.c: Likewise.
* testsuite/c-c++-common/asan/inc.c: Likewise.
From-SVN: r196008
2013-02-13 11:32:26 +01:00
|
|
|
get_mem_ref_hash_table ()
|
|
|
|
{
|
Remove a layer of indirection from hash_table
gcc/
* hash-table.h: Remove a layer of indirection from hash_table so that
it contains the hash table's data instead of a pointer to the data.
* alloc-pool.c, asan.c, attribs.c, bitmap.c, cfg.c,
config/arm/arm.c, config/i386/winnt.c, config/ia64/ia64.c,
config/mips/mips.c, config/sol2.c, coverage.c, cselib.c,
data-streamer-out.c, dse.c, dwarf2cfi.c, dwarf2out.c, except.c,
fold-const.c, gcse.c, ggc-common.c,
gimple-ssa-strength-reduction.c, gimplify.c,
graphite-clast-to-gimple.c, graphite-dependences.c,
graphite-htab.h, graphite.c, haifa-sched.c, ipa-devirt.c,
ipa-profile.c, ira-color.c, ira-costs.c, loop-invariant.c,
loop-iv.c, loop-unroll.c, lto-streamer-in.c, lto-streamer-out.c,
lto-streamer.c, lto-streamer.h, passes.c, plugin.c,
postreload-gcse.c, sese.c, statistics.c, store-motion.c,
trans-mem.c, tree-browser.c, tree-cfg.c, tree-complex.c,
tree-eh.c, tree-into-ssa.c, tree-parloops.c, tree-sra.c,
tree-ssa-ccp.c, tree-ssa-coalesce.c, tree-ssa-dom.c,
tree-ssa-live.c, tree-ssa-loop-im.c,
tree-ssa-loop-ivopts.c, tree-ssa-phiopt.c, tree-ssa-pre.c,
tree-ssa-reassoc.c, tree-ssa-sccvn.c, tree-ssa-strlen.c,
tree-ssa-structalias.c, tree-ssa-tail-merge.c,
tree-ssa-threadupdate.c, tree-ssa-uncprop.c,
tree-vect-data-refs.c, tree-vect-loop.c, tree-vectorizer.c,
tree-vectorizer.h, valtrack.c, valtrack.h, var-tracking.c,
vtable-verify.c, vtable-verify.h: Adjust.
gcc/c/
* c-decl.c: Adjust.
gcc/cp/
* class.c, semantics.c, tree.c, vtable-class-hierarchy.c:
Adjust.
gcc/java/
* jcf-io.c: Adjust.
gcc/lto/
* lto.c: Adjust.
gcc/objc/
* objc-act.c: Adjust.
From-SVN: r211936
2014-06-24 15:21:35 +02:00
|
|
|
if (!asan_mem_ref_ht)
|
|
|
|
asan_mem_ref_ht = new hash_table<asan_mem_ref_hasher> (10);
|
[asan] Avoid instrumenting duplicated memory access in the same basic block
Like what Address Sanitizer does in LLVM, this patch avoids instrumented
duplicated memory accesses in the same basic blocks.
The approach taken is very conservative, to keep the pass simple, for
a start.
A memory access is considered to be a pair made of an expression tree
representing the beginning of the memory region that is accessed and
a the size of the access, in byte. For now that size is either 1, 2,
4, 8 or 16 bytes.
The patch builds a hash table of the memory accesses that have been
instrumented in the current basic block. Then it walks the gimple
statements of the current basic block. For each statement, it tests
if the memory regions it references have already been instrumented.
If not, the statement is instrumented and each memory references that
are actually instrumented are added to the hash table. When a memory
region is accessed (usually through builtin functions like memset),
then what gets added to the hash table is actually two memory
accesses: one for the beginning of the region, and the other for the
its end.
When the patch crosses a function call that is not a built-in function
that we ought to instrument, the hash table is cleared, because that
function call can possibly e.g free some memory that was instrumented.
Likewise, when a new basic block is visited, the hash table is
cleared. I guess we could be smarter than just unconditionally
clearing the hash table in this later case, but this is what asan@llvm
does, and for now, I thought starting in a conservative manner might
have some value.
The hash table is destroyed at the end of the pass.
Bootstrapped and tested against trunk on x86-64-unknown-linux-gnu.
gcc/
* Makefile.in (asan.o): Add new dependency on hash-table.h
* asan.c (struct asan_mem_ref, struct mem_ref_hasher): New types.
(asan_mem_ref_init, asan_mem_ref_get_end, get_mem_ref_hash_table)
(has_stmt_been_instrumented_p, empty_mem_ref_hash_table)
(free_mem_ref_resources, has_mem_ref_been_instrumented)
(has_stmt_been_instrumented_p, update_mem_ref_hash_table)
(get_mem_ref_of_assignment): New functions.
(get_mem_refs_of_builtin_call): Extract from
instrument_builtin_call and tweak a little bit to make it fit with
the new signature.
(instrument_builtin_call): Use the new
get_mem_refs_of_builtin_call. Use gimple_call_builtin_p instead
of is_gimple_builtin_call.
(instrument_derefs, instrument_mem_region_access): Insert the
instrumented memory reference into the hash table.
(maybe_instrument_assignment): Renamed instrument_assignment into
this, and change it to advance the iterator when instrumentation
actually happened and return true in that case. This makes it
homogeneous with maybe_instrument_assignment, and thus give a
chance to callers to be more 'regular'.
(transform_statements): Clear the memory reference hash table
whenever we enter a new BB, when we cross a function call, or when
we are done transforming statements. Use
maybe_instrument_assignment instead of instrumentation. No more
need to special case maybe_instrument_assignment and advance the
iterator after calling it; it's now handled just like
maybe_instrument_call. Update comment.
gcc/testsuite/
* c-c++-common/asan/no-redundant-instrumentation-1.c: New test.
* testsuite/c-c++-common/asan/no-redundant-instrumentation-2.c: Likewise.
* testsuite/c-c++-common/asan/no-redundant-instrumentation-3.c: Likewise.
* testsuite/c-c++-common/asan/inc.c: Likewise.
From-SVN: r196008
2013-02-13 11:32:26 +01:00
|
|
|
|
|
|
|
return asan_mem_ref_ht;
|
|
|
|
}
|
|
|
|
|
|
|
|
/* Clear all entries from the memory references hash table. */
|
|
|
|
|
|
|
|
static void
|
|
|
|
empty_mem_ref_hash_table ()
|
|
|
|
{
|
Remove a layer of indirection from hash_table
gcc/
* hash-table.h: Remove a layer of indirection from hash_table so that
it contains the hash table's data instead of a pointer to the data.
* alloc-pool.c, asan.c, attribs.c, bitmap.c, cfg.c,
config/arm/arm.c, config/i386/winnt.c, config/ia64/ia64.c,
config/mips/mips.c, config/sol2.c, coverage.c, cselib.c,
data-streamer-out.c, dse.c, dwarf2cfi.c, dwarf2out.c, except.c,
fold-const.c, gcse.c, ggc-common.c,
gimple-ssa-strength-reduction.c, gimplify.c,
graphite-clast-to-gimple.c, graphite-dependences.c,
graphite-htab.h, graphite.c, haifa-sched.c, ipa-devirt.c,
ipa-profile.c, ira-color.c, ira-costs.c, loop-invariant.c,
loop-iv.c, loop-unroll.c, lto-streamer-in.c, lto-streamer-out.c,
lto-streamer.c, lto-streamer.h, passes.c, plugin.c,
postreload-gcse.c, sese.c, statistics.c, store-motion.c,
trans-mem.c, tree-browser.c, tree-cfg.c, tree-complex.c,
tree-eh.c, tree-into-ssa.c, tree-parloops.c, tree-sra.c,
tree-ssa-ccp.c, tree-ssa-coalesce.c, tree-ssa-dom.c,
tree-ssa-live.c, tree-ssa-loop-im.c,
tree-ssa-loop-ivopts.c, tree-ssa-phiopt.c, tree-ssa-pre.c,
tree-ssa-reassoc.c, tree-ssa-sccvn.c, tree-ssa-strlen.c,
tree-ssa-structalias.c, tree-ssa-tail-merge.c,
tree-ssa-threadupdate.c, tree-ssa-uncprop.c,
tree-vect-data-refs.c, tree-vect-loop.c, tree-vectorizer.c,
tree-vectorizer.h, valtrack.c, valtrack.h, var-tracking.c,
vtable-verify.c, vtable-verify.h: Adjust.
gcc/c/
* c-decl.c: Adjust.
gcc/cp/
* class.c, semantics.c, tree.c, vtable-class-hierarchy.c:
Adjust.
gcc/java/
* jcf-io.c: Adjust.
gcc/lto/
* lto.c: Adjust.
gcc/objc/
* objc-act.c: Adjust.
From-SVN: r211936
2014-06-24 15:21:35 +02:00
|
|
|
if (asan_mem_ref_ht)
|
|
|
|
asan_mem_ref_ht->empty ();
|
[asan] Avoid instrumenting duplicated memory access in the same basic block
Like what Address Sanitizer does in LLVM, this patch avoids instrumented
duplicated memory accesses in the same basic blocks.
The approach taken is very conservative, to keep the pass simple, for
a start.
A memory access is considered to be a pair made of an expression tree
representing the beginning of the memory region that is accessed and
a the size of the access, in byte. For now that size is either 1, 2,
4, 8 or 16 bytes.
The patch builds a hash table of the memory accesses that have been
instrumented in the current basic block. Then it walks the gimple
statements of the current basic block. For each statement, it tests
if the memory regions it references have already been instrumented.
If not, the statement is instrumented and each memory references that
are actually instrumented are added to the hash table. When a memory
region is accessed (usually through builtin functions like memset),
then what gets added to the hash table is actually two memory
accesses: one for the beginning of the region, and the other for the
its end.
When the patch crosses a function call that is not a built-in function
that we ought to instrument, the hash table is cleared, because that
function call can possibly e.g free some memory that was instrumented.
Likewise, when a new basic block is visited, the hash table is
cleared. I guess we could be smarter than just unconditionally
clearing the hash table in this later case, but this is what asan@llvm
does, and for now, I thought starting in a conservative manner might
have some value.
The hash table is destroyed at the end of the pass.
Bootstrapped and tested against trunk on x86-64-unknown-linux-gnu.
gcc/
* Makefile.in (asan.o): Add new dependency on hash-table.h
* asan.c (struct asan_mem_ref, struct mem_ref_hasher): New types.
(asan_mem_ref_init, asan_mem_ref_get_end, get_mem_ref_hash_table)
(has_stmt_been_instrumented_p, empty_mem_ref_hash_table)
(free_mem_ref_resources, has_mem_ref_been_instrumented)
(has_stmt_been_instrumented_p, update_mem_ref_hash_table)
(get_mem_ref_of_assignment): New functions.
(get_mem_refs_of_builtin_call): Extract from
instrument_builtin_call and tweak a little bit to make it fit with
the new signature.
(instrument_builtin_call): Use the new
get_mem_refs_of_builtin_call. Use gimple_call_builtin_p instead
of is_gimple_builtin_call.
(instrument_derefs, instrument_mem_region_access): Insert the
instrumented memory reference into the hash table.
(maybe_instrument_assignment): Renamed instrument_assignment into
this, and change it to advance the iterator when instrumentation
actually happened and return true in that case. This makes it
homogeneous with maybe_instrument_assignment, and thus give a
chance to callers to be more 'regular'.
(transform_statements): Clear the memory reference hash table
whenever we enter a new BB, when we cross a function call, or when
we are done transforming statements. Use
maybe_instrument_assignment instead of instrumentation. No more
need to special case maybe_instrument_assignment and advance the
iterator after calling it; it's now handled just like
maybe_instrument_call. Update comment.
gcc/testsuite/
* c-c++-common/asan/no-redundant-instrumentation-1.c: New test.
* testsuite/c-c++-common/asan/no-redundant-instrumentation-2.c: Likewise.
* testsuite/c-c++-common/asan/no-redundant-instrumentation-3.c: Likewise.
* testsuite/c-c++-common/asan/inc.c: Likewise.
From-SVN: r196008
2013-02-13 11:32:26 +01:00
|
|
|
}
|
|
|
|
|
|
|
|
/* Free the memory references hash table. */
|
|
|
|
|
|
|
|
static void
|
|
|
|
free_mem_ref_resources ()
|
|
|
|
{
|
Remove a layer of indirection from hash_table
gcc/
* hash-table.h: Remove a layer of indirection from hash_table so that
it contains the hash table's data instead of a pointer to the data.
* alloc-pool.c, asan.c, attribs.c, bitmap.c, cfg.c,
config/arm/arm.c, config/i386/winnt.c, config/ia64/ia64.c,
config/mips/mips.c, config/sol2.c, coverage.c, cselib.c,
data-streamer-out.c, dse.c, dwarf2cfi.c, dwarf2out.c, except.c,
fold-const.c, gcse.c, ggc-common.c,
gimple-ssa-strength-reduction.c, gimplify.c,
graphite-clast-to-gimple.c, graphite-dependences.c,
graphite-htab.h, graphite.c, haifa-sched.c, ipa-devirt.c,
ipa-profile.c, ira-color.c, ira-costs.c, loop-invariant.c,
loop-iv.c, loop-unroll.c, lto-streamer-in.c, lto-streamer-out.c,
lto-streamer.c, lto-streamer.h, passes.c, plugin.c,
postreload-gcse.c, sese.c, statistics.c, store-motion.c,
trans-mem.c, tree-browser.c, tree-cfg.c, tree-complex.c,
tree-eh.c, tree-into-ssa.c, tree-parloops.c, tree-sra.c,
tree-ssa-ccp.c, tree-ssa-coalesce.c, tree-ssa-dom.c,
tree-ssa-live.c, tree-ssa-loop-im.c,
tree-ssa-loop-ivopts.c, tree-ssa-phiopt.c, tree-ssa-pre.c,
tree-ssa-reassoc.c, tree-ssa-sccvn.c, tree-ssa-strlen.c,
tree-ssa-structalias.c, tree-ssa-tail-merge.c,
tree-ssa-threadupdate.c, tree-ssa-uncprop.c,
tree-vect-data-refs.c, tree-vect-loop.c, tree-vectorizer.c,
tree-vectorizer.h, valtrack.c, valtrack.h, var-tracking.c,
vtable-verify.c, vtable-verify.h: Adjust.
gcc/c/
* c-decl.c: Adjust.
gcc/cp/
* class.c, semantics.c, tree.c, vtable-class-hierarchy.c:
Adjust.
gcc/java/
* jcf-io.c: Adjust.
gcc/lto/
* lto.c: Adjust.
gcc/objc/
* objc-act.c: Adjust.
From-SVN: r211936
2014-06-24 15:21:35 +02:00
|
|
|
delete asan_mem_ref_ht;
|
|
|
|
asan_mem_ref_ht = NULL;
|
[asan] Avoid instrumenting duplicated memory access in the same basic block
Like what Address Sanitizer does in LLVM, this patch avoids instrumented
duplicated memory accesses in the same basic blocks.
The approach taken is very conservative, to keep the pass simple, for
a start.
A memory access is considered to be a pair made of an expression tree
representing the beginning of the memory region that is accessed and
a the size of the access, in byte. For now that size is either 1, 2,
4, 8 or 16 bytes.
The patch builds a hash table of the memory accesses that have been
instrumented in the current basic block. Then it walks the gimple
statements of the current basic block. For each statement, it tests
if the memory regions it references have already been instrumented.
If not, the statement is instrumented and each memory references that
are actually instrumented are added to the hash table. When a memory
region is accessed (usually through builtin functions like memset),
then what gets added to the hash table is actually two memory
accesses: one for the beginning of the region, and the other for the
its end.
When the patch crosses a function call that is not a built-in function
that we ought to instrument, the hash table is cleared, because that
function call can possibly e.g free some memory that was instrumented.
Likewise, when a new basic block is visited, the hash table is
cleared. I guess we could be smarter than just unconditionally
clearing the hash table in this later case, but this is what asan@llvm
does, and for now, I thought starting in a conservative manner might
have some value.
The hash table is destroyed at the end of the pass.
Bootstrapped and tested against trunk on x86-64-unknown-linux-gnu.
gcc/
* Makefile.in (asan.o): Add new dependency on hash-table.h
* asan.c (struct asan_mem_ref, struct mem_ref_hasher): New types.
(asan_mem_ref_init, asan_mem_ref_get_end, get_mem_ref_hash_table)
(has_stmt_been_instrumented_p, empty_mem_ref_hash_table)
(free_mem_ref_resources, has_mem_ref_been_instrumented)
(has_stmt_been_instrumented_p, update_mem_ref_hash_table)
(get_mem_ref_of_assignment): New functions.
(get_mem_refs_of_builtin_call): Extract from
instrument_builtin_call and tweak a little bit to make it fit with
the new signature.
(instrument_builtin_call): Use the new
get_mem_refs_of_builtin_call. Use gimple_call_builtin_p instead
of is_gimple_builtin_call.
(instrument_derefs, instrument_mem_region_access): Insert the
instrumented memory reference into the hash table.
(maybe_instrument_assignment): Renamed instrument_assignment into
this, and change it to advance the iterator when instrumentation
actually happened and return true in that case. This makes it
homogeneous with maybe_instrument_assignment, and thus give a
chance to callers to be more 'regular'.
(transform_statements): Clear the memory reference hash table
whenever we enter a new BB, when we cross a function call, or when
we are done transforming statements. Use
maybe_instrument_assignment instead of instrumentation. No more
need to special case maybe_instrument_assignment and advance the
iterator after calling it; it's now handled just like
maybe_instrument_call. Update comment.
gcc/testsuite/
* c-c++-common/asan/no-redundant-instrumentation-1.c: New test.
* testsuite/c-c++-common/asan/no-redundant-instrumentation-2.c: Likewise.
* testsuite/c-c++-common/asan/no-redundant-instrumentation-3.c: Likewise.
* testsuite/c-c++-common/asan/inc.c: Likewise.
From-SVN: r196008
2013-02-13 11:32:26 +01:00
|
|
|
|
2015-07-16 13:26:05 +02:00
|
|
|
asan_mem_ref_pool.release ();
|
[asan] Avoid instrumenting duplicated memory access in the same basic block
Like what Address Sanitizer does in LLVM, this patch avoids instrumented
duplicated memory accesses in the same basic blocks.
The approach taken is very conservative, to keep the pass simple, for
a start.
A memory access is considered to be a pair made of an expression tree
representing the beginning of the memory region that is accessed and
a the size of the access, in byte. For now that size is either 1, 2,
4, 8 or 16 bytes.
The patch builds a hash table of the memory accesses that have been
instrumented in the current basic block. Then it walks the gimple
statements of the current basic block. For each statement, it tests
if the memory regions it references have already been instrumented.
If not, the statement is instrumented and each memory references that
are actually instrumented are added to the hash table. When a memory
region is accessed (usually through builtin functions like memset),
then what gets added to the hash table is actually two memory
accesses: one for the beginning of the region, and the other for the
its end.
When the patch crosses a function call that is not a built-in function
that we ought to instrument, the hash table is cleared, because that
function call can possibly e.g free some memory that was instrumented.
Likewise, when a new basic block is visited, the hash table is
cleared. I guess we could be smarter than just unconditionally
clearing the hash table in this later case, but this is what asan@llvm
does, and for now, I thought starting in a conservative manner might
have some value.
The hash table is destroyed at the end of the pass.
Bootstrapped and tested against trunk on x86-64-unknown-linux-gnu.
gcc/
* Makefile.in (asan.o): Add new dependency on hash-table.h
* asan.c (struct asan_mem_ref, struct mem_ref_hasher): New types.
(asan_mem_ref_init, asan_mem_ref_get_end, get_mem_ref_hash_table)
(has_stmt_been_instrumented_p, empty_mem_ref_hash_table)
(free_mem_ref_resources, has_mem_ref_been_instrumented)
(has_stmt_been_instrumented_p, update_mem_ref_hash_table)
(get_mem_ref_of_assignment): New functions.
(get_mem_refs_of_builtin_call): Extract from
instrument_builtin_call and tweak a little bit to make it fit with
the new signature.
(instrument_builtin_call): Use the new
get_mem_refs_of_builtin_call. Use gimple_call_builtin_p instead
of is_gimple_builtin_call.
(instrument_derefs, instrument_mem_region_access): Insert the
instrumented memory reference into the hash table.
(maybe_instrument_assignment): Renamed instrument_assignment into
this, and change it to advance the iterator when instrumentation
actually happened and return true in that case. This makes it
homogeneous with maybe_instrument_assignment, and thus give a
chance to callers to be more 'regular'.
(transform_statements): Clear the memory reference hash table
whenever we enter a new BB, when we cross a function call, or when
we are done transforming statements. Use
maybe_instrument_assignment instead of instrumentation. No more
need to special case maybe_instrument_assignment and advance the
iterator after calling it; it's now handled just like
maybe_instrument_call. Update comment.
gcc/testsuite/
* c-c++-common/asan/no-redundant-instrumentation-1.c: New test.
* testsuite/c-c++-common/asan/no-redundant-instrumentation-2.c: Likewise.
* testsuite/c-c++-common/asan/no-redundant-instrumentation-3.c: Likewise.
* testsuite/c-c++-common/asan/inc.c: Likewise.
From-SVN: r196008
2013-02-13 11:32:26 +01:00
|
|
|
}
|
|
|
|
|
|
|
|
/* Return true iff the memory reference REF has been instrumented. */
|
|
|
|
|
|
|
|
static bool
|
2014-05-30 20:37:05 +02:00
|
|
|
has_mem_ref_been_instrumented (tree ref, HOST_WIDE_INT access_size)
|
[asan] Avoid instrumenting duplicated memory access in the same basic block
Like what Address Sanitizer does in LLVM, this patch avoids instrumented
duplicated memory accesses in the same basic blocks.
The approach taken is very conservative, to keep the pass simple, for
a start.
A memory access is considered to be a pair made of an expression tree
representing the beginning of the memory region that is accessed and
a the size of the access, in byte. For now that size is either 1, 2,
4, 8 or 16 bytes.
The patch builds a hash table of the memory accesses that have been
instrumented in the current basic block. Then it walks the gimple
statements of the current basic block. For each statement, it tests
if the memory regions it references have already been instrumented.
If not, the statement is instrumented and each memory references that
are actually instrumented are added to the hash table. When a memory
region is accessed (usually through builtin functions like memset),
then what gets added to the hash table is actually two memory
accesses: one for the beginning of the region, and the other for the
its end.
When the patch crosses a function call that is not a built-in function
that we ought to instrument, the hash table is cleared, because that
function call can possibly e.g free some memory that was instrumented.
Likewise, when a new basic block is visited, the hash table is
cleared. I guess we could be smarter than just unconditionally
clearing the hash table in this later case, but this is what asan@llvm
does, and for now, I thought starting in a conservative manner might
have some value.
The hash table is destroyed at the end of the pass.
Bootstrapped and tested against trunk on x86-64-unknown-linux-gnu.
gcc/
* Makefile.in (asan.o): Add new dependency on hash-table.h
* asan.c (struct asan_mem_ref, struct mem_ref_hasher): New types.
(asan_mem_ref_init, asan_mem_ref_get_end, get_mem_ref_hash_table)
(has_stmt_been_instrumented_p, empty_mem_ref_hash_table)
(free_mem_ref_resources, has_mem_ref_been_instrumented)
(has_stmt_been_instrumented_p, update_mem_ref_hash_table)
(get_mem_ref_of_assignment): New functions.
(get_mem_refs_of_builtin_call): Extract from
instrument_builtin_call and tweak a little bit to make it fit with
the new signature.
(instrument_builtin_call): Use the new
get_mem_refs_of_builtin_call. Use gimple_call_builtin_p instead
of is_gimple_builtin_call.
(instrument_derefs, instrument_mem_region_access): Insert the
instrumented memory reference into the hash table.
(maybe_instrument_assignment): Renamed instrument_assignment into
this, and change it to advance the iterator when instrumentation
actually happened and return true in that case. This makes it
homogeneous with maybe_instrument_assignment, and thus give a
chance to callers to be more 'regular'.
(transform_statements): Clear the memory reference hash table
whenever we enter a new BB, when we cross a function call, or when
we are done transforming statements. Use
maybe_instrument_assignment instead of instrumentation. No more
need to special case maybe_instrument_assignment and advance the
iterator after calling it; it's now handled just like
maybe_instrument_call. Update comment.
gcc/testsuite/
* c-c++-common/asan/no-redundant-instrumentation-1.c: New test.
* testsuite/c-c++-common/asan/no-redundant-instrumentation-2.c: Likewise.
* testsuite/c-c++-common/asan/no-redundant-instrumentation-3.c: Likewise.
* testsuite/c-c++-common/asan/inc.c: Likewise.
From-SVN: r196008
2013-02-13 11:32:26 +01:00
|
|
|
{
|
|
|
|
asan_mem_ref r;
|
|
|
|
asan_mem_ref_init (&r, ref, access_size);
|
|
|
|
|
2014-10-28 13:36:54 +01:00
|
|
|
asan_mem_ref *saved_ref = get_mem_ref_hash_table ()->find (&r);
|
|
|
|
return saved_ref && saved_ref->access_size >= access_size;
|
[asan] Avoid instrumenting duplicated memory access in the same basic block
Like what Address Sanitizer does in LLVM, this patch avoids instrumented
duplicated memory accesses in the same basic blocks.
The approach taken is very conservative, to keep the pass simple, for
a start.
A memory access is considered to be a pair made of an expression tree
representing the beginning of the memory region that is accessed and
a the size of the access, in byte. For now that size is either 1, 2,
4, 8 or 16 bytes.
The patch builds a hash table of the memory accesses that have been
instrumented in the current basic block. Then it walks the gimple
statements of the current basic block. For each statement, it tests
if the memory regions it references have already been instrumented.
If not, the statement is instrumented and each memory references that
are actually instrumented are added to the hash table. When a memory
region is accessed (usually through builtin functions like memset),
then what gets added to the hash table is actually two memory
accesses: one for the beginning of the region, and the other for the
its end.
When the patch crosses a function call that is not a built-in function
that we ought to instrument, the hash table is cleared, because that
function call can possibly e.g free some memory that was instrumented.
Likewise, when a new basic block is visited, the hash table is
cleared. I guess we could be smarter than just unconditionally
clearing the hash table in this later case, but this is what asan@llvm
does, and for now, I thought starting in a conservative manner might
have some value.
The hash table is destroyed at the end of the pass.
Bootstrapped and tested against trunk on x86-64-unknown-linux-gnu.
gcc/
* Makefile.in (asan.o): Add new dependency on hash-table.h
* asan.c (struct asan_mem_ref, struct mem_ref_hasher): New types.
(asan_mem_ref_init, asan_mem_ref_get_end, get_mem_ref_hash_table)
(has_stmt_been_instrumented_p, empty_mem_ref_hash_table)
(free_mem_ref_resources, has_mem_ref_been_instrumented)
(has_stmt_been_instrumented_p, update_mem_ref_hash_table)
(get_mem_ref_of_assignment): New functions.
(get_mem_refs_of_builtin_call): Extract from
instrument_builtin_call and tweak a little bit to make it fit with
the new signature.
(instrument_builtin_call): Use the new
get_mem_refs_of_builtin_call. Use gimple_call_builtin_p instead
of is_gimple_builtin_call.
(instrument_derefs, instrument_mem_region_access): Insert the
instrumented memory reference into the hash table.
(maybe_instrument_assignment): Renamed instrument_assignment into
this, and change it to advance the iterator when instrumentation
actually happened and return true in that case. This makes it
homogeneous with maybe_instrument_assignment, and thus give a
chance to callers to be more 'regular'.
(transform_statements): Clear the memory reference hash table
whenever we enter a new BB, when we cross a function call, or when
we are done transforming statements. Use
maybe_instrument_assignment instead of instrumentation. No more
need to special case maybe_instrument_assignment and advance the
iterator after calling it; it's now handled just like
maybe_instrument_call. Update comment.
gcc/testsuite/
* c-c++-common/asan/no-redundant-instrumentation-1.c: New test.
* testsuite/c-c++-common/asan/no-redundant-instrumentation-2.c: Likewise.
* testsuite/c-c++-common/asan/no-redundant-instrumentation-3.c: Likewise.
* testsuite/c-c++-common/asan/inc.c: Likewise.
From-SVN: r196008
2013-02-13 11:32:26 +01:00
|
|
|
}
|
|
|
|
|
|
|
|
/* Return true iff the memory reference REF has been instrumented. */
|
|
|
|
|
|
|
|
static bool
|
|
|
|
has_mem_ref_been_instrumented (const asan_mem_ref *ref)
|
|
|
|
{
|
|
|
|
return has_mem_ref_been_instrumented (ref->start, ref->access_size);
|
|
|
|
}
|
|
|
|
|
|
|
|
/* Return true iff access to memory region starting at REF and of
|
|
|
|
length LEN has been instrumented. */
|
|
|
|
|
|
|
|
static bool
|
|
|
|
has_mem_ref_been_instrumented (const asan_mem_ref *ref, tree len)
|
|
|
|
{
|
2014-10-28 13:36:54 +01:00
|
|
|
HOST_WIDE_INT size_in_bytes
|
|
|
|
= tree_fits_shwi_p (len) ? tree_to_shwi (len) : -1;
|
[asan] Avoid instrumenting duplicated memory access in the same basic block
Like what Address Sanitizer does in LLVM, this patch avoids instrumented
duplicated memory accesses in the same basic blocks.
The approach taken is very conservative, to keep the pass simple, for
a start.
A memory access is considered to be a pair made of an expression tree
representing the beginning of the memory region that is accessed and
a the size of the access, in byte. For now that size is either 1, 2,
4, 8 or 16 bytes.
The patch builds a hash table of the memory accesses that have been
instrumented in the current basic block. Then it walks the gimple
statements of the current basic block. For each statement, it tests
if the memory regions it references have already been instrumented.
If not, the statement is instrumented and each memory references that
are actually instrumented are added to the hash table. When a memory
region is accessed (usually through builtin functions like memset),
then what gets added to the hash table is actually two memory
accesses: one for the beginning of the region, and the other for the
its end.
When the patch crosses a function call that is not a built-in function
that we ought to instrument, the hash table is cleared, because that
function call can possibly e.g free some memory that was instrumented.
Likewise, when a new basic block is visited, the hash table is
cleared. I guess we could be smarter than just unconditionally
clearing the hash table in this later case, but this is what asan@llvm
does, and for now, I thought starting in a conservative manner might
have some value.
The hash table is destroyed at the end of the pass.
Bootstrapped and tested against trunk on x86-64-unknown-linux-gnu.
gcc/
* Makefile.in (asan.o): Add new dependency on hash-table.h
* asan.c (struct asan_mem_ref, struct mem_ref_hasher): New types.
(asan_mem_ref_init, asan_mem_ref_get_end, get_mem_ref_hash_table)
(has_stmt_been_instrumented_p, empty_mem_ref_hash_table)
(free_mem_ref_resources, has_mem_ref_been_instrumented)
(has_stmt_been_instrumented_p, update_mem_ref_hash_table)
(get_mem_ref_of_assignment): New functions.
(get_mem_refs_of_builtin_call): Extract from
instrument_builtin_call and tweak a little bit to make it fit with
the new signature.
(instrument_builtin_call): Use the new
get_mem_refs_of_builtin_call. Use gimple_call_builtin_p instead
of is_gimple_builtin_call.
(instrument_derefs, instrument_mem_region_access): Insert the
instrumented memory reference into the hash table.
(maybe_instrument_assignment): Renamed instrument_assignment into
this, and change it to advance the iterator when instrumentation
actually happened and return true in that case. This makes it
homogeneous with maybe_instrument_assignment, and thus give a
chance to callers to be more 'regular'.
(transform_statements): Clear the memory reference hash table
whenever we enter a new BB, when we cross a function call, or when
we are done transforming statements. Use
maybe_instrument_assignment instead of instrumentation. No more
need to special case maybe_instrument_assignment and advance the
iterator after calling it; it's now handled just like
maybe_instrument_call. Update comment.
gcc/testsuite/
* c-c++-common/asan/no-redundant-instrumentation-1.c: New test.
* testsuite/c-c++-common/asan/no-redundant-instrumentation-2.c: Likewise.
* testsuite/c-c++-common/asan/no-redundant-instrumentation-3.c: Likewise.
* testsuite/c-c++-common/asan/inc.c: Likewise.
From-SVN: r196008
2013-02-13 11:32:26 +01:00
|
|
|
|
2014-10-28 13:36:54 +01:00
|
|
|
return size_in_bytes != -1
|
|
|
|
&& has_mem_ref_been_instrumented (ref->start, size_in_bytes);
|
[asan] Avoid instrumenting duplicated memory access in the same basic block
Like what Address Sanitizer does in LLVM, this patch avoids instrumented
duplicated memory accesses in the same basic blocks.
The approach taken is very conservative, to keep the pass simple, for
a start.
A memory access is considered to be a pair made of an expression tree
representing the beginning of the memory region that is accessed and
a the size of the access, in byte. For now that size is either 1, 2,
4, 8 or 16 bytes.
The patch builds a hash table of the memory accesses that have been
instrumented in the current basic block. Then it walks the gimple
statements of the current basic block. For each statement, it tests
if the memory regions it references have already been instrumented.
If not, the statement is instrumented and each memory references that
are actually instrumented are added to the hash table. When a memory
region is accessed (usually through builtin functions like memset),
then what gets added to the hash table is actually two memory
accesses: one for the beginning of the region, and the other for the
its end.
When the patch crosses a function call that is not a built-in function
that we ought to instrument, the hash table is cleared, because that
function call can possibly e.g free some memory that was instrumented.
Likewise, when a new basic block is visited, the hash table is
cleared. I guess we could be smarter than just unconditionally
clearing the hash table in this later case, but this is what asan@llvm
does, and for now, I thought starting in a conservative manner might
have some value.
The hash table is destroyed at the end of the pass.
Bootstrapped and tested against trunk on x86-64-unknown-linux-gnu.
gcc/
* Makefile.in (asan.o): Add new dependency on hash-table.h
* asan.c (struct asan_mem_ref, struct mem_ref_hasher): New types.
(asan_mem_ref_init, asan_mem_ref_get_end, get_mem_ref_hash_table)
(has_stmt_been_instrumented_p, empty_mem_ref_hash_table)
(free_mem_ref_resources, has_mem_ref_been_instrumented)
(has_stmt_been_instrumented_p, update_mem_ref_hash_table)
(get_mem_ref_of_assignment): New functions.
(get_mem_refs_of_builtin_call): Extract from
instrument_builtin_call and tweak a little bit to make it fit with
the new signature.
(instrument_builtin_call): Use the new
get_mem_refs_of_builtin_call. Use gimple_call_builtin_p instead
of is_gimple_builtin_call.
(instrument_derefs, instrument_mem_region_access): Insert the
instrumented memory reference into the hash table.
(maybe_instrument_assignment): Renamed instrument_assignment into
this, and change it to advance the iterator when instrumentation
actually happened and return true in that case. This makes it
homogeneous with maybe_instrument_assignment, and thus give a
chance to callers to be more 'regular'.
(transform_statements): Clear the memory reference hash table
whenever we enter a new BB, when we cross a function call, or when
we are done transforming statements. Use
maybe_instrument_assignment instead of instrumentation. No more
need to special case maybe_instrument_assignment and advance the
iterator after calling it; it's now handled just like
maybe_instrument_call. Update comment.
gcc/testsuite/
* c-c++-common/asan/no-redundant-instrumentation-1.c: New test.
* testsuite/c-c++-common/asan/no-redundant-instrumentation-2.c: Likewise.
* testsuite/c-c++-common/asan/no-redundant-instrumentation-3.c: Likewise.
* testsuite/c-c++-common/asan/inc.c: Likewise.
From-SVN: r196008
2013-02-13 11:32:26 +01:00
|
|
|
}
|
|
|
|
|
|
|
|
/* Set REF to the memory reference present in a gimple assignment
|
|
|
|
ASSIGNMENT. Return true upon successful completion, false
|
|
|
|
otherwise. */
|
|
|
|
|
|
|
|
static bool
|
2014-11-19 18:00:54 +01:00
|
|
|
get_mem_ref_of_assignment (const gassign *assignment,
|
[asan] Avoid instrumenting duplicated memory access in the same basic block
Like what Address Sanitizer does in LLVM, this patch avoids instrumented
duplicated memory accesses in the same basic blocks.
The approach taken is very conservative, to keep the pass simple, for
a start.
A memory access is considered to be a pair made of an expression tree
representing the beginning of the memory region that is accessed and
a the size of the access, in byte. For now that size is either 1, 2,
4, 8 or 16 bytes.
The patch builds a hash table of the memory accesses that have been
instrumented in the current basic block. Then it walks the gimple
statements of the current basic block. For each statement, it tests
if the memory regions it references have already been instrumented.
If not, the statement is instrumented and each memory references that
are actually instrumented are added to the hash table. When a memory
region is accessed (usually through builtin functions like memset),
then what gets added to the hash table is actually two memory
accesses: one for the beginning of the region, and the other for the
its end.
When the patch crosses a function call that is not a built-in function
that we ought to instrument, the hash table is cleared, because that
function call can possibly e.g free some memory that was instrumented.
Likewise, when a new basic block is visited, the hash table is
cleared. I guess we could be smarter than just unconditionally
clearing the hash table in this later case, but this is what asan@llvm
does, and for now, I thought starting in a conservative manner might
have some value.
The hash table is destroyed at the end of the pass.
Bootstrapped and tested against trunk on x86-64-unknown-linux-gnu.
gcc/
* Makefile.in (asan.o): Add new dependency on hash-table.h
* asan.c (struct asan_mem_ref, struct mem_ref_hasher): New types.
(asan_mem_ref_init, asan_mem_ref_get_end, get_mem_ref_hash_table)
(has_stmt_been_instrumented_p, empty_mem_ref_hash_table)
(free_mem_ref_resources, has_mem_ref_been_instrumented)
(has_stmt_been_instrumented_p, update_mem_ref_hash_table)
(get_mem_ref_of_assignment): New functions.
(get_mem_refs_of_builtin_call): Extract from
instrument_builtin_call and tweak a little bit to make it fit with
the new signature.
(instrument_builtin_call): Use the new
get_mem_refs_of_builtin_call. Use gimple_call_builtin_p instead
of is_gimple_builtin_call.
(instrument_derefs, instrument_mem_region_access): Insert the
instrumented memory reference into the hash table.
(maybe_instrument_assignment): Renamed instrument_assignment into
this, and change it to advance the iterator when instrumentation
actually happened and return true in that case. This makes it
homogeneous with maybe_instrument_assignment, and thus give a
chance to callers to be more 'regular'.
(transform_statements): Clear the memory reference hash table
whenever we enter a new BB, when we cross a function call, or when
we are done transforming statements. Use
maybe_instrument_assignment instead of instrumentation. No more
need to special case maybe_instrument_assignment and advance the
iterator after calling it; it's now handled just like
maybe_instrument_call. Update comment.
gcc/testsuite/
* c-c++-common/asan/no-redundant-instrumentation-1.c: New test.
* testsuite/c-c++-common/asan/no-redundant-instrumentation-2.c: Likewise.
* testsuite/c-c++-common/asan/no-redundant-instrumentation-3.c: Likewise.
* testsuite/c-c++-common/asan/inc.c: Likewise.
From-SVN: r196008
2013-02-13 11:32:26 +01:00
|
|
|
asan_mem_ref *ref,
|
|
|
|
bool *ref_is_store)
|
|
|
|
{
|
|
|
|
gcc_assert (gimple_assign_single_p (assignment));
|
|
|
|
|
2013-04-02 20:25:36 +02:00
|
|
|
if (gimple_store_p (assignment)
|
|
|
|
&& !gimple_clobber_p (assignment))
|
[asan] Avoid instrumenting duplicated memory access in the same basic block
Like what Address Sanitizer does in LLVM, this patch avoids instrumented
duplicated memory accesses in the same basic blocks.
The approach taken is very conservative, to keep the pass simple, for
a start.
A memory access is considered to be a pair made of an expression tree
representing the beginning of the memory region that is accessed and
a the size of the access, in byte. For now that size is either 1, 2,
4, 8 or 16 bytes.
The patch builds a hash table of the memory accesses that have been
instrumented in the current basic block. Then it walks the gimple
statements of the current basic block. For each statement, it tests
if the memory regions it references have already been instrumented.
If not, the statement is instrumented and each memory references that
are actually instrumented are added to the hash table. When a memory
region is accessed (usually through builtin functions like memset),
then what gets added to the hash table is actually two memory
accesses: one for the beginning of the region, and the other for the
its end.
When the patch crosses a function call that is not a built-in function
that we ought to instrument, the hash table is cleared, because that
function call can possibly e.g free some memory that was instrumented.
Likewise, when a new basic block is visited, the hash table is
cleared. I guess we could be smarter than just unconditionally
clearing the hash table in this later case, but this is what asan@llvm
does, and for now, I thought starting in a conservative manner might
have some value.
The hash table is destroyed at the end of the pass.
Bootstrapped and tested against trunk on x86-64-unknown-linux-gnu.
gcc/
* Makefile.in (asan.o): Add new dependency on hash-table.h
* asan.c (struct asan_mem_ref, struct mem_ref_hasher): New types.
(asan_mem_ref_init, asan_mem_ref_get_end, get_mem_ref_hash_table)
(has_stmt_been_instrumented_p, empty_mem_ref_hash_table)
(free_mem_ref_resources, has_mem_ref_been_instrumented)
(has_stmt_been_instrumented_p, update_mem_ref_hash_table)
(get_mem_ref_of_assignment): New functions.
(get_mem_refs_of_builtin_call): Extract from
instrument_builtin_call and tweak a little bit to make it fit with
the new signature.
(instrument_builtin_call): Use the new
get_mem_refs_of_builtin_call. Use gimple_call_builtin_p instead
of is_gimple_builtin_call.
(instrument_derefs, instrument_mem_region_access): Insert the
instrumented memory reference into the hash table.
(maybe_instrument_assignment): Renamed instrument_assignment into
this, and change it to advance the iterator when instrumentation
actually happened and return true in that case. This makes it
homogeneous with maybe_instrument_assignment, and thus give a
chance to callers to be more 'regular'.
(transform_statements): Clear the memory reference hash table
whenever we enter a new BB, when we cross a function call, or when
we are done transforming statements. Use
maybe_instrument_assignment instead of instrumentation. No more
need to special case maybe_instrument_assignment and advance the
iterator after calling it; it's now handled just like
maybe_instrument_call. Update comment.
gcc/testsuite/
* c-c++-common/asan/no-redundant-instrumentation-1.c: New test.
* testsuite/c-c++-common/asan/no-redundant-instrumentation-2.c: Likewise.
* testsuite/c-c++-common/asan/no-redundant-instrumentation-3.c: Likewise.
* testsuite/c-c++-common/asan/inc.c: Likewise.
From-SVN: r196008
2013-02-13 11:32:26 +01:00
|
|
|
{
|
|
|
|
ref->start = gimple_assign_lhs (assignment);
|
|
|
|
*ref_is_store = true;
|
|
|
|
}
|
|
|
|
else if (gimple_assign_load_p (assignment))
|
|
|
|
{
|
|
|
|
ref->start = gimple_assign_rhs1 (assignment);
|
|
|
|
*ref_is_store = false;
|
|
|
|
}
|
|
|
|
else
|
|
|
|
return false;
|
|
|
|
|
|
|
|
ref->access_size = int_size_in_bytes (TREE_TYPE (ref->start));
|
|
|
|
return true;
|
|
|
|
}
|
|
|
|
|
|
|
|
/* Return the memory references contained in a gimple statement
|
|
|
|
representing a builtin call that has to do with memory access. */
|
|
|
|
|
|
|
|
static bool
|
2014-11-19 18:00:54 +01:00
|
|
|
get_mem_refs_of_builtin_call (const gcall *call,
|
[asan] Avoid instrumenting duplicated memory access in the same basic block
Like what Address Sanitizer does in LLVM, this patch avoids instrumented
duplicated memory accesses in the same basic blocks.
The approach taken is very conservative, to keep the pass simple, for
a start.
A memory access is considered to be a pair made of an expression tree
representing the beginning of the memory region that is accessed and
a the size of the access, in byte. For now that size is either 1, 2,
4, 8 or 16 bytes.
The patch builds a hash table of the memory accesses that have been
instrumented in the current basic block. Then it walks the gimple
statements of the current basic block. For each statement, it tests
if the memory regions it references have already been instrumented.
If not, the statement is instrumented and each memory references that
are actually instrumented are added to the hash table. When a memory
region is accessed (usually through builtin functions like memset),
then what gets added to the hash table is actually two memory
accesses: one for the beginning of the region, and the other for the
its end.
When the patch crosses a function call that is not a built-in function
that we ought to instrument, the hash table is cleared, because that
function call can possibly e.g free some memory that was instrumented.
Likewise, when a new basic block is visited, the hash table is
cleared. I guess we could be smarter than just unconditionally
clearing the hash table in this later case, but this is what asan@llvm
does, and for now, I thought starting in a conservative manner might
have some value.
The hash table is destroyed at the end of the pass.
Bootstrapped and tested against trunk on x86-64-unknown-linux-gnu.
gcc/
* Makefile.in (asan.o): Add new dependency on hash-table.h
* asan.c (struct asan_mem_ref, struct mem_ref_hasher): New types.
(asan_mem_ref_init, asan_mem_ref_get_end, get_mem_ref_hash_table)
(has_stmt_been_instrumented_p, empty_mem_ref_hash_table)
(free_mem_ref_resources, has_mem_ref_been_instrumented)
(has_stmt_been_instrumented_p, update_mem_ref_hash_table)
(get_mem_ref_of_assignment): New functions.
(get_mem_refs_of_builtin_call): Extract from
instrument_builtin_call and tweak a little bit to make it fit with
the new signature.
(instrument_builtin_call): Use the new
get_mem_refs_of_builtin_call. Use gimple_call_builtin_p instead
of is_gimple_builtin_call.
(instrument_derefs, instrument_mem_region_access): Insert the
instrumented memory reference into the hash table.
(maybe_instrument_assignment): Renamed instrument_assignment into
this, and change it to advance the iterator when instrumentation
actually happened and return true in that case. This makes it
homogeneous with maybe_instrument_assignment, and thus give a
chance to callers to be more 'regular'.
(transform_statements): Clear the memory reference hash table
whenever we enter a new BB, when we cross a function call, or when
we are done transforming statements. Use
maybe_instrument_assignment instead of instrumentation. No more
need to special case maybe_instrument_assignment and advance the
iterator after calling it; it's now handled just like
maybe_instrument_call. Update comment.
gcc/testsuite/
* c-c++-common/asan/no-redundant-instrumentation-1.c: New test.
* testsuite/c-c++-common/asan/no-redundant-instrumentation-2.c: Likewise.
* testsuite/c-c++-common/asan/no-redundant-instrumentation-3.c: Likewise.
* testsuite/c-c++-common/asan/inc.c: Likewise.
From-SVN: r196008
2013-02-13 11:32:26 +01:00
|
|
|
asan_mem_ref *src0,
|
|
|
|
tree *src0_len,
|
|
|
|
bool *src0_is_store,
|
|
|
|
asan_mem_ref *src1,
|
|
|
|
tree *src1_len,
|
|
|
|
bool *src1_is_store,
|
|
|
|
asan_mem_ref *dst,
|
|
|
|
tree *dst_len,
|
|
|
|
bool *dst_is_store,
|
2014-10-28 13:36:54 +01:00
|
|
|
bool *dest_is_deref,
|
|
|
|
bool *intercepted_p)
|
[asan] Avoid instrumenting duplicated memory access in the same basic block
Like what Address Sanitizer does in LLVM, this patch avoids instrumented
duplicated memory accesses in the same basic blocks.
The approach taken is very conservative, to keep the pass simple, for
a start.
A memory access is considered to be a pair made of an expression tree
representing the beginning of the memory region that is accessed and
a the size of the access, in byte. For now that size is either 1, 2,
4, 8 or 16 bytes.
The patch builds a hash table of the memory accesses that have been
instrumented in the current basic block. Then it walks the gimple
statements of the current basic block. For each statement, it tests
if the memory regions it references have already been instrumented.
If not, the statement is instrumented and each memory references that
are actually instrumented are added to the hash table. When a memory
region is accessed (usually through builtin functions like memset),
then what gets added to the hash table is actually two memory
accesses: one for the beginning of the region, and the other for the
its end.
When the patch crosses a function call that is not a built-in function
that we ought to instrument, the hash table is cleared, because that
function call can possibly e.g free some memory that was instrumented.
Likewise, when a new basic block is visited, the hash table is
cleared. I guess we could be smarter than just unconditionally
clearing the hash table in this later case, but this is what asan@llvm
does, and for now, I thought starting in a conservative manner might
have some value.
The hash table is destroyed at the end of the pass.
Bootstrapped and tested against trunk on x86-64-unknown-linux-gnu.
gcc/
* Makefile.in (asan.o): Add new dependency on hash-table.h
* asan.c (struct asan_mem_ref, struct mem_ref_hasher): New types.
(asan_mem_ref_init, asan_mem_ref_get_end, get_mem_ref_hash_table)
(has_stmt_been_instrumented_p, empty_mem_ref_hash_table)
(free_mem_ref_resources, has_mem_ref_been_instrumented)
(has_stmt_been_instrumented_p, update_mem_ref_hash_table)
(get_mem_ref_of_assignment): New functions.
(get_mem_refs_of_builtin_call): Extract from
instrument_builtin_call and tweak a little bit to make it fit with
the new signature.
(instrument_builtin_call): Use the new
get_mem_refs_of_builtin_call. Use gimple_call_builtin_p instead
of is_gimple_builtin_call.
(instrument_derefs, instrument_mem_region_access): Insert the
instrumented memory reference into the hash table.
(maybe_instrument_assignment): Renamed instrument_assignment into
this, and change it to advance the iterator when instrumentation
actually happened and return true in that case. This makes it
homogeneous with maybe_instrument_assignment, and thus give a
chance to callers to be more 'regular'.
(transform_statements): Clear the memory reference hash table
whenever we enter a new BB, when we cross a function call, or when
we are done transforming statements. Use
maybe_instrument_assignment instead of instrumentation. No more
need to special case maybe_instrument_assignment and advance the
iterator after calling it; it's now handled just like
maybe_instrument_call. Update comment.
gcc/testsuite/
* c-c++-common/asan/no-redundant-instrumentation-1.c: New test.
* testsuite/c-c++-common/asan/no-redundant-instrumentation-2.c: Likewise.
* testsuite/c-c++-common/asan/no-redundant-instrumentation-3.c: Likewise.
* testsuite/c-c++-common/asan/inc.c: Likewise.
From-SVN: r196008
2013-02-13 11:32:26 +01:00
|
|
|
{
|
|
|
|
gcc_checking_assert (gimple_call_builtin_p (call, BUILT_IN_NORMAL));
|
|
|
|
|
|
|
|
tree callee = gimple_call_fndecl (call);
|
|
|
|
tree source0 = NULL_TREE, source1 = NULL_TREE,
|
|
|
|
dest = NULL_TREE, len = NULL_TREE;
|
|
|
|
bool is_store = true, got_reference_p = false;
|
2014-05-30 20:37:05 +02:00
|
|
|
HOST_WIDE_INT access_size = 1;
|
[asan] Avoid instrumenting duplicated memory access in the same basic block
Like what Address Sanitizer does in LLVM, this patch avoids instrumented
duplicated memory accesses in the same basic blocks.
The approach taken is very conservative, to keep the pass simple, for
a start.
A memory access is considered to be a pair made of an expression tree
representing the beginning of the memory region that is accessed and
a the size of the access, in byte. For now that size is either 1, 2,
4, 8 or 16 bytes.
The patch builds a hash table of the memory accesses that have been
instrumented in the current basic block. Then it walks the gimple
statements of the current basic block. For each statement, it tests
if the memory regions it references have already been instrumented.
If not, the statement is instrumented and each memory references that
are actually instrumented are added to the hash table. When a memory
region is accessed (usually through builtin functions like memset),
then what gets added to the hash table is actually two memory
accesses: one for the beginning of the region, and the other for the
its end.
When the patch crosses a function call that is not a built-in function
that we ought to instrument, the hash table is cleared, because that
function call can possibly e.g free some memory that was instrumented.
Likewise, when a new basic block is visited, the hash table is
cleared. I guess we could be smarter than just unconditionally
clearing the hash table in this later case, but this is what asan@llvm
does, and for now, I thought starting in a conservative manner might
have some value.
The hash table is destroyed at the end of the pass.
Bootstrapped and tested against trunk on x86-64-unknown-linux-gnu.
gcc/
* Makefile.in (asan.o): Add new dependency on hash-table.h
* asan.c (struct asan_mem_ref, struct mem_ref_hasher): New types.
(asan_mem_ref_init, asan_mem_ref_get_end, get_mem_ref_hash_table)
(has_stmt_been_instrumented_p, empty_mem_ref_hash_table)
(free_mem_ref_resources, has_mem_ref_been_instrumented)
(has_stmt_been_instrumented_p, update_mem_ref_hash_table)
(get_mem_ref_of_assignment): New functions.
(get_mem_refs_of_builtin_call): Extract from
instrument_builtin_call and tweak a little bit to make it fit with
the new signature.
(instrument_builtin_call): Use the new
get_mem_refs_of_builtin_call. Use gimple_call_builtin_p instead
of is_gimple_builtin_call.
(instrument_derefs, instrument_mem_region_access): Insert the
instrumented memory reference into the hash table.
(maybe_instrument_assignment): Renamed instrument_assignment into
this, and change it to advance the iterator when instrumentation
actually happened and return true in that case. This makes it
homogeneous with maybe_instrument_assignment, and thus give a
chance to callers to be more 'regular'.
(transform_statements): Clear the memory reference hash table
whenever we enter a new BB, when we cross a function call, or when
we are done transforming statements. Use
maybe_instrument_assignment instead of instrumentation. No more
need to special case maybe_instrument_assignment and advance the
iterator after calling it; it's now handled just like
maybe_instrument_call. Update comment.
gcc/testsuite/
* c-c++-common/asan/no-redundant-instrumentation-1.c: New test.
* testsuite/c-c++-common/asan/no-redundant-instrumentation-2.c: Likewise.
* testsuite/c-c++-common/asan/no-redundant-instrumentation-3.c: Likewise.
* testsuite/c-c++-common/asan/inc.c: Likewise.
From-SVN: r196008
2013-02-13 11:32:26 +01:00
|
|
|
|
2014-10-28 13:36:54 +01:00
|
|
|
*intercepted_p = asan_intercepted_p ((DECL_FUNCTION_CODE (callee)));
|
|
|
|
|
[asan] Avoid instrumenting duplicated memory access in the same basic block
Like what Address Sanitizer does in LLVM, this patch avoids instrumented
duplicated memory accesses in the same basic blocks.
The approach taken is very conservative, to keep the pass simple, for
a start.
A memory access is considered to be a pair made of an expression tree
representing the beginning of the memory region that is accessed and
a the size of the access, in byte. For now that size is either 1, 2,
4, 8 or 16 bytes.
The patch builds a hash table of the memory accesses that have been
instrumented in the current basic block. Then it walks the gimple
statements of the current basic block. For each statement, it tests
if the memory regions it references have already been instrumented.
If not, the statement is instrumented and each memory references that
are actually instrumented are added to the hash table. When a memory
region is accessed (usually through builtin functions like memset),
then what gets added to the hash table is actually two memory
accesses: one for the beginning of the region, and the other for the
its end.
When the patch crosses a function call that is not a built-in function
that we ought to instrument, the hash table is cleared, because that
function call can possibly e.g free some memory that was instrumented.
Likewise, when a new basic block is visited, the hash table is
cleared. I guess we could be smarter than just unconditionally
clearing the hash table in this later case, but this is what asan@llvm
does, and for now, I thought starting in a conservative manner might
have some value.
The hash table is destroyed at the end of the pass.
Bootstrapped and tested against trunk on x86-64-unknown-linux-gnu.
gcc/
* Makefile.in (asan.o): Add new dependency on hash-table.h
* asan.c (struct asan_mem_ref, struct mem_ref_hasher): New types.
(asan_mem_ref_init, asan_mem_ref_get_end, get_mem_ref_hash_table)
(has_stmt_been_instrumented_p, empty_mem_ref_hash_table)
(free_mem_ref_resources, has_mem_ref_been_instrumented)
(has_stmt_been_instrumented_p, update_mem_ref_hash_table)
(get_mem_ref_of_assignment): New functions.
(get_mem_refs_of_builtin_call): Extract from
instrument_builtin_call and tweak a little bit to make it fit with
the new signature.
(instrument_builtin_call): Use the new
get_mem_refs_of_builtin_call. Use gimple_call_builtin_p instead
of is_gimple_builtin_call.
(instrument_derefs, instrument_mem_region_access): Insert the
instrumented memory reference into the hash table.
(maybe_instrument_assignment): Renamed instrument_assignment into
this, and change it to advance the iterator when instrumentation
actually happened and return true in that case. This makes it
homogeneous with maybe_instrument_assignment, and thus give a
chance to callers to be more 'regular'.
(transform_statements): Clear the memory reference hash table
whenever we enter a new BB, when we cross a function call, or when
we are done transforming statements. Use
maybe_instrument_assignment instead of instrumentation. No more
need to special case maybe_instrument_assignment and advance the
iterator after calling it; it's now handled just like
maybe_instrument_call. Update comment.
gcc/testsuite/
* c-c++-common/asan/no-redundant-instrumentation-1.c: New test.
* testsuite/c-c++-common/asan/no-redundant-instrumentation-2.c: Likewise.
* testsuite/c-c++-common/asan/no-redundant-instrumentation-3.c: Likewise.
* testsuite/c-c++-common/asan/inc.c: Likewise.
From-SVN: r196008
2013-02-13 11:32:26 +01:00
|
|
|
switch (DECL_FUNCTION_CODE (callee))
|
|
|
|
{
|
|
|
|
/* (s, s, n) style memops. */
|
|
|
|
case BUILT_IN_BCMP:
|
|
|
|
case BUILT_IN_MEMCMP:
|
|
|
|
source0 = gimple_call_arg (call, 0);
|
|
|
|
source1 = gimple_call_arg (call, 1);
|
|
|
|
len = gimple_call_arg (call, 2);
|
|
|
|
break;
|
|
|
|
|
|
|
|
/* (src, dest, n) style memops. */
|
|
|
|
case BUILT_IN_BCOPY:
|
|
|
|
source0 = gimple_call_arg (call, 0);
|
|
|
|
dest = gimple_call_arg (call, 1);
|
|
|
|
len = gimple_call_arg (call, 2);
|
|
|
|
break;
|
|
|
|
|
|
|
|
/* (dest, src, n) style memops. */
|
|
|
|
case BUILT_IN_MEMCPY:
|
|
|
|
case BUILT_IN_MEMCPY_CHK:
|
|
|
|
case BUILT_IN_MEMMOVE:
|
|
|
|
case BUILT_IN_MEMMOVE_CHK:
|
|
|
|
case BUILT_IN_MEMPCPY:
|
|
|
|
case BUILT_IN_MEMPCPY_CHK:
|
|
|
|
dest = gimple_call_arg (call, 0);
|
|
|
|
source0 = gimple_call_arg (call, 1);
|
|
|
|
len = gimple_call_arg (call, 2);
|
|
|
|
break;
|
|
|
|
|
|
|
|
/* (dest, n) style memops. */
|
|
|
|
case BUILT_IN_BZERO:
|
|
|
|
dest = gimple_call_arg (call, 0);
|
|
|
|
len = gimple_call_arg (call, 1);
|
|
|
|
break;
|
|
|
|
|
|
|
|
/* (dest, x, n) style memops*/
|
|
|
|
case BUILT_IN_MEMSET:
|
|
|
|
case BUILT_IN_MEMSET_CHK:
|
|
|
|
dest = gimple_call_arg (call, 0);
|
|
|
|
len = gimple_call_arg (call, 2);
|
|
|
|
break;
|
|
|
|
|
|
|
|
case BUILT_IN_STRLEN:
|
|
|
|
source0 = gimple_call_arg (call, 0);
|
|
|
|
len = gimple_call_lhs (call);
|
|
|
|
break ;
|
|
|
|
|
|
|
|
/* And now the __atomic* and __sync builtins.
|
|
|
|
These are handled differently from the classical memory memory
|
|
|
|
access builtins above. */
|
|
|
|
|
|
|
|
case BUILT_IN_ATOMIC_LOAD_1:
|
|
|
|
case BUILT_IN_ATOMIC_LOAD_2:
|
|
|
|
case BUILT_IN_ATOMIC_LOAD_4:
|
|
|
|
case BUILT_IN_ATOMIC_LOAD_8:
|
|
|
|
case BUILT_IN_ATOMIC_LOAD_16:
|
|
|
|
is_store = false;
|
|
|
|
/* fall through. */
|
|
|
|
|
|
|
|
case BUILT_IN_SYNC_FETCH_AND_ADD_1:
|
|
|
|
case BUILT_IN_SYNC_FETCH_AND_ADD_2:
|
|
|
|
case BUILT_IN_SYNC_FETCH_AND_ADD_4:
|
|
|
|
case BUILT_IN_SYNC_FETCH_AND_ADD_8:
|
|
|
|
case BUILT_IN_SYNC_FETCH_AND_ADD_16:
|
|
|
|
|
|
|
|
case BUILT_IN_SYNC_FETCH_AND_SUB_1:
|
|
|
|
case BUILT_IN_SYNC_FETCH_AND_SUB_2:
|
|
|
|
case BUILT_IN_SYNC_FETCH_AND_SUB_4:
|
|
|
|
case BUILT_IN_SYNC_FETCH_AND_SUB_8:
|
|
|
|
case BUILT_IN_SYNC_FETCH_AND_SUB_16:
|
|
|
|
|
|
|
|
case BUILT_IN_SYNC_FETCH_AND_OR_1:
|
|
|
|
case BUILT_IN_SYNC_FETCH_AND_OR_2:
|
|
|
|
case BUILT_IN_SYNC_FETCH_AND_OR_4:
|
|
|
|
case BUILT_IN_SYNC_FETCH_AND_OR_8:
|
|
|
|
case BUILT_IN_SYNC_FETCH_AND_OR_16:
|
|
|
|
|
|
|
|
case BUILT_IN_SYNC_FETCH_AND_AND_1:
|
|
|
|
case BUILT_IN_SYNC_FETCH_AND_AND_2:
|
|
|
|
case BUILT_IN_SYNC_FETCH_AND_AND_4:
|
|
|
|
case BUILT_IN_SYNC_FETCH_AND_AND_8:
|
|
|
|
case BUILT_IN_SYNC_FETCH_AND_AND_16:
|
|
|
|
|
|
|
|
case BUILT_IN_SYNC_FETCH_AND_XOR_1:
|
|
|
|
case BUILT_IN_SYNC_FETCH_AND_XOR_2:
|
|
|
|
case BUILT_IN_SYNC_FETCH_AND_XOR_4:
|
|
|
|
case BUILT_IN_SYNC_FETCH_AND_XOR_8:
|
|
|
|
case BUILT_IN_SYNC_FETCH_AND_XOR_16:
|
|
|
|
|
|
|
|
case BUILT_IN_SYNC_FETCH_AND_NAND_1:
|
|
|
|
case BUILT_IN_SYNC_FETCH_AND_NAND_2:
|
|
|
|
case BUILT_IN_SYNC_FETCH_AND_NAND_4:
|
|
|
|
case BUILT_IN_SYNC_FETCH_AND_NAND_8:
|
|
|
|
|
|
|
|
case BUILT_IN_SYNC_ADD_AND_FETCH_1:
|
|
|
|
case BUILT_IN_SYNC_ADD_AND_FETCH_2:
|
|
|
|
case BUILT_IN_SYNC_ADD_AND_FETCH_4:
|
|
|
|
case BUILT_IN_SYNC_ADD_AND_FETCH_8:
|
|
|
|
case BUILT_IN_SYNC_ADD_AND_FETCH_16:
|
|
|
|
|
|
|
|
case BUILT_IN_SYNC_SUB_AND_FETCH_1:
|
|
|
|
case BUILT_IN_SYNC_SUB_AND_FETCH_2:
|
|
|
|
case BUILT_IN_SYNC_SUB_AND_FETCH_4:
|
|
|
|
case BUILT_IN_SYNC_SUB_AND_FETCH_8:
|
|
|
|
case BUILT_IN_SYNC_SUB_AND_FETCH_16:
|
|
|
|
|
|
|
|
case BUILT_IN_SYNC_OR_AND_FETCH_1:
|
|
|
|
case BUILT_IN_SYNC_OR_AND_FETCH_2:
|
|
|
|
case BUILT_IN_SYNC_OR_AND_FETCH_4:
|
|
|
|
case BUILT_IN_SYNC_OR_AND_FETCH_8:
|
|
|
|
case BUILT_IN_SYNC_OR_AND_FETCH_16:
|
|
|
|
|
|
|
|
case BUILT_IN_SYNC_AND_AND_FETCH_1:
|
|
|
|
case BUILT_IN_SYNC_AND_AND_FETCH_2:
|
|
|
|
case BUILT_IN_SYNC_AND_AND_FETCH_4:
|
|
|
|
case BUILT_IN_SYNC_AND_AND_FETCH_8:
|
|
|
|
case BUILT_IN_SYNC_AND_AND_FETCH_16:
|
|
|
|
|
|
|
|
case BUILT_IN_SYNC_XOR_AND_FETCH_1:
|
|
|
|
case BUILT_IN_SYNC_XOR_AND_FETCH_2:
|
|
|
|
case BUILT_IN_SYNC_XOR_AND_FETCH_4:
|
|
|
|
case BUILT_IN_SYNC_XOR_AND_FETCH_8:
|
|
|
|
case BUILT_IN_SYNC_XOR_AND_FETCH_16:
|
|
|
|
|
|
|
|
case BUILT_IN_SYNC_NAND_AND_FETCH_1:
|
|
|
|
case BUILT_IN_SYNC_NAND_AND_FETCH_2:
|
|
|
|
case BUILT_IN_SYNC_NAND_AND_FETCH_4:
|
|
|
|
case BUILT_IN_SYNC_NAND_AND_FETCH_8:
|
|
|
|
|
|
|
|
case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_1:
|
|
|
|
case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_2:
|
|
|
|
case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_4:
|
|
|
|
case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_8:
|
|
|
|
case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_16:
|
|
|
|
|
|
|
|
case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_1:
|
|
|
|
case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_2:
|
|
|
|
case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_4:
|
|
|
|
case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_8:
|
|
|
|
case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_16:
|
|
|
|
|
|
|
|
case BUILT_IN_SYNC_LOCK_TEST_AND_SET_1:
|
|
|
|
case BUILT_IN_SYNC_LOCK_TEST_AND_SET_2:
|
|
|
|
case BUILT_IN_SYNC_LOCK_TEST_AND_SET_4:
|
|
|
|
case BUILT_IN_SYNC_LOCK_TEST_AND_SET_8:
|
|
|
|
case BUILT_IN_SYNC_LOCK_TEST_AND_SET_16:
|
|
|
|
|
|
|
|
case BUILT_IN_SYNC_LOCK_RELEASE_1:
|
|
|
|
case BUILT_IN_SYNC_LOCK_RELEASE_2:
|
|
|
|
case BUILT_IN_SYNC_LOCK_RELEASE_4:
|
|
|
|
case BUILT_IN_SYNC_LOCK_RELEASE_8:
|
|
|
|
case BUILT_IN_SYNC_LOCK_RELEASE_16:
|
|
|
|
|
|
|
|
case BUILT_IN_ATOMIC_EXCHANGE_1:
|
|
|
|
case BUILT_IN_ATOMIC_EXCHANGE_2:
|
|
|
|
case BUILT_IN_ATOMIC_EXCHANGE_4:
|
|
|
|
case BUILT_IN_ATOMIC_EXCHANGE_8:
|
|
|
|
case BUILT_IN_ATOMIC_EXCHANGE_16:
|
|
|
|
|
|
|
|
case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_1:
|
|
|
|
case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_2:
|
|
|
|
case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_4:
|
|
|
|
case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_8:
|
|
|
|
case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_16:
|
|
|
|
|
|
|
|
case BUILT_IN_ATOMIC_STORE_1:
|
|
|
|
case BUILT_IN_ATOMIC_STORE_2:
|
|
|
|
case BUILT_IN_ATOMIC_STORE_4:
|
|
|
|
case BUILT_IN_ATOMIC_STORE_8:
|
|
|
|
case BUILT_IN_ATOMIC_STORE_16:
|
|
|
|
|
|
|
|
case BUILT_IN_ATOMIC_ADD_FETCH_1:
|
|
|
|
case BUILT_IN_ATOMIC_ADD_FETCH_2:
|
|
|
|
case BUILT_IN_ATOMIC_ADD_FETCH_4:
|
|
|
|
case BUILT_IN_ATOMIC_ADD_FETCH_8:
|
|
|
|
case BUILT_IN_ATOMIC_ADD_FETCH_16:
|
|
|
|
|
|
|
|
case BUILT_IN_ATOMIC_SUB_FETCH_1:
|
|
|
|
case BUILT_IN_ATOMIC_SUB_FETCH_2:
|
|
|
|
case BUILT_IN_ATOMIC_SUB_FETCH_4:
|
|
|
|
case BUILT_IN_ATOMIC_SUB_FETCH_8:
|
|
|
|
case BUILT_IN_ATOMIC_SUB_FETCH_16:
|
|
|
|
|
|
|
|
case BUILT_IN_ATOMIC_AND_FETCH_1:
|
|
|
|
case BUILT_IN_ATOMIC_AND_FETCH_2:
|
|
|
|
case BUILT_IN_ATOMIC_AND_FETCH_4:
|
|
|
|
case BUILT_IN_ATOMIC_AND_FETCH_8:
|
|
|
|
case BUILT_IN_ATOMIC_AND_FETCH_16:
|
|
|
|
|
|
|
|
case BUILT_IN_ATOMIC_NAND_FETCH_1:
|
|
|
|
case BUILT_IN_ATOMIC_NAND_FETCH_2:
|
|
|
|
case BUILT_IN_ATOMIC_NAND_FETCH_4:
|
|
|
|
case BUILT_IN_ATOMIC_NAND_FETCH_8:
|
|
|
|
case BUILT_IN_ATOMIC_NAND_FETCH_16:
|
|
|
|
|
|
|
|
case BUILT_IN_ATOMIC_XOR_FETCH_1:
|
|
|
|
case BUILT_IN_ATOMIC_XOR_FETCH_2:
|
|
|
|
case BUILT_IN_ATOMIC_XOR_FETCH_4:
|
|
|
|
case BUILT_IN_ATOMIC_XOR_FETCH_8:
|
|
|
|
case BUILT_IN_ATOMIC_XOR_FETCH_16:
|
|
|
|
|
|
|
|
case BUILT_IN_ATOMIC_OR_FETCH_1:
|
|
|
|
case BUILT_IN_ATOMIC_OR_FETCH_2:
|
|
|
|
case BUILT_IN_ATOMIC_OR_FETCH_4:
|
|
|
|
case BUILT_IN_ATOMIC_OR_FETCH_8:
|
|
|
|
case BUILT_IN_ATOMIC_OR_FETCH_16:
|
|
|
|
|
|
|
|
case BUILT_IN_ATOMIC_FETCH_ADD_1:
|
|
|
|
case BUILT_IN_ATOMIC_FETCH_ADD_2:
|
|
|
|
case BUILT_IN_ATOMIC_FETCH_ADD_4:
|
|
|
|
case BUILT_IN_ATOMIC_FETCH_ADD_8:
|
|
|
|
case BUILT_IN_ATOMIC_FETCH_ADD_16:
|
|
|
|
|
|
|
|
case BUILT_IN_ATOMIC_FETCH_SUB_1:
|
|
|
|
case BUILT_IN_ATOMIC_FETCH_SUB_2:
|
|
|
|
case BUILT_IN_ATOMIC_FETCH_SUB_4:
|
|
|
|
case BUILT_IN_ATOMIC_FETCH_SUB_8:
|
|
|
|
case BUILT_IN_ATOMIC_FETCH_SUB_16:
|
|
|
|
|
|
|
|
case BUILT_IN_ATOMIC_FETCH_AND_1:
|
|
|
|
case BUILT_IN_ATOMIC_FETCH_AND_2:
|
|
|
|
case BUILT_IN_ATOMIC_FETCH_AND_4:
|
|
|
|
case BUILT_IN_ATOMIC_FETCH_AND_8:
|
|
|
|
case BUILT_IN_ATOMIC_FETCH_AND_16:
|
|
|
|
|
|
|
|
case BUILT_IN_ATOMIC_FETCH_NAND_1:
|
|
|
|
case BUILT_IN_ATOMIC_FETCH_NAND_2:
|
|
|
|
case BUILT_IN_ATOMIC_FETCH_NAND_4:
|
|
|
|
case BUILT_IN_ATOMIC_FETCH_NAND_8:
|
|
|
|
case BUILT_IN_ATOMIC_FETCH_NAND_16:
|
|
|
|
|
|
|
|
case BUILT_IN_ATOMIC_FETCH_XOR_1:
|
|
|
|
case BUILT_IN_ATOMIC_FETCH_XOR_2:
|
|
|
|
case BUILT_IN_ATOMIC_FETCH_XOR_4:
|
|
|
|
case BUILT_IN_ATOMIC_FETCH_XOR_8:
|
|
|
|
case BUILT_IN_ATOMIC_FETCH_XOR_16:
|
|
|
|
|
|
|
|
case BUILT_IN_ATOMIC_FETCH_OR_1:
|
|
|
|
case BUILT_IN_ATOMIC_FETCH_OR_2:
|
|
|
|
case BUILT_IN_ATOMIC_FETCH_OR_4:
|
|
|
|
case BUILT_IN_ATOMIC_FETCH_OR_8:
|
|
|
|
case BUILT_IN_ATOMIC_FETCH_OR_16:
|
|
|
|
{
|
|
|
|
dest = gimple_call_arg (call, 0);
|
|
|
|
/* DEST represents the address of a memory location.
|
|
|
|
instrument_derefs wants the memory location, so lets
|
|
|
|
dereference the address DEST before handing it to
|
|
|
|
instrument_derefs. */
|
|
|
|
if (TREE_CODE (dest) == ADDR_EXPR)
|
|
|
|
dest = TREE_OPERAND (dest, 0);
|
2013-11-07 13:04:45 +01:00
|
|
|
else if (TREE_CODE (dest) == SSA_NAME || TREE_CODE (dest) == INTEGER_CST)
|
[asan] Avoid instrumenting duplicated memory access in the same basic block
Like what Address Sanitizer does in LLVM, this patch avoids instrumented
duplicated memory accesses in the same basic blocks.
The approach taken is very conservative, to keep the pass simple, for
a start.
A memory access is considered to be a pair made of an expression tree
representing the beginning of the memory region that is accessed and
a the size of the access, in byte. For now that size is either 1, 2,
4, 8 or 16 bytes.
The patch builds a hash table of the memory accesses that have been
instrumented in the current basic block. Then it walks the gimple
statements of the current basic block. For each statement, it tests
if the memory regions it references have already been instrumented.
If not, the statement is instrumented and each memory references that
are actually instrumented are added to the hash table. When a memory
region is accessed (usually through builtin functions like memset),
then what gets added to the hash table is actually two memory
accesses: one for the beginning of the region, and the other for the
its end.
When the patch crosses a function call that is not a built-in function
that we ought to instrument, the hash table is cleared, because that
function call can possibly e.g free some memory that was instrumented.
Likewise, when a new basic block is visited, the hash table is
cleared. I guess we could be smarter than just unconditionally
clearing the hash table in this later case, but this is what asan@llvm
does, and for now, I thought starting in a conservative manner might
have some value.
The hash table is destroyed at the end of the pass.
Bootstrapped and tested against trunk on x86-64-unknown-linux-gnu.
gcc/
* Makefile.in (asan.o): Add new dependency on hash-table.h
* asan.c (struct asan_mem_ref, struct mem_ref_hasher): New types.
(asan_mem_ref_init, asan_mem_ref_get_end, get_mem_ref_hash_table)
(has_stmt_been_instrumented_p, empty_mem_ref_hash_table)
(free_mem_ref_resources, has_mem_ref_been_instrumented)
(has_stmt_been_instrumented_p, update_mem_ref_hash_table)
(get_mem_ref_of_assignment): New functions.
(get_mem_refs_of_builtin_call): Extract from
instrument_builtin_call and tweak a little bit to make it fit with
the new signature.
(instrument_builtin_call): Use the new
get_mem_refs_of_builtin_call. Use gimple_call_builtin_p instead
of is_gimple_builtin_call.
(instrument_derefs, instrument_mem_region_access): Insert the
instrumented memory reference into the hash table.
(maybe_instrument_assignment): Renamed instrument_assignment into
this, and change it to advance the iterator when instrumentation
actually happened and return true in that case. This makes it
homogeneous with maybe_instrument_assignment, and thus give a
chance to callers to be more 'regular'.
(transform_statements): Clear the memory reference hash table
whenever we enter a new BB, when we cross a function call, or when
we are done transforming statements. Use
maybe_instrument_assignment instead of instrumentation. No more
need to special case maybe_instrument_assignment and advance the
iterator after calling it; it's now handled just like
maybe_instrument_call. Update comment.
gcc/testsuite/
* c-c++-common/asan/no-redundant-instrumentation-1.c: New test.
* testsuite/c-c++-common/asan/no-redundant-instrumentation-2.c: Likewise.
* testsuite/c-c++-common/asan/no-redundant-instrumentation-3.c: Likewise.
* testsuite/c-c++-common/asan/inc.c: Likewise.
From-SVN: r196008
2013-02-13 11:32:26 +01:00
|
|
|
dest = build2 (MEM_REF, TREE_TYPE (TREE_TYPE (dest)),
|
|
|
|
dest, build_int_cst (TREE_TYPE (dest), 0));
|
|
|
|
else
|
|
|
|
gcc_unreachable ();
|
|
|
|
|
|
|
|
access_size = int_size_in_bytes (TREE_TYPE (dest));
|
|
|
|
}
|
|
|
|
|
|
|
|
default:
|
|
|
|
/* The other builtins memory access are not instrumented in this
|
|
|
|
function because they either don't have any length parameter,
|
|
|
|
or their length parameter is just a limit. */
|
|
|
|
break;
|
|
|
|
}
|
|
|
|
|
|
|
|
if (len != NULL_TREE)
|
|
|
|
{
|
|
|
|
if (source0 != NULL_TREE)
|
|
|
|
{
|
|
|
|
src0->start = source0;
|
|
|
|
src0->access_size = access_size;
|
|
|
|
*src0_len = len;
|
|
|
|
*src0_is_store = false;
|
|
|
|
}
|
|
|
|
|
|
|
|
if (source1 != NULL_TREE)
|
|
|
|
{
|
|
|
|
src1->start = source1;
|
|
|
|
src1->access_size = access_size;
|
|
|
|
*src1_len = len;
|
|
|
|
*src1_is_store = false;
|
|
|
|
}
|
|
|
|
|
|
|
|
if (dest != NULL_TREE)
|
|
|
|
{
|
|
|
|
dst->start = dest;
|
|
|
|
dst->access_size = access_size;
|
|
|
|
*dst_len = len;
|
|
|
|
*dst_is_store = true;
|
|
|
|
}
|
|
|
|
|
|
|
|
got_reference_p = true;
|
|
|
|
}
|
2013-02-16 10:32:56 +01:00
|
|
|
else if (dest)
|
|
|
|
{
|
|
|
|
dst->start = dest;
|
|
|
|
dst->access_size = access_size;
|
|
|
|
*dst_len = NULL_TREE;
|
|
|
|
*dst_is_store = is_store;
|
|
|
|
*dest_is_deref = true;
|
|
|
|
got_reference_p = true;
|
|
|
|
}
|
[asan] Avoid instrumenting duplicated memory access in the same basic block
Like what Address Sanitizer does in LLVM, this patch avoids instrumented
duplicated memory accesses in the same basic blocks.
The approach taken is very conservative, to keep the pass simple, for
a start.
A memory access is considered to be a pair made of an expression tree
representing the beginning of the memory region that is accessed and
a the size of the access, in byte. For now that size is either 1, 2,
4, 8 or 16 bytes.
The patch builds a hash table of the memory accesses that have been
instrumented in the current basic block. Then it walks the gimple
statements of the current basic block. For each statement, it tests
if the memory regions it references have already been instrumented.
If not, the statement is instrumented and each memory references that
are actually instrumented are added to the hash table. When a memory
region is accessed (usually through builtin functions like memset),
then what gets added to the hash table is actually two memory
accesses: one for the beginning of the region, and the other for the
its end.
When the patch crosses a function call that is not a built-in function
that we ought to instrument, the hash table is cleared, because that
function call can possibly e.g free some memory that was instrumented.
Likewise, when a new basic block is visited, the hash table is
cleared. I guess we could be smarter than just unconditionally
clearing the hash table in this later case, but this is what asan@llvm
does, and for now, I thought starting in a conservative manner might
have some value.
The hash table is destroyed at the end of the pass.
Bootstrapped and tested against trunk on x86-64-unknown-linux-gnu.
gcc/
* Makefile.in (asan.o): Add new dependency on hash-table.h
* asan.c (struct asan_mem_ref, struct mem_ref_hasher): New types.
(asan_mem_ref_init, asan_mem_ref_get_end, get_mem_ref_hash_table)
(has_stmt_been_instrumented_p, empty_mem_ref_hash_table)
(free_mem_ref_resources, has_mem_ref_been_instrumented)
(has_stmt_been_instrumented_p, update_mem_ref_hash_table)
(get_mem_ref_of_assignment): New functions.
(get_mem_refs_of_builtin_call): Extract from
instrument_builtin_call and tweak a little bit to make it fit with
the new signature.
(instrument_builtin_call): Use the new
get_mem_refs_of_builtin_call. Use gimple_call_builtin_p instead
of is_gimple_builtin_call.
(instrument_derefs, instrument_mem_region_access): Insert the
instrumented memory reference into the hash table.
(maybe_instrument_assignment): Renamed instrument_assignment into
this, and change it to advance the iterator when instrumentation
actually happened and return true in that case. This makes it
homogeneous with maybe_instrument_assignment, and thus give a
chance to callers to be more 'regular'.
(transform_statements): Clear the memory reference hash table
whenever we enter a new BB, when we cross a function call, or when
we are done transforming statements. Use
maybe_instrument_assignment instead of instrumentation. No more
need to special case maybe_instrument_assignment and advance the
iterator after calling it; it's now handled just like
maybe_instrument_call. Update comment.
gcc/testsuite/
* c-c++-common/asan/no-redundant-instrumentation-1.c: New test.
* testsuite/c-c++-common/asan/no-redundant-instrumentation-2.c: Likewise.
* testsuite/c-c++-common/asan/no-redundant-instrumentation-3.c: Likewise.
* testsuite/c-c++-common/asan/inc.c: Likewise.
From-SVN: r196008
2013-02-13 11:32:26 +01:00
|
|
|
|
2013-02-16 10:32:56 +01:00
|
|
|
return got_reference_p;
|
[asan] Avoid instrumenting duplicated memory access in the same basic block
Like what Address Sanitizer does in LLVM, this patch avoids instrumented
duplicated memory accesses in the same basic blocks.
The approach taken is very conservative, to keep the pass simple, for
a start.
A memory access is considered to be a pair made of an expression tree
representing the beginning of the memory region that is accessed and
a the size of the access, in byte. For now that size is either 1, 2,
4, 8 or 16 bytes.
The patch builds a hash table of the memory accesses that have been
instrumented in the current basic block. Then it walks the gimple
statements of the current basic block. For each statement, it tests
if the memory regions it references have already been instrumented.
If not, the statement is instrumented and each memory references that
are actually instrumented are added to the hash table. When a memory
region is accessed (usually through builtin functions like memset),
then what gets added to the hash table is actually two memory
accesses: one for the beginning of the region, and the other for the
its end.
When the patch crosses a function call that is not a built-in function
that we ought to instrument, the hash table is cleared, because that
function call can possibly e.g free some memory that was instrumented.
Likewise, when a new basic block is visited, the hash table is
cleared. I guess we could be smarter than just unconditionally
clearing the hash table in this later case, but this is what asan@llvm
does, and for now, I thought starting in a conservative manner might
have some value.
The hash table is destroyed at the end of the pass.
Bootstrapped and tested against trunk on x86-64-unknown-linux-gnu.
gcc/
* Makefile.in (asan.o): Add new dependency on hash-table.h
* asan.c (struct asan_mem_ref, struct mem_ref_hasher): New types.
(asan_mem_ref_init, asan_mem_ref_get_end, get_mem_ref_hash_table)
(has_stmt_been_instrumented_p, empty_mem_ref_hash_table)
(free_mem_ref_resources, has_mem_ref_been_instrumented)
(has_stmt_been_instrumented_p, update_mem_ref_hash_table)
(get_mem_ref_of_assignment): New functions.
(get_mem_refs_of_builtin_call): Extract from
instrument_builtin_call and tweak a little bit to make it fit with
the new signature.
(instrument_builtin_call): Use the new
get_mem_refs_of_builtin_call. Use gimple_call_builtin_p instead
of is_gimple_builtin_call.
(instrument_derefs, instrument_mem_region_access): Insert the
instrumented memory reference into the hash table.
(maybe_instrument_assignment): Renamed instrument_assignment into
this, and change it to advance the iterator when instrumentation
actually happened and return true in that case. This makes it
homogeneous with maybe_instrument_assignment, and thus give a
chance to callers to be more 'regular'.
(transform_statements): Clear the memory reference hash table
whenever we enter a new BB, when we cross a function call, or when
we are done transforming statements. Use
maybe_instrument_assignment instead of instrumentation. No more
need to special case maybe_instrument_assignment and advance the
iterator after calling it; it's now handled just like
maybe_instrument_call. Update comment.
gcc/testsuite/
* c-c++-common/asan/no-redundant-instrumentation-1.c: New test.
* testsuite/c-c++-common/asan/no-redundant-instrumentation-2.c: Likewise.
* testsuite/c-c++-common/asan/no-redundant-instrumentation-3.c: Likewise.
* testsuite/c-c++-common/asan/inc.c: Likewise.
From-SVN: r196008
2013-02-13 11:32:26 +01:00
|
|
|
}
|
|
|
|
|
|
|
|
/* Return true iff a given gimple statement has been instrumented.
|
|
|
|
Note that the statement is "defined" by the memory references it
|
|
|
|
contains. */
|
|
|
|
|
|
|
|
static bool
|
2015-09-20 02:52:59 +02:00
|
|
|
has_stmt_been_instrumented_p (gimple *stmt)
|
[asan] Avoid instrumenting duplicated memory access in the same basic block
Like what Address Sanitizer does in LLVM, this patch avoids instrumented
duplicated memory accesses in the same basic blocks.
The approach taken is very conservative, to keep the pass simple, for
a start.
A memory access is considered to be a pair made of an expression tree
representing the beginning of the memory region that is accessed and
a the size of the access, in byte. For now that size is either 1, 2,
4, 8 or 16 bytes.
The patch builds a hash table of the memory accesses that have been
instrumented in the current basic block. Then it walks the gimple
statements of the current basic block. For each statement, it tests
if the memory regions it references have already been instrumented.
If not, the statement is instrumented and each memory references that
are actually instrumented are added to the hash table. When a memory
region is accessed (usually through builtin functions like memset),
then what gets added to the hash table is actually two memory
accesses: one for the beginning of the region, and the other for the
its end.
When the patch crosses a function call that is not a built-in function
that we ought to instrument, the hash table is cleared, because that
function call can possibly e.g free some memory that was instrumented.
Likewise, when a new basic block is visited, the hash table is
cleared. I guess we could be smarter than just unconditionally
clearing the hash table in this later case, but this is what asan@llvm
does, and for now, I thought starting in a conservative manner might
have some value.
The hash table is destroyed at the end of the pass.
Bootstrapped and tested against trunk on x86-64-unknown-linux-gnu.
gcc/
* Makefile.in (asan.o): Add new dependency on hash-table.h
* asan.c (struct asan_mem_ref, struct mem_ref_hasher): New types.
(asan_mem_ref_init, asan_mem_ref_get_end, get_mem_ref_hash_table)
(has_stmt_been_instrumented_p, empty_mem_ref_hash_table)
(free_mem_ref_resources, has_mem_ref_been_instrumented)
(has_stmt_been_instrumented_p, update_mem_ref_hash_table)
(get_mem_ref_of_assignment): New functions.
(get_mem_refs_of_builtin_call): Extract from
instrument_builtin_call and tweak a little bit to make it fit with
the new signature.
(instrument_builtin_call): Use the new
get_mem_refs_of_builtin_call. Use gimple_call_builtin_p instead
of is_gimple_builtin_call.
(instrument_derefs, instrument_mem_region_access): Insert the
instrumented memory reference into the hash table.
(maybe_instrument_assignment): Renamed instrument_assignment into
this, and change it to advance the iterator when instrumentation
actually happened and return true in that case. This makes it
homogeneous with maybe_instrument_assignment, and thus give a
chance to callers to be more 'regular'.
(transform_statements): Clear the memory reference hash table
whenever we enter a new BB, when we cross a function call, or when
we are done transforming statements. Use
maybe_instrument_assignment instead of instrumentation. No more
need to special case maybe_instrument_assignment and advance the
iterator after calling it; it's now handled just like
maybe_instrument_call. Update comment.
gcc/testsuite/
* c-c++-common/asan/no-redundant-instrumentation-1.c: New test.
* testsuite/c-c++-common/asan/no-redundant-instrumentation-2.c: Likewise.
* testsuite/c-c++-common/asan/no-redundant-instrumentation-3.c: Likewise.
* testsuite/c-c++-common/asan/inc.c: Likewise.
From-SVN: r196008
2013-02-13 11:32:26 +01:00
|
|
|
{
|
|
|
|
if (gimple_assign_single_p (stmt))
|
|
|
|
{
|
|
|
|
bool r_is_store;
|
|
|
|
asan_mem_ref r;
|
|
|
|
asan_mem_ref_init (&r, NULL, 1);
|
|
|
|
|
2014-11-19 18:00:54 +01:00
|
|
|
if (get_mem_ref_of_assignment (as_a <gassign *> (stmt), &r,
|
|
|
|
&r_is_store))
|
[asan] Avoid instrumenting duplicated memory access in the same basic block
Like what Address Sanitizer does in LLVM, this patch avoids instrumented
duplicated memory accesses in the same basic blocks.
The approach taken is very conservative, to keep the pass simple, for
a start.
A memory access is considered to be a pair made of an expression tree
representing the beginning of the memory region that is accessed and
a the size of the access, in byte. For now that size is either 1, 2,
4, 8 or 16 bytes.
The patch builds a hash table of the memory accesses that have been
instrumented in the current basic block. Then it walks the gimple
statements of the current basic block. For each statement, it tests
if the memory regions it references have already been instrumented.
If not, the statement is instrumented and each memory references that
are actually instrumented are added to the hash table. When a memory
region is accessed (usually through builtin functions like memset),
then what gets added to the hash table is actually two memory
accesses: one for the beginning of the region, and the other for the
its end.
When the patch crosses a function call that is not a built-in function
that we ought to instrument, the hash table is cleared, because that
function call can possibly e.g free some memory that was instrumented.
Likewise, when a new basic block is visited, the hash table is
cleared. I guess we could be smarter than just unconditionally
clearing the hash table in this later case, but this is what asan@llvm
does, and for now, I thought starting in a conservative manner might
have some value.
The hash table is destroyed at the end of the pass.
Bootstrapped and tested against trunk on x86-64-unknown-linux-gnu.
gcc/
* Makefile.in (asan.o): Add new dependency on hash-table.h
* asan.c (struct asan_mem_ref, struct mem_ref_hasher): New types.
(asan_mem_ref_init, asan_mem_ref_get_end, get_mem_ref_hash_table)
(has_stmt_been_instrumented_p, empty_mem_ref_hash_table)
(free_mem_ref_resources, has_mem_ref_been_instrumented)
(has_stmt_been_instrumented_p, update_mem_ref_hash_table)
(get_mem_ref_of_assignment): New functions.
(get_mem_refs_of_builtin_call): Extract from
instrument_builtin_call and tweak a little bit to make it fit with
the new signature.
(instrument_builtin_call): Use the new
get_mem_refs_of_builtin_call. Use gimple_call_builtin_p instead
of is_gimple_builtin_call.
(instrument_derefs, instrument_mem_region_access): Insert the
instrumented memory reference into the hash table.
(maybe_instrument_assignment): Renamed instrument_assignment into
this, and change it to advance the iterator when instrumentation
actually happened and return true in that case. This makes it
homogeneous with maybe_instrument_assignment, and thus give a
chance to callers to be more 'regular'.
(transform_statements): Clear the memory reference hash table
whenever we enter a new BB, when we cross a function call, or when
we are done transforming statements. Use
maybe_instrument_assignment instead of instrumentation. No more
need to special case maybe_instrument_assignment and advance the
iterator after calling it; it's now handled just like
maybe_instrument_call. Update comment.
gcc/testsuite/
* c-c++-common/asan/no-redundant-instrumentation-1.c: New test.
* testsuite/c-c++-common/asan/no-redundant-instrumentation-2.c: Likewise.
* testsuite/c-c++-common/asan/no-redundant-instrumentation-3.c: Likewise.
* testsuite/c-c++-common/asan/inc.c: Likewise.
From-SVN: r196008
2013-02-13 11:32:26 +01:00
|
|
|
return has_mem_ref_been_instrumented (&r);
|
|
|
|
}
|
|
|
|
else if (gimple_call_builtin_p (stmt, BUILT_IN_NORMAL))
|
|
|
|
{
|
|
|
|
asan_mem_ref src0, src1, dest;
|
|
|
|
asan_mem_ref_init (&src0, NULL, 1);
|
|
|
|
asan_mem_ref_init (&src1, NULL, 1);
|
|
|
|
asan_mem_ref_init (&dest, NULL, 1);
|
|
|
|
|
|
|
|
tree src0_len = NULL_TREE, src1_len = NULL_TREE, dest_len = NULL_TREE;
|
|
|
|
bool src0_is_store = false, src1_is_store = false,
|
2014-10-28 13:36:54 +01:00
|
|
|
dest_is_store = false, dest_is_deref = false, intercepted_p = true;
|
2014-11-19 18:00:54 +01:00
|
|
|
if (get_mem_refs_of_builtin_call (as_a <gcall *> (stmt),
|
[asan] Avoid instrumenting duplicated memory access in the same basic block
Like what Address Sanitizer does in LLVM, this patch avoids instrumenting
duplicated memory accesses in the same basic blocks.
The approach taken is very conservative, to keep the pass simple, for
a start.
A memory access is considered to be a pair made of an expression tree
representing the beginning of the memory region that is accessed and
the size of the access, in bytes.  For now that size is either 1, 2,
4, 8 or 16 bytes.
The patch builds a hash table of the memory accesses that have been
instrumented in the current basic block. Then it walks the gimple
statements of the current basic block. For each statement, it tests
if the memory regions it references have already been instrumented.
If not, the statement is instrumented and each memory references that
are actually instrumented are added to the hash table. When a memory
region is accessed (usually through builtin functions like memset),
then what gets added to the hash table is actually two memory
accesses: one for the beginning of the region, and the other for
its end.
When the patch crosses a function call that is not a built-in function
that we ought to instrument, the hash table is cleared, because that
function call can possibly, e.g., free some memory that was instrumented.
Likewise, when a new basic block is visited, the hash table is
cleared. I guess we could be smarter than just unconditionally
clearing the hash table in this latter case, but this is what asan@llvm
does, and for now, I thought starting in a conservative manner might
have some value.
The hash table is destroyed at the end of the pass.
Bootstrapped and tested against trunk on x86-64-unknown-linux-gnu.
gcc/
* Makefile.in (asan.o): Add new dependency on hash-table.h
* asan.c (struct asan_mem_ref, struct mem_ref_hasher): New types.
(asan_mem_ref_init, asan_mem_ref_get_end, get_mem_ref_hash_table)
(has_stmt_been_instrumented_p, empty_mem_ref_hash_table)
(free_mem_ref_resources, has_mem_ref_been_instrumented)
(has_stmt_been_instrumented_p, update_mem_ref_hash_table)
(get_mem_ref_of_assignment): New functions.
(get_mem_refs_of_builtin_call): Extract from
instrument_builtin_call and tweak a little bit to make it fit with
the new signature.
(instrument_builtin_call): Use the new
get_mem_refs_of_builtin_call. Use gimple_call_builtin_p instead
of is_gimple_builtin_call.
(instrument_derefs, instrument_mem_region_access): Insert the
instrumented memory reference into the hash table.
(maybe_instrument_assignment): Renamed instrument_assignment into
this, and change it to advance the iterator when instrumentation
actually happened and return true in that case. This makes it
homogeneous with maybe_instrument_assignment, and thus give a
chance to callers to be more 'regular'.
(transform_statements): Clear the memory reference hash table
whenever we enter a new BB, when we cross a function call, or when
we are done transforming statements. Use
maybe_instrument_assignment instead of instrumentation. No more
need to special case maybe_instrument_assignment and advance the
iterator after calling it; it's now handled just like
maybe_instrument_call. Update comment.
gcc/testsuite/
* c-c++-common/asan/no-redundant-instrumentation-1.c: New test.
* testsuite/c-c++-common/asan/no-redundant-instrumentation-2.c: Likewise.
* testsuite/c-c++-common/asan/no-redundant-instrumentation-3.c: Likewise.
* testsuite/c-c++-common/asan/inc.c: Likewise.
From-SVN: r196008
2013-02-13 11:32:26 +01:00
|
|
|
&src0, &src0_len, &src0_is_store,
|
|
|
|
&src1, &src1_len, &src1_is_store,
|
|
|
|
&dest, &dest_len, &dest_is_store,
|
2014-10-28 13:36:54 +01:00
|
|
|
&dest_is_deref, &intercepted_p))
|
[asan] Avoid instrumenting duplicated memory access in the same basic block
Like what Address Sanitizer does in LLVM, this patch avoids instrumented
duplicated memory accesses in the same basic blocks.
The approach taken is very conservative, to keep the pass simple, for
a start.
A memory access is considered to be a pair made of an expression tree
representing the beginning of the memory region that is accessed and
a the size of the access, in byte. For now that size is either 1, 2,
4, 8 or 16 bytes.
The patch builds a hash table of the memory accesses that have been
instrumented in the current basic block. Then it walks the gimple
statements of the current basic block. For each statement, it tests
if the memory regions it references have already been instrumented.
If not, the statement is instrumented and each memory references that
are actually instrumented are added to the hash table. When a memory
region is accessed (usually through builtin functions like memset),
then what gets added to the hash table is actually two memory
accesses: one for the beginning of the region, and the other for the
its end.
When the patch crosses a function call that is not a built-in function
that we ought to instrument, the hash table is cleared, because that
function call can possibly e.g free some memory that was instrumented.
Likewise, when a new basic block is visited, the hash table is
cleared. I guess we could be smarter than just unconditionally
clearing the hash table in this later case, but this is what asan@llvm
does, and for now, I thought starting in a conservative manner might
have some value.
The hash table is destroyed at the end of the pass.
Bootstrapped and tested against trunk on x86-64-unknown-linux-gnu.
gcc/
* Makefile.in (asan.o): Add new dependency on hash-table.h
* asan.c (struct asan_mem_ref, struct mem_ref_hasher): New types.
(asan_mem_ref_init, asan_mem_ref_get_end, get_mem_ref_hash_table)
(has_stmt_been_instrumented_p, empty_mem_ref_hash_table)
(free_mem_ref_resources, has_mem_ref_been_instrumented)
(has_stmt_been_instrumented_p, update_mem_ref_hash_table)
(get_mem_ref_of_assignment): New functions.
(get_mem_refs_of_builtin_call): Extract from
instrument_builtin_call and tweak a little bit to make it fit with
the new signature.
(instrument_builtin_call): Use the new
get_mem_refs_of_builtin_call. Use gimple_call_builtin_p instead
of is_gimple_builtin_call.
(instrument_derefs, instrument_mem_region_access): Insert the
instrumented memory reference into the hash table.
(maybe_instrument_assignment): Renamed instrument_assignment into
this, and change it to advance the iterator when instrumentation
actually happened and return true in that case. This makes it
homogeneous with maybe_instrument_assignment, and thus give a
chance to callers to be more 'regular'.
(transform_statements): Clear the memory reference hash table
whenever we enter a new BB, when we cross a function call, or when
we are done transforming statements. Use
maybe_instrument_assignment instead of instrumentation. No more
need to special case maybe_instrument_assignment and advance the
iterator after calling it; it's now handled just like
maybe_instrument_call. Update comment.
gcc/testsuite/
* c-c++-common/asan/no-redundant-instrumentation-1.c: New test.
* testsuite/c-c++-common/asan/no-redundant-instrumentation-2.c: Likewise.
* testsuite/c-c++-common/asan/no-redundant-instrumentation-3.c: Likewise.
* testsuite/c-c++-common/asan/inc.c: Likewise.
From-SVN: r196008
2013-02-13 11:32:26 +01:00
|
|
|
{
|
|
|
|
if (src0.start != NULL_TREE
|
|
|
|
&& !has_mem_ref_been_instrumented (&src0, src0_len))
|
|
|
|
return false;
|
|
|
|
|
|
|
|
if (src1.start != NULL_TREE
|
|
|
|
&& !has_mem_ref_been_instrumented (&src1, src1_len))
|
|
|
|
return false;
|
|
|
|
|
|
|
|
if (dest.start != NULL_TREE
|
|
|
|
&& !has_mem_ref_been_instrumented (&dest, dest_len))
|
|
|
|
return false;
|
|
|
|
|
|
|
|
return true;
|
|
|
|
}
|
|
|
|
}
|
2016-02-04 12:50:40 +01:00
|
|
|
else if (is_gimple_call (stmt) && gimple_store_p (stmt))
|
|
|
|
{
|
|
|
|
asan_mem_ref r;
|
|
|
|
asan_mem_ref_init (&r, NULL, 1);
|
|
|
|
|
|
|
|
r.start = gimple_call_lhs (stmt);
|
|
|
|
r.access_size = int_size_in_bytes (TREE_TYPE (r.start));
|
|
|
|
return has_mem_ref_been_instrumented (&r);
|
|
|
|
}
|
|
|
|
|
[asan] Avoid instrumenting duplicated memory access in the same basic block
Like what Address Sanitizer does in LLVM, this patch avoids instrumented
duplicated memory accesses in the same basic blocks.
The approach taken is very conservative, to keep the pass simple, for
a start.
A memory access is considered to be a pair made of an expression tree
representing the beginning of the memory region that is accessed and
a the size of the access, in byte. For now that size is either 1, 2,
4, 8 or 16 bytes.
The patch builds a hash table of the memory accesses that have been
instrumented in the current basic block. Then it walks the gimple
statements of the current basic block. For each statement, it tests
if the memory regions it references have already been instrumented.
If not, the statement is instrumented and each memory references that
are actually instrumented are added to the hash table. When a memory
region is accessed (usually through builtin functions like memset),
then what gets added to the hash table is actually two memory
accesses: one for the beginning of the region, and the other for the
its end.
When the patch crosses a function call that is not a built-in function
that we ought to instrument, the hash table is cleared, because that
function call can possibly e.g free some memory that was instrumented.
Likewise, when a new basic block is visited, the hash table is
cleared. I guess we could be smarter than just unconditionally
clearing the hash table in this later case, but this is what asan@llvm
does, and for now, I thought starting in a conservative manner might
have some value.
The hash table is destroyed at the end of the pass.
Bootstrapped and tested against trunk on x86-64-unknown-linux-gnu.
gcc/
* Makefile.in (asan.o): Add new dependency on hash-table.h
* asan.c (struct asan_mem_ref, struct mem_ref_hasher): New types.
(asan_mem_ref_init, asan_mem_ref_get_end, get_mem_ref_hash_table)
(has_stmt_been_instrumented_p, empty_mem_ref_hash_table)
(free_mem_ref_resources, has_mem_ref_been_instrumented)
(has_stmt_been_instrumented_p, update_mem_ref_hash_table)
(get_mem_ref_of_assignment): New functions.
(get_mem_refs_of_builtin_call): Extract from
instrument_builtin_call and tweak a little bit to make it fit with
the new signature.
(instrument_builtin_call): Use the new
get_mem_refs_of_builtin_call. Use gimple_call_builtin_p instead
of is_gimple_builtin_call.
(instrument_derefs, instrument_mem_region_access): Insert the
instrumented memory reference into the hash table.
(maybe_instrument_assignment): Renamed instrument_assignment into
this, and change it to advance the iterator when instrumentation
actually happened and return true in that case. This makes it
homogeneous with maybe_instrument_assignment, and thus give a
chance to callers to be more 'regular'.
(transform_statements): Clear the memory reference hash table
whenever we enter a new BB, when we cross a function call, or when
we are done transforming statements. Use
maybe_instrument_assignment instead of instrumentation. No more
need to special case maybe_instrument_assignment and advance the
iterator after calling it; it's now handled just like
maybe_instrument_call. Update comment.
gcc/testsuite/
* c-c++-common/asan/no-redundant-instrumentation-1.c: New test.
* testsuite/c-c++-common/asan/no-redundant-instrumentation-2.c: Likewise.
* testsuite/c-c++-common/asan/no-redundant-instrumentation-3.c: Likewise.
* testsuite/c-c++-common/asan/inc.c: Likewise.
From-SVN: r196008
2013-02-13 11:32:26 +01:00
|
|
|
return false;
|
|
|
|
}
|
|
|
|
|
|
|
|
/* Insert a memory reference into the hash table. */
|
|
|
|
|
|
|
|
static void
|
2014-05-30 20:37:05 +02:00
|
|
|
update_mem_ref_hash_table (tree ref, HOST_WIDE_INT access_size)
|
[asan] Avoid instrumenting duplicated memory access in the same basic block
Like what Address Sanitizer does in LLVM, this patch avoids instrumented
duplicated memory accesses in the same basic blocks.
The approach taken is very conservative, to keep the pass simple, for
a start.
A memory access is considered to be a pair made of an expression tree
representing the beginning of the memory region that is accessed and
a the size of the access, in byte. For now that size is either 1, 2,
4, 8 or 16 bytes.
The patch builds a hash table of the memory accesses that have been
instrumented in the current basic block. Then it walks the gimple
statements of the current basic block. For each statement, it tests
if the memory regions it references have already been instrumented.
If not, the statement is instrumented and each memory references that
are actually instrumented are added to the hash table. When a memory
region is accessed (usually through builtin functions like memset),
then what gets added to the hash table is actually two memory
accesses: one for the beginning of the region, and the other for the
its end.
When the patch crosses a function call that is not a built-in function
that we ought to instrument, the hash table is cleared, because that
function call can possibly e.g free some memory that was instrumented.
Likewise, when a new basic block is visited, the hash table is
cleared. I guess we could be smarter than just unconditionally
clearing the hash table in this later case, but this is what asan@llvm
does, and for now, I thought starting in a conservative manner might
have some value.
The hash table is destroyed at the end of the pass.
Bootstrapped and tested against trunk on x86-64-unknown-linux-gnu.
gcc/
* Makefile.in (asan.o): Add new dependency on hash-table.h
* asan.c (struct asan_mem_ref, struct mem_ref_hasher): New types.
(asan_mem_ref_init, asan_mem_ref_get_end, get_mem_ref_hash_table)
(has_stmt_been_instrumented_p, empty_mem_ref_hash_table)
(free_mem_ref_resources, has_mem_ref_been_instrumented)
(has_stmt_been_instrumented_p, update_mem_ref_hash_table)
(get_mem_ref_of_assignment): New functions.
(get_mem_refs_of_builtin_call): Extract from
instrument_builtin_call and tweak a little bit to make it fit with
the new signature.
(instrument_builtin_call): Use the new
get_mem_refs_of_builtin_call. Use gimple_call_builtin_p instead
of is_gimple_builtin_call.
(instrument_derefs, instrument_mem_region_access): Insert the
instrumented memory reference into the hash table.
(maybe_instrument_assignment): Renamed instrument_assignment into
this, and change it to advance the iterator when instrumentation
actually happened and return true in that case. This makes it
homogeneous with maybe_instrument_assignment, and thus give a
chance to callers to be more 'regular'.
(transform_statements): Clear the memory reference hash table
whenever we enter a new BB, when we cross a function call, or when
we are done transforming statements. Use
maybe_instrument_assignment instead of instrumentation. No more
need to special case maybe_instrument_assignment and advance the
iterator after calling it; it's now handled just like
maybe_instrument_call. Update comment.
gcc/testsuite/
* c-c++-common/asan/no-redundant-instrumentation-1.c: New test.
* testsuite/c-c++-common/asan/no-redundant-instrumentation-2.c: Likewise.
* testsuite/c-c++-common/asan/no-redundant-instrumentation-3.c: Likewise.
* testsuite/c-c++-common/asan/inc.c: Likewise.
From-SVN: r196008
2013-02-13 11:32:26 +01:00
|
|
|
{
|
Remove a layer of indirection from hash_table
gcc/
* hash-table.h: Remove a layer of indirection from hash_table so that
it contains the hash table's data instead of a pointer to the data.
* alloc-pool.c, asan.c, attribs.c, bitmap.c, cfg.c,
config/arm/arm.c, config/i386/winnt.c, config/ia64/ia64.c,
config/mips/mips.c, config/sol2.c, coverage.c, cselib.c,
data-streamer-out.c, dse.c, dwarf2cfi.c, dwarf2out.c, except.c,
fold-const.c, gcse.c, ggc-common.c,
gimple-ssa-strength-reduction.c, gimplify.c,
graphite-clast-to-gimple.c, graphite-dependences.c,
graphite-htab.h, graphite.c, haifa-sched.c, ipa-devirt.c,
ipa-profile.c, ira-color.c, ira-costs.c, loop-invariant.c,
loop-iv.c, loop-unroll.c, lto-streamer-in.c, lto-streamer-out.c,
lto-streamer.c, lto-streamer.h, passes.c, plugin.c,
postreload-gcse.c, sese.c, statistics.c, store-motion.c,
trans-mem.c, tree-browser.c, tree-cfg.c, tree-complex.c,
tree-eh.c, tree-into-ssa.c, tree-parloops.c, tree-sra.c,
tree-ssa-ccp.c, tree-ssa-coalesce.c, tree-ssa-dom.c,
tree-ssa-live.c, tree-ssa-loop-im.c,
tree-ssa-loop-ivopts.c, tree-ssa-phiopt.c, tree-ssa-pre.c,
tree-ssa-reassoc.c, tree-ssa-sccvn.c, tree-ssa-strlen.c,
tree-ssa-structalias.c, tree-ssa-tail-merge.c,
tree-ssa-threadupdate.c, tree-ssa-uncprop.c,
tree-vect-data-refs.c, tree-vect-loop.c, tree-vectorizer.c,
tree-vectorizer.h, valtrack.c, valtrack.h, var-tracking.c,
vtable-verify.c, vtable-verify.h: Adjust.
gcc/c/
* c-decl.c: Adjust.
gcc/cp/
* class.c, semantics.c, tree.c, vtable-class-hierarchy.c:
Adjust.
gcc/java/
* jcf-io.c: Adjust.
gcc/lto/
* lto.c: Adjust.
gcc/objc/
* objc-act.c: Adjust.
From-SVN: r211936
2014-06-24 15:21:35 +02:00
|
|
|
hash_table<asan_mem_ref_hasher> *ht = get_mem_ref_hash_table ();
|
[asan] Avoid instrumenting duplicated memory access in the same basic block
Like what Address Sanitizer does in LLVM, this patch avoids instrumented
duplicated memory accesses in the same basic blocks.
The approach taken is very conservative, to keep the pass simple, for
a start.
A memory access is considered to be a pair made of an expression tree
representing the beginning of the memory region that is accessed and
a the size of the access, in byte. For now that size is either 1, 2,
4, 8 or 16 bytes.
The patch builds a hash table of the memory accesses that have been
instrumented in the current basic block. Then it walks the gimple
statements of the current basic block. For each statement, it tests
if the memory regions it references have already been instrumented.
If not, the statement is instrumented and each memory references that
are actually instrumented are added to the hash table. When a memory
region is accessed (usually through builtin functions like memset),
then what gets added to the hash table is actually two memory
accesses: one for the beginning of the region, and the other for the
its end.
When the patch crosses a function call that is not a built-in function
that we ought to instrument, the hash table is cleared, because that
function call can possibly e.g free some memory that was instrumented.
Likewise, when a new basic block is visited, the hash table is
cleared. I guess we could be smarter than just unconditionally
clearing the hash table in this later case, but this is what asan@llvm
does, and for now, I thought starting in a conservative manner might
have some value.
The hash table is destroyed at the end of the pass.
Bootstrapped and tested against trunk on x86-64-unknown-linux-gnu.
gcc/
* Makefile.in (asan.o): Add new dependency on hash-table.h
* asan.c (struct asan_mem_ref, struct mem_ref_hasher): New types.
(asan_mem_ref_init, asan_mem_ref_get_end, get_mem_ref_hash_table)
(has_stmt_been_instrumented_p, empty_mem_ref_hash_table)
(free_mem_ref_resources, has_mem_ref_been_instrumented)
(has_stmt_been_instrumented_p, update_mem_ref_hash_table)
(get_mem_ref_of_assignment): New functions.
(get_mem_refs_of_builtin_call): Extract from
instrument_builtin_call and tweak a little bit to make it fit with
the new signature.
(instrument_builtin_call): Use the new
get_mem_refs_of_builtin_call. Use gimple_call_builtin_p instead
of is_gimple_builtin_call.
(instrument_derefs, instrument_mem_region_access): Insert the
instrumented memory reference into the hash table.
(maybe_instrument_assignment): Renamed instrument_assignment into
this, and change it to advance the iterator when instrumentation
actually happened and return true in that case. This makes it
homogeneous with maybe_instrument_assignment, and thus give a
chance to callers to be more 'regular'.
(transform_statements): Clear the memory reference hash table
whenever we enter a new BB, when we cross a function call, or when
we are done transforming statements. Use
maybe_instrument_assignment instead of instrumentation. No more
need to special case maybe_instrument_assignment and advance the
iterator after calling it; it's now handled just like
maybe_instrument_call. Update comment.
gcc/testsuite/
* c-c++-common/asan/no-redundant-instrumentation-1.c: New test.
* testsuite/c-c++-common/asan/no-redundant-instrumentation-2.c: Likewise.
* testsuite/c-c++-common/asan/no-redundant-instrumentation-3.c: Likewise.
* testsuite/c-c++-common/asan/inc.c: Likewise.
From-SVN: r196008
2013-02-13 11:32:26 +01:00
|
|
|
|
|
|
|
asan_mem_ref r;
|
|
|
|
asan_mem_ref_init (&r, ref, access_size);
|
|
|
|
|
Remove a layer of indirection from hash_table
gcc/
* hash-table.h: Remove a layer of indirection from hash_table so that
it contains the hash table's data instead of a pointer to the data.
* alloc-pool.c, asan.c, attribs.c, bitmap.c, cfg.c,
config/arm/arm.c, config/i386/winnt.c, config/ia64/ia64.c,
config/mips/mips.c, config/sol2.c, coverage.c, cselib.c,
data-streamer-out.c, dse.c, dwarf2cfi.c, dwarf2out.c, except.c,
fold-const.c, gcse.c, ggc-common.c,
gimple-ssa-strength-reduction.c, gimplify.c,
graphite-clast-to-gimple.c, graphite-dependences.c,
graphite-htab.h, graphite.c, haifa-sched.c, ipa-devirt.c,
ipa-profile.c, ira-color.c, ira-costs.c, loop-invariant.c,
loop-iv.c, loop-unroll.c, lto-streamer-in.c, lto-streamer-out.c,
lto-streamer.c, lto-streamer.h, passes.c, plugin.c,
postreload-gcse.c, sese.c, statistics.c, store-motion.c,
trans-mem.c, tree-browser.c, tree-cfg.c, tree-complex.c,
tree-eh.c, tree-into-ssa.c, tree-parloops.c, tree-sra.c,
tree-ssa-ccp.c, tree-ssa-coalesce.c, tree-ssa-dom.c,
tree-ssa-live.c, tree-ssa-loop-im.c,
tree-ssa-loop-ivopts.c, tree-ssa-phiopt.c, tree-ssa-pre.c,
tree-ssa-reassoc.c, tree-ssa-sccvn.c, tree-ssa-strlen.c,
tree-ssa-structalias.c, tree-ssa-tail-merge.c,
tree-ssa-threadupdate.c, tree-ssa-uncprop.c,
tree-vect-data-refs.c, tree-vect-loop.c, tree-vectorizer.c,
tree-vectorizer.h, valtrack.c, valtrack.h, var-tracking.c,
vtable-verify.c, vtable-verify.h: Adjust.
gcc/c/
* c-decl.c: Adjust.
gcc/cp/
* class.c, semantics.c, tree.c, vtable-class-hierarchy.c:
Adjust.
gcc/java/
* jcf-io.c: Adjust.
gcc/lto/
* lto.c: Adjust.
gcc/objc/
* objc-act.c: Adjust.
From-SVN: r211936
2014-06-24 15:21:35 +02:00
|
|
|
asan_mem_ref **slot = ht->find_slot (&r, INSERT);
|
2014-10-28 13:36:54 +01:00
|
|
|
if (*slot == NULL || (*slot)->access_size < access_size)
|
[asan] Avoid instrumenting duplicated memory access in the same basic block
Like what Address Sanitizer does in LLVM, this patch avoids instrumented
duplicated memory accesses in the same basic blocks.
The approach taken is very conservative, to keep the pass simple, for
a start.
A memory access is considered to be a pair made of an expression tree
representing the beginning of the memory region that is accessed and
a the size of the access, in byte. For now that size is either 1, 2,
4, 8 or 16 bytes.
The patch builds a hash table of the memory accesses that have been
instrumented in the current basic block. Then it walks the gimple
statements of the current basic block. For each statement, it tests
if the memory regions it references have already been instrumented.
If not, the statement is instrumented and each memory references that
are actually instrumented are added to the hash table. When a memory
region is accessed (usually through builtin functions like memset),
then what gets added to the hash table is actually two memory
accesses: one for the beginning of the region, and the other for the
its end.
When the patch crosses a function call that is not a built-in function
that we ought to instrument, the hash table is cleared, because that
function call can possibly e.g free some memory that was instrumented.
Likewise, when a new basic block is visited, the hash table is
cleared. I guess we could be smarter than just unconditionally
clearing the hash table in this later case, but this is what asan@llvm
does, and for now, I thought starting in a conservative manner might
have some value.
The hash table is destroyed at the end of the pass.
Bootstrapped and tested against trunk on x86-64-unknown-linux-gnu.
gcc/
* Makefile.in (asan.o): Add new dependency on hash-table.h
* asan.c (struct asan_mem_ref, struct mem_ref_hasher): New types.
(asan_mem_ref_init, asan_mem_ref_get_end, get_mem_ref_hash_table)
(has_stmt_been_instrumented_p, empty_mem_ref_hash_table)
(free_mem_ref_resources, has_mem_ref_been_instrumented)
(has_stmt_been_instrumented_p, update_mem_ref_hash_table)
(get_mem_ref_of_assignment): New functions.
(get_mem_refs_of_builtin_call): Extract from
instrument_builtin_call and tweak a little bit to make it fit with
the new signature.
(instrument_builtin_call): Use the new
get_mem_refs_of_builtin_call. Use gimple_call_builtin_p instead
of is_gimple_builtin_call.
(instrument_derefs, instrument_mem_region_access): Insert the
instrumented memory reference into the hash table.
(maybe_instrument_assignment): Renamed instrument_assignment into
this, and change it to advance the iterator when instrumentation
actually happened and return true in that case. This makes it
homogeneous with maybe_instrument_assignment, and thus give a
chance to callers to be more 'regular'.
(transform_statements): Clear the memory reference hash table
whenever we enter a new BB, when we cross a function call, or when
we are done transforming statements. Use
maybe_instrument_assignment instead of instrumentation. No more
need to special case maybe_instrument_assignment and advance the
iterator after calling it; it's now handled just like
maybe_instrument_call. Update comment.
gcc/testsuite/
* c-c++-common/asan/no-redundant-instrumentation-1.c: New test.
* testsuite/c-c++-common/asan/no-redundant-instrumentation-2.c: Likewise.
* testsuite/c-c++-common/asan/no-redundant-instrumentation-3.c: Likewise.
* testsuite/c-c++-common/asan/inc.c: Likewise.
From-SVN: r196008
2013-02-13 11:32:26 +01:00
|
|
|
*slot = asan_mem_ref_new (ref, access_size);
|
|
|
|
}
|
|
|
|
|
2012-12-10 13:14:36 +01:00
|
|
|
/* Initialize shadow_ptr_types array. */
|
|
|
|
|
|
|
|
static void
|
|
|
|
asan_init_shadow_ptr_types (void)
|
|
|
|
{
|
|
|
|
asan_shadow_set = new_alias_set ();
|
2016-11-07 11:23:38 +01:00
|
|
|
tree types[3] = { signed_char_type_node, short_integer_type_node,
|
|
|
|
integer_type_node };
|
|
|
|
|
|
|
|
for (unsigned i = 0; i < 3; i++)
|
|
|
|
{
|
|
|
|
shadow_ptr_types[i] = build_distinct_type_copy (types[i]);
|
|
|
|
TYPE_ALIAS_SET (shadow_ptr_types[i]) = asan_shadow_set;
|
|
|
|
shadow_ptr_types[i] = build_pointer_type (shadow_ptr_types[i]);
|
|
|
|
}
|
|
|
|
|
2012-12-10 13:14:36 +01:00
|
|
|
initialize_sanitizer_builtins ();
|
|
|
|
}
|
|
|
|
|
2013-08-05 07:16:29 +02:00
|
|
|
/* Create ADDR_EXPR of STRING_CST with the PP pretty printer text. */
|
Implement protection of global variables
This patch implements the protection of global variables. See the
comments appended to the beginning of the asan.c file.
* varasm.c: Include asan.h.
(assemble_noswitch_variable): Grow size by asan_red_zone_size
if decl is asan protected.
(place_block_symbol): Likewise.
(assemble_variable): If decl is asan protected, increase
DECL_ALIGN if needed, and for decls emitted using
assemble_variable_contents append padding zeros after it.
* Makefile.in (varasm.o): Depend on asan.h.
* asan.c: Include output.h.
(asan_pp, asan_pp_initialized, asan_ctor_statements): New variables.
(asan_pp_initialize, asan_pp_string): New functions.
(asan_emit_stack_protection): Use asan_pp{,_initialized}
instead of local pp{,_initialized} vars, use asan_pp_initialize
and asan_pp_string helpers.
(asan_needs_local_alias, asan_protect_global,
asan_global_struct, asan_add_global): New functions.
(asan_finish_file): Protect global vars that can be protected. Use
asan_ctor_statements instead of ctor_statements
* asan.h (asan_protect_global): New prototype.
(asan_red_zone_size): New inline function.
Co-Authored-By: Wei Mi <wmi@google.com>
From-SVN: r193437
2012-11-12 16:52:42 +01:00
|
|
|
|
|
|
|
/* Turn the text accumulated in PP into a read-only STRING_CST and
   return an ADDR_EXPR (of type shadow_ptr_types[0]) pointing at it,
   suitable for embedding into asan metadata structures.  */

static tree
asan_pp_string (pretty_printer *pp)
{
  const char *text = pp_formatted_text (pp);
  size_t textlen = strlen (text);
  /* Include the terminating NUL in the string constant.  */
  tree str = build_string (textlen + 1, text);
  tree elt_type = TREE_TYPE (shadow_ptr_types[0]);
  TREE_TYPE (str)
    = build_array_type (elt_type, build_index_type (size_int (textlen)));
  TREE_READONLY (str) = 1;
  TREE_STATIC (str) = 1;
  return build1 (ADDR_EXPR, shadow_ptr_types[0], str);
}
|
|
|
|
|
Implement protection of stack variables
This patch implements the protection of stack variables.
It lays out stack variables as well as the different red zones,
emits some prologue code to populate the shadow memory as to poison
(mark as non-accessible) the regions of the red zones and mark the
regions of stack variables as accessible, and emit some epilogue code
to un-poison (mark as accessible) the regions of red zones right
before the function exits.
* Makefile.in (asan.o): Depend on $(EXPR_H) $(OPTABS_H).
(cfgexpand.o): Depend on asan.h.
* asan.c: Include expr.h and optabs.h.
(asan_shadow_set): New variable.
(asan_shadow_cst, asan_emit_stack_protection): New functions.
(asan_init_shadow_ptr_types): Initialize also asan_shadow_set.
* cfgexpand.c: Include asan.h. Define HOST_WIDE_INT heap vector.
(partition_stack_vars): If i is large alignment and j small
alignment or vice versa, break out of the loop instead of continue,
and put the test earlier. If flag_asan, break out of the loop
if for small alignment size is different.
(struct stack_vars_data): New type.
(expand_stack_vars): Add DATA argument. Change PRED type to
function taking size_t argument instead of tree. Adjust pred
calls. Fill DATA in and add needed padding in between variables
if -faddress-sanitizer.
(defer_stack_allocation): Defer everything for flag_asan.
(stack_protect_decl_phase_1, stack_protect_decl_phase_2): Take
size_t index into stack_vars array instead of the decl directly.
(asan_decl_phase_3): New function.
(expand_used_vars): Return var destruction sequence. Adjust
expand_stack_vars calls, add another one for flag_asan. Call
asan_emit_stack_protection if expand_stack_vars added anything
to the vectors.
(expand_gimple_basic_block): Add disable_tail_calls argument.
(gimple_expand_cfg): Pass true to it if expand_used_vars returned
non-NULL. Emit the sequence returned by expand_used_vars after
return_label.
* asan.h (asan_emit_stack_protection): New prototype.
(asan_shadow_set): New decl.
(ASAN_RED_ZONE_SIZE, ASAN_STACK_MAGIC_LEFT, ASAN_STACK_MAGIC_MIDDLE,
ASAN_STACK_MAGIC_RIGHT, ASAN_STACK_FRAME_MAGIC): Define.
(asan_protect_stack_decl): New inline.
* toplev.c (process_options): Also disable -faddress-sanitizer on
!FRAME_GROWS_DOWNWARDS targets.
From-SVN: r193436
2012-11-12 16:52:26 +01:00
|
|
|
/* Return a CONST_INT representing 4 subsequent shadow memory bytes. */
|
|
|
|
|
|
|
|
static rtx
|
|
|
|
asan_shadow_cst (unsigned char shadow_bytes[4])
|
|
|
|
{
|
|
|
|
int i;
|
|
|
|
unsigned HOST_WIDE_INT val = 0;
|
|
|
|
gcc_assert (WORDS_BIG_ENDIAN == BYTES_BIG_ENDIAN);
|
|
|
|
for (i = 0; i < 4; i++)
|
|
|
|
val |= (unsigned HOST_WIDE_INT) shadow_bytes[BYTES_BIG_ENDIAN ? 3 - i : i]
|
|
|
|
<< (BITS_PER_UNIT * i);
|
2013-09-09 11:23:54 +02:00
|
|
|
return gen_int_mode (val, SImode);
|
Implement protection of stack variables
This patch implements the protection of stack variables.
It lays out stack variables as well as the different red zones,
emits some prologue code to populate the shadow memory as to poison
(mark as non-accessible) the regions of the red zones and mark the
regions of stack variables as accessible, and emit some epilogue code
to un-poison (mark as accessible) the regions of red zones right
before the function exits.
* Makefile.in (asan.o): Depend on $(EXPR_H) $(OPTABS_H).
(cfgexpand.o): Depend on asan.h.
* asan.c: Include expr.h and optabs.h.
(asan_shadow_set): New variable.
(asan_shadow_cst, asan_emit_stack_protection): New functions.
(asan_init_shadow_ptr_types): Initialize also asan_shadow_set.
* cfgexpand.c: Include asan.h. Define HOST_WIDE_INT heap vector.
(partition_stack_vars): If i is large alignment and j small
alignment or vice versa, break out of the loop instead of continue,
and put the test earlier. If flag_asan, break out of the loop
if for small alignment size is different.
(struct stack_vars_data): New type.
(expand_stack_vars): Add DATA argument. Change PRED type to
function taking size_t argument instead of tree. Adjust pred
calls. Fill DATA in and add needed padding in between variables
if -faddress-sanitizer.
(defer_stack_allocation): Defer everything for flag_asan.
(stack_protect_decl_phase_1, stack_protect_decl_phase_2): Take
size_t index into stack_vars array instead of the decl directly.
(asan_decl_phase_3): New function.
(expand_used_vars): Return var destruction sequence. Adjust
expand_stack_vars calls, add another one for flag_asan. Call
asan_emit_stack_protection if expand_stack_vars added anything
to the vectors.
(expand_gimple_basic_block): Add disable_tail_calls argument.
(gimple_expand_cfg): Pass true to it if expand_used_vars returned
non-NULL. Emit the sequence returned by expand_used_vars after
return_label.
* asan.h (asan_emit_stack_protection): New prototype.
(asan_shadow_set): New decl.
(ASAN_RED_ZONE_SIZE, ASAN_STACK_MAGIC_LEFT, ASAN_STACK_MAGIC_MIDDLE,
ASAN_STACK_MAGIC_RIGHT, ASAN_STACK_FRAME_MAGIC): Define.
(asan_protect_stack_decl): New inline.
* toplev.c (process_options): Also disable -faddress-sanitizer on
!FRAME_GROWS_DOWNWARDS targets.
From-SVN: r193436
2012-11-12 16:52:26 +01:00
|
|
|
}
|
|
|
|
|
2013-01-08 18:01:58 +01:00
|
|
|
/* Clear shadow memory at SHADOW_MEM, LEN bytes.  Can't call a library call here
   though.  */

static void
asan_clear_shadow (rtx shadow_mem, HOST_WIDE_INT len)
{
  rtx_insn *insn, *insns, *jump;
  rtx_code_label *top_label;
  rtx end, addr, tmp;

  /* First try to expand the clearing inline via clear_storage,
     collecting the emitted insns in a sequence so they can be
     inspected before being emitted for real.  */
  start_sequence ();
  clear_storage (shadow_mem, GEN_INT (len), BLOCK_OP_NORMAL);
  insns = get_insns ();
  end_sequence ();
  /* If clear_storage fell back to emitting a call (e.g. to memset),
     the sequence is unusable here -- see the function comment.  */
  for (insn = insns; insn; insn = NEXT_INSN (insn))
    if (CALL_P (insn))
      break;
  if (insn == NULL_RTX)
    {
      /* No call insn found: the inline expansion is fine, emit it.  */
      emit_insn (insns);
      return;
    }

  /* Otherwise emit an explicit loop storing SImode zeros; LEN must be
     a multiple of 4 for that to cover the range exactly.  */
  gcc_assert ((len & 3) == 0);
  top_label = gen_label_rtx ();
  /* Iterate with a pseudo holding the current shadow address.  */
  addr = copy_to_mode_reg (Pmode, XEXP (shadow_mem, 0));
  shadow_mem = adjust_automodify_address (shadow_mem, SImode, addr, 0);
  end = force_reg (Pmode, plus_constant (Pmode, addr, len));
  emit_label (top_label);

  emit_move_insn (shadow_mem, const0_rtx);
  /* Advance ADDR by 4 bytes and loop back while ADDR < END.  */
  tmp = expand_simple_binop (Pmode, PLUS, addr, gen_int_mode (4, Pmode), addr,
			     true, OPTAB_LIB_WIDEN);
  if (tmp != addr)
    emit_move_insn (addr, tmp);
  emit_cmp_and_jump_insns (addr, end, LT, NULL_RTX, Pmode, true, top_label);
  jump = get_last_insn ();
  gcc_assert (JUMP_P (jump));
  /* Predict the backward branch as taken 80% of the time.  */
  add_int_reg_note (jump, REG_BR_PROB, REG_BR_PROB_BASE * 80 / 100);
}
|
|
|
|
|
2013-11-04 22:33:31 +01:00
|
|
|
void
|
|
|
|
asan_function_start (void)
|
|
|
|
{
|
|
|
|
section *fnsec = function_section (current_function_decl);
|
|
|
|
switch_to_section (fnsec);
|
|
|
|
ASM_OUTPUT_DEBUG_LABEL (asm_out_file, "LASANPC",
|
2014-08-11 08:12:12 +02:00
|
|
|
current_function_funcdef_no);
|
2013-11-04 22:33:31 +01:00
|
|
|
}
|
|
|
|
|
2016-11-07 11:23:38 +01:00
|
|
|
/* Return number of shadow bytes that are occupied by a local variable
|
|
|
|
of SIZE bytes. */
|
|
|
|
|
|
|
|
static unsigned HOST_WIDE_INT
|
|
|
|
shadow_mem_size (unsigned HOST_WIDE_INT size)
|
|
|
|
{
|
|
|
|
return ROUND_UP (size, ASAN_SHADOW_GRANULARITY) / ASAN_SHADOW_GRANULARITY;
|
|
|
|
}
|
|
|
|
|
Implement protection of stack variables
This patch implements the protection of stack variables.
It lays out stack variables as well as the different red zones,
emits some prologue code to populate the shadow memory as to poison
(mark as non-accessible) the regions of the red zones and mark the
regions of stack variables as accessible, and emit some epilogue code
to un-poison (mark as accessible) the regions of red zones right
before the function exits.
* Makefile.in (asan.o): Depend on $(EXPR_H) $(OPTABS_H).
(cfgexpand.o): Depend on asan.h.
* asan.c: Include expr.h and optabs.h.
(asan_shadow_set): New variable.
(asan_shadow_cst, asan_emit_stack_protection): New functions.
(asan_init_shadow_ptr_types): Initialize also asan_shadow_set.
* cfgexpand.c: Include asan.h. Define HOST_WIDE_INT heap vector.
(partition_stack_vars): If i is large alignment and j small
alignment or vice versa, break out of the loop instead of continue,
and put the test earlier. If flag_asan, break out of the loop
if for small alignment size is different.
(struct stack_vars_data): New type.
(expand_stack_vars): Add DATA argument. Change PRED type to
function taking size_t argument instead of tree. Adjust pred
calls. Fill DATA in and add needed padding in between variables
if -faddress-sanitizer.
(defer_stack_allocation): Defer everything for flag_asan.
(stack_protect_decl_phase_1, stack_protect_decl_phase_2): Take
size_t index into stack_vars array instead of the decl directly.
(asan_decl_phase_3): New function.
(expand_used_vars): Return var destruction sequence. Adjust
expand_stack_vars calls, add another one for flag_asan. Call
asan_emit_stack_protection if expand_stack_vars added anything
to the vectors.
(expand_gimple_basic_block): Add disable_tail_calls argument.
(gimple_expand_cfg): Pass true to it if expand_used_vars returned
non-NULL. Emit the sequence returned by expand_used_vars after
return_label.
* asan.h (asan_emit_stack_protection): New prototype.
(asan_shadow_set): New decl.
(ASAN_RED_ZONE_SIZE, ASAN_STACK_MAGIC_LEFT, ASAN_STACK_MAGIC_MIDDLE,
ASAN_STACK_MAGIC_RIGHT, ASAN_STACK_FRAME_MAGIC): Define.
(asan_protect_stack_decl): New inline.
* toplev.c (process_options): Also disable -faddress-sanitizer on
!FRAME_GROWS_DOWNWARDS targets.
From-SVN: r193436
2012-11-12 16:52:26 +01:00
|
|
|
/* Insert code to protect stack vars. The prologue sequence should be emitted
|
|
|
|
directly, epilogue sequence returned. BASE is the register holding the
|
|
|
|
stack base, against which OFFSETS array offsets are relative to, OFFSETS
|
|
|
|
array contains pairs of offsets in reverse order, always the end offset
|
|
|
|
of some gap that needs protection followed by starting offset,
|
|
|
|
and DECLS is an array of representative decls for each var partition.
|
|
|
|
LENGTH is the length of the OFFSETS array, DECLS array is LENGTH / 2 - 1
|
|
|
|
elements long (OFFSETS include gap before the first variable as well
|
2013-11-28 09:18:59 +01:00
|
|
|
as gaps after each stack variable). PBASE is, if non-NULL, some pseudo
|
|
|
|
register which stack vars DECL_RTLs are based on. Either BASE should be
|
|
|
|
assigned to PBASE, when not doing use after return protection, or
|
|
|
|
corresponding address based on __asan_stack_malloc* return value. */
|
Implement protection of stack variables
This patch implements the protection of stack variables.
It lays out stack variables as well as the different red zones,
emits some prologue code to populate the shadow memory as to poison
(mark as non-accessible) the regions of the red zones and mark the
regions of stack variables as accessible, and emit some epilogue code
to un-poison (mark as accessible) the regions of red zones right
before the function exits.
* Makefile.in (asan.o): Depend on $(EXPR_H) $(OPTABS_H).
(cfgexpand.o): Depend on asan.h.
* asan.c: Include expr.h and optabs.h.
(asan_shadow_set): New variable.
(asan_shadow_cst, asan_emit_stack_protection): New functions.
(asan_init_shadow_ptr_types): Initialize also asan_shadow_set.
* cfgexpand.c: Include asan.h. Define HOST_WIDE_INT heap vector.
(partition_stack_vars): If i is large alignment and j small
alignment or vice versa, break out of the loop instead of continue,
and put the test earlier. If flag_asan, break out of the loop
if for small alignment size is different.
(struct stack_vars_data): New type.
(expand_stack_vars): Add DATA argument. Change PRED type to
function taking size_t argument instead of tree. Adjust pred
calls. Fill DATA in and add needed padding in between variables
if -faddress-sanitizer.
(defer_stack_allocation): Defer everything for flag_asan.
(stack_protect_decl_phase_1, stack_protect_decl_phase_2): Take
size_t index into stack_vars array instead of the decl directly.
(asan_decl_phase_3): New function.
(expand_used_vars): Return var destruction sequence. Adjust
expand_stack_vars calls, add another one for flag_asan. Call
asan_emit_stack_protection if expand_stack_vars added anything
to the vectors.
(expand_gimple_basic_block): Add disable_tail_calls argument.
(gimple_expand_cfg): Pass true to it if expand_used_vars returned
non-NULL. Emit the sequence returned by expand_used_vars after
return_label.
* asan.h (asan_emit_stack_protection): New prototype.
(asan_shadow_set): New decl.
(ASAN_RED_ZONE_SIZE, ASAN_STACK_MAGIC_LEFT, ASAN_STACK_MAGIC_MIDDLE,
ASAN_STACK_MAGIC_RIGHT, ASAN_STACK_FRAME_MAGIC): Define.
(asan_protect_stack_decl): New inline.
* toplev.c (process_options): Also disable -faddress-sanitizer on
!FRAME_GROWS_DOWNWARDS targets.
From-SVN: r193436
2012-11-12 16:52:26 +01:00
|
|
|
|
2014-08-19 21:48:08 +02:00
|
|
|
rtx_insn *
|
2013-11-28 09:18:59 +01:00
|
|
|
asan_emit_stack_protection (rtx base, rtx pbase, unsigned int alignb,
|
|
|
|
HOST_WIDE_INT *offsets, tree *decls, int length)
|
Implement protection of stack variables
This patch implements the protection of stack variables.
It lays out stack variables as well as the different red zones,
emits some prologue code to populate the shadow memory as to poison
(mark as non-accessible) the regions of the red zones and mark the
regions of stack variables as accessible, and emit some epilogue code
to un-poison (mark as accessible) the regions of red zones right
before the function exits.
* Makefile.in (asan.o): Depend on $(EXPR_H) $(OPTABS_H).
(cfgexpand.o): Depend on asan.h.
* asan.c: Include expr.h and optabs.h.
(asan_shadow_set): New variable.
(asan_shadow_cst, asan_emit_stack_protection): New functions.
(asan_init_shadow_ptr_types): Initialize also asan_shadow_set.
* cfgexpand.c: Include asan.h. Define HOST_WIDE_INT heap vector.
(partition_stack_vars): If i is large alignment and j small
alignment or vice versa, break out of the loop instead of continue,
and put the test earlier. If flag_asan, break out of the loop
if for small alignment size is different.
(struct stack_vars_data): New type.
(expand_stack_vars): Add DATA argument. Change PRED type to
function taking size_t argument instead of tree. Adjust pred
calls. Fill DATA in and add needed padding in between variables
if -faddress-sanitizer.
(defer_stack_allocation): Defer everything for flag_asan.
(stack_protect_decl_phase_1, stack_protect_decl_phase_2): Take
size_t index into stack_vars array instead of the decl directly.
(asan_decl_phase_3): New function.
(expand_used_vars): Return var destruction sequence. Adjust
expand_stack_vars calls, add another one for flag_asan. Call
asan_emit_stack_protection if expand_stack_vars added anything
to the vectors.
(expand_gimple_basic_block): Add disable_tail_calls argument.
(gimple_expand_cfg): Pass true to it if expand_used_vars returned
non-NULL. Emit the sequence returned by expand_used_vars after
return_label.
* asan.h (asan_emit_stack_protection): New prototype.
(asan_shadow_set): New decl.
(ASAN_RED_ZONE_SIZE, ASAN_STACK_MAGIC_LEFT, ASAN_STACK_MAGIC_MIDDLE,
ASAN_STACK_MAGIC_RIGHT, ASAN_STACK_FRAME_MAGIC): Define.
(asan_protect_stack_decl): New inline.
* toplev.c (process_options): Also disable -faddress-sanitizer on
!FRAME_GROWS_DOWNWARDS targets.
From-SVN: r193436
2012-11-12 16:52:26 +01:00
|
|
|
{
|
use rtx_code_label more
gcc/
* asan.c, cfgexpand.c, config/alpha/alpha.md, config/arm/arm.c,
config/epiphany/epiphany.md, config/h8300/h8300.c, config/i386/i386.md,
config/m32r/m32r.c, config/mcore/mcore.md, config/mips/mips.c,
config/mips/mips.md, config/nios2/nios2.c, config/pa/pa.c,
config/s390/s390.c, config/s390/s390.md, config/sh/sh-mem.cc,
config/sh/sh.c, config/sparc/sparc.c, dojump.c, function.c, optabs.c,
stmt.c: Assign the result of gen_label_rtx to rtx_code_label * instead
of rtx.
From-SVN: r214841
2014-09-03 02:52:11 +02:00
|
|
|
rtx shadow_base, shadow_mem, ret, mem, orig_base;
|
|
|
|
rtx_code_label *lab;
|
2014-08-19 21:48:08 +02:00
|
|
|
rtx_insn *insns;
|
2017-01-09 21:08:49 +01:00
|
|
|
char buf[32];
|
Implement protection of stack variables
This patch implements the protection of stack variables.
It lays out stack variables as well as the different red zones,
emits some prologue code to populate the shadow memory as to poison
(mark as non-accessible) the regions of the red zones and mark the
regions of stack variables as accessible, and emit some epilogue code
to un-poison (mark as accessible) the regions of red zones right
before the function exits.
* Makefile.in (asan.o): Depend on $(EXPR_H) $(OPTABS_H).
(cfgexpand.o): Depend on asan.h.
* asan.c: Include expr.h and optabs.h.
(asan_shadow_set): New variable.
(asan_shadow_cst, asan_emit_stack_protection): New functions.
(asan_init_shadow_ptr_types): Initialize also asan_shadow_set.
* cfgexpand.c: Include asan.h. Define HOST_WIDE_INT heap vector.
(partition_stack_vars): If i is large alignment and j small
alignment or vice versa, break out of the loop instead of continue,
and put the test earlier. If flag_asan, break out of the loop
if for small alignment size is different.
(struct stack_vars_data): New type.
(expand_stack_vars): Add DATA argument. Change PRED type to
function taking size_t argument instead of tree. Adjust pred
calls. Fill DATA in and add needed padding in between variables
if -faddress-sanitizer.
(defer_stack_allocation): Defer everything for flag_asan.
(stack_protect_decl_phase_1, stack_protect_decl_phase_2): Take
size_t index into stack_vars array instead of the decl directly.
(asan_decl_phase_3): New function.
(expand_used_vars): Return var destruction sequence. Adjust
expand_stack_vars calls, add another one for flag_asan. Call
asan_emit_stack_protection if expand_stack_vars added anything
to the vectors.
(expand_gimple_basic_block): Add disable_tail_calls argument.
(gimple_expand_cfg): Pass true to it if expand_used_vars returned
non-NULL. Emit the sequence returned by expand_used_vars after
return_label.
* asan.h (asan_emit_stack_protection): New prototype.
(asan_shadow_set): New decl.
(ASAN_RED_ZONE_SIZE, ASAN_STACK_MAGIC_LEFT, ASAN_STACK_MAGIC_MIDDLE,
ASAN_STACK_MAGIC_RIGHT, ASAN_STACK_FRAME_MAGIC): Define.
(asan_protect_stack_decl): New inline.
* toplev.c (process_options): Also disable -faddress-sanitizer on
!FRAME_GROWS_DOWNWARDS targets.
From-SVN: r193436
2012-11-12 16:52:26 +01:00
|
|
|
unsigned char shadow_bytes[4];
|
2013-11-28 09:18:59 +01:00
|
|
|
HOST_WIDE_INT base_offset = offsets[length - 1];
|
|
|
|
HOST_WIDE_INT base_align_bias = 0, offset, prev_offset;
|
|
|
|
HOST_WIDE_INT asan_frame_size = offsets[0] - base_offset;
|
2016-11-07 11:23:38 +01:00
|
|
|
HOST_WIDE_INT last_offset;
|
Implement protection of stack variables
This patch implements the protection of stack variables.
It lays out stack variables as well as the different red zones,
emits some prologue code to populate the shadow memory as to poison
(mark as non-accessible) the regions of the red zones and mark the
regions of stack variables as accessible, and emit some epilogue code
to un-poison (mark as accessible) the regions of red zones right
before the function exits.
* Makefile.in (asan.o): Depend on $(EXPR_H) $(OPTABS_H).
(cfgexpand.o): Depend on asan.h.
* asan.c: Include expr.h and optabs.h.
(asan_shadow_set): New variable.
(asan_shadow_cst, asan_emit_stack_protection): New functions.
(asan_init_shadow_ptr_types): Initialize also asan_shadow_set.
* cfgexpand.c: Include asan.h. Define HOST_WIDE_INT heap vector.
(partition_stack_vars): If i is large alignment and j small
alignment or vice versa, break out of the loop instead of continue,
and put the test earlier. If flag_asan, break out of the loop
if for small alignment size is different.
(struct stack_vars_data): New type.
(expand_stack_vars): Add DATA argument. Change PRED type to
function taking size_t argument instead of tree. Adjust pred
calls. Fill DATA in and add needed padding in between variables
if -faddress-sanitizer.
(defer_stack_allocation): Defer everything for flag_asan.
(stack_protect_decl_phase_1, stack_protect_decl_phase_2): Take
size_t index into stack_vars array instead of the decl directly.
(asan_decl_phase_3): New function.
(expand_used_vars): Return var destruction sequence. Adjust
expand_stack_vars calls, add another one for flag_asan. Call
asan_emit_stack_protection if expand_stack_vars added anything
to the vectors.
(expand_gimple_basic_block): Add disable_tail_calls argument.
(gimple_expand_cfg): Pass true to it if expand_used_vars returned
non-NULL. Emit the sequence returned by expand_used_vars after
return_label.
* asan.h (asan_emit_stack_protection): New prototype.
(asan_shadow_set): New decl.
(ASAN_RED_ZONE_SIZE, ASAN_STACK_MAGIC_LEFT, ASAN_STACK_MAGIC_MIDDLE,
ASAN_STACK_MAGIC_RIGHT, ASAN_STACK_FRAME_MAGIC): Define.
(asan_protect_stack_decl): New inline.
* toplev.c (process_options): Also disable -faddress-sanitizer on
!FRAME_GROWS_DOWNWARDS targets.
From-SVN: r193436
2012-11-12 16:52:26 +01:00
|
|
|
int l;
|
|
|
|
unsigned char cur_shadow_byte = ASAN_STACK_MAGIC_LEFT;
|
2013-11-04 22:33:31 +01:00
|
|
|
tree str_cst, decl, id;
|
2013-11-28 09:18:59 +01:00
|
|
|
int use_after_return_class = -1;
|
Implement protection of stack variables
This patch implements the protection of stack variables.
It lays out stack variables as well as the different red zones,
emits some prologue code to populate the shadow memory as to poison
(mark as non-accessible) the regions of the red zones and mark the
regions of stack variables as accessible, and emit some epilogue code
to un-poison (mark as accessible) the regions of red zones right
before the function exits.
* Makefile.in (asan.o): Depend on $(EXPR_H) $(OPTABS_H).
(cfgexpand.o): Depend on asan.h.
* asan.c: Include expr.h and optabs.h.
(asan_shadow_set): New variable.
(asan_shadow_cst, asan_emit_stack_protection): New functions.
(asan_init_shadow_ptr_types): Initialize also asan_shadow_set.
* cfgexpand.c: Include asan.h. Define HOST_WIDE_INT heap vector.
(partition_stack_vars): If i is large alignment and j small
alignment or vice versa, break out of the loop instead of continue,
and put the test earlier. If flag_asan, break out of the loop
if for small alignment size is different.
(struct stack_vars_data): New type.
(expand_stack_vars): Add DATA argument. Change PRED type to
function taking size_t argument instead of tree. Adjust pred
calls. Fill DATA in and add needed padding in between variables
if -faddress-sanitizer.
(defer_stack_allocation): Defer everything for flag_asan.
(stack_protect_decl_phase_1, stack_protect_decl_phase_2): Take
size_t index into stack_vars array instead of the decl directly.
(asan_decl_phase_3): New function.
(expand_used_vars): Return var destruction sequence. Adjust
expand_stack_vars calls, add another one for flag_asan. Call
asan_emit_stack_protection if expand_stack_vars added anything
to the vectors.
(expand_gimple_basic_block): Add disable_tail_calls argument.
(gimple_expand_cfg): Pass true to it if expand_used_vars returned
non-NULL. Emit the sequence returned by expand_used_vars after
return_label.
* asan.h (asan_emit_stack_protection): New prototype.
(asan_shadow_set): New decl.
(ASAN_RED_ZONE_SIZE, ASAN_STACK_MAGIC_LEFT, ASAN_STACK_MAGIC_MIDDLE,
ASAN_STACK_MAGIC_RIGHT, ASAN_STACK_FRAME_MAGIC): Define.
(asan_protect_stack_decl): New inline.
* toplev.c (process_options): Also disable -faddress-sanitizer on
!FRAME_GROWS_DOWNWARDS targets.
From-SVN: r193436
2012-11-12 16:52:26 +01:00
|
|
|
|
2012-12-10 13:14:36 +01:00
|
|
|
if (shadow_ptr_types[0] == NULL_TREE)
|
|
|
|
asan_init_shadow_ptr_types ();
|
|
|
|
|
Implement protection of stack variables
This patch implements the protection of stack variables.
It lays out stack variables as well as the different red zones,
emits some prologue code to populate the shadow memory as to poison
(mark as non-accessible) the regions of the red zones and mark the
regions of stack variables as accessible, and emit some epilogue code
to un-poison (mark as accessible) the regions of red zones right
before the function exits.
* Makefile.in (asan.o): Depend on $(EXPR_H) $(OPTABS_H).
(cfgexpand.o): Depend on asan.h.
* asan.c: Include expr.h and optabs.h.
(asan_shadow_set): New variable.
(asan_shadow_cst, asan_emit_stack_protection): New functions.
(asan_init_shadow_ptr_types): Initialize also asan_shadow_set.
* cfgexpand.c: Include asan.h. Define HOST_WIDE_INT heap vector.
(partition_stack_vars): If i is large alignment and j small
alignment or vice versa, break out of the loop instead of continue,
and put the test earlier. If flag_asan, break out of the loop
if for small alignment size is different.
(struct stack_vars_data): New type.
(expand_stack_vars): Add DATA argument. Change PRED type to
function taking size_t argument instead of tree. Adjust pred
calls. Fill DATA in and add needed padding in between variables
if -faddress-sanitizer.
(defer_stack_allocation): Defer everything for flag_asan.
(stack_protect_decl_phase_1, stack_protect_decl_phase_2): Take
size_t index into stack_vars array instead of the decl directly.
(asan_decl_phase_3): New function.
(expand_used_vars): Return var destruction sequence. Adjust
expand_stack_vars calls, add another one for flag_asan. Call
asan_emit_stack_protection if expand_stack_vars added anything
to the vectors.
(expand_gimple_basic_block): Add disable_tail_calls argument.
(gimple_expand_cfg): Pass true to it if expand_used_vars returned
non-NULL. Emit the sequence returned by expand_used_vars after
return_label.
* asan.h (asan_emit_stack_protection): New prototype.
(asan_shadow_set): New decl.
(ASAN_RED_ZONE_SIZE, ASAN_STACK_MAGIC_LEFT, ASAN_STACK_MAGIC_MIDDLE,
ASAN_STACK_MAGIC_RIGHT, ASAN_STACK_FRAME_MAGIC): Define.
(asan_protect_stack_decl): New inline.
* toplev.c (process_options): Also disable -faddress-sanitizer on
!FRAME_GROWS_DOWNWARDS targets.
From-SVN: r193436
2012-11-12 16:52:26 +01:00
|
|
|
/* First of all, prepare the description string. */
|
2013-08-05 07:16:29 +02:00
|
|
|
pretty_printer asan_pp;
|
2013-08-22 12:14:46 +02:00
|
|
|
|
Implement protection of global variables
This patch implements the protection of global variables. See the
comments appended to the beginning of the asan.c file.
* varasm.c: Include asan.h.
(assemble_noswitch_variable): Grow size by asan_red_zone_size
if decl is asan protected.
(place_block_symbol): Likewise.
(assemble_variable): If decl is asan protected, increase
DECL_ALIGN if needed, and for decls emitted using
assemble_variable_contents append padding zeros after it.
* Makefile.in (varasm.o): Depend on asan.h.
* asan.c: Include output.h.
(asan_pp, asan_pp_initialized, asan_ctor_statements): New variables.
(asan_pp_initialize, asan_pp_string): New functions.
(asan_emit_stack_protection): Use asan_pp{,_initialized}
instead of local pp{,_initialized} vars, use asan_pp_initialize
and asan_pp_string helpers.
(asan_needs_local_alias, asan_protect_global,
asan_global_struct, asan_add_global): New functions.
(asan_finish_file): Protect global vars that can be protected. Use
asan_ctor_statements instead of ctor_statements
* asan.h (asan_protect_global): New prototype.
(asan_red_zone_size): New inline function.
Co-Authored-By: Wei Mi <wmi@google.com>
From-SVN: r193437
2012-11-12 16:52:42 +01:00
|
|
|
pp_decimal_int (&asan_pp, length / 2 - 1);
|
|
|
|
pp_space (&asan_pp);
|
Implement protection of stack variables
This patch implements the protection of stack variables.
It lays out stack variables as well as the different red zones,
emits some prologue code to populate the shadow memory as to poison
(mark as non-accessible) the regions of the red zones and mark the
regions of stack variables as accessible, and emit some epilogue code
to un-poison (mark as accessible) the regions of red zones right
before the function exits.
* Makefile.in (asan.o): Depend on $(EXPR_H) $(OPTABS_H).
(cfgexpand.o): Depend on asan.h.
* asan.c: Include expr.h and optabs.h.
(asan_shadow_set): New variable.
(asan_shadow_cst, asan_emit_stack_protection): New functions.
(asan_init_shadow_ptr_types): Initialize also asan_shadow_set.
* cfgexpand.c: Include asan.h. Define HOST_WIDE_INT heap vector.
(partition_stack_vars): If i is large alignment and j small
alignment or vice versa, break out of the loop instead of continue,
and put the test earlier. If flag_asan, break out of the loop
if for small alignment size is different.
(struct stack_vars_data): New type.
(expand_stack_vars): Add DATA argument. Change PRED type to
function taking size_t argument instead of tree. Adjust pred
calls. Fill DATA in and add needed padding in between variables
if -faddress-sanitizer.
(defer_stack_allocation): Defer everything for flag_asan.
(stack_protect_decl_phase_1, stack_protect_decl_phase_2): Take
size_t index into stack_vars array instead of the decl directly.
(asan_decl_phase_3): New function.
(expand_used_vars): Return var destruction sequence. Adjust
expand_stack_vars calls, add another one for flag_asan. Call
asan_emit_stack_protection if expand_stack_vars added anything
to the vectors.
(expand_gimple_basic_block): Add disable_tail_calls argument.
(gimple_expand_cfg): Pass true to it if expand_used_vars returned
non-NULL. Emit the sequence returned by expand_used_vars after
return_label.
* asan.h (asan_emit_stack_protection): New prototype.
(asan_shadow_set): New decl.
(ASAN_RED_ZONE_SIZE, ASAN_STACK_MAGIC_LEFT, ASAN_STACK_MAGIC_MIDDLE,
ASAN_STACK_MAGIC_RIGHT, ASAN_STACK_FRAME_MAGIC): Define.
(asan_protect_stack_decl): New inline.
* toplev.c (process_options): Also disable -faddress-sanitizer on
!FRAME_GROWS_DOWNWARDS targets.
From-SVN: r193436
2012-11-12 16:52:26 +01:00
|
|
|
for (l = length - 2; l; l -= 2)
|
|
|
|
{
|
|
|
|
tree decl = decls[l / 2 - 1];
|
Implement protection of global variables
This patch implements the protection of global variables. See the
comments appended to the beginning of the asan.c file.
* varasm.c: Include asan.h.
(assemble_noswitch_variable): Grow size by asan_red_zone_size
if decl is asan protected.
(place_block_symbol): Likewise.
(assemble_variable): If decl is asan protected, increase
DECL_ALIGN if needed, and for decls emitted using
assemble_variable_contents append padding zeros after it.
* Makefile.in (varasm.o): Depend on asan.h.
* asan.c: Include output.h.
(asan_pp, asan_pp_initialized, asan_ctor_statements): New variables.
(asan_pp_initialize, asan_pp_string): New functions.
(asan_emit_stack_protection): Use asan_pp{,_initialized}
instead of local pp{,_initialized} vars, use asan_pp_initialize
and asan_pp_string helpers.
(asan_needs_local_alias, asan_protect_global,
asan_global_struct, asan_add_global): New functions.
(asan_finish_file): Protect global vars that can be protected. Use
asan_ctor_statements instead of ctor_statements
* asan.h (asan_protect_global): New prototype.
(asan_red_zone_size): New inline function.
Co-Authored-By: Wei Mi <wmi@google.com>
From-SVN: r193437
2012-11-12 16:52:42 +01:00
|
|
|
pp_wide_integer (&asan_pp, offsets[l] - base_offset);
|
|
|
|
pp_space (&asan_pp);
|
|
|
|
pp_wide_integer (&asan_pp, offsets[l - 1] - offsets[l]);
|
|
|
|
pp_space (&asan_pp);
|
Implement protection of stack variables
This patch implements the protection of stack variables.
It lays out stack variables as well as the different red zones,
emits some prologue code to populate the shadow memory as to poison
(mark as non-accessible) the regions of the red zones and mark the
regions of stack variables as accessible, and emit some epilogue code
to un-poison (mark as accessible) the regions of red zones right
before the function exits.
* Makefile.in (asan.o): Depend on $(EXPR_H) $(OPTABS_H).
(cfgexpand.o): Depend on asan.h.
* asan.c: Include expr.h and optabs.h.
(asan_shadow_set): New variable.
(asan_shadow_cst, asan_emit_stack_protection): New functions.
(asan_init_shadow_ptr_types): Initialize also asan_shadow_set.
* cfgexpand.c: Include asan.h. Define HOST_WIDE_INT heap vector.
(partition_stack_vars): If i is large alignment and j small
alignment or vice versa, break out of the loop instead of continue,
and put the test earlier. If flag_asan, break out of the loop
if for small alignment size is different.
(struct stack_vars_data): New type.
(expand_stack_vars): Add DATA argument. Change PRED type to
function taking size_t argument instead of tree. Adjust pred
calls. Fill DATA in and add needed padding in between variables
if -faddress-sanitizer.
(defer_stack_allocation): Defer everything for flag_asan.
(stack_protect_decl_phase_1, stack_protect_decl_phase_2): Take
size_t index into stack_vars array instead of the decl directly.
(asan_decl_phase_3): New function.
(expand_used_vars): Return var destruction sequence. Adjust
expand_stack_vars calls, add another one for flag_asan. Call
asan_emit_stack_protection if expand_stack_vars added anything
to the vectors.
(expand_gimple_basic_block): Add disable_tail_calls argument.
(gimple_expand_cfg): Pass true to it if expand_used_vars returned
non-NULL. Emit the sequence returned by expand_used_vars after
return_label.
* asan.h (asan_emit_stack_protection): New prototype.
(asan_shadow_set): New decl.
(ASAN_RED_ZONE_SIZE, ASAN_STACK_MAGIC_LEFT, ASAN_STACK_MAGIC_MIDDLE,
ASAN_STACK_MAGIC_RIGHT, ASAN_STACK_FRAME_MAGIC): Define.
(asan_protect_stack_decl): New inline.
* toplev.c (process_options): Also disable -faddress-sanitizer on
!FRAME_GROWS_DOWNWARDS targets.
From-SVN: r193436
2012-11-12 16:52:26 +01:00
|
|
|
if (DECL_P (decl) && DECL_NAME (decl))
|
|
|
|
{
|
Implement protection of global variables
This patch implements the protection of global variables. See the
comments appended to the beginning of the asan.c file.
* varasm.c: Include asan.h.
(assemble_noswitch_variable): Grow size by asan_red_zone_size
if decl is asan protected.
(place_block_symbol): Likewise.
(assemble_variable): If decl is asan protected, increase
DECL_ALIGN if needed, and for decls emitted using
assemble_variable_contents append padding zeros after it.
* Makefile.in (varasm.o): Depend on asan.h.
* asan.c: Include output.h.
(asan_pp, asan_pp_initialized, asan_ctor_statements): New variables.
(asan_pp_initialize, asan_pp_string): New functions.
(asan_emit_stack_protection): Use asan_pp{,_initialized}
instead of local pp{,_initialized} vars, use asan_pp_initialize
and asan_pp_string helpers.
(asan_needs_local_alias, asan_protect_global,
asan_global_struct, asan_add_global): New functions.
(asan_finish_file): Protect global vars that can be protected. Use
asan_ctor_statements instead of ctor_statements
* asan.h (asan_protect_global): New prototype.
(asan_red_zone_size): New inline function.
Co-Authored-By: Wei Mi <wmi@google.com>
From-SVN: r193437
2012-11-12 16:52:42 +01:00
|
|
|
pp_decimal_int (&asan_pp, IDENTIFIER_LENGTH (DECL_NAME (decl)));
|
|
|
|
pp_space (&asan_pp);
|
2013-08-05 06:00:57 +02:00
|
|
|
pp_tree_identifier (&asan_pp, DECL_NAME (decl));
|
Implement protection of stack variables
This patch implements the protection of stack variables.
It lays out stack variables as well as the different red zones,
emits some prologue code to populate the shadow memory as to poison
(mark as non-accessible) the regions of the red zones and mark the
regions of stack variables as accessible, and emit some epilogue code
to un-poison (mark as accessible) the regions of red zones right
before the function exits.
* Makefile.in (asan.o): Depend on $(EXPR_H) $(OPTABS_H).
(cfgexpand.o): Depend on asan.h.
* asan.c: Include expr.h and optabs.h.
(asan_shadow_set): New variable.
(asan_shadow_cst, asan_emit_stack_protection): New functions.
(asan_init_shadow_ptr_types): Initialize also asan_shadow_set.
* cfgexpand.c: Include asan.h. Define HOST_WIDE_INT heap vector.
(partition_stack_vars): If i is large alignment and j small
alignment or vice versa, break out of the loop instead of continue,
and put the test earlier. If flag_asan, break out of the loop
if for small alignment size is different.
(struct stack_vars_data): New type.
(expand_stack_vars): Add DATA argument. Change PRED type to
function taking size_t argument instead of tree. Adjust pred
calls. Fill DATA in and add needed padding in between variables
if -faddress-sanitizer.
(defer_stack_allocation): Defer everything for flag_asan.
(stack_protect_decl_phase_1, stack_protect_decl_phase_2): Take
size_t index into stack_vars array instead of the decl directly.
(asan_decl_phase_3): New function.
(expand_used_vars): Return var destruction sequence. Adjust
expand_stack_vars calls, add another one for flag_asan. Call
asan_emit_stack_protection if expand_stack_vars added anything
to the vectors.
(expand_gimple_basic_block): Add disable_tail_calls argument.
(gimple_expand_cfg): Pass true to it if expand_used_vars returned
non-NULL. Emit the sequence returned by expand_used_vars after
return_label.
* asan.h (asan_emit_stack_protection): New prototype.
(asan_shadow_set): New decl.
(ASAN_RED_ZONE_SIZE, ASAN_STACK_MAGIC_LEFT, ASAN_STACK_MAGIC_MIDDLE,
ASAN_STACK_MAGIC_RIGHT, ASAN_STACK_FRAME_MAGIC): Define.
(asan_protect_stack_decl): New inline.
* toplev.c (process_options): Also disable -faddress-sanitizer on
!FRAME_GROWS_DOWNWARDS targets.
From-SVN: r193436
2012-11-12 16:52:26 +01:00
|
|
|
}
|
|
|
|
else
|
Implement protection of global variables
This patch implements the protection of global variables. See the
comments appended to the beginning of the asan.c file.
* varasm.c: Include asan.h.
(assemble_noswitch_variable): Grow size by asan_red_zone_size
if decl is asan protected.
(place_block_symbol): Likewise.
(assemble_variable): If decl is asan protected, increase
DECL_ALIGN if needed, and for decls emitted using
assemble_variable_contents append padding zeros after it.
* Makefile.in (varasm.o): Depend on asan.h.
* asan.c: Include output.h.
(asan_pp, asan_pp_initialized, asan_ctor_statements): New variables.
(asan_pp_initialize, asan_pp_string): New functions.
(asan_emit_stack_protection): Use asan_pp{,_initialized}
instead of local pp{,_initialized} vars, use asan_pp_initialize
and asan_pp_string helpers.
(asan_needs_local_alias, asan_protect_global,
asan_global_struct, asan_add_global): New functions.
(asan_finish_file): Protect global vars that can be protected. Use
asan_ctor_statements instead of ctor_statements
* asan.h (asan_protect_global): New prototype.
(asan_red_zone_size): New inline function.
Co-Authored-By: Wei Mi <wmi@google.com>
From-SVN: r193437
2012-11-12 16:52:42 +01:00
|
|
|
pp_string (&asan_pp, "9 <unknown>");
|
|
|
|
pp_space (&asan_pp);
|
Implement protection of stack variables
This patch implements the protection of stack variables.
It lays out stack variables as well as the different red zones,
emits some prologue code to populate the shadow memory as to poison
(mark as non-accessible) the regions of the red zones and mark the
regions of stack variables as accessible, and emit some epilogue code
to un-poison (mark as accessible) the regions of red zones right
before the function exits.
* Makefile.in (asan.o): Depend on $(EXPR_H) $(OPTABS_H).
(cfgexpand.o): Depend on asan.h.
* asan.c: Include expr.h and optabs.h.
(asan_shadow_set): New variable.
(asan_shadow_cst, asan_emit_stack_protection): New functions.
(asan_init_shadow_ptr_types): Initialize also asan_shadow_set.
* cfgexpand.c: Include asan.h. Define HOST_WIDE_INT heap vector.
(partition_stack_vars): If i is large alignment and j small
alignment or vice versa, break out of the loop instead of continue,
and put the test earlier. If flag_asan, break out of the loop
if for small alignment size is different.
(struct stack_vars_data): New type.
(expand_stack_vars): Add DATA argument. Change PRED type to
function taking size_t argument instead of tree. Adjust pred
calls. Fill DATA in and add needed padding in between variables
if -faddress-sanitizer.
(defer_stack_allocation): Defer everything for flag_asan.
(stack_protect_decl_phase_1, stack_protect_decl_phase_2): Take
size_t index into stack_vars array instead of the decl directly.
(asan_decl_phase_3): New function.
(expand_used_vars): Return var destruction sequence. Adjust
expand_stack_vars calls, add another one for flag_asan. Call
asan_emit_stack_protection if expand_stack_vars added anything
to the vectors.
(expand_gimple_basic_block): Add disable_tail_calls argument.
(gimple_expand_cfg): Pass true to it if expand_used_vars returned
non-NULL. Emit the sequence returned by expand_used_vars after
return_label.
* asan.h (asan_emit_stack_protection): New prototype.
(asan_shadow_set): New decl.
(ASAN_RED_ZONE_SIZE, ASAN_STACK_MAGIC_LEFT, ASAN_STACK_MAGIC_MIDDLE,
ASAN_STACK_MAGIC_RIGHT, ASAN_STACK_FRAME_MAGIC): Define.
(asan_protect_stack_decl): New inline.
* toplev.c (process_options): Also disable -faddress-sanitizer on
!FRAME_GROWS_DOWNWARDS targets.
From-SVN: r193436
2012-11-12 16:52:26 +01:00
|
|
|
}
|
2013-08-05 07:16:29 +02:00
|
|
|
str_cst = asan_pp_string (&asan_pp);
|
Implement protection of stack variables
This patch implements the protection of stack variables.
It lays out stack variables as well as the different red zones,
emits some prologue code to populate the shadow memory as to poison
(mark as non-accessible) the regions of the red zones and mark the
regions of stack variables as accessible, and emit some epilogue code
to un-poison (mark as accessible) the regions of red zones right
before the function exits.
* Makefile.in (asan.o): Depend on $(EXPR_H) $(OPTABS_H).
(cfgexpand.o): Depend on asan.h.
* asan.c: Include expr.h and optabs.h.
(asan_shadow_set): New variable.
(asan_shadow_cst, asan_emit_stack_protection): New functions.
(asan_init_shadow_ptr_types): Initialize also asan_shadow_set.
* cfgexpand.c: Include asan.h. Define HOST_WIDE_INT heap vector.
(partition_stack_vars): If i is large alignment and j small
alignment or vice versa, break out of the loop instead of continue,
and put the test earlier. If flag_asan, break out of the loop
if for small alignment size is different.
(struct stack_vars_data): New type.
(expand_stack_vars): Add DATA argument. Change PRED type to
function taking size_t argument instead of tree. Adjust pred
calls. Fill DATA in and add needed padding in between variables
if -faddress-sanitizer.
(defer_stack_allocation): Defer everything for flag_asan.
(stack_protect_decl_phase_1, stack_protect_decl_phase_2): Take
size_t index into stack_vars array instead of the decl directly.
(asan_decl_phase_3): New function.
(expand_used_vars): Return var destruction sequence. Adjust
expand_stack_vars calls, add another one for flag_asan. Call
asan_emit_stack_protection if expand_stack_vars added anything
to the vectors.
(expand_gimple_basic_block): Add disable_tail_calls argument.
(gimple_expand_cfg): Pass true to it if expand_used_vars returned
non-NULL. Emit the sequence returned by expand_used_vars after
return_label.
* asan.h (asan_emit_stack_protection): New prototype.
(asan_shadow_set): New decl.
(ASAN_RED_ZONE_SIZE, ASAN_STACK_MAGIC_LEFT, ASAN_STACK_MAGIC_MIDDLE,
ASAN_STACK_MAGIC_RIGHT, ASAN_STACK_FRAME_MAGIC): Define.
(asan_protect_stack_decl): New inline.
* toplev.c (process_options): Also disable -faddress-sanitizer on
!FRAME_GROWS_DOWNWARDS targets.
From-SVN: r193436
2012-11-12 16:52:26 +01:00
|
|
|
|
|
|
|
/* Emit the prologue sequence. */
|
2014-01-09 08:31:05 +01:00
|
|
|
if (asan_frame_size > 32 && asan_frame_size <= 65536 && pbase
|
|
|
|
&& ASAN_USE_AFTER_RETURN)
|
2013-11-28 09:18:59 +01:00
|
|
|
{
|
|
|
|
use_after_return_class = floor_log2 (asan_frame_size - 1) - 5;
|
|
|
|
/* __asan_stack_malloc_N guarantees alignment
|
2014-08-11 08:12:12 +02:00
|
|
|
N < 6 ? (64 << N) : 4096 bytes. */
|
2013-11-28 09:18:59 +01:00
|
|
|
if (alignb > (use_after_return_class < 6
|
|
|
|
? (64U << use_after_return_class) : 4096U))
|
|
|
|
use_after_return_class = -1;
|
|
|
|
else if (alignb > ASAN_RED_ZONE_SIZE && (asan_frame_size & (alignb - 1)))
|
|
|
|
base_align_bias = ((asan_frame_size + alignb - 1)
|
|
|
|
& ~(alignb - HOST_WIDE_INT_1)) - asan_frame_size;
|
|
|
|
}
|
2014-04-22 07:55:40 +02:00
|
|
|
/* Align base if target is STRICT_ALIGNMENT. */
|
|
|
|
if (STRICT_ALIGNMENT)
|
|
|
|
base = expand_binop (Pmode, and_optab, base,
|
|
|
|
gen_int_mode (-((GET_MODE_ALIGNMENT (SImode)
|
|
|
|
<< ASAN_SHADOW_SHIFT)
|
|
|
|
/ BITS_PER_UNIT), Pmode), NULL_RTX,
|
|
|
|
1, OPTAB_DIRECT);
|
|
|
|
|
2013-11-28 09:18:59 +01:00
|
|
|
if (use_after_return_class == -1 && pbase)
|
|
|
|
emit_move_insn (pbase, base);
|
2014-04-22 07:55:40 +02:00
|
|
|
|
asan.c (asan_clear_shadow): Use gen_int_mode with the mode of the associated expand_* call.
gcc/
* asan.c (asan_clear_shadow): Use gen_int_mode with the mode
of the associated expand_* call.
(asan_emit_stack_protection): Likewise.
* builtins.c (round_trampoline_addr): Likewise.
* explow.c (allocate_dynamic_stack_space, probe_stack_range): Likewise.
* expmed.c (expand_smod_pow2, expand_sdiv_pow2, expand_divmod)
(emit_store_flag): Likewise.
* expr.c (emit_move_resolve_push, push_block, emit_single_push_insn_1)
(emit_push_insn, optimize_bitfield_assignment_op, expand_expr_real_1):
Likewise.
* function.c (instantiate_virtual_regs_in_insn): Likewise.
* ifcvt.c (noce_try_store_flag_constants): Likewise.
* loop-unroll.c (unroll_loop_runtime_iterations): Likewise.
* modulo-sched.c (generate_prolog_epilog): Likewise.
* optabs.c (expand_binop, widen_leading, expand_doubleword_clz)
(expand_ctz, expand_ffs, expand_unop): Likewise.
From-SVN: r202392
2013-09-09 15:06:53 +02:00
|
|
|
base = expand_binop (Pmode, add_optab, base,
|
2013-11-28 09:18:59 +01:00
|
|
|
gen_int_mode (base_offset - base_align_bias, Pmode),
|
Implement protection of stack variables
This patch implements the protection of stack variables.
It lays out stack variables as well as the different red zones,
emits some prologue code to populate the shadow memory as to poison
(mark as non-accessible) the regions of the red zones and mark the
regions of stack variables as accessible, and emit some epilogue code
to un-poison (mark as accessible) the regions of red zones right
before the function exits.
* Makefile.in (asan.o): Depend on $(EXPR_H) $(OPTABS_H).
(cfgexpand.o): Depend on asan.h.
* asan.c: Include expr.h and optabs.h.
(asan_shadow_set): New variable.
(asan_shadow_cst, asan_emit_stack_protection): New functions.
(asan_init_shadow_ptr_types): Initialize also asan_shadow_set.
* cfgexpand.c: Include asan.h. Define HOST_WIDE_INT heap vector.
(partition_stack_vars): If i is large alignment and j small
alignment or vice versa, break out of the loop instead of continue,
and put the test earlier. If flag_asan, break out of the loop
if for small alignment size is different.
(struct stack_vars_data): New type.
(expand_stack_vars): Add DATA argument. Change PRED type to
function taking size_t argument instead of tree. Adjust pred
calls. Fill DATA in and add needed padding in between variables
if -faddress-sanitizer.
(defer_stack_allocation): Defer everything for flag_asan.
(stack_protect_decl_phase_1, stack_protect_decl_phase_2): Take
size_t index into stack_vars array instead of the decl directly.
(asan_decl_phase_3): New function.
(expand_used_vars): Return var destruction sequence. Adjust
expand_stack_vars calls, add another one for flag_asan. Call
asan_emit_stack_protection if expand_stack_vars added anything
to the vectors.
(expand_gimple_basic_block): Add disable_tail_calls argument.
(gimple_expand_cfg): Pass true to it if expand_used_vars returned
non-NULL. Emit the sequence returned by expand_used_vars after
return_label.
* asan.h (asan_emit_stack_protection): New prototype.
(asan_shadow_set): New decl.
(ASAN_RED_ZONE_SIZE, ASAN_STACK_MAGIC_LEFT, ASAN_STACK_MAGIC_MIDDLE,
ASAN_STACK_MAGIC_RIGHT, ASAN_STACK_FRAME_MAGIC): Define.
(asan_protect_stack_decl): New inline.
* toplev.c (process_options): Also disable -faddress-sanitizer on
!FRAME_GROWS_DOWNWARDS targets.
From-SVN: r193436
2012-11-12 16:52:26 +01:00
|
|
|
NULL_RTX, 1, OPTAB_DIRECT);
|
2013-11-28 09:18:59 +01:00
|
|
|
orig_base = NULL_RTX;
|
|
|
|
if (use_after_return_class != -1)
|
|
|
|
{
|
|
|
|
if (asan_detect_stack_use_after_return == NULL_TREE)
|
|
|
|
{
|
|
|
|
id = get_identifier ("__asan_option_detect_stack_use_after_return");
|
|
|
|
decl = build_decl (BUILTINS_LOCATION, VAR_DECL, id,
|
|
|
|
integer_type_node);
|
|
|
|
SET_DECL_ASSEMBLER_NAME (decl, id);
|
|
|
|
TREE_ADDRESSABLE (decl) = 1;
|
|
|
|
DECL_ARTIFICIAL (decl) = 1;
|
|
|
|
DECL_IGNORED_P (decl) = 1;
|
|
|
|
DECL_EXTERNAL (decl) = 1;
|
|
|
|
TREE_STATIC (decl) = 1;
|
|
|
|
TREE_PUBLIC (decl) = 1;
|
|
|
|
TREE_USED (decl) = 1;
|
|
|
|
asan_detect_stack_use_after_return = decl;
|
|
|
|
}
|
|
|
|
orig_base = gen_reg_rtx (Pmode);
|
|
|
|
emit_move_insn (orig_base, base);
|
|
|
|
ret = expand_normal (asan_detect_stack_use_after_return);
|
|
|
|
lab = gen_label_rtx ();
|
|
|
|
int very_likely = REG_BR_PROB_BASE - (REG_BR_PROB_BASE / 2000 - 1);
|
|
|
|
emit_cmp_and_jump_insns (ret, const0_rtx, EQ, NULL_RTX,
|
|
|
|
VOIDmode, 0, lab, very_likely);
|
|
|
|
snprintf (buf, sizeof buf, "__asan_stack_malloc_%d",
|
|
|
|
use_after_return_class);
|
|
|
|
ret = init_one_libfunc (buf);
|
2015-10-21 09:40:54 +02:00
|
|
|
ret = emit_library_call_value (ret, NULL_RTX, LCT_NORMAL, ptr_mode, 1,
|
2013-11-28 09:18:59 +01:00
|
|
|
GEN_INT (asan_frame_size
|
|
|
|
+ base_align_bias),
|
2015-10-21 09:40:54 +02:00
|
|
|
TYPE_MODE (pointer_sized_int_node));
|
|
|
|
/* __asan_stack_malloc_[n] returns a pointer to fake stack if succeeded
|
|
|
|
and NULL otherwise. Check RET value is NULL here and jump over the
|
|
|
|
BASE reassignment in this case. Otherwise, reassign BASE to RET. */
|
|
|
|
int very_unlikely = REG_BR_PROB_BASE / 2000 - 1;
|
|
|
|
emit_cmp_and_jump_insns (ret, const0_rtx, EQ, NULL_RTX,
|
|
|
|
VOIDmode, 0, lab, very_unlikely);
|
2013-11-28 09:18:59 +01:00
|
|
|
ret = convert_memory_address (Pmode, ret);
|
|
|
|
emit_move_insn (base, ret);
|
|
|
|
emit_label (lab);
|
|
|
|
emit_move_insn (pbase, expand_binop (Pmode, add_optab, base,
|
|
|
|
gen_int_mode (base_align_bias
|
|
|
|
- base_offset, Pmode),
|
|
|
|
NULL_RTX, 1, OPTAB_DIRECT));
|
|
|
|
}
|
Implement protection of stack variables
This patch implements the protection of stack variables.
It lays out stack variables as well as the different red zones,
emits some prologue code to populate the shadow memory as to poison
(mark as non-accessible) the regions of the red zones and mark the
regions of stack variables as accessible, and emit some epilogue code
to un-poison (mark as accessible) the regions of red zones right
before the function exits.
* Makefile.in (asan.o): Depend on $(EXPR_H) $(OPTABS_H).
(cfgexpand.o): Depend on asan.h.
* asan.c: Include expr.h and optabs.h.
(asan_shadow_set): New variable.
(asan_shadow_cst, asan_emit_stack_protection): New functions.
(asan_init_shadow_ptr_types): Initialize also asan_shadow_set.
* cfgexpand.c: Include asan.h. Define HOST_WIDE_INT heap vector.
(partition_stack_vars): If i is large alignment and j small
alignment or vice versa, break out of the loop instead of continue,
and put the test earlier. If flag_asan, break out of the loop
if for small alignment size is different.
(struct stack_vars_data): New type.
(expand_stack_vars): Add DATA argument. Change PRED type to
function taking size_t argument instead of tree. Adjust pred
calls. Fill DATA in and add needed padding in between variables
if -faddress-sanitizer.
(defer_stack_allocation): Defer everything for flag_asan.
(stack_protect_decl_phase_1, stack_protect_decl_phase_2): Take
size_t index into stack_vars array instead of the decl directly.
(asan_decl_phase_3): New function.
(expand_used_vars): Return var destruction sequence. Adjust
expand_stack_vars calls, add another one for flag_asan. Call
asan_emit_stack_protection if expand_stack_vars added anything
to the vectors.
(expand_gimple_basic_block): Add disable_tail_calls argument.
(gimple_expand_cfg): Pass true to it if expand_used_vars returned
non-NULL. Emit the sequence returned by expand_used_vars after
return_label.
* asan.h (asan_emit_stack_protection): New prototype.
(asan_shadow_set): New decl.
(ASAN_RED_ZONE_SIZE, ASAN_STACK_MAGIC_LEFT, ASAN_STACK_MAGIC_MIDDLE,
ASAN_STACK_MAGIC_RIGHT, ASAN_STACK_FRAME_MAGIC): Define.
(asan_protect_stack_decl): New inline.
* toplev.c (process_options): Also disable -faddress-sanitizer on
!FRAME_GROWS_DOWNWARDS targets.
From-SVN: r193436
2012-11-12 16:52:26 +01:00
|
|
|
mem = gen_rtx_MEM (ptr_mode, base);
|
2013-11-28 09:18:59 +01:00
|
|
|
mem = adjust_address (mem, VOIDmode, base_align_bias);
|
2013-09-09 15:07:10 +02:00
|
|
|
emit_move_insn (mem, gen_int_mode (ASAN_STACK_FRAME_MAGIC, ptr_mode));
|
Implement protection of stack variables
This patch implements the protection of stack variables.
It lays out stack variables as well as the different red zones,
emits some prologue code to populate the shadow memory as to poison
(mark as non-accessible) the regions of the red zones and mark the
regions of stack variables as accessible, and emit some epilogue code
to un-poison (mark as accessible) the regions of red zones right
before the function exits.
* Makefile.in (asan.o): Depend on $(EXPR_H) $(OPTABS_H).
(cfgexpand.o): Depend on asan.h.
* asan.c: Include expr.h and optabs.h.
(asan_shadow_set): New variable.
(asan_shadow_cst, asan_emit_stack_protection): New functions.
(asan_init_shadow_ptr_types): Initialize also asan_shadow_set.
* cfgexpand.c: Include asan.h. Define HOST_WIDE_INT heap vector.
(partition_stack_vars): If i is large alignment and j small
alignment or vice versa, break out of the loop instead of continue,
and put the test earlier. If flag_asan, break out of the loop
if for small alignment size is different.
(struct stack_vars_data): New type.
(expand_stack_vars): Add DATA argument. Change PRED type to
function taking size_t argument instead of tree. Adjust pred
calls. Fill DATA in and add needed padding in between variables
if -faddress-sanitizer.
(defer_stack_allocation): Defer everything for flag_asan.
(stack_protect_decl_phase_1, stack_protect_decl_phase_2): Take
size_t index into stack_vars array instead of the decl directly.
(asan_decl_phase_3): New function.
(expand_used_vars): Return var destruction sequence. Adjust
expand_stack_vars calls, add another one for flag_asan. Call
asan_emit_stack_protection if expand_stack_vars added anything
to the vectors.
(expand_gimple_basic_block): Add disable_tail_calls argument.
(gimple_expand_cfg): Pass true to it if expand_used_vars returned
non-NULL. Emit the sequence returned by expand_used_vars after
return_label.
* asan.h (asan_emit_stack_protection): New prototype.
(asan_shadow_set): New decl.
(ASAN_RED_ZONE_SIZE, ASAN_STACK_MAGIC_LEFT, ASAN_STACK_MAGIC_MIDDLE,
ASAN_STACK_MAGIC_RIGHT, ASAN_STACK_FRAME_MAGIC): Define.
(asan_protect_stack_decl): New inline.
* toplev.c (process_options): Also disable -faddress-sanitizer on
!FRAME_GROWS_DOWNWARDS targets.
From-SVN: r193436
2012-11-12 16:52:26 +01:00
|
|
|
mem = adjust_address (mem, VOIDmode, GET_MODE_SIZE (ptr_mode));
|
|
|
|
emit_move_insn (mem, expand_normal (str_cst));
|
2013-11-04 22:33:31 +01:00
|
|
|
mem = adjust_address (mem, VOIDmode, GET_MODE_SIZE (ptr_mode));
|
|
|
|
ASM_GENERATE_INTERNAL_LABEL (buf, "LASANPC", current_function_funcdef_no);
|
|
|
|
id = get_identifier (buf);
|
|
|
|
decl = build_decl (DECL_SOURCE_LOCATION (current_function_decl),
|
2014-08-11 08:12:12 +02:00
|
|
|
VAR_DECL, id, char_type_node);
|
2013-11-04 22:33:31 +01:00
|
|
|
SET_DECL_ASSEMBLER_NAME (decl, id);
|
|
|
|
TREE_ADDRESSABLE (decl) = 1;
|
|
|
|
TREE_READONLY (decl) = 1;
|
|
|
|
DECL_ARTIFICIAL (decl) = 1;
|
|
|
|
DECL_IGNORED_P (decl) = 1;
|
|
|
|
TREE_STATIC (decl) = 1;
|
|
|
|
TREE_PUBLIC (decl) = 0;
|
|
|
|
TREE_USED (decl) = 1;
|
2013-11-14 19:28:43 +01:00
|
|
|
DECL_INITIAL (decl) = decl;
|
|
|
|
TREE_ASM_WRITTEN (decl) = 1;
|
|
|
|
TREE_ASM_WRITTEN (id) = 1;
|
2013-11-04 22:33:31 +01:00
|
|
|
emit_move_insn (mem, expand_normal (build_fold_addr_expr (decl)));
|
Implement protection of stack variables
This patch implements the protection of stack variables.
It lays out stack variables as well as the different red zones,
emits some prologue code to populate the shadow memory as to poison
(mark as non-accessible) the regions of the red zones and mark the
regions of stack variables as accessible, and emit some epilogue code
to un-poison (mark as accessible) the regions of red zones right
before the function exits.
* Makefile.in (asan.o): Depend on $(EXPR_H) $(OPTABS_H).
(cfgexpand.o): Depend on asan.h.
* asan.c: Include expr.h and optabs.h.
(asan_shadow_set): New variable.
(asan_shadow_cst, asan_emit_stack_protection): New functions.
(asan_init_shadow_ptr_types): Initialize also asan_shadow_set.
* cfgexpand.c: Include asan.h. Define HOST_WIDE_INT heap vector.
(partition_stack_vars): If i is large alignment and j small
alignment or vice versa, break out of the loop instead of continue,
and put the test earlier. If flag_asan, break out of the loop
if for small alignment size is different.
(struct stack_vars_data): New type.
(expand_stack_vars): Add DATA argument. Change PRED type to
function taking size_t argument instead of tree. Adjust pred
calls. Fill DATA in and add needed padding in between variables
if -faddress-sanitizer.
(defer_stack_allocation): Defer everything for flag_asan.
(stack_protect_decl_phase_1, stack_protect_decl_phase_2): Take
size_t index into stack_vars array instead of the decl directly.
(asan_decl_phase_3): New function.
(expand_used_vars): Return var destruction sequence. Adjust
expand_stack_vars calls, add another one for flag_asan. Call
asan_emit_stack_protection if expand_stack_vars added anything
to the vectors.
(expand_gimple_basic_block): Add disable_tail_calls argument.
(gimple_expand_cfg): Pass true to it if expand_used_vars returned
non-NULL. Emit the sequence returned by expand_used_vars after
return_label.
* asan.h (asan_emit_stack_protection): New prototype.
(asan_shadow_set): New decl.
(ASAN_RED_ZONE_SIZE, ASAN_STACK_MAGIC_LEFT, ASAN_STACK_MAGIC_MIDDLE,
ASAN_STACK_MAGIC_RIGHT, ASAN_STACK_FRAME_MAGIC): Define.
(asan_protect_stack_decl): New inline.
* toplev.c (process_options): Also disable -faddress-sanitizer on
!FRAME_GROWS_DOWNWARDS targets.
From-SVN: r193436
2012-11-12 16:52:26 +01:00
|
|
|
shadow_base = expand_binop (Pmode, lshr_optab, base,
|
|
|
|
GEN_INT (ASAN_SHADOW_SHIFT),
|
|
|
|
NULL_RTX, 1, OPTAB_DIRECT);
|
2013-11-28 09:18:59 +01:00
|
|
|
shadow_base
|
|
|
|
= plus_constant (Pmode, shadow_base,
|
2014-10-28 10:46:29 +01:00
|
|
|
asan_shadow_offset ()
|
2013-11-28 09:18:59 +01:00
|
|
|
+ (base_align_bias >> ASAN_SHADOW_SHIFT));
|
Implement protection of stack variables
This patch implements the protection of stack variables.
It lays out stack variables as well as the different red zones,
emits some prologue code to populate the shadow memory as to poison
(mark as non-accessible) the regions of the red zones and mark the
regions of stack variables as accessible, and emit some epilogue code
to un-poison (mark as accessible) the regions of red zones right
before the function exits.
* Makefile.in (asan.o): Depend on $(EXPR_H) $(OPTABS_H).
(cfgexpand.o): Depend on asan.h.
* asan.c: Include expr.h and optabs.h.
(asan_shadow_set): New variable.
(asan_shadow_cst, asan_emit_stack_protection): New functions.
(asan_init_shadow_ptr_types): Initialize also asan_shadow_set.
* cfgexpand.c: Include asan.h. Define HOST_WIDE_INT heap vector.
(partition_stack_vars): If i is large alignment and j small
alignment or vice versa, break out of the loop instead of continue,
and put the test earlier. If flag_asan, break out of the loop
if for small alignment size is different.
(struct stack_vars_data): New type.
(expand_stack_vars): Add DATA argument. Change PRED type to
function taking size_t argument instead of tree. Adjust pred
calls. Fill DATA in and add needed padding in between variables
if -faddress-sanitizer.
(defer_stack_allocation): Defer everything for flag_asan.
(stack_protect_decl_phase_1, stack_protect_decl_phase_2): Take
size_t index into stack_vars array instead of the decl directly.
(asan_decl_phase_3): New function.
(expand_used_vars): Return var destruction sequence. Adjust
expand_stack_vars calls, add another one for flag_asan. Call
asan_emit_stack_protection if expand_stack_vars added anything
to the vectors.
(expand_gimple_basic_block): Add disable_tail_calls argument.
(gimple_expand_cfg): Pass true to it if expand_used_vars returned
non-NULL. Emit the sequence returned by expand_used_vars after
return_label.
* asan.h (asan_emit_stack_protection): New prototype.
(asan_shadow_set): New decl.
(ASAN_RED_ZONE_SIZE, ASAN_STACK_MAGIC_LEFT, ASAN_STACK_MAGIC_MIDDLE,
ASAN_STACK_MAGIC_RIGHT, ASAN_STACK_FRAME_MAGIC): Define.
(asan_protect_stack_decl): New inline.
* toplev.c (process_options): Also disable -faddress-sanitizer on
!FRAME_GROWS_DOWNWARDS targets.
From-SVN: r193436
2012-11-12 16:52:26 +01:00
|
|
|
gcc_assert (asan_shadow_set != -1
|
|
|
|
&& (ASAN_RED_ZONE_SIZE >> ASAN_SHADOW_SHIFT) == 4);
|
|
|
|
shadow_mem = gen_rtx_MEM (SImode, shadow_base);
|
|
|
|
set_mem_alias_set (shadow_mem, asan_shadow_set);
|
2014-04-22 07:55:40 +02:00
|
|
|
if (STRICT_ALIGNMENT)
|
|
|
|
set_mem_align (shadow_mem, (GET_MODE_ALIGNMENT (SImode)));
|
Implement protection of stack variables
This patch implements the protection of stack variables.
It lays out stack variables as well as the different red zones,
emits some prologue code to populate the shadow memory as to poison
(mark as non-accessible) the regions of the red zones and mark the
regions of stack variables as accessible, and emit some epilogue code
to un-poison (mark as accessible) the regions of red zones right
before the function exits.
* Makefile.in (asan.o): Depend on $(EXPR_H) $(OPTABS_H).
(cfgexpand.o): Depend on asan.h.
* asan.c: Include expr.h and optabs.h.
(asan_shadow_set): New variable.
(asan_shadow_cst, asan_emit_stack_protection): New functions.
(asan_init_shadow_ptr_types): Initialize also asan_shadow_set.
* cfgexpand.c: Include asan.h. Define HOST_WIDE_INT heap vector.
(partition_stack_vars): If i is large alignment and j small
alignment or vice versa, break out of the loop instead of continue,
and put the test earlier. If flag_asan, break out of the loop
if for small alignment size is different.
(struct stack_vars_data): New type.
(expand_stack_vars): Add DATA argument. Change PRED type to
function taking size_t argument instead of tree. Adjust pred
calls. Fill DATA in and add needed padding in between variables
if -faddress-sanitizer.
(defer_stack_allocation): Defer everything for flag_asan.
(stack_protect_decl_phase_1, stack_protect_decl_phase_2): Take
size_t index into stack_vars array instead of the decl directly.
(asan_decl_phase_3): New function.
(expand_used_vars): Return var destruction sequence. Adjust
expand_stack_vars calls, add another one for flag_asan. Call
asan_emit_stack_protection if expand_stack_vars added anything
to the vectors.
(expand_gimple_basic_block): Add disable_tail_calls argument.
(gimple_expand_cfg): Pass true to it if expand_used_vars returned
non-NULL. Emit the sequence returned by expand_used_vars after
return_label.
* asan.h (asan_emit_stack_protection): New prototype.
(asan_shadow_set): New decl.
(ASAN_RED_ZONE_SIZE, ASAN_STACK_MAGIC_LEFT, ASAN_STACK_MAGIC_MIDDLE,
ASAN_STACK_MAGIC_RIGHT, ASAN_STACK_FRAME_MAGIC): Define.
(asan_protect_stack_decl): New inline.
* toplev.c (process_options): Also disable -faddress-sanitizer on
!FRAME_GROWS_DOWNWARDS targets.
From-SVN: r193436
2012-11-12 16:52:26 +01:00
|
|
|
prev_offset = base_offset;
|
|
|
|
for (l = length; l; l -= 2)
|
|
|
|
{
|
|
|
|
if (l == 2)
|
|
|
|
cur_shadow_byte = ASAN_STACK_MAGIC_RIGHT;
|
|
|
|
offset = offsets[l - 1];
|
|
|
|
if ((offset - base_offset) & (ASAN_RED_ZONE_SIZE - 1))
|
|
|
|
{
|
|
|
|
int i;
|
|
|
|
HOST_WIDE_INT aoff
|
|
|
|
= base_offset + ((offset - base_offset)
|
|
|
|
& ~(ASAN_RED_ZONE_SIZE - HOST_WIDE_INT_1));
|
|
|
|
shadow_mem = adjust_address (shadow_mem, VOIDmode,
|
|
|
|
(aoff - prev_offset)
|
|
|
|
>> ASAN_SHADOW_SHIFT);
|
|
|
|
prev_offset = aoff;
|
2016-11-07 11:23:38 +01:00
|
|
|
for (i = 0; i < 4; i++, aoff += ASAN_SHADOW_GRANULARITY)
|
Implement protection of stack variables
This patch implements the protection of stack variables.
It lays out stack variables as well as the different red zones,
emits some prologue code to populate the shadow memory as to poison
(mark as non-accessible) the regions of the red zones and mark the
regions of stack variables as accessible, and emit some epilogue code
to un-poison (mark as accessible) the regions of red zones right
before the function exits.
* Makefile.in (asan.o): Depend on $(EXPR_H) $(OPTABS_H).
(cfgexpand.o): Depend on asan.h.
* asan.c: Include expr.h and optabs.h.
(asan_shadow_set): New variable.
(asan_shadow_cst, asan_emit_stack_protection): New functions.
(asan_init_shadow_ptr_types): Initialize also asan_shadow_set.
* cfgexpand.c: Include asan.h. Define HOST_WIDE_INT heap vector.
(partition_stack_vars): If i is large alignment and j small
alignment or vice versa, break out of the loop instead of continue,
and put the test earlier. If flag_asan, break out of the loop
if for small alignment size is different.
(struct stack_vars_data): New type.
(expand_stack_vars): Add DATA argument. Change PRED type to
function taking size_t argument instead of tree. Adjust pred
calls. Fill DATA in and add needed padding in between variables
if -faddress-sanitizer.
(defer_stack_allocation): Defer everything for flag_asan.
(stack_protect_decl_phase_1, stack_protect_decl_phase_2): Take
size_t index into stack_vars array instead of the decl directly.
(asan_decl_phase_3): New function.
(expand_used_vars): Return var destruction sequence. Adjust
expand_stack_vars calls, add another one for flag_asan. Call
asan_emit_stack_protection if expand_stack_vars added anything
to the vectors.
(expand_gimple_basic_block): Add disable_tail_calls argument.
(gimple_expand_cfg): Pass true to it if expand_used_vars returned
non-NULL. Emit the sequence returned by expand_used_vars after
return_label.
* asan.h (asan_emit_stack_protection): New prototype.
(asan_shadow_set): New decl.
(ASAN_RED_ZONE_SIZE, ASAN_STACK_MAGIC_LEFT, ASAN_STACK_MAGIC_MIDDLE,
ASAN_STACK_MAGIC_RIGHT, ASAN_STACK_FRAME_MAGIC): Define.
(asan_protect_stack_decl): New inline.
* toplev.c (process_options): Also disable -faddress-sanitizer on
!FRAME_GROWS_DOWNWARDS targets.
From-SVN: r193436
2012-11-12 16:52:26 +01:00
|
|
|
if (aoff < offset)
|
|
|
|
{
|
2016-11-07 11:23:38 +01:00
|
|
|
if (aoff < offset - (HOST_WIDE_INT)ASAN_SHADOW_GRANULARITY + 1)
|
Implement protection of stack variables
This patch implements the protection of stack variables.
It lays out stack variables as well as the different red zones,
emits some prologue code to populate the shadow memory as to poison
(mark as non-accessible) the regions of the red zones and mark the
regions of stack variables as accessible, and emit some epilogue code
to un-poison (mark as accessible) the regions of red zones right
before the function exits.
* Makefile.in (asan.o): Depend on $(EXPR_H) $(OPTABS_H).
(cfgexpand.o): Depend on asan.h.
* asan.c: Include expr.h and optabs.h.
(asan_shadow_set): New variable.
(asan_shadow_cst, asan_emit_stack_protection): New functions.
(asan_init_shadow_ptr_types): Initialize also asan_shadow_set.
* cfgexpand.c: Include asan.h. Define HOST_WIDE_INT heap vector.
(partition_stack_vars): If i is large alignment and j small
alignment or vice versa, break out of the loop instead of continue,
and put the test earlier. If flag_asan, break out of the loop
if for small alignment size is different.
(struct stack_vars_data): New type.
(expand_stack_vars): Add DATA argument. Change PRED type to
function taking size_t argument instead of tree. Adjust pred
calls. Fill DATA in and add needed padding in between variables
if -faddress-sanitizer.
(defer_stack_allocation): Defer everything for flag_asan.
(stack_protect_decl_phase_1, stack_protect_decl_phase_2): Take
size_t index into stack_vars array instead of the decl directly.
(asan_decl_phase_3): New function.
(expand_used_vars): Return var destruction sequence. Adjust
expand_stack_vars calls, add another one for flag_asan. Call
asan_emit_stack_protection if expand_stack_vars added anything
to the vectors.
(expand_gimple_basic_block): Add disable_tail_calls argument.
(gimple_expand_cfg): Pass true to it if expand_used_vars returned
non-NULL. Emit the sequence returned by expand_used_vars after
return_label.
* asan.h (asan_emit_stack_protection): New prototype.
(asan_shadow_set): New decl.
(ASAN_RED_ZONE_SIZE, ASAN_STACK_MAGIC_LEFT, ASAN_STACK_MAGIC_MIDDLE,
ASAN_STACK_MAGIC_RIGHT, ASAN_STACK_FRAME_MAGIC): Define.
(asan_protect_stack_decl): New inline.
* toplev.c (process_options): Also disable -faddress-sanitizer on
!FRAME_GROWS_DOWNWARDS targets.
From-SVN: r193436
2012-11-12 16:52:26 +01:00
|
|
|
shadow_bytes[i] = 0;
|
|
|
|
else
|
|
|
|
shadow_bytes[i] = offset - aoff;
|
|
|
|
}
|
|
|
|
else
|
2016-11-08 23:15:35 +01:00
|
|
|
shadow_bytes[i] = ASAN_STACK_MAGIC_MIDDLE;
|
Implement protection of stack variables
This patch implements the protection of stack variables.
It lays out stack variables as well as the different red zones,
emits some prologue code to populate the shadow memory as to poison
(mark as non-accessible) the regions of the red zones and mark the
regions of stack variables as accessible, and emit some epilogue code
to un-poison (mark as accessible) the regions of red zones right
before the function exits.
* Makefile.in (asan.o): Depend on $(EXPR_H) $(OPTABS_H).
(cfgexpand.o): Depend on asan.h.
* asan.c: Include expr.h and optabs.h.
(asan_shadow_set): New variable.
(asan_shadow_cst, asan_emit_stack_protection): New functions.
(asan_init_shadow_ptr_types): Initialize also asan_shadow_set.
* cfgexpand.c: Include asan.h. Define HOST_WIDE_INT heap vector.
(partition_stack_vars): If i is large alignment and j small
alignment or vice versa, break out of the loop instead of continue,
and put the test earlier. If flag_asan, break out of the loop
if for small alignment size is different.
(struct stack_vars_data): New type.
(expand_stack_vars): Add DATA argument. Change PRED type to
function taking size_t argument instead of tree. Adjust pred
calls. Fill DATA in and add needed padding in between variables
if -faddress-sanitizer.
(defer_stack_allocation): Defer everything for flag_asan.
(stack_protect_decl_phase_1, stack_protect_decl_phase_2): Take
size_t index into stack_vars array instead of the decl directly.
(asan_decl_phase_3): New function.
(expand_used_vars): Return var destruction sequence. Adjust
expand_stack_vars calls, add another one for flag_asan. Call
asan_emit_stack_protection if expand_stack_vars added anything
to the vectors.
(expand_gimple_basic_block): Add disable_tail_calls argument.
(gimple_expand_cfg): Pass true to it if expand_used_vars returned
non-NULL. Emit the sequence returned by expand_used_vars after
return_label.
* asan.h (asan_emit_stack_protection): New prototype.
(asan_shadow_set): New decl.
(ASAN_RED_ZONE_SIZE, ASAN_STACK_MAGIC_LEFT, ASAN_STACK_MAGIC_MIDDLE,
ASAN_STACK_MAGIC_RIGHT, ASAN_STACK_FRAME_MAGIC): Define.
(asan_protect_stack_decl): New inline.
* toplev.c (process_options): Also disable -faddress-sanitizer on
!FRAME_GROWS_DOWNWARDS targets.
From-SVN: r193436
2012-11-12 16:52:26 +01:00
|
|
|
emit_move_insn (shadow_mem, asan_shadow_cst (shadow_bytes));
|
|
|
|
offset = aoff;
|
|
|
|
}
|
|
|
|
while (offset <= offsets[l - 2] - ASAN_RED_ZONE_SIZE)
|
|
|
|
{
|
|
|
|
shadow_mem = adjust_address (shadow_mem, VOIDmode,
|
|
|
|
(offset - prev_offset)
|
|
|
|
>> ASAN_SHADOW_SHIFT);
|
|
|
|
prev_offset = offset;
|
|
|
|
memset (shadow_bytes, cur_shadow_byte, 4);
|
|
|
|
emit_move_insn (shadow_mem, asan_shadow_cst (shadow_bytes));
|
|
|
|
offset += ASAN_RED_ZONE_SIZE;
|
|
|
|
}
|
|
|
|
cur_shadow_byte = ASAN_STACK_MAGIC_MIDDLE;
|
|
|
|
}
|
|
|
|
do_pending_stack_adjust ();
|
|
|
|
|
|
|
|
/* Construct epilogue sequence. */
|
|
|
|
start_sequence ();
|
|
|
|
|
use rtx_code_label more
gcc/
* asan.c, cfgexpand.c, config/alpha/alpha.md, config/arm/arm.c,
config/epiphany/epiphany.md, config/h8300/h8300.c, config/i386/i386.md,
config/m32r/m32r.c, config/mcore/mcore.md, config/mips/mips.c,
config/mips/mips.md, config/nios2/nios2.c, config/pa/pa.c,
config/s390/s390.c, config/s390/s390.md, config/sh/sh-mem.cc,
config/sh/sh.c, config/sparc/sparc.c, dojump.c, function.c, optabs.c,
stmt.c: Assign the result of gen_label_rtx to rtx_code_label * instead
of rtx.
From-SVN: r214841
2014-09-03 02:52:11 +02:00
|
|
|
lab = NULL;
|
2013-11-28 09:18:59 +01:00
|
|
|
if (use_after_return_class != -1)
|
|
|
|
{
|
use rtx_code_label more
gcc/
* asan.c, cfgexpand.c, config/alpha/alpha.md, config/arm/arm.c,
config/epiphany/epiphany.md, config/h8300/h8300.c, config/i386/i386.md,
config/m32r/m32r.c, config/mcore/mcore.md, config/mips/mips.c,
config/mips/mips.md, config/nios2/nios2.c, config/pa/pa.c,
config/s390/s390.c, config/s390/s390.md, config/sh/sh-mem.cc,
config/sh/sh.c, config/sparc/sparc.c, dojump.c, function.c, optabs.c,
stmt.c: Assign the result of gen_label_rtx to rtx_code_label * instead
of rtx.
From-SVN: r214841
2014-09-03 02:52:11 +02:00
|
|
|
rtx_code_label *lab2 = gen_label_rtx ();
|
2013-11-28 09:18:59 +01:00
|
|
|
char c = (char) ASAN_STACK_MAGIC_USE_AFTER_RET;
|
|
|
|
int very_likely = REG_BR_PROB_BASE - (REG_BR_PROB_BASE / 2000 - 1);
|
|
|
|
emit_cmp_and_jump_insns (orig_base, base, EQ, NULL_RTX,
|
|
|
|
VOIDmode, 0, lab2, very_likely);
|
|
|
|
shadow_mem = gen_rtx_MEM (BLKmode, shadow_base);
|
|
|
|
set_mem_alias_set (shadow_mem, asan_shadow_set);
|
|
|
|
mem = gen_rtx_MEM (ptr_mode, base);
|
|
|
|
mem = adjust_address (mem, VOIDmode, base_align_bias);
|
|
|
|
emit_move_insn (mem, gen_int_mode (ASAN_STACK_RETIRED_MAGIC, ptr_mode));
|
|
|
|
unsigned HOST_WIDE_INT sz = asan_frame_size >> ASAN_SHADOW_SHIFT;
|
|
|
|
if (use_after_return_class < 5
|
|
|
|
&& can_store_by_pieces (sz, builtin_memset_read_str, &c,
|
|
|
|
BITS_PER_UNIT, true))
|
|
|
|
store_by_pieces (shadow_mem, sz, builtin_memset_read_str, &c,
|
|
|
|
BITS_PER_UNIT, true, 0);
|
|
|
|
else if (use_after_return_class >= 5
|
|
|
|
|| !set_storage_via_setmem (shadow_mem,
|
|
|
|
GEN_INT (sz),
|
|
|
|
gen_int_mode (c, QImode),
|
|
|
|
BITS_PER_UNIT, BITS_PER_UNIT,
|
|
|
|
-1, sz, sz, sz))
|
|
|
|
{
|
|
|
|
snprintf (buf, sizeof buf, "__asan_stack_free_%d",
|
|
|
|
use_after_return_class);
|
|
|
|
ret = init_one_libfunc (buf);
|
|
|
|
rtx addr = convert_memory_address (ptr_mode, base);
|
|
|
|
rtx orig_addr = convert_memory_address (ptr_mode, orig_base);
|
|
|
|
emit_library_call (ret, LCT_NORMAL, ptr_mode, 3, addr, ptr_mode,
|
|
|
|
GEN_INT (asan_frame_size + base_align_bias),
|
|
|
|
TYPE_MODE (pointer_sized_int_node),
|
|
|
|
orig_addr, ptr_mode);
|
|
|
|
}
|
|
|
|
lab = gen_label_rtx ();
|
|
|
|
emit_jump (lab);
|
|
|
|
emit_label (lab2);
|
|
|
|
}
|
|
|
|
|
Implement protection of stack variables
This patch implements the protection of stack variables.
It lays out stack variables as well as the different red zones,
emits some prologue code to populate the shadow memory as to poison
(mark as non-accessible) the regions of the red zones and mark the
regions of stack variables as accessible, and emit some epilogue code
to un-poison (mark as accessible) the regions of red zones right
before the function exits.
* Makefile.in (asan.o): Depend on $(EXPR_H) $(OPTABS_H).
(cfgexpand.o): Depend on asan.h.
* asan.c: Include expr.h and optabs.h.
(asan_shadow_set): New variable.
(asan_shadow_cst, asan_emit_stack_protection): New functions.
(asan_init_shadow_ptr_types): Initialize also asan_shadow_set.
* cfgexpand.c: Include asan.h. Define HOST_WIDE_INT heap vector.
(partition_stack_vars): If i is large alignment and j small
alignment or vice versa, break out of the loop instead of continue,
and put the test earlier. If flag_asan, break out of the loop
if for small alignment size is different.
(struct stack_vars_data): New type.
(expand_stack_vars): Add DATA argument. Change PRED type to
function taking size_t argument instead of tree. Adjust pred
calls. Fill DATA in and add needed padding in between variables
if -faddress-sanitizer.
(defer_stack_allocation): Defer everything for flag_asan.
(stack_protect_decl_phase_1, stack_protect_decl_phase_2): Take
size_t index into stack_vars array instead of the decl directly.
(asan_decl_phase_3): New function.
(expand_used_vars): Return var destruction sequence. Adjust
expand_stack_vars calls, add another one for flag_asan. Call
asan_emit_stack_protection if expand_stack_vars added anything
to the vectors.
(expand_gimple_basic_block): Add disable_tail_calls argument.
(gimple_expand_cfg): Pass true to it if expand_used_vars returned
non-NULL. Emit the sequence returned by expand_used_vars after
return_label.
* asan.h (asan_emit_stack_protection): New prototype.
(asan_shadow_set): New decl.
(ASAN_RED_ZONE_SIZE, ASAN_STACK_MAGIC_LEFT, ASAN_STACK_MAGIC_MIDDLE,
ASAN_STACK_MAGIC_RIGHT, ASAN_STACK_FRAME_MAGIC): Define.
(asan_protect_stack_decl): New inline.
* toplev.c (process_options): Also disable -faddress-sanitizer on
!FRAME_GROWS_DOWNWARDS targets.
From-SVN: r193436
2012-11-12 16:52:26 +01:00
|
|
|
shadow_mem = gen_rtx_MEM (BLKmode, shadow_base);
|
|
|
|
set_mem_alias_set (shadow_mem, asan_shadow_set);
|
2014-04-22 07:55:40 +02:00
|
|
|
|
|
|
|
if (STRICT_ALIGNMENT)
|
|
|
|
set_mem_align (shadow_mem, (GET_MODE_ALIGNMENT (SImode)));
|
|
|
|
|
2016-11-07 11:23:38 +01:00
|
|
|
/* Unpoison shadow memory of a stack at the very end of a function.
|
|
|
|
As we're poisoning stack variables at the end of their scope,
|
|
|
|
shadow memory must be properly unpoisoned here. The easiest approach
|
|
|
|
would be to collect all variables that should not be unpoisoned and
|
|
|
|
we unpoison shadow memory of the whole stack except ranges
|
|
|
|
occupied by these variables. */
|
Implement protection of stack variables
This patch implements the protection of stack variables.
It lays out stack variables as well as the different red zones,
emits some prologue code to populate the shadow memory as to poison
(mark as non-accessible) the regions of the red zones and mark the
regions of stack variables as accessible, and emit some epilogue code
to un-poison (mark as accessible) the regions of red zones right
before the function exits.
* Makefile.in (asan.o): Depend on $(EXPR_H) $(OPTABS_H).
(cfgexpand.o): Depend on asan.h.
* asan.c: Include expr.h and optabs.h.
(asan_shadow_set): New variable.
(asan_shadow_cst, asan_emit_stack_protection): New functions.
(asan_init_shadow_ptr_types): Initialize also asan_shadow_set.
* cfgexpand.c: Include asan.h. Define HOST_WIDE_INT heap vector.
(partition_stack_vars): If i is large alignment and j small
alignment or vice versa, break out of the loop instead of continue,
and put the test earlier. If flag_asan, break out of the loop
if for small alignment size is different.
(struct stack_vars_data): New type.
(expand_stack_vars): Add DATA argument. Change PRED type to
function taking size_t argument instead of tree. Adjust pred
calls. Fill DATA in and add needed padding in between variables
if -faddress-sanitizer.
(defer_stack_allocation): Defer everything for flag_asan.
(stack_protect_decl_phase_1, stack_protect_decl_phase_2): Take
size_t index into stack_vars array instead of the decl directly.
(asan_decl_phase_3): New function.
(expand_used_vars): Return var destruction sequence. Adjust
expand_stack_vars calls, add another one for flag_asan. Call
asan_emit_stack_protection if expand_stack_vars added anything
to the vectors.
(expand_gimple_basic_block): Add disable_tail_calls argument.
(gimple_expand_cfg): Pass true to it if expand_used_vars returned
non-NULL. Emit the sequence returned by expand_used_vars after
return_label.
* asan.h (asan_emit_stack_protection): New prototype.
(asan_shadow_set): New decl.
(ASAN_RED_ZONE_SIZE, ASAN_STACK_MAGIC_LEFT, ASAN_STACK_MAGIC_MIDDLE,
ASAN_STACK_MAGIC_RIGHT, ASAN_STACK_FRAME_MAGIC): Define.
(asan_protect_stack_decl): New inline.
* toplev.c (process_options): Also disable -faddress-sanitizer on
!FRAME_GROWS_DOWNWARDS targets.
From-SVN: r193436
2012-11-12 16:52:26 +01:00
|
|
|
last_offset = base_offset;
|
2016-11-07 11:23:38 +01:00
|
|
|
HOST_WIDE_INT current_offset = last_offset;
|
|
|
|
if (length)
|
Implement protection of stack variables
This patch implements the protection of stack variables.
It lays out stack variables as well as the different red zones,
emits some prologue code to populate the shadow memory as to poison
(mark as non-accessible) the regions of the red zones and mark the
regions of stack variables as accessible, and emit some epilogue code
to un-poison (mark as accessible) the regions of red zones right
before the function exits.
* Makefile.in (asan.o): Depend on $(EXPR_H) $(OPTABS_H).
(cfgexpand.o): Depend on asan.h.
* asan.c: Include expr.h and optabs.h.
(asan_shadow_set): New variable.
(asan_shadow_cst, asan_emit_stack_protection): New functions.
(asan_init_shadow_ptr_types): Initialize also asan_shadow_set.
* cfgexpand.c: Include asan.h. Define HOST_WIDE_INT heap vector.
(partition_stack_vars): If i is large alignment and j small
alignment or vice versa, break out of the loop instead of continue,
and put the test earlier. If flag_asan, break out of the loop
if for small alignment size is different.
(struct stack_vars_data): New type.
(expand_stack_vars): Add DATA argument. Change PRED type to
function taking size_t argument instead of tree. Adjust pred
calls. Fill DATA in and add needed padding in between variables
if -faddress-sanitizer.
(defer_stack_allocation): Defer everything for flag_asan.
(stack_protect_decl_phase_1, stack_protect_decl_phase_2): Take
size_t index into stack_vars array instead of the decl directly.
(asan_decl_phase_3): New function.
(expand_used_vars): Return var destruction sequence. Adjust
expand_stack_vars calls, add another one for flag_asan. Call
asan_emit_stack_protection if expand_stack_vars added anything
to the vectors.
(expand_gimple_basic_block): Add disable_tail_calls argument.
(gimple_expand_cfg): Pass true to it if expand_used_vars returned
non-NULL. Emit the sequence returned by expand_used_vars after
return_label.
* asan.h (asan_emit_stack_protection): New prototype.
(asan_shadow_set): New decl.
(ASAN_RED_ZONE_SIZE, ASAN_STACK_MAGIC_LEFT, ASAN_STACK_MAGIC_MIDDLE,
ASAN_STACK_MAGIC_RIGHT, ASAN_STACK_FRAME_MAGIC): Define.
(asan_protect_stack_decl): New inline.
* toplev.c (process_options): Also disable -faddress-sanitizer on
!FRAME_GROWS_DOWNWARDS targets.
From-SVN: r193436
2012-11-12 16:52:26 +01:00
|
|
|
{
|
2016-11-07 11:23:38 +01:00
|
|
|
HOST_WIDE_INT var_end_offset = 0;
|
|
|
|
HOST_WIDE_INT stack_start = offsets[length - 1];
|
|
|
|
gcc_assert (last_offset == stack_start);
|
|
|
|
|
|
|
|
for (int l = length - 2; l > 0; l -= 2)
|
Implement protection of stack variables
This patch implements the protection of stack variables.
It lays out stack variables as well as the different red zones,
emits some prologue code to populate the shadow memory as to poison
(mark as non-accessible) the regions of the red zones and mark the
regions of stack variables as accessible, and emit some epilogue code
to un-poison (mark as accessible) the regions of red zones right
before the function exits.
* Makefile.in (asan.o): Depend on $(EXPR_H) $(OPTABS_H).
(cfgexpand.o): Depend on asan.h.
* asan.c: Include expr.h and optabs.h.
(asan_shadow_set): New variable.
(asan_shadow_cst, asan_emit_stack_protection): New functions.
(asan_init_shadow_ptr_types): Initialize also asan_shadow_set.
* cfgexpand.c: Include asan.h. Define HOST_WIDE_INT heap vector.
(partition_stack_vars): If i is large alignment and j small
alignment or vice versa, break out of the loop instead of continue,
and put the test earlier. If flag_asan, break out of the loop
if for small alignment size is different.
(struct stack_vars_data): New type.
(expand_stack_vars): Add DATA argument. Change PRED type to
function taking size_t argument instead of tree. Adjust pred
calls. Fill DATA in and add needed padding in between variables
if -faddress-sanitizer.
(defer_stack_allocation): Defer everything for flag_asan.
(stack_protect_decl_phase_1, stack_protect_decl_phase_2): Take
size_t index into stack_vars array instead of the decl directly.
(asan_decl_phase_3): New function.
(expand_used_vars): Return var destruction sequence. Adjust
expand_stack_vars calls, add another one for flag_asan. Call
asan_emit_stack_protection if expand_stack_vars added anything
to the vectors.
(expand_gimple_basic_block): Add disable_tail_calls argument.
(gimple_expand_cfg): Pass true to it if expand_used_vars returned
non-NULL. Emit the sequence returned by expand_used_vars after
return_label.
* asan.h (asan_emit_stack_protection): New prototype.
(asan_shadow_set): New decl.
(ASAN_RED_ZONE_SIZE, ASAN_STACK_MAGIC_LEFT, ASAN_STACK_MAGIC_MIDDLE,
ASAN_STACK_MAGIC_RIGHT, ASAN_STACK_FRAME_MAGIC): Define.
(asan_protect_stack_decl): New inline.
* toplev.c (process_options): Also disable -faddress-sanitizer on
!FRAME_GROWS_DOWNWARDS targets.
From-SVN: r193436
2012-11-12 16:52:26 +01:00
|
|
|
{
|
2016-11-07 11:23:38 +01:00
|
|
|
HOST_WIDE_INT var_offset = offsets[l];
|
|
|
|
current_offset = var_offset;
|
|
|
|
var_end_offset = offsets[l - 1];
|
|
|
|
HOST_WIDE_INT rounded_size = ROUND_UP (var_end_offset - var_offset,
|
|
|
|
BITS_PER_UNIT);
|
|
|
|
|
|
|
|
/* Should we unpoison the variable? */
|
|
|
|
if (asan_handled_variables != NULL
|
|
|
|
&& asan_handled_variables->contains (decl))
|
|
|
|
{
|
|
|
|
if (dump_file && (dump_flags & TDF_DETAILS))
|
|
|
|
{
|
|
|
|
const char *n = (DECL_NAME (decl)
|
|
|
|
? IDENTIFIER_POINTER (DECL_NAME (decl))
|
|
|
|
: "<unknown>");
|
|
|
|
fprintf (dump_file, "Unpoisoning shadow stack for variable: "
|
|
|
|
"%s (%" PRId64 "B)\n", n,
|
|
|
|
var_end_offset - var_offset);
|
|
|
|
}
|
|
|
|
|
|
|
|
unsigned HOST_WIDE_INT s
|
|
|
|
= shadow_mem_size (current_offset - last_offset);
|
|
|
|
asan_clear_shadow (shadow_mem, s);
|
|
|
|
HOST_WIDE_INT shift
|
|
|
|
= shadow_mem_size (current_offset - last_offset + rounded_size);
|
|
|
|
shadow_mem = adjust_address (shadow_mem, VOIDmode, shift);
|
|
|
|
last_offset = var_offset + rounded_size;
|
|
|
|
current_offset = last_offset;
|
|
|
|
}
|
|
|
|
|
Implement protection of stack variables
This patch implements the protection of stack variables.
It lays out stack variables as well as the different red zones,
emits some prologue code to populate the shadow memory so as to poison
(mark as non-accessible) the regions of the red zones and mark the
regions of stack variables as accessible, and emits some epilogue code
to un-poison (mark as accessible) the regions of red zones right
before the function exits.
* Makefile.in (asan.o): Depend on $(EXPR_H) $(OPTABS_H).
(cfgexpand.o): Depend on asan.h.
* asan.c: Include expr.h and optabs.h.
(asan_shadow_set): New variable.
(asan_shadow_cst, asan_emit_stack_protection): New functions.
(asan_init_shadow_ptr_types): Initialize also asan_shadow_set.
* cfgexpand.c: Include asan.h. Define HOST_WIDE_INT heap vector.
(partition_stack_vars): If i is large alignment and j small
alignment or vice versa, break out of the loop instead of continue,
and put the test earlier. If flag_asan, break out of the loop
if for small alignment size is different.
(struct stack_vars_data): New type.
(expand_stack_vars): Add DATA argument. Change PRED type to
function taking size_t argument instead of tree. Adjust pred
calls. Fill DATA in and add needed padding in between variables
if -faddress-sanitizer.
(defer_stack_allocation): Defer everything for flag_asan.
(stack_protect_decl_phase_1, stack_protect_decl_phase_2): Take
size_t index into stack_vars array instead of the decl directly.
(asan_decl_phase_3): New function.
(expand_used_vars): Return var destruction sequence. Adjust
expand_stack_vars calls, add another one for flag_asan. Call
asan_emit_stack_protection if expand_stack_vars added anything
to the vectors.
(expand_gimple_basic_block): Add disable_tail_calls argument.
(gimple_expand_cfg): Pass true to it if expand_used_vars returned
non-NULL. Emit the sequence returned by expand_used_vars after
return_label.
* asan.h (asan_emit_stack_protection): New prototype.
(asan_shadow_set): New decl.
(ASAN_RED_ZONE_SIZE, ASAN_STACK_MAGIC_LEFT, ASAN_STACK_MAGIC_MIDDLE,
ASAN_STACK_MAGIC_RIGHT, ASAN_STACK_FRAME_MAGIC): Define.
(asan_protect_stack_decl): New inline.
* toplev.c (process_options): Also disable -faddress-sanitizer on
!FRAME_GROWS_DOWNWARDS targets.
From-SVN: r193436
2012-11-12 16:52:26 +01:00
|
|
|
}
|
2016-11-07 11:23:38 +01:00
|
|
|
|
|
|
|
/* Handle last redzone. */
|
|
|
|
current_offset = offsets[0];
|
|
|
|
asan_clear_shadow (shadow_mem,
|
|
|
|
shadow_mem_size (current_offset - last_offset));
|
Implement protection of stack variables
This patch implements the protection of stack variables.
It lays out stack variables as well as the different red zones,
emits some prologue code to populate the shadow memory as to poison
(mark as non-accessible) the regions of the red zones and mark the
regions of stack variables as accessible, and emit some epilogue code
to un-poison (mark as accessible) the regions of red zones right
before the function exits.
* Makefile.in (asan.o): Depend on $(EXPR_H) $(OPTABS_H).
(cfgexpand.o): Depend on asan.h.
* asan.c: Include expr.h and optabs.h.
(asan_shadow_set): New variable.
(asan_shadow_cst, asan_emit_stack_protection): New functions.
(asan_init_shadow_ptr_types): Initialize also asan_shadow_set.
* cfgexpand.c: Include asan.h. Define HOST_WIDE_INT heap vector.
(partition_stack_vars): If i is large alignment and j small
alignment or vice versa, break out of the loop instead of continue,
and put the test earlier. If flag_asan, break out of the loop
if for small alignment size is different.
(struct stack_vars_data): New type.
(expand_stack_vars): Add DATA argument. Change PRED type to
function taking size_t argument instead of tree. Adjust pred
calls. Fill DATA in and add needed padding in between variables
if -faddress-sanitizer.
(defer_stack_allocation): Defer everything for flag_asan.
(stack_protect_decl_phase_1, stack_protect_decl_phase_2): Take
size_t index into stack_vars array instead of the decl directly.
(asan_decl_phase_3): New function.
(expand_used_vars): Return var destruction sequence. Adjust
expand_stack_vars calls, add another one for flag_asan. Call
asan_emit_stack_protection if expand_stack_vars added anything
to the vectors.
(expand_gimple_basic_block): Add disable_tail_calls argument.
(gimple_expand_cfg): Pass true to it if expand_used_vars returned
non-NULL. Emit the sequence returned by expand_used_vars after
return_label.
* asan.h (asan_emit_stack_protection): New prototype.
(asan_shadow_set): New decl.
(ASAN_RED_ZONE_SIZE, ASAN_STACK_MAGIC_LEFT, ASAN_STACK_MAGIC_MIDDLE,
ASAN_STACK_MAGIC_RIGHT, ASAN_STACK_FRAME_MAGIC): Define.
(asan_protect_stack_decl): New inline.
* toplev.c (process_options): Also disable -faddress-sanitizer on
!FRAME_GROWS_DOWNWARDS targets.
From-SVN: r193436
2012-11-12 16:52:26 +01:00
|
|
|
}
|
|
|
|
|
2016-11-07 11:23:38 +01:00
|
|
|
/* Clean-up set with instrumented stack variables. */
|
|
|
|
delete asan_handled_variables;
|
|
|
|
asan_handled_variables = NULL;
|
|
|
|
delete asan_used_labels;
|
|
|
|
asan_used_labels = NULL;
|
|
|
|
|
Implement protection of stack variables
This patch implements the protection of stack variables.
It lays out stack variables as well as the different red zones,
emits some prologue code to populate the shadow memory as to poison
(mark as non-accessible) the regions of the red zones and mark the
regions of stack variables as accessible, and emit some epilogue code
to un-poison (mark as accessible) the regions of red zones right
before the function exits.
* Makefile.in (asan.o): Depend on $(EXPR_H) $(OPTABS_H).
(cfgexpand.o): Depend on asan.h.
* asan.c: Include expr.h and optabs.h.
(asan_shadow_set): New variable.
(asan_shadow_cst, asan_emit_stack_protection): New functions.
(asan_init_shadow_ptr_types): Initialize also asan_shadow_set.
* cfgexpand.c: Include asan.h. Define HOST_WIDE_INT heap vector.
(partition_stack_vars): If i is large alignment and j small
alignment or vice versa, break out of the loop instead of continue,
and put the test earlier. If flag_asan, break out of the loop
if for small alignment size is different.
(struct stack_vars_data): New type.
(expand_stack_vars): Add DATA argument. Change PRED type to
function taking size_t argument instead of tree. Adjust pred
calls. Fill DATA in and add needed padding in between variables
if -faddress-sanitizer.
(defer_stack_allocation): Defer everything for flag_asan.
(stack_protect_decl_phase_1, stack_protect_decl_phase_2): Take
size_t index into stack_vars array instead of the decl directly.
(asan_decl_phase_3): New function.
(expand_used_vars): Return var destruction sequence. Adjust
expand_stack_vars calls, add another one for flag_asan. Call
asan_emit_stack_protection if expand_stack_vars added anything
to the vectors.
(expand_gimple_basic_block): Add disable_tail_calls argument.
(gimple_expand_cfg): Pass true to it if expand_used_vars returned
non-NULL. Emit the sequence returned by expand_used_vars after
return_label.
* asan.h (asan_emit_stack_protection): New prototype.
(asan_shadow_set): New decl.
(ASAN_RED_ZONE_SIZE, ASAN_STACK_MAGIC_LEFT, ASAN_STACK_MAGIC_MIDDLE,
ASAN_STACK_MAGIC_RIGHT, ASAN_STACK_FRAME_MAGIC): Define.
(asan_protect_stack_decl): New inline.
* toplev.c (process_options): Also disable -faddress-sanitizer on
!FRAME_GROWS_DOWNWARDS targets.
From-SVN: r193436
2012-11-12 16:52:26 +01:00
|
|
|
do_pending_stack_adjust ();
|
2013-11-28 09:18:59 +01:00
|
|
|
if (lab)
|
|
|
|
emit_label (lab);
|
Implement protection of stack variables
This patch implements the protection of stack variables.
It lays out stack variables as well as the different red zones,
emits some prologue code to populate the shadow memory as to poison
(mark as non-accessible) the regions of the red zones and mark the
regions of stack variables as accessible, and emit some epilogue code
to un-poison (mark as accessible) the regions of red zones right
before the function exits.
* Makefile.in (asan.o): Depend on $(EXPR_H) $(OPTABS_H).
(cfgexpand.o): Depend on asan.h.
* asan.c: Include expr.h and optabs.h.
(asan_shadow_set): New variable.
(asan_shadow_cst, asan_emit_stack_protection): New functions.
(asan_init_shadow_ptr_types): Initialize also asan_shadow_set.
* cfgexpand.c: Include asan.h. Define HOST_WIDE_INT heap vector.
(partition_stack_vars): If i is large alignment and j small
alignment or vice versa, break out of the loop instead of continue,
and put the test earlier. If flag_asan, break out of the loop
if for small alignment size is different.
(struct stack_vars_data): New type.
(expand_stack_vars): Add DATA argument. Change PRED type to
function taking size_t argument instead of tree. Adjust pred
calls. Fill DATA in and add needed padding in between variables
if -faddress-sanitizer.
(defer_stack_allocation): Defer everything for flag_asan.
(stack_protect_decl_phase_1, stack_protect_decl_phase_2): Take
size_t index into stack_vars array instead of the decl directly.
(asan_decl_phase_3): New function.
(expand_used_vars): Return var destruction sequence. Adjust
expand_stack_vars calls, add another one for flag_asan. Call
asan_emit_stack_protection if expand_stack_vars added anything
to the vectors.
(expand_gimple_basic_block): Add disable_tail_calls argument.
(gimple_expand_cfg): Pass true to it if expand_used_vars returned
non-NULL. Emit the sequence returned by expand_used_vars after
return_label.
* asan.h (asan_emit_stack_protection): New prototype.
(asan_shadow_set): New decl.
(ASAN_RED_ZONE_SIZE, ASAN_STACK_MAGIC_LEFT, ASAN_STACK_MAGIC_MIDDLE,
ASAN_STACK_MAGIC_RIGHT, ASAN_STACK_FRAME_MAGIC): Define.
(asan_protect_stack_decl): New inline.
* toplev.c (process_options): Also disable -faddress-sanitizer on
!FRAME_GROWS_DOWNWARDS targets.
From-SVN: r193436
2012-11-12 16:52:26 +01:00
|
|
|
|
2014-08-19 21:48:08 +02:00
|
|
|
insns = get_insns ();
|
Implement protection of stack variables
This patch implements the protection of stack variables.
It lays out stack variables as well as the different red zones,
emits some prologue code to populate the shadow memory as to poison
(mark as non-accessible) the regions of the red zones and mark the
regions of stack variables as accessible, and emit some epilogue code
to un-poison (mark as accessible) the regions of red zones right
before the function exits.
* Makefile.in (asan.o): Depend on $(EXPR_H) $(OPTABS_H).
(cfgexpand.o): Depend on asan.h.
* asan.c: Include expr.h and optabs.h.
(asan_shadow_set): New variable.
(asan_shadow_cst, asan_emit_stack_protection): New functions.
(asan_init_shadow_ptr_types): Initialize also asan_shadow_set.
* cfgexpand.c: Include asan.h. Define HOST_WIDE_INT heap vector.
(partition_stack_vars): If i is large alignment and j small
alignment or vice versa, break out of the loop instead of continue,
and put the test earlier. If flag_asan, break out of the loop
if for small alignment size is different.
(struct stack_vars_data): New type.
(expand_stack_vars): Add DATA argument. Change PRED type to
function taking size_t argument instead of tree. Adjust pred
calls. Fill DATA in and add needed padding in between variables
if -faddress-sanitizer.
(defer_stack_allocation): Defer everything for flag_asan.
(stack_protect_decl_phase_1, stack_protect_decl_phase_2): Take
size_t index into stack_vars array instead of the decl directly.
(asan_decl_phase_3): New function.
(expand_used_vars): Return var destruction sequence. Adjust
expand_stack_vars calls, add another one for flag_asan. Call
asan_emit_stack_protection if expand_stack_vars added anything
to the vectors.
(expand_gimple_basic_block): Add disable_tail_calls argument.
(gimple_expand_cfg): Pass true to it if expand_used_vars returned
non-NULL. Emit the sequence returned by expand_used_vars after
return_label.
* asan.h (asan_emit_stack_protection): New prototype.
(asan_shadow_set): New decl.
(ASAN_RED_ZONE_SIZE, ASAN_STACK_MAGIC_LEFT, ASAN_STACK_MAGIC_MIDDLE,
ASAN_STACK_MAGIC_RIGHT, ASAN_STACK_FRAME_MAGIC): Define.
(asan_protect_stack_decl): New inline.
* toplev.c (process_options): Also disable -faddress-sanitizer on
!FRAME_GROWS_DOWNWARDS targets.
From-SVN: r193436
2012-11-12 16:52:26 +01:00
|
|
|
end_sequence ();
|
2014-08-19 21:48:08 +02:00
|
|
|
return insns;
|
Implement protection of stack variables
This patch implements the protection of stack variables.
It lays out stack variables as well as the different red zones,
emits some prologue code to populate the shadow memory as to poison
(mark as non-accessible) the regions of the red zones and mark the
regions of stack variables as accessible, and emit some epilogue code
to un-poison (mark as accessible) the regions of red zones right
before the function exits.
* Makefile.in (asan.o): Depend on $(EXPR_H) $(OPTABS_H).
(cfgexpand.o): Depend on asan.h.
* asan.c: Include expr.h and optabs.h.
(asan_shadow_set): New variable.
(asan_shadow_cst, asan_emit_stack_protection): New functions.
(asan_init_shadow_ptr_types): Initialize also asan_shadow_set.
* cfgexpand.c: Include asan.h. Define HOST_WIDE_INT heap vector.
(partition_stack_vars): If i is large alignment and j small
alignment or vice versa, break out of the loop instead of continue,
and put the test earlier. If flag_asan, break out of the loop
if for small alignment size is different.
(struct stack_vars_data): New type.
(expand_stack_vars): Add DATA argument. Change PRED type to
function taking size_t argument instead of tree. Adjust pred
calls. Fill DATA in and add needed padding in between variables
if -faddress-sanitizer.
(defer_stack_allocation): Defer everything for flag_asan.
(stack_protect_decl_phase_1, stack_protect_decl_phase_2): Take
size_t index into stack_vars array instead of the decl directly.
(asan_decl_phase_3): New function.
(expand_used_vars): Return var destruction sequence. Adjust
expand_stack_vars calls, add another one for flag_asan. Call
asan_emit_stack_protection if expand_stack_vars added anything
to the vectors.
(expand_gimple_basic_block): Add disable_tail_calls argument.
(gimple_expand_cfg): Pass true to it if expand_used_vars returned
non-NULL. Emit the sequence returned by expand_used_vars after
return_label.
* asan.h (asan_emit_stack_protection): New prototype.
(asan_shadow_set): New decl.
(ASAN_RED_ZONE_SIZE, ASAN_STACK_MAGIC_LEFT, ASAN_STACK_MAGIC_MIDDLE,
ASAN_STACK_MAGIC_RIGHT, ASAN_STACK_FRAME_MAGIC): Define.
(asan_protect_stack_decl): New inline.
* toplev.c (process_options): Also disable -faddress-sanitizer on
!FRAME_GROWS_DOWNWARDS targets.
From-SVN: r193436
2012-11-12 16:52:26 +01:00
|
|
|
}
|
|
|
|
|
Implement protection of global variables
This patch implements the protection of global variables. See the
comments appended to the beginning of the asan.c file.
* varasm.c: Include asan.h.
(assemble_noswitch_variable): Grow size by asan_red_zone_size
if decl is asan protected.
(place_block_symbol): Likewise.
(assemble_variable): If decl is asan protected, increase
DECL_ALIGN if needed, and for decls emitted using
assemble_variable_contents append padding zeros after it.
* Makefile.in (varasm.o): Depend on asan.h.
* asan.c: Include output.h.
(asan_pp, asan_pp_initialized, asan_ctor_statements): New variables.
(asan_pp_initialize, asan_pp_string): New functions.
(asan_emit_stack_protection): Use asan_pp{,_initialized}
instead of local pp{,_initialized} vars, use asan_pp_initialize
and asan_pp_string helpers.
(asan_needs_local_alias, asan_protect_global,
asan_global_struct, asan_add_global): New functions.
(asan_finish_file): Protect global vars that can be protected. Use
asan_ctor_statements instead of ctor_statements
* asan.h (asan_protect_global): New prototype.
(asan_red_zone_size): New inline function.
Co-Authored-By: Wei Mi <wmi@google.com>
From-SVN: r193437
2012-11-12 16:52:42 +01:00
|
|
|
/* Return true if DECL, a global var, might be overridden and needs
|
|
|
|
therefore a local alias. */
|
|
|
|
|
|
|
|
static bool
|
|
|
|
asan_needs_local_alias (tree decl)
|
|
|
|
{
|
|
|
|
return DECL_WEAK (decl) || !targetm.binds_local_p (decl);
|
|
|
|
}
|
|
|
|
|
2016-12-02 08:39:27 +01:00
|
|
|
/* Return true if DECL, a global var, is an artificial ODR indicator symbol
|
|
|
|
therefore doesn't need protection. */
|
|
|
|
|
|
|
|
static bool
|
|
|
|
is_odr_indicator (tree decl)
|
|
|
|
{
|
|
|
|
return (DECL_ARTIFICIAL (decl)
|
|
|
|
&& lookup_attribute ("asan odr indicator", DECL_ATTRIBUTES (decl)));
|
|
|
|
}
|
|
|
|
|
Implement protection of global variables
This patch implements the protection of global variables. See the
comments appended to the beginning of the asan.c file.
* varasm.c: Include asan.h.
(assemble_noswitch_variable): Grow size by asan_red_zone_size
if decl is asan protected.
(place_block_symbol): Likewise.
(assemble_variable): If decl is asan protected, increase
DECL_ALIGN if needed, and for decls emitted using
assemble_variable_contents append padding zeros after it.
* Makefile.in (varasm.o): Depend on asan.h.
* asan.c: Include output.h.
(asan_pp, asan_pp_initialized, asan_ctor_statements): New variables.
(asan_pp_initialize, asan_pp_string): New functions.
(asan_emit_stack_protection): Use asan_pp{,_initialized}
instead of local pp{,_initialized} vars, use asan_pp_initialize
and asan_pp_string helpers.
(asan_needs_local_alias, asan_protect_global,
asan_global_struct, asan_add_global): New functions.
(asan_finish_file): Protect global vars that can be protected. Use
asan_ctor_statements instead of ctor_statements
* asan.h (asan_protect_global): New prototype.
(asan_red_zone_size): New inline function.
Co-Authored-By: Wei Mi <wmi@google.com>
From-SVN: r193437
2012-11-12 16:52:42 +01:00
|
|
|
/* Return true if DECL is a VAR_DECL that should be protected
|
|
|
|
by Address Sanitizer, by appending a red zone with protected
|
|
|
|
shadow memory after it and aligning it to at least
|
|
|
|
ASAN_RED_ZONE_SIZE bytes. */
|
|
|
|
|
|
|
|
bool
|
|
|
|
asan_protect_global (tree decl)
|
|
|
|
{
|
2014-01-09 08:31:05 +01:00
|
|
|
if (!ASAN_GLOBALS)
|
|
|
|
return false;
|
|
|
|
|
Implement protection of global variables
This patch implements the protection of global variables. See the
comments appended to the beginning of the asan.c file.
* varasm.c: Include asan.h.
(assemble_noswitch_variable): Grow size by asan_red_zone_size
if decl is asan protected.
(place_block_symbol): Likewise.
(assemble_variable): If decl is asan protected, increase
DECL_ALIGN if needed, and for decls emitted using
assemble_variable_contents append padding zeros after it.
* Makefile.in (varasm.o): Depend on asan.h.
* asan.c: Include output.h.
(asan_pp, asan_pp_initialized, asan_ctor_statements): New variables.
(asan_pp_initialize, asan_pp_string): New functions.
(asan_emit_stack_protection): Use asan_pp{,_initialized}
instead of local pp{,_initialized} vars, use asan_pp_initialize
and asan_pp_string helpers.
(asan_needs_local_alias, asan_protect_global,
asan_global_struct, asan_add_global): New functions.
(asan_finish_file): Protect global vars that can be protected. Use
asan_ctor_statements instead of ctor_statements
* asan.h (asan_protect_global): New prototype.
(asan_red_zone_size): New inline function.
Co-Authored-By: Wei Mi <wmi@google.com>
From-SVN: r193437
2012-11-12 16:52:42 +01:00
|
|
|
rtx rtl, symbol;
|
|
|
|
|
2012-12-10 13:14:36 +01:00
|
|
|
if (TREE_CODE (decl) == STRING_CST)
|
|
|
|
{
|
|
|
|
/* Instrument all STRING_CSTs except those created
|
|
|
|
by asan_pp_string here. */
|
|
|
|
if (shadow_ptr_types[0] != NULL_TREE
|
|
|
|
&& TREE_CODE (TREE_TYPE (decl)) == ARRAY_TYPE
|
|
|
|
&& TREE_TYPE (TREE_TYPE (decl)) == TREE_TYPE (shadow_ptr_types[0]))
|
|
|
|
return false;
|
|
|
|
return true;
|
|
|
|
}
|
tree-ssa.c (target_for_debug_bind, [...]): Use VAR_P and/or VAR_OR_FUNCTION_DECL_P macros.
* tree-ssa.c (target_for_debug_bind, verify_phi_args,
ssa_undefined_value_p, maybe_optimize_var): Use VAR_P and/or
VAR_OR_FUNCTION_DECL_P macros.
* tree-chkp.c (chkp_register_var_initializer, chkp_make_static_bounds,
chkp_get_bounds_for_decl_addr, chkp_parse_array_and_component_ref,
chkp_find_bounds_1): Likewise.
* ipa-polymorphic-call.c (decl_maybe_in_construction_p): Likewise.
* hsa-gen.c (get_symbol_for_decl): Likewise.
* cgraphunit.c (check_global_declaration, analyze_functions,
handle_alias_pairs, thunk_adjust, cgraph_node::expand_thunk):
Likewise.
* gimple-fold.c (can_refer_decl_in_current_unit_p,
canonicalize_constructor_val, gimple_get_virt_method_for_vtable):
Likewise.
* tree.c (set_decl_section_name, copy_node_stat,
need_assembler_name_p, free_lang_data_in_decl, find_decls_types_r,
merge_dllimport_decl_attributes, handle_dll_attribute,
decl_init_priority_insert, auto_var_in_fn_p, array_at_struct_end_p,
verify_type): Likewise.
* gimple-ssa-isolate-paths.c (find_implicit_erroneous_behavior,
find_explicit_erroneous_behavior): Likewise.
* sdbout.c (sdbout_toplevel_data, sdbout_late_global_decl): Likewise.
* ipa.c (process_references): Likewise.
* tree-chkp-opt.c (chkp_get_check_result): Likewise.
* varasm.c (get_block_for_decl, use_blocks_for_decl_p, make_decl_rtl,
notice_global_symbol, assemble_variable, mark_decl_referenced,
build_constant_desc, output_constant_def_contents, do_assemble_alias,
make_decl_one_only, default_section_type_flags,
categorize_decl_for_section, default_encode_section_info): Likewise.
* trans-mem.c (requires_barrier): Likewise.
* gimple-expr.c (mark_addressable): Likewise.
* cfgexpand.c (add_scope_conflicts_1, expand_one_var,
expand_used_vars_for_block, clear_tree_used, stack_protect_decl_p,
expand_debug_expr): Likewise.
* tree-dump.c (dequeue_and_dump): Likewise.
* ubsan.c (instrument_bool_enum_load): Likewise.
* tree-pretty-print.c (print_declaration): Likewise.
* simplify-rtx.c (delegitimize_mem_from_attrs): Likewise.
* tree-ssa-uninit.c (warn_uninitialized_vars): Likewise.
* asan.c (asan_protect_global, instrument_derefs): Likewise.
* tree-into-ssa.c (rewrite_stmt, maybe_register_def,
pass_build_ssa::execute): Likewise.
* var-tracking.c (var_debug_decl, track_expr_p): Likewise.
* tree-ssa-loop-ivopts.c (force_expr_to_var_cost, split_address_cost):
Likewise.
* ipa-split.c (test_nonssa_use, consider_split, mark_nonssa_use):
Likewise.
* tree-inline.c (insert_debug_decl_map, remap_ssa_name,
can_be_nonlocal, remap_decls, copy_debug_stmt,
initialize_inlined_parameters, add_local_variables,
reset_debug_binding, replace_locals_op): Likewise.
* dse.c (can_escape): Likewise.
* ipa-devirt.c (compare_virtual_tables, referenced_from_vtable_p):
Likewise.
* tree-diagnostic.c (default_tree_printer): Likewise.
* tree-streamer-in.c (unpack_ts_decl_common_value_fields,
unpack_ts_decl_with_vis_value_fields,
lto_input_ts_decl_common_tree_pointers): Likewise.
* builtins.c (builtin_save_expr, fold_builtin_expect,
readonly_data_expr): Likewise.
* tree-ssa-structalias.c (new_var_info, get_constraint_for_ssa_var,
create_variable_info_for, set_uids_in_ptset, visit_loadstore):
Likewise.
* gimple-streamer-out.c (output_gimple_stmt): Likewise.
* gimplify.c (force_constant_size, gimplify_bind_expr,
gimplify_decl_expr, gimplify_var_or_parm_decl,
gimplify_compound_lval, gimplify_init_constructor,
gimplify_modify_expr, gimplify_asm_expr, gimplify_oacc_declare,
gimplify_type_sizes): Likewise.
* cgraphbuild.c (record_reference, record_type_list, mark_address,
mark_load, mark_store, pass_build_cgraph_edges::execute): Likewise.
* tree-ssa-live.c (mark_all_vars_used_1, remove_unused_scope_block_p,
remove_unused_locals): Likewise.
* tree-ssa-alias.c (ptr_deref_may_alias_decl_p, ptrs_compare_unequal,
ref_maybe_used_by_call_p_1, call_may_clobber_ref_p_1): Likewise.
* function.c (instantiate_expr, instantiate_decls_1,
setjmp_vars_warning, add_local_decl): Likewise.
* alias.c (ao_ref_from_mem, get_alias_set, compare_base_symbol_refs):
Likewise.
* tree-stdarg.c (find_va_list_reference, va_list_counter_struct_op,
va_list_ptr_read, va_list_ptr_write, check_all_va_list_escapes,
optimize_va_list_gpr_fpr_size): Likewise.
* tree-nrv.c (pass_nrv::execute): Likewise.
* tsan.c (instrument_expr): Likewise.
* tree-ssa-dce.c (remove_dead_stmt): Likewise.
* vtable-verify.c (verify_bb_vtables): Likewise.
* tree-dfa.c (ssa_default_def, set_ssa_default_def,
get_ref_base_and_extent): Likewise.
* toplev.c (wrapup_global_declaration_1, wrapup_global_declaration_2):
Likewise.
* tree-sra.c (static bool constant_decl_p, find_var_candidates,
analyze_all_variable_accesses): Likewise.
* tree-nested.c (get_nonlocal_debug_decl,
convert_nonlocal_omp_clauses, note_nonlocal_vla_type,
note_nonlocal_block_vlas, convert_nonlocal_reference_stmt,
get_local_debug_decl, convert_local_omp_clauses,
convert_local_reference_stmt, nesting_copy_decl, remap_vla_decls):
Likewise.
* tree-vect-data-refs.c (vect_can_force_dr_alignment_p): Likewise.
* stmt.c (decl_overlaps_hard_reg_set_p): Likewise.
* dbxout.c (dbxout_late_global_decl, dbxout_type_fields,
dbxout_symbol, dbxout_common_check): Likewise.
* expr.c (expand_assignment, expand_expr_real_2, expand_expr_real_1,
string_constant): Likewise.
* hsa.c (hsa_get_declaration_name): Likewise.
* passes.c (rest_of_decl_compilation): Likewise.
* tree-ssanames.c (make_ssa_name_fn): Likewise.
* tree-streamer-out.c (pack_ts_decl_common_value_fields,
pack_ts_decl_with_vis_value_fields,
write_ts_decl_common_tree_pointers): Likewise.
* stor-layout.c (place_field): Likewise.
* symtab.c (symtab_node::maybe_create_reference,
symtab_node::verify_base, symtab_node::make_decl_local,
symtab_node::copy_visibility_from,
symtab_node::can_increase_alignment_p): Likewise.
* dwarf2out.c (add_var_loc_to_decl, tls_mem_loc_descriptor,
decl_by_reference_p, reference_to_unused, rtl_for_decl_location,
fortran_common, add_location_or_const_value_attribute,
add_scalar_info, add_linkage_name, set_block_abstract_flags,
local_function_static, gen_variable_die, dwarf2out_late_global_decl,
optimize_one_addr_into_implicit_ptr,
optimize_location_into_implicit_ptr): Likewise.
* gimple-low.c (record_vars_into): Likewise.
* ipa-visibility.c (update_vtable_references): Likewise.
* tree-ssa-address.c (fixed_address_object_p, copy_ref_info):
Likewise.
* lto-streamer-out.c (tree_is_indexable, get_symbol_initial_value,
DFS::DFS_write_tree_body, write_symbol): Likewise.
* langhooks.c (lhd_warn_unused_global_decl,
lhd_set_decl_assembler_name): Likewise.
* attribs.c (decl_attributes): Likewise.
* except.c (output_ttype): Likewise.
* varpool.c (varpool_node::get_create, ctor_for_folding,
varpool_node::assemble_decl, varpool_node::create_alias): Likewise.
* fold-const.c (fold_unary_loc): Likewise.
* ipa-prop.c (ipa_compute_jump_functions_for_edge,
ipa_find_agg_cst_from_init): Likewise.
* omp-low.c (expand_omp_regimplify_p, expand_omp_taskreg,
expand_omp_target, lower_omp_regimplify_p,
grid_reg_assignment_to_local_var_p, grid_remap_prebody_decls,
find_link_var_op): Likewise.
* tree-chrec.c (chrec_contains_symbols): Likewise.
* tree-cfg.c (verify_address, verify_expr, verify_expr_location_1,
gimple_duplicate_bb, move_stmt_op, replace_block_vars_by_duplicates,
execute_fixup_cfg): Likewise.
From-SVN: r240900
2016-10-09 13:19:48 +02:00
|
|
|
if (!VAR_P (decl)
|
Implement protection of global variables
This patch implements the protection of global variables. See the
comments appended to the beginning of the asan.c file.
* varasm.c: Include asan.h.
(assemble_noswitch_variable): Grow size by asan_red_zone_size
if decl is asan protected.
(place_block_symbol): Likewise.
(assemble_variable): If decl is asan protected, increase
DECL_ALIGN if needed, and for decls emitted using
assemble_variable_contents append padding zeros after it.
* Makefile.in (varasm.o): Depend on asan.h.
* asan.c: Include output.h.
(asan_pp, asan_pp_initialized, asan_ctor_statements): New variables.
(asan_pp_initialize, asan_pp_string): New functions.
(asan_emit_stack_protection): Use asan_pp{,_initialized}
instead of local pp{,_initialized} vars, use asan_pp_initialize
and asan_pp_string helpers.
(asan_needs_local_alias, asan_protect_global,
asan_global_struct, asan_add_global): New functions.
(asan_finish_file): Protect global vars that can be protected. Use
asan_ctor_statements instead of ctor_statements
* asan.h (asan_protect_global): New prototype.
(asan_red_zone_size): New inline function.
Co-Authored-By: Wei Mi <wmi@google.com>
From-SVN: r193437
2012-11-12 16:52:42 +01:00
|
|
|
/* TLS vars aren't statically protectable. */
|
|
|
|
|| DECL_THREAD_LOCAL_P (decl)
|
|
|
|
/* Externs will be protected elsewhere. */
|
|
|
|
|| DECL_EXTERNAL (decl)
|
|
|
|
|| !DECL_RTL_SET_P (decl)
|
|
|
|
/* Comdat vars pose an ABI problem, we can't know if
|
|
|
|
the var that is selected by the linker will have
|
|
|
|
padding or not. */
|
|
|
|
|| DECL_ONE_ONLY (decl)
|
2014-12-02 19:58:59 +01:00
|
|
|
/* Similarly for common vars. People can use -fno-common.
|
|
|
|
Note: Linux kernel is built with -fno-common, so we do instrument
|
|
|
|
globals there even if it is C. */
|
2012-12-11 13:06:07 +01:00
|
|
|
|| (DECL_COMMON (decl) && TREE_PUBLIC (decl))
|
Implement protection of global variables
This patch implements the protection of global variables. See the
comments appended to the beginning of the asan.c file.
* varasm.c: Include asan.h.
(assemble_noswitch_variable): Grow size by asan_red_zone_size
if decl is asan protected.
(place_block_symbol): Likewise.
(assemble_variable): If decl is asan protected, increase
DECL_ALIGN if needed, and for decls emitted using
assemble_variable_contents append padding zeros after it.
* Makefile.in (varasm.o): Depend on asan.h.
* asan.c: Include output.h.
(asan_pp, asan_pp_initialized, asan_ctor_statements): New variables.
(asan_pp_initialize, asan_pp_string): New functions.
(asan_emit_stack_protection): Use asan_pp{,_initialized}
instead of local pp{,_initialized} vars, use asan_pp_initialize
and asan_pp_string helpers.
(asan_needs_local_alias, asan_protect_global,
asan_global_struct, asan_add_global): New functions.
(asan_finish_file): Protect global vars that can be protected. Use
asan_ctor_statements instead of ctor_statements
* asan.h (asan_protect_global): New prototype.
(asan_red_zone_size): New inline function.
Co-Authored-By: Wei Mi <wmi@google.com>
From-SVN: r193437
2012-11-12 16:52:42 +01:00
|
|
|
/* Don't protect if using user section, often vars placed
|
|
|
|
into user section from multiple TUs are then assumed
|
|
|
|
to be an array of such vars, putting padding in there
|
|
|
|
breaks this assumption. */
|
symtab.c (section_hash): New hash.
* symtab.c (section_hash): New hash.
(symtab_unregister_node): Clear section before freeing.
(hash_section_hash_entry): New haser.
(eq_sections): New function.
(symtab_node::set_section_for_node): New method.
(set_section_1): Update.
(symtab_node::set_section): Take string instead of tree as parameter.
(symtab_resolve_alias): Update.
* cgraph.h (section_hash_entry_d): New structure.
(section_hash_entry): New typedef.
(cgraph_node): Change comdat_group_ to x_comdat_group,
change section_ to x_section and turn into section_hash_entry;
update accestors; put set_section_for_node offline.
* tree.c (decl_section_name): Turn into string.
(set_decl_section_name): Change parameter to be string.
* tree.h (decl_section_name, set_decl_section_name): Update prototypes.
* sdbout.c (sdbout_one_type): Update.
* tree-vect-data-refs.c (vect_can_force_dr_alignment_p): Update.
* varasm.c (IN_NAMED_SECTION, get_named_section, resolve_unique_section,
hot_function_section, get_named_text_section, USE_SELECT_SECTION_FOR_FUNCTIONS,
default_function_rodata_section, make_decl_rtl, default_unique_section):
Update.
* config/c6x/c6x.c (c6x_in_small_data_p): Update.
(c6x_elf_unique_section): Update.
* config/nios2/nios2.c (nios2_in_small_data_p): Update.
* config/pa/pa.c (pa_function_section): Update.
* config/pa/pa.h (IN_NAMED_SECTION_P): Update.
* config/ia64/ia64.c (ia64_in_small_data_p): Update.
* config/arc/arc.c (arc_in_small_data_p): Update.
* config/arm/unknown-elf.h (IN_NAMED_SECTION_P): Update.
* config/mcore/mcore.c (mcore_unique_section): Update.
* config/mips/mips.c (mips16_build_function_stub): Update.
(mips16_build_call_stub): Update.
(mips_function_rodata_section): Update.
(mips_in_small_data_p): Update.
* config/score/score.c (score_in_small_data_p): Update.
* config/rx/rx.c (rx_in_small_data): Update.
* config/rs6000/rs6000.c (rs6000_elf_in_small_data_p): Update.
(rs6000_xcoff_asm_named_section): Update.
(rs6000_xcoff_unique_section): Update.
* config/frv/frv.c (frv_string_begins_with): Update.
(frv_in_small_data_p): Update.
* config/v850/v850.c (v850_encode_data_area): Update.
* config/bfin/bfin.c (DECL_SECTION_NAME): Update.
(bfin_handle_l1_data_attribute): Update.
(bfin_handle_l2_attribute): Update.
* config/mep/mep.c (mep_unique_section): Update.
* config/microblaze/microblaze.c (microblaze_elf_in_small_data_p): Update.
* config/h8300/h8300.c (h8300_handle_eightbit_data_attribute): Update.
(h8300_handle_tiny_data_attribute): Update.
* config/m32r/m32r.c (m32r_in_small_data_p): Update.
(m32r_in_small_data_p): Update.
* config/alpha/alpha.c (alpha_in_small_data_p): Update.
* config/i386/i386.c (ix86_in_large_data_p): Update.
* config/i386/winnt.c (i386_pe_unique_section): Update.
* config/darwin.c (darwin_function_section): Update.
* config/lm32/lm32.c (lm32_in_small_data_p): Update.
* tree-emutls.c (get_emutls_init_templ_addr): Update.
(new_emutls_decl): Update.
* lto-cgraph.c (lto_output_node, input_node, input_varpool_node,
input_varpool_node): Update.
(ead_string_cst): Turn to ...
(read_string): ... this one.
* dwarf2out.c (secname_for_decl): Update.
* asan.c (asan_protect_global): Update.
* c-family/c-common.c (handle_section_attribute): Update handling for
section names that are no longer trees.
* java/class.c (build_utf8_ref): Update handling for section names
that are no longer trees.
(emit_register_classes_in_jcr_section): Update.
* vtable-class-hierarchy.c: Update handling for section names
that are no longer trees.
* decl.c (duplicate_decls): Likewise.
* gcc-interface/utils.c: Update handling for section names
that are no longer trees.
From-SVN: r211489
2014-06-12 06:03:49 +02:00
|
|
|
|| (DECL_SECTION_NAME (decl) != NULL
|
2015-04-17 09:51:02 +02:00
|
|
|
&& !symtab_node::get (decl)->implicit_section
|
|
|
|
&& !section_sanitized_p (DECL_SECTION_NAME (decl)))
|
Implement protection of global variables
This patch implements the protection of global variables. See the
comments appended to the beginning of the asan.c file.
* varasm.c: Include asan.h.
(assemble_noswitch_variable): Grow size by asan_red_zone_size
if decl is asan protected.
(place_block_symbol): Likewise.
(assemble_variable): If decl is asan protected, increase
DECL_ALIGN if needed, and for decls emitted using
assemble_variable_contents append padding zeros after it.
* Makefile.in (varasm.o): Depend on asan.h.
* asan.c: Include output.h.
(asan_pp, asan_pp_initialized, asan_ctor_statements): New variables.
(asan_pp_initialize, asan_pp_string): New functions.
(asan_emit_stack_protection): Use asan_pp{,_initialized}
instead of local pp{,_initialized} vars, use asan_pp_initialize
and asan_pp_string helpers.
(asan_needs_local_alias, asan_protect_global,
asan_global_struct, asan_add_global): New functions.
(asan_finish_file): Protect global vars that can be protected. Use
asan_ctor_statements instead of ctor_statements
* asan.h (asan_protect_global): New prototype.
(asan_red_zone_size): New inline function.
Co-Authored-By: Wei Mi <wmi@google.com>
From-SVN: r193437
2012-11-12 16:52:42 +01:00
|
|
|
|| DECL_SIZE (decl) == 0
|
|
|
|
|| ASAN_RED_ZONE_SIZE * BITS_PER_UNIT > MAX_OFILE_ALIGNMENT
|
|
|
|
|| !valid_constant_size_p (DECL_SIZE_UNIT (decl))
|
2014-10-06 09:44:13 +02:00
|
|
|
|| DECL_ALIGN_UNIT (decl) > 2 * ASAN_RED_ZONE_SIZE
|
2016-12-02 08:39:27 +01:00
|
|
|
|| TREE_TYPE (decl) == ubsan_get_source_location_type ()
|
|
|
|
|| is_odr_indicator (decl))
|
Implement protection of global variables
This patch implements the protection of global variables. See the
comments appended to the beginning of the asan.c file.
* varasm.c: Include asan.h.
(assemble_noswitch_variable): Grow size by asan_red_zone_size
if decl is asan protected.
(place_block_symbol): Likewise.
(assemble_variable): If decl is asan protected, increase
DECL_ALIGN if needed, and for decls emitted using
assemble_variable_contents append padding zeros after it.
* Makefile.in (varasm.o): Depend on asan.h.
* asan.c: Include output.h.
(asan_pp, asan_pp_initialized, asan_ctor_statements): New variables.
(asan_pp_initialize, asan_pp_string): New functions.
(asan_emit_stack_protection): Use asan_pp{,_initialized}
instead of local pp{,_initialized} vars, use asan_pp_initialize
and asan_pp_string helpers.
(asan_needs_local_alias, asan_protect_global,
asan_global_struct, asan_add_global): New functions.
(asan_finish_file): Protect global vars that can be protected. Use
asan_ctor_statements instead of ctor_statements
* asan.h (asan_protect_global): New prototype.
(asan_red_zone_size): New inline function.
Co-Authored-By: Wei Mi <wmi@google.com>
From-SVN: r193437
2012-11-12 16:52:42 +01:00
|
|
|
return false;
|
|
|
|
|
|
|
|
rtl = DECL_RTL (decl);
|
|
|
|
if (!MEM_P (rtl) || GET_CODE (XEXP (rtl, 0)) != SYMBOL_REF)
|
|
|
|
return false;
|
|
|
|
symbol = XEXP (rtl, 0);
|
|
|
|
|
|
|
|
if (CONSTANT_POOL_ADDRESS_P (symbol)
|
|
|
|
|| TREE_CONSTANT_POOL_ADDRESS_P (symbol))
|
|
|
|
return false;
|
|
|
|
|
|
|
|
if (lookup_attribute ("weakref", DECL_ATTRIBUTES (decl)))
|
|
|
|
return false;
|
|
|
|
|
|
|
|
#ifndef ASM_OUTPUT_DEF
|
|
|
|
if (asan_needs_local_alias (decl))
|
|
|
|
return false;
|
|
|
|
#endif
|
|
|
|
|
2012-11-12 17:18:59 +01:00
|
|
|
return true;
|
Implement protection of global variables
This patch implements the protection of global variables. See the
comments appended to the beginning of the asan.c file.
* varasm.c: Include asan.h.
(assemble_noswitch_variable): Grow size by asan_red_zone_size
if decl is asan protected.
(place_block_symbol): Likewise.
(assemble_variable): If decl is asan protected, increase
DECL_ALIGN if needed, and for decls emitted using
assemble_variable_contents append padding zeros after it.
* Makefile.in (varasm.o): Depend on asan.h.
* asan.c: Include output.h.
(asan_pp, asan_pp_initialized, asan_ctor_statements): New variables.
(asan_pp_initialize, asan_pp_string): New functions.
(asan_emit_stack_protection): Use asan_pp{,_initialized}
instead of local pp{,_initialized} vars, use asan_pp_initialize
and asan_pp_string helpers.
(asan_needs_local_alias, asan_protect_global,
asan_global_struct, asan_add_global): New functions.
(asan_finish_file): Protect global vars that can be protected. Use
asan_ctor_statements instead of ctor_statements
* asan.h (asan_protect_global): New prototype.
(asan_red_zone_size): New inline function.
Co-Authored-By: Wei Mi <wmi@google.com>
From-SVN: r193437
2012-11-12 16:52:42 +01:00
|
|
|
}
|
|
|
|
|
2014-05-30 20:37:05 +02:00
|
|
|
/* Construct a function tree for __asan_report_{load,store}{1,2,4,8,16,_n}.
|
|
|
|
IS_STORE is either 1 (for a store) or 0 (for a load). */
|
2012-11-12 16:51:13 +01:00
|
|
|
|
|
|
|
static tree
|
2014-10-28 11:33:04 +01:00
|
|
|
report_error_func (bool is_store, bool recover_p, HOST_WIDE_INT size_in_bytes,
|
|
|
|
int *nargs)
|
2012-11-12 16:51:13 +01:00
|
|
|
{
|
2014-10-28 11:33:04 +01:00
|
|
|
static enum built_in_function report[2][2][6]
|
|
|
|
= { { { BUILT_IN_ASAN_REPORT_LOAD1, BUILT_IN_ASAN_REPORT_LOAD2,
|
|
|
|
BUILT_IN_ASAN_REPORT_LOAD4, BUILT_IN_ASAN_REPORT_LOAD8,
|
|
|
|
BUILT_IN_ASAN_REPORT_LOAD16, BUILT_IN_ASAN_REPORT_LOAD_N },
|
|
|
|
{ BUILT_IN_ASAN_REPORT_STORE1, BUILT_IN_ASAN_REPORT_STORE2,
|
|
|
|
BUILT_IN_ASAN_REPORT_STORE4, BUILT_IN_ASAN_REPORT_STORE8,
|
|
|
|
BUILT_IN_ASAN_REPORT_STORE16, BUILT_IN_ASAN_REPORT_STORE_N } },
|
|
|
|
{ { BUILT_IN_ASAN_REPORT_LOAD1_NOABORT,
|
|
|
|
BUILT_IN_ASAN_REPORT_LOAD2_NOABORT,
|
|
|
|
BUILT_IN_ASAN_REPORT_LOAD4_NOABORT,
|
|
|
|
BUILT_IN_ASAN_REPORT_LOAD8_NOABORT,
|
|
|
|
BUILT_IN_ASAN_REPORT_LOAD16_NOABORT,
|
|
|
|
BUILT_IN_ASAN_REPORT_LOAD_N_NOABORT },
|
|
|
|
{ BUILT_IN_ASAN_REPORT_STORE1_NOABORT,
|
|
|
|
BUILT_IN_ASAN_REPORT_STORE2_NOABORT,
|
|
|
|
BUILT_IN_ASAN_REPORT_STORE4_NOABORT,
|
|
|
|
BUILT_IN_ASAN_REPORT_STORE8_NOABORT,
|
|
|
|
BUILT_IN_ASAN_REPORT_STORE16_NOABORT,
|
|
|
|
BUILT_IN_ASAN_REPORT_STORE_N_NOABORT } } };
|
2014-06-16 10:43:47 +02:00
|
|
|
if (size_in_bytes == -1)
|
|
|
|
{
|
|
|
|
*nargs = 2;
|
2014-10-28 11:33:04 +01:00
|
|
|
return builtin_decl_implicit (report[recover_p][is_store][5]);
|
2014-06-16 10:43:47 +02:00
|
|
|
}
|
|
|
|
*nargs = 1;
|
2014-10-28 11:33:04 +01:00
|
|
|
int size_log2 = exact_log2 (size_in_bytes);
|
|
|
|
return builtin_decl_implicit (report[recover_p][is_store][size_log2]);
|
2012-11-12 16:51:13 +01:00
|
|
|
}
|
|
|
|
|
2014-06-16 10:43:47 +02:00
|
|
|
/* Construct a function tree for __asan_{load,store}{1,2,4,8,16,_n}.
|
|
|
|
IS_STORE is either 1 (for a store) or 0 (for a load). */
|
|
|
|
|
|
|
|
static tree
|
2014-10-28 11:33:04 +01:00
|
|
|
check_func (bool is_store, bool recover_p, HOST_WIDE_INT size_in_bytes,
|
|
|
|
int *nargs)
|
2014-06-16 10:43:47 +02:00
|
|
|
{
|
2014-10-28 11:33:04 +01:00
|
|
|
static enum built_in_function check[2][2][6]
|
|
|
|
= { { { BUILT_IN_ASAN_LOAD1, BUILT_IN_ASAN_LOAD2,
|
|
|
|
BUILT_IN_ASAN_LOAD4, BUILT_IN_ASAN_LOAD8,
|
|
|
|
BUILT_IN_ASAN_LOAD16, BUILT_IN_ASAN_LOADN },
|
|
|
|
{ BUILT_IN_ASAN_STORE1, BUILT_IN_ASAN_STORE2,
|
|
|
|
BUILT_IN_ASAN_STORE4, BUILT_IN_ASAN_STORE8,
|
|
|
|
BUILT_IN_ASAN_STORE16, BUILT_IN_ASAN_STOREN } },
|
|
|
|
{ { BUILT_IN_ASAN_LOAD1_NOABORT,
|
|
|
|
BUILT_IN_ASAN_LOAD2_NOABORT,
|
|
|
|
BUILT_IN_ASAN_LOAD4_NOABORT,
|
|
|
|
BUILT_IN_ASAN_LOAD8_NOABORT,
|
|
|
|
BUILT_IN_ASAN_LOAD16_NOABORT,
|
|
|
|
BUILT_IN_ASAN_LOADN_NOABORT },
|
|
|
|
{ BUILT_IN_ASAN_STORE1_NOABORT,
|
|
|
|
BUILT_IN_ASAN_STORE2_NOABORT,
|
|
|
|
BUILT_IN_ASAN_STORE4_NOABORT,
|
|
|
|
BUILT_IN_ASAN_STORE8_NOABORT,
|
|
|
|
BUILT_IN_ASAN_STORE16_NOABORT,
|
|
|
|
BUILT_IN_ASAN_STOREN_NOABORT } } };
|
2014-06-16 10:43:47 +02:00
|
|
|
if (size_in_bytes == -1)
|
|
|
|
{
|
|
|
|
*nargs = 2;
|
2014-10-28 11:33:04 +01:00
|
|
|
return builtin_decl_implicit (check[recover_p][is_store][5]);
|
2014-06-16 10:43:47 +02:00
|
|
|
}
|
|
|
|
*nargs = 1;
|
2014-10-28 11:33:04 +01:00
|
|
|
int size_log2 = exact_log2 (size_in_bytes);
|
|
|
|
return builtin_decl_implicit (check[recover_p][is_store][size_log2]);
|
2014-06-16 10:43:47 +02:00
|
|
|
}
|
|
|
|
|
2012-11-12 16:53:12 +01:00
|
|
|
/* Split the current basic block and create a condition statement
|
Instrument built-in memory access function calls
This patch instruments many memory access patterns through builtins.
Basically, for a call like:
__builtin_memset (from, 0, n_bytes);
the patch would only instrument the accesses at the beginning and at
the end of the memory region [from, from + n_bytes]. This is the
strategy used by the llvm implementation of asan.
This instrumentation is done for all the memory access builtin
functions that expose a well specified memory region -- one that
explicitly states the number of bytes accessed in the region.
A special treatment is used for __builtin_strlen. The patch
instruments the access to the first byte of its argument, as well as
the access to the byte (of the argument) at the offset returned by
strlen.
For the __sync_* and __atomic* calls the patch instruments the access
to the bytes pointed to by the argument.
While doing this, I have added a new parameter to build_check_stmt to
decide whether to insert the instrumentation code before or after the
statement iterator. This allows us to do away with the
gsi_{next,prev} dance we were doing in the callers of this function.
Tested by running cc1 -fasan on variations of simple programs like:
int
foo ()
{
char foo[10] = {0};
foo[0] = 't';
foo[1] = 'e';
foo[2] = 's';
foo[3] = 't';
int l = __builtin_strlen (foo);
int n = sizeof (foo);
__builtin_memset (&foo[4], 0, n - 4);
__sync_fetch_and_add (&foo[11], 1);
return l;
}
and by starring at the gimple output which for this function is:
;; Function foo (foo, funcdef_no=0, decl_uid=1714, cgraph_uid=0)
foo ()
{
int n;
int l;
char foo[10];
int D.1725;
char * D.1724;
int D.1723;
long unsigned int D.1722;
int D.1721;
long unsigned int D.1720;
long unsigned int _1;
int _4;
long unsigned int _5;
int _6;
char * _7;
int _8;
char * _9;
unsigned long _10;
unsigned long _11;
unsigned long _12;
signed char * _13;
signed char _14;
_Bool _15;
unsigned long _16;
signed char _17;
_Bool _18;
_Bool _19;
char * _20;
unsigned long _21;
unsigned long _22;
unsigned long _23;
signed char * _24;
signed char _25;
_Bool _26;
unsigned long _27;
signed char _28;
_Bool _29;
_Bool _30;
char * _31;
unsigned long _32;
unsigned long _33;
unsigned long _34;
signed char * _35;
signed char _36;
_Bool _37;
unsigned long _38;
signed char _39;
_Bool _40;
_Bool _41;
char * _42;
unsigned long _43;
unsigned long _44;
unsigned long _45;
signed char * _46;
signed char _47;
_Bool _48;
unsigned long _49;
signed char _50;
_Bool _51;
_Bool _52;
char * _53;
unsigned long _54;
unsigned long _55;
unsigned long _56;
signed char * _57;
signed char _58;
_Bool _59;
unsigned long _60;
signed char _61;
_Bool _62;
_Bool _63;
char[10] * _64;
unsigned long _65;
unsigned long _66;
unsigned long _67;
signed char * _68;
signed char _69;
_Bool _70;
unsigned long _71;
signed char _72;
_Bool _73;
_Bool _74;
unsigned long _75;
unsigned long _76;
unsigned long _77;
signed char * _78;
signed char _79;
_Bool _80;
unsigned long _81;
signed char _82;
_Bool _83;
_Bool _84;
long unsigned int _85;
long unsigned int _86;
char * _87;
char * _88;
unsigned long _89;
unsigned long _90;
unsigned long _91;
signed char * _92;
signed char _93;
_Bool _94;
unsigned long _95;
signed char _96;
_Bool _97;
_Bool _98;
char * _99;
unsigned long _100;
unsigned long _101;
unsigned long _102;
signed char * _103;
signed char _104;
_Bool _105;
unsigned long _106;
signed char _107;
_Bool _108;
_Bool _109;
<bb 2>:
foo = {};
_9 = &foo[0];
_10 = (unsigned long) _9;
_11 = _10 >> 3;
_12 = _11 + 17592186044416;
_13 = (signed char *) _12;
_14 = *_13;
_15 = _14 != 0;
_16 = _10 & 7;
_17 = (signed char) _16;
_18 = _17 >= _14;
_19 = _15 & _18;
if (_19 != 0)
goto <bb 5>;
else
goto <bb 4>;
<bb 5>:
__asan_report_store1 (_10);
<bb 4>:
foo[0] = 116;
_20 = &foo[1];
_21 = (unsigned long) _20;
_22 = _21 >> 3;
_23 = _22 + 17592186044416;
_24 = (signed char *) _23;
_25 = *_24;
_26 = _25 != 0;
_27 = _21 & 7;
_28 = (signed char) _27;
_29 = _28 >= _25;
_30 = _26 & _29;
if (_30 != 0)
goto <bb 7>;
else
goto <bb 6>;
<bb 7>:
__asan_report_store1 (_21);
<bb 6>:
foo[1] = 101;
_31 = &foo[2];
_32 = (unsigned long) _31;
_33 = _32 >> 3;
_34 = _33 + 17592186044416;
_35 = (signed char *) _34;
_36 = *_35;
_37 = _36 != 0;
_38 = _32 & 7;
_39 = (signed char) _38;
_40 = _39 >= _36;
_41 = _37 & _40;
if (_41 != 0)
goto <bb 9>;
else
goto <bb 8>;
<bb 9>:
__asan_report_store1 (_32);
<bb 8>:
foo[2] = 115;
_42 = &foo[3];
_43 = (unsigned long) _42;
_44 = _43 >> 3;
_45 = _44 + 17592186044416;
_46 = (signed char *) _45;
_47 = *_46;
_48 = _47 != 0;
_49 = _43 & 7;
_50 = (signed char) _49;
_51 = _50 >= _47;
_52 = _48 & _51;
if (_52 != 0)
goto <bb 11>;
else
goto <bb 10>;
<bb 11>:
__asan_report_store1 (_43);
<bb 10>:
foo[3] = 116;
_53 = (char *) &foo;
_54 = (unsigned long) _53;
_55 = _54 >> 3;
_56 = _55 + 17592186044416;
_57 = (signed char *) _56;
_58 = *_57;
_59 = _58 != 0;
_60 = _54 & 7;
_61 = (signed char) _60;
_62 = _61 >= _58;
_63 = _59 & _62;
if (_63 != 0)
goto <bb 13>;
else
goto <bb 12>;
<bb 13>:
__asan_report_load1 (_54);
<bb 12>:
_1 = __builtin_strlen (&foo);
_64 = _53 + _1;
_65 = (unsigned long) _64;
_66 = _65 >> 3;
_67 = _66 + 17592186044416;
_68 = (signed char *) _67;
_69 = *_68;
_70 = _69 != 0;
_71 = _65 & 7;
_72 = (signed char) _71;
_73 = _72 >= _69;
_74 = _70 & _73;
if (_74 != 0)
goto <bb 15>;
else
goto <bb 14>;
<bb 15>:
__asan_report_load1 (_65);
<bb 14>:
l_2 = (int) _1;
n_3 = 10;
_4 = n_3 + -4;
_5 = (long unsigned int) _4;
_6 = l_2 + 1;
_7 = &foo[_6];
if (_5 != 0)
goto <bb 17>;
else
goto <bb 16>;
<bb 17>:
_75 = (unsigned long) _7;
_76 = _75 >> 3;
_77 = _76 + 17592186044416;
_78 = (signed char *) _77;
_79 = *_78;
_80 = _79 != 0;
_81 = _75 & 7;
_82 = (signed char) _81;
_83 = _82 >= _79;
_84 = _80 & _83;
_85 = _5;
_86 = _85 - 1;
_87 = _7;
_88 = _87 + _86;
_89 = (unsigned long) _88;
_90 = _89 >> 3;
_91 = _90 + 17592186044416;
_92 = (signed char *) _91;
_93 = *_92;
_94 = _93 != 0;
_95 = _89 & 7;
_96 = (signed char) _95;
_97 = _96 >= _93;
_98 = _94 & _97;
if (_98 != 0)
goto <bb 21>;
else
goto <bb 20>;
<bb 21>:
__asan_report_store1 (_89);
<bb 20>:
if (_84 != 0)
goto <bb 19>;
else
goto <bb 18>;
<bb 19>:
__asan_report_store1 (_75);
<bb 18>:
<bb 16>:
__builtin_memset (_7, 0, _5);
_99 = &foo[11];
_100 = (unsigned long) _99;
_101 = _100 >> 3;
_102 = _101 + 17592186044416;
_103 = (signed char *) _102;
_104 = *_103;
_105 = _104 != 0;
_106 = _100 & 7;
_107 = (signed char) _106;
_108 = _107 >= _104;
_109 = _105 & _108;
if (_109 != 0)
goto <bb 23>;
else
goto <bb 22>;
<bb 23>:
__asan_report_store1 (_100);
<bb 22>:
__sync_fetch_and_add_1 (&foo[11], 1);
_8 = l_2;
foo ={v} {CLOBBER};
<L1>:
return _8;
}
;; Function _GLOBAL__sub_I_00099_0_foo (_GLOBAL__sub_I_00099_0_foo, funcdef_no=1, decl_uid=1752, cgraph_uid=4)
_GLOBAL__sub_I_00099_0_foo ()
{
<bb 2>:
__asan_init ();
return;
}
gcc/
* gimple.h (is_gimple_builtin_call): Declare ...
* gimple.c (is_gimple_builtin_call): ... New public function.
* asan.c (insert_if_then_before_iter, instrument_mem_region_access,
instrument_strlen_call, maybe_instrument_builtin_call,
instrument_call): New static functions.
(create_cond_insert_point): Renamed
create_cond_insert_point_before_iter into this. Add a new
parameter to decide whether to insert the condition before or
after the statement iterator.
(build_check_stmt): Adjust for the new create_cond_insert_point.
Add a new parameter to decide whether to add the instrumentation
code before or after the statement iterator.
(instrument_assignment): Factorize from ...
(transform_statements): ... here. Use maybe_instrument_call to
instrument builtin function calls as well.
(instrument_derefs): Adjust for the new parameter of
build_check_stmt. Fix detection of bit-field access.
From-SVN: r193440
2012-11-12 16:53:25 +01:00
|
|
|
insertion point right before or after the statement pointed to by
|
|
|
|
ITER. Return an iterator to the point at which the caller might
|
|
|
|
safely insert the condition statement.
|
2012-11-12 16:53:12 +01:00
|
|
|
|
|
|
|
THEN_BLOCK must be set to the address of an uninitialized instance
|
|
|
|
of basic_block. The function will then set *THEN_BLOCK to the
|
|
|
|
'then block' of the condition statement to be inserted by the
|
|
|
|
caller.
|
|
|
|
|
2013-02-13 21:47:39 +01:00
|
|
|
If CREATE_THEN_FALLTHRU_EDGE is false, no edge will be created from
|
|
|
|
*THEN_BLOCK to *FALLTHROUGH_BLOCK.
|
|
|
|
|
2012-11-12 16:53:12 +01:00
|
|
|
Similarly, the function will set *FALLTRHOUGH_BLOCK to the 'else
|
|
|
|
block' of the condition statement to be inserted by the caller.
|
|
|
|
|
|
|
|
Note that *FALLTHROUGH_BLOCK is a new block that contains the
|
|
|
|
statements starting from *ITER, and *THEN_BLOCK is a new empty
|
|
|
|
block.
|
|
|
|
|
Instrument built-in memory access function calls
This patch instruments many memory access patterns through builtins.
Basically, for a call like:
__builtin_memset (from, 0, n_bytes);
the patch would only instrument the accesses at the beginning and at
the end of the memory region [from, from + n_bytes]. This is the
strategy used by the llvm implementation of asan.
This instrumentation is done for all the memory access builtin
functions that expose a well specified memory region -- one that
explicitly states the number of bytes accessed in the region.
A special treatment is used for __builtin_strlen. The patch
instruments the access to the first byte of its argument, as well as
the access to the byte (of the argument) at the offset returned by
strlen.
For the __sync_* and __atomic* calls the patch instruments the access
to the bytes pointed to by the argument.
While doing this, I have added a new parameter to build_check_stmt to
decide whether to insert the instrumentation code before or after the
statement iterator. This allows us to do away with the
gsi_{next,prev} dance we were doing in the callers of this function.
Tested by running cc1 -fasan on variations of simple programs like:
int
foo ()
{
char foo[10] = {0};
foo[0] = 't';
foo[1] = 'e';
foo[2] = 's';
foo[3] = 't';
int l = __builtin_strlen (foo);
int n = sizeof (foo);
__builtin_memset (&foo[4], 0, n - 4);
__sync_fetch_and_add (&foo[11], 1);
return l;
}
and by starring at the gimple output which for this function is:
;; Function foo (foo, funcdef_no=0, decl_uid=1714, cgraph_uid=0)
foo ()
{
int n;
int l;
char foo[10];
int D.1725;
char * D.1724;
int D.1723;
long unsigned int D.1722;
int D.1721;
long unsigned int D.1720;
long unsigned int _1;
int _4;
long unsigned int _5;
int _6;
char * _7;
int _8;
char * _9;
unsigned long _10;
unsigned long _11;
unsigned long _12;
signed char * _13;
signed char _14;
_Bool _15;
unsigned long _16;
signed char _17;
_Bool _18;
_Bool _19;
char * _20;
unsigned long _21;
unsigned long _22;
unsigned long _23;
signed char * _24;
signed char _25;
_Bool _26;
unsigned long _27;
signed char _28;
_Bool _29;
_Bool _30;
char * _31;
unsigned long _32;
unsigned long _33;
unsigned long _34;
signed char * _35;
signed char _36;
_Bool _37;
unsigned long _38;
signed char _39;
_Bool _40;
_Bool _41;
char * _42;
unsigned long _43;
unsigned long _44;
unsigned long _45;
signed char * _46;
signed char _47;
_Bool _48;
unsigned long _49;
signed char _50;
_Bool _51;
_Bool _52;
char * _53;
unsigned long _54;
unsigned long _55;
unsigned long _56;
signed char * _57;
signed char _58;
_Bool _59;
unsigned long _60;
signed char _61;
_Bool _62;
_Bool _63;
char[10] * _64;
unsigned long _65;
unsigned long _66;
unsigned long _67;
signed char * _68;
signed char _69;
_Bool _70;
unsigned long _71;
signed char _72;
_Bool _73;
_Bool _74;
unsigned long _75;
unsigned long _76;
unsigned long _77;
signed char * _78;
signed char _79;
_Bool _80;
unsigned long _81;
signed char _82;
_Bool _83;
_Bool _84;
long unsigned int _85;
long unsigned int _86;
char * _87;
char * _88;
unsigned long _89;
unsigned long _90;
unsigned long _91;
signed char * _92;
signed char _93;
_Bool _94;
unsigned long _95;
signed char _96;
_Bool _97;
_Bool _98;
char * _99;
unsigned long _100;
unsigned long _101;
unsigned long _102;
signed char * _103;
signed char _104;
_Bool _105;
unsigned long _106;
signed char _107;
_Bool _108;
_Bool _109;
<bb 2>:
foo = {};
_9 = &foo[0];
_10 = (unsigned long) _9;
_11 = _10 >> 3;
_12 = _11 + 17592186044416;
_13 = (signed char *) _12;
_14 = *_13;
_15 = _14 != 0;
_16 = _10 & 7;
_17 = (signed char) _16;
_18 = _17 >= _14;
_19 = _15 & _18;
if (_19 != 0)
goto <bb 5>;
else
goto <bb 4>;
<bb 5>:
__asan_report_store1 (_10);
<bb 4>:
foo[0] = 116;
_20 = &foo[1];
_21 = (unsigned long) _20;
_22 = _21 >> 3;
_23 = _22 + 17592186044416;
_24 = (signed char *) _23;
_25 = *_24;
_26 = _25 != 0;
_27 = _21 & 7;
_28 = (signed char) _27;
_29 = _28 >= _25;
_30 = _26 & _29;
if (_30 != 0)
goto <bb 7>;
else
goto <bb 6>;
<bb 7>:
__asan_report_store1 (_21);
<bb 6>:
foo[1] = 101;
_31 = &foo[2];
_32 = (unsigned long) _31;
_33 = _32 >> 3;
_34 = _33 + 17592186044416;
_35 = (signed char *) _34;
_36 = *_35;
_37 = _36 != 0;
_38 = _32 & 7;
_39 = (signed char) _38;
_40 = _39 >= _36;
_41 = _37 & _40;
if (_41 != 0)
goto <bb 9>;
else
goto <bb 8>;
<bb 9>:
__asan_report_store1 (_32);
<bb 8>:
foo[2] = 115;
_42 = &foo[3];
_43 = (unsigned long) _42;
_44 = _43 >> 3;
_45 = _44 + 17592186044416;
_46 = (signed char *) _45;
_47 = *_46;
_48 = _47 != 0;
_49 = _43 & 7;
_50 = (signed char) _49;
_51 = _50 >= _47;
_52 = _48 & _51;
if (_52 != 0)
goto <bb 11>;
else
goto <bb 10>;
<bb 11>:
__asan_report_store1 (_43);
<bb 10>:
foo[3] = 116;
_53 = (char *) &foo;
_54 = (unsigned long) _53;
_55 = _54 >> 3;
_56 = _55 + 17592186044416;
_57 = (signed char *) _56;
_58 = *_57;
_59 = _58 != 0;
_60 = _54 & 7;
_61 = (signed char) _60;
_62 = _61 >= _58;
_63 = _59 & _62;
if (_63 != 0)
goto <bb 13>;
else
goto <bb 12>;
<bb 13>:
__asan_report_load1 (_54);
<bb 12>:
_1 = __builtin_strlen (&foo);
_64 = _53 + _1;
_65 = (unsigned long) _64;
_66 = _65 >> 3;
_67 = _66 + 17592186044416;
_68 = (signed char *) _67;
_69 = *_68;
_70 = _69 != 0;
_71 = _65 & 7;
_72 = (signed char) _71;
_73 = _72 >= _69;
_74 = _70 & _73;
if (_74 != 0)
goto <bb 15>;
else
goto <bb 14>;
<bb 15>:
__asan_report_load1 (_65);
<bb 14>:
l_2 = (int) _1;
n_3 = 10;
_4 = n_3 + -4;
_5 = (long unsigned int) _4;
_6 = l_2 + 1;
_7 = &foo[_6];
if (_5 != 0)
goto <bb 17>;
else
goto <bb 16>;
<bb 17>:
_75 = (unsigned long) _7;
_76 = _75 >> 3;
_77 = _76 + 17592186044416;
_78 = (signed char *) _77;
_79 = *_78;
_80 = _79 != 0;
_81 = _75 & 7;
_82 = (signed char) _81;
_83 = _82 >= _79;
_84 = _80 & _83;
_85 = _5;
_86 = _85 - 1;
_87 = _7;
_88 = _87 + _86;
_89 = (unsigned long) _88;
_90 = _89 >> 3;
_91 = _90 + 17592186044416;
_92 = (signed char *) _91;
_93 = *_92;
_94 = _93 != 0;
_95 = _89 & 7;
_96 = (signed char) _95;
_97 = _96 >= _93;
_98 = _94 & _97;
if (_98 != 0)
goto <bb 21>;
else
goto <bb 20>;
<bb 21>:
__asan_report_store1 (_89);
<bb 20>:
if (_84 != 0)
goto <bb 19>;
else
goto <bb 18>;
<bb 19>:
__asan_report_store1 (_75);
<bb 18>:
<bb 16>:
__builtin_memset (_7, 0, _5);
_99 = &foo[11];
_100 = (unsigned long) _99;
_101 = _100 >> 3;
_102 = _101 + 17592186044416;
_103 = (signed char *) _102;
_104 = *_103;
_105 = _104 != 0;
_106 = _100 & 7;
_107 = (signed char) _106;
_108 = _107 >= _104;
_109 = _105 & _108;
if (_109 != 0)
goto <bb 23>;
else
goto <bb 22>;
<bb 23>:
__asan_report_store1 (_100);
<bb 22>:
__sync_fetch_and_add_1 (&foo[11], 1);
_8 = l_2;
foo ={v} {CLOBBER};
<L1>:
return _8;
}
;; Function _GLOBAL__sub_I_00099_0_foo (_GLOBAL__sub_I_00099_0_foo, funcdef_no=1, decl_uid=1752, cgraph_uid=4)
_GLOBAL__sub_I_00099_0_foo ()
{
<bb 2>:
__asan_init ();
return;
}
gcc/
* gimple.h (is_gimple_builtin_call): Declare ...
* gimple.c (is_gimple_builtin_call): ... New public function.
* asan.c (insert_if_then_before_iter, instrument_mem_region_access,
instrument_strlen_call, maybe_instrument_builtin_call,
instrument_call): New static functions.
(create_cond_insert_point): Renamed
create_cond_insert_point_before_iter into this. Add a new
parameter to decide whether to insert the condition before or
after the statement iterator.
(build_check_stmt): Adjust for the new create_cond_insert_point.
Add a new parameter to decide whether to add the instrumentation
code before or after the statement iterator.
(instrument_assignment): Factorize from ...
(transform_statements): ... here. Use maybe_instrument_call to
instrument builtin function calls as well.
(instrument_derefs): Adjust for the new parameter of
build_check_stmt. Fix detection of bit-field access.
From-SVN: r193440
2012-11-12 16:53:25 +01:00
|
|
|
*ITER is adjusted to point to always point to the first statement
|
|
|
|
of the basic block * FALLTHROUGH_BLOCK. That statement is the
|
|
|
|
same as what ITER was pointing to prior to calling this function,
|
|
|
|
if BEFORE_P is true; otherwise, it is its following statement. */
|
2012-11-12 16:53:12 +01:00
|
|
|
|
2013-12-20 10:05:04 +01:00
|
|
|
gimple_stmt_iterator
|
Instrument built-in memory access function calls
This patch instruments many memory access patterns through builtins.
Basically, for a call like:
__builtin_memset (from, 0, n_bytes);
the patch would only instrument the accesses at the beginning and at
the end of the memory region [from, from + n_bytes]. This is the
strategy used by the llvm implementation of asan.
This instrumentation is done for all the memory access builtin
functions that expose a well specified memory region -- one that
explicitly states the number of bytes accessed in the region.
A special treatment is used for __builtin_strlen. The patch
instruments the access to the first byte of its argument, as well as
the access to the byte (of the argument) at the offset returned by
strlen.
For the __sync_* and __atomic* calls the patch instruments the access
to the bytes pointed to by the argument.
While doing this, I have added a new parameter to build_check_stmt to
decide whether to insert the instrumentation code before or after the
statement iterator. This allows us to do away with the
gsi_{next,prev} dance we were doing in the callers of this function.
Tested by running cc1 -fasan on variations of simple programs like:
int
foo ()
{
char foo[10] = {0};
foo[0] = 't';
foo[1] = 'e';
foo[2] = 's';
foo[3] = 't';
int l = __builtin_strlen (foo);
int n = sizeof (foo);
__builtin_memset (&foo[4], 0, n - 4);
__sync_fetch_and_add (&foo[11], 1);
return l;
}
and by starring at the gimple output which for this function is:
;; Function foo (foo, funcdef_no=0, decl_uid=1714, cgraph_uid=0)
foo ()
{
int n;
int l;
char foo[10];
int D.1725;
char * D.1724;
int D.1723;
long unsigned int D.1722;
int D.1721;
long unsigned int D.1720;
long unsigned int _1;
int _4;
long unsigned int _5;
int _6;
char * _7;
int _8;
char * _9;
unsigned long _10;
unsigned long _11;
unsigned long _12;
signed char * _13;
signed char _14;
_Bool _15;
unsigned long _16;
signed char _17;
_Bool _18;
_Bool _19;
char * _20;
unsigned long _21;
unsigned long _22;
unsigned long _23;
signed char * _24;
signed char _25;
_Bool _26;
unsigned long _27;
signed char _28;
_Bool _29;
_Bool _30;
char * _31;
unsigned long _32;
unsigned long _33;
unsigned long _34;
signed char * _35;
signed char _36;
_Bool _37;
unsigned long _38;
signed char _39;
_Bool _40;
_Bool _41;
char * _42;
unsigned long _43;
unsigned long _44;
unsigned long _45;
signed char * _46;
signed char _47;
_Bool _48;
unsigned long _49;
signed char _50;
_Bool _51;
_Bool _52;
char * _53;
unsigned long _54;
unsigned long _55;
unsigned long _56;
signed char * _57;
signed char _58;
_Bool _59;
unsigned long _60;
signed char _61;
_Bool _62;
_Bool _63;
char[10] * _64;
unsigned long _65;
unsigned long _66;
unsigned long _67;
signed char * _68;
signed char _69;
_Bool _70;
unsigned long _71;
signed char _72;
_Bool _73;
_Bool _74;
unsigned long _75;
unsigned long _76;
unsigned long _77;
signed char * _78;
signed char _79;
_Bool _80;
unsigned long _81;
signed char _82;
_Bool _83;
_Bool _84;
long unsigned int _85;
long unsigned int _86;
char * _87;
char * _88;
unsigned long _89;
unsigned long _90;
unsigned long _91;
signed char * _92;
signed char _93;
_Bool _94;
unsigned long _95;
signed char _96;
_Bool _97;
_Bool _98;
char * _99;
unsigned long _100;
unsigned long _101;
unsigned long _102;
signed char * _103;
signed char _104;
_Bool _105;
unsigned long _106;
signed char _107;
_Bool _108;
_Bool _109;
<bb 2>:
foo = {};
_9 = &foo[0];
_10 = (unsigned long) _9;
_11 = _10 >> 3;
_12 = _11 + 17592186044416;
_13 = (signed char *) _12;
_14 = *_13;
_15 = _14 != 0;
_16 = _10 & 7;
_17 = (signed char) _16;
_18 = _17 >= _14;
_19 = _15 & _18;
if (_19 != 0)
goto <bb 5>;
else
goto <bb 4>;
<bb 5>:
__asan_report_store1 (_10);
<bb 4>:
foo[0] = 116;
_20 = &foo[1];
_21 = (unsigned long) _20;
_22 = _21 >> 3;
_23 = _22 + 17592186044416;
_24 = (signed char *) _23;
_25 = *_24;
_26 = _25 != 0;
_27 = _21 & 7;
_28 = (signed char) _27;
_29 = _28 >= _25;
_30 = _26 & _29;
if (_30 != 0)
goto <bb 7>;
else
goto <bb 6>;
<bb 7>:
__asan_report_store1 (_21);
<bb 6>:
foo[1] = 101;
_31 = &foo[2];
_32 = (unsigned long) _31;
_33 = _32 >> 3;
_34 = _33 + 17592186044416;
_35 = (signed char *) _34;
_36 = *_35;
_37 = _36 != 0;
_38 = _32 & 7;
_39 = (signed char) _38;
_40 = _39 >= _36;
_41 = _37 & _40;
if (_41 != 0)
goto <bb 9>;
else
goto <bb 8>;
<bb 9>:
__asan_report_store1 (_32);
<bb 8>:
foo[2] = 115;
_42 = &foo[3];
_43 = (unsigned long) _42;
_44 = _43 >> 3;
_45 = _44 + 17592186044416;
_46 = (signed char *) _45;
_47 = *_46;
_48 = _47 != 0;
_49 = _43 & 7;
_50 = (signed char) _49;
_51 = _50 >= _47;
_52 = _48 & _51;
if (_52 != 0)
goto <bb 11>;
else
goto <bb 10>;
<bb 11>:
__asan_report_store1 (_43);
<bb 10>:
foo[3] = 116;
_53 = (char *) &foo;
_54 = (unsigned long) _53;
_55 = _54 >> 3;
_56 = _55 + 17592186044416;
_57 = (signed char *) _56;
_58 = *_57;
_59 = _58 != 0;
_60 = _54 & 7;
_61 = (signed char) _60;
_62 = _61 >= _58;
_63 = _59 & _62;
if (_63 != 0)
goto <bb 13>;
else
goto <bb 12>;
<bb 13>:
__asan_report_load1 (_54);
<bb 12>:
_1 = __builtin_strlen (&foo);
_64 = _53 + _1;
_65 = (unsigned long) _64;
_66 = _65 >> 3;
_67 = _66 + 17592186044416;
_68 = (signed char *) _67;
_69 = *_68;
_70 = _69 != 0;
_71 = _65 & 7;
_72 = (signed char) _71;
_73 = _72 >= _69;
_74 = _70 & _73;
if (_74 != 0)
goto <bb 15>;
else
goto <bb 14>;
<bb 15>:
__asan_report_load1 (_65);
<bb 14>:
l_2 = (int) _1;
n_3 = 10;
_4 = n_3 + -4;
_5 = (long unsigned int) _4;
_6 = l_2 + 1;
_7 = &foo[_6];
if (_5 != 0)
goto <bb 17>;
else
goto <bb 16>;
<bb 17>:
_75 = (unsigned long) _7;
_76 = _75 >> 3;
_77 = _76 + 17592186044416;
_78 = (signed char *) _77;
_79 = *_78;
_80 = _79 != 0;
_81 = _75 & 7;
_82 = (signed char) _81;
_83 = _82 >= _79;
_84 = _80 & _83;
_85 = _5;
_86 = _85 - 1;
_87 = _7;
_88 = _87 + _86;
_89 = (unsigned long) _88;
_90 = _89 >> 3;
_91 = _90 + 17592186044416;
_92 = (signed char *) _91;
_93 = *_92;
_94 = _93 != 0;
_95 = _89 & 7;
_96 = (signed char) _95;
_97 = _96 >= _93;
_98 = _94 & _97;
if (_98 != 0)
goto <bb 21>;
else
goto <bb 20>;
<bb 21>:
__asan_report_store1 (_89);
<bb 20>:
if (_84 != 0)
goto <bb 19>;
else
goto <bb 18>;
<bb 19>:
__asan_report_store1 (_75);
<bb 18>:
<bb 16>:
__builtin_memset (_7, 0, _5);
_99 = &foo[11];
_100 = (unsigned long) _99;
_101 = _100 >> 3;
_102 = _101 + 17592186044416;
_103 = (signed char *) _102;
_104 = *_103;
_105 = _104 != 0;
_106 = _100 & 7;
_107 = (signed char) _106;
_108 = _107 >= _104;
_109 = _105 & _108;
if (_109 != 0)
goto <bb 23>;
else
goto <bb 22>;
<bb 23>:
__asan_report_store1 (_100);
<bb 22>:
__sync_fetch_and_add_1 (&foo[11], 1);
_8 = l_2;
foo ={v} {CLOBBER};
<L1>:
return _8;
}
;; Function _GLOBAL__sub_I_00099_0_foo (_GLOBAL__sub_I_00099_0_foo, funcdef_no=1, decl_uid=1752, cgraph_uid=4)
_GLOBAL__sub_I_00099_0_foo ()
{
<bb 2>:
__asan_init ();
return;
}
gcc/
* gimple.h (is_gimple_builtin_call): Declare ...
* gimple.c (is_gimple_builtin_call): ... New public function.
* asan.c (insert_if_then_before_iter, instrument_mem_region_access,
instrument_strlen_call, maybe_instrument_builtin_call,
instrument_call): New static functions.
(create_cond_insert_point): Renamed
create_cond_insert_point_before_iter into this. Add a new
parameter to decide whether to insert the condition before or
after the statement iterator.
(build_check_stmt): Adjust for the new create_cond_insert_point.
Add a new parameter to decide whether to add the instrumentation
code before or after the statement iterator.
(instrument_assignment): Factorize from ...
(transform_statements): ... here. Use maybe_instrument_call to
instrument builtin function calls as well.
(instrument_derefs): Adjust for the new parameter of
build_check_stmt. Fix detection of bit-field access.
From-SVN: r193440
2012-11-12 16:53:25 +01:00
|
|
|
create_cond_insert_point (gimple_stmt_iterator *iter,
|
|
|
|
bool before_p,
|
|
|
|
bool then_more_likely_p,
|
2013-02-13 21:47:39 +01:00
|
|
|
bool create_then_fallthru_edge,
|
Instrument built-in memory access function calls
This patch instruments many memory access patterns through builtins.
Basically, for a call like:
__builtin_memset (from, 0, n_bytes);
the patch would only instrument the accesses at the beginning and at
the end of the memory region [from, from + n_bytes]. This is the
strategy used by the llvm implementation of asan.
This instrumentation is done for all the memory access builtin
functions that expose a well specified memory region -- one that
explicitly states the number of bytes accessed in the region.
A special treatment is used for __builtin_strlen. The patch
instruments the access to the first byte of its argument, as well as
the access to the byte (of the argument) at the offset returned by
strlen.
For the __sync_* and __atomic* calls the patch instruments the access
to the bytes pointed to by the argument.
While doing this, I have added a new parameter to build_check_stmt to
decide whether to insert the instrumentation code before or after the
statement iterator. This allows us to do away with the
gsi_{next,prev} dance we were doing in the callers of this function.
Tested by running cc1 -fasan on variations of simple programs like:
int
foo ()
{
char foo[10] = {0};
foo[0] = 't';
foo[1] = 'e';
foo[2] = 's';
foo[3] = 't';
int l = __builtin_strlen (foo);
int n = sizeof (foo);
__builtin_memset (&foo[4], 0, n - 4);
__sync_fetch_and_add (&foo[11], 1);
return l;
}
and by starring at the gimple output which for this function is:
;; Function foo (foo, funcdef_no=0, decl_uid=1714, cgraph_uid=0)
foo ()
{
int n;
int l;
char foo[10];
int D.1725;
char * D.1724;
int D.1723;
long unsigned int D.1722;
int D.1721;
long unsigned int D.1720;
long unsigned int _1;
int _4;
long unsigned int _5;
int _6;
char * _7;
int _8;
char * _9;
unsigned long _10;
unsigned long _11;
unsigned long _12;
signed char * _13;
signed char _14;
_Bool _15;
unsigned long _16;
signed char _17;
_Bool _18;
_Bool _19;
char * _20;
unsigned long _21;
unsigned long _22;
unsigned long _23;
signed char * _24;
signed char _25;
_Bool _26;
unsigned long _27;
signed char _28;
_Bool _29;
_Bool _30;
char * _31;
unsigned long _32;
unsigned long _33;
unsigned long _34;
signed char * _35;
signed char _36;
_Bool _37;
unsigned long _38;
signed char _39;
_Bool _40;
_Bool _41;
char * _42;
unsigned long _43;
unsigned long _44;
unsigned long _45;
signed char * _46;
signed char _47;
_Bool _48;
unsigned long _49;
signed char _50;
_Bool _51;
_Bool _52;
char * _53;
unsigned long _54;
unsigned long _55;
unsigned long _56;
signed char * _57;
signed char _58;
_Bool _59;
unsigned long _60;
signed char _61;
_Bool _62;
_Bool _63;
char[10] * _64;
unsigned long _65;
unsigned long _66;
unsigned long _67;
signed char * _68;
signed char _69;
_Bool _70;
unsigned long _71;
signed char _72;
_Bool _73;
_Bool _74;
unsigned long _75;
unsigned long _76;
unsigned long _77;
signed char * _78;
signed char _79;
_Bool _80;
unsigned long _81;
signed char _82;
_Bool _83;
_Bool _84;
long unsigned int _85;
long unsigned int _86;
char * _87;
char * _88;
unsigned long _89;
unsigned long _90;
unsigned long _91;
signed char * _92;
signed char _93;
_Bool _94;
unsigned long _95;
signed char _96;
_Bool _97;
_Bool _98;
char * _99;
unsigned long _100;
unsigned long _101;
unsigned long _102;
signed char * _103;
signed char _104;
_Bool _105;
unsigned long _106;
signed char _107;
_Bool _108;
_Bool _109;
<bb 2>:
foo = {};
_9 = &foo[0];
_10 = (unsigned long) _9;
_11 = _10 >> 3;
_12 = _11 + 17592186044416;
_13 = (signed char *) _12;
_14 = *_13;
_15 = _14 != 0;
_16 = _10 & 7;
_17 = (signed char) _16;
_18 = _17 >= _14;
_19 = _15 & _18;
if (_19 != 0)
goto <bb 5>;
else
goto <bb 4>;
<bb 5>:
__asan_report_store1 (_10);
<bb 4>:
foo[0] = 116;
_20 = &foo[1];
_21 = (unsigned long) _20;
_22 = _21 >> 3;
_23 = _22 + 17592186044416;
_24 = (signed char *) _23;
_25 = *_24;
_26 = _25 != 0;
_27 = _21 & 7;
_28 = (signed char) _27;
_29 = _28 >= _25;
_30 = _26 & _29;
if (_30 != 0)
goto <bb 7>;
else
goto <bb 6>;
<bb 7>:
__asan_report_store1 (_21);
<bb 6>:
foo[1] = 101;
_31 = &foo[2];
_32 = (unsigned long) _31;
_33 = _32 >> 3;
_34 = _33 + 17592186044416;
_35 = (signed char *) _34;
_36 = *_35;
_37 = _36 != 0;
_38 = _32 & 7;
_39 = (signed char) _38;
_40 = _39 >= _36;
_41 = _37 & _40;
if (_41 != 0)
goto <bb 9>;
else
goto <bb 8>;
<bb 9>:
__asan_report_store1 (_32);
<bb 8>:
foo[2] = 115;
_42 = &foo[3];
_43 = (unsigned long) _42;
_44 = _43 >> 3;
_45 = _44 + 17592186044416;
_46 = (signed char *) _45;
_47 = *_46;
_48 = _47 != 0;
_49 = _43 & 7;
_50 = (signed char) _49;
_51 = _50 >= _47;
_52 = _48 & _51;
if (_52 != 0)
goto <bb 11>;
else
goto <bb 10>;
<bb 11>:
__asan_report_store1 (_43);
<bb 10>:
foo[3] = 116;
_53 = (char *) &foo;
_54 = (unsigned long) _53;
_55 = _54 >> 3;
_56 = _55 + 17592186044416;
_57 = (signed char *) _56;
_58 = *_57;
_59 = _58 != 0;
_60 = _54 & 7;
_61 = (signed char) _60;
_62 = _61 >= _58;
_63 = _59 & _62;
if (_63 != 0)
goto <bb 13>;
else
goto <bb 12>;
<bb 13>:
__asan_report_load1 (_54);
<bb 12>:
_1 = __builtin_strlen (&foo);
_64 = _53 + _1;
_65 = (unsigned long) _64;
_66 = _65 >> 3;
_67 = _66 + 17592186044416;
_68 = (signed char *) _67;
_69 = *_68;
_70 = _69 != 0;
_71 = _65 & 7;
_72 = (signed char) _71;
_73 = _72 >= _69;
_74 = _70 & _73;
if (_74 != 0)
goto <bb 15>;
else
goto <bb 14>;
<bb 15>:
__asan_report_load1 (_65);
<bb 14>:
l_2 = (int) _1;
n_3 = 10;
_4 = n_3 + -4;
_5 = (long unsigned int) _4;
_6 = l_2 + 1;
_7 = &foo[_6];
if (_5 != 0)
goto <bb 17>;
else
goto <bb 16>;
<bb 17>:
_75 = (unsigned long) _7;
_76 = _75 >> 3;
_77 = _76 + 17592186044416;
_78 = (signed char *) _77;
_79 = *_78;
_80 = _79 != 0;
_81 = _75 & 7;
_82 = (signed char) _81;
_83 = _82 >= _79;
_84 = _80 & _83;
_85 = _5;
_86 = _85 - 1;
_87 = _7;
_88 = _87 + _86;
_89 = (unsigned long) _88;
_90 = _89 >> 3;
_91 = _90 + 17592186044416;
_92 = (signed char *) _91;
_93 = *_92;
_94 = _93 != 0;
_95 = _89 & 7;
_96 = (signed char) _95;
_97 = _96 >= _93;
_98 = _94 & _97;
if (_98 != 0)
goto <bb 21>;
else
goto <bb 20>;
<bb 21>:
__asan_report_store1 (_89);
<bb 20>:
if (_84 != 0)
goto <bb 19>;
else
goto <bb 18>;
<bb 19>:
__asan_report_store1 (_75);
<bb 18>:
<bb 16>:
__builtin_memset (_7, 0, _5);
_99 = &foo[11];
_100 = (unsigned long) _99;
_101 = _100 >> 3;
_102 = _101 + 17592186044416;
_103 = (signed char *) _102;
_104 = *_103;
_105 = _104 != 0;
_106 = _100 & 7;
_107 = (signed char) _106;
_108 = _107 >= _104;
_109 = _105 & _108;
if (_109 != 0)
goto <bb 23>;
else
goto <bb 22>;
<bb 23>:
__asan_report_store1 (_100);
<bb 22>:
__sync_fetch_and_add_1 (&foo[11], 1);
_8 = l_2;
foo ={v} {CLOBBER};
<L1>:
return _8;
}
;; Function _GLOBAL__sub_I_00099_0_foo (_GLOBAL__sub_I_00099_0_foo, funcdef_no=1, decl_uid=1752, cgraph_uid=4)
_GLOBAL__sub_I_00099_0_foo ()
{
<bb 2>:
__asan_init ();
return;
}
gcc/
* gimple.h (is_gimple_builtin_call): Declare ...
* gimple.c (is_gimple_builtin_call): ... New public function.
* asan.c (insert_if_then_before_iter, instrument_mem_region_access,
instrument_strlen_call, maybe_instrument_builtin_call,
instrument_call): New static functions.
(create_cond_insert_point): Renamed
create_cond_insert_point_before_iter into this. Add a new
parameter to decide whether to insert the condition before or
after the statement iterator.
(build_check_stmt): Adjust for the new create_cond_insert_point.
Add a new parameter to decide whether to add the instrumentation
code before or after the statement iterator.
(instrument_assignment): Factorize from ...
(transform_statements): ... here. Use maybe_instrument_call to
instrument builtin function calls as well.
(instrument_derefs): Adjust for the new parameter of
build_check_stmt. Fix detection of bit-field access.
From-SVN: r193440
2012-11-12 16:53:25 +01:00
|
|
|
basic_block *then_block,
|
|
|
|
basic_block *fallthrough_block)
|
2012-11-12 16:53:12 +01:00
|
|
|
{
|
|
|
|
gimple_stmt_iterator gsi = *iter;
|
|
|
|
|
Instrument built-in memory access function calls
This patch instruments many memory access patterns through builtins.
Basically, for a call like:
__builtin_memset (from, 0, n_bytes);
the patch would only instrument the accesses at the beginning and at
the end of the memory region [from, from + n_bytes]. This is the
strategy used by the llvm implementation of asan.
This instrumentation is done for all the memory access builtin
functions that expose a well specified memory region -- one that
explicitly states the number of bytes accessed in the region.
A special treatment is used for __builtin_strlen. The patch
instruments the access to the first byte of its argument, as well as
the access to the byte (of the argument) at the offset returned by
strlen.
For the __sync_* and __atomic* calls the patch instruments the access
to the bytes pointed to by the argument.
While doing this, I have added a new parameter to build_check_stmt to
decide whether to insert the instrumentation code before or after the
statement iterator. This allows us to do away with the
gsi_{next,prev} dance we were doing in the callers of this function.
Tested by running cc1 -fasan on variations of simple programs like:
int
foo ()
{
char foo[10] = {0};
foo[0] = 't';
foo[1] = 'e';
foo[2] = 's';
foo[3] = 't';
int l = __builtin_strlen (foo);
int n = sizeof (foo);
__builtin_memset (&foo[4], 0, n - 4);
__sync_fetch_and_add (&foo[11], 1);
return l;
}
and by starring at the gimple output which for this function is:
;; Function foo (foo, funcdef_no=0, decl_uid=1714, cgraph_uid=0)
foo ()
{
int n;
int l;
char foo[10];
int D.1725;
char * D.1724;
int D.1723;
long unsigned int D.1722;
int D.1721;
long unsigned int D.1720;
long unsigned int _1;
int _4;
long unsigned int _5;
int _6;
char * _7;
int _8;
char * _9;
unsigned long _10;
unsigned long _11;
unsigned long _12;
signed char * _13;
signed char _14;
_Bool _15;
unsigned long _16;
signed char _17;
_Bool _18;
_Bool _19;
char * _20;
unsigned long _21;
unsigned long _22;
unsigned long _23;
signed char * _24;
signed char _25;
_Bool _26;
unsigned long _27;
signed char _28;
_Bool _29;
_Bool _30;
char * _31;
unsigned long _32;
unsigned long _33;
unsigned long _34;
signed char * _35;
signed char _36;
_Bool _37;
unsigned long _38;
signed char _39;
_Bool _40;
_Bool _41;
char * _42;
unsigned long _43;
unsigned long _44;
unsigned long _45;
signed char * _46;
signed char _47;
_Bool _48;
unsigned long _49;
signed char _50;
_Bool _51;
_Bool _52;
char * _53;
unsigned long _54;
unsigned long _55;
unsigned long _56;
signed char * _57;
signed char _58;
_Bool _59;
unsigned long _60;
signed char _61;
_Bool _62;
_Bool _63;
char[10] * _64;
unsigned long _65;
unsigned long _66;
unsigned long _67;
signed char * _68;
signed char _69;
_Bool _70;
unsigned long _71;
signed char _72;
_Bool _73;
_Bool _74;
unsigned long _75;
unsigned long _76;
unsigned long _77;
signed char * _78;
signed char _79;
_Bool _80;
unsigned long _81;
signed char _82;
_Bool _83;
_Bool _84;
long unsigned int _85;
long unsigned int _86;
char * _87;
char * _88;
unsigned long _89;
unsigned long _90;
unsigned long _91;
signed char * _92;
signed char _93;
_Bool _94;
unsigned long _95;
signed char _96;
_Bool _97;
_Bool _98;
char * _99;
unsigned long _100;
unsigned long _101;
unsigned long _102;
signed char * _103;
signed char _104;
_Bool _105;
unsigned long _106;
signed char _107;
_Bool _108;
_Bool _109;
<bb 2>:
foo = {};
_9 = &foo[0];
_10 = (unsigned long) _9;
_11 = _10 >> 3;
_12 = _11 + 17592186044416;
_13 = (signed char *) _12;
_14 = *_13;
_15 = _14 != 0;
_16 = _10 & 7;
_17 = (signed char) _16;
_18 = _17 >= _14;
_19 = _15 & _18;
if (_19 != 0)
goto <bb 5>;
else
goto <bb 4>;
<bb 5>:
__asan_report_store1 (_10);
<bb 4>:
foo[0] = 116;
_20 = &foo[1];
_21 = (unsigned long) _20;
_22 = _21 >> 3;
_23 = _22 + 17592186044416;
_24 = (signed char *) _23;
_25 = *_24;
_26 = _25 != 0;
_27 = _21 & 7;
_28 = (signed char) _27;
_29 = _28 >= _25;
_30 = _26 & _29;
if (_30 != 0)
goto <bb 7>;
else
goto <bb 6>;
<bb 7>:
__asan_report_store1 (_21);
<bb 6>:
foo[1] = 101;
_31 = &foo[2];
_32 = (unsigned long) _31;
_33 = _32 >> 3;
_34 = _33 + 17592186044416;
_35 = (signed char *) _34;
_36 = *_35;
_37 = _36 != 0;
_38 = _32 & 7;
_39 = (signed char) _38;
_40 = _39 >= _36;
_41 = _37 & _40;
if (_41 != 0)
goto <bb 9>;
else
goto <bb 8>;
<bb 9>:
__asan_report_store1 (_32);
<bb 8>:
foo[2] = 115;
_42 = &foo[3];
_43 = (unsigned long) _42;
_44 = _43 >> 3;
_45 = _44 + 17592186044416;
_46 = (signed char *) _45;
_47 = *_46;
_48 = _47 != 0;
_49 = _43 & 7;
_50 = (signed char) _49;
_51 = _50 >= _47;
_52 = _48 & _51;
if (_52 != 0)
goto <bb 11>;
else
goto <bb 10>;
<bb 11>:
__asan_report_store1 (_43);
<bb 10>:
foo[3] = 116;
_53 = (char *) &foo;
_54 = (unsigned long) _53;
_55 = _54 >> 3;
_56 = _55 + 17592186044416;
_57 = (signed char *) _56;
_58 = *_57;
_59 = _58 != 0;
_60 = _54 & 7;
_61 = (signed char) _60;
_62 = _61 >= _58;
_63 = _59 & _62;
if (_63 != 0)
goto <bb 13>;
else
goto <bb 12>;
<bb 13>:
__asan_report_load1 (_54);
<bb 12>:
_1 = __builtin_strlen (&foo);
_64 = _53 + _1;
_65 = (unsigned long) _64;
_66 = _65 >> 3;
_67 = _66 + 17592186044416;
_68 = (signed char *) _67;
_69 = *_68;
_70 = _69 != 0;
_71 = _65 & 7;
_72 = (signed char) _71;
_73 = _72 >= _69;
_74 = _70 & _73;
if (_74 != 0)
goto <bb 15>;
else
goto <bb 14>;
<bb 15>:
__asan_report_load1 (_65);
<bb 14>:
l_2 = (int) _1;
n_3 = 10;
_4 = n_3 + -4;
_5 = (long unsigned int) _4;
_6 = l_2 + 1;
_7 = &foo[_6];
if (_5 != 0)
goto <bb 17>;
else
goto <bb 16>;
<bb 17>:
_75 = (unsigned long) _7;
_76 = _75 >> 3;
_77 = _76 + 17592186044416;
_78 = (signed char *) _77;
_79 = *_78;
_80 = _79 != 0;
_81 = _75 & 7;
_82 = (signed char) _81;
_83 = _82 >= _79;
_84 = _80 & _83;
_85 = _5;
_86 = _85 - 1;
_87 = _7;
_88 = _87 + _86;
_89 = (unsigned long) _88;
_90 = _89 >> 3;
_91 = _90 + 17592186044416;
_92 = (signed char *) _91;
_93 = *_92;
_94 = _93 != 0;
_95 = _89 & 7;
_96 = (signed char) _95;
_97 = _96 >= _93;
_98 = _94 & _97;
if (_98 != 0)
goto <bb 21>;
else
goto <bb 20>;
<bb 21>:
__asan_report_store1 (_89);
<bb 20>:
if (_84 != 0)
goto <bb 19>;
else
goto <bb 18>;
<bb 19>:
__asan_report_store1 (_75);
<bb 18>:
<bb 16>:
__builtin_memset (_7, 0, _5);
_99 = &foo[11];
_100 = (unsigned long) _99;
_101 = _100 >> 3;
_102 = _101 + 17592186044416;
_103 = (signed char *) _102;
_104 = *_103;
_105 = _104 != 0;
_106 = _100 & 7;
_107 = (signed char) _106;
_108 = _107 >= _104;
_109 = _105 & _108;
if (_109 != 0)
goto <bb 23>;
else
goto <bb 22>;
<bb 23>:
__asan_report_store1 (_100);
<bb 22>:
__sync_fetch_and_add_1 (&foo[11], 1);
_8 = l_2;
foo ={v} {CLOBBER};
<L1>:
return _8;
}
;; Function _GLOBAL__sub_I_00099_0_foo (_GLOBAL__sub_I_00099_0_foo, funcdef_no=1, decl_uid=1752, cgraph_uid=4)
_GLOBAL__sub_I_00099_0_foo ()
{
<bb 2>:
__asan_init ();
return;
}
gcc/
* gimple.h (is_gimple_builtin_call): Declare ...
* gimple.c (is_gimple_builtin_call): ... New public function.
* asan.c (insert_if_then_before_iter, instrument_mem_region_access,
instrument_strlen_call, maybe_instrument_builtin_call,
instrument_call): New static functions.
(create_cond_insert_point): Renamed
create_cond_insert_point_before_iter into this. Add a new
parameter to decide whether to insert the condition before or
after the statement iterator.
(build_check_stmt): Adjust for the new create_cond_insert_point.
Add a new parameter to decide whether to add the instrumentation
code before or after the statement iterator.
(instrument_assignment): Factorize from ...
(transform_statements): ... here. Use maybe_instrument_call to
instrument builtin function calls as well.
(instrument_derefs): Adjust for the new parameter of
build_check_stmt. Fix detection of bit-field access.
From-SVN: r193440
2012-11-12 16:53:25 +01:00
|
|
|
if (!gsi_end_p (gsi) && before_p)
|
2012-11-12 16:53:12 +01:00
|
|
|
gsi_prev (&gsi);
|
|
|
|
|
|
|
|
basic_block cur_bb = gsi_bb (*iter);
|
|
|
|
|
|
|
|
edge e = split_block (cur_bb, gsi_stmt (gsi));
|
|
|
|
|
|
|
|
/* Get a hold on the 'condition block', the 'then block' and the
|
|
|
|
'else block'. */
|
|
|
|
basic_block cond_bb = e->src;
|
|
|
|
basic_block fallthru_bb = e->dest;
|
|
|
|
basic_block then_bb = create_empty_bb (cond_bb);
|
2013-04-26 10:01:19 +02:00
|
|
|
if (current_loops)
|
|
|
|
{
|
|
|
|
add_bb_to_loop (then_bb, cond_bb->loop_father);
|
|
|
|
loops_state_set (LOOPS_NEED_FIXUP);
|
|
|
|
}
|
2012-11-12 16:53:12 +01:00
|
|
|
|
|
|
|
/* Set up the newly created 'then block'. */
|
|
|
|
e = make_edge (cond_bb, then_bb, EDGE_TRUE_VALUE);
|
|
|
|
int fallthrough_probability
|
|
|
|
= then_more_likely_p
|
|
|
|
? PROB_VERY_UNLIKELY
|
|
|
|
: PROB_ALWAYS - PROB_VERY_UNLIKELY;
|
|
|
|
e->probability = PROB_ALWAYS - fallthrough_probability;
|
2013-02-13 21:47:39 +01:00
|
|
|
if (create_then_fallthru_edge)
|
|
|
|
make_single_succ_edge (then_bb, fallthru_bb, EDGE_FALLTHRU);
|
2012-11-12 16:53:12 +01:00
|
|
|
|
|
|
|
/* Set up the fallthrough basic block. */
|
|
|
|
e = find_edge (cond_bb, fallthru_bb);
|
|
|
|
e->flags = EDGE_FALSE_VALUE;
|
|
|
|
e->count = cond_bb->count;
|
|
|
|
e->probability = fallthrough_probability;
|
|
|
|
|
|
|
|
/* Update dominance info for the newly created then_bb; note that
|
|
|
|
fallthru_bb's dominance info has already been updated by
|
|
|
|
split_bock. */
|
|
|
|
if (dom_info_available_p (CDI_DOMINATORS))
|
|
|
|
set_immediate_dominator (CDI_DOMINATORS, then_bb, cond_bb);
|
|
|
|
|
|
|
|
*then_block = then_bb;
|
|
|
|
*fallthrough_block = fallthru_bb;
|
|
|
|
*iter = gsi_start_bb (fallthru_bb);
|
|
|
|
|
|
|
|
return gsi_last_bb (cond_bb);
|
|
|
|
}
|
|
|
|
|
Instrument built-in memory access function calls
This patch instruments many memory access patterns through builtins.
Basically, for a call like:
__builtin_memset (from, 0, n_bytes);
the patch would only instrument the accesses at the beginning and at
the end of the memory region [from, from + n_bytes]. This is the
strategy used by the llvm implementation of asan.
This instrumentation is done for all the memory access builtin
functions that expose a well specified memory region -- one that
explicitly states the number of bytes accessed in the region.
A special treatment is used for __builtin_strlen. The patch
instruments the access to the first byte of its argument, as well as
the access to the byte (of the argument) at the offset returned by
strlen.
For the __sync_* and __atomic* calls the patch instruments the access
to the bytes pointed to by the argument.
While doing this, I have added a new parameter to build_check_stmt to
decide whether to insert the instrumentation code before or after the
statement iterator. This allows us to do away with the
gsi_{next,prev} dance we were doing in the callers of this function.
Tested by running cc1 -fasan on variations of simple programs like:
int
foo ()
{
char foo[10] = {0};
foo[0] = 't';
foo[1] = 'e';
foo[2] = 's';
foo[3] = 't';
int l = __builtin_strlen (foo);
int n = sizeof (foo);
__builtin_memset (&foo[4], 0, n - 4);
__sync_fetch_and_add (&foo[11], 1);
return l;
}
and by starring at the gimple output which for this function is:
;; Function foo (foo, funcdef_no=0, decl_uid=1714, cgraph_uid=0)
foo ()
{
int n;
int l;
char foo[10];
int D.1725;
char * D.1724;
int D.1723;
long unsigned int D.1722;
int D.1721;
long unsigned int D.1720;
long unsigned int _1;
int _4;
long unsigned int _5;
int _6;
char * _7;
int _8;
char * _9;
unsigned long _10;
unsigned long _11;
unsigned long _12;
signed char * _13;
signed char _14;
_Bool _15;
unsigned long _16;
signed char _17;
_Bool _18;
_Bool _19;
char * _20;
unsigned long _21;
unsigned long _22;
unsigned long _23;
signed char * _24;
signed char _25;
_Bool _26;
unsigned long _27;
signed char _28;
_Bool _29;
_Bool _30;
char * _31;
unsigned long _32;
unsigned long _33;
unsigned long _34;
signed char * _35;
signed char _36;
_Bool _37;
unsigned long _38;
signed char _39;
_Bool _40;
_Bool _41;
char * _42;
unsigned long _43;
unsigned long _44;
unsigned long _45;
signed char * _46;
signed char _47;
_Bool _48;
unsigned long _49;
signed char _50;
_Bool _51;
_Bool _52;
char * _53;
unsigned long _54;
unsigned long _55;
unsigned long _56;
signed char * _57;
signed char _58;
_Bool _59;
unsigned long _60;
signed char _61;
_Bool _62;
_Bool _63;
char[10] * _64;
unsigned long _65;
unsigned long _66;
unsigned long _67;
signed char * _68;
signed char _69;
_Bool _70;
unsigned long _71;
signed char _72;
_Bool _73;
_Bool _74;
unsigned long _75;
unsigned long _76;
unsigned long _77;
signed char * _78;
signed char _79;
_Bool _80;
unsigned long _81;
signed char _82;
_Bool _83;
_Bool _84;
long unsigned int _85;
long unsigned int _86;
char * _87;
char * _88;
unsigned long _89;
unsigned long _90;
unsigned long _91;
signed char * _92;
signed char _93;
_Bool _94;
unsigned long _95;
signed char _96;
_Bool _97;
_Bool _98;
char * _99;
unsigned long _100;
unsigned long _101;
unsigned long _102;
signed char * _103;
signed char _104;
_Bool _105;
unsigned long _106;
signed char _107;
_Bool _108;
_Bool _109;
<bb 2>:
foo = {};
_9 = &foo[0];
_10 = (unsigned long) _9;
_11 = _10 >> 3;
_12 = _11 + 17592186044416;
_13 = (signed char *) _12;
_14 = *_13;
_15 = _14 != 0;
_16 = _10 & 7;
_17 = (signed char) _16;
_18 = _17 >= _14;
_19 = _15 & _18;
if (_19 != 0)
goto <bb 5>;
else
goto <bb 4>;
<bb 5>:
__asan_report_store1 (_10);
<bb 4>:
foo[0] = 116;
_20 = &foo[1];
_21 = (unsigned long) _20;
_22 = _21 >> 3;
_23 = _22 + 17592186044416;
_24 = (signed char *) _23;
_25 = *_24;
_26 = _25 != 0;
_27 = _21 & 7;
_28 = (signed char) _27;
_29 = _28 >= _25;
_30 = _26 & _29;
if (_30 != 0)
goto <bb 7>;
else
goto <bb 6>;
<bb 7>:
__asan_report_store1 (_21);
<bb 6>:
foo[1] = 101;
_31 = &foo[2];
_32 = (unsigned long) _31;
_33 = _32 >> 3;
_34 = _33 + 17592186044416;
_35 = (signed char *) _34;
_36 = *_35;
_37 = _36 != 0;
_38 = _32 & 7;
_39 = (signed char) _38;
_40 = _39 >= _36;
_41 = _37 & _40;
if (_41 != 0)
goto <bb 9>;
else
goto <bb 8>;
<bb 9>:
__asan_report_store1 (_32);
<bb 8>:
foo[2] = 115;
_42 = &foo[3];
_43 = (unsigned long) _42;
_44 = _43 >> 3;
_45 = _44 + 17592186044416;
_46 = (signed char *) _45;
_47 = *_46;
_48 = _47 != 0;
_49 = _43 & 7;
_50 = (signed char) _49;
_51 = _50 >= _47;
_52 = _48 & _51;
if (_52 != 0)
goto <bb 11>;
else
goto <bb 10>;
<bb 11>:
__asan_report_store1 (_43);
<bb 10>:
foo[3] = 116;
_53 = (char *) &foo;
_54 = (unsigned long) _53;
_55 = _54 >> 3;
_56 = _55 + 17592186044416;
_57 = (signed char *) _56;
_58 = *_57;
_59 = _58 != 0;
_60 = _54 & 7;
_61 = (signed char) _60;
_62 = _61 >= _58;
_63 = _59 & _62;
if (_63 != 0)
goto <bb 13>;
else
goto <bb 12>;
<bb 13>:
__asan_report_load1 (_54);
<bb 12>:
_1 = __builtin_strlen (&foo);
_64 = _53 + _1;
_65 = (unsigned long) _64;
_66 = _65 >> 3;
_67 = _66 + 17592186044416;
_68 = (signed char *) _67;
_69 = *_68;
_70 = _69 != 0;
_71 = _65 & 7;
_72 = (signed char) _71;
_73 = _72 >= _69;
_74 = _70 & _73;
if (_74 != 0)
goto <bb 15>;
else
goto <bb 14>;
<bb 15>:
__asan_report_load1 (_65);
<bb 14>:
l_2 = (int) _1;
n_3 = 10;
_4 = n_3 + -4;
_5 = (long unsigned int) _4;
_6 = l_2 + 1;
_7 = &foo[_6];
if (_5 != 0)
goto <bb 17>;
else
goto <bb 16>;
<bb 17>:
_75 = (unsigned long) _7;
_76 = _75 >> 3;
_77 = _76 + 17592186044416;
_78 = (signed char *) _77;
_79 = *_78;
_80 = _79 != 0;
_81 = _75 & 7;
_82 = (signed char) _81;
_83 = _82 >= _79;
_84 = _80 & _83;
_85 = _5;
_86 = _85 - 1;
_87 = _7;
_88 = _87 + _86;
_89 = (unsigned long) _88;
_90 = _89 >> 3;
_91 = _90 + 17592186044416;
_92 = (signed char *) _91;
_93 = *_92;
_94 = _93 != 0;
_95 = _89 & 7;
_96 = (signed char) _95;
_97 = _96 >= _93;
_98 = _94 & _97;
if (_98 != 0)
goto <bb 21>;
else
goto <bb 20>;
<bb 21>:
__asan_report_store1 (_89);
<bb 20>:
if (_84 != 0)
goto <bb 19>;
else
goto <bb 18>;
<bb 19>:
__asan_report_store1 (_75);
<bb 18>:
<bb 16>:
__builtin_memset (_7, 0, _5);
_99 = &foo[11];
_100 = (unsigned long) _99;
_101 = _100 >> 3;
_102 = _101 + 17592186044416;
_103 = (signed char *) _102;
_104 = *_103;
_105 = _104 != 0;
_106 = _100 & 7;
_107 = (signed char) _106;
_108 = _107 >= _104;
_109 = _105 & _108;
if (_109 != 0)
goto <bb 23>;
else
goto <bb 22>;
<bb 23>:
__asan_report_store1 (_100);
<bb 22>:
__sync_fetch_and_add_1 (&foo[11], 1);
_8 = l_2;
foo ={v} {CLOBBER};
<L1>:
return _8;
}
;; Function _GLOBAL__sub_I_00099_0_foo (_GLOBAL__sub_I_00099_0_foo, funcdef_no=1, decl_uid=1752, cgraph_uid=4)
_GLOBAL__sub_I_00099_0_foo ()
{
<bb 2>:
__asan_init ();
return;
}
gcc/
* gimple.h (is_gimple_builtin_call): Declare ...
* gimple.c (is_gimple_builtin_call): ... New public function.
* asan.c (insert_if_then_before_iter, instrument_mem_region_access,
instrument_strlen_call, maybe_instrument_builtin_call,
instrument_call): New static functions.
(create_cond_insert_point): Renamed
create_cond_insert_point_before_iter into this. Add a new
parameter to decide whether to insert the condition before or
after the statement iterator.
(build_check_stmt): Adjust for the new create_cond_insert_point.
Add a new parameter to decide whether to add the instrumentation
code before or after the statement iterator.
(instrument_assignment): Factorize from ...
(transform_statements): ... here. Use maybe_instrument_call to
instrument builtin function calls as well.
(instrument_derefs): Adjust for the new parameter of
build_check_stmt. Fix detection of bit-field access.
From-SVN: r193440
2012-11-12 16:53:25 +01:00
|
|
|
/* Insert an if condition followed by a 'then block' right before the
|
|
|
|
statement pointed to by ITER. The fallthrough block -- which is the
|
|
|
|
else block of the condition as well as the destination of the
|
|
|
|
outcoming edge of the 'then block' -- starts with the statement
|
|
|
|
pointed to by ITER.
|
|
|
|
|
2012-11-12 17:18:59 +01:00
|
|
|
COND is the condition of the if.
|
Instrument built-in memory access function calls
This patch instruments many memory access patterns through builtins.
Basically, for a call like:
__builtin_memset (from, 0, n_bytes);
the patch would only instrument the accesses at the beginning and at
the end of the memory region [from, from + n_bytes]. This is the
strategy used by the llvm implementation of asan.
This instrumentation is done for all the memory access builtin
functions that expose a well specified memory region -- one that
explicitly states the number of bytes accessed in the region.
A special treatment is used for __builtin_strlen. The patch
instruments the access to the first byte of its argument, as well as
the access to the byte (of the argument) at the offset returned by
strlen.
For the __sync_* and __atomic* calls the patch instruments the access
to the bytes pointed to by the argument.
While doing this, I have added a new parameter to build_check_stmt to
decide whether to insert the instrumentation code before or after the
statement iterator. This allows us to do away with the
gsi_{next,prev} dance we were doing in the callers of this function.
Tested by running cc1 -fasan on variations of simple programs like:
int
foo ()
{
char foo[10] = {0};
foo[0] = 't';
foo[1] = 'e';
foo[2] = 's';
foo[3] = 't';
int l = __builtin_strlen (foo);
int n = sizeof (foo);
__builtin_memset (&foo[4], 0, n - 4);
__sync_fetch_and_add (&foo[11], 1);
return l;
}
and by starring at the gimple output which for this function is:
;; Function foo (foo, funcdef_no=0, decl_uid=1714, cgraph_uid=0)
foo ()
{
int n;
int l;
char foo[10];
int D.1725;
char * D.1724;
int D.1723;
long unsigned int D.1722;
int D.1721;
long unsigned int D.1720;
long unsigned int _1;
int _4;
long unsigned int _5;
int _6;
char * _7;
int _8;
char * _9;
unsigned long _10;
unsigned long _11;
unsigned long _12;
signed char * _13;
signed char _14;
_Bool _15;
unsigned long _16;
signed char _17;
_Bool _18;
_Bool _19;
char * _20;
unsigned long _21;
unsigned long _22;
unsigned long _23;
signed char * _24;
signed char _25;
_Bool _26;
unsigned long _27;
signed char _28;
_Bool _29;
_Bool _30;
char * _31;
unsigned long _32;
unsigned long _33;
unsigned long _34;
signed char * _35;
signed char _36;
_Bool _37;
unsigned long _38;
signed char _39;
_Bool _40;
_Bool _41;
char * _42;
unsigned long _43;
unsigned long _44;
unsigned long _45;
signed char * _46;
signed char _47;
_Bool _48;
unsigned long _49;
signed char _50;
_Bool _51;
_Bool _52;
char * _53;
unsigned long _54;
unsigned long _55;
unsigned long _56;
signed char * _57;
signed char _58;
_Bool _59;
unsigned long _60;
signed char _61;
_Bool _62;
_Bool _63;
char[10] * _64;
unsigned long _65;
unsigned long _66;
unsigned long _67;
signed char * _68;
signed char _69;
_Bool _70;
unsigned long _71;
signed char _72;
_Bool _73;
_Bool _74;
unsigned long _75;
unsigned long _76;
unsigned long _77;
signed char * _78;
signed char _79;
_Bool _80;
unsigned long _81;
signed char _82;
_Bool _83;
_Bool _84;
long unsigned int _85;
long unsigned int _86;
char * _87;
char * _88;
unsigned long _89;
unsigned long _90;
unsigned long _91;
signed char * _92;
signed char _93;
_Bool _94;
unsigned long _95;
signed char _96;
_Bool _97;
_Bool _98;
char * _99;
unsigned long _100;
unsigned long _101;
unsigned long _102;
signed char * _103;
signed char _104;
_Bool _105;
unsigned long _106;
signed char _107;
_Bool _108;
_Bool _109;
<bb 2>:
foo = {};
_9 = &foo[0];
_10 = (unsigned long) _9;
_11 = _10 >> 3;
_12 = _11 + 17592186044416;
_13 = (signed char *) _12;
_14 = *_13;
_15 = _14 != 0;
_16 = _10 & 7;
_17 = (signed char) _16;
_18 = _17 >= _14;
_19 = _15 & _18;
if (_19 != 0)
goto <bb 5>;
else
goto <bb 4>;
<bb 5>:
__asan_report_store1 (_10);
<bb 4>:
foo[0] = 116;
_20 = &foo[1];
_21 = (unsigned long) _20;
_22 = _21 >> 3;
_23 = _22 + 17592186044416;
_24 = (signed char *) _23;
_25 = *_24;
_26 = _25 != 0;
_27 = _21 & 7;
_28 = (signed char) _27;
_29 = _28 >= _25;
_30 = _26 & _29;
if (_30 != 0)
goto <bb 7>;
else
goto <bb 6>;
<bb 7>:
__asan_report_store1 (_21);
<bb 6>:
foo[1] = 101;
_31 = &foo[2];
_32 = (unsigned long) _31;
_33 = _32 >> 3;
_34 = _33 + 17592186044416;
_35 = (signed char *) _34;
_36 = *_35;
_37 = _36 != 0;
_38 = _32 & 7;
_39 = (signed char) _38;
_40 = _39 >= _36;
_41 = _37 & _40;
if (_41 != 0)
goto <bb 9>;
else
goto <bb 8>;
<bb 9>:
__asan_report_store1 (_32);
<bb 8>:
foo[2] = 115;
_42 = &foo[3];
_43 = (unsigned long) _42;
_44 = _43 >> 3;
_45 = _44 + 17592186044416;
_46 = (signed char *) _45;
_47 = *_46;
_48 = _47 != 0;
_49 = _43 & 7;
_50 = (signed char) _49;
_51 = _50 >= _47;
_52 = _48 & _51;
if (_52 != 0)
goto <bb 11>;
else
goto <bb 10>;
<bb 11>:
__asan_report_store1 (_43);
<bb 10>:
foo[3] = 116;
_53 = (char *) &foo;
_54 = (unsigned long) _53;
_55 = _54 >> 3;
_56 = _55 + 17592186044416;
_57 = (signed char *) _56;
_58 = *_57;
_59 = _58 != 0;
_60 = _54 & 7;
_61 = (signed char) _60;
_62 = _61 >= _58;
_63 = _59 & _62;
if (_63 != 0)
goto <bb 13>;
else
goto <bb 12>;
<bb 13>:
__asan_report_load1 (_54);
<bb 12>:
_1 = __builtin_strlen (&foo);
_64 = _53 + _1;
_65 = (unsigned long) _64;
_66 = _65 >> 3;
_67 = _66 + 17592186044416;
_68 = (signed char *) _67;
_69 = *_68;
_70 = _69 != 0;
_71 = _65 & 7;
_72 = (signed char) _71;
_73 = _72 >= _69;
_74 = _70 & _73;
if (_74 != 0)
goto <bb 15>;
else
goto <bb 14>;
<bb 15>:
__asan_report_load1 (_65);
<bb 14>:
l_2 = (int) _1;
n_3 = 10;
_4 = n_3 + -4;
_5 = (long unsigned int) _4;
_6 = l_2 + 1;
_7 = &foo[_6];
if (_5 != 0)
goto <bb 17>;
else
goto <bb 16>;
<bb 17>:
_75 = (unsigned long) _7;
_76 = _75 >> 3;
_77 = _76 + 17592186044416;
_78 = (signed char *) _77;
_79 = *_78;
_80 = _79 != 0;
_81 = _75 & 7;
_82 = (signed char) _81;
_83 = _82 >= _79;
_84 = _80 & _83;
_85 = _5;
_86 = _85 - 1;
_87 = _7;
_88 = _87 + _86;
_89 = (unsigned long) _88;
_90 = _89 >> 3;
_91 = _90 + 17592186044416;
_92 = (signed char *) _91;
_93 = *_92;
_94 = _93 != 0;
_95 = _89 & 7;
_96 = (signed char) _95;
_97 = _96 >= _93;
_98 = _94 & _97;
if (_98 != 0)
goto <bb 21>;
else
goto <bb 20>;
<bb 21>:
__asan_report_store1 (_89);
<bb 20>:
if (_84 != 0)
goto <bb 19>;
else
goto <bb 18>;
<bb 19>:
__asan_report_store1 (_75);
<bb 18>:
<bb 16>:
__builtin_memset (_7, 0, _5);
_99 = &foo[11];
_100 = (unsigned long) _99;
_101 = _100 >> 3;
_102 = _101 + 17592186044416;
_103 = (signed char *) _102;
_104 = *_103;
_105 = _104 != 0;
_106 = _100 & 7;
_107 = (signed char) _106;
_108 = _107 >= _104;
_109 = _105 & _108;
if (_109 != 0)
goto <bb 23>;
else
goto <bb 22>;
<bb 23>:
__asan_report_store1 (_100);
<bb 22>:
__sync_fetch_and_add_1 (&foo[11], 1);
_8 = l_2;
foo ={v} {CLOBBER};
<L1>:
return _8;
}
;; Function _GLOBAL__sub_I_00099_0_foo (_GLOBAL__sub_I_00099_0_foo, funcdef_no=1, decl_uid=1752, cgraph_uid=4)
_GLOBAL__sub_I_00099_0_foo ()
{
<bb 2>:
__asan_init ();
return;
}
gcc/
* gimple.h (is_gimple_builtin_call): Declare ...
* gimple.c (is_gimple_builtin_call): ... New public function.
* asan.c (insert_if_then_before_iter, instrument_mem_region_access,
instrument_strlen_call, maybe_instrument_builtin_call,
instrument_call): New static functions.
(create_cond_insert_point): Renamed
create_cond_insert_point_before_iter into this. Add a new
parameter to decide whether to insert the condition before or
after the statement iterator.
(build_check_stmt): Adjust for the new create_cond_insert_point.
Add a new parameter to decide whether to add the instrumentation
code before or after the statement iterator.
(instrument_assignment): Factorize from ...
(transform_statements): ... here. Use maybe_instrument_call to
instrument builtin function calls as well.
(instrument_derefs): Adjust for the new parameter of
build_check_stmt. Fix detection of bit-field access.
From-SVN: r193440
2012-11-12 16:53:25 +01:00
|
|
|
|
|
|
|
If THEN_MORE_LIKELY_P is true, the probability of the edge to the
|
|
|
|
'then block' is higher than the probability of the edge to the
|
|
|
|
fallthrough block.
|
|
|
|
|
|
|
|
Upon completion of the function, *THEN_BB is set to the newly
|
|
|
|
inserted 'then block' and similarly, *FALLTHROUGH_BB is set to the
|
|
|
|
fallthrough block.
|
|
|
|
|
|
|
|
*ITER is adjusted to still point to the same statement it was
|
|
|
|
pointing to initially. */
|
|
|
|
|
|
|
|
static void
|
2014-11-19 18:00:54 +01:00
|
|
|
insert_if_then_before_iter (gcond *cond,
|
Instrument built-in memory access function calls
This patch instruments many memory access patterns through builtins.
Basically, for a call like:
__builtin_memset (from, 0, n_bytes);
the patch would only instrument the accesses at the beginning and at
the end of the memory region [from, from + n_bytes]. This is the
strategy used by the llvm implementation of asan.
This instrumentation is done for all the memory access builtin
functions that expose a well specified memory region -- one that
explicitly states the number of bytes accessed in the region.
A special treatment is used for __builtin_strlen. The patch
instruments the access to the first byte of its argument, as well as
the access to the byte (of the argument) at the offset returned by
strlen.
For the __sync_* and __atomic* calls the patch instruments the access
to the bytes pointed to by the argument.
While doing this, I have added a new parameter to build_check_stmt to
decide whether to insert the instrumentation code before or after the
statement iterator. This allows us to do away with the
gsi_{next,prev} dance we were doing in the callers of this function.
Tested by running cc1 -fasan on variations of simple programs like:
int
foo ()
{
char foo[10] = {0};
foo[0] = 't';
foo[1] = 'e';
foo[2] = 's';
foo[3] = 't';
int l = __builtin_strlen (foo);
int n = sizeof (foo);
__builtin_memset (&foo[4], 0, n - 4);
__sync_fetch_and_add (&foo[11], 1);
return l;
}
and by starring at the gimple output which for this function is:
;; Function foo (foo, funcdef_no=0, decl_uid=1714, cgraph_uid=0)
foo ()
{
int n;
int l;
char foo[10];
int D.1725;
char * D.1724;
int D.1723;
long unsigned int D.1722;
int D.1721;
long unsigned int D.1720;
long unsigned int _1;
int _4;
long unsigned int _5;
int _6;
char * _7;
int _8;
char * _9;
unsigned long _10;
unsigned long _11;
unsigned long _12;
signed char * _13;
signed char _14;
_Bool _15;
unsigned long _16;
signed char _17;
_Bool _18;
_Bool _19;
char * _20;
unsigned long _21;
unsigned long _22;
unsigned long _23;
signed char * _24;
signed char _25;
_Bool _26;
unsigned long _27;
signed char _28;
_Bool _29;
_Bool _30;
char * _31;
unsigned long _32;
unsigned long _33;
unsigned long _34;
signed char * _35;
signed char _36;
_Bool _37;
unsigned long _38;
signed char _39;
_Bool _40;
_Bool _41;
char * _42;
unsigned long _43;
unsigned long _44;
unsigned long _45;
signed char * _46;
signed char _47;
_Bool _48;
unsigned long _49;
signed char _50;
_Bool _51;
_Bool _52;
char * _53;
unsigned long _54;
unsigned long _55;
unsigned long _56;
signed char * _57;
signed char _58;
_Bool _59;
unsigned long _60;
signed char _61;
_Bool _62;
_Bool _63;
char[10] * _64;
unsigned long _65;
unsigned long _66;
unsigned long _67;
signed char * _68;
signed char _69;
_Bool _70;
unsigned long _71;
signed char _72;
_Bool _73;
_Bool _74;
unsigned long _75;
unsigned long _76;
unsigned long _77;
signed char * _78;
signed char _79;
_Bool _80;
unsigned long _81;
signed char _82;
_Bool _83;
_Bool _84;
long unsigned int _85;
long unsigned int _86;
char * _87;
char * _88;
unsigned long _89;
unsigned long _90;
unsigned long _91;
signed char * _92;
signed char _93;
_Bool _94;
unsigned long _95;
signed char _96;
_Bool _97;
_Bool _98;
char * _99;
unsigned long _100;
unsigned long _101;
unsigned long _102;
signed char * _103;
signed char _104;
_Bool _105;
unsigned long _106;
signed char _107;
_Bool _108;
_Bool _109;
<bb 2>:
foo = {};
_9 = &foo[0];
_10 = (unsigned long) _9;
_11 = _10 >> 3;
_12 = _11 + 17592186044416;
_13 = (signed char *) _12;
_14 = *_13;
_15 = _14 != 0;
_16 = _10 & 7;
_17 = (signed char) _16;
_18 = _17 >= _14;
_19 = _15 & _18;
if (_19 != 0)
goto <bb 5>;
else
goto <bb 4>;
<bb 5>:
__asan_report_store1 (_10);
<bb 4>:
foo[0] = 116;
_20 = &foo[1];
_21 = (unsigned long) _20;
_22 = _21 >> 3;
_23 = _22 + 17592186044416;
_24 = (signed char *) _23;
_25 = *_24;
_26 = _25 != 0;
_27 = _21 & 7;
_28 = (signed char) _27;
_29 = _28 >= _25;
_30 = _26 & _29;
if (_30 != 0)
goto <bb 7>;
else
goto <bb 6>;
<bb 7>:
__asan_report_store1 (_21);
<bb 6>:
foo[1] = 101;
_31 = &foo[2];
_32 = (unsigned long) _31;
_33 = _32 >> 3;
_34 = _33 + 17592186044416;
_35 = (signed char *) _34;
_36 = *_35;
_37 = _36 != 0;
_38 = _32 & 7;
_39 = (signed char) _38;
_40 = _39 >= _36;
_41 = _37 & _40;
if (_41 != 0)
goto <bb 9>;
else
goto <bb 8>;
<bb 9>:
__asan_report_store1 (_32);
<bb 8>:
foo[2] = 115;
_42 = &foo[3];
_43 = (unsigned long) _42;
_44 = _43 >> 3;
_45 = _44 + 17592186044416;
_46 = (signed char *) _45;
_47 = *_46;
_48 = _47 != 0;
_49 = _43 & 7;
_50 = (signed char) _49;
_51 = _50 >= _47;
_52 = _48 & _51;
if (_52 != 0)
goto <bb 11>;
else
goto <bb 10>;
<bb 11>:
__asan_report_store1 (_43);
<bb 10>:
foo[3] = 116;
_53 = (char *) &foo;
_54 = (unsigned long) _53;
_55 = _54 >> 3;
_56 = _55 + 17592186044416;
_57 = (signed char *) _56;
_58 = *_57;
_59 = _58 != 0;
_60 = _54 & 7;
_61 = (signed char) _60;
_62 = _61 >= _58;
_63 = _59 & _62;
if (_63 != 0)
goto <bb 13>;
else
goto <bb 12>;
<bb 13>:
__asan_report_load1 (_54);
<bb 12>:
_1 = __builtin_strlen (&foo);
_64 = _53 + _1;
_65 = (unsigned long) _64;
_66 = _65 >> 3;
_67 = _66 + 17592186044416;
_68 = (signed char *) _67;
_69 = *_68;
_70 = _69 != 0;
_71 = _65 & 7;
_72 = (signed char) _71;
_73 = _72 >= _69;
_74 = _70 & _73;
if (_74 != 0)
goto <bb 15>;
else
goto <bb 14>;
<bb 15>:
__asan_report_load1 (_65);
<bb 14>:
l_2 = (int) _1;
n_3 = 10;
_4 = n_3 + -4;
_5 = (long unsigned int) _4;
_6 = l_2 + 1;
_7 = &foo[_6];
if (_5 != 0)
goto <bb 17>;
else
goto <bb 16>;
<bb 17>:
_75 = (unsigned long) _7;
_76 = _75 >> 3;
_77 = _76 + 17592186044416;
_78 = (signed char *) _77;
_79 = *_78;
_80 = _79 != 0;
_81 = _75 & 7;
_82 = (signed char) _81;
_83 = _82 >= _79;
_84 = _80 & _83;
_85 = _5;
_86 = _85 - 1;
_87 = _7;
_88 = _87 + _86;
_89 = (unsigned long) _88;
_90 = _89 >> 3;
_91 = _90 + 17592186044416;
_92 = (signed char *) _91;
_93 = *_92;
_94 = _93 != 0;
_95 = _89 & 7;
_96 = (signed char) _95;
_97 = _96 >= _93;
_98 = _94 & _97;
if (_98 != 0)
goto <bb 21>;
else
goto <bb 20>;
<bb 21>:
__asan_report_store1 (_89);
<bb 20>:
if (_84 != 0)
goto <bb 19>;
else
goto <bb 18>;
<bb 19>:
__asan_report_store1 (_75);
<bb 18>:
<bb 16>:
__builtin_memset (_7, 0, _5);
_99 = &foo[11];
_100 = (unsigned long) _99;
_101 = _100 >> 3;
_102 = _101 + 17592186044416;
_103 = (signed char *) _102;
_104 = *_103;
_105 = _104 != 0;
_106 = _100 & 7;
_107 = (signed char) _106;
_108 = _107 >= _104;
_109 = _105 & _108;
if (_109 != 0)
goto <bb 23>;
else
goto <bb 22>;
<bb 23>:
__asan_report_store1 (_100);
<bb 22>:
__sync_fetch_and_add_1 (&foo[11], 1);
_8 = l_2;
foo ={v} {CLOBBER};
<L1>:
return _8;
}
;; Function _GLOBAL__sub_I_00099_0_foo (_GLOBAL__sub_I_00099_0_foo, funcdef_no=1, decl_uid=1752, cgraph_uid=4)
_GLOBAL__sub_I_00099_0_foo ()
{
<bb 2>:
__asan_init ();
return;
}
gcc/
* gimple.h (is_gimple_builtin_call): Declare ...
* gimple.c (is_gimple_builtin_call): ... New public function.
* asan.c (insert_if_then_before_iter, instrument_mem_region_access,
instrument_strlen_call, maybe_instrument_builtin_call,
instrument_call): New static functions.
(create_cond_insert_point): Renamed
create_cond_insert_point_before_iter into this. Add a new
parameter to decide whether to insert the condition before or
after the statement iterator.
(build_check_stmt): Adjust for the new create_cond_insert_point.
Add a new parameter to decide whether to add the instrumentation
code before or after the statement iterator.
(instrument_assignment): Factorize from ...
(transform_statements): ... here. Use maybe_instrument_call to
instrument builtin function calls as well.
(instrument_derefs): Adjust for the new parameter of
build_check_stmt. Fix detection of bit-field access.
From-SVN: r193440
2012-11-12 16:53:25 +01:00
|
|
|
gimple_stmt_iterator *iter,
|
|
|
|
bool then_more_likely_p,
|
|
|
|
basic_block *then_bb,
|
|
|
|
basic_block *fallthrough_bb)
|
|
|
|
{
|
|
|
|
gimple_stmt_iterator cond_insert_point =
|
|
|
|
create_cond_insert_point (iter,
|
|
|
|
/*before_p=*/true,
|
|
|
|
then_more_likely_p,
|
2013-02-13 21:47:39 +01:00
|
|
|
/*create_then_fallthru_edge=*/true,
|
Instrument built-in memory access function calls
This patch instruments many memory access patterns through builtins.
Basically, for a call like:
__builtin_memset (from, 0, n_bytes);
the patch would only instrument the accesses at the beginning and at
the end of the memory region [from, from + n_bytes]. This is the
strategy used by the llvm implementation of asan.
This instrumentation is done for all the memory access builtin
functions that expose a well specified memory region -- one that
explicitly states the number of bytes accessed in the region.
A special treatment is used for __builtin_strlen. The patch
instruments the access to the first byte of its argument, as well as
the access to the byte (of the argument) at the offset returned by
strlen.
For the __sync_* and __atomic* calls the patch instruments the access
to the bytes pointed to by the argument.
While doing this, I have added a new parameter to build_check_stmt to
decide whether to insert the instrumentation code before or after the
statement iterator. This allows us to do away with the
gsi_{next,prev} dance we were doing in the callers of this function.
Tested by running cc1 -fasan on variations of simple programs like:
int
foo ()
{
char foo[10] = {0};
foo[0] = 't';
foo[1] = 'e';
foo[2] = 's';
foo[3] = 't';
int l = __builtin_strlen (foo);
int n = sizeof (foo);
__builtin_memset (&foo[4], 0, n - 4);
__sync_fetch_and_add (&foo[11], 1);
return l;
}
and by starring at the gimple output which for this function is:
;; Function foo (foo, funcdef_no=0, decl_uid=1714, cgraph_uid=0)
foo ()
{
int n;
int l;
char foo[10];
int D.1725;
char * D.1724;
int D.1723;
long unsigned int D.1722;
int D.1721;
long unsigned int D.1720;
long unsigned int _1;
int _4;
long unsigned int _5;
int _6;
char * _7;
int _8;
char * _9;
unsigned long _10;
unsigned long _11;
unsigned long _12;
signed char * _13;
signed char _14;
_Bool _15;
unsigned long _16;
signed char _17;
_Bool _18;
_Bool _19;
char * _20;
unsigned long _21;
unsigned long _22;
unsigned long _23;
signed char * _24;
signed char _25;
_Bool _26;
unsigned long _27;
signed char _28;
_Bool _29;
_Bool _30;
char * _31;
unsigned long _32;
unsigned long _33;
unsigned long _34;
signed char * _35;
signed char _36;
_Bool _37;
unsigned long _38;
signed char _39;
_Bool _40;
_Bool _41;
char * _42;
unsigned long _43;
unsigned long _44;
unsigned long _45;
signed char * _46;
signed char _47;
_Bool _48;
unsigned long _49;
signed char _50;
_Bool _51;
_Bool _52;
char * _53;
unsigned long _54;
unsigned long _55;
unsigned long _56;
signed char * _57;
signed char _58;
_Bool _59;
unsigned long _60;
signed char _61;
_Bool _62;
_Bool _63;
char[10] * _64;
unsigned long _65;
unsigned long _66;
unsigned long _67;
signed char * _68;
signed char _69;
_Bool _70;
unsigned long _71;
signed char _72;
_Bool _73;
_Bool _74;
unsigned long _75;
unsigned long _76;
unsigned long _77;
signed char * _78;
signed char _79;
_Bool _80;
unsigned long _81;
signed char _82;
_Bool _83;
_Bool _84;
long unsigned int _85;
long unsigned int _86;
char * _87;
char * _88;
unsigned long _89;
unsigned long _90;
unsigned long _91;
signed char * _92;
signed char _93;
_Bool _94;
unsigned long _95;
signed char _96;
_Bool _97;
_Bool _98;
char * _99;
unsigned long _100;
unsigned long _101;
unsigned long _102;
signed char * _103;
signed char _104;
_Bool _105;
unsigned long _106;
signed char _107;
_Bool _108;
_Bool _109;
<bb 2>:
foo = {};
_9 = &foo[0];
_10 = (unsigned long) _9;
_11 = _10 >> 3;
_12 = _11 + 17592186044416;
_13 = (signed char *) _12;
_14 = *_13;
_15 = _14 != 0;
_16 = _10 & 7;
_17 = (signed char) _16;
_18 = _17 >= _14;
_19 = _15 & _18;
if (_19 != 0)
goto <bb 5>;
else
goto <bb 4>;
<bb 5>:
__asan_report_store1 (_10);
<bb 4>:
foo[0] = 116;
_20 = &foo[1];
_21 = (unsigned long) _20;
_22 = _21 >> 3;
_23 = _22 + 17592186044416;
_24 = (signed char *) _23;
_25 = *_24;
_26 = _25 != 0;
_27 = _21 & 7;
_28 = (signed char) _27;
_29 = _28 >= _25;
_30 = _26 & _29;
if (_30 != 0)
goto <bb 7>;
else
goto <bb 6>;
<bb 7>:
__asan_report_store1 (_21);
<bb 6>:
foo[1] = 101;
_31 = &foo[2];
_32 = (unsigned long) _31;
_33 = _32 >> 3;
_34 = _33 + 17592186044416;
_35 = (signed char *) _34;
_36 = *_35;
_37 = _36 != 0;
_38 = _32 & 7;
_39 = (signed char) _38;
_40 = _39 >= _36;
_41 = _37 & _40;
if (_41 != 0)
goto <bb 9>;
else
goto <bb 8>;
<bb 9>:
__asan_report_store1 (_32);
<bb 8>:
foo[2] = 115;
_42 = &foo[3];
_43 = (unsigned long) _42;
_44 = _43 >> 3;
_45 = _44 + 17592186044416;
_46 = (signed char *) _45;
_47 = *_46;
_48 = _47 != 0;
_49 = _43 & 7;
_50 = (signed char) _49;
_51 = _50 >= _47;
_52 = _48 & _51;
if (_52 != 0)
goto <bb 11>;
else
goto <bb 10>;
<bb 11>:
__asan_report_store1 (_43);
<bb 10>:
foo[3] = 116;
_53 = (char *) &foo;
_54 = (unsigned long) _53;
_55 = _54 >> 3;
_56 = _55 + 17592186044416;
_57 = (signed char *) _56;
_58 = *_57;
_59 = _58 != 0;
_60 = _54 & 7;
_61 = (signed char) _60;
_62 = _61 >= _58;
_63 = _59 & _62;
if (_63 != 0)
goto <bb 13>;
else
goto <bb 12>;
<bb 13>:
__asan_report_load1 (_54);
<bb 12>:
_1 = __builtin_strlen (&foo);
_64 = _53 + _1;
_65 = (unsigned long) _64;
_66 = _65 >> 3;
_67 = _66 + 17592186044416;
_68 = (signed char *) _67;
_69 = *_68;
_70 = _69 != 0;
_71 = _65 & 7;
_72 = (signed char) _71;
_73 = _72 >= _69;
_74 = _70 & _73;
if (_74 != 0)
goto <bb 15>;
else
goto <bb 14>;
<bb 15>:
__asan_report_load1 (_65);
<bb 14>:
l_2 = (int) _1;
n_3 = 10;
_4 = n_3 + -4;
_5 = (long unsigned int) _4;
_6 = l_2 + 1;
_7 = &foo[_6];
if (_5 != 0)
goto <bb 17>;
else
goto <bb 16>;
<bb 17>:
_75 = (unsigned long) _7;
_76 = _75 >> 3;
_77 = _76 + 17592186044416;
_78 = (signed char *) _77;
_79 = *_78;
_80 = _79 != 0;
_81 = _75 & 7;
_82 = (signed char) _81;
_83 = _82 >= _79;
_84 = _80 & _83;
_85 = _5;
_86 = _85 - 1;
_87 = _7;
_88 = _87 + _86;
_89 = (unsigned long) _88;
_90 = _89 >> 3;
_91 = _90 + 17592186044416;
_92 = (signed char *) _91;
_93 = *_92;
_94 = _93 != 0;
_95 = _89 & 7;
_96 = (signed char) _95;
_97 = _96 >= _93;
_98 = _94 & _97;
if (_98 != 0)
goto <bb 21>;
else
goto <bb 20>;
<bb 21>:
__asan_report_store1 (_89);
<bb 20>:
if (_84 != 0)
goto <bb 19>;
else
goto <bb 18>;
<bb 19>:
__asan_report_store1 (_75);
<bb 18>:
<bb 16>:
__builtin_memset (_7, 0, _5);
_99 = &foo[11];
_100 = (unsigned long) _99;
_101 = _100 >> 3;
_102 = _101 + 17592186044416;
_103 = (signed char *) _102;
_104 = *_103;
_105 = _104 != 0;
_106 = _100 & 7;
_107 = (signed char) _106;
_108 = _107 >= _104;
_109 = _105 & _108;
if (_109 != 0)
goto <bb 23>;
else
goto <bb 22>;
<bb 23>:
__asan_report_store1 (_100);
<bb 22>:
__sync_fetch_and_add_1 (&foo[11], 1);
_8 = l_2;
foo ={v} {CLOBBER};
<L1>:
return _8;
}
;; Function _GLOBAL__sub_I_00099_0_foo (_GLOBAL__sub_I_00099_0_foo, funcdef_no=1, decl_uid=1752, cgraph_uid=4)
_GLOBAL__sub_I_00099_0_foo ()
{
<bb 2>:
__asan_init ();
return;
}
gcc/
* gimple.h (is_gimple_builtin_call): Declare ...
* gimple.c (is_gimple_builtin_call): ... New public function.
* asan.c (insert_if_then_before_iter, instrument_mem_region_access,
instrument_strlen_call, maybe_instrument_builtin_call,
instrument_call): New static functions.
(create_cond_insert_point): Renamed
create_cond_insert_point_before_iter into this. Add a new
parameter to decide whether to insert the condition before or
after the statement iterator.
(build_check_stmt): Adjust for the new create_cond_insert_point.
Add a new parameter to decide whether to add the instrumentation
code before or after the statement iterator.
(instrument_assignment): Factorize from ...
(transform_statements): ... here. Use maybe_instrument_call to
instrument builtin function calls as well.
(instrument_derefs): Adjust for the new parameter of
build_check_stmt. Fix detection of bit-field access.
From-SVN: r193440
2012-11-12 16:53:25 +01:00
|
|
|
then_bb,
|
|
|
|
fallthrough_bb);
|
|
|
|
gsi_insert_after (&cond_insert_point, cond, GSI_NEW_STMT);
|
|
|
|
}
|
|
|
|
|
2016-11-07 11:23:38 +01:00
|
|
|
/* Build (base_addr >> ASAN_SHADOW_SHIFT) + asan_shadow_offset ().
|
|
|
|
If RETURN_ADDRESS is set to true, return memory location instread
|
|
|
|
of a value in the shadow memory. */
|
2014-05-30 20:37:05 +02:00
|
|
|
|
|
|
|
static tree
|
|
|
|
build_shadow_mem_access (gimple_stmt_iterator *gsi, location_t location,
|
2016-11-07 11:23:38 +01:00
|
|
|
tree base_addr, tree shadow_ptr_type,
|
|
|
|
bool return_address = false)
|
2014-05-30 20:37:05 +02:00
|
|
|
{
|
|
|
|
tree t, uintptr_type = TREE_TYPE (base_addr);
|
|
|
|
tree shadow_type = TREE_TYPE (shadow_ptr_type);
|
2015-09-20 02:52:59 +02:00
|
|
|
gimple *g;
|
2014-05-30 20:37:05 +02:00
|
|
|
|
|
|
|
t = build_int_cst (uintptr_type, ASAN_SHADOW_SHIFT);
|
gimple.h (gimple_build_assign_stat): Remove prototype.
* gimple.h (gimple_build_assign_stat): Remove prototype.
(gimple_build_assign): Remove define. Add overload prototypes
with tree lhs and either a tree rhs, or enum tree_code and
1, 2 or 3 tree operands.
* gimple.c (gimple_build_assign_stat): Renamed to...
(gimple_build_assign): ... this. Add overloads with
enum tree_code and 1, 2 or 3 tree operands.
(gimple_build_assign_with_ops): Remove 1 and 2 operand overloads.
Rename the 3 operand overload to ...
(gimple_build_assign_1): ... this. Make it static inline.
* tree-ssa-strlen.c (get_string_length): Use gimple_build_assign
instead of gimple_build_assign_with_ops, swap the order of first
two arguments and adjust formatting where necessary.
* tree-vect-slp.c (vect_get_constant_vectors,
vect_create_mask_and_perm): Likewise.
* tree-ssa-forwprop.c (simplify_rotate): Likewise.
* asan.c (build_shadow_mem_access, maybe_create_ssa_name,
maybe_cast_to_ptrmode, asan_expand_check_ifn): Likewise.
* tsan.c (instrument_builtin_call): Likewise.
* tree-chkp.c (chkp_compute_bounds_for_assignment,
chkp_generate_extern_var_bounds): Likewise.
* tree-loop-distribution.c (generate_memset_builtin): Likewise.
* tree-ssa-loop-im.c (rewrite_reciprocal): Likewise.
* gimple-builder.c (build_assign, build_type_cast): Likewise.
* tree-vect-loop-manip.c (vect_create_cond_for_align_checks): Likewise.
* value-prof.c (gimple_divmod_fixed_value, gimple_mod_pow2,
gimple_mod_subtract): Likewise.
* gimple-match-head.c (maybe_push_res_to_seq): Likewise.
* tree-vect-patterns.c (vect_recog_dot_prod_pattern,
vect_recog_sad_pattern, vect_handle_widen_op_by_const,
vect_recog_widen_mult_pattern, vect_recog_pow_pattern,
vect_recog_widen_sum_pattern, vect_operation_fits_smaller_type,
vect_recog_over_widening_pattern, vect_recog_widen_shift_pattern,
vect_recog_rotate_pattern, vect_recog_vector_vector_shift_pattern,
vect_recog_divmod_pattern, vect_recog_mixed_size_cond_pattern,
adjust_bool_pattern_cast, adjust_bool_pattern,
vect_recog_bool_pattern): Likewise.
* gimple-ssa-strength-reduction.c (create_add_on_incoming_edge,
insert_initializers, introduce_cast_before_cand,
replace_one_candidate): Likewise.
* tree-ssa-math-opts.c (insert_reciprocals, powi_as_mults_1,
powi_as_mults, build_and_insert_binop, build_and_insert_cast,
pass_cse_sincos::execute, bswap_replace, convert_mult_to_fma):
Likewise.
* tree-tailcall.c (adjust_return_value_with_ops,
update_accumulator_with_ops): Likewise.
* tree-predcom.c (reassociate_to_the_same_stmt): Likewise.
* tree-ssa-reassoc.c (build_and_add_sum,
optimize_range_tests_to_bit_test, update_ops,
maybe_optimize_range_tests, rewrite_expr_tree, linearize_expr,
negate_value, repropagate_negates, attempt_builtin_powi,
reassociate_bb): Likewise.
* tree-vect-loop.c (vect_is_simple_reduction_1,
get_initial_def_for_induction, vect_create_epilog_for_reduction):
Likewise.
* ipa-split.c (split_function): Likewise.
* tree-ssa-phiopt.c (conditional_replacement, minmax_replacement,
abs_replacement, neg_replacement): Likewise.
* tree-profile.c (gimple_gen_edge_profiler): Likewise.
* tree-vrp.c (simplify_truth_ops_using_ranges,
simplify_float_conversion_using_ranges,
simplify_internal_call_using_ranges): Likewise.
* gimple-fold.c (rewrite_to_defined_overflow, gimple_build): Likewise.
* tree-vect-generic.c (expand_vector_divmod,
optimize_vector_constructor): Likewise.
* ubsan.c (ubsan_expand_null_ifn, ubsan_expand_objsize_ifn,
instrument_bool_enum_load): Likewise.
* tree-ssa-loop-manip.c (create_iv): Likewise.
* omp-low.c (lower_rec_input_clauses, expand_omp_for_generic,
expand_omp_for_static_nochunk, expand_omp_for_static_chunk,
expand_cilk_for, simd_clone_adjust): Likewise.
* trans-mem.c (expand_transaction): Likewise.
* tree-vect-data-refs.c (bump_vector_ptr, vect_permute_store_chain,
vect_setup_realignment, vect_permute_load_chain,
vect_shift_permute_load_chain): Likewise.
* tree-vect-stmts.c (vect_init_vector, vectorizable_mask_load_store,
vectorizable_simd_clone_call, vect_gen_widened_results_half,
vect_create_vectorized_demotion_stmts, vectorizable_conversion,
vectorizable_shift, vectorizable_operation, vectorizable_store,
permute_vec_elements, vectorizable_load): Likewise.
From-SVN: r218216
2014-12-01 14:58:10 +01:00
|
|
|
g = gimple_build_assign (make_ssa_name (uintptr_type), RSHIFT_EXPR,
|
|
|
|
base_addr, t);
|
2014-05-30 20:37:05 +02:00
|
|
|
gimple_set_location (g, location);
|
|
|
|
gsi_insert_after (gsi, g, GSI_NEW_STMT);
|
|
|
|
|
2014-10-28 10:46:29 +01:00
|
|
|
t = build_int_cst (uintptr_type, asan_shadow_offset ());
|
gimple.h (gimple_build_assign_stat): Remove prototype.
* gimple.h (gimple_build_assign_stat): Remove prototype.
(gimple_build_assign): Remove define. Add overload prototypes
with tree lhs and either a tree rhs, or enum tree_code and
1, 2 or 3 tree operands.
* gimple.c (gimple_build_assign_stat): Renamed to...
(gimple_build_assign): ... this. Add overloads with
enum tree_code and 1, 2 or 3 tree operands.
(gimple_build_assign_with_ops): Remove 1 and 2 operand overloads.
Rename the 3 operand overload to ...
(gimple_build_assign_1): ... this. Make it static inline.
* tree-ssa-strlen.c (get_string_length): Use gimple_build_assign
instead of gimple_build_assign_with_ops, swap the order of first
two arguments and adjust formatting where necessary.
* tree-vect-slp.c (vect_get_constant_vectors,
vect_create_mask_and_perm): Likewise.
* tree-ssa-forwprop.c (simplify_rotate): Likewise.
* asan.c (build_shadow_mem_access, maybe_create_ssa_name,
maybe_cast_to_ptrmode, asan_expand_check_ifn): Likewise.
* tsan.c (instrument_builtin_call): Likewise.
* tree-chkp.c (chkp_compute_bounds_for_assignment,
chkp_generate_extern_var_bounds): Likewise.
* tree-loop-distribution.c (generate_memset_builtin): Likewise.
* tree-ssa-loop-im.c (rewrite_reciprocal): Likewise.
* gimple-builder.c (build_assign, build_type_cast): Likewise.
* tree-vect-loop-manip.c (vect_create_cond_for_align_checks): Likewise.
* value-prof.c (gimple_divmod_fixed_value, gimple_mod_pow2,
gimple_mod_subtract): Likewise.
* gimple-match-head.c (maybe_push_res_to_seq): Likewise.
* tree-vect-patterns.c (vect_recog_dot_prod_pattern,
vect_recog_sad_pattern, vect_handle_widen_op_by_const,
vect_recog_widen_mult_pattern, vect_recog_pow_pattern,
vect_recog_widen_sum_pattern, vect_operation_fits_smaller_type,
vect_recog_over_widening_pattern, vect_recog_widen_shift_pattern,
vect_recog_rotate_pattern, vect_recog_vector_vector_shift_pattern,
vect_recog_divmod_pattern, vect_recog_mixed_size_cond_pattern,
adjust_bool_pattern_cast, adjust_bool_pattern,
vect_recog_bool_pattern): Likewise.
* gimple-ssa-strength-reduction.c (create_add_on_incoming_edge,
insert_initializers, introduce_cast_before_cand,
replace_one_candidate): Likewise.
* tree-ssa-math-opts.c (insert_reciprocals, powi_as_mults_1,
powi_as_mults, build_and_insert_binop, build_and_insert_cast,
pass_cse_sincos::execute, bswap_replace, convert_mult_to_fma):
Likewise.
* tree-tailcall.c (adjust_return_value_with_ops,
update_accumulator_with_ops): Likewise.
* tree-predcom.c (reassociate_to_the_same_stmt): Likewise.
* tree-ssa-reassoc.c (build_and_add_sum,
optimize_range_tests_to_bit_test, update_ops,
maybe_optimize_range_tests, rewrite_expr_tree, linearize_expr,
negate_value, repropagate_negates, attempt_builtin_powi,
reassociate_bb): Likewise.
* tree-vect-loop.c (vect_is_simple_reduction_1,
get_initial_def_for_induction, vect_create_epilog_for_reduction):
Likewise.
* ipa-split.c (split_function): Likewise.
* tree-ssa-phiopt.c (conditional_replacement, minmax_replacement,
abs_replacement, neg_replacement): Likewise.
* tree-profile.c (gimple_gen_edge_profiler): Likewise.
* tree-vrp.c (simplify_truth_ops_using_ranges,
simplify_float_conversion_using_ranges,
simplify_internal_call_using_ranges): Likewise.
* gimple-fold.c (rewrite_to_defined_overflow, gimple_build): Likewise.
* tree-vect-generic.c (expand_vector_divmod,
optimize_vector_constructor): Likewise.
* ubsan.c (ubsan_expand_null_ifn, ubsan_expand_objsize_ifn,
instrument_bool_enum_load): Likewise.
* tree-ssa-loop-manip.c (create_iv): Likewise.
* omp-low.c (lower_rec_input_clauses, expand_omp_for_generic,
expand_omp_for_static_nochunk, expand_omp_for_static_chunk,
expand_cilk_for, simd_clone_adjust): Likewise.
* trans-mem.c (expand_transaction): Likewise.
* tree-vect-data-refs.c (bump_vector_ptr, vect_permute_store_chain,
vect_setup_realignment, vect_permute_load_chain,
vect_shift_permute_load_chain): Likewise.
* tree-vect-stmts.c (vect_init_vector, vectorizable_mask_load_store,
vectorizable_simd_clone_call, vect_gen_widened_results_half,
vect_create_vectorized_demotion_stmts, vectorizable_conversion,
vectorizable_shift, vectorizable_operation, vectorizable_store,
permute_vec_elements, vectorizable_load): Likewise.
From-SVN: r218216
2014-12-01 14:58:10 +01:00
|
|
|
g = gimple_build_assign (make_ssa_name (uintptr_type), PLUS_EXPR,
|
|
|
|
gimple_assign_lhs (g), t);
|
2014-05-30 20:37:05 +02:00
|
|
|
gimple_set_location (g, location);
|
|
|
|
gsi_insert_after (gsi, g, GSI_NEW_STMT);
|
|
|
|
|
gimple.h (gimple_build_assign_stat): Remove prototype.
* gimple.h (gimple_build_assign_stat): Remove prototype.
(gimple_build_assign): Remove define. Add overload prototypes
with tree lhs and either a tree rhs, or enum tree_code and
1, 2 or 3 tree operands.
* gimple.c (gimple_build_assign_stat): Renamed to...
(gimple_build_assign): ... this. Add overloads with
enum tree_code and 1, 2 or 3 tree operands.
(gimple_build_assign_with_ops): Remove 1 and 2 operand overloads.
Rename the 3 operand overload to ...
(gimple_build_assign_1): ... this. Make it static inline.
* tree-ssa-strlen.c (get_string_length): Use gimple_build_assign
instead of gimple_build_assign_with_ops, swap the order of first
two arguments and adjust formatting where necessary.
* tree-vect-slp.c (vect_get_constant_vectors,
vect_create_mask_and_perm): Likewise.
* tree-ssa-forwprop.c (simplify_rotate): Likewise.
* asan.c (build_shadow_mem_access, maybe_create_ssa_name,
maybe_cast_to_ptrmode, asan_expand_check_ifn): Likewise.
* tsan.c (instrument_builtin_call): Likewise.
* tree-chkp.c (chkp_compute_bounds_for_assignment,
chkp_generate_extern_var_bounds): Likewise.
* tree-loop-distribution.c (generate_memset_builtin): Likewise.
* tree-ssa-loop-im.c (rewrite_reciprocal): Likewise.
* gimple-builder.c (build_assign, build_type_cast): Likewise.
* tree-vect-loop-manip.c (vect_create_cond_for_align_checks): Likewise.
* value-prof.c (gimple_divmod_fixed_value, gimple_mod_pow2,
gimple_mod_subtract): Likewise.
* gimple-match-head.c (maybe_push_res_to_seq): Likewise.
* tree-vect-patterns.c (vect_recog_dot_prod_pattern,
vect_recog_sad_pattern, vect_handle_widen_op_by_const,
vect_recog_widen_mult_pattern, vect_recog_pow_pattern,
vect_recog_widen_sum_pattern, vect_operation_fits_smaller_type,
vect_recog_over_widening_pattern, vect_recog_widen_shift_pattern,
vect_recog_rotate_pattern, vect_recog_vector_vector_shift_pattern,
vect_recog_divmod_pattern, vect_recog_mixed_size_cond_pattern,
adjust_bool_pattern_cast, adjust_bool_pattern,
vect_recog_bool_pattern): Likewise.
* gimple-ssa-strength-reduction.c (create_add_on_incoming_edge,
insert_initializers, introduce_cast_before_cand,
replace_one_candidate): Likewise.
* tree-ssa-math-opts.c (insert_reciprocals, powi_as_mults_1,
powi_as_mults, build_and_insert_binop, build_and_insert_cast,
pass_cse_sincos::execute, bswap_replace, convert_mult_to_fma):
Likewise.
* tree-tailcall.c (adjust_return_value_with_ops,
update_accumulator_with_ops): Likewise.
* tree-predcom.c (reassociate_to_the_same_stmt): Likewise.
* tree-ssa-reassoc.c (build_and_add_sum,
optimize_range_tests_to_bit_test, update_ops,
maybe_optimize_range_tests, rewrite_expr_tree, linearize_expr,
negate_value, repropagate_negates, attempt_builtin_powi,
reassociate_bb): Likewise.
* tree-vect-loop.c (vect_is_simple_reduction_1,
get_initial_def_for_induction, vect_create_epilog_for_reduction):
Likewise.
* ipa-split.c (split_function): Likewise.
* tree-ssa-phiopt.c (conditional_replacement, minmax_replacement,
abs_replacement, neg_replacement): Likewise.
* tree-profile.c (gimple_gen_edge_profiler): Likewise.
* tree-vrp.c (simplify_truth_ops_using_ranges,
simplify_float_conversion_using_ranges,
simplify_internal_call_using_ranges): Likewise.
* gimple-fold.c (rewrite_to_defined_overflow, gimple_build): Likewise.
* tree-vect-generic.c (expand_vector_divmod,
optimize_vector_constructor): Likewise.
* ubsan.c (ubsan_expand_null_ifn, ubsan_expand_objsize_ifn,
instrument_bool_enum_load): Likewise.
* tree-ssa-loop-manip.c (create_iv): Likewise.
* omp-low.c (lower_rec_input_clauses, expand_omp_for_generic,
expand_omp_for_static_nochunk, expand_omp_for_static_chunk,
expand_cilk_for, simd_clone_adjust): Likewise.
* trans-mem.c (expand_transaction): Likewise.
* tree-vect-data-refs.c (bump_vector_ptr, vect_permute_store_chain,
vect_setup_realignment, vect_permute_load_chain,
vect_shift_permute_load_chain): Likewise.
* tree-vect-stmts.c (vect_init_vector, vectorizable_mask_load_store,
vectorizable_simd_clone_call, vect_gen_widened_results_half,
vect_create_vectorized_demotion_stmts, vectorizable_conversion,
vectorizable_shift, vectorizable_operation, vectorizable_store,
permute_vec_elements, vectorizable_load): Likewise.
From-SVN: r218216
2014-12-01 14:58:10 +01:00
|
|
|
g = gimple_build_assign (make_ssa_name (shadow_ptr_type), NOP_EXPR,
|
|
|
|
gimple_assign_lhs (g));
|
2014-05-30 20:37:05 +02:00
|
|
|
gimple_set_location (g, location);
|
|
|
|
gsi_insert_after (gsi, g, GSI_NEW_STMT);
|
|
|
|
|
2016-11-07 11:23:38 +01:00
|
|
|
if (!return_address)
|
|
|
|
{
|
|
|
|
t = build2 (MEM_REF, shadow_type, gimple_assign_lhs (g),
|
|
|
|
build_int_cst (shadow_ptr_type, 0));
|
|
|
|
g = gimple_build_assign (make_ssa_name (shadow_type), MEM_REF, t);
|
|
|
|
gimple_set_location (g, location);
|
|
|
|
gsi_insert_after (gsi, g, GSI_NEW_STMT);
|
|
|
|
}
|
|
|
|
|
2014-05-30 20:37:05 +02:00
|
|
|
return gimple_assign_lhs (g);
|
|
|
|
}
|
|
|
|
|
2014-06-16 10:43:47 +02:00
|
|
|
/* BASE can already be an SSA_NAME; in that case, do not create a
|
|
|
|
new SSA_NAME for it. */
|
|
|
|
|
|
|
|
static tree
|
|
|
|
maybe_create_ssa_name (location_t loc, tree base, gimple_stmt_iterator *iter,
|
|
|
|
bool before_p)
|
|
|
|
{
|
|
|
|
if (TREE_CODE (base) == SSA_NAME)
|
|
|
|
return base;
|
2015-09-20 02:52:59 +02:00
|
|
|
gimple *g = gimple_build_assign (make_ssa_name (TREE_TYPE (base)),
|
gimple.h (gimple_build_assign_stat): Remove prototype.
* gimple.h (gimple_build_assign_stat): Remove prototype.
(gimple_build_assign): Remove define. Add overload prototypes
with tree lhs and either a tree rhs, or enum tree_code and
1, 2 or 3 tree operands.
* gimple.c (gimple_build_assign_stat): Renamed to...
(gimple_build_assign): ... this. Add overloads with
enum tree_code and 1, 2 or 3 tree operands.
(gimple_build_assign_with_ops): Remove 1 and 2 operand overloads.
Rename the 3 operand overload to ...
(gimple_build_assign_1): ... this. Make it static inline.
* tree-ssa-strlen.c (get_string_length): Use gimple_build_assign
instead of gimple_build_assign_with_ops, swap the order of first
two arguments and adjust formatting where necessary.
* tree-vect-slp.c (vect_get_constant_vectors,
vect_create_mask_and_perm): Likewise.
* tree-ssa-forwprop.c (simplify_rotate): Likewise.
* asan.c (build_shadow_mem_access, maybe_create_ssa_name,
maybe_cast_to_ptrmode, asan_expand_check_ifn): Likewise.
* tsan.c (instrument_builtin_call): Likewise.
* tree-chkp.c (chkp_compute_bounds_for_assignment,
chkp_generate_extern_var_bounds): Likewise.
* tree-loop-distribution.c (generate_memset_builtin): Likewise.
* tree-ssa-loop-im.c (rewrite_reciprocal): Likewise.
* gimple-builder.c (build_assign, build_type_cast): Likewise.
* tree-vect-loop-manip.c (vect_create_cond_for_align_checks): Likewise.
* value-prof.c (gimple_divmod_fixed_value, gimple_mod_pow2,
gimple_mod_subtract): Likewise.
* gimple-match-head.c (maybe_push_res_to_seq): Likewise.
* tree-vect-patterns.c (vect_recog_dot_prod_pattern,
vect_recog_sad_pattern, vect_handle_widen_op_by_const,
vect_recog_widen_mult_pattern, vect_recog_pow_pattern,
vect_recog_widen_sum_pattern, vect_operation_fits_smaller_type,
vect_recog_over_widening_pattern, vect_recog_widen_shift_pattern,
vect_recog_rotate_pattern, vect_recog_vector_vector_shift_pattern,
vect_recog_divmod_pattern, vect_recog_mixed_size_cond_pattern,
adjust_bool_pattern_cast, adjust_bool_pattern,
vect_recog_bool_pattern): Likewise.
* gimple-ssa-strength-reduction.c (create_add_on_incoming_edge,
insert_initializers, introduce_cast_before_cand,
replace_one_candidate): Likewise.
* tree-ssa-math-opts.c (insert_reciprocals, powi_as_mults_1,
powi_as_mults, build_and_insert_binop, build_and_insert_cast,
pass_cse_sincos::execute, bswap_replace, convert_mult_to_fma):
Likewise.
* tree-tailcall.c (adjust_return_value_with_ops,
update_accumulator_with_ops): Likewise.
* tree-predcom.c (reassociate_to_the_same_stmt): Likewise.
* tree-ssa-reassoc.c (build_and_add_sum,
optimize_range_tests_to_bit_test, update_ops,
maybe_optimize_range_tests, rewrite_expr_tree, linearize_expr,
negate_value, repropagate_negates, attempt_builtin_powi,
reassociate_bb): Likewise.
* tree-vect-loop.c (vect_is_simple_reduction_1,
get_initial_def_for_induction, vect_create_epilog_for_reduction):
Likewise.
* ipa-split.c (split_function): Likewise.
* tree-ssa-phiopt.c (conditional_replacement, minmax_replacement,
abs_replacement, neg_replacement): Likewise.
* tree-profile.c (gimple_gen_edge_profiler): Likewise.
* tree-vrp.c (simplify_truth_ops_using_ranges,
simplify_float_conversion_using_ranges,
simplify_internal_call_using_ranges): Likewise.
* gimple-fold.c (rewrite_to_defined_overflow, gimple_build): Likewise.
* tree-vect-generic.c (expand_vector_divmod,
optimize_vector_constructor): Likewise.
* ubsan.c (ubsan_expand_null_ifn, ubsan_expand_objsize_ifn,
instrument_bool_enum_load): Likewise.
* tree-ssa-loop-manip.c (create_iv): Likewise.
* omp-low.c (lower_rec_input_clauses, expand_omp_for_generic,
expand_omp_for_static_nochunk, expand_omp_for_static_chunk,
expand_cilk_for, simd_clone_adjust): Likewise.
* trans-mem.c (expand_transaction): Likewise.
* tree-vect-data-refs.c (bump_vector_ptr, vect_permute_store_chain,
vect_setup_realignment, vect_permute_load_chain,
vect_shift_permute_load_chain): Likewise.
* tree-vect-stmts.c (vect_init_vector, vectorizable_mask_load_store,
vectorizable_simd_clone_call, vect_gen_widened_results_half,
vect_create_vectorized_demotion_stmts, vectorizable_conversion,
vectorizable_shift, vectorizable_operation, vectorizable_store,
permute_vec_elements, vectorizable_load): Likewise.
From-SVN: r218216
2014-12-01 14:58:10 +01:00
|
|
|
TREE_CODE (base), base);
|
2014-06-16 10:43:47 +02:00
|
|
|
gimple_set_location (g, loc);
|
|
|
|
if (before_p)
|
|
|
|
gsi_insert_before (iter, g, GSI_SAME_STMT);
|
|
|
|
else
|
|
|
|
gsi_insert_after (iter, g, GSI_NEW_STMT);
|
|
|
|
return gimple_assign_lhs (g);
|
|
|
|
}
|
|
|
|
|
2014-09-01 09:47:37 +02:00
|
|
|
/* LEN can already have necessary size and precision;
|
|
|
|
in that case, do not create a new variable. */
|
|
|
|
|
|
|
|
tree
|
|
|
|
maybe_cast_to_ptrmode (location_t loc, tree len, gimple_stmt_iterator *iter,
|
|
|
|
bool before_p)
|
|
|
|
{
|
|
|
|
if (ptrofftype_p (len))
|
|
|
|
return len;
|
2015-09-20 02:52:59 +02:00
|
|
|
gimple *g = gimple_build_assign (make_ssa_name (pointer_sized_int_node),
|
gimple.h (gimple_build_assign_stat): Remove prototype.
* gimple.h (gimple_build_assign_stat): Remove prototype.
(gimple_build_assign): Remove define. Add overload prototypes
with tree lhs and either a tree rhs, or enum tree_code and
1, 2 or 3 tree operands.
* gimple.c (gimple_build_assign_stat): Renamed to...
(gimple_build_assign): ... this. Add overloads with
enum tree_code and 1, 2 or 3 tree operands.
(gimple_build_assign_with_ops): Remove 1 and 2 operand overloads.
Rename the 3 operand overload to ...
(gimple_build_assign_1): ... this. Make it static inline.
* tree-ssa-strlen.c (get_string_length): Use gimple_build_assign
instead of gimple_build_assign_with_ops, swap the order of first
two arguments and adjust formatting where necessary.
* tree-vect-slp.c (vect_get_constant_vectors,
vect_create_mask_and_perm): Likewise.
* tree-ssa-forwprop.c (simplify_rotate): Likewise.
* asan.c (build_shadow_mem_access, maybe_create_ssa_name,
maybe_cast_to_ptrmode, asan_expand_check_ifn): Likewise.
* tsan.c (instrument_builtin_call): Likewise.
* tree-chkp.c (chkp_compute_bounds_for_assignment,
chkp_generate_extern_var_bounds): Likewise.
* tree-loop-distribution.c (generate_memset_builtin): Likewise.
* tree-ssa-loop-im.c (rewrite_reciprocal): Likewise.
* gimple-builder.c (build_assign, build_type_cast): Likewise.
* tree-vect-loop-manip.c (vect_create_cond_for_align_checks): Likewise.
* value-prof.c (gimple_divmod_fixed_value, gimple_mod_pow2,
gimple_mod_subtract): Likewise.
* gimple-match-head.c (maybe_push_res_to_seq): Likewise.
* tree-vect-patterns.c (vect_recog_dot_prod_pattern,
vect_recog_sad_pattern, vect_handle_widen_op_by_const,
vect_recog_widen_mult_pattern, vect_recog_pow_pattern,
vect_recog_widen_sum_pattern, vect_operation_fits_smaller_type,
vect_recog_over_widening_pattern, vect_recog_widen_shift_pattern,
vect_recog_rotate_pattern, vect_recog_vector_vector_shift_pattern,
vect_recog_divmod_pattern, vect_recog_mixed_size_cond_pattern,
adjust_bool_pattern_cast, adjust_bool_pattern,
vect_recog_bool_pattern): Likewise.
* gimple-ssa-strength-reduction.c (create_add_on_incoming_edge,
insert_initializers, introduce_cast_before_cand,
replace_one_candidate): Likewise.
* tree-ssa-math-opts.c (insert_reciprocals, powi_as_mults_1,
powi_as_mults, build_and_insert_binop, build_and_insert_cast,
pass_cse_sincos::execute, bswap_replace, convert_mult_to_fma):
Likewise.
* tree-tailcall.c (adjust_return_value_with_ops,
update_accumulator_with_ops): Likewise.
* tree-predcom.c (reassociate_to_the_same_stmt): Likewise.
* tree-ssa-reassoc.c (build_and_add_sum,
optimize_range_tests_to_bit_test, update_ops,
maybe_optimize_range_tests, rewrite_expr_tree, linearize_expr,
negate_value, repropagate_negates, attempt_builtin_powi,
reassociate_bb): Likewise.
* tree-vect-loop.c (vect_is_simple_reduction_1,
get_initial_def_for_induction, vect_create_epilog_for_reduction):
Likewise.
* ipa-split.c (split_function): Likewise.
* tree-ssa-phiopt.c (conditional_replacement, minmax_replacement,
abs_replacement, neg_replacement): Likewise.
* tree-profile.c (gimple_gen_edge_profiler): Likewise.
* tree-vrp.c (simplify_truth_ops_using_ranges,
simplify_float_conversion_using_ranges,
simplify_internal_call_using_ranges): Likewise.
* gimple-fold.c (rewrite_to_defined_overflow, gimple_build): Likewise.
* tree-vect-generic.c (expand_vector_divmod,
optimize_vector_constructor): Likewise.
* ubsan.c (ubsan_expand_null_ifn, ubsan_expand_objsize_ifn,
instrument_bool_enum_load): Likewise.
* tree-ssa-loop-manip.c (create_iv): Likewise.
* omp-low.c (lower_rec_input_clauses, expand_omp_for_generic,
expand_omp_for_static_nochunk, expand_omp_for_static_chunk,
expand_cilk_for, simd_clone_adjust): Likewise.
* trans-mem.c (expand_transaction): Likewise.
* tree-vect-data-refs.c (bump_vector_ptr, vect_permute_store_chain,
vect_setup_realignment, vect_permute_load_chain,
vect_shift_permute_load_chain): Likewise.
* tree-vect-stmts.c (vect_init_vector, vectorizable_mask_load_store,
vectorizable_simd_clone_call, vect_gen_widened_results_half,
vect_create_vectorized_demotion_stmts, vectorizable_conversion,
vectorizable_shift, vectorizable_operation, vectorizable_store,
permute_vec_elements, vectorizable_load): Likewise.
From-SVN: r218216
2014-12-01 14:58:10 +01:00
|
|
|
NOP_EXPR, len);
|
2014-09-01 09:47:37 +02:00
|
|
|
gimple_set_location (g, loc);
|
|
|
|
if (before_p)
|
|
|
|
gsi_insert_before (iter, g, GSI_SAME_STMT);
|
|
|
|
else
|
|
|
|
gsi_insert_after (iter, g, GSI_NEW_STMT);
|
|
|
|
return gimple_assign_lhs (g);
|
|
|
|
}
|
|
|
|
|
2012-11-12 16:52:56 +01:00
|
|
|
/* Instrument the memory access instruction BASE. Insert new
|
Instrument built-in memory access function calls
This patch instruments many memory access patterns through builtins.
Basically, for a call like:
__builtin_memset (from, 0, n_bytes);
the patch would only instrument the accesses at the beginning and at
the end of the memory region [from, from + n_bytes]. This is the
strategy used by the llvm implementation of asan.
This instrumentation is done for all the memory access builtin
functions that expose a well specified memory region -- one that
explicitly states the number of bytes accessed in the region.
A special treatment is used for __builtin_strlen. The patch
instruments the access to the first byte of its argument, as well as
the access to the byte (of the argument) at the offset returned by
strlen.
For the __sync_* and __atomic* calls the patch instruments the access
to the bytes pointed to by the argument.
While doing this, I have added a new parameter to build_check_stmt to
decide whether to insert the instrumentation code before or after the
statement iterator. This allows us to do away with the
gsi_{next,prev} dance we were doing in the callers of this function.
Tested by running cc1 -fasan on variations of simple programs like:
int
foo ()
{
char foo[10] = {0};
foo[0] = 't';
foo[1] = 'e';
foo[2] = 's';
foo[3] = 't';
int l = __builtin_strlen (foo);
int n = sizeof (foo);
__builtin_memset (&foo[4], 0, n - 4);
__sync_fetch_and_add (&foo[11], 1);
return l;
}
and by starring at the gimple output which for this function is:
;; Function foo (foo, funcdef_no=0, decl_uid=1714, cgraph_uid=0)
foo ()
{
int n;
int l;
char foo[10];
int D.1725;
char * D.1724;
int D.1723;
long unsigned int D.1722;
int D.1721;
long unsigned int D.1720;
long unsigned int _1;
int _4;
long unsigned int _5;
int _6;
char * _7;
int _8;
char * _9;
unsigned long _10;
unsigned long _11;
unsigned long _12;
signed char * _13;
signed char _14;
_Bool _15;
unsigned long _16;
signed char _17;
_Bool _18;
_Bool _19;
char * _20;
unsigned long _21;
unsigned long _22;
unsigned long _23;
signed char * _24;
signed char _25;
_Bool _26;
unsigned long _27;
signed char _28;
_Bool _29;
_Bool _30;
char * _31;
unsigned long _32;
unsigned long _33;
unsigned long _34;
signed char * _35;
signed char _36;
_Bool _37;
unsigned long _38;
signed char _39;
_Bool _40;
_Bool _41;
char * _42;
unsigned long _43;
unsigned long _44;
unsigned long _45;
signed char * _46;
signed char _47;
_Bool _48;
unsigned long _49;
signed char _50;
_Bool _51;
_Bool _52;
char * _53;
unsigned long _54;
unsigned long _55;
unsigned long _56;
signed char * _57;
signed char _58;
_Bool _59;
unsigned long _60;
signed char _61;
_Bool _62;
_Bool _63;
char[10] * _64;
unsigned long _65;
unsigned long _66;
unsigned long _67;
signed char * _68;
signed char _69;
_Bool _70;
unsigned long _71;
signed char _72;
_Bool _73;
_Bool _74;
unsigned long _75;
unsigned long _76;
unsigned long _77;
signed char * _78;
signed char _79;
_Bool _80;
unsigned long _81;
signed char _82;
_Bool _83;
_Bool _84;
long unsigned int _85;
long unsigned int _86;
char * _87;
char * _88;
unsigned long _89;
unsigned long _90;
unsigned long _91;
signed char * _92;
signed char _93;
_Bool _94;
unsigned long _95;
signed char _96;
_Bool _97;
_Bool _98;
char * _99;
unsigned long _100;
unsigned long _101;
unsigned long _102;
signed char * _103;
signed char _104;
_Bool _105;
unsigned long _106;
signed char _107;
_Bool _108;
_Bool _109;
<bb 2>:
foo = {};
_9 = &foo[0];
_10 = (unsigned long) _9;
_11 = _10 >> 3;
_12 = _11 + 17592186044416;
_13 = (signed char *) _12;
_14 = *_13;
_15 = _14 != 0;
_16 = _10 & 7;
_17 = (signed char) _16;
_18 = _17 >= _14;
_19 = _15 & _18;
if (_19 != 0)
goto <bb 5>;
else
goto <bb 4>;
<bb 5>:
__asan_report_store1 (_10);
<bb 4>:
foo[0] = 116;
_20 = &foo[1];
_21 = (unsigned long) _20;
_22 = _21 >> 3;
_23 = _22 + 17592186044416;
_24 = (signed char *) _23;
_25 = *_24;
_26 = _25 != 0;
_27 = _21 & 7;
_28 = (signed char) _27;
_29 = _28 >= _25;
_30 = _26 & _29;
if (_30 != 0)
goto <bb 7>;
else
goto <bb 6>;
<bb 7>:
__asan_report_store1 (_21);
<bb 6>:
foo[1] = 101;
_31 = &foo[2];
_32 = (unsigned long) _31;
_33 = _32 >> 3;
_34 = _33 + 17592186044416;
_35 = (signed char *) _34;
_36 = *_35;
_37 = _36 != 0;
_38 = _32 & 7;
_39 = (signed char) _38;
_40 = _39 >= _36;
_41 = _37 & _40;
if (_41 != 0)
goto <bb 9>;
else
goto <bb 8>;
<bb 9>:
__asan_report_store1 (_32);
<bb 8>:
foo[2] = 115;
_42 = &foo[3];
_43 = (unsigned long) _42;
_44 = _43 >> 3;
_45 = _44 + 17592186044416;
_46 = (signed char *) _45;
_47 = *_46;
_48 = _47 != 0;
_49 = _43 & 7;
_50 = (signed char) _49;
_51 = _50 >= _47;
_52 = _48 & _51;
if (_52 != 0)
goto <bb 11>;
else
goto <bb 10>;
<bb 11>:
__asan_report_store1 (_43);
<bb 10>:
foo[3] = 116;
_53 = (char *) &foo;
_54 = (unsigned long) _53;
_55 = _54 >> 3;
_56 = _55 + 17592186044416;
_57 = (signed char *) _56;
_58 = *_57;
_59 = _58 != 0;
_60 = _54 & 7;
_61 = (signed char) _60;
_62 = _61 >= _58;
_63 = _59 & _62;
if (_63 != 0)
goto <bb 13>;
else
goto <bb 12>;
<bb 13>:
__asan_report_load1 (_54);
<bb 12>:
_1 = __builtin_strlen (&foo);
_64 = _53 + _1;
_65 = (unsigned long) _64;
_66 = _65 >> 3;
_67 = _66 + 17592186044416;
_68 = (signed char *) _67;
_69 = *_68;
_70 = _69 != 0;
_71 = _65 & 7;
_72 = (signed char) _71;
_73 = _72 >= _69;
_74 = _70 & _73;
if (_74 != 0)
goto <bb 15>;
else
goto <bb 14>;
<bb 15>:
__asan_report_load1 (_65);
<bb 14>:
l_2 = (int) _1;
n_3 = 10;
_4 = n_3 + -4;
_5 = (long unsigned int) _4;
_6 = l_2 + 1;
_7 = &foo[_6];
if (_5 != 0)
goto <bb 17>;
else
goto <bb 16>;
<bb 17>:
_75 = (unsigned long) _7;
_76 = _75 >> 3;
_77 = _76 + 17592186044416;
_78 = (signed char *) _77;
_79 = *_78;
_80 = _79 != 0;
_81 = _75 & 7;
_82 = (signed char) _81;
_83 = _82 >= _79;
_84 = _80 & _83;
_85 = _5;
_86 = _85 - 1;
_87 = _7;
_88 = _87 + _86;
_89 = (unsigned long) _88;
_90 = _89 >> 3;
_91 = _90 + 17592186044416;
_92 = (signed char *) _91;
_93 = *_92;
_94 = _93 != 0;
_95 = _89 & 7;
_96 = (signed char) _95;
_97 = _96 >= _93;
_98 = _94 & _97;
if (_98 != 0)
goto <bb 21>;
else
goto <bb 20>;
<bb 21>:
__asan_report_store1 (_89);
<bb 20>:
if (_84 != 0)
goto <bb 19>;
else
goto <bb 18>;
<bb 19>:
__asan_report_store1 (_75);
<bb 18>:
<bb 16>:
__builtin_memset (_7, 0, _5);
_99 = &foo[11];
_100 = (unsigned long) _99;
_101 = _100 >> 3;
_102 = _101 + 17592186044416;
_103 = (signed char *) _102;
_104 = *_103;
_105 = _104 != 0;
_106 = _100 & 7;
_107 = (signed char) _106;
_108 = _107 >= _104;
_109 = _105 & _108;
if (_109 != 0)
goto <bb 23>;
else
goto <bb 22>;
<bb 23>:
__asan_report_store1 (_100);
<bb 22>:
__sync_fetch_and_add_1 (&foo[11], 1);
_8 = l_2;
foo ={v} {CLOBBER};
<L1>:
return _8;
}
;; Function _GLOBAL__sub_I_00099_0_foo (_GLOBAL__sub_I_00099_0_foo, funcdef_no=1, decl_uid=1752, cgraph_uid=4)
_GLOBAL__sub_I_00099_0_foo ()
{
<bb 2>:
__asan_init ();
return;
}
gcc/
* gimple.h (is_gimple_builtin_call): Declare ...
* gimple.c (is_gimple_builtin_call): ... New public function.
* asan.c (insert_if_then_before_iter, instrument_mem_region_access,
instrument_strlen_call, maybe_instrument_builtin_call,
instrument_call): New static functions.
(create_cond_insert_point): Renamed
create_cond_insert_point_before_iter into this. Add a new
parameter to decide whether to insert the condition before or
after the statement iterator.
(build_check_stmt): Adjust for the new create_cond_insert_point.
Add a new parameter to decide whether to add the instrumentation
code before or after the statement iterator.
(instrument_assignment): Factorize from ...
(transform_statements): ... here. Use maybe_instrument_call to
instrument builtin function calls as well.
(instrument_derefs): Adjust for the new parameter of
build_check_stmt. Fix detection of bit-field access.
From-SVN: r193440
2012-11-12 16:53:25 +01:00
|
|
|
statements before or after ITER.
|
2012-11-12 16:52:56 +01:00
|
|
|
|
|
|
|
Note that the memory access represented by BASE can be either an
|
|
|
|
SSA_NAME, or a non-SSA expression. LOCATION is the source code
|
|
|
|
location. IS_STORE is TRUE for a store, FALSE for a load.
|
Instrument built-in memory access function calls
This patch instruments many memory access patterns through builtins.
Basically, for a call like:
__builtin_memset (from, 0, n_bytes);
the patch would only instrument the accesses at the beginning and at
the end of the memory region [from, from + n_bytes]. This is the
strategy used by the llvm implementation of asan.
This instrumentation is done for all the memory access builtin
functions that expose a well specified memory region -- one that
explicitly states the number of bytes accessed in the region.
A special treatment is used for __builtin_strlen. The patch
instruments the access to the first byte of its argument, as well as
the access to the byte (of the argument) at the offset returned by
strlen.
For the __sync_* and __atomic* calls the patch instruments the access
to the bytes pointed to by the argument.
While doing this, I have added a new parameter to build_check_stmt to
decide whether to insert the instrumentation code before or after the
statement iterator. This allows us to do away with the
gsi_{next,prev} dance we were doing in the callers of this function.
Tested by running cc1 -fasan on variations of simple programs like:
int
foo ()
{
char foo[10] = {0};
foo[0] = 't';
foo[1] = 'e';
foo[2] = 's';
foo[3] = 't';
int l = __builtin_strlen (foo);
int n = sizeof (foo);
__builtin_memset (&foo[4], 0, n - 4);
__sync_fetch_and_add (&foo[11], 1);
return l;
}
and by starring at the gimple output which for this function is:
;; Function foo (foo, funcdef_no=0, decl_uid=1714, cgraph_uid=0)
foo ()
{
int n;
int l;
char foo[10];
int D.1725;
char * D.1724;
int D.1723;
long unsigned int D.1722;
int D.1721;
long unsigned int D.1720;
long unsigned int _1;
int _4;
long unsigned int _5;
int _6;
char * _7;
int _8;
char * _9;
unsigned long _10;
unsigned long _11;
unsigned long _12;
signed char * _13;
signed char _14;
_Bool _15;
unsigned long _16;
signed char _17;
_Bool _18;
_Bool _19;
char * _20;
unsigned long _21;
unsigned long _22;
unsigned long _23;
signed char * _24;
signed char _25;
_Bool _26;
unsigned long _27;
signed char _28;
_Bool _29;
_Bool _30;
char * _31;
unsigned long _32;
unsigned long _33;
unsigned long _34;
signed char * _35;
signed char _36;
_Bool _37;
unsigned long _38;
signed char _39;
_Bool _40;
_Bool _41;
char * _42;
unsigned long _43;
unsigned long _44;
unsigned long _45;
signed char * _46;
signed char _47;
_Bool _48;
unsigned long _49;
signed char _50;
_Bool _51;
_Bool _52;
char * _53;
unsigned long _54;
unsigned long _55;
unsigned long _56;
signed char * _57;
signed char _58;
_Bool _59;
unsigned long _60;
signed char _61;
_Bool _62;
_Bool _63;
char[10] * _64;
unsigned long _65;
unsigned long _66;
unsigned long _67;
signed char * _68;
signed char _69;
_Bool _70;
unsigned long _71;
signed char _72;
_Bool _73;
_Bool _74;
unsigned long _75;
unsigned long _76;
unsigned long _77;
signed char * _78;
signed char _79;
_Bool _80;
unsigned long _81;
signed char _82;
_Bool _83;
_Bool _84;
long unsigned int _85;
long unsigned int _86;
char * _87;
char * _88;
unsigned long _89;
unsigned long _90;
unsigned long _91;
signed char * _92;
signed char _93;
_Bool _94;
unsigned long _95;
signed char _96;
_Bool _97;
_Bool _98;
char * _99;
unsigned long _100;
unsigned long _101;
unsigned long _102;
signed char * _103;
signed char _104;
_Bool _105;
unsigned long _106;
signed char _107;
_Bool _108;
_Bool _109;
<bb 2>:
foo = {};
_9 = &foo[0];
_10 = (unsigned long) _9;
_11 = _10 >> 3;
_12 = _11 + 17592186044416;
_13 = (signed char *) _12;
_14 = *_13;
_15 = _14 != 0;
_16 = _10 & 7;
_17 = (signed char) _16;
_18 = _17 >= _14;
_19 = _15 & _18;
if (_19 != 0)
goto <bb 5>;
else
goto <bb 4>;
<bb 5>:
__asan_report_store1 (_10);
<bb 4>:
foo[0] = 116;
_20 = &foo[1];
_21 = (unsigned long) _20;
_22 = _21 >> 3;
_23 = _22 + 17592186044416;
_24 = (signed char *) _23;
_25 = *_24;
_26 = _25 != 0;
_27 = _21 & 7;
_28 = (signed char) _27;
_29 = _28 >= _25;
_30 = _26 & _29;
if (_30 != 0)
goto <bb 7>;
else
goto <bb 6>;
<bb 7>:
__asan_report_store1 (_21);
<bb 6>:
foo[1] = 101;
_31 = &foo[2];
_32 = (unsigned long) _31;
_33 = _32 >> 3;
_34 = _33 + 17592186044416;
_35 = (signed char *) _34;
_36 = *_35;
_37 = _36 != 0;
_38 = _32 & 7;
_39 = (signed char) _38;
_40 = _39 >= _36;
_41 = _37 & _40;
if (_41 != 0)
goto <bb 9>;
else
goto <bb 8>;
<bb 9>:
__asan_report_store1 (_32);
<bb 8>:
foo[2] = 115;
_42 = &foo[3];
_43 = (unsigned long) _42;
_44 = _43 >> 3;
_45 = _44 + 17592186044416;
_46 = (signed char *) _45;
_47 = *_46;
_48 = _47 != 0;
_49 = _43 & 7;
_50 = (signed char) _49;
_51 = _50 >= _47;
_52 = _48 & _51;
if (_52 != 0)
goto <bb 11>;
else
goto <bb 10>;
<bb 11>:
__asan_report_store1 (_43);
<bb 10>:
foo[3] = 116;
_53 = (char *) &foo;
_54 = (unsigned long) _53;
_55 = _54 >> 3;
_56 = _55 + 17592186044416;
_57 = (signed char *) _56;
_58 = *_57;
_59 = _58 != 0;
_60 = _54 & 7;
_61 = (signed char) _60;
_62 = _61 >= _58;
_63 = _59 & _62;
if (_63 != 0)
goto <bb 13>;
else
goto <bb 12>;
<bb 13>:
__asan_report_load1 (_54);
<bb 12>:
_1 = __builtin_strlen (&foo);
_64 = _53 + _1;
_65 = (unsigned long) _64;
_66 = _65 >> 3;
_67 = _66 + 17592186044416;
_68 = (signed char *) _67;
_69 = *_68;
_70 = _69 != 0;
_71 = _65 & 7;
_72 = (signed char) _71;
_73 = _72 >= _69;
_74 = _70 & _73;
if (_74 != 0)
goto <bb 15>;
else
goto <bb 14>;
<bb 15>:
__asan_report_load1 (_65);
<bb 14>:
l_2 = (int) _1;
n_3 = 10;
_4 = n_3 + -4;
_5 = (long unsigned int) _4;
_6 = l_2 + 1;
_7 = &foo[_6];
if (_5 != 0)
goto <bb 17>;
else
goto <bb 16>;
<bb 17>:
_75 = (unsigned long) _7;
_76 = _75 >> 3;
_77 = _76 + 17592186044416;
_78 = (signed char *) _77;
_79 = *_78;
_80 = _79 != 0;
_81 = _75 & 7;
_82 = (signed char) _81;
_83 = _82 >= _79;
_84 = _80 & _83;
_85 = _5;
_86 = _85 - 1;
_87 = _7;
_88 = _87 + _86;
_89 = (unsigned long) _88;
_90 = _89 >> 3;
_91 = _90 + 17592186044416;
_92 = (signed char *) _91;
_93 = *_92;
_94 = _93 != 0;
_95 = _89 & 7;
_96 = (signed char) _95;
_97 = _96 >= _93;
_98 = _94 & _97;
if (_98 != 0)
goto <bb 21>;
else
goto <bb 20>;
<bb 21>:
__asan_report_store1 (_89);
<bb 20>:
if (_84 != 0)
goto <bb 19>;
else
goto <bb 18>;
<bb 19>:
__asan_report_store1 (_75);
<bb 18>:
<bb 16>:
__builtin_memset (_7, 0, _5);
_99 = &foo[11];
_100 = (unsigned long) _99;
_101 = _100 >> 3;
_102 = _101 + 17592186044416;
_103 = (signed char *) _102;
_104 = *_103;
_105 = _104 != 0;
_106 = _100 & 7;
_107 = (signed char) _106;
_108 = _107 >= _104;
_109 = _105 & _108;
if (_109 != 0)
goto <bb 23>;
else
goto <bb 22>;
<bb 23>:
__asan_report_store1 (_100);
<bb 22>:
__sync_fetch_and_add_1 (&foo[11], 1);
_8 = l_2;
foo ={v} {CLOBBER};
<L1>:
return _8;
}
;; Function _GLOBAL__sub_I_00099_0_foo (_GLOBAL__sub_I_00099_0_foo, funcdef_no=1, decl_uid=1752, cgraph_uid=4)
_GLOBAL__sub_I_00099_0_foo ()
{
<bb 2>:
__asan_init ();
return;
}
gcc/
* gimple.h (is_gimple_builtin_call): Declare ...
* gimple.c (is_gimple_builtin_call): ... New public function.
* asan.c (insert_if_then_before_iter, instrument_mem_region_access,
instrument_strlen_call, maybe_instrument_builtin_call,
instrument_call): New static functions.
(create_cond_insert_point): Renamed
create_cond_insert_point_before_iter into this. Add a new
parameter to decide whether to insert the condition before or
after the statement iterator.
(build_check_stmt): Adjust for the new create_cond_insert_point.
Add a new parameter to decide whether to add the instrumentation
code before or after the statement iterator.
(instrument_assignment): Factorize from ...
(transform_statements): ... here. Use maybe_instrument_call to
instrument builtin function calls as well.
(instrument_derefs): Adjust for the new parameter of
build_check_stmt. Fix detection of bit-field access.
From-SVN: r193440
2012-11-12 16:53:25 +01:00
|
|
|
BEFORE_P is TRUE for inserting the instrumentation code before
|
2014-06-16 10:43:47 +02:00
|
|
|
ITER, FALSE for inserting it after ITER. IS_SCALAR_ACCESS is TRUE
|
|
|
|
for a scalar memory access and FALSE for memory region access.
|
|
|
|
NON_ZERO_P is TRUE if memory region is guaranteed to have non-zero
|
|
|
|
length. ALIGN tells alignment of accessed memory object.
|
|
|
|
|
|
|
|
START_INSTRUMENTED and END_INSTRUMENTED are TRUE if start/end of
|
|
|
|
memory region have already been instrumented.
|
Instrument built-in memory access function calls
This patch instruments many memory access patterns through builtins.
Basically, for a call like:
__builtin_memset (from, 0, n_bytes);
the patch would only instrument the accesses at the beginning and at
the end of the memory region [from, from + n_bytes]. This is the
strategy used by the llvm implementation of asan.
This instrumentation is done for all the memory access builtin
functions that expose a well specified memory region -- one that
explicitly states the number of bytes accessed in the region.
A special treatment is used for __builtin_strlen. The patch
instruments the access to the first byte of its argument, as well as
the access to the byte (of the argument) at the offset returned by
strlen.
For the __sync_* and __atomic* calls the patch instruments the access
to the bytes pointed to by the argument.
While doing this, I have added a new parameter to build_check_stmt to
decide whether to insert the instrumentation code before or after the
statement iterator. This allows us to do away with the
gsi_{next,prev} dance we were doing in the callers of this function.
Tested by running cc1 -fasan on variations of simple programs like:
int
foo ()
{
char foo[10] = {0};
foo[0] = 't';
foo[1] = 'e';
foo[2] = 's';
foo[3] = 't';
int l = __builtin_strlen (foo);
int n = sizeof (foo);
__builtin_memset (&foo[4], 0, n - 4);
__sync_fetch_and_add (&foo[11], 1);
return l;
}
and by starring at the gimple output which for this function is:
;; Function foo (foo, funcdef_no=0, decl_uid=1714, cgraph_uid=0)
foo ()
{
int n;
int l;
char foo[10];
int D.1725;
char * D.1724;
int D.1723;
long unsigned int D.1722;
int D.1721;
long unsigned int D.1720;
long unsigned int _1;
int _4;
long unsigned int _5;
int _6;
char * _7;
int _8;
char * _9;
unsigned long _10;
unsigned long _11;
unsigned long _12;
signed char * _13;
signed char _14;
_Bool _15;
unsigned long _16;
signed char _17;
_Bool _18;
_Bool _19;
char * _20;
unsigned long _21;
unsigned long _22;
unsigned long _23;
signed char * _24;
signed char _25;
_Bool _26;
unsigned long _27;
signed char _28;
_Bool _29;
_Bool _30;
char * _31;
unsigned long _32;
unsigned long _33;
unsigned long _34;
signed char * _35;
signed char _36;
_Bool _37;
unsigned long _38;
signed char _39;
_Bool _40;
_Bool _41;
char * _42;
unsigned long _43;
unsigned long _44;
unsigned long _45;
signed char * _46;
signed char _47;
_Bool _48;
unsigned long _49;
signed char _50;
_Bool _51;
_Bool _52;
char * _53;
unsigned long _54;
unsigned long _55;
unsigned long _56;
signed char * _57;
signed char _58;
_Bool _59;
unsigned long _60;
signed char _61;
_Bool _62;
_Bool _63;
char[10] * _64;
unsigned long _65;
unsigned long _66;
unsigned long _67;
signed char * _68;
signed char _69;
_Bool _70;
unsigned long _71;
signed char _72;
_Bool _73;
_Bool _74;
unsigned long _75;
unsigned long _76;
unsigned long _77;
signed char * _78;
signed char _79;
_Bool _80;
unsigned long _81;
signed char _82;
_Bool _83;
_Bool _84;
long unsigned int _85;
long unsigned int _86;
char * _87;
char * _88;
unsigned long _89;
unsigned long _90;
unsigned long _91;
signed char * _92;
signed char _93;
_Bool _94;
unsigned long _95;
signed char _96;
_Bool _97;
_Bool _98;
char * _99;
unsigned long _100;
unsigned long _101;
unsigned long _102;
signed char * _103;
signed char _104;
_Bool _105;
unsigned long _106;
signed char _107;
_Bool _108;
_Bool _109;
<bb 2>:
foo = {};
_9 = &foo[0];
_10 = (unsigned long) _9;
_11 = _10 >> 3;
_12 = _11 + 17592186044416;
_13 = (signed char *) _12;
_14 = *_13;
_15 = _14 != 0;
_16 = _10 & 7;
_17 = (signed char) _16;
_18 = _17 >= _14;
_19 = _15 & _18;
if (_19 != 0)
goto <bb 5>;
else
goto <bb 4>;
<bb 5>:
__asan_report_store1 (_10);
<bb 4>:
foo[0] = 116;
_20 = &foo[1];
_21 = (unsigned long) _20;
_22 = _21 >> 3;
_23 = _22 + 17592186044416;
_24 = (signed char *) _23;
_25 = *_24;
_26 = _25 != 0;
_27 = _21 & 7;
_28 = (signed char) _27;
_29 = _28 >= _25;
_30 = _26 & _29;
if (_30 != 0)
goto <bb 7>;
else
goto <bb 6>;
<bb 7>:
__asan_report_store1 (_21);
<bb 6>:
foo[1] = 101;
_31 = &foo[2];
_32 = (unsigned long) _31;
_33 = _32 >> 3;
_34 = _33 + 17592186044416;
_35 = (signed char *) _34;
_36 = *_35;
_37 = _36 != 0;
_38 = _32 & 7;
_39 = (signed char) _38;
_40 = _39 >= _36;
_41 = _37 & _40;
if (_41 != 0)
goto <bb 9>;
else
goto <bb 8>;
<bb 9>:
__asan_report_store1 (_32);
<bb 8>:
foo[2] = 115;
_42 = &foo[3];
_43 = (unsigned long) _42;
_44 = _43 >> 3;
_45 = _44 + 17592186044416;
_46 = (signed char *) _45;
_47 = *_46;
_48 = _47 != 0;
_49 = _43 & 7;
_50 = (signed char) _49;
_51 = _50 >= _47;
_52 = _48 & _51;
if (_52 != 0)
goto <bb 11>;
else
goto <bb 10>;
<bb 11>:
__asan_report_store1 (_43);
<bb 10>:
foo[3] = 116;
_53 = (char *) &foo;
_54 = (unsigned long) _53;
_55 = _54 >> 3;
_56 = _55 + 17592186044416;
_57 = (signed char *) _56;
_58 = *_57;
_59 = _58 != 0;
_60 = _54 & 7;
_61 = (signed char) _60;
_62 = _61 >= _58;
_63 = _59 & _62;
if (_63 != 0)
goto <bb 13>;
else
goto <bb 12>;
<bb 13>:
__asan_report_load1 (_54);
<bb 12>:
_1 = __builtin_strlen (&foo);
_64 = _53 + _1;
_65 = (unsigned long) _64;
_66 = _65 >> 3;
_67 = _66 + 17592186044416;
_68 = (signed char *) _67;
_69 = *_68;
_70 = _69 != 0;
_71 = _65 & 7;
_72 = (signed char) _71;
_73 = _72 >= _69;
_74 = _70 & _73;
if (_74 != 0)
goto <bb 15>;
else
goto <bb 14>;
<bb 15>:
__asan_report_load1 (_65);
<bb 14>:
l_2 = (int) _1;
n_3 = 10;
_4 = n_3 + -4;
_5 = (long unsigned int) _4;
_6 = l_2 + 1;
_7 = &foo[_6];
if (_5 != 0)
goto <bb 17>;
else
goto <bb 16>;
<bb 17>:
_75 = (unsigned long) _7;
_76 = _75 >> 3;
_77 = _76 + 17592186044416;
_78 = (signed char *) _77;
_79 = *_78;
_80 = _79 != 0;
_81 = _75 & 7;
_82 = (signed char) _81;
_83 = _82 >= _79;
_84 = _80 & _83;
_85 = _5;
_86 = _85 - 1;
_87 = _7;
_88 = _87 + _86;
_89 = (unsigned long) _88;
_90 = _89 >> 3;
_91 = _90 + 17592186044416;
_92 = (signed char *) _91;
_93 = *_92;
_94 = _93 != 0;
_95 = _89 & 7;
_96 = (signed char) _95;
_97 = _96 >= _93;
_98 = _94 & _97;
if (_98 != 0)
goto <bb 21>;
else
goto <bb 20>;
<bb 21>:
__asan_report_store1 (_89);
<bb 20>:
if (_84 != 0)
goto <bb 19>;
else
goto <bb 18>;
<bb 19>:
__asan_report_store1 (_75);
<bb 18>:
<bb 16>:
__builtin_memset (_7, 0, _5);
_99 = &foo[11];
_100 = (unsigned long) _99;
_101 = _100 >> 3;
_102 = _101 + 17592186044416;
_103 = (signed char *) _102;
_104 = *_103;
_105 = _104 != 0;
_106 = _100 & 7;
_107 = (signed char) _106;
_108 = _107 >= _104;
_109 = _105 & _108;
if (_109 != 0)
goto <bb 23>;
else
goto <bb 22>;
<bb 23>:
__asan_report_store1 (_100);
<bb 22>:
__sync_fetch_and_add_1 (&foo[11], 1);
_8 = l_2;
foo ={v} {CLOBBER};
<L1>:
return _8;
}
;; Function _GLOBAL__sub_I_00099_0_foo (_GLOBAL__sub_I_00099_0_foo, funcdef_no=1, decl_uid=1752, cgraph_uid=4)
_GLOBAL__sub_I_00099_0_foo ()
{
<bb 2>:
__asan_init ();
return;
}
gcc/
* gimple.h (is_gimple_builtin_call): Declare ...
* gimple.c (is_gimple_builtin_call): ... New public function.
* asan.c (insert_if_then_before_iter, instrument_mem_region_access,
instrument_strlen_call, maybe_instrument_builtin_call,
instrument_call): New static functions.
(create_cond_insert_point): Renamed
create_cond_insert_point_before_iter into this. Add a new
parameter to decide whether to insert the condition before or
after the statement iterator.
(build_check_stmt): Adjust for the new create_cond_insert_point.
Add a new parameter to decide whether to add the instrumentation
code before or after the statement iterator.
(instrument_assignment): Factorize from ...
(transform_statements): ... here. Use maybe_instrument_call to
instrument builtin function calls as well.
(instrument_derefs): Adjust for the new parameter of
build_check_stmt. Fix detection of bit-field access.
From-SVN: r193440
2012-11-12 16:53:25 +01:00
|
|
|
|
|
|
|
If BEFORE_P is TRUE, *ITER is arranged to still point to the
|
|
|
|
statement it was pointing to prior to calling this function,
|
|
|
|
otherwise, it points to the statement logically following it. */
|
2012-11-12 16:51:13 +01:00
|
|
|
|
|
|
|
static void
|
2014-08-11 08:12:12 +02:00
|
|
|
build_check_stmt (location_t loc, tree base, tree len,
|
2014-06-16 10:43:47 +02:00
|
|
|
HOST_WIDE_INT size_in_bytes, gimple_stmt_iterator *iter,
|
2014-08-11 08:12:12 +02:00
|
|
|
bool is_non_zero_len, bool before_p, bool is_store,
|
2014-10-28 13:36:54 +01:00
|
|
|
bool is_scalar_access, unsigned int align = 0)
|
2012-11-12 16:51:13 +01:00
|
|
|
{
|
2014-06-16 10:43:47 +02:00
|
|
|
gimple_stmt_iterator gsi = *iter;
|
2015-09-20 02:52:59 +02:00
|
|
|
gimple *g;
|
2014-06-16 10:43:47 +02:00
|
|
|
|
2014-08-11 08:12:12 +02:00
|
|
|
gcc_assert (!(size_in_bytes > 0 && !is_non_zero_len));
|
2014-06-16 10:43:47 +02:00
|
|
|
|
2014-08-11 08:12:12 +02:00
|
|
|
gsi = *iter;
|
|
|
|
|
|
|
|
base = unshare_expr (base);
|
|
|
|
base = maybe_create_ssa_name (loc, base, &gsi, before_p);
|
|
|
|
|
2014-06-16 10:43:47 +02:00
|
|
|
if (len)
|
2014-09-01 09:47:37 +02:00
|
|
|
{
|
|
|
|
len = unshare_expr (len);
|
|
|
|
len = maybe_cast_to_ptrmode (loc, len, iter, before_p);
|
|
|
|
}
|
2014-06-16 10:43:47 +02:00
|
|
|
else
|
|
|
|
{
|
|
|
|
gcc_assert (size_in_bytes != -1);
|
|
|
|
len = build_int_cst (pointer_sized_int_node, size_in_bytes);
|
|
|
|
}
|
|
|
|
|
|
|
|
if (size_in_bytes > 1)
|
2014-05-30 20:37:59 +02:00
|
|
|
{
|
2014-06-16 10:43:47 +02:00
|
|
|
if ((size_in_bytes & (size_in_bytes - 1)) != 0
|
|
|
|
|| size_in_bytes > 16)
|
2014-08-11 08:12:12 +02:00
|
|
|
is_scalar_access = false;
|
2014-06-16 10:43:47 +02:00
|
|
|
else if (align && align < size_in_bytes * BITS_PER_UNIT)
|
|
|
|
{
|
|
|
|
/* On non-strict alignment targets, if
|
|
|
|
16-byte access is just 8-byte aligned,
|
|
|
|
this will result in misaligned shadow
|
|
|
|
memory 2 byte load, but otherwise can
|
|
|
|
be handled using one read. */
|
|
|
|
if (size_in_bytes != 16
|
|
|
|
|| STRICT_ALIGNMENT
|
|
|
|
|| align < 8 * BITS_PER_UNIT)
|
2014-08-11 08:12:12 +02:00
|
|
|
is_scalar_access = false;
|
2014-05-30 20:37:05 +02:00
|
|
|
}
|
Emit GIMPLE directly instead of gimplifying GENERIC.
This patch cleanups the instrumentation code generation by emitting
GIMPLE directly, as opposed to emitting GENERIC tree and then
gimplifying them. It also does some cleanups here and there
* Makefile.in (GTFILES): Add $(srcdir)/asan.c.
(asan.o): Update the dependencies of asan.o.
* asan.c (tm.h, tree.h, tm_p.h, basic-block.h, flags.h
function.h, tree-inline.h, tree-dump.h, diagnostic.h, demangle.h,
langhooks.h, ggc.h, cgraph.h, gimple.h): Remove these unused but
included headers.
(shadow_ptr_types): New variable.
(report_error_func): Change is_store argument to bool, don't append
newline to function name.
(PROB_VERY_UNLIKELY, PROB_ALWAYS): Define.
(build_check_stmt): Change is_store argument to bool. Emit GIMPLE
directly instead of creating trees and gimplifying them. Mark
the error reporting function as very unlikely.
(instrument_derefs): Change is_store argument to bool. Use
int_size_in_bytes to compute size_in_bytes, simplify size check.
Use build_fold_addr_expr instead of build_addr.
(transform_statements): Adjust instrument_derefs caller.
Use gimple_assign_single_p as stmt test. Don't look at MEM refs
in rhs2.
(asan_init_shadow_ptr_types): New function.
(asan_instrument): Don't push/pop gimplify context.
Call asan_init_shadow_ptr_types if not yet initialized.
* asan.h (ASAN_SHADOW_SHIFT): Adjust comment.
Co-Authored-By: Dodji Seketeli <dodji@redhat.com>
Co-Authored-By: Xinliang David Li <davidxl@google.com>
From-SVN: r193434
2012-11-12 16:51:53 +01:00
|
|
|
}
|
2012-11-12 16:51:13 +01:00
|
|
|
|
2014-08-11 08:12:12 +02:00
|
|
|
HOST_WIDE_INT flags = 0;
|
|
|
|
if (is_store)
|
|
|
|
flags |= ASAN_CHECK_STORE;
|
|
|
|
if (is_non_zero_len)
|
|
|
|
flags |= ASAN_CHECK_NON_ZERO_LEN;
|
|
|
|
if (is_scalar_access)
|
|
|
|
flags |= ASAN_CHECK_SCALAR_ACCESS;
|
|
|
|
|
2014-09-19 10:29:04 +02:00
|
|
|
g = gimple_build_call_internal (IFN_ASAN_CHECK, 4,
|
2014-08-11 08:12:12 +02:00
|
|
|
build_int_cst (integer_type_node, flags),
|
2014-09-19 10:29:04 +02:00
|
|
|
base, len,
|
|
|
|
build_int_cst (integer_type_node,
|
|
|
|
align / BITS_PER_UNIT));
|
2014-08-11 08:12:12 +02:00
|
|
|
gimple_set_location (g, loc);
|
|
|
|
if (before_p)
|
|
|
|
gsi_insert_before (&gsi, g, GSI_SAME_STMT);
|
2014-06-16 10:43:47 +02:00
|
|
|
else
|
|
|
|
{
|
|
|
|
gsi_insert_after (&gsi, g, GSI_NEW_STMT);
|
2014-08-11 08:12:12 +02:00
|
|
|
gsi_next (&gsi);
|
|
|
|
*iter = gsi;
|
2014-06-16 10:43:47 +02:00
|
|
|
}
|
2012-11-12 16:51:13 +01:00
|
|
|
}
|
|
|
|
|
|
|
|
/* If T represents a memory access, add instrumentation code before ITER.
|
|
|
|
LOCATION is source code location.
|
Instrument built-in memory access function calls
This patch instruments many memory access patterns through builtins.
Basically, for a call like:
__builtin_memset (from, 0, n_bytes);
the patch would only instrument the accesses at the beginning and at
the end of the memory region [from, from + n_bytes]. This is the
strategy used by the llvm implementation of asan.
This instrumentation is done for all the memory access builtin
functions that expose a well specified memory region -- one that
explicitly states the number of bytes accessed in the region.
A special treatment is used for __builtin_strlen. The patch
instruments the access to the first byte of its argument, as well as
the access to the byte (of the argument) at the offset returned by
strlen.
For the __sync_* and __atomic* calls the patch instruments the access
to the bytes pointed to by the argument.
While doing this, I have added a new parameter to build_check_stmt to
decide whether to insert the instrumentation code before or after the
statement iterator. This allows us to do away with the
gsi_{next,prev} dance we were doing in the callers of this function.
Tested by running cc1 -fasan on variations of simple programs like:
int
foo ()
{
char foo[10] = {0};
foo[0] = 't';
foo[1] = 'e';
foo[2] = 's';
foo[3] = 't';
int l = __builtin_strlen (foo);
int n = sizeof (foo);
__builtin_memset (&foo[4], 0, n - 4);
__sync_fetch_and_add (&foo[11], 1);
return l;
}
and by starring at the gimple output which for this function is:
;; Function foo (foo, funcdef_no=0, decl_uid=1714, cgraph_uid=0)
foo ()
{
int n;
int l;
char foo[10];
int D.1725;
char * D.1724;
int D.1723;
long unsigned int D.1722;
int D.1721;
long unsigned int D.1720;
long unsigned int _1;
int _4;
long unsigned int _5;
int _6;
char * _7;
int _8;
char * _9;
unsigned long _10;
unsigned long _11;
unsigned long _12;
signed char * _13;
signed char _14;
_Bool _15;
unsigned long _16;
signed char _17;
_Bool _18;
_Bool _19;
char * _20;
unsigned long _21;
unsigned long _22;
unsigned long _23;
signed char * _24;
signed char _25;
_Bool _26;
unsigned long _27;
signed char _28;
_Bool _29;
_Bool _30;
char * _31;
unsigned long _32;
unsigned long _33;
unsigned long _34;
signed char * _35;
signed char _36;
_Bool _37;
unsigned long _38;
signed char _39;
_Bool _40;
_Bool _41;
char * _42;
unsigned long _43;
unsigned long _44;
unsigned long _45;
signed char * _46;
signed char _47;
_Bool _48;
unsigned long _49;
signed char _50;
_Bool _51;
_Bool _52;
char * _53;
unsigned long _54;
unsigned long _55;
unsigned long _56;
signed char * _57;
signed char _58;
_Bool _59;
unsigned long _60;
signed char _61;
_Bool _62;
_Bool _63;
char[10] * _64;
unsigned long _65;
unsigned long _66;
unsigned long _67;
signed char * _68;
signed char _69;
_Bool _70;
unsigned long _71;
signed char _72;
_Bool _73;
_Bool _74;
unsigned long _75;
unsigned long _76;
unsigned long _77;
signed char * _78;
signed char _79;
_Bool _80;
unsigned long _81;
signed char _82;
_Bool _83;
_Bool _84;
long unsigned int _85;
long unsigned int _86;
char * _87;
char * _88;
unsigned long _89;
unsigned long _90;
unsigned long _91;
signed char * _92;
signed char _93;
_Bool _94;
unsigned long _95;
signed char _96;
_Bool _97;
_Bool _98;
char * _99;
unsigned long _100;
unsigned long _101;
unsigned long _102;
signed char * _103;
signed char _104;
_Bool _105;
unsigned long _106;
signed char _107;
_Bool _108;
_Bool _109;
<bb 2>:
foo = {};
_9 = &foo[0];
_10 = (unsigned long) _9;
_11 = _10 >> 3;
_12 = _11 + 17592186044416;
_13 = (signed char *) _12;
_14 = *_13;
_15 = _14 != 0;
_16 = _10 & 7;
_17 = (signed char) _16;
_18 = _17 >= _14;
_19 = _15 & _18;
if (_19 != 0)
goto <bb 5>;
else
goto <bb 4>;
<bb 5>:
__asan_report_store1 (_10);
<bb 4>:
foo[0] = 116;
_20 = &foo[1];
_21 = (unsigned long) _20;
_22 = _21 >> 3;
_23 = _22 + 17592186044416;
_24 = (signed char *) _23;
_25 = *_24;
_26 = _25 != 0;
_27 = _21 & 7;
_28 = (signed char) _27;
_29 = _28 >= _25;
_30 = _26 & _29;
if (_30 != 0)
goto <bb 7>;
else
goto <bb 6>;
<bb 7>:
__asan_report_store1 (_21);
<bb 6>:
foo[1] = 101;
_31 = &foo[2];
_32 = (unsigned long) _31;
_33 = _32 >> 3;
_34 = _33 + 17592186044416;
_35 = (signed char *) _34;
_36 = *_35;
_37 = _36 != 0;
_38 = _32 & 7;
_39 = (signed char) _38;
_40 = _39 >= _36;
_41 = _37 & _40;
if (_41 != 0)
goto <bb 9>;
else
goto <bb 8>;
<bb 9>:
__asan_report_store1 (_32);
<bb 8>:
foo[2] = 115;
_42 = &foo[3];
_43 = (unsigned long) _42;
_44 = _43 >> 3;
_45 = _44 + 17592186044416;
_46 = (signed char *) _45;
_47 = *_46;
_48 = _47 != 0;
_49 = _43 & 7;
_50 = (signed char) _49;
_51 = _50 >= _47;
_52 = _48 & _51;
if (_52 != 0)
goto <bb 11>;
else
goto <bb 10>;
<bb 11>:
__asan_report_store1 (_43);
<bb 10>:
foo[3] = 116;
_53 = (char *) &foo;
_54 = (unsigned long) _53;
_55 = _54 >> 3;
_56 = _55 + 17592186044416;
_57 = (signed char *) _56;
_58 = *_57;
_59 = _58 != 0;
_60 = _54 & 7;
_61 = (signed char) _60;
_62 = _61 >= _58;
_63 = _59 & _62;
if (_63 != 0)
goto <bb 13>;
else
goto <bb 12>;
<bb 13>:
__asan_report_load1 (_54);
<bb 12>:
_1 = __builtin_strlen (&foo);
_64 = _53 + _1;
_65 = (unsigned long) _64;
_66 = _65 >> 3;
_67 = _66 + 17592186044416;
_68 = (signed char *) _67;
_69 = *_68;
_70 = _69 != 0;
_71 = _65 & 7;
_72 = (signed char) _71;
_73 = _72 >= _69;
_74 = _70 & _73;
if (_74 != 0)
goto <bb 15>;
else
goto <bb 14>;
<bb 15>:
__asan_report_load1 (_65);
<bb 14>:
l_2 = (int) _1;
n_3 = 10;
_4 = n_3 + -4;
_5 = (long unsigned int) _4;
_6 = l_2 + 1;
_7 = &foo[_6];
if (_5 != 0)
goto <bb 17>;
else
goto <bb 16>;
<bb 17>:
_75 = (unsigned long) _7;
_76 = _75 >> 3;
_77 = _76 + 17592186044416;
_78 = (signed char *) _77;
_79 = *_78;
_80 = _79 != 0;
_81 = _75 & 7;
_82 = (signed char) _81;
_83 = _82 >= _79;
_84 = _80 & _83;
_85 = _5;
_86 = _85 - 1;
_87 = _7;
_88 = _87 + _86;
_89 = (unsigned long) _88;
_90 = _89 >> 3;
_91 = _90 + 17592186044416;
_92 = (signed char *) _91;
_93 = *_92;
_94 = _93 != 0;
_95 = _89 & 7;
_96 = (signed char) _95;
_97 = _96 >= _93;
_98 = _94 & _97;
if (_98 != 0)
goto <bb 21>;
else
goto <bb 20>;
<bb 21>:
__asan_report_store1 (_89);
<bb 20>:
if (_84 != 0)
goto <bb 19>;
else
goto <bb 18>;
<bb 19>:
__asan_report_store1 (_75);
<bb 18>:
<bb 16>:
__builtin_memset (_7, 0, _5);
_99 = &foo[11];
_100 = (unsigned long) _99;
_101 = _100 >> 3;
_102 = _101 + 17592186044416;
_103 = (signed char *) _102;
_104 = *_103;
_105 = _104 != 0;
_106 = _100 & 7;
_107 = (signed char) _106;
_108 = _107 >= _104;
_109 = _105 & _108;
if (_109 != 0)
goto <bb 23>;
else
goto <bb 22>;
<bb 23>:
__asan_report_store1 (_100);
<bb 22>:
__sync_fetch_and_add_1 (&foo[11], 1);
_8 = l_2;
foo ={v} {CLOBBER};
<L1>:
return _8;
}
;; Function _GLOBAL__sub_I_00099_0_foo (_GLOBAL__sub_I_00099_0_foo, funcdef_no=1, decl_uid=1752, cgraph_uid=4)
_GLOBAL__sub_I_00099_0_foo ()
{
<bb 2>:
__asan_init ();
return;
}
gcc/
* gimple.h (is_gimple_builtin_call): Declare ...
* gimple.c (is_gimple_builtin_call): ... New public function.
* asan.c (insert_if_then_before_iter, instrument_mem_region_access,
instrument_strlen_call, maybe_instrument_builtin_call,
instrument_call): New static functions.
(create_cond_insert_point): Renamed
create_cond_insert_point_before_iter into this. Add a new
parameter to decide whether to insert the condition before or
after the statement iterator.
(build_check_stmt): Adjust for the new create_cond_insert_point.
Add a new parameter to decide whether to add the instrumentation
code before or after the statement iterator.
(instrument_assignment): Factorize from ...
(transform_statements): ... here. Use maybe_instrument_call to
instrument builtin function calls as well.
(instrument_derefs): Adjust for the new parameter of
build_check_stmt. Fix detection of bit-field access.
From-SVN: r193440
2012-11-12 16:53:25 +01:00
|
|
|
IS_STORE is either TRUE (for a store) or FALSE (for a load). */
|
2012-11-12 16:51:13 +01:00
|
|
|
|
|
|
|
static void
|
|
|
|
instrument_derefs (gimple_stmt_iterator *iter, tree t,
|
[asan] Avoid instrumenting duplicated memory access in the same basic block
Like what Address Sanitizer does in LLVM, this patch avoids instrumented
duplicated memory accesses in the same basic blocks.
The approach taken is very conservative, to keep the pass simple, for
a start.
A memory access is considered to be a pair made of an expression tree
representing the beginning of the memory region that is accessed and
a the size of the access, in byte. For now that size is either 1, 2,
4, 8 or 16 bytes.
The patch builds a hash table of the memory accesses that have been
instrumented in the current basic block. Then it walks the gimple
statements of the current basic block. For each statement, it tests
if the memory regions it references have already been instrumented.
If not, the statement is instrumented and each memory references that
are actually instrumented are added to the hash table. When a memory
region is accessed (usually through builtin functions like memset),
then what gets added to the hash table is actually two memory
accesses: one for the beginning of the region, and the other for the
its end.
When the patch crosses a function call that is not a built-in function
that we ought to instrument, the hash table is cleared, because that
function call can possibly e.g free some memory that was instrumented.
Likewise, when a new basic block is visited, the hash table is
cleared. I guess we could be smarter than just unconditionally
clearing the hash table in this later case, but this is what asan@llvm
does, and for now, I thought starting in a conservative manner might
have some value.
The hash table is destroyed at the end of the pass.
Bootstrapped and tested against trunk on x86-64-unknown-linux-gnu.
gcc/
* Makefile.in (asan.o): Add new dependency on hash-table.h
* asan.c (struct asan_mem_ref, struct mem_ref_hasher): New types.
(asan_mem_ref_init, asan_mem_ref_get_end, get_mem_ref_hash_table)
(has_stmt_been_instrumented_p, empty_mem_ref_hash_table)
(free_mem_ref_resources, has_mem_ref_been_instrumented)
(has_stmt_been_instrumented_p, update_mem_ref_hash_table)
(get_mem_ref_of_assignment): New functions.
(get_mem_refs_of_builtin_call): Extract from
instrument_builtin_call and tweak a little bit to make it fit with
the new signature.
(instrument_builtin_call): Use the new
get_mem_refs_of_builtin_call. Use gimple_call_builtin_p instead
of is_gimple_builtin_call.
(instrument_derefs, instrument_mem_region_access): Insert the
instrumented memory reference into the hash table.
(maybe_instrument_assignment): Renamed instrument_assignment into
this, and change it to advance the iterator when instrumentation
actually happened and return true in that case. This makes it
homogeneous with maybe_instrument_assignment, and thus give a
chance to callers to be more 'regular'.
(transform_statements): Clear the memory reference hash table
whenever we enter a new BB, when we cross a function call, or when
we are done transforming statements. Use
maybe_instrument_assignment instead of instrumentation. No more
need to special case maybe_instrument_assignment and advance the
iterator after calling it; it's now handled just like
maybe_instrument_call. Update comment.
gcc/testsuite/
* c-c++-common/asan/no-redundant-instrumentation-1.c: New test.
* testsuite/c-c++-common/asan/no-redundant-instrumentation-2.c: Likewise.
* testsuite/c-c++-common/asan/no-redundant-instrumentation-3.c: Likewise.
* testsuite/c-c++-common/asan/inc.c: Likewise.
From-SVN: r196008
2013-02-13 11:32:26 +01:00
|
|
|
location_t location, bool is_store)
|
2012-11-12 16:51:13 +01:00
|
|
|
{
|
2014-01-09 08:31:05 +01:00
|
|
|
if (is_store && !ASAN_INSTRUMENT_WRITES)
|
|
|
|
return;
|
|
|
|
if (!is_store && !ASAN_INSTRUMENT_READS)
|
|
|
|
return;
|
|
|
|
|
2012-11-12 16:51:13 +01:00
|
|
|
tree type, base;
|
Emit GIMPLE directly instead of gimplifying GENERIC.
This patch cleanups the instrumentation code generation by emitting
GIMPLE directly, as opposed to emitting GENERIC tree and then
gimplifying them. It also does some cleanups here and there
* Makefile.in (GTFILES): Add $(srcdir)/asan.c.
(asan.o): Update the dependencies of asan.o.
* asan.c (tm.h, tree.h, tm_p.h, basic-block.h, flags.h
function.h, tree-inline.h, tree-dump.h, diagnostic.h, demangle.h,
langhooks.h, ggc.h, cgraph.h, gimple.h): Remove these unused but
included headers.
(shadow_ptr_types): New variable.
(report_error_func): Change is_store argument to bool, don't append
newline to function name.
(PROB_VERY_UNLIKELY, PROB_ALWAYS): Define.
(build_check_stmt): Change is_store argument to bool. Emit GIMPLE
directly instead of creating trees and gimplifying them. Mark
the error reporting function as very unlikely.
(instrument_derefs): Change is_store argument to bool. Use
int_size_in_bytes to compute size_in_bytes, simplify size check.
Use build_fold_addr_expr instead of build_addr.
(transform_statements): Adjust instrument_derefs caller.
Use gimple_assign_single_p as stmt test. Don't look at MEM refs
in rhs2.
(asan_init_shadow_ptr_types): New function.
(asan_instrument): Don't push/pop gimplify context.
Call asan_init_shadow_ptr_types if not yet initialized.
* asan.h (ASAN_SHADOW_SHIFT): Adjust comment.
Co-Authored-By: Dodji Seketeli <dodji@redhat.com>
Co-Authored-By: Xinliang David Li <davidxl@google.com>
From-SVN: r193434
2012-11-12 16:51:53 +01:00
|
|
|
HOST_WIDE_INT size_in_bytes;
|
2016-04-08 12:46:13 +02:00
|
|
|
if (location == UNKNOWN_LOCATION)
|
|
|
|
location = EXPR_LOCATION (t);
|
2012-11-12 16:51:13 +01:00
|
|
|
|
|
|
|
type = TREE_TYPE (t);
|
|
|
|
switch (TREE_CODE (t))
|
|
|
|
{
|
|
|
|
case ARRAY_REF:
|
|
|
|
case COMPONENT_REF:
|
|
|
|
case INDIRECT_REF:
|
|
|
|
case MEM_REF:
|
2013-11-22 21:04:45 +01:00
|
|
|
case VAR_DECL:
|
2014-10-13 12:44:45 +02:00
|
|
|
case BIT_FIELD_REF:
|
2012-11-12 16:51:13 +01:00
|
|
|
break;
|
2013-11-22 21:04:45 +01:00
|
|
|
/* FALLTHRU */
|
2012-11-12 16:51:13 +01:00
|
|
|
default:
|
|
|
|
return;
|
|
|
|
}
|
Emit GIMPLE directly instead of gimplifying GENERIC.
This patch cleanups the instrumentation code generation by emitting
GIMPLE directly, as opposed to emitting GENERIC tree and then
gimplifying them. It also does some cleanups here and there
* Makefile.in (GTFILES): Add $(srcdir)/asan.c.
(asan.o): Update the dependencies of asan.o.
* asan.c (tm.h, tree.h, tm_p.h, basic-block.h, flags.h
function.h, tree-inline.h, tree-dump.h, diagnostic.h, demangle.h,
langhooks.h, ggc.h, cgraph.h, gimple.h): Remove these unused but
included headers.
(shadow_ptr_types): New variable.
(report_error_func): Change is_store argument to bool, don't append
newline to function name.
(PROB_VERY_UNLIKELY, PROB_ALWAYS): Define.
(build_check_stmt): Change is_store argument to bool. Emit GIMPLE
directly instead of creating trees and gimplifying them. Mark
the error reporting function as very unlikely.
(instrument_derefs): Change is_store argument to bool. Use
int_size_in_bytes to compute size_in_bytes, simplify size check.
Use build_fold_addr_expr instead of build_addr.
(transform_statements): Adjust instrument_derefs caller.
Use gimple_assign_single_p as stmt test. Don't look at MEM refs
in rhs2.
(asan_init_shadow_ptr_types): New function.
(asan_instrument): Don't push/pop gimplify context.
Call asan_init_shadow_ptr_types if not yet initialized.
* asan.h (ASAN_SHADOW_SHIFT): Adjust comment.
Co-Authored-By: Dodji Seketeli <dodji@redhat.com>
Co-Authored-By: Xinliang David Li <davidxl@google.com>
From-SVN: r193434
2012-11-12 16:51:53 +01:00
|
|
|
|
|
|
|
size_in_bytes = int_size_in_bytes (type);
|
2014-05-30 20:37:05 +02:00
|
|
|
if (size_in_bytes <= 0)
|
Emit GIMPLE directly instead of gimplifying GENERIC.
This patch cleanups the instrumentation code generation by emitting
GIMPLE directly, as opposed to emitting GENERIC tree and then
gimplifying them. It also does some cleanups here and there
* Makefile.in (GTFILES): Add $(srcdir)/asan.c.
(asan.o): Update the dependencies of asan.o.
* asan.c (tm.h, tree.h, tm_p.h, basic-block.h, flags.h
function.h, tree-inline.h, tree-dump.h, diagnostic.h, demangle.h,
langhooks.h, ggc.h, cgraph.h, gimple.h): Remove these unused but
included headers.
(shadow_ptr_types): New variable.
(report_error_func): Change is_store argument to bool, don't append
newline to function name.
(PROB_VERY_UNLIKELY, PROB_ALWAYS): Define.
(build_check_stmt): Change is_store argument to bool. Emit GIMPLE
directly instead of creating trees and gimplifying them. Mark
the error reporting function as very unlikely.
(instrument_derefs): Change is_store argument to bool. Use
int_size_in_bytes to compute size_in_bytes, simplify size check.
Use build_fold_addr_expr instead of build_addr.
(transform_statements): Adjust instrument_derefs caller.
Use gimple_assign_single_p as stmt test. Don't look at MEM refs
in rhs2.
(asan_init_shadow_ptr_types): New function.
(asan_instrument): Don't push/pop gimplify context.
Call asan_init_shadow_ptr_types if not yet initialized.
* asan.h (ASAN_SHADOW_SHIFT): Adjust comment.
Co-Authored-By: Dodji Seketeli <dodji@redhat.com>
Co-Authored-By: Xinliang David Li <davidxl@google.com>
From-SVN: r193434
2012-11-12 16:51:53 +01:00
|
|
|
return;
|
|
|
|
|
|
|
|
HOST_WIDE_INT bitsize, bitpos;
|
|
|
|
tree offset;
|
decl.c, [...]: Remove redundant enum from machine_mode.
gcc/ada/
* gcc-interface/decl.c, gcc-interface/gigi.h, gcc-interface/misc.c,
gcc-interface/trans.c, gcc-interface/utils.c, gcc-interface/utils2.c:
Remove redundant enum from machine_mode.
gcc/c-family/
* c-common.c, c-common.h, c-cppbuiltin.c, c-lex.c: Remove redundant
enum from machine_mode.
gcc/c/
* c-decl.c, c-tree.h, c-typeck.c: Remove redundant enum from
machine_mode.
gcc/cp/
* constexpr.c: Remove redundant enum from machine_mode.
gcc/fortran/
* trans-types.c, trans-types.h: Remove redundant enum from
machine_mode.
gcc/go/
* go-lang.c: Remove redundant enum from machine_mode.
gcc/java/
* builtins.c, java-tree.h, typeck.c: Remove redundant enum from
machine_mode.
gcc/lto/
* lto-lang.c: Remove redundant enum from machine_mode.
gcc/
* addresses.h, alias.c, asan.c, auto-inc-dec.c, bt-load.c, builtins.c,
builtins.h, caller-save.c, calls.c, calls.h, cfgexpand.c, cfgloop.h,
cfgrtl.c, combine.c, compare-elim.c, config/aarch64/aarch64-builtins.c,
config/aarch64/aarch64-protos.h, config/aarch64/aarch64-simd.md,
config/aarch64/aarch64.c, config/aarch64/aarch64.h,
config/aarch64/aarch64.md, config/alpha/alpha-protos.h,
config/alpha/alpha.c, config/arc/arc-protos.h, config/arc/arc.c,
config/arc/arc.h, config/arc/predicates.md,
config/arm/aarch-common-protos.h, config/arm/aarch-common.c,
config/arm/arm-protos.h, config/arm/arm.c, config/arm/arm.h,
config/arm/arm.md, config/arm/neon.md, config/arm/thumb2.md,
config/avr/avr-log.c, config/avr/avr-protos.h, config/avr/avr.c,
config/avr/avr.md, config/bfin/bfin-protos.h, config/bfin/bfin.c,
config/c6x/c6x-protos.h, config/c6x/c6x.c, config/c6x/c6x.md,
config/cr16/cr16-protos.h, config/cr16/cr16.c,
config/cris/cris-protos.h, config/cris/cris.c, config/cris/cris.md,
config/darwin-protos.h, config/darwin.c,
config/epiphany/epiphany-protos.h, config/epiphany/epiphany.c,
config/epiphany/epiphany.md, config/fr30/fr30.c,
config/frv/frv-protos.h, config/frv/frv.c, config/frv/predicates.md,
config/h8300/h8300-protos.h, config/h8300/h8300.c,
config/i386/i386-builtin-types.awk, config/i386/i386-protos.h,
config/i386/i386.c, config/i386/i386.md, config/i386/predicates.md,
config/i386/sse.md, config/i386/sync.md, config/ia64/ia64-protos.h,
config/ia64/ia64.c, config/iq2000/iq2000-protos.h,
config/iq2000/iq2000.c, config/iq2000/iq2000.md,
config/lm32/lm32-protos.h, config/lm32/lm32.c,
config/m32c/m32c-protos.h, config/m32c/m32c.c,
config/m32r/m32r-protos.h, config/m32r/m32r.c,
config/m68k/m68k-protos.h, config/m68k/m68k.c,
config/mcore/mcore-protos.h, config/mcore/mcore.c,
config/mcore/mcore.md, config/mep/mep-protos.h, config/mep/mep.c,
config/microblaze/microblaze-protos.h, config/microblaze/microblaze.c,
config/mips/mips-protos.h, config/mips/mips.c,
config/mmix/mmix-protos.h, config/mmix/mmix.c,
config/mn10300/mn10300-protos.h, config/mn10300/mn10300.c,
config/moxie/moxie.c, config/msp430/msp430-protos.h,
config/msp430/msp430.c, config/nds32/nds32-cost.c,
config/nds32/nds32-intrinsic.c, config/nds32/nds32-md-auxiliary.c,
config/nds32/nds32-protos.h, config/nds32/nds32.c,
config/nios2/nios2-protos.h, config/nios2/nios2.c,
config/pa/pa-protos.h, config/pa/pa.c, config/pdp11/pdp11-protos.h,
config/pdp11/pdp11.c, config/rl78/rl78-protos.h, config/rl78/rl78.c,
config/rs6000/altivec.md, config/rs6000/rs6000-c.c,
config/rs6000/rs6000-protos.h, config/rs6000/rs6000.c,
config/rs6000/rs6000.h, config/rx/rx-protos.h, config/rx/rx.c,
config/s390/predicates.md, config/s390/s390-protos.h,
config/s390/s390.c, config/s390/s390.h, config/s390/s390.md,
config/sh/predicates.md, config/sh/sh-protos.h, config/sh/sh.c,
config/sh/sh.md, config/sparc/predicates.md,
config/sparc/sparc-protos.h, config/sparc/sparc.c,
config/sparc/sparc.md, config/spu/spu-protos.h, config/spu/spu.c,
config/stormy16/stormy16-protos.h, config/stormy16/stormy16.c,
config/tilegx/tilegx-protos.h, config/tilegx/tilegx.c,
config/tilegx/tilegx.md, config/tilepro/tilepro-protos.h,
config/tilepro/tilepro.c, config/v850/v850-protos.h,
config/v850/v850.c, config/v850/v850.md, config/vax/vax-protos.h,
config/vax/vax.c, config/vms/vms-c.c, config/xtensa/xtensa-protos.h,
config/xtensa/xtensa.c, coverage.c, cprop.c, cse.c, cselib.c, cselib.h,
dbxout.c, ddg.c, df-problems.c, dfp.c, dfp.h, doc/md.texi,
doc/rtl.texi, doc/tm.texi, doc/tm.texi.in, dojump.c, dse.c,
dwarf2cfi.c, dwarf2out.c, dwarf2out.h, emit-rtl.c, emit-rtl.h,
except.c, explow.c, expmed.c, expmed.h, expr.c, expr.h, final.c,
fixed-value.c, fixed-value.h, fold-const.c, function.c, function.h,
fwprop.c, gcse.c, gengenrtl.c, genmodes.c, genopinit.c, genoutput.c,
genpreds.c, genrecog.c, gensupport.c, gimple-ssa-strength-reduction.c,
graphite-clast-to-gimple.c, haifa-sched.c, hooks.c, hooks.h, ifcvt.c,
internal-fn.c, ira-build.c, ira-color.c, ira-conflicts.c, ira-costs.c,
ira-emit.c, ira-int.h, ira-lives.c, ira.c, ira.h, jump.c, langhooks.h,
libfuncs.h, lists.c, loop-doloop.c, loop-invariant.c, loop-iv.c,
loop-unroll.c, lower-subreg.c, lower-subreg.h, lra-assigns.c,
lra-constraints.c, lra-eliminations.c, lra-int.h, lra-lives.c,
lra-spills.c, lra.c, lra.h, machmode.h, omp-low.c, optabs.c, optabs.h,
output.h, postreload.c, print-tree.c, read-rtl.c, real.c, real.h,
recog.c, recog.h, ree.c, reg-stack.c, regcprop.c, reginfo.c,
regrename.c, regs.h, reload.c, reload.h, reload1.c, rtl.c, rtl.h,
rtlanal.c, rtlhash.c, rtlhooks-def.h, rtlhooks.c, sched-deps.c,
sel-sched-dump.c, sel-sched-ir.c, sel-sched-ir.h, sel-sched.c,
simplify-rtx.c, stmt.c, stor-layout.c, stor-layout.h, target.def,
targhooks.c, targhooks.h, tree-affine.c, tree-call-cdce.c,
tree-complex.c, tree-data-ref.c, tree-dfa.c, tree-if-conv.c,
tree-inline.c, tree-outof-ssa.c, tree-scalar-evolution.c,
tree-ssa-address.c, tree-ssa-ccp.c, tree-ssa-loop-ivopts.c,
tree-ssa-loop-ivopts.h, tree-ssa-loop-manip.c,
tree-ssa-loop-prefetch.c, tree-ssa-math-opts.c, tree-ssa-reassoc.c,
tree-ssa-sccvn.c, tree-streamer-in.c, tree-switch-conversion.c,
tree-vect-data-refs.c, tree-vect-generic.c, tree-vect-loop.c,
tree-vect-patterns.c, tree-vect-slp.c, tree-vect-stmts.c,
tree-vrp.c, tree.c, tree.h, tsan.c, ubsan.c, valtrack.c,
var-tracking.c, varasm.c: Remove redundant enum from
machine_mode.
gcc/
* gengtype.c (main): Treat machine_mode as a scalar typedef.
* genmodes.c (emit_insn_modes_h): Hide inline functions if
USED_FOR_TARGET.
From-SVN: r216834
2014-10-29 13:02:45 +01:00
|
|
|
machine_mode mode;
|
2015-11-08 19:33:42 +01:00
|
|
|
int unsignedp, reversep, volatilep = 0;
|
|
|
|
tree inner = get_inner_reference (t, &bitsize, &bitpos, &offset, &mode,
|
Convert TYPE_ALIGN_OK to a TYPE_LANG_FLAG.
2016-07-11 Bernd Edlinger <bernd.edlinger@hotmail.de>
Convert TYPE_ALIGN_OK to a TYPE_LANG_FLAG.
* tree-core.h (tree_base::nothrow_flag): Adjust comment.
(tree_type_common::lang_flag_7): New.
(tree_type_common::spare): Reduce size.
* tree.h (TYPE_ALIGN_OK): Remove.
(TYPE_LANG_FLAG_7): New.
(get_inner_reference): Adjust header.
* print-tree.c (print_node): Adjust.
* expr.c (get_inner_reference): Remove parameter keep_aligning.
(get_bit_range, expand_assignment, expand_expr_addr_expr_1): Adjust
calls to get_inner_reference.
(expand_expr_real_1): Adjust call to get_inner_reference. Remove
handling of TYPE_ALIGN_OK.
* builtins.c (get_object_alignment_2): Adjust call to
get_inner_reference. Remove handling of VIEW_CONVERT_EXPR.
* emit-rtl.c (set_mem_attributes_minus_bitpos): Remove handling of
TYPE_ALIGN_OK.
* asan.c (instrument_derefs): Adjust calls to get_inner_reference.
* cfgexpand.c (expand_debug_expr): Likewise.
* dbxout.c (dbxout_expand_expr): Likewise.
* dwarf2out.c (loc_list_for_address_of_addr_expr_of_indirect_ref,
loc_list_from_tree, fortran_common): Likewise.
* fold-const.c (optimize_bit_field_compare,
decode_field_reference, fold_unary_loc, fold_comparison,
split_address_to_core_and_offset): Likewise.
* gimple-laddress.c (execute): Likewise.
* gimple-ssa-strength-reduction.c (slsr_process_ref): Likewise.
* gimplify.c (gimplify_scan_omp_clauses): Likewise.
* hsa-gen.c (gen_hsa_addr): Likewise.
	* simplify-rtx.c (delegitimize_mem_from_attrs): Likewise.
* tsan.c (instrument_expr): Likewise.
* ubsan.c (instrument_bool_enum_load, instrument_object_size): Likewise.
* tree.c (verify_type_variant): Remove handling of TYPE_ALIGN_OK.
* tree-affine.c (tree_to_aff_combination,
get_inner_reference_aff): Adjust calls to get_inner_reference.
* tree-data-ref.c (split_constant_offset_1,
dr_analyze_innermost): Likewise.
* tree-scalar-evolution.c (interpret_rhs_expr): Likewise.
* tree-sra.c (ipa_sra_check_caller): Likewise.
* tree-ssa-loop-ivopts.c (split_address_cost): Likewise.
* tree-ssa-math-opts.c (find_bswap_or_nop_load,
bswap_replace): Likewise.
* tree-vect-data-refs.c (vect_check_gather,
vect_analyze_data_refs): Likewise.
* config/mips/mips.c (r10k_safe_mem_expr_p): Likewise.
* config/pa/pa.c (pa_emit_move_sequence): Remove handling of
TYPE_ALIGN_OK.
ada:
2016-07-11 Bernd Edlinger <bernd.edlinger@hotmail.de>
Convert TYPE_ALIGN_OK to a TYPE_LANG_FLAG.
* gcc-interface/ada-tree.h (TYPE_ALIGN_OK): Define.
* gcc-interface/trans.c (Attribute_to_gnu): Adjust call to
get_inner_reference.
* gcc-interface/utils2.c (build_unary_op): Likewise.
From-SVN: r238210
2016-07-11 17:02:12 +02:00
|
|
|
&unsignedp, &reversep, &volatilep);
|
2014-08-18 10:23:47 +02:00
|
|
|
|
|
|
|
if (TREE_CODE (t) == COMPONENT_REF
|
|
|
|
&& DECL_BIT_FIELD_REPRESENTATIVE (TREE_OPERAND (t, 1)) != NULL_TREE)
|
2012-12-10 08:44:18 +01:00
|
|
|
{
|
2014-08-18 10:23:47 +02:00
|
|
|
tree repr = DECL_BIT_FIELD_REPRESENTATIVE (TREE_OPERAND (t, 1));
|
|
|
|
instrument_derefs (iter, build3 (COMPONENT_REF, TREE_TYPE (repr),
|
|
|
|
TREE_OPERAND (t, 0), repr,
|
|
|
|
NULL_TREE), location, is_store);
|
2012-12-10 08:44:18 +01:00
|
|
|
return;
|
|
|
|
}
|
2014-08-18 10:23:47 +02:00
|
|
|
|
|
|
|
if (bitpos % BITS_PER_UNIT
|
|
|
|
|| bitsize != size_in_bytes * BITS_PER_UNIT)
|
2014-05-30 20:37:05 +02:00
|
|
|
return;
|
Emit GIMPLE directly instead of gimplifying GENERIC.
This patch cleanups the instrumentation code generation by emitting
GIMPLE directly, as opposed to emitting GENERIC tree and then
gimplifying them. It also does some cleanups here and there
* Makefile.in (GTFILES): Add $(srcdir)/asan.c.
(asan.o): Update the dependencies of asan.o.
* asan.c (tm.h, tree.h, tm_p.h, basic-block.h, flags.h
function.h, tree-inline.h, tree-dump.h, diagnostic.h, demangle.h,
langhooks.h, ggc.h, cgraph.h, gimple.h): Remove these unused but
included headers.
(shadow_ptr_types): New variable.
(report_error_func): Change is_store argument to bool, don't append
newline to function name.
(PROB_VERY_UNLIKELY, PROB_ALWAYS): Define.
(build_check_stmt): Change is_store argument to bool. Emit GIMPLE
directly instead of creating trees and gimplifying them. Mark
the error reporting function as very unlikely.
(instrument_derefs): Change is_store argument to bool. Use
int_size_in_bytes to compute size_in_bytes, simplify size check.
Use build_fold_addr_expr instead of build_addr.
(transform_statements): Adjust instrument_derefs caller.
Use gimple_assign_single_p as stmt test. Don't look at MEM refs
in rhs2.
(asan_init_shadow_ptr_types): New function.
(asan_instrument): Don't push/pop gimplify context.
Call asan_init_shadow_ptr_types if not yet initialized.
* asan.h (ASAN_SHADOW_SHIFT): Adjust comment.
Co-Authored-By: Dodji Seketeli <dodji@redhat.com>
Co-Authored-By: Xinliang David Li <davidxl@google.com>
From-SVN: r193434
2012-11-12 16:51:53 +01:00
|
|
|
|
tree-ssa.c (target_for_debug_bind, [...]): Use VAR_P and/or VAR_OR_FUNCTION_DECL_P macros.
* tree-ssa.c (target_for_debug_bind, verify_phi_args,
ssa_undefined_value_p, maybe_optimize_var): Use VAR_P and/or
VAR_OR_FUNCTION_DECL_P macros.
* tree-chkp.c (chkp_register_var_initializer, chkp_make_static_bounds,
chkp_get_bounds_for_decl_addr, chkp_parse_array_and_component_ref,
chkp_find_bounds_1): Likewise.
* ipa-polymorphic-call.c (decl_maybe_in_construction_p): Likewise.
* hsa-gen.c (get_symbol_for_decl): Likewise.
* cgraphunit.c (check_global_declaration, analyze_functions,
handle_alias_pairs, thunk_adjust, cgraph_node::expand_thunk):
Likewise.
* gimple-fold.c (can_refer_decl_in_current_unit_p,
canonicalize_constructor_val, gimple_get_virt_method_for_vtable):
Likewise.
* tree.c (set_decl_section_name, copy_node_stat,
need_assembler_name_p, free_lang_data_in_decl, find_decls_types_r,
merge_dllimport_decl_attributes, handle_dll_attribute,
decl_init_priority_insert, auto_var_in_fn_p, array_at_struct_end_p,
verify_type): Likewise.
* gimple-ssa-isolate-paths.c (find_implicit_erroneous_behavior,
find_explicit_erroneous_behavior): Likewise.
* sdbout.c (sdbout_toplevel_data, sdbout_late_global_decl): Likewise.
* ipa.c (process_references): Likewise.
* tree-chkp-opt.c (chkp_get_check_result): Likewise.
* varasm.c (get_block_for_decl, use_blocks_for_decl_p, make_decl_rtl,
notice_global_symbol, assemble_variable, mark_decl_referenced,
build_constant_desc, output_constant_def_contents, do_assemble_alias,
make_decl_one_only, default_section_type_flags,
categorize_decl_for_section, default_encode_section_info): Likewise.
* trans-mem.c (requires_barrier): Likewise.
* gimple-expr.c (mark_addressable): Likewise.
* cfgexpand.c (add_scope_conflicts_1, expand_one_var,
expand_used_vars_for_block, clear_tree_used, stack_protect_decl_p,
expand_debug_expr): Likewise.
* tree-dump.c (dequeue_and_dump): Likewise.
* ubsan.c (instrument_bool_enum_load): Likewise.
* tree-pretty-print.c (print_declaration): Likewise.
* simplify-rtx.c (delegitimize_mem_from_attrs): Likewise.
* tree-ssa-uninit.c (warn_uninitialized_vars): Likewise.
* asan.c (asan_protect_global, instrument_derefs): Likewise.
* tree-into-ssa.c (rewrite_stmt, maybe_register_def,
pass_build_ssa::execute): Likewise.
* var-tracking.c (var_debug_decl, track_expr_p): Likewise.
* tree-ssa-loop-ivopts.c (force_expr_to_var_cost, split_address_cost):
Likewise.
* ipa-split.c (test_nonssa_use, consider_split, mark_nonssa_use):
Likewise.
* tree-inline.c (insert_debug_decl_map, remap_ssa_name,
can_be_nonlocal, remap_decls, copy_debug_stmt,
initialize_inlined_parameters, add_local_variables,
reset_debug_binding, replace_locals_op): Likewise.
* dse.c (can_escape): Likewise.
* ipa-devirt.c (compare_virtual_tables, referenced_from_vtable_p):
Likewise.
* tree-diagnostic.c (default_tree_printer): Likewise.
* tree-streamer-in.c (unpack_ts_decl_common_value_fields,
unpack_ts_decl_with_vis_value_fields,
lto_input_ts_decl_common_tree_pointers): Likewise.
* builtins.c (builtin_save_expr, fold_builtin_expect,
readonly_data_expr): Likewise.
* tree-ssa-structalias.c (new_var_info, get_constraint_for_ssa_var,
create_variable_info_for, set_uids_in_ptset, visit_loadstore):
Likewise.
* gimple-streamer-out.c (output_gimple_stmt): Likewise.
* gimplify.c (force_constant_size, gimplify_bind_expr,
gimplify_decl_expr, gimplify_var_or_parm_decl,
gimplify_compound_lval, gimplify_init_constructor,
gimplify_modify_expr, gimplify_asm_expr, gimplify_oacc_declare,
gimplify_type_sizes): Likewise.
* cgraphbuild.c (record_reference, record_type_list, mark_address,
mark_load, mark_store, pass_build_cgraph_edges::execute): Likewise.
* tree-ssa-live.c (mark_all_vars_used_1, remove_unused_scope_block_p,
remove_unused_locals): Likewise.
* tree-ssa-alias.c (ptr_deref_may_alias_decl_p, ptrs_compare_unequal,
ref_maybe_used_by_call_p_1, call_may_clobber_ref_p_1): Likewise.
* function.c (instantiate_expr, instantiate_decls_1,
setjmp_vars_warning, add_local_decl): Likewise.
* alias.c (ao_ref_from_mem, get_alias_set, compare_base_symbol_refs):
Likewise.
* tree-stdarg.c (find_va_list_reference, va_list_counter_struct_op,
va_list_ptr_read, va_list_ptr_write, check_all_va_list_escapes,
optimize_va_list_gpr_fpr_size): Likewise.
* tree-nrv.c (pass_nrv::execute): Likewise.
* tsan.c (instrument_expr): Likewise.
* tree-ssa-dce.c (remove_dead_stmt): Likewise.
* vtable-verify.c (verify_bb_vtables): Likewise.
* tree-dfa.c (ssa_default_def, set_ssa_default_def,
get_ref_base_and_extent): Likewise.
* toplev.c (wrapup_global_declaration_1, wrapup_global_declaration_2):
Likewise.
* tree-sra.c (static bool constant_decl_p, find_var_candidates,
analyze_all_variable_accesses): Likewise.
* tree-nested.c (get_nonlocal_debug_decl,
convert_nonlocal_omp_clauses, note_nonlocal_vla_type,
note_nonlocal_block_vlas, convert_nonlocal_reference_stmt,
get_local_debug_decl, convert_local_omp_clauses,
convert_local_reference_stmt, nesting_copy_decl, remap_vla_decls):
Likewise.
* tree-vect-data-refs.c (vect_can_force_dr_alignment_p): Likewise.
* stmt.c (decl_overlaps_hard_reg_set_p): Likewise.
* dbxout.c (dbxout_late_global_decl, dbxout_type_fields,
dbxout_symbol, dbxout_common_check): Likewise.
* expr.c (expand_assignment, expand_expr_real_2, expand_expr_real_1,
string_constant): Likewise.
* hsa.c (hsa_get_declaration_name): Likewise.
* passes.c (rest_of_decl_compilation): Likewise.
* tree-ssanames.c (make_ssa_name_fn): Likewise.
* tree-streamer-out.c (pack_ts_decl_common_value_fields,
pack_ts_decl_with_vis_value_fields,
write_ts_decl_common_tree_pointers): Likewise.
* stor-layout.c (place_field): Likewise.
* symtab.c (symtab_node::maybe_create_reference,
symtab_node::verify_base, symtab_node::make_decl_local,
symtab_node::copy_visibility_from,
symtab_node::can_increase_alignment_p): Likewise.
* dwarf2out.c (add_var_loc_to_decl, tls_mem_loc_descriptor,
decl_by_reference_p, reference_to_unused, rtl_for_decl_location,
fortran_common, add_location_or_const_value_attribute,
add_scalar_info, add_linkage_name, set_block_abstract_flags,
local_function_static, gen_variable_die, dwarf2out_late_global_decl,
optimize_one_addr_into_implicit_ptr,
optimize_location_into_implicit_ptr): Likewise.
* gimple-low.c (record_vars_into): Likewise.
* ipa-visibility.c (update_vtable_references): Likewise.
* tree-ssa-address.c (fixed_address_object_p, copy_ref_info):
Likewise.
* lto-streamer-out.c (tree_is_indexable, get_symbol_initial_value,
DFS::DFS_write_tree_body, write_symbol): Likewise.
* langhooks.c (lhd_warn_unused_global_decl,
lhd_set_decl_assembler_name): Likewise.
* attribs.c (decl_attributes): Likewise.
* except.c (output_ttype): Likewise.
* varpool.c (varpool_node::get_create, ctor_for_folding,
varpool_node::assemble_decl, varpool_node::create_alias): Likewise.
* fold-const.c (fold_unary_loc): Likewise.
* ipa-prop.c (ipa_compute_jump_functions_for_edge,
ipa_find_agg_cst_from_init): Likewise.
* omp-low.c (expand_omp_regimplify_p, expand_omp_taskreg,
expand_omp_target, lower_omp_regimplify_p,
grid_reg_assignment_to_local_var_p, grid_remap_prebody_decls,
find_link_var_op): Likewise.
* tree-chrec.c (chrec_contains_symbols): Likewise.
* tree-cfg.c (verify_address, verify_expr, verify_expr_location_1,
gimple_duplicate_bb, move_stmt_op, replace_block_vars_by_duplicates,
execute_fixup_cfg): Likewise.
From-SVN: r240900
2016-10-09 13:19:48 +02:00
|
|
|
if (VAR_P (inner)
|
2013-11-22 21:04:45 +01:00
|
|
|
&& offset == NULL_TREE
|
|
|
|
&& bitpos >= 0
|
|
|
|
&& DECL_SIZE (inner)
|
|
|
|
&& tree_fits_shwi_p (DECL_SIZE (inner))
|
|
|
|
&& bitpos + bitsize <= tree_to_shwi (DECL_SIZE (inner)))
|
|
|
|
{
|
|
|
|
if (DECL_THREAD_LOCAL_P (inner))
|
|
|
|
return;
|
2015-03-11 08:39:20 +01:00
|
|
|
if (!ASAN_GLOBALS && is_global_var (inner))
|
|
|
|
return;
|
2013-11-22 21:04:45 +01:00
|
|
|
if (!TREE_STATIC (inner))
|
|
|
|
{
|
|
|
|
/* Automatic vars in the current function will be always
|
|
|
|
accessible. */
|
2016-11-07 11:23:38 +01:00
|
|
|
if (decl_function_context (inner) == current_function_decl
|
|
|
|
&& (!asan_sanitize_use_after_scope ()
|
|
|
|
|| !TREE_ADDRESSABLE (inner)))
|
2013-11-22 21:04:45 +01:00
|
|
|
return;
|
|
|
|
}
|
|
|
|
/* Always instrument external vars, they might be dynamically
|
|
|
|
initialized. */
|
|
|
|
else if (!DECL_EXTERNAL (inner))
|
|
|
|
{
|
|
|
|
/* For static vars if they are known not to be dynamically
|
|
|
|
initialized, they will be always accessible. */
|
2014-07-24 14:25:27 +02:00
|
|
|
varpool_node *vnode = varpool_node::get (inner);
|
2013-11-22 21:04:45 +01:00
|
|
|
if (vnode && !vnode->dynamically_initialized)
|
|
|
|
return;
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
Emit GIMPLE directly instead of gimplifying GENERIC.
This patch cleanups the instrumentation code generation by emitting
GIMPLE directly, as opposed to emitting GENERIC tree and then
gimplifying them. It also does some cleanups here and there
* Makefile.in (GTFILES): Add $(srcdir)/asan.c.
(asan.o): Update the dependencies of asan.o.
* asan.c (tm.h, tree.h, tm_p.h, basic-block.h, flags.h
function.h, tree-inline.h, tree-dump.h, diagnostic.h, demangle.h,
langhooks.h, ggc.h, cgraph.h, gimple.h): Remove these unused but
included headers.
(shadow_ptr_types): New variable.
(report_error_func): Change is_store argument to bool, don't append
newline to function name.
(PROB_VERY_UNLIKELY, PROB_ALWAYS): Define.
(build_check_stmt): Change is_store argument to bool. Emit GIMPLE
directly instead of creating trees and gimplifying them. Mark
the error reporting function as very unlikely.
(instrument_derefs): Change is_store argument to bool. Use
int_size_in_bytes to compute size_in_bytes, simplify size check.
Use build_fold_addr_expr instead of build_addr.
(transform_statements): Adjust instrument_derefs caller.
Use gimple_assign_single_p as stmt test. Don't look at MEM refs
in rhs2.
(asan_init_shadow_ptr_types): New function.
(asan_instrument): Don't push/pop gimplify context.
Call asan_init_shadow_ptr_types if not yet initialized.
* asan.h (ASAN_SHADOW_SHIFT): Adjust comment.
Co-Authored-By: Dodji Seketeli <dodji@redhat.com>
Co-Authored-By: Xinliang David Li <davidxl@google.com>
From-SVN: r193434
2012-11-12 16:51:53 +01:00
|
|
|
base = build_fold_addr_expr (t);
|
[asan] Avoid instrumenting duplicated memory access in the same basic block
Like what Address Sanitizer does in LLVM, this patch avoids instrumenting
duplicated memory accesses in the same basic blocks.
The approach taken is very conservative, to keep the pass simple, for
a start.
A memory access is considered to be a pair made of an expression tree
representing the beginning of the memory region that is accessed and
the size of the access, in bytes. For now that size is either 1, 2,
4, 8 or 16 bytes.
The patch builds a hash table of the memory accesses that have been
instrumented in the current basic block. Then it walks the gimple
statements of the current basic block. For each statement, it tests
if the memory regions it references have already been instrumented.
If not, the statement is instrumented and each memory references that
are actually instrumented are added to the hash table. When a memory
region is accessed (usually through builtin functions like memset),
then what gets added to the hash table is actually two memory
accesses: one for the beginning of the region, and the other for the
its end.
When the patch crosses a function call that is not a built-in function
that we ought to instrument, the hash table is cleared, because that
function call can possibly e.g free some memory that was instrumented.
Likewise, when a new basic block is visited, the hash table is
cleared. I guess we could be smarter than just unconditionally
clearing the hash table in this later case, but this is what asan@llvm
does, and for now, I thought starting in a conservative manner might
have some value.
The hash table is destroyed at the end of the pass.
Bootstrapped and tested against trunk on x86-64-unknown-linux-gnu.
gcc/
* Makefile.in (asan.o): Add new dependency on hash-table.h
* asan.c (struct asan_mem_ref, struct mem_ref_hasher): New types.
(asan_mem_ref_init, asan_mem_ref_get_end, get_mem_ref_hash_table)
(has_stmt_been_instrumented_p, empty_mem_ref_hash_table)
(free_mem_ref_resources, has_mem_ref_been_instrumented)
(has_stmt_been_instrumented_p, update_mem_ref_hash_table)
(get_mem_ref_of_assignment): New functions.
(get_mem_refs_of_builtin_call): Extract from
instrument_builtin_call and tweak a little bit to make it fit with
the new signature.
(instrument_builtin_call): Use the new
get_mem_refs_of_builtin_call. Use gimple_call_builtin_p instead
of is_gimple_builtin_call.
(instrument_derefs, instrument_mem_region_access): Insert the
instrumented memory reference into the hash table.
(maybe_instrument_assignment): Renamed instrument_assignment into
this, and change it to advance the iterator when instrumentation
actually happened and return true in that case. This makes it
homogeneous with maybe_instrument_assignment, and thus give a
chance to callers to be more 'regular'.
(transform_statements): Clear the memory reference hash table
whenever we enter a new BB, when we cross a function call, or when
we are done transforming statements. Use
maybe_instrument_assignment instead of instrumentation. No more
need to special case maybe_instrument_assignment and advance the
iterator after calling it; it's now handled just like
maybe_instrument_call. Update comment.
gcc/testsuite/
* c-c++-common/asan/no-redundant-instrumentation-1.c: New test.
* testsuite/c-c++-common/asan/no-redundant-instrumentation-2.c: Likewise.
* testsuite/c-c++-common/asan/no-redundant-instrumentation-3.c: Likewise.
* testsuite/c-c++-common/asan/inc.c: Likewise.
From-SVN: r196008
2013-02-13 11:32:26 +01:00
|
|
|
if (!has_mem_ref_been_instrumented (base, size_in_bytes))
|
|
|
|
{
|
2014-06-16 10:43:47 +02:00
|
|
|
unsigned int align = get_object_alignment (t);
|
|
|
|
build_check_stmt (location, base, NULL_TREE, size_in_bytes, iter,
|
2014-08-11 08:12:12 +02:00
|
|
|
/*is_non_zero_len*/size_in_bytes > 0, /*before_p=*/true,
|
2014-06-16 10:43:47 +02:00
|
|
|
is_store, /*is_scalar_access*/true, align);
|
[asan] Avoid instrumenting duplicated memory access in the same basic block
Like what Address Sanitizer does in LLVM, this patch avoids instrumenting
duplicated memory accesses in the same basic blocks.
The approach taken is very conservative, to keep the pass simple, for
a start.
A memory access is considered to be a pair made of an expression tree
representing the beginning of the memory region that is accessed and
a the size of the access, in byte. For now that size is either 1, 2,
4, 8 or 16 bytes.
The patch builds a hash table of the memory accesses that have been
instrumented in the current basic block. Then it walks the gimple
statements of the current basic block. For each statement, it tests
if the memory regions it references have already been instrumented.
If not, the statement is instrumented and each memory references that
are actually instrumented are added to the hash table. When a memory
region is accessed (usually through builtin functions like memset),
then what gets added to the hash table is actually two memory
accesses: one for the beginning of the region, and the other for the
its end.
When the patch crosses a function call that is not a built-in function
that we ought to instrument, the hash table is cleared, because that
function call can possibly e.g free some memory that was instrumented.
Likewise, when a new basic block is visited, the hash table is
cleared. I guess we could be smarter than just unconditionally
clearing the hash table in this later case, but this is what asan@llvm
does, and for now, I thought starting in a conservative manner might
have some value.
The hash table is destroyed at the end of the pass.
Bootstrapped and tested against trunk on x86-64-unknown-linux-gnu.
gcc/
* Makefile.in (asan.o): Add new dependency on hash-table.h
* asan.c (struct asan_mem_ref, struct mem_ref_hasher): New types.
(asan_mem_ref_init, asan_mem_ref_get_end, get_mem_ref_hash_table)
(has_stmt_been_instrumented_p, empty_mem_ref_hash_table)
(free_mem_ref_resources, has_mem_ref_been_instrumented)
(has_stmt_been_instrumented_p, update_mem_ref_hash_table)
(get_mem_ref_of_assignment): New functions.
(get_mem_refs_of_builtin_call): Extract from
instrument_builtin_call and tweak a little bit to make it fit with
the new signature.
(instrument_builtin_call): Use the new
get_mem_refs_of_builtin_call. Use gimple_call_builtin_p instead
of is_gimple_builtin_call.
(instrument_derefs, instrument_mem_region_access): Insert the
instrumented memory reference into the hash table.
(maybe_instrument_assignment): Renamed instrument_assignment into
this, and change it to advance the iterator when instrumentation
actually happened and return true in that case. This makes it
homogeneous with maybe_instrument_assignment, and thus give a
chance to callers to be more 'regular'.
(transform_statements): Clear the memory reference hash table
whenever we enter a new BB, when we cross a function call, or when
we are done transforming statements. Use
maybe_instrument_assignment instead of instrumentation. No more
need to special case maybe_instrument_assignment and advance the
iterator after calling it; it's now handled just like
maybe_instrument_call. Update comment.
gcc/testsuite/
* c-c++-common/asan/no-redundant-instrumentation-1.c: New test.
* testsuite/c-c++-common/asan/no-redundant-instrumentation-2.c: Likewise.
* testsuite/c-c++-common/asan/no-redundant-instrumentation-3.c: Likewise.
* testsuite/c-c++-common/asan/inc.c: Likewise.
From-SVN: r196008
2013-02-13 11:32:26 +01:00
|
|
|
update_mem_ref_hash_table (base, size_in_bytes);
|
|
|
|
update_mem_ref_hash_table (t, size_in_bytes);
|
|
|
|
}
|
|
|
|
|
Instrument built-in memory access function calls
This patch instruments many memory access patterns through builtins.
Basically, for a call like:
__builtin_memset (from, 0, n_bytes);
the patch would only instrument the accesses at the beginning and at
the end of the memory region [from, from + n_bytes]. This is the
strategy used by the llvm implementation of asan.
This instrumentation is done for all the memory access builtin
functions that expose a well specified memory region -- one that
explicitly states the number of bytes accessed in the region.
A special treatment is used for __builtin_strlen. The patch
instruments the access to the first byte of its argument, as well as
the access to the byte (of the argument) at the offset returned by
strlen.
For the __sync_* and __atomic* calls the patch instruments the access
to the bytes pointed to by the argument.
While doing this, I have added a new parameter to build_check_stmt to
decide whether to insert the instrumentation code before or after the
statement iterator. This allows us to do away with the
gsi_{next,prev} dance we were doing in the callers of this function.
Tested by running cc1 -fasan on variations of simple programs like:
int
foo ()
{
char foo[10] = {0};
foo[0] = 't';
foo[1] = 'e';
foo[2] = 's';
foo[3] = 't';
int l = __builtin_strlen (foo);
int n = sizeof (foo);
__builtin_memset (&foo[4], 0, n - 4);
__sync_fetch_and_add (&foo[11], 1);
return l;
}
and by staring at the gimple output which for this function is:
;; Function foo (foo, funcdef_no=0, decl_uid=1714, cgraph_uid=0)
foo ()
{
int n;
int l;
char foo[10];
int D.1725;
char * D.1724;
int D.1723;
long unsigned int D.1722;
int D.1721;
long unsigned int D.1720;
long unsigned int _1;
int _4;
long unsigned int _5;
int _6;
char * _7;
int _8;
char * _9;
unsigned long _10;
unsigned long _11;
unsigned long _12;
signed char * _13;
signed char _14;
_Bool _15;
unsigned long _16;
signed char _17;
_Bool _18;
_Bool _19;
char * _20;
unsigned long _21;
unsigned long _22;
unsigned long _23;
signed char * _24;
signed char _25;
_Bool _26;
unsigned long _27;
signed char _28;
_Bool _29;
_Bool _30;
char * _31;
unsigned long _32;
unsigned long _33;
unsigned long _34;
signed char * _35;
signed char _36;
_Bool _37;
unsigned long _38;
signed char _39;
_Bool _40;
_Bool _41;
char * _42;
unsigned long _43;
unsigned long _44;
unsigned long _45;
signed char * _46;
signed char _47;
_Bool _48;
unsigned long _49;
signed char _50;
_Bool _51;
_Bool _52;
char * _53;
unsigned long _54;
unsigned long _55;
unsigned long _56;
signed char * _57;
signed char _58;
_Bool _59;
unsigned long _60;
signed char _61;
_Bool _62;
_Bool _63;
char[10] * _64;
unsigned long _65;
unsigned long _66;
unsigned long _67;
signed char * _68;
signed char _69;
_Bool _70;
unsigned long _71;
signed char _72;
_Bool _73;
_Bool _74;
unsigned long _75;
unsigned long _76;
unsigned long _77;
signed char * _78;
signed char _79;
_Bool _80;
unsigned long _81;
signed char _82;
_Bool _83;
_Bool _84;
long unsigned int _85;
long unsigned int _86;
char * _87;
char * _88;
unsigned long _89;
unsigned long _90;
unsigned long _91;
signed char * _92;
signed char _93;
_Bool _94;
unsigned long _95;
signed char _96;
_Bool _97;
_Bool _98;
char * _99;
unsigned long _100;
unsigned long _101;
unsigned long _102;
signed char * _103;
signed char _104;
_Bool _105;
unsigned long _106;
signed char _107;
_Bool _108;
_Bool _109;
<bb 2>:
foo = {};
_9 = &foo[0];
_10 = (unsigned long) _9;
_11 = _10 >> 3;
_12 = _11 + 17592186044416;
_13 = (signed char *) _12;
_14 = *_13;
_15 = _14 != 0;
_16 = _10 & 7;
_17 = (signed char) _16;
_18 = _17 >= _14;
_19 = _15 & _18;
if (_19 != 0)
goto <bb 5>;
else
goto <bb 4>;
<bb 5>:
__asan_report_store1 (_10);
<bb 4>:
foo[0] = 116;
_20 = &foo[1];
_21 = (unsigned long) _20;
_22 = _21 >> 3;
_23 = _22 + 17592186044416;
_24 = (signed char *) _23;
_25 = *_24;
_26 = _25 != 0;
_27 = _21 & 7;
_28 = (signed char) _27;
_29 = _28 >= _25;
_30 = _26 & _29;
if (_30 != 0)
goto <bb 7>;
else
goto <bb 6>;
<bb 7>:
__asan_report_store1 (_21);
<bb 6>:
foo[1] = 101;
_31 = &foo[2];
_32 = (unsigned long) _31;
_33 = _32 >> 3;
_34 = _33 + 17592186044416;
_35 = (signed char *) _34;
_36 = *_35;
_37 = _36 != 0;
_38 = _32 & 7;
_39 = (signed char) _38;
_40 = _39 >= _36;
_41 = _37 & _40;
if (_41 != 0)
goto <bb 9>;
else
goto <bb 8>;
<bb 9>:
__asan_report_store1 (_32);
<bb 8>:
foo[2] = 115;
_42 = &foo[3];
_43 = (unsigned long) _42;
_44 = _43 >> 3;
_45 = _44 + 17592186044416;
_46 = (signed char *) _45;
_47 = *_46;
_48 = _47 != 0;
_49 = _43 & 7;
_50 = (signed char) _49;
_51 = _50 >= _47;
_52 = _48 & _51;
if (_52 != 0)
goto <bb 11>;
else
goto <bb 10>;
<bb 11>:
__asan_report_store1 (_43);
<bb 10>:
foo[3] = 116;
_53 = (char *) &foo;
_54 = (unsigned long) _53;
_55 = _54 >> 3;
_56 = _55 + 17592186044416;
_57 = (signed char *) _56;
_58 = *_57;
_59 = _58 != 0;
_60 = _54 & 7;
_61 = (signed char) _60;
_62 = _61 >= _58;
_63 = _59 & _62;
if (_63 != 0)
goto <bb 13>;
else
goto <bb 12>;
<bb 13>:
__asan_report_load1 (_54);
<bb 12>:
_1 = __builtin_strlen (&foo);
_64 = _53 + _1;
_65 = (unsigned long) _64;
_66 = _65 >> 3;
_67 = _66 + 17592186044416;
_68 = (signed char *) _67;
_69 = *_68;
_70 = _69 != 0;
_71 = _65 & 7;
_72 = (signed char) _71;
_73 = _72 >= _69;
_74 = _70 & _73;
if (_74 != 0)
goto <bb 15>;
else
goto <bb 14>;
<bb 15>:
__asan_report_load1 (_65);
<bb 14>:
l_2 = (int) _1;
n_3 = 10;
_4 = n_3 + -4;
_5 = (long unsigned int) _4;
_6 = l_2 + 1;
_7 = &foo[_6];
if (_5 != 0)
goto <bb 17>;
else
goto <bb 16>;
<bb 17>:
_75 = (unsigned long) _7;
_76 = _75 >> 3;
_77 = _76 + 17592186044416;
_78 = (signed char *) _77;
_79 = *_78;
_80 = _79 != 0;
_81 = _75 & 7;
_82 = (signed char) _81;
_83 = _82 >= _79;
_84 = _80 & _83;
_85 = _5;
_86 = _85 - 1;
_87 = _7;
_88 = _87 + _86;
_89 = (unsigned long) _88;
_90 = _89 >> 3;
_91 = _90 + 17592186044416;
_92 = (signed char *) _91;
_93 = *_92;
_94 = _93 != 0;
_95 = _89 & 7;
_96 = (signed char) _95;
_97 = _96 >= _93;
_98 = _94 & _97;
if (_98 != 0)
goto <bb 21>;
else
goto <bb 20>;
<bb 21>:
__asan_report_store1 (_89);
<bb 20>:
if (_84 != 0)
goto <bb 19>;
else
goto <bb 18>;
<bb 19>:
__asan_report_store1 (_75);
<bb 18>:
<bb 16>:
__builtin_memset (_7, 0, _5);
_99 = &foo[11];
_100 = (unsigned long) _99;
_101 = _100 >> 3;
_102 = _101 + 17592186044416;
_103 = (signed char *) _102;
_104 = *_103;
_105 = _104 != 0;
_106 = _100 & 7;
_107 = (signed char) _106;
_108 = _107 >= _104;
_109 = _105 & _108;
if (_109 != 0)
goto <bb 23>;
else
goto <bb 22>;
<bb 23>:
__asan_report_store1 (_100);
<bb 22>:
__sync_fetch_and_add_1 (&foo[11], 1);
_8 = l_2;
foo ={v} {CLOBBER};
<L1>:
return _8;
}
;; Function _GLOBAL__sub_I_00099_0_foo (_GLOBAL__sub_I_00099_0_foo, funcdef_no=1, decl_uid=1752, cgraph_uid=4)
_GLOBAL__sub_I_00099_0_foo ()
{
<bb 2>:
__asan_init ();
return;
}
gcc/
* gimple.h (is_gimple_builtin_call): Declare ...
* gimple.c (is_gimple_builtin_call): ... New public function.
* asan.c (insert_if_then_before_iter, instrument_mem_region_access,
instrument_strlen_call, maybe_instrument_builtin_call,
instrument_call): New static functions.
(create_cond_insert_point): Renamed
create_cond_insert_point_before_iter into this. Add a new
parameter to decide whether to insert the condition before or
after the statement iterator.
(build_check_stmt): Adjust for the new create_cond_insert_point.
Add a new parameter to decide whether to add the instrumentation
code before or after the statement iterator.
(instrument_assignment): Factorize from ...
(transform_statements): ... here. Use maybe_instrument_call to
instrument builtin function calls as well.
(instrument_derefs): Adjust for the new parameter of
build_check_stmt. Fix detection of bit-field access.
From-SVN: r193440
2012-11-12 16:53:25 +01:00
|
|
|
}
|
|
|
|
|
2014-10-28 13:36:54 +01:00
|
|
|
/* Insert a memory reference into the hash table if access length
|
|
|
|
can be determined in compile time. */
|
|
|
|
|
|
|
|
static void
|
|
|
|
maybe_update_mem_ref_hash_table (tree base, tree len)
|
|
|
|
{
|
|
|
|
if (!POINTER_TYPE_P (TREE_TYPE (base))
|
|
|
|
|| !INTEGRAL_TYPE_P (TREE_TYPE (len)))
|
|
|
|
return;
|
|
|
|
|
|
|
|
HOST_WIDE_INT size_in_bytes = tree_fits_shwi_p (len) ? tree_to_shwi (len) : -1;
|
|
|
|
|
|
|
|
if (size_in_bytes != -1)
|
|
|
|
update_mem_ref_hash_table (base, size_in_bytes);
|
|
|
|
}
|
|
|
|
|
Instrument built-in memory access function calls
This patch instruments many memory access patterns through builtins.
Basically, for a call like:
__builtin_memset (from, 0, n_bytes);
the patch would only instrument the accesses at the beginning and at
the end of the memory region [from, from + n_bytes]. This is the
strategy used by the llvm implementation of asan.
This instrumentation is done for all the memory access builtin
functions that expose a well specified memory region -- one that
explicitly states the number of bytes accessed in the region.
A special treatment is used for __builtin_strlen. The patch
instruments the access to the first byte of its argument, as well as
the access to the byte (of the argument) at the offset returned by
strlen.
For the __sync_* and __atomic* calls the patch instruments the access
to the bytes pointed to by the argument.
While doing this, I have added a new parameter to build_check_stmt to
decide whether to insert the instrumentation code before or after the
statement iterator. This allows us to do away with the
gsi_{next,prev} dance we were doing in the callers of this function.
Tested by running cc1 -fasan on variations of simple programs like:
int
foo ()
{
char foo[10] = {0};
foo[0] = 't';
foo[1] = 'e';
foo[2] = 's';
foo[3] = 't';
int l = __builtin_strlen (foo);
int n = sizeof (foo);
__builtin_memset (&foo[4], 0, n - 4);
__sync_fetch_and_add (&foo[11], 1);
return l;
}
and by starring at the gimple output which for this function is:
;; Function foo (foo, funcdef_no=0, decl_uid=1714, cgraph_uid=0)
foo ()
{
int n;
int l;
char foo[10];
int D.1725;
char * D.1724;
int D.1723;
long unsigned int D.1722;
int D.1721;
long unsigned int D.1720;
long unsigned int _1;
int _4;
long unsigned int _5;
int _6;
char * _7;
int _8;
char * _9;
unsigned long _10;
unsigned long _11;
unsigned long _12;
signed char * _13;
signed char _14;
_Bool _15;
unsigned long _16;
signed char _17;
_Bool _18;
_Bool _19;
char * _20;
unsigned long _21;
unsigned long _22;
unsigned long _23;
signed char * _24;
signed char _25;
_Bool _26;
unsigned long _27;
signed char _28;
_Bool _29;
_Bool _30;
char * _31;
unsigned long _32;
unsigned long _33;
unsigned long _34;
signed char * _35;
signed char _36;
_Bool _37;
unsigned long _38;
signed char _39;
_Bool _40;
_Bool _41;
char * _42;
unsigned long _43;
unsigned long _44;
unsigned long _45;
signed char * _46;
signed char _47;
_Bool _48;
unsigned long _49;
signed char _50;
_Bool _51;
_Bool _52;
char * _53;
unsigned long _54;
unsigned long _55;
unsigned long _56;
signed char * _57;
signed char _58;
_Bool _59;
unsigned long _60;
signed char _61;
_Bool _62;
_Bool _63;
char[10] * _64;
unsigned long _65;
unsigned long _66;
unsigned long _67;
signed char * _68;
signed char _69;
_Bool _70;
unsigned long _71;
signed char _72;
_Bool _73;
_Bool _74;
unsigned long _75;
unsigned long _76;
unsigned long _77;
signed char * _78;
signed char _79;
_Bool _80;
unsigned long _81;
signed char _82;
_Bool _83;
_Bool _84;
long unsigned int _85;
long unsigned int _86;
char * _87;
char * _88;
unsigned long _89;
unsigned long _90;
unsigned long _91;
signed char * _92;
signed char _93;
_Bool _94;
unsigned long _95;
signed char _96;
_Bool _97;
_Bool _98;
char * _99;
unsigned long _100;
unsigned long _101;
unsigned long _102;
signed char * _103;
signed char _104;
_Bool _105;
unsigned long _106;
signed char _107;
_Bool _108;
_Bool _109;
<bb 2>:
foo = {};
_9 = &foo[0];
_10 = (unsigned long) _9;
_11 = _10 >> 3;
_12 = _11 + 17592186044416;
_13 = (signed char *) _12;
_14 = *_13;
_15 = _14 != 0;
_16 = _10 & 7;
_17 = (signed char) _16;
_18 = _17 >= _14;
_19 = _15 & _18;
if (_19 != 0)
goto <bb 5>;
else
goto <bb 4>;
<bb 5>:
__asan_report_store1 (_10);
<bb 4>:
foo[0] = 116;
_20 = &foo[1];
_21 = (unsigned long) _20;
_22 = _21 >> 3;
_23 = _22 + 17592186044416;
_24 = (signed char *) _23;
_25 = *_24;
_26 = _25 != 0;
_27 = _21 & 7;
_28 = (signed char) _27;
_29 = _28 >= _25;
_30 = _26 & _29;
if (_30 != 0)
goto <bb 7>;
else
goto <bb 6>;
<bb 7>:
__asan_report_store1 (_21);
<bb 6>:
foo[1] = 101;
_31 = &foo[2];
_32 = (unsigned long) _31;
_33 = _32 >> 3;
_34 = _33 + 17592186044416;
_35 = (signed char *) _34;
_36 = *_35;
_37 = _36 != 0;
_38 = _32 & 7;
_39 = (signed char) _38;
_40 = _39 >= _36;
_41 = _37 & _40;
if (_41 != 0)
goto <bb 9>;
else
goto <bb 8>;
<bb 9>:
__asan_report_store1 (_32);
<bb 8>:
foo[2] = 115;
_42 = &foo[3];
_43 = (unsigned long) _42;
_44 = _43 >> 3;
_45 = _44 + 17592186044416;
_46 = (signed char *) _45;
_47 = *_46;
_48 = _47 != 0;
_49 = _43 & 7;
_50 = (signed char) _49;
_51 = _50 >= _47;
_52 = _48 & _51;
if (_52 != 0)
goto <bb 11>;
else
goto <bb 10>;
<bb 11>:
__asan_report_store1 (_43);
<bb 10>:
foo[3] = 116;
_53 = (char *) &foo;
_54 = (unsigned long) _53;
_55 = _54 >> 3;
_56 = _55 + 17592186044416;
_57 = (signed char *) _56;
_58 = *_57;
_59 = _58 != 0;
_60 = _54 & 7;
_61 = (signed char) _60;
_62 = _61 >= _58;
_63 = _59 & _62;
if (_63 != 0)
goto <bb 13>;
else
goto <bb 12>;
<bb 13>:
__asan_report_load1 (_54);
<bb 12>:
_1 = __builtin_strlen (&foo);
_64 = _53 + _1;
_65 = (unsigned long) _64;
_66 = _65 >> 3;
_67 = _66 + 17592186044416;
_68 = (signed char *) _67;
_69 = *_68;
_70 = _69 != 0;
_71 = _65 & 7;
_72 = (signed char) _71;
_73 = _72 >= _69;
_74 = _70 & _73;
if (_74 != 0)
goto <bb 15>;
else
goto <bb 14>;
<bb 15>:
__asan_report_load1 (_65);
<bb 14>:
l_2 = (int) _1;
n_3 = 10;
_4 = n_3 + -4;
_5 = (long unsigned int) _4;
_6 = l_2 + 1;
_7 = &foo[_6];
if (_5 != 0)
goto <bb 17>;
else
goto <bb 16>;
<bb 17>:
_75 = (unsigned long) _7;
_76 = _75 >> 3;
_77 = _76 + 17592186044416;
_78 = (signed char *) _77;
_79 = *_78;
_80 = _79 != 0;
_81 = _75 & 7;
_82 = (signed char) _81;
_83 = _82 >= _79;
_84 = _80 & _83;
_85 = _5;
_86 = _85 - 1;
_87 = _7;
_88 = _87 + _86;
_89 = (unsigned long) _88;
_90 = _89 >> 3;
_91 = _90 + 17592186044416;
_92 = (signed char *) _91;
_93 = *_92;
_94 = _93 != 0;
_95 = _89 & 7;
_96 = (signed char) _95;
_97 = _96 >= _93;
_98 = _94 & _97;
if (_98 != 0)
goto <bb 21>;
else
goto <bb 20>;
<bb 21>:
__asan_report_store1 (_89);
<bb 20>:
if (_84 != 0)
goto <bb 19>;
else
goto <bb 18>;
<bb 19>:
__asan_report_store1 (_75);
<bb 18>:
<bb 16>:
__builtin_memset (_7, 0, _5);
_99 = &foo[11];
_100 = (unsigned long) _99;
_101 = _100 >> 3;
_102 = _101 + 17592186044416;
_103 = (signed char *) _102;
_104 = *_103;
_105 = _104 != 0;
_106 = _100 & 7;
_107 = (signed char) _106;
_108 = _107 >= _104;
_109 = _105 & _108;
if (_109 != 0)
goto <bb 23>;
else
goto <bb 22>;
<bb 23>:
__asan_report_store1 (_100);
<bb 22>:
__sync_fetch_and_add_1 (&foo[11], 1);
_8 = l_2;
foo ={v} {CLOBBER};
<L1>:
return _8;
}
;; Function _GLOBAL__sub_I_00099_0_foo (_GLOBAL__sub_I_00099_0_foo, funcdef_no=1, decl_uid=1752, cgraph_uid=4)
_GLOBAL__sub_I_00099_0_foo ()
{
<bb 2>:
__asan_init ();
return;
}
gcc/
* gimple.h (is_gimple_builtin_call): Declare ...
* gimple.c (is_gimple_builtin_call): ... New public function.
* asan.c (insert_if_then_before_iter, instrument_mem_region_access,
instrument_strlen_call, maybe_instrument_builtin_call,
instrument_call): New static functions.
(create_cond_insert_point): Renamed
create_cond_insert_point_before_iter into this. Add a new
parameter to decide whether to insert the condition before or
after the statement iterator.
(build_check_stmt): Adjust for the new create_cond_insert_point.
Add a new parameter to decide whether to add the instrumentation
code before or after the statement iterator.
(instrument_assignment): Factorize from ...
(transform_statements): ... here. Use maybe_instrument_call to
instrument builtin function calls as well.
(instrument_derefs): Adjust for the new parameter of
build_check_stmt. Fix detection of bit-field access.
From-SVN: r193440
2012-11-12 16:53:25 +01:00
|
|
|
/* Instrument an access to a contiguous memory region that starts at
|
|
|
|
the address pointed to by BASE, over a length of LEN (expressed in
|
|
|
|
the sizeof (*BASE) bytes). ITER points to the instruction before
|
|
|
|
which the instrumentation instructions must be inserted. LOCATION
|
|
|
|
is the source location that the instrumentation instructions must
|
|
|
|
have. If IS_STORE is true, then the memory access is a store;
|
|
|
|
otherwise, it's a load. */
|
|
|
|
|
|
|
|
static void
|
|
|
|
instrument_mem_region_access (tree base, tree len,
|
|
|
|
gimple_stmt_iterator *iter,
|
|
|
|
location_t location, bool is_store)
|
|
|
|
{
|
2012-12-03 16:04:57 +01:00
|
|
|
if (!POINTER_TYPE_P (TREE_TYPE (base))
|
|
|
|
|| !INTEGRAL_TYPE_P (TREE_TYPE (len))
|
|
|
|
|| integer_zerop (len))
|
Instrument built-in memory access function calls
This patch instruments many memory access patterns through builtins.
Basically, for a call like:
__builtin_memset (from, 0, n_bytes);
the patch would only instrument the accesses at the beginning and at
the end of the memory region [from, from + n_bytes]. This is the
strategy used by the llvm implementation of asan.
This instrumentation is done for all the memory access builtin
functions that expose a well specified memory region -- one that
explicitly states the number of bytes accessed in the region.
A special treatment is used for __builtin_strlen. The patch
instruments the access to the first byte of its argument, as well as
the access to the byte (of the argument) at the offset returned by
strlen.
For the __sync_* and __atomic* calls the patch instruments the access
to the bytes pointed to by the argument.
While doing this, I have added a new parameter to build_check_stmt to
decide whether to insert the instrumentation code before or after the
statement iterator. This allows us to do away with the
gsi_{next,prev} dance we were doing in the callers of this function.
Tested by running cc1 -fasan on variations of simple programs like:
int
foo ()
{
char foo[10] = {0};
foo[0] = 't';
foo[1] = 'e';
foo[2] = 's';
foo[3] = 't';
int l = __builtin_strlen (foo);
int n = sizeof (foo);
__builtin_memset (&foo[4], 0, n - 4);
__sync_fetch_and_add (&foo[11], 1);
return l;
}
and by starring at the gimple output which for this function is:
;; Function foo (foo, funcdef_no=0, decl_uid=1714, cgraph_uid=0)
foo ()
{
int n;
int l;
char foo[10];
int D.1725;
char * D.1724;
int D.1723;
long unsigned int D.1722;
int D.1721;
long unsigned int D.1720;
long unsigned int _1;
int _4;
long unsigned int _5;
int _6;
char * _7;
int _8;
char * _9;
unsigned long _10;
unsigned long _11;
unsigned long _12;
signed char * _13;
signed char _14;
_Bool _15;
unsigned long _16;
signed char _17;
_Bool _18;
_Bool _19;
char * _20;
unsigned long _21;
unsigned long _22;
unsigned long _23;
signed char * _24;
signed char _25;
_Bool _26;
unsigned long _27;
signed char _28;
_Bool _29;
_Bool _30;
char * _31;
unsigned long _32;
unsigned long _33;
unsigned long _34;
signed char * _35;
signed char _36;
_Bool _37;
unsigned long _38;
signed char _39;
_Bool _40;
_Bool _41;
char * _42;
unsigned long _43;
unsigned long _44;
unsigned long _45;
signed char * _46;
signed char _47;
_Bool _48;
unsigned long _49;
signed char _50;
_Bool _51;
_Bool _52;
char * _53;
unsigned long _54;
unsigned long _55;
unsigned long _56;
signed char * _57;
signed char _58;
_Bool _59;
unsigned long _60;
signed char _61;
_Bool _62;
_Bool _63;
char[10] * _64;
unsigned long _65;
unsigned long _66;
unsigned long _67;
signed char * _68;
signed char _69;
_Bool _70;
unsigned long _71;
signed char _72;
_Bool _73;
_Bool _74;
unsigned long _75;
unsigned long _76;
unsigned long _77;
signed char * _78;
signed char _79;
_Bool _80;
unsigned long _81;
signed char _82;
_Bool _83;
_Bool _84;
long unsigned int _85;
long unsigned int _86;
char * _87;
char * _88;
unsigned long _89;
unsigned long _90;
unsigned long _91;
signed char * _92;
signed char _93;
_Bool _94;
unsigned long _95;
signed char _96;
_Bool _97;
_Bool _98;
char * _99;
unsigned long _100;
unsigned long _101;
unsigned long _102;
signed char * _103;
signed char _104;
_Bool _105;
unsigned long _106;
signed char _107;
_Bool _108;
_Bool _109;
<bb 2>:
foo = {};
_9 = &foo[0];
_10 = (unsigned long) _9;
_11 = _10 >> 3;
_12 = _11 + 17592186044416;
_13 = (signed char *) _12;
_14 = *_13;
_15 = _14 != 0;
_16 = _10 & 7;
_17 = (signed char) _16;
_18 = _17 >= _14;
_19 = _15 & _18;
if (_19 != 0)
goto <bb 5>;
else
goto <bb 4>;
<bb 5>:
__asan_report_store1 (_10);
<bb 4>:
foo[0] = 116;
_20 = &foo[1];
_21 = (unsigned long) _20;
_22 = _21 >> 3;
_23 = _22 + 17592186044416;
_24 = (signed char *) _23;
_25 = *_24;
_26 = _25 != 0;
_27 = _21 & 7;
_28 = (signed char) _27;
_29 = _28 >= _25;
_30 = _26 & _29;
if (_30 != 0)
goto <bb 7>;
else
goto <bb 6>;
<bb 7>:
__asan_report_store1 (_21);
<bb 6>:
foo[1] = 101;
_31 = &foo[2];
_32 = (unsigned long) _31;
_33 = _32 >> 3;
_34 = _33 + 17592186044416;
_35 = (signed char *) _34;
_36 = *_35;
_37 = _36 != 0;
_38 = _32 & 7;
_39 = (signed char) _38;
_40 = _39 >= _36;
_41 = _37 & _40;
if (_41 != 0)
goto <bb 9>;
else
goto <bb 8>;
<bb 9>:
__asan_report_store1 (_32);
<bb 8>:
foo[2] = 115;
_42 = &foo[3];
_43 = (unsigned long) _42;
_44 = _43 >> 3;
_45 = _44 + 17592186044416;
_46 = (signed char *) _45;
_47 = *_46;
_48 = _47 != 0;
_49 = _43 & 7;
_50 = (signed char) _49;
_51 = _50 >= _47;
_52 = _48 & _51;
if (_52 != 0)
goto <bb 11>;
else
goto <bb 10>;
<bb 11>:
__asan_report_store1 (_43);
<bb 10>:
foo[3] = 116;
_53 = (char *) &foo;
_54 = (unsigned long) _53;
_55 = _54 >> 3;
_56 = _55 + 17592186044416;
_57 = (signed char *) _56;
_58 = *_57;
_59 = _58 != 0;
_60 = _54 & 7;
_61 = (signed char) _60;
_62 = _61 >= _58;
_63 = _59 & _62;
if (_63 != 0)
goto <bb 13>;
else
goto <bb 12>;
<bb 13>:
__asan_report_load1 (_54);
<bb 12>:
_1 = __builtin_strlen (&foo);
_64 = _53 + _1;
_65 = (unsigned long) _64;
_66 = _65 >> 3;
_67 = _66 + 17592186044416;
_68 = (signed char *) _67;
_69 = *_68;
_70 = _69 != 0;
_71 = _65 & 7;
_72 = (signed char) _71;
_73 = _72 >= _69;
_74 = _70 & _73;
if (_74 != 0)
goto <bb 15>;
else
goto <bb 14>;
<bb 15>:
__asan_report_load1 (_65);
<bb 14>:
l_2 = (int) _1;
n_3 = 10;
_4 = n_3 + -4;
_5 = (long unsigned int) _4;
_6 = l_2 + 1;
_7 = &foo[_6];
if (_5 != 0)
goto <bb 17>;
else
goto <bb 16>;
<bb 17>:
_75 = (unsigned long) _7;
_76 = _75 >> 3;
_77 = _76 + 17592186044416;
_78 = (signed char *) _77;
_79 = *_78;
_80 = _79 != 0;
_81 = _75 & 7;
_82 = (signed char) _81;
_83 = _82 >= _79;
_84 = _80 & _83;
_85 = _5;
_86 = _85 - 1;
_87 = _7;
_88 = _87 + _86;
_89 = (unsigned long) _88;
_90 = _89 >> 3;
_91 = _90 + 17592186044416;
_92 = (signed char *) _91;
_93 = *_92;
_94 = _93 != 0;
_95 = _89 & 7;
_96 = (signed char) _95;
_97 = _96 >= _93;
_98 = _94 & _97;
if (_98 != 0)
goto <bb 21>;
else
goto <bb 20>;
<bb 21>:
__asan_report_store1 (_89);
<bb 20>:
if (_84 != 0)
goto <bb 19>;
else
goto <bb 18>;
<bb 19>:
__asan_report_store1 (_75);
<bb 18>:
<bb 16>:
__builtin_memset (_7, 0, _5);
_99 = &foo[11];
_100 = (unsigned long) _99;
_101 = _100 >> 3;
_102 = _101 + 17592186044416;
_103 = (signed char *) _102;
_104 = *_103;
_105 = _104 != 0;
_106 = _100 & 7;
_107 = (signed char) _106;
_108 = _107 >= _104;
_109 = _105 & _108;
if (_109 != 0)
goto <bb 23>;
else
goto <bb 22>;
<bb 23>:
__asan_report_store1 (_100);
<bb 22>:
__sync_fetch_and_add_1 (&foo[11], 1);
_8 = l_2;
foo ={v} {CLOBBER};
<L1>:
return _8;
}
;; Function _GLOBAL__sub_I_00099_0_foo (_GLOBAL__sub_I_00099_0_foo, funcdef_no=1, decl_uid=1752, cgraph_uid=4)
_GLOBAL__sub_I_00099_0_foo ()
{
<bb 2>:
__asan_init ();
return;
}
gcc/
* gimple.h (is_gimple_builtin_call): Declare ...
* gimple.c (is_gimple_builtin_call): ... New public function.
* asan.c (insert_if_then_before_iter, instrument_mem_region_access,
instrument_strlen_call, maybe_instrument_builtin_call,
instrument_call): New static functions.
(create_cond_insert_point): Renamed
create_cond_insert_point_before_iter into this. Add a new
parameter to decide whether to insert the condition before or
after the statement iterator.
(build_check_stmt): Adjust for the new create_cond_insert_point.
Add a new parameter to decide whether to add the instrumentation
code before or after the statement iterator.
(instrument_assignment): Factorize from ...
(transform_statements): ... here. Use maybe_instrument_call to
instrument builtin function calls as well.
(instrument_derefs): Adjust for the new parameter of
build_check_stmt. Fix detection of bit-field access.
From-SVN: r193440
2012-11-12 16:53:25 +01:00
|
|
|
return;
|
|
|
|
|
2014-06-16 10:43:47 +02:00
|
|
|
HOST_WIDE_INT size_in_bytes = tree_fits_shwi_p (len) ? tree_to_shwi (len) : -1;
|
[asan] Avoid instrumenting duplicated memory access in the same basic block
Like what Address Sanitizer does in LLVM, this patch avoids instrumented
duplicated memory accesses in the same basic blocks.
The approach taken is very conservative, to keep the pass simple, for
a start.
A memory access is considered to be a pair made of an expression tree
representing the beginning of the memory region that is accessed and
a the size of the access, in byte. For now that size is either 1, 2,
4, 8 or 16 bytes.
The patch builds a hash table of the memory accesses that have been
instrumented in the current basic block. Then it walks the gimple
statements of the current basic block. For each statement, it tests
if the memory regions it references have already been instrumented.
If not, the statement is instrumented and each memory references that
are actually instrumented are added to the hash table. When a memory
region is accessed (usually through builtin functions like memset),
then what gets added to the hash table is actually two memory
accesses: one for the beginning of the region, and the other for the
its end.
When the patch crosses a function call that is not a built-in function
that we ought to instrument, the hash table is cleared, because that
function call can possibly e.g free some memory that was instrumented.
Likewise, when a new basic block is visited, the hash table is
cleared. I guess we could be smarter than just unconditionally
clearing the hash table in this later case, but this is what asan@llvm
does, and for now, I thought starting in a conservative manner might
have some value.
The hash table is destroyed at the end of the pass.
Bootstrapped and tested against trunk on x86-64-unknown-linux-gnu.
gcc/
* Makefile.in (asan.o): Add new dependency on hash-table.h
* asan.c (struct asan_mem_ref, struct mem_ref_hasher): New types.
(asan_mem_ref_init, asan_mem_ref_get_end, get_mem_ref_hash_table)
(has_stmt_been_instrumented_p, empty_mem_ref_hash_table)
(free_mem_ref_resources, has_mem_ref_been_instrumented)
(has_stmt_been_instrumented_p, update_mem_ref_hash_table)
(get_mem_ref_of_assignment): New functions.
(get_mem_refs_of_builtin_call): Extract from
instrument_builtin_call and tweak a little bit to make it fit with
the new signature.
(instrument_builtin_call): Use the new
get_mem_refs_of_builtin_call. Use gimple_call_builtin_p instead
of is_gimple_builtin_call.
(instrument_derefs, instrument_mem_region_access): Insert the
instrumented memory reference into the hash table.
(maybe_instrument_assignment): Renamed instrument_assignment into
this, and change it to advance the iterator when instrumentation
actually happened and return true in that case. This makes it
homogeneous with maybe_instrument_assignment, and thus give a
chance to callers to be more 'regular'.
(transform_statements): Clear the memory reference hash table
whenever we enter a new BB, when we cross a function call, or when
we are done transforming statements. Use
maybe_instrument_assignment instead of instrumentation. No more
need to special case maybe_instrument_assignment and advance the
iterator after calling it; it's now handled just like
maybe_instrument_call. Update comment.
gcc/testsuite/
* c-c++-common/asan/no-redundant-instrumentation-1.c: New test.
* testsuite/c-c++-common/asan/no-redundant-instrumentation-2.c: Likewise.
* testsuite/c-c++-common/asan/no-redundant-instrumentation-3.c: Likewise.
* testsuite/c-c++-common/asan/inc.c: Likewise.
From-SVN: r196008
2013-02-13 11:32:26 +01:00
|
|
|
|
2014-10-28 13:36:54 +01:00
|
|
|
if ((size_in_bytes == -1)
|
|
|
|
|| !has_mem_ref_been_instrumented (base, size_in_bytes))
|
|
|
|
{
|
|
|
|
build_check_stmt (location, base, len, size_in_bytes, iter,
|
|
|
|
/*is_non_zero_len*/size_in_bytes > 0, /*before_p*/true,
|
|
|
|
is_store, /*is_scalar_access*/false, /*align*/0);
|
|
|
|
}
|
2013-02-16 10:32:56 +01:00
|
|
|
|
2014-10-28 13:36:54 +01:00
|
|
|
maybe_update_mem_ref_hash_table (base, len);
|
2013-02-16 10:32:56 +01:00
|
|
|
*iter = gsi_for_stmt (gsi_stmt (*iter));
|
[asan] Avoid instrumenting duplicated memory access in the same basic block
Like what Address Sanitizer does in LLVM, this patch avoids instrumented
duplicated memory accesses in the same basic blocks.
The approach taken is very conservative, to keep the pass simple, for
a start.
A memory access is considered to be a pair made of an expression tree
representing the beginning of the memory region that is accessed and
a the size of the access, in byte. For now that size is either 1, 2,
4, 8 or 16 bytes.
The patch builds a hash table of the memory accesses that have been
instrumented in the current basic block. Then it walks the gimple
statements of the current basic block. For each statement, it tests
if the memory regions it references have already been instrumented.
If not, the statement is instrumented and each memory references that
are actually instrumented are added to the hash table. When a memory
region is accessed (usually through builtin functions like memset),
then what gets added to the hash table is actually two memory
accesses: one for the beginning of the region, and the other for the
its end.
When the patch crosses a function call that is not a built-in function
that we ought to instrument, the hash table is cleared, because that
function call can possibly e.g free some memory that was instrumented.
Likewise, when a new basic block is visited, the hash table is
cleared. I guess we could be smarter than just unconditionally
clearing the hash table in this later case, but this is what asan@llvm
does, and for now, I thought starting in a conservative manner might
have some value.
The hash table is destroyed at the end of the pass.
Bootstrapped and tested against trunk on x86-64-unknown-linux-gnu.
gcc/
* Makefile.in (asan.o): Add new dependency on hash-table.h
* asan.c (struct asan_mem_ref, struct mem_ref_hasher): New types.
(asan_mem_ref_init, asan_mem_ref_get_end, get_mem_ref_hash_table)
(has_stmt_been_instrumented_p, empty_mem_ref_hash_table)
(free_mem_ref_resources, has_mem_ref_been_instrumented)
(has_stmt_been_instrumented_p, update_mem_ref_hash_table)
(get_mem_ref_of_assignment): New functions.
(get_mem_refs_of_builtin_call): Extract from
instrument_builtin_call and tweak a little bit to make it fit with
the new signature.
(instrument_builtin_call): Use the new
get_mem_refs_of_builtin_call. Use gimple_call_builtin_p instead
of is_gimple_builtin_call.
(instrument_derefs, instrument_mem_region_access): Insert the
instrumented memory reference into the hash table.
(maybe_instrument_assignment): Renamed instrument_assignment into
this, and change it to advance the iterator when instrumentation
actually happened and return true in that case. This makes it
homogeneous with maybe_instrument_assignment, and thus give a
chance to callers to be more 'regular'.
(transform_statements): Clear the memory reference hash table
whenever we enter a new BB, when we cross a function call, or when
we are done transforming statements. Use
maybe_instrument_assignment instead of instrumentation. No more
need to special case maybe_instrument_assignment and advance the
iterator after calling it; it's now handled just like
maybe_instrument_call. Update comment.
gcc/testsuite/
* c-c++-common/asan/no-redundant-instrumentation-1.c: New test.
* testsuite/c-c++-common/asan/no-redundant-instrumentation-2.c: Likewise.
* testsuite/c-c++-common/asan/no-redundant-instrumentation-3.c: Likewise.
* testsuite/c-c++-common/asan/inc.c: Likewise.
From-SVN: r196008
2013-02-13 11:32:26 +01:00
|
|
|
}
|
Instrument built-in memory access function calls
This patch instruments many memory access patterns through builtins.
Basically, for a call like:
__builtin_memset (from, 0, n_bytes);
the patch would only instrument the accesses at the beginning and at
the end of the memory region [from, from + n_bytes]. This is the
strategy used by the llvm implementation of asan.
This instrumentation is done for all the memory access builtin
functions that expose a well specified memory region -- one that
explicitly states the number of bytes accessed in the region.
A special treatment is used for __builtin_strlen. The patch
instruments the access to the first byte of its argument, as well as
the access to the byte (of the argument) at the offset returned by
strlen.
For the __sync_* and __atomic* calls the patch instruments the access
to the bytes pointed to by the argument.
While doing this, I have added a new parameter to build_check_stmt to
decide whether to insert the instrumentation code before or after the
statement iterator. This allows us to do away with the
gsi_{next,prev} dance we were doing in the callers of this function.
Tested by running cc1 -fasan on variations of simple programs like:
int
foo ()
{
char foo[10] = {0};
foo[0] = 't';
foo[1] = 'e';
foo[2] = 's';
foo[3] = 't';
int l = __builtin_strlen (foo);
int n = sizeof (foo);
__builtin_memset (&foo[4], 0, n - 4);
__sync_fetch_and_add (&foo[11], 1);
return l;
}
and by starring at the gimple output which for this function is:
;; Function foo (foo, funcdef_no=0, decl_uid=1714, cgraph_uid=0)
foo ()
{
int n;
int l;
char foo[10];
int D.1725;
char * D.1724;
int D.1723;
long unsigned int D.1722;
int D.1721;
long unsigned int D.1720;
long unsigned int _1;
int _4;
long unsigned int _5;
int _6;
char * _7;
int _8;
char * _9;
unsigned long _10;
unsigned long _11;
unsigned long _12;
signed char * _13;
signed char _14;
_Bool _15;
unsigned long _16;
signed char _17;
_Bool _18;
_Bool _19;
char * _20;
unsigned long _21;
unsigned long _22;
unsigned long _23;
signed char * _24;
signed char _25;
_Bool _26;
unsigned long _27;
signed char _28;
_Bool _29;
_Bool _30;
char * _31;
unsigned long _32;
unsigned long _33;
unsigned long _34;
signed char * _35;
signed char _36;
_Bool _37;
unsigned long _38;
signed char _39;
_Bool _40;
_Bool _41;
char * _42;
unsigned long _43;
unsigned long _44;
unsigned long _45;
signed char * _46;
signed char _47;
_Bool _48;
unsigned long _49;
signed char _50;
_Bool _51;
_Bool _52;
char * _53;
unsigned long _54;
unsigned long _55;
unsigned long _56;
signed char * _57;
signed char _58;
_Bool _59;
unsigned long _60;
signed char _61;
_Bool _62;
_Bool _63;
char[10] * _64;
unsigned long _65;
unsigned long _66;
unsigned long _67;
signed char * _68;
signed char _69;
_Bool _70;
unsigned long _71;
signed char _72;
_Bool _73;
_Bool _74;
unsigned long _75;
unsigned long _76;
unsigned long _77;
signed char * _78;
signed char _79;
_Bool _80;
unsigned long _81;
signed char _82;
_Bool _83;
_Bool _84;
long unsigned int _85;
long unsigned int _86;
char * _87;
char * _88;
unsigned long _89;
unsigned long _90;
unsigned long _91;
signed char * _92;
signed char _93;
_Bool _94;
unsigned long _95;
signed char _96;
_Bool _97;
_Bool _98;
char * _99;
unsigned long _100;
unsigned long _101;
unsigned long _102;
signed char * _103;
signed char _104;
_Bool _105;
unsigned long _106;
signed char _107;
_Bool _108;
_Bool _109;
<bb 2>:
foo = {};
_9 = &foo[0];
_10 = (unsigned long) _9;
_11 = _10 >> 3;
_12 = _11 + 17592186044416;
_13 = (signed char *) _12;
_14 = *_13;
_15 = _14 != 0;
_16 = _10 & 7;
_17 = (signed char) _16;
_18 = _17 >= _14;
_19 = _15 & _18;
if (_19 != 0)
goto <bb 5>;
else
goto <bb 4>;
<bb 5>:
__asan_report_store1 (_10);
<bb 4>:
foo[0] = 116;
_20 = &foo[1];
_21 = (unsigned long) _20;
_22 = _21 >> 3;
_23 = _22 + 17592186044416;
_24 = (signed char *) _23;
_25 = *_24;
_26 = _25 != 0;
_27 = _21 & 7;
_28 = (signed char) _27;
_29 = _28 >= _25;
_30 = _26 & _29;
if (_30 != 0)
goto <bb 7>;
else
goto <bb 6>;
<bb 7>:
__asan_report_store1 (_21);
<bb 6>:
foo[1] = 101;
_31 = &foo[2];
_32 = (unsigned long) _31;
_33 = _32 >> 3;
_34 = _33 + 17592186044416;
_35 = (signed char *) _34;
_36 = *_35;
_37 = _36 != 0;
_38 = _32 & 7;
_39 = (signed char) _38;
_40 = _39 >= _36;
_41 = _37 & _40;
if (_41 != 0)
goto <bb 9>;
else
goto <bb 8>;
<bb 9>:
__asan_report_store1 (_32);
<bb 8>:
foo[2] = 115;
_42 = &foo[3];
_43 = (unsigned long) _42;
_44 = _43 >> 3;
_45 = _44 + 17592186044416;
_46 = (signed char *) _45;
_47 = *_46;
_48 = _47 != 0;
_49 = _43 & 7;
_50 = (signed char) _49;
_51 = _50 >= _47;
_52 = _48 & _51;
if (_52 != 0)
goto <bb 11>;
else
goto <bb 10>;
<bb 11>:
__asan_report_store1 (_43);
<bb 10>:
foo[3] = 116;
_53 = (char *) &foo;
_54 = (unsigned long) _53;
_55 = _54 >> 3;
_56 = _55 + 17592186044416;
_57 = (signed char *) _56;
_58 = *_57;
_59 = _58 != 0;
_60 = _54 & 7;
_61 = (signed char) _60;
_62 = _61 >= _58;
_63 = _59 & _62;
if (_63 != 0)
goto <bb 13>;
else
goto <bb 12>;
<bb 13>:
__asan_report_load1 (_54);
<bb 12>:
_1 = __builtin_strlen (&foo);
_64 = _53 + _1;
_65 = (unsigned long) _64;
_66 = _65 >> 3;
_67 = _66 + 17592186044416;
_68 = (signed char *) _67;
_69 = *_68;
_70 = _69 != 0;
_71 = _65 & 7;
_72 = (signed char) _71;
_73 = _72 >= _69;
_74 = _70 & _73;
if (_74 != 0)
goto <bb 15>;
else
goto <bb 14>;
<bb 15>:
__asan_report_load1 (_65);
<bb 14>:
l_2 = (int) _1;
n_3 = 10;
_4 = n_3 + -4;
_5 = (long unsigned int) _4;
_6 = l_2 + 1;
_7 = &foo[_6];
if (_5 != 0)
goto <bb 17>;
else
goto <bb 16>;
<bb 17>:
_75 = (unsigned long) _7;
_76 = _75 >> 3;
_77 = _76 + 17592186044416;
_78 = (signed char *) _77;
_79 = *_78;
_80 = _79 != 0;
_81 = _75 & 7;
_82 = (signed char) _81;
_83 = _82 >= _79;
_84 = _80 & _83;
_85 = _5;
_86 = _85 - 1;
_87 = _7;
_88 = _87 + _86;
_89 = (unsigned long) _88;
_90 = _89 >> 3;
_91 = _90 + 17592186044416;
_92 = (signed char *) _91;
_93 = *_92;
_94 = _93 != 0;
_95 = _89 & 7;
_96 = (signed char) _95;
_97 = _96 >= _93;
_98 = _94 & _97;
if (_98 != 0)
goto <bb 21>;
else
goto <bb 20>;
<bb 21>:
__asan_report_store1 (_89);
<bb 20>:
if (_84 != 0)
goto <bb 19>;
else
goto <bb 18>;
<bb 19>:
__asan_report_store1 (_75);
<bb 18>:
<bb 16>:
__builtin_memset (_7, 0, _5);
_99 = &foo[11];
_100 = (unsigned long) _99;
_101 = _100 >> 3;
_102 = _101 + 17592186044416;
_103 = (signed char *) _102;
_104 = *_103;
_105 = _104 != 0;
_106 = _100 & 7;
_107 = (signed char) _106;
_108 = _107 >= _104;
_109 = _105 & _108;
if (_109 != 0)
goto <bb 23>;
else
goto <bb 22>;
<bb 23>:
__asan_report_store1 (_100);
<bb 22>:
__sync_fetch_and_add_1 (&foo[11], 1);
_8 = l_2;
foo ={v} {CLOBBER};
<L1>:
return _8;
}
;; Function _GLOBAL__sub_I_00099_0_foo (_GLOBAL__sub_I_00099_0_foo, funcdef_no=1, decl_uid=1752, cgraph_uid=4)
_GLOBAL__sub_I_00099_0_foo ()
{
<bb 2>:
__asan_init ();
return;
}
gcc/
* gimple.h (is_gimple_builtin_call): Declare ...
* gimple.c (is_gimple_builtin_call): ... New public function.
* asan.c (insert_if_then_before_iter, instrument_mem_region_access,
instrument_strlen_call, maybe_instrument_builtin_call,
instrument_call): New static functions.
(create_cond_insert_point): Renamed
create_cond_insert_point_before_iter into this. Add a new
parameter to decide whether to insert the condition before or
after the statement iterator.
(build_check_stmt): Adjust for the new create_cond_insert_point.
Add a new parameter to decide whether to add the instrumentation
code before or after the statement iterator.
(instrument_assignment): Factorize from ...
(transform_statements): ... here. Use maybe_instrument_call to
instrument builtin function calls as well.
(instrument_derefs): Adjust for the new parameter of
build_check_stmt. Fix detection of bit-field access.
From-SVN: r193440
2012-11-12 16:53:25 +01:00
|
|
|
|
[asan] Avoid instrumenting duplicated memory access in the same basic block
Like what Address Sanitizer does in LLVM, this patch avoids instrumented
duplicated memory accesses in the same basic blocks.
The approach taken is very conservative, to keep the pass simple, for
a start.
A memory access is considered to be a pair made of an expression tree
representing the beginning of the memory region that is accessed and
a the size of the access, in byte. For now that size is either 1, 2,
4, 8 or 16 bytes.
The patch builds a hash table of the memory accesses that have been
instrumented in the current basic block. Then it walks the gimple
statements of the current basic block. For each statement, it tests
if the memory regions it references have already been instrumented.
If not, the statement is instrumented and each memory references that
are actually instrumented are added to the hash table. When a memory
region is accessed (usually through builtin functions like memset),
then what gets added to the hash table is actually two memory
accesses: one for the beginning of the region, and the other for the
its end.
When the patch crosses a function call that is not a built-in function
that we ought to instrument, the hash table is cleared, because that
function call can possibly e.g. free some memory that was instrumented.
Likewise, when a new basic block is visited, the hash table is
cleared. I guess we could be smarter than just unconditionally
clearing the hash table in this latter case, but this is what asan@llvm
does, and for now, I thought starting in a conservative manner might
have some value.
The hash table is destroyed at the end of the pass.
Bootstrapped and tested against trunk on x86-64-unknown-linux-gnu.
gcc/
* Makefile.in (asan.o): Add new dependency on hash-table.h
* asan.c (struct asan_mem_ref, struct mem_ref_hasher): New types.
(asan_mem_ref_init, asan_mem_ref_get_end, get_mem_ref_hash_table)
(has_stmt_been_instrumented_p, empty_mem_ref_hash_table)
(free_mem_ref_resources, has_mem_ref_been_instrumented)
(has_stmt_been_instrumented_p, update_mem_ref_hash_table)
(get_mem_ref_of_assignment): New functions.
(get_mem_refs_of_builtin_call): Extract from
instrument_builtin_call and tweak a little bit to make it fit with
the new signature.
(instrument_builtin_call): Use the new
get_mem_refs_of_builtin_call. Use gimple_call_builtin_p instead
of is_gimple_builtin_call.
(instrument_derefs, instrument_mem_region_access): Insert the
instrumented memory reference into the hash table.
(maybe_instrument_assignment): Renamed instrument_assignment into
this, and change it to advance the iterator when instrumentation
actually happened and return true in that case. This makes it
homogeneous with maybe_instrument_assignment, and thus give a
chance to callers to be more 'regular'.
(transform_statements): Clear the memory reference hash table
whenever we enter a new BB, when we cross a function call, or when
we are done transforming statements. Use
maybe_instrument_assignment instead of instrumentation. No more
need to special case maybe_instrument_assignment and advance the
iterator after calling it; it's now handled just like
maybe_instrument_call. Update comment.
gcc/testsuite/
* c-c++-common/asan/no-redundant-instrumentation-1.c: New test.
* testsuite/c-c++-common/asan/no-redundant-instrumentation-2.c: Likewise.
* testsuite/c-c++-common/asan/no-redundant-instrumentation-3.c: Likewise.
* testsuite/c-c++-common/asan/inc.c: Likewise.
From-SVN: r196008
2013-02-13 11:32:26 +01:00
|
|
|
/* Instrument the call to a built-in memory access function that is
|
|
|
|
pointed to by the iterator ITER.
|
Instrument built-in memory access function calls
This patch instruments many memory access patterns through builtins.
Basically, for a call like:
__builtin_memset (from, 0, n_bytes);
the patch would only instrument the accesses at the beginning and at
the end of the memory region [from, from + n_bytes]. This is the
strategy used by the llvm implementation of asan.
This instrumentation is done for all the memory access builtin
functions that expose a well specified memory region -- one that
explicitly states the number of bytes accessed in the region.
A special treatment is used for __builtin_strlen. The patch
instruments the access to the first byte of its argument, as well as
the access to the byte (of the argument) at the offset returned by
strlen.
For the __sync_* and __atomic* calls the patch instruments the access
to the bytes pointed to by the argument.
While doing this, I have added a new parameter to build_check_stmt to
decide whether to insert the instrumentation code before or after the
statement iterator. This allows us to do away with the
gsi_{next,prev} dance we were doing in the callers of this function.
Tested by running cc1 -fasan on variations of simple programs like:
int
foo ()
{
char foo[10] = {0};
foo[0] = 't';
foo[1] = 'e';
foo[2] = 's';
foo[3] = 't';
int l = __builtin_strlen (foo);
int n = sizeof (foo);
__builtin_memset (&foo[4], 0, n - 4);
__sync_fetch_and_add (&foo[11], 1);
return l;
}
and by staring at the gimple output which for this function is:
;; Function foo (foo, funcdef_no=0, decl_uid=1714, cgraph_uid=0)
foo ()
{
int n;
int l;
char foo[10];
int D.1725;
char * D.1724;
int D.1723;
long unsigned int D.1722;
int D.1721;
long unsigned int D.1720;
long unsigned int _1;
int _4;
long unsigned int _5;
int _6;
char * _7;
int _8;
char * _9;
unsigned long _10;
unsigned long _11;
unsigned long _12;
signed char * _13;
signed char _14;
_Bool _15;
unsigned long _16;
signed char _17;
_Bool _18;
_Bool _19;
char * _20;
unsigned long _21;
unsigned long _22;
unsigned long _23;
signed char * _24;
signed char _25;
_Bool _26;
unsigned long _27;
signed char _28;
_Bool _29;
_Bool _30;
char * _31;
unsigned long _32;
unsigned long _33;
unsigned long _34;
signed char * _35;
signed char _36;
_Bool _37;
unsigned long _38;
signed char _39;
_Bool _40;
_Bool _41;
char * _42;
unsigned long _43;
unsigned long _44;
unsigned long _45;
signed char * _46;
signed char _47;
_Bool _48;
unsigned long _49;
signed char _50;
_Bool _51;
_Bool _52;
char * _53;
unsigned long _54;
unsigned long _55;
unsigned long _56;
signed char * _57;
signed char _58;
_Bool _59;
unsigned long _60;
signed char _61;
_Bool _62;
_Bool _63;
char[10] * _64;
unsigned long _65;
unsigned long _66;
unsigned long _67;
signed char * _68;
signed char _69;
_Bool _70;
unsigned long _71;
signed char _72;
_Bool _73;
_Bool _74;
unsigned long _75;
unsigned long _76;
unsigned long _77;
signed char * _78;
signed char _79;
_Bool _80;
unsigned long _81;
signed char _82;
_Bool _83;
_Bool _84;
long unsigned int _85;
long unsigned int _86;
char * _87;
char * _88;
unsigned long _89;
unsigned long _90;
unsigned long _91;
signed char * _92;
signed char _93;
_Bool _94;
unsigned long _95;
signed char _96;
_Bool _97;
_Bool _98;
char * _99;
unsigned long _100;
unsigned long _101;
unsigned long _102;
signed char * _103;
signed char _104;
_Bool _105;
unsigned long _106;
signed char _107;
_Bool _108;
_Bool _109;
<bb 2>:
foo = {};
_9 = &foo[0];
_10 = (unsigned long) _9;
_11 = _10 >> 3;
_12 = _11 + 17592186044416;
_13 = (signed char *) _12;
_14 = *_13;
_15 = _14 != 0;
_16 = _10 & 7;
_17 = (signed char) _16;
_18 = _17 >= _14;
_19 = _15 & _18;
if (_19 != 0)
goto <bb 5>;
else
goto <bb 4>;
<bb 5>:
__asan_report_store1 (_10);
<bb 4>:
foo[0] = 116;
_20 = &foo[1];
_21 = (unsigned long) _20;
_22 = _21 >> 3;
_23 = _22 + 17592186044416;
_24 = (signed char *) _23;
_25 = *_24;
_26 = _25 != 0;
_27 = _21 & 7;
_28 = (signed char) _27;
_29 = _28 >= _25;
_30 = _26 & _29;
if (_30 != 0)
goto <bb 7>;
else
goto <bb 6>;
<bb 7>:
__asan_report_store1 (_21);
<bb 6>:
foo[1] = 101;
_31 = &foo[2];
_32 = (unsigned long) _31;
_33 = _32 >> 3;
_34 = _33 + 17592186044416;
_35 = (signed char *) _34;
_36 = *_35;
_37 = _36 != 0;
_38 = _32 & 7;
_39 = (signed char) _38;
_40 = _39 >= _36;
_41 = _37 & _40;
if (_41 != 0)
goto <bb 9>;
else
goto <bb 8>;
<bb 9>:
__asan_report_store1 (_32);
<bb 8>:
foo[2] = 115;
_42 = &foo[3];
_43 = (unsigned long) _42;
_44 = _43 >> 3;
_45 = _44 + 17592186044416;
_46 = (signed char *) _45;
_47 = *_46;
_48 = _47 != 0;
_49 = _43 & 7;
_50 = (signed char) _49;
_51 = _50 >= _47;
_52 = _48 & _51;
if (_52 != 0)
goto <bb 11>;
else
goto <bb 10>;
<bb 11>:
__asan_report_store1 (_43);
<bb 10>:
foo[3] = 116;
_53 = (char *) &foo;
_54 = (unsigned long) _53;
_55 = _54 >> 3;
_56 = _55 + 17592186044416;
_57 = (signed char *) _56;
_58 = *_57;
_59 = _58 != 0;
_60 = _54 & 7;
_61 = (signed char) _60;
_62 = _61 >= _58;
_63 = _59 & _62;
if (_63 != 0)
goto <bb 13>;
else
goto <bb 12>;
<bb 13>:
__asan_report_load1 (_54);
<bb 12>:
_1 = __builtin_strlen (&foo);
_64 = _53 + _1;
_65 = (unsigned long) _64;
_66 = _65 >> 3;
_67 = _66 + 17592186044416;
_68 = (signed char *) _67;
_69 = *_68;
_70 = _69 != 0;
_71 = _65 & 7;
_72 = (signed char) _71;
_73 = _72 >= _69;
_74 = _70 & _73;
if (_74 != 0)
goto <bb 15>;
else
goto <bb 14>;
<bb 15>:
__asan_report_load1 (_65);
<bb 14>:
l_2 = (int) _1;
n_3 = 10;
_4 = n_3 + -4;
_5 = (long unsigned int) _4;
_6 = l_2 + 1;
_7 = &foo[_6];
if (_5 != 0)
goto <bb 17>;
else
goto <bb 16>;
<bb 17>:
_75 = (unsigned long) _7;
_76 = _75 >> 3;
_77 = _76 + 17592186044416;
_78 = (signed char *) _77;
_79 = *_78;
_80 = _79 != 0;
_81 = _75 & 7;
_82 = (signed char) _81;
_83 = _82 >= _79;
_84 = _80 & _83;
_85 = _5;
_86 = _85 - 1;
_87 = _7;
_88 = _87 + _86;
_89 = (unsigned long) _88;
_90 = _89 >> 3;
_91 = _90 + 17592186044416;
_92 = (signed char *) _91;
_93 = *_92;
_94 = _93 != 0;
_95 = _89 & 7;
_96 = (signed char) _95;
_97 = _96 >= _93;
_98 = _94 & _97;
if (_98 != 0)
goto <bb 21>;
else
goto <bb 20>;
<bb 21>:
__asan_report_store1 (_89);
<bb 20>:
if (_84 != 0)
goto <bb 19>;
else
goto <bb 18>;
<bb 19>:
__asan_report_store1 (_75);
<bb 18>:
<bb 16>:
__builtin_memset (_7, 0, _5);
_99 = &foo[11];
_100 = (unsigned long) _99;
_101 = _100 >> 3;
_102 = _101 + 17592186044416;
_103 = (signed char *) _102;
_104 = *_103;
_105 = _104 != 0;
_106 = _100 & 7;
_107 = (signed char) _106;
_108 = _107 >= _104;
_109 = _105 & _108;
if (_109 != 0)
goto <bb 23>;
else
goto <bb 22>;
<bb 23>:
__asan_report_store1 (_100);
<bb 22>:
__sync_fetch_and_add_1 (&foo[11], 1);
_8 = l_2;
foo ={v} {CLOBBER};
<L1>:
return _8;
}
;; Function _GLOBAL__sub_I_00099_0_foo (_GLOBAL__sub_I_00099_0_foo, funcdef_no=1, decl_uid=1752, cgraph_uid=4)
_GLOBAL__sub_I_00099_0_foo ()
{
<bb 2>:
__asan_init ();
return;
}
gcc/
* gimple.h (is_gimple_builtin_call): Declare ...
* gimple.c (is_gimple_builtin_call): ... New public function.
* asan.c (insert_if_then_before_iter, instrument_mem_region_access,
instrument_strlen_call, maybe_instrument_builtin_call,
instrument_call): New static functions.
(create_cond_insert_point): Renamed
create_cond_insert_point_before_iter into this. Add a new
parameter to decide whether to insert the condition before or
after the statement iterator.
(build_check_stmt): Adjust for the new create_cond_insert_point.
Add a new parameter to decide whether to add the instrumentation
code before or after the statement iterator.
(instrument_assignment): Factorize from ...
(transform_statements): ... here. Use maybe_instrument_call to
instrument builtin function calls as well.
(instrument_derefs): Adjust for the new parameter of
build_check_stmt. Fix detection of bit-field access.
From-SVN: r193440
2012-11-12 16:53:25 +01:00
|
|
|
|
[asan] Avoid instrumenting duplicated memory access in the same basic block
Like what Address Sanitizer does in LLVM, this patch avoids instrumenting
duplicated memory accesses in the same basic blocks.
The approach taken is very conservative, to keep the pass simple, for
a start.
A memory access is considered to be a pair made of an expression tree
representing the beginning of the memory region that is accessed and
the size of the access, in bytes. For now that size is either 1, 2,
4, 8 or 16 bytes.
The patch builds a hash table of the memory accesses that have been
instrumented in the current basic block. Then it walks the gimple
statements of the current basic block. For each statement, it tests
if the memory regions it references have already been instrumented.
If not, the statement is instrumented and each memory references that
are actually instrumented are added to the hash table. When a memory
region is accessed (usually through builtin functions like memset),
then what gets added to the hash table is actually two memory
accesses: one for the beginning of the region, and the other for
its end.
When the patch crosses a function call that is not a built-in function
that we ought to instrument, the hash table is cleared, because that
function call can possibly e.g. free some memory that was instrumented.
Likewise, when a new basic block is visited, the hash table is
cleared. I guess we could be smarter than just unconditionally
clearing the hash table in this latter case, but this is what asan@llvm
does, and for now, I thought starting in a conservative manner might
have some value.
The hash table is destroyed at the end of the pass.
Bootstrapped and tested against trunk on x86-64-unknown-linux-gnu.
gcc/
* Makefile.in (asan.o): Add new dependency on hash-table.h
* asan.c (struct asan_mem_ref, struct mem_ref_hasher): New types.
(asan_mem_ref_init, asan_mem_ref_get_end, get_mem_ref_hash_table)
(has_stmt_been_instrumented_p, empty_mem_ref_hash_table)
(free_mem_ref_resources, has_mem_ref_been_instrumented)
(has_stmt_been_instrumented_p, update_mem_ref_hash_table)
(get_mem_ref_of_assignment): New functions.
(get_mem_refs_of_builtin_call): Extract from
instrument_builtin_call and tweak a little bit to make it fit with
the new signature.
(instrument_builtin_call): Use the new
get_mem_refs_of_builtin_call. Use gimple_call_builtin_p instead
of is_gimple_builtin_call.
(instrument_derefs, instrument_mem_region_access): Insert the
instrumented memory reference into the hash table.
(maybe_instrument_assignment): Renamed instrument_assignment into
this, and change it to advance the iterator when instrumentation
actually happened and return true in that case. This makes it
homogeneous with maybe_instrument_assignment, and thus give a
chance to callers to be more 'regular'.
(transform_statements): Clear the memory reference hash table
whenever we enter a new BB, when we cross a function call, or when
we are done transforming statements. Use
maybe_instrument_assignment instead of instrumentation. No more
need to special case maybe_instrument_assignment and advance the
iterator after calling it; it's now handled just like
maybe_instrument_call. Update comment.
gcc/testsuite/
* c-c++-common/asan/no-redundant-instrumentation-1.c: New test.
* testsuite/c-c++-common/asan/no-redundant-instrumentation-2.c: Likewise.
* testsuite/c-c++-common/asan/no-redundant-instrumentation-3.c: Likewise.
* testsuite/c-c++-common/asan/inc.c: Likewise.
From-SVN: r196008
2013-02-13 11:32:26 +01:00
|
|
|
Upon completion, return TRUE iff *ITER has been advanced to the
|
|
|
|
statement following the one it was originally pointing to. */
|
Instrument built-in memory access function calls
This patch instruments many memory access patterns through builtins.
Basically, for a call like:
__builtin_memset (from, 0, n_bytes);
the patch would only instrument the accesses at the beginning and at
the end of the memory region [from, from + n_bytes]. This is the
strategy used by the llvm implementation of asan.
This instrumentation is done for all the memory access builtin
functions that expose a well specified memory region -- one that
explicitly states the number of bytes accessed in the region.
A special treatment is used for __builtin_strlen. The patch
instruments the access to the first byte of its argument, as well as
the access to the byte (of the argument) at the offset returned by
strlen.
For the __sync_* and __atomic* calls the patch instruments the access
to the bytes pointed to by the argument.
While doing this, I have added a new parameter to build_check_stmt to
decide whether to insert the instrumentation code before or after the
statement iterator. This allows us to do away with the
gsi_{next,prev} dance we were doing in the callers of this function.
Tested by running cc1 -fasan on variations of simple programs like:
int
foo ()
{
char foo[10] = {0};
foo[0] = 't';
foo[1] = 'e';
foo[2] = 's';
foo[3] = 't';
int l = __builtin_strlen (foo);
int n = sizeof (foo);
__builtin_memset (&foo[4], 0, n - 4);
__sync_fetch_and_add (&foo[11], 1);
return l;
}
and by staring at the gimple output which for this function is:
;; Function foo (foo, funcdef_no=0, decl_uid=1714, cgraph_uid=0)
foo ()
{
int n;
int l;
char foo[10];
int D.1725;
char * D.1724;
int D.1723;
long unsigned int D.1722;
int D.1721;
long unsigned int D.1720;
long unsigned int _1;
int _4;
long unsigned int _5;
int _6;
char * _7;
int _8;
char * _9;
unsigned long _10;
unsigned long _11;
unsigned long _12;
signed char * _13;
signed char _14;
_Bool _15;
unsigned long _16;
signed char _17;
_Bool _18;
_Bool _19;
char * _20;
unsigned long _21;
unsigned long _22;
unsigned long _23;
signed char * _24;
signed char _25;
_Bool _26;
unsigned long _27;
signed char _28;
_Bool _29;
_Bool _30;
char * _31;
unsigned long _32;
unsigned long _33;
unsigned long _34;
signed char * _35;
signed char _36;
_Bool _37;
unsigned long _38;
signed char _39;
_Bool _40;
_Bool _41;
char * _42;
unsigned long _43;
unsigned long _44;
unsigned long _45;
signed char * _46;
signed char _47;
_Bool _48;
unsigned long _49;
signed char _50;
_Bool _51;
_Bool _52;
char * _53;
unsigned long _54;
unsigned long _55;
unsigned long _56;
signed char * _57;
signed char _58;
_Bool _59;
unsigned long _60;
signed char _61;
_Bool _62;
_Bool _63;
char[10] * _64;
unsigned long _65;
unsigned long _66;
unsigned long _67;
signed char * _68;
signed char _69;
_Bool _70;
unsigned long _71;
signed char _72;
_Bool _73;
_Bool _74;
unsigned long _75;
unsigned long _76;
unsigned long _77;
signed char * _78;
signed char _79;
_Bool _80;
unsigned long _81;
signed char _82;
_Bool _83;
_Bool _84;
long unsigned int _85;
long unsigned int _86;
char * _87;
char * _88;
unsigned long _89;
unsigned long _90;
unsigned long _91;
signed char * _92;
signed char _93;
_Bool _94;
unsigned long _95;
signed char _96;
_Bool _97;
_Bool _98;
char * _99;
unsigned long _100;
unsigned long _101;
unsigned long _102;
signed char * _103;
signed char _104;
_Bool _105;
unsigned long _106;
signed char _107;
_Bool _108;
_Bool _109;
<bb 2>:
foo = {};
_9 = &foo[0];
_10 = (unsigned long) _9;
_11 = _10 >> 3;
_12 = _11 + 17592186044416;
_13 = (signed char *) _12;
_14 = *_13;
_15 = _14 != 0;
_16 = _10 & 7;
_17 = (signed char) _16;
_18 = _17 >= _14;
_19 = _15 & _18;
if (_19 != 0)
goto <bb 5>;
else
goto <bb 4>;
<bb 5>:
__asan_report_store1 (_10);
<bb 4>:
foo[0] = 116;
_20 = &foo[1];
_21 = (unsigned long) _20;
_22 = _21 >> 3;
_23 = _22 + 17592186044416;
_24 = (signed char *) _23;
_25 = *_24;
_26 = _25 != 0;
_27 = _21 & 7;
_28 = (signed char) _27;
_29 = _28 >= _25;
_30 = _26 & _29;
if (_30 != 0)
goto <bb 7>;
else
goto <bb 6>;
<bb 7>:
__asan_report_store1 (_21);
<bb 6>:
foo[1] = 101;
_31 = &foo[2];
_32 = (unsigned long) _31;
_33 = _32 >> 3;
_34 = _33 + 17592186044416;
_35 = (signed char *) _34;
_36 = *_35;
_37 = _36 != 0;
_38 = _32 & 7;
_39 = (signed char) _38;
_40 = _39 >= _36;
_41 = _37 & _40;
if (_41 != 0)
goto <bb 9>;
else
goto <bb 8>;
<bb 9>:
__asan_report_store1 (_32);
<bb 8>:
foo[2] = 115;
_42 = &foo[3];
_43 = (unsigned long) _42;
_44 = _43 >> 3;
_45 = _44 + 17592186044416;
_46 = (signed char *) _45;
_47 = *_46;
_48 = _47 != 0;
_49 = _43 & 7;
_50 = (signed char) _49;
_51 = _50 >= _47;
_52 = _48 & _51;
if (_52 != 0)
goto <bb 11>;
else
goto <bb 10>;
<bb 11>:
__asan_report_store1 (_43);
<bb 10>:
foo[3] = 116;
_53 = (char *) &foo;
_54 = (unsigned long) _53;
_55 = _54 >> 3;
_56 = _55 + 17592186044416;
_57 = (signed char *) _56;
_58 = *_57;
_59 = _58 != 0;
_60 = _54 & 7;
_61 = (signed char) _60;
_62 = _61 >= _58;
_63 = _59 & _62;
if (_63 != 0)
goto <bb 13>;
else
goto <bb 12>;
<bb 13>:
__asan_report_load1 (_54);
<bb 12>:
_1 = __builtin_strlen (&foo);
_64 = _53 + _1;
_65 = (unsigned long) _64;
_66 = _65 >> 3;
_67 = _66 + 17592186044416;
_68 = (signed char *) _67;
_69 = *_68;
_70 = _69 != 0;
_71 = _65 & 7;
_72 = (signed char) _71;
_73 = _72 >= _69;
_74 = _70 & _73;
if (_74 != 0)
goto <bb 15>;
else
goto <bb 14>;
<bb 15>:
__asan_report_load1 (_65);
<bb 14>:
l_2 = (int) _1;
n_3 = 10;
_4 = n_3 + -4;
_5 = (long unsigned int) _4;
_6 = l_2 + 1;
_7 = &foo[_6];
if (_5 != 0)
goto <bb 17>;
else
goto <bb 16>;
<bb 17>:
_75 = (unsigned long) _7;
_76 = _75 >> 3;
_77 = _76 + 17592186044416;
_78 = (signed char *) _77;
_79 = *_78;
_80 = _79 != 0;
_81 = _75 & 7;
_82 = (signed char) _81;
_83 = _82 >= _79;
_84 = _80 & _83;
_85 = _5;
_86 = _85 - 1;
_87 = _7;
_88 = _87 + _86;
_89 = (unsigned long) _88;
_90 = _89 >> 3;
_91 = _90 + 17592186044416;
_92 = (signed char *) _91;
_93 = *_92;
_94 = _93 != 0;
_95 = _89 & 7;
_96 = (signed char) _95;
_97 = _96 >= _93;
_98 = _94 & _97;
if (_98 != 0)
goto <bb 21>;
else
goto <bb 20>;
<bb 21>:
__asan_report_store1 (_89);
<bb 20>:
if (_84 != 0)
goto <bb 19>;
else
goto <bb 18>;
<bb 19>:
__asan_report_store1 (_75);
<bb 18>:
<bb 16>:
__builtin_memset (_7, 0, _5);
_99 = &foo[11];
_100 = (unsigned long) _99;
_101 = _100 >> 3;
_102 = _101 + 17592186044416;
_103 = (signed char *) _102;
_104 = *_103;
_105 = _104 != 0;
_106 = _100 & 7;
_107 = (signed char) _106;
_108 = _107 >= _104;
_109 = _105 & _108;
if (_109 != 0)
goto <bb 23>;
else
goto <bb 22>;
<bb 23>:
__asan_report_store1 (_100);
<bb 22>:
__sync_fetch_and_add_1 (&foo[11], 1);
_8 = l_2;
foo ={v} {CLOBBER};
<L1>:
return _8;
}
;; Function _GLOBAL__sub_I_00099_0_foo (_GLOBAL__sub_I_00099_0_foo, funcdef_no=1, decl_uid=1752, cgraph_uid=4)
_GLOBAL__sub_I_00099_0_foo ()
{
<bb 2>:
__asan_init ();
return;
}
gcc/
* gimple.h (is_gimple_builtin_call): Declare ...
* gimple.c (is_gimple_builtin_call): ... New public function.
* asan.c (insert_if_then_before_iter, instrument_mem_region_access,
instrument_strlen_call, maybe_instrument_builtin_call,
instrument_call): New static functions.
(create_cond_insert_point): Renamed
create_cond_insert_point_before_iter into this. Add a new
parameter to decide whether to insert the condition before or
after the statement iterator.
(build_check_stmt): Adjust for the new create_cond_insert_point.
Add a new parameter to decide whether to add the instrumentation
code before or after the statement iterator.
(instrument_assignment): Factorize from ...
(transform_statements): ... here. Use maybe_instrument_call to
instrument builtin function calls as well.
(instrument_derefs): Adjust for the new parameter of
build_check_stmt. Fix detection of bit-field access.
From-SVN: r193440
2012-11-12 16:53:25 +01:00
|
|
|
|
[asan] Avoid instrumenting duplicated memory access in the same basic block
Like what Address Sanitizer does in LLVM, this patch avoids instrumenting
duplicated memory accesses in the same basic blocks.
The approach taken is very conservative, to keep the pass simple, for
a start.
A memory access is considered to be a pair made of an expression tree
representing the beginning of the memory region that is accessed and
the size of the access, in bytes. For now that size is either 1, 2,
4, 8 or 16 bytes.
The patch builds a hash table of the memory accesses that have been
instrumented in the current basic block. Then it walks the gimple
statements of the current basic block. For each statement, it tests
if the memory regions it references have already been instrumented.
If not, the statement is instrumented and each memory references that
are actually instrumented are added to the hash table. When a memory
region is accessed (usually through builtin functions like memset),
then what gets added to the hash table is actually two memory
accesses: one for the beginning of the region, and the other for
its end.
When the patch crosses a function call that is not a built-in function
that we ought to instrument, the hash table is cleared, because that
function call can possibly e.g. free some memory that was instrumented.
Likewise, when a new basic block is visited, the hash table is
cleared. I guess we could be smarter than just unconditionally
clearing the hash table in this latter case, but this is what asan@llvm
does, and for now, I thought starting in a conservative manner might
have some value.
The hash table is destroyed at the end of the pass.
Bootstrapped and tested against trunk on x86-64-unknown-linux-gnu.
gcc/
* Makefile.in (asan.o): Add new dependency on hash-table.h
* asan.c (struct asan_mem_ref, struct mem_ref_hasher): New types.
(asan_mem_ref_init, asan_mem_ref_get_end, get_mem_ref_hash_table)
(has_stmt_been_instrumented_p, empty_mem_ref_hash_table)
(free_mem_ref_resources, has_mem_ref_been_instrumented)
(has_stmt_been_instrumented_p, update_mem_ref_hash_table)
(get_mem_ref_of_assignment): New functions.
(get_mem_refs_of_builtin_call): Extract from
instrument_builtin_call and tweak a little bit to make it fit with
the new signature.
(instrument_builtin_call): Use the new
get_mem_refs_of_builtin_call. Use gimple_call_builtin_p instead
of is_gimple_builtin_call.
(instrument_derefs, instrument_mem_region_access): Insert the
instrumented memory reference into the hash table.
(maybe_instrument_assignment): Renamed instrument_assignment into
this, and change it to advance the iterator when instrumentation
actually happened and return true in that case. This makes it
homogeneous with maybe_instrument_assignment, and thus give a
chance to callers to be more 'regular'.
(transform_statements): Clear the memory reference hash table
whenever we enter a new BB, when we cross a function call, or when
we are done transforming statements. Use
maybe_instrument_assignment instead of instrumentation. No more
need to special case maybe_instrument_assignment and advance the
iterator after calling it; it's now handled just like
maybe_instrument_call. Update comment.
gcc/testsuite/
* c-c++-common/asan/no-redundant-instrumentation-1.c: New test.
* testsuite/c-c++-common/asan/no-redundant-instrumentation-2.c: Likewise.
* testsuite/c-c++-common/asan/no-redundant-instrumentation-3.c: Likewise.
* testsuite/c-c++-common/asan/inc.c: Likewise.
From-SVN: r196008
2013-02-13 11:32:26 +01:00
|
|
|
static bool
|
|
|
|
instrument_builtin_call (gimple_stmt_iterator *iter)
|
|
|
|
{
|
2014-01-09 08:31:05 +01:00
|
|
|
if (!ASAN_MEMINTRIN)
|
|
|
|
return false;
|
|
|
|
|
[asan] Avoid instrumenting duplicated memory access in the same basic block
Like what Address Sanitizer does in LLVM, this patch avoids instrumenting
duplicated memory accesses in the same basic blocks.
The approach taken is very conservative, to keep the pass simple, for
a start.
A memory access is considered to be a pair made of an expression tree
representing the beginning of the memory region that is accessed and
the size of the access, in bytes. For now that size is either 1, 2,
4, 8 or 16 bytes.
The patch builds a hash table of the memory accesses that have been
instrumented in the current basic block. Then it walks the gimple
statements of the current basic block. For each statement, it tests
if the memory regions it references have already been instrumented.
If not, the statement is instrumented and each memory references that
are actually instrumented are added to the hash table. When a memory
region is accessed (usually through builtin functions like memset),
then what gets added to the hash table is actually two memory
accesses: one for the beginning of the region, and the other for
its end.
When the patch crosses a function call that is not a built-in function
that we ought to instrument, the hash table is cleared, because that
function call can possibly e.g. free some memory that was instrumented.
Likewise, when a new basic block is visited, the hash table is
cleared. I guess we could be smarter than just unconditionally
clearing the hash table in this latter case, but this is what asan@llvm
does, and for now, I thought starting in a conservative manner might
have some value.
The hash table is destroyed at the end of the pass.
Bootstrapped and tested against trunk on x86-64-unknown-linux-gnu.
gcc/
* Makefile.in (asan.o): Add new dependency on hash-table.h
* asan.c (struct asan_mem_ref, struct mem_ref_hasher): New types.
(asan_mem_ref_init, asan_mem_ref_get_end, get_mem_ref_hash_table)
(has_stmt_been_instrumented_p, empty_mem_ref_hash_table)
(free_mem_ref_resources, has_mem_ref_been_instrumented)
(has_stmt_been_instrumented_p, update_mem_ref_hash_table)
(get_mem_ref_of_assignment): New functions.
(get_mem_refs_of_builtin_call): Extract from
instrument_builtin_call and tweak a little bit to make it fit with
the new signature.
(instrument_builtin_call): Use the new
get_mem_refs_of_builtin_call. Use gimple_call_builtin_p instead
of is_gimple_builtin_call.
(instrument_derefs, instrument_mem_region_access): Insert the
instrumented memory reference into the hash table.
(maybe_instrument_assignment): Renamed instrument_assignment into
this, and change it to advance the iterator when instrumentation
actually happened and return true in that case. This makes it
homogeneous with maybe_instrument_assignment, and thus give a
chance to callers to be more 'regular'.
(transform_statements): Clear the memory reference hash table
whenever we enter a new BB, when we cross a function call, or when
we are done transforming statements. Use
maybe_instrument_assignment instead of instrumentation. No more
need to special case maybe_instrument_assignment and advance the
iterator after calling it; it's now handled just like
maybe_instrument_call. Update comment.
gcc/testsuite/
* c-c++-common/asan/no-redundant-instrumentation-1.c: New test.
* testsuite/c-c++-common/asan/no-redundant-instrumentation-2.c: Likewise.
* testsuite/c-c++-common/asan/no-redundant-instrumentation-3.c: Likewise.
* testsuite/c-c++-common/asan/inc.c: Likewise.
From-SVN: r196008
2013-02-13 11:32:26 +01:00
|
|
|
bool iter_advanced_p = false;
|
2014-11-19 18:00:54 +01:00
|
|
|
gcall *call = as_a <gcall *> (gsi_stmt (*iter));
|
Instrument built-in memory access function calls
This patch instruments many memory access patterns through builtins.
Basically, for a call like:
__builtin_memset (from, 0, n_bytes);
the patch would only instrument the accesses at the beginning and at
the end of the memory region [from, from + n_bytes]. This is the
strategy used by the llvm implementation of asan.
This instrumentation is done for all the memory access builtin
functions that expose a well specified memory region -- one that
explicitly states the number of bytes accessed in the region.
A special treatment is used for __builtin_strlen. The patch
instruments the access to the first byte of its argument, as well as
the access to the byte (of the argument) at the offset returned by
strlen.
For the __sync_* and __atomic* calls the patch instruments the access
to the bytes pointed to by the argument.
While doing this, I have added a new parameter to build_check_stmt to
decide whether to insert the instrumentation code before or after the
statement iterator. This allows us to do away with the
gsi_{next,prev} dance we were doing in the callers of this function.
Tested by running cc1 -fasan on variations of simple programs like:
int
foo ()
{
char foo[10] = {0};
foo[0] = 't';
foo[1] = 'e';
foo[2] = 's';
foo[3] = 't';
int l = __builtin_strlen (foo);
int n = sizeof (foo);
__builtin_memset (&foo[4], 0, n - 4);
__sync_fetch_and_add (&foo[11], 1);
return l;
}
and by staring at the gimple output which for this function is:
;; Function foo (foo, funcdef_no=0, decl_uid=1714, cgraph_uid=0)
foo ()
{
int n;
int l;
char foo[10];
int D.1725;
char * D.1724;
int D.1723;
long unsigned int D.1722;
int D.1721;
long unsigned int D.1720;
long unsigned int _1;
int _4;
long unsigned int _5;
int _6;
char * _7;
int _8;
char * _9;
unsigned long _10;
unsigned long _11;
unsigned long _12;
signed char * _13;
signed char _14;
_Bool _15;
unsigned long _16;
signed char _17;
_Bool _18;
_Bool _19;
char * _20;
unsigned long _21;
unsigned long _22;
unsigned long _23;
signed char * _24;
signed char _25;
_Bool _26;
unsigned long _27;
signed char _28;
_Bool _29;
_Bool _30;
char * _31;
unsigned long _32;
unsigned long _33;
unsigned long _34;
signed char * _35;
signed char _36;
_Bool _37;
unsigned long _38;
signed char _39;
_Bool _40;
_Bool _41;
char * _42;
unsigned long _43;
unsigned long _44;
unsigned long _45;
signed char * _46;
signed char _47;
_Bool _48;
unsigned long _49;
signed char _50;
_Bool _51;
_Bool _52;
char * _53;
unsigned long _54;
unsigned long _55;
unsigned long _56;
signed char * _57;
signed char _58;
_Bool _59;
unsigned long _60;
signed char _61;
_Bool _62;
_Bool _63;
char[10] * _64;
unsigned long _65;
unsigned long _66;
unsigned long _67;
signed char * _68;
signed char _69;
_Bool _70;
unsigned long _71;
signed char _72;
_Bool _73;
_Bool _74;
unsigned long _75;
unsigned long _76;
unsigned long _77;
signed char * _78;
signed char _79;
_Bool _80;
unsigned long _81;
signed char _82;
_Bool _83;
_Bool _84;
long unsigned int _85;
long unsigned int _86;
char * _87;
char * _88;
unsigned long _89;
unsigned long _90;
unsigned long _91;
signed char * _92;
signed char _93;
_Bool _94;
unsigned long _95;
signed char _96;
_Bool _97;
_Bool _98;
char * _99;
unsigned long _100;
unsigned long _101;
unsigned long _102;
signed char * _103;
signed char _104;
_Bool _105;
unsigned long _106;
signed char _107;
_Bool _108;
_Bool _109;
<bb 2>:
foo = {};
_9 = &foo[0];
_10 = (unsigned long) _9;
_11 = _10 >> 3;
_12 = _11 + 17592186044416;
_13 = (signed char *) _12;
_14 = *_13;
_15 = _14 != 0;
_16 = _10 & 7;
_17 = (signed char) _16;
_18 = _17 >= _14;
_19 = _15 & _18;
if (_19 != 0)
goto <bb 5>;
else
goto <bb 4>;
<bb 5>:
__asan_report_store1 (_10);
<bb 4>:
foo[0] = 116;
_20 = &foo[1];
_21 = (unsigned long) _20;
_22 = _21 >> 3;
_23 = _22 + 17592186044416;
_24 = (signed char *) _23;
_25 = *_24;
_26 = _25 != 0;
_27 = _21 & 7;
_28 = (signed char) _27;
_29 = _28 >= _25;
_30 = _26 & _29;
if (_30 != 0)
goto <bb 7>;
else
goto <bb 6>;
<bb 7>:
__asan_report_store1 (_21);
<bb 6>:
foo[1] = 101;
_31 = &foo[2];
_32 = (unsigned long) _31;
_33 = _32 >> 3;
_34 = _33 + 17592186044416;
_35 = (signed char *) _34;
_36 = *_35;
_37 = _36 != 0;
_38 = _32 & 7;
_39 = (signed char) _38;
_40 = _39 >= _36;
_41 = _37 & _40;
if (_41 != 0)
goto <bb 9>;
else
goto <bb 8>;
<bb 9>:
__asan_report_store1 (_32);
<bb 8>:
foo[2] = 115;
_42 = &foo[3];
_43 = (unsigned long) _42;
_44 = _43 >> 3;
_45 = _44 + 17592186044416;
_46 = (signed char *) _45;
_47 = *_46;
_48 = _47 != 0;
_49 = _43 & 7;
_50 = (signed char) _49;
_51 = _50 >= _47;
_52 = _48 & _51;
if (_52 != 0)
goto <bb 11>;
else
goto <bb 10>;
<bb 11>:
__asan_report_store1 (_43);
<bb 10>:
foo[3] = 116;
_53 = (char *) &foo;
_54 = (unsigned long) _53;
_55 = _54 >> 3;
_56 = _55 + 17592186044416;
_57 = (signed char *) _56;
_58 = *_57;
_59 = _58 != 0;
_60 = _54 & 7;
_61 = (signed char) _60;
_62 = _61 >= _58;
_63 = _59 & _62;
if (_63 != 0)
goto <bb 13>;
else
goto <bb 12>;
<bb 13>:
__asan_report_load1 (_54);
<bb 12>:
_1 = __builtin_strlen (&foo);
_64 = _53 + _1;
_65 = (unsigned long) _64;
_66 = _65 >> 3;
_67 = _66 + 17592186044416;
_68 = (signed char *) _67;
_69 = *_68;
_70 = _69 != 0;
_71 = _65 & 7;
_72 = (signed char) _71;
_73 = _72 >= _69;
_74 = _70 & _73;
if (_74 != 0)
goto <bb 15>;
else
goto <bb 14>;
<bb 15>:
__asan_report_load1 (_65);
<bb 14>:
l_2 = (int) _1;
n_3 = 10;
_4 = n_3 + -4;
_5 = (long unsigned int) _4;
_6 = l_2 + 1;
_7 = &foo[_6];
if (_5 != 0)
goto <bb 17>;
else
goto <bb 16>;
<bb 17>:
_75 = (unsigned long) _7;
_76 = _75 >> 3;
_77 = _76 + 17592186044416;
_78 = (signed char *) _77;
_79 = *_78;
_80 = _79 != 0;
_81 = _75 & 7;
_82 = (signed char) _81;
_83 = _82 >= _79;
_84 = _80 & _83;
_85 = _5;
_86 = _85 - 1;
_87 = _7;
_88 = _87 + _86;
_89 = (unsigned long) _88;
_90 = _89 >> 3;
_91 = _90 + 17592186044416;
_92 = (signed char *) _91;
_93 = *_92;
_94 = _93 != 0;
_95 = _89 & 7;
_96 = (signed char) _95;
_97 = _96 >= _93;
_98 = _94 & _97;
if (_98 != 0)
goto <bb 21>;
else
goto <bb 20>;
<bb 21>:
__asan_report_store1 (_89);
<bb 20>:
if (_84 != 0)
goto <bb 19>;
else
goto <bb 18>;
<bb 19>:
__asan_report_store1 (_75);
<bb 18>:
<bb 16>:
__builtin_memset (_7, 0, _5);
_99 = &foo[11];
_100 = (unsigned long) _99;
_101 = _100 >> 3;
_102 = _101 + 17592186044416;
_103 = (signed char *) _102;
_104 = *_103;
_105 = _104 != 0;
_106 = _100 & 7;
_107 = (signed char) _106;
_108 = _107 >= _104;
_109 = _105 & _108;
if (_109 != 0)
goto <bb 23>;
else
goto <bb 22>;
<bb 23>:
__asan_report_store1 (_100);
<bb 22>:
__sync_fetch_and_add_1 (&foo[11], 1);
_8 = l_2;
foo ={v} {CLOBBER};
<L1>:
return _8;
}
;; Function _GLOBAL__sub_I_00099_0_foo (_GLOBAL__sub_I_00099_0_foo, funcdef_no=1, decl_uid=1752, cgraph_uid=4)
_GLOBAL__sub_I_00099_0_foo ()
{
<bb 2>:
__asan_init ();
return;
}
gcc/
* gimple.h (is_gimple_builtin_call): Declare ...
* gimple.c (is_gimple_builtin_call): ... New public function.
* asan.c (insert_if_then_before_iter, instrument_mem_region_access,
instrument_strlen_call, maybe_instrument_builtin_call,
instrument_call): New static functions.
(create_cond_insert_point): Renamed
create_cond_insert_point_before_iter into this. Add a new
parameter to decide whether to insert the condition before or
after the statement iterator.
(build_check_stmt): Adjust for the new create_cond_insert_point.
Add a new parameter to decide whether to add the instrumentation
code before or after the statement iterator.
(instrument_assignment): Factorize from ...
(transform_statements): ... here. Use maybe_instrument_call to
instrument builtin function calls as well.
(instrument_derefs): Adjust for the new parameter of
build_check_stmt. Fix detection of bit-field access.
From-SVN: r193440
2012-11-12 16:53:25 +01:00
|
|
|
|
[asan] Avoid instrumenting duplicated memory access in the same basic block
Like what Address Sanitizer does in LLVM, this patch avoids instrumented
duplicated memory accesses in the same basic blocks.
The approach taken is very conservative, to keep the pass simple, for
a start.
A memory access is considered to be a pair made of an expression tree
representing the beginning of the memory region that is accessed and
a the size of the access, in byte. For now that size is either 1, 2,
4, 8 or 16 bytes.
The patch builds a hash table of the memory accesses that have been
instrumented in the current basic block. Then it walks the gimple
statements of the current basic block. For each statement, it tests
if the memory regions it references have already been instrumented.
If not, the statement is instrumented and each memory references that
are actually instrumented are added to the hash table. When a memory
region is accessed (usually through builtin functions like memset),
then what gets added to the hash table is actually two memory
accesses: one for the beginning of the region, and the other for the
its end.
When the patch crosses a function call that is not a built-in function
that we ought to instrument, the hash table is cleared, because that
function call can possibly e.g free some memory that was instrumented.
Likewise, when a new basic block is visited, the hash table is
cleared. I guess we could be smarter than just unconditionally
clearing the hash table in this later case, but this is what asan@llvm
does, and for now, I thought starting in a conservative manner might
have some value.
The hash table is destroyed at the end of the pass.
Bootstrapped and tested against trunk on x86-64-unknown-linux-gnu.
gcc/
* Makefile.in (asan.o): Add new dependency on hash-table.h
* asan.c (struct asan_mem_ref, struct mem_ref_hasher): New types.
(asan_mem_ref_init, asan_mem_ref_get_end, get_mem_ref_hash_table)
(has_stmt_been_instrumented_p, empty_mem_ref_hash_table)
(free_mem_ref_resources, has_mem_ref_been_instrumented)
(has_stmt_been_instrumented_p, update_mem_ref_hash_table)
(get_mem_ref_of_assignment): New functions.
(get_mem_refs_of_builtin_call): Extract from
instrument_builtin_call and tweak a little bit to make it fit with
the new signature.
(instrument_builtin_call): Use the new
get_mem_refs_of_builtin_call. Use gimple_call_builtin_p instead
of is_gimple_builtin_call.
(instrument_derefs, instrument_mem_region_access): Insert the
instrumented memory reference into the hash table.
(maybe_instrument_assignment): Renamed instrument_assignment into
this, and change it to advance the iterator when instrumentation
actually happened and return true in that case. This makes it
homogeneous with maybe_instrument_assignment, and thus give a
chance to callers to be more 'regular'.
(transform_statements): Clear the memory reference hash table
whenever we enter a new BB, when we cross a function call, or when
we are done transforming statements. Use
maybe_instrument_assignment instead of instrumentation. No more
need to special case maybe_instrument_assignment and advance the
iterator after calling it; it's now handled just like
maybe_instrument_call. Update comment.
gcc/testsuite/
* c-c++-common/asan/no-redundant-instrumentation-1.c: New test.
* testsuite/c-c++-common/asan/no-redundant-instrumentation-2.c: Likewise.
* testsuite/c-c++-common/asan/no-redundant-instrumentation-3.c: Likewise.
* testsuite/c-c++-common/asan/inc.c: Likewise.
From-SVN: r196008
2013-02-13 11:32:26 +01:00
|
|
|
gcc_checking_assert (gimple_call_builtin_p (call, BUILT_IN_NORMAL));
|
Instrument built-in memory access function calls
This patch instruments many memory access patterns through builtins.
Basically, for a call like:
__builtin_memset (from, 0, n_bytes);
the patch would only instrument the accesses at the beginning and at
the end of the memory region [from, from + n_bytes]. This is the
strategy used by the llvm implementation of asan.
This instrumentation is done for all the memory access builtin
functions that expose a well specified memory region -- one that
explicitly states the number of bytes accessed in the region.
A special treatment is used for __builtin_strlen. The patch
instruments the access to the first byte of its argument, as well as
the access to the byte (of the argument) at the offset returned by
strlen.
For the __sync_* and __atomic* calls the patch instruments the access
to the bytes pointed to by the argument.
While doing this, I have added a new parameter to build_check_stmt to
decide whether to insert the instrumentation code before or after the
statement iterator. This allows us to do away with the
gsi_{next,prev} dance we were doing in the callers of this function.
Tested by running cc1 -fasan on variations of simple programs like:
int
foo ()
{
char foo[10] = {0};
foo[0] = 't';
foo[1] = 'e';
foo[2] = 's';
foo[3] = 't';
int l = __builtin_strlen (foo);
int n = sizeof (foo);
__builtin_memset (&foo[4], 0, n - 4);
__sync_fetch_and_add (&foo[11], 1);
return l;
}
and by starring at the gimple output which for this function is:
;; Function foo (foo, funcdef_no=0, decl_uid=1714, cgraph_uid=0)
foo ()
{
int n;
int l;
char foo[10];
int D.1725;
char * D.1724;
int D.1723;
long unsigned int D.1722;
int D.1721;
long unsigned int D.1720;
long unsigned int _1;
int _4;
long unsigned int _5;
int _6;
char * _7;
int _8;
char * _9;
unsigned long _10;
unsigned long _11;
unsigned long _12;
signed char * _13;
signed char _14;
_Bool _15;
unsigned long _16;
signed char _17;
_Bool _18;
_Bool _19;
char * _20;
unsigned long _21;
unsigned long _22;
unsigned long _23;
signed char * _24;
signed char _25;
_Bool _26;
unsigned long _27;
signed char _28;
_Bool _29;
_Bool _30;
char * _31;
unsigned long _32;
unsigned long _33;
unsigned long _34;
signed char * _35;
signed char _36;
_Bool _37;
unsigned long _38;
signed char _39;
_Bool _40;
_Bool _41;
char * _42;
unsigned long _43;
unsigned long _44;
unsigned long _45;
signed char * _46;
signed char _47;
_Bool _48;
unsigned long _49;
signed char _50;
_Bool _51;
_Bool _52;
char * _53;
unsigned long _54;
unsigned long _55;
unsigned long _56;
signed char * _57;
signed char _58;
_Bool _59;
unsigned long _60;
signed char _61;
_Bool _62;
_Bool _63;
char[10] * _64;
unsigned long _65;
unsigned long _66;
unsigned long _67;
signed char * _68;
signed char _69;
_Bool _70;
unsigned long _71;
signed char _72;
_Bool _73;
_Bool _74;
unsigned long _75;
unsigned long _76;
unsigned long _77;
signed char * _78;
signed char _79;
_Bool _80;
unsigned long _81;
signed char _82;
_Bool _83;
_Bool _84;
long unsigned int _85;
long unsigned int _86;
char * _87;
char * _88;
unsigned long _89;
unsigned long _90;
unsigned long _91;
signed char * _92;
signed char _93;
_Bool _94;
unsigned long _95;
signed char _96;
_Bool _97;
_Bool _98;
char * _99;
unsigned long _100;
unsigned long _101;
unsigned long _102;
signed char * _103;
signed char _104;
_Bool _105;
unsigned long _106;
signed char _107;
_Bool _108;
_Bool _109;
<bb 2>:
foo = {};
_9 = &foo[0];
_10 = (unsigned long) _9;
_11 = _10 >> 3;
_12 = _11 + 17592186044416;
_13 = (signed char *) _12;
_14 = *_13;
_15 = _14 != 0;
_16 = _10 & 7;
_17 = (signed char) _16;
_18 = _17 >= _14;
_19 = _15 & _18;
if (_19 != 0)
goto <bb 5>;
else
goto <bb 4>;
<bb 5>:
__asan_report_store1 (_10);
<bb 4>:
foo[0] = 116;
_20 = &foo[1];
_21 = (unsigned long) _20;
_22 = _21 >> 3;
_23 = _22 + 17592186044416;
_24 = (signed char *) _23;
_25 = *_24;
_26 = _25 != 0;
_27 = _21 & 7;
_28 = (signed char) _27;
_29 = _28 >= _25;
_30 = _26 & _29;
if (_30 != 0)
goto <bb 7>;
else
goto <bb 6>;
<bb 7>:
__asan_report_store1 (_21);
<bb 6>:
foo[1] = 101;
_31 = &foo[2];
_32 = (unsigned long) _31;
_33 = _32 >> 3;
_34 = _33 + 17592186044416;
_35 = (signed char *) _34;
_36 = *_35;
_37 = _36 != 0;
_38 = _32 & 7;
_39 = (signed char) _38;
_40 = _39 >= _36;
_41 = _37 & _40;
if (_41 != 0)
goto <bb 9>;
else
goto <bb 8>;
<bb 9>:
__asan_report_store1 (_32);
<bb 8>:
foo[2] = 115;
_42 = &foo[3];
_43 = (unsigned long) _42;
_44 = _43 >> 3;
_45 = _44 + 17592186044416;
_46 = (signed char *) _45;
_47 = *_46;
_48 = _47 != 0;
_49 = _43 & 7;
_50 = (signed char) _49;
_51 = _50 >= _47;
_52 = _48 & _51;
if (_52 != 0)
goto <bb 11>;
else
goto <bb 10>;
<bb 11>:
__asan_report_store1 (_43);
<bb 10>:
foo[3] = 116;
_53 = (char *) &foo;
_54 = (unsigned long) _53;
_55 = _54 >> 3;
_56 = _55 + 17592186044416;
_57 = (signed char *) _56;
_58 = *_57;
_59 = _58 != 0;
_60 = _54 & 7;
_61 = (signed char) _60;
_62 = _61 >= _58;
_63 = _59 & _62;
if (_63 != 0)
goto <bb 13>;
else
goto <bb 12>;
<bb 13>:
__asan_report_load1 (_54);
<bb 12>:
_1 = __builtin_strlen (&foo);
_64 = _53 + _1;
_65 = (unsigned long) _64;
_66 = _65 >> 3;
_67 = _66 + 17592186044416;
_68 = (signed char *) _67;
_69 = *_68;
_70 = _69 != 0;
_71 = _65 & 7;
_72 = (signed char) _71;
_73 = _72 >= _69;
_74 = _70 & _73;
if (_74 != 0)
goto <bb 15>;
else
goto <bb 14>;
<bb 15>:
__asan_report_load1 (_65);
<bb 14>:
l_2 = (int) _1;
n_3 = 10;
_4 = n_3 + -4;
_5 = (long unsigned int) _4;
_6 = l_2 + 1;
_7 = &foo[_6];
if (_5 != 0)
goto <bb 17>;
else
goto <bb 16>;
<bb 17>:
_75 = (unsigned long) _7;
_76 = _75 >> 3;
_77 = _76 + 17592186044416;
_78 = (signed char *) _77;
_79 = *_78;
_80 = _79 != 0;
_81 = _75 & 7;
_82 = (signed char) _81;
_83 = _82 >= _79;
_84 = _80 & _83;
_85 = _5;
_86 = _85 - 1;
_87 = _7;
_88 = _87 + _86;
_89 = (unsigned long) _88;
_90 = _89 >> 3;
_91 = _90 + 17592186044416;
_92 = (signed char *) _91;
_93 = *_92;
_94 = _93 != 0;
_95 = _89 & 7;
_96 = (signed char) _95;
_97 = _96 >= _93;
_98 = _94 & _97;
if (_98 != 0)
goto <bb 21>;
else
goto <bb 20>;
<bb 21>:
__asan_report_store1 (_89);
<bb 20>:
if (_84 != 0)
goto <bb 19>;
else
goto <bb 18>;
<bb 19>:
__asan_report_store1 (_75);
<bb 18>:
<bb 16>:
__builtin_memset (_7, 0, _5);
_99 = &foo[11];
_100 = (unsigned long) _99;
_101 = _100 >> 3;
_102 = _101 + 17592186044416;
_103 = (signed char *) _102;
_104 = *_103;
_105 = _104 != 0;
_106 = _100 & 7;
_107 = (signed char) _106;
_108 = _107 >= _104;
_109 = _105 & _108;
if (_109 != 0)
goto <bb 23>;
else
goto <bb 22>;
<bb 23>:
__asan_report_store1 (_100);
<bb 22>:
__sync_fetch_and_add_1 (&foo[11], 1);
_8 = l_2;
foo ={v} {CLOBBER};
<L1>:
return _8;
}
;; Function _GLOBAL__sub_I_00099_0_foo (_GLOBAL__sub_I_00099_0_foo, funcdef_no=1, decl_uid=1752, cgraph_uid=4)
_GLOBAL__sub_I_00099_0_foo ()
{
<bb 2>:
__asan_init ();
return;
}
gcc/
* gimple.h (is_gimple_builtin_call): Declare ...
* gimple.c (is_gimple_builtin_call): ... New public function.
* asan.c (insert_if_then_before_iter, instrument_mem_region_access,
instrument_strlen_call, maybe_instrument_builtin_call,
instrument_call): New static functions.
(create_cond_insert_point): Renamed
create_cond_insert_point_before_iter into this. Add a new
parameter to decide whether to insert the condition before or
after the statement iterator.
(build_check_stmt): Adjust for the new create_cond_insert_point.
Add a new parameter to decide whether to add the instrumentation
code before or after the statement iterator.
(instrument_assignment): Factorize from ...
(transform_statements): ... here. Use maybe_instrument_call to
instrument builtin function calls as well.
(instrument_derefs): Adjust for the new parameter of
build_check_stmt. Fix detection of bit-field access.
From-SVN: r193440
2012-11-12 16:53:25 +01:00
|
|
|
|
[asan] Avoid instrumenting duplicated memory access in the same basic block
Like what Address Sanitizer does in LLVM, this patch avoids instrumented
duplicated memory accesses in the same basic blocks.
The approach taken is very conservative, to keep the pass simple, for
a start.
A memory access is considered to be a pair made of an expression tree
representing the beginning of the memory region that is accessed and
a the size of the access, in byte. For now that size is either 1, 2,
4, 8 or 16 bytes.
The patch builds a hash table of the memory accesses that have been
instrumented in the current basic block. Then it walks the gimple
statements of the current basic block. For each statement, it tests
if the memory regions it references have already been instrumented.
If not, the statement is instrumented and each memory references that
are actually instrumented are added to the hash table. When a memory
region is accessed (usually through builtin functions like memset),
then what gets added to the hash table is actually two memory
accesses: one for the beginning of the region, and the other for the
its end.
When the patch crosses a function call that is not a built-in function
that we ought to instrument, the hash table is cleared, because that
function call can possibly e.g free some memory that was instrumented.
Likewise, when a new basic block is visited, the hash table is
cleared. I guess we could be smarter than just unconditionally
clearing the hash table in this later case, but this is what asan@llvm
does, and for now, I thought starting in a conservative manner might
have some value.
The hash table is destroyed at the end of the pass.
Bootstrapped and tested against trunk on x86-64-unknown-linux-gnu.
gcc/
* Makefile.in (asan.o): Add new dependency on hash-table.h
* asan.c (struct asan_mem_ref, struct mem_ref_hasher): New types.
(asan_mem_ref_init, asan_mem_ref_get_end, get_mem_ref_hash_table)
(has_stmt_been_instrumented_p, empty_mem_ref_hash_table)
(free_mem_ref_resources, has_mem_ref_been_instrumented)
(has_stmt_been_instrumented_p, update_mem_ref_hash_table)
(get_mem_ref_of_assignment): New functions.
(get_mem_refs_of_builtin_call): Extract from
instrument_builtin_call and tweak a little bit to make it fit with
the new signature.
(instrument_builtin_call): Use the new
get_mem_refs_of_builtin_call. Use gimple_call_builtin_p instead
of is_gimple_builtin_call.
(instrument_derefs, instrument_mem_region_access): Insert the
instrumented memory reference into the hash table.
(maybe_instrument_assignment): Renamed instrument_assignment into
this, and change it to advance the iterator when instrumentation
actually happened and return true in that case. This makes it
homogeneous with maybe_instrument_assignment, and thus give a
chance to callers to be more 'regular'.
(transform_statements): Clear the memory reference hash table
whenever we enter a new BB, when we cross a function call, or when
we are done transforming statements. Use
maybe_instrument_assignment instead of instrumentation. No more
need to special case maybe_instrument_assignment and advance the
iterator after calling it; it's now handled just like
maybe_instrument_call. Update comment.
gcc/testsuite/
* c-c++-common/asan/no-redundant-instrumentation-1.c: New test.
* testsuite/c-c++-common/asan/no-redundant-instrumentation-2.c: Likewise.
* testsuite/c-c++-common/asan/no-redundant-instrumentation-3.c: Likewise.
* testsuite/c-c++-common/asan/inc.c: Likewise.
From-SVN: r196008
2013-02-13 11:32:26 +01:00
|
|
|
location_t loc = gimple_location (call);
|
Instrument built-in memory access function calls
This patch instruments many memory access patterns through builtins.
Basically, for a call like:
__builtin_memset (from, 0, n_bytes);
the patch would only instrument the accesses at the beginning and at
the end of the memory region [from, from + n_bytes]. This is the
strategy used by the llvm implementation of asan.
This instrumentation is done for all the memory access builtin
functions that expose a well specified memory region -- one that
explicitly states the number of bytes accessed in the region.
A special treatment is used for __builtin_strlen. The patch
instruments the access to the first byte of its argument, as well as
the access to the byte (of the argument) at the offset returned by
strlen.
For the __sync_* and __atomic* calls the patch instruments the access
to the bytes pointed to by the argument.
While doing this, I have added a new parameter to build_check_stmt to
decide whether to insert the instrumentation code before or after the
statement iterator. This allows us to do away with the
gsi_{next,prev} dance we were doing in the callers of this function.
Tested by running cc1 -fasan on variations of simple programs like:
int
foo ()
{
char foo[10] = {0};
foo[0] = 't';
foo[1] = 'e';
foo[2] = 's';
foo[3] = 't';
int l = __builtin_strlen (foo);
int n = sizeof (foo);
__builtin_memset (&foo[4], 0, n - 4);
__sync_fetch_and_add (&foo[11], 1);
return l;
}
and by starring at the gimple output which for this function is:
;; Function foo (foo, funcdef_no=0, decl_uid=1714, cgraph_uid=0)
foo ()
{
int n;
int l;
char foo[10];
int D.1725;
char * D.1724;
int D.1723;
long unsigned int D.1722;
int D.1721;
long unsigned int D.1720;
long unsigned int _1;
int _4;
long unsigned int _5;
int _6;
char * _7;
int _8;
char * _9;
unsigned long _10;
unsigned long _11;
unsigned long _12;
signed char * _13;
signed char _14;
_Bool _15;
unsigned long _16;
signed char _17;
_Bool _18;
_Bool _19;
char * _20;
unsigned long _21;
unsigned long _22;
unsigned long _23;
signed char * _24;
signed char _25;
_Bool _26;
unsigned long _27;
signed char _28;
_Bool _29;
_Bool _30;
char * _31;
unsigned long _32;
unsigned long _33;
unsigned long _34;
signed char * _35;
signed char _36;
_Bool _37;
unsigned long _38;
signed char _39;
_Bool _40;
_Bool _41;
char * _42;
unsigned long _43;
unsigned long _44;
unsigned long _45;
signed char * _46;
signed char _47;
_Bool _48;
unsigned long _49;
signed char _50;
_Bool _51;
_Bool _52;
char * _53;
unsigned long _54;
unsigned long _55;
unsigned long _56;
signed char * _57;
signed char _58;
_Bool _59;
unsigned long _60;
signed char _61;
_Bool _62;
_Bool _63;
char[10] * _64;
unsigned long _65;
unsigned long _66;
unsigned long _67;
signed char * _68;
signed char _69;
_Bool _70;
unsigned long _71;
signed char _72;
_Bool _73;
_Bool _74;
unsigned long _75;
unsigned long _76;
unsigned long _77;
signed char * _78;
signed char _79;
_Bool _80;
unsigned long _81;
signed char _82;
_Bool _83;
_Bool _84;
long unsigned int _85;
long unsigned int _86;
char * _87;
char * _88;
unsigned long _89;
unsigned long _90;
unsigned long _91;
signed char * _92;
signed char _93;
_Bool _94;
unsigned long _95;
signed char _96;
_Bool _97;
_Bool _98;
char * _99;
unsigned long _100;
unsigned long _101;
unsigned long _102;
signed char * _103;
signed char _104;
_Bool _105;
unsigned long _106;
signed char _107;
_Bool _108;
_Bool _109;
<bb 2>:
foo = {};
_9 = &foo[0];
_10 = (unsigned long) _9;
_11 = _10 >> 3;
_12 = _11 + 17592186044416;
_13 = (signed char *) _12;
_14 = *_13;
_15 = _14 != 0;
_16 = _10 & 7;
_17 = (signed char) _16;
_18 = _17 >= _14;
_19 = _15 & _18;
if (_19 != 0)
goto <bb 5>;
else
goto <bb 4>;
<bb 5>:
__asan_report_store1 (_10);
<bb 4>:
foo[0] = 116;
_20 = &foo[1];
_21 = (unsigned long) _20;
_22 = _21 >> 3;
_23 = _22 + 17592186044416;
_24 = (signed char *) _23;
_25 = *_24;
_26 = _25 != 0;
_27 = _21 & 7;
_28 = (signed char) _27;
_29 = _28 >= _25;
_30 = _26 & _29;
if (_30 != 0)
goto <bb 7>;
else
goto <bb 6>;
<bb 7>:
__asan_report_store1 (_21);
<bb 6>:
foo[1] = 101;
_31 = &foo[2];
_32 = (unsigned long) _31;
_33 = _32 >> 3;
_34 = _33 + 17592186044416;
_35 = (signed char *) _34;
_36 = *_35;
_37 = _36 != 0;
_38 = _32 & 7;
_39 = (signed char) _38;
_40 = _39 >= _36;
_41 = _37 & _40;
if (_41 != 0)
goto <bb 9>;
else
goto <bb 8>;
<bb 9>:
__asan_report_store1 (_32);
<bb 8>:
foo[2] = 115;
_42 = &foo[3];
_43 = (unsigned long) _42;
_44 = _43 >> 3;
_45 = _44 + 17592186044416;
_46 = (signed char *) _45;
_47 = *_46;
_48 = _47 != 0;
_49 = _43 & 7;
_50 = (signed char) _49;
_51 = _50 >= _47;
_52 = _48 & _51;
if (_52 != 0)
goto <bb 11>;
else
goto <bb 10>;
<bb 11>:
__asan_report_store1 (_43);
<bb 10>:
foo[3] = 116;
_53 = (char *) &foo;
_54 = (unsigned long) _53;
_55 = _54 >> 3;
_56 = _55 + 17592186044416;
_57 = (signed char *) _56;
_58 = *_57;
_59 = _58 != 0;
_60 = _54 & 7;
_61 = (signed char) _60;
_62 = _61 >= _58;
_63 = _59 & _62;
if (_63 != 0)
goto <bb 13>;
else
goto <bb 12>;
<bb 13>:
__asan_report_load1 (_54);
<bb 12>:
_1 = __builtin_strlen (&foo);
_64 = _53 + _1;
_65 = (unsigned long) _64;
_66 = _65 >> 3;
_67 = _66 + 17592186044416;
_68 = (signed char *) _67;
_69 = *_68;
_70 = _69 != 0;
_71 = _65 & 7;
_72 = (signed char) _71;
_73 = _72 >= _69;
_74 = _70 & _73;
if (_74 != 0)
goto <bb 15>;
else
goto <bb 14>;
<bb 15>:
__asan_report_load1 (_65);
<bb 14>:
l_2 = (int) _1;
n_3 = 10;
_4 = n_3 + -4;
_5 = (long unsigned int) _4;
_6 = l_2 + 1;
_7 = &foo[_6];
if (_5 != 0)
goto <bb 17>;
else
goto <bb 16>;
<bb 17>:
_75 = (unsigned long) _7;
_76 = _75 >> 3;
_77 = _76 + 17592186044416;
_78 = (signed char *) _77;
_79 = *_78;
_80 = _79 != 0;
_81 = _75 & 7;
_82 = (signed char) _81;
_83 = _82 >= _79;
_84 = _80 & _83;
_85 = _5;
_86 = _85 - 1;
_87 = _7;
_88 = _87 + _86;
_89 = (unsigned long) _88;
_90 = _89 >> 3;
_91 = _90 + 17592186044416;
_92 = (signed char *) _91;
_93 = *_92;
_94 = _93 != 0;
_95 = _89 & 7;
_96 = (signed char) _95;
_97 = _96 >= _93;
_98 = _94 & _97;
if (_98 != 0)
goto <bb 21>;
else
goto <bb 20>;
<bb 21>:
__asan_report_store1 (_89);
<bb 20>:
if (_84 != 0)
goto <bb 19>;
else
goto <bb 18>;
<bb 19>:
__asan_report_store1 (_75);
<bb 18>:
<bb 16>:
__builtin_memset (_7, 0, _5);
_99 = &foo[11];
_100 = (unsigned long) _99;
_101 = _100 >> 3;
_102 = _101 + 17592186044416;
_103 = (signed char *) _102;
_104 = *_103;
_105 = _104 != 0;
_106 = _100 & 7;
_107 = (signed char) _106;
_108 = _107 >= _104;
_109 = _105 & _108;
if (_109 != 0)
goto <bb 23>;
else
goto <bb 22>;
<bb 23>:
__asan_report_store1 (_100);
<bb 22>:
__sync_fetch_and_add_1 (&foo[11], 1);
_8 = l_2;
foo ={v} {CLOBBER};
<L1>:
return _8;
}
;; Function _GLOBAL__sub_I_00099_0_foo (_GLOBAL__sub_I_00099_0_foo, funcdef_no=1, decl_uid=1752, cgraph_uid=4)
_GLOBAL__sub_I_00099_0_foo ()
{
<bb 2>:
__asan_init ();
return;
}
gcc/
* gimple.h (is_gimple_builtin_call): Declare ...
* gimple.c (is_gimple_builtin_call): ... New public function.
* asan.c (insert_if_then_before_iter, instrument_mem_region_access,
instrument_strlen_call, maybe_instrument_builtin_call,
instrument_call): New static functions.
(create_cond_insert_point): Renamed
create_cond_insert_point_before_iter into this. Add a new
parameter to decide whether to insert the condition before or
after the statement iterator.
(build_check_stmt): Adjust for the new create_cond_insert_point.
Add a new parameter to decide whether to add the instrumentation
code before or after the statement iterator.
(instrument_assignment): Factorize from ...
(transform_statements): ... here. Use maybe_instrument_call to
instrument builtin function calls as well.
(instrument_derefs): Adjust for the new parameter of
build_check_stmt. Fix detection of bit-field access.
From-SVN: r193440
2012-11-12 16:53:25 +01:00
|
|
|
|
2014-10-28 13:36:54 +01:00
|
|
|
asan_mem_ref src0, src1, dest;
|
|
|
|
asan_mem_ref_init (&src0, NULL, 1);
|
|
|
|
asan_mem_ref_init (&src1, NULL, 1);
|
|
|
|
asan_mem_ref_init (&dest, NULL, 1);
|
[asan] Avoid instrumenting duplicated memory access in the same basic block
Like what Address Sanitizer does in LLVM, this patch avoids instrumented
duplicated memory accesses in the same basic blocks.
The approach taken is very conservative, to keep the pass simple, for
a start.
A memory access is considered to be a pair made of an expression tree
representing the beginning of the memory region that is accessed and
a the size of the access, in byte. For now that size is either 1, 2,
4, 8 or 16 bytes.
The patch builds a hash table of the memory accesses that have been
instrumented in the current basic block. Then it walks the gimple
statements of the current basic block. For each statement, it tests
if the memory regions it references have already been instrumented.
If not, the statement is instrumented and each memory references that
are actually instrumented are added to the hash table. When a memory
region is accessed (usually through builtin functions like memset),
then what gets added to the hash table is actually two memory
accesses: one for the beginning of the region, and the other for the
its end.
When the patch crosses a function call that is not a built-in function
that we ought to instrument, the hash table is cleared, because that
function call can possibly e.g free some memory that was instrumented.
Likewise, when a new basic block is visited, the hash table is
cleared. I guess we could be smarter than just unconditionally
clearing the hash table in this later case, but this is what asan@llvm
does, and for now, I thought starting in a conservative manner might
have some value.
The hash table is destroyed at the end of the pass.
Bootstrapped and tested against trunk on x86-64-unknown-linux-gnu.
gcc/
* Makefile.in (asan.o): Add new dependency on hash-table.h
* asan.c (struct asan_mem_ref, struct mem_ref_hasher): New types.
(asan_mem_ref_init, asan_mem_ref_get_end, get_mem_ref_hash_table)
(has_stmt_been_instrumented_p, empty_mem_ref_hash_table)
(free_mem_ref_resources, has_mem_ref_been_instrumented)
(has_stmt_been_instrumented_p, update_mem_ref_hash_table)
(get_mem_ref_of_assignment): New functions.
(get_mem_refs_of_builtin_call): Extract from
instrument_builtin_call and tweak a little bit to make it fit with
the new signature.
(instrument_builtin_call): Use the new
get_mem_refs_of_builtin_call. Use gimple_call_builtin_p instead
of is_gimple_builtin_call.
(instrument_derefs, instrument_mem_region_access): Insert the
instrumented memory reference into the hash table.
(maybe_instrument_assignment): Renamed instrument_assignment into
this, and change it to advance the iterator when instrumentation
actually happened and return true in that case. This makes it
homogeneous with maybe_instrument_assignment, and thus give a
chance to callers to be more 'regular'.
(transform_statements): Clear the memory reference hash table
whenever we enter a new BB, when we cross a function call, or when
we are done transforming statements. Use
maybe_instrument_assignment instead of instrumentation. No more
need to special case maybe_instrument_assignment and advance the
iterator after calling it; it's now handled just like
maybe_instrument_call. Update comment.
gcc/testsuite/
* c-c++-common/asan/no-redundant-instrumentation-1.c: New test.
* testsuite/c-c++-common/asan/no-redundant-instrumentation-2.c: Likewise.
* testsuite/c-c++-common/asan/no-redundant-instrumentation-3.c: Likewise.
* testsuite/c-c++-common/asan/inc.c: Likewise.
From-SVN: r196008
2013-02-13 11:32:26 +01:00
|
|
|
|
2014-10-28 13:36:54 +01:00
|
|
|
tree src0_len = NULL_TREE, src1_len = NULL_TREE, dest_len = NULL_TREE;
|
|
|
|
bool src0_is_store = false, src1_is_store = false, dest_is_store = false,
|
|
|
|
dest_is_deref = false, intercepted_p = true;
|
[asan] Avoid instrumenting duplicated memory access in the same basic block
Like what Address Sanitizer does in LLVM, this patch avoids instrumented
duplicated memory accesses in the same basic blocks.
The approach taken is very conservative, to keep the pass simple, for
a start.
A memory access is considered to be a pair made of an expression tree
representing the beginning of the memory region that is accessed and
a the size of the access, in byte. For now that size is either 1, 2,
4, 8 or 16 bytes.
The patch builds a hash table of the memory accesses that have been
instrumented in the current basic block. Then it walks the gimple
statements of the current basic block. For each statement, it tests
if the memory regions it references have already been instrumented.
If not, the statement is instrumented and each memory references that
are actually instrumented are added to the hash table. When a memory
region is accessed (usually through builtin functions like memset),
then what gets added to the hash table is actually two memory
accesses: one for the beginning of the region, and the other for the
its end.
When the patch crosses a function call that is not a built-in function
that we ought to instrument, the hash table is cleared, because that
function call can possibly e.g free some memory that was instrumented.
Likewise, when a new basic block is visited, the hash table is
cleared. I guess we could be smarter than just unconditionally
clearing the hash table in this later case, but this is what asan@llvm
does, and for now, I thought starting in a conservative manner might
have some value.
The hash table is destroyed at the end of the pass.
Bootstrapped and tested against trunk on x86-64-unknown-linux-gnu.
gcc/
* Makefile.in (asan.o): Add new dependency on hash-table.h
* asan.c (struct asan_mem_ref, struct mem_ref_hasher): New types.
(asan_mem_ref_init, asan_mem_ref_get_end, get_mem_ref_hash_table)
(has_stmt_been_instrumented_p, empty_mem_ref_hash_table)
(free_mem_ref_resources, has_mem_ref_been_instrumented)
(has_stmt_been_instrumented_p, update_mem_ref_hash_table)
(get_mem_ref_of_assignment): New functions.
(get_mem_refs_of_builtin_call): Extract from
instrument_builtin_call and tweak a little bit to make it fit with
the new signature.
(instrument_builtin_call): Use the new
get_mem_refs_of_builtin_call. Use gimple_call_builtin_p instead
of is_gimple_builtin_call.
(instrument_derefs, instrument_mem_region_access): Insert the
instrumented memory reference into the hash table.
(maybe_instrument_assignment): Renamed instrument_assignment into
this, and change it to advance the iterator when instrumentation
actually happened and return true in that case. This makes it
homogeneous with maybe_instrument_assignment, and thus give a
chance to callers to be more 'regular'.
(transform_statements): Clear the memory reference hash table
whenever we enter a new BB, when we cross a function call, or when
we are done transforming statements. Use
maybe_instrument_assignment instead of instrumentation. No more
need to special case maybe_instrument_assignment and advance the
iterator after calling it; it's now handled just like
maybe_instrument_call. Update comment.
gcc/testsuite/
* c-c++-common/asan/no-redundant-instrumentation-1.c: New test.
* testsuite/c-c++-common/asan/no-redundant-instrumentation-2.c: Likewise.
* testsuite/c-c++-common/asan/no-redundant-instrumentation-3.c: Likewise.
* testsuite/c-c++-common/asan/inc.c: Likewise.
From-SVN: r196008
2013-02-13 11:32:26 +01:00
|
|
|
|
2014-10-28 13:36:54 +01:00
|
|
|
if (get_mem_refs_of_builtin_call (call,
|
|
|
|
&src0, &src0_len, &src0_is_store,
|
|
|
|
&src1, &src1_len, &src1_is_store,
|
|
|
|
&dest, &dest_len, &dest_is_store,
|
|
|
|
&dest_is_deref, &intercepted_p))
|
|
|
|
{
|
|
|
|
if (dest_is_deref)
|
[asan] Avoid instrumenting duplicated memory access in the same basic block
Like what Address Sanitizer does in LLVM, this patch avoids instrumented
duplicated memory accesses in the same basic blocks.
The approach taken is very conservative, to keep the pass simple, for
a start.
A memory access is considered to be a pair made of an expression tree
representing the beginning of the memory region that is accessed and
a the size of the access, in byte. For now that size is either 1, 2,
4, 8 or 16 bytes.
The patch builds a hash table of the memory accesses that have been
instrumented in the current basic block. Then it walks the gimple
statements of the current basic block. For each statement, it tests
if the memory regions it references have already been instrumented.
If not, the statement is instrumented and each memory references that
are actually instrumented are added to the hash table. When a memory
region is accessed (usually through builtin functions like memset),
then what gets added to the hash table is actually two memory
accesses: one for the beginning of the region, and the other for the
its end.
When the patch crosses a function call that is not a built-in function
that we ought to instrument, the hash table is cleared, because that
function call can possibly e.g free some memory that was instrumented.
Likewise, when a new basic block is visited, the hash table is
cleared. I guess we could be smarter than just unconditionally
clearing the hash table in this later case, but this is what asan@llvm
does, and for now, I thought starting in a conservative manner might
have some value.
The hash table is destroyed at the end of the pass.
Bootstrapped and tested against trunk on x86-64-unknown-linux-gnu.
gcc/
* Makefile.in (asan.o): Add new dependency on hash-table.h
* asan.c (struct asan_mem_ref, struct mem_ref_hasher): New types.
(asan_mem_ref_init, asan_mem_ref_get_end, get_mem_ref_hash_table)
(has_stmt_been_instrumented_p, empty_mem_ref_hash_table)
(free_mem_ref_resources, has_mem_ref_been_instrumented)
(has_stmt_been_instrumented_p, update_mem_ref_hash_table)
(get_mem_ref_of_assignment): New functions.
(get_mem_refs_of_builtin_call): Extract from
instrument_builtin_call and tweak a little bit to make it fit with
the new signature.
(instrument_builtin_call): Use the new
get_mem_refs_of_builtin_call. Use gimple_call_builtin_p instead
of is_gimple_builtin_call.
(instrument_derefs, instrument_mem_region_access): Insert the
instrumented memory reference into the hash table.
(maybe_instrument_assignment): Renamed instrument_assignment into
this, and change it to advance the iterator when instrumentation
actually happened and return true in that case. This makes it
homogeneous with maybe_instrument_assignment, and thus give a
chance to callers to be more 'regular'.
(transform_statements): Clear the memory reference hash table
whenever we enter a new BB, when we cross a function call, or when
we are done transforming statements. Use
maybe_instrument_assignment instead of instrumentation. No more
need to special case maybe_instrument_assignment and advance the
iterator after calling it; it's now handled just like
maybe_instrument_call. Update comment.
gcc/testsuite/
* c-c++-common/asan/no-redundant-instrumentation-1.c: New test.
* testsuite/c-c++-common/asan/no-redundant-instrumentation-2.c: Likewise.
* testsuite/c-c++-common/asan/no-redundant-instrumentation-3.c: Likewise.
* testsuite/c-c++-common/asan/inc.c: Likewise.
From-SVN: r196008
2013-02-13 11:32:26 +01:00
|
|
|
{
|
2014-10-28 13:36:54 +01:00
|
|
|
instrument_derefs (iter, dest.start, loc, dest_is_store);
|
|
|
|
gsi_next (iter);
|
|
|
|
iter_advanced_p = true;
|
|
|
|
}
|
|
|
|
else if (!intercepted_p
|
|
|
|
&& (src0_len || src1_len || dest_len))
|
|
|
|
{
|
|
|
|
if (src0.start != NULL_TREE)
|
|
|
|
instrument_mem_region_access (src0.start, src0_len,
|
|
|
|
iter, loc, /*is_store=*/false);
|
|
|
|
if (src1.start != NULL_TREE)
|
|
|
|
instrument_mem_region_access (src1.start, src1_len,
|
|
|
|
iter, loc, /*is_store=*/false);
|
|
|
|
if (dest.start != NULL_TREE)
|
|
|
|
instrument_mem_region_access (dest.start, dest_len,
|
|
|
|
iter, loc, /*is_store=*/true);
|
|
|
|
|
|
|
|
*iter = gsi_for_stmt (call);
|
|
|
|
gsi_next (iter);
|
|
|
|
iter_advanced_p = true;
|
|
|
|
}
|
|
|
|
else
|
|
|
|
{
|
|
|
|
if (src0.start != NULL_TREE)
|
|
|
|
maybe_update_mem_ref_hash_table (src0.start, src0_len);
|
|
|
|
if (src1.start != NULL_TREE)
|
|
|
|
maybe_update_mem_ref_hash_table (src1.start, src1_len);
|
|
|
|
if (dest.start != NULL_TREE)
|
|
|
|
maybe_update_mem_ref_hash_table (dest.start, dest_len);
|
[asan] Avoid instrumenting duplicated memory access in the same basic block
Like what Address Sanitizer does in LLVM, this patch avoids instrumented
duplicated memory accesses in the same basic blocks.
The approach taken is very conservative, to keep the pass simple, for
a start.
A memory access is considered to be a pair made of an expression tree
representing the beginning of the memory region that is accessed and
a the size of the access, in byte. For now that size is either 1, 2,
4, 8 or 16 bytes.
The patch builds a hash table of the memory accesses that have been
instrumented in the current basic block. Then it walks the gimple
statements of the current basic block. For each statement, it tests
if the memory regions it references have already been instrumented.
If not, the statement is instrumented and each memory references that
are actually instrumented are added to the hash table. When a memory
region is accessed (usually through builtin functions like memset),
then what gets added to the hash table is actually two memory
accesses: one for the beginning of the region, and the other for the
its end.
When the patch crosses a function call that is not a built-in function
that we ought to instrument, the hash table is cleared, because that
function call can possibly e.g free some memory that was instrumented.
Likewise, when a new basic block is visited, the hash table is
cleared. I guess we could be smarter than just unconditionally
clearing the hash table in this later case, but this is what asan@llvm
does, and for now, I thought starting in a conservative manner might
have some value.
The hash table is destroyed at the end of the pass.
Bootstrapped and tested against trunk on x86-64-unknown-linux-gnu.
gcc/
* Makefile.in (asan.o): Add new dependency on hash-table.h
* asan.c (struct asan_mem_ref, struct mem_ref_hasher): New types.
(asan_mem_ref_init, asan_mem_ref_get_end, get_mem_ref_hash_table)
(has_stmt_been_instrumented_p, empty_mem_ref_hash_table)
(free_mem_ref_resources, has_mem_ref_been_instrumented)
(has_stmt_been_instrumented_p, update_mem_ref_hash_table)
(get_mem_ref_of_assignment): New functions.
(get_mem_refs_of_builtin_call): Extract from
instrument_builtin_call and tweak a little bit to make it fit with
the new signature.
(instrument_builtin_call): Use the new
get_mem_refs_of_builtin_call. Use gimple_call_builtin_p instead
of is_gimple_builtin_call.
(instrument_derefs, instrument_mem_region_access): Insert the
instrumented memory reference into the hash table.
(maybe_instrument_assignment): Renamed instrument_assignment into
this, and change it to advance the iterator when instrumentation
actually happened and return true in that case. This makes it
homogeneous with maybe_instrument_assignment, and thus give a
chance to callers to be more 'regular'.
(transform_statements): Clear the memory reference hash table
whenever we enter a new BB, when we cross a function call, or when
we are done transforming statements. Use
maybe_instrument_assignment instead of instrumentation. No more
need to special case maybe_instrument_assignment and advance the
iterator after calling it; it's now handled just like
maybe_instrument_call. Update comment.
gcc/testsuite/
* c-c++-common/asan/no-redundant-instrumentation-1.c: New test.
* testsuite/c-c++-common/asan/no-redundant-instrumentation-2.c: Likewise.
* testsuite/c-c++-common/asan/no-redundant-instrumentation-3.c: Likewise.
* testsuite/c-c++-common/asan/inc.c: Likewise.
From-SVN: r196008
2013-02-13 11:32:26 +01:00
|
|
|
}
|
Instrument built-in memory access function calls
This patch instruments many memory access patterns through builtins.
Basically, for a call like:
__builtin_memset (from, 0, n_bytes);
the patch would only instrument the accesses at the beginning and at
the end of the memory region [from, from + n_bytes]. This is the
strategy used by the llvm implementation of asan.
This instrumentation is done for all the memory access builtin
functions that expose a well specified memory region -- one that
explicitly states the number of bytes accessed in the region.
A special treatment is used for __builtin_strlen. The patch
instruments the access to the first byte of its argument, as well as
the access to the byte (of the argument) at the offset returned by
strlen.
For the __sync_* and __atomic* calls the patch instruments the access
to the bytes pointed to by the argument.
While doing this, I have added a new parameter to build_check_stmt to
decide whether to insert the instrumentation code before or after the
statement iterator. This allows us to do away with the
gsi_{next,prev} dance we were doing in the callers of this function.
Tested by running cc1 -fasan on variations of simple programs like:
int
foo ()
{
char foo[10] = {0};
foo[0] = 't';
foo[1] = 'e';
foo[2] = 's';
foo[3] = 't';
int l = __builtin_strlen (foo);
int n = sizeof (foo);
__builtin_memset (&foo[4], 0, n - 4);
__sync_fetch_and_add (&foo[11], 1);
return l;
}
and by starring at the gimple output which for this function is:
;; Function foo (foo, funcdef_no=0, decl_uid=1714, cgraph_uid=0)
foo ()
{
int n;
int l;
char foo[10];
int D.1725;
char * D.1724;
int D.1723;
long unsigned int D.1722;
int D.1721;
long unsigned int D.1720;
long unsigned int _1;
int _4;
long unsigned int _5;
int _6;
char * _7;
int _8;
char * _9;
unsigned long _10;
unsigned long _11;
unsigned long _12;
signed char * _13;
signed char _14;
_Bool _15;
unsigned long _16;
signed char _17;
_Bool _18;
_Bool _19;
char * _20;
unsigned long _21;
unsigned long _22;
unsigned long _23;
signed char * _24;
signed char _25;
_Bool _26;
unsigned long _27;
signed char _28;
_Bool _29;
_Bool _30;
char * _31;
unsigned long _32;
unsigned long _33;
unsigned long _34;
signed char * _35;
signed char _36;
_Bool _37;
unsigned long _38;
signed char _39;
_Bool _40;
_Bool _41;
char * _42;
unsigned long _43;
unsigned long _44;
unsigned long _45;
signed char * _46;
signed char _47;
_Bool _48;
unsigned long _49;
signed char _50;
_Bool _51;
_Bool _52;
char * _53;
unsigned long _54;
unsigned long _55;
unsigned long _56;
signed char * _57;
signed char _58;
_Bool _59;
unsigned long _60;
signed char _61;
_Bool _62;
_Bool _63;
char[10] * _64;
unsigned long _65;
unsigned long _66;
unsigned long _67;
signed char * _68;
signed char _69;
_Bool _70;
unsigned long _71;
signed char _72;
_Bool _73;
_Bool _74;
unsigned long _75;
unsigned long _76;
unsigned long _77;
signed char * _78;
signed char _79;
_Bool _80;
unsigned long _81;
signed char _82;
_Bool _83;
_Bool _84;
long unsigned int _85;
long unsigned int _86;
char * _87;
char * _88;
unsigned long _89;
unsigned long _90;
unsigned long _91;
signed char * _92;
signed char _93;
_Bool _94;
unsigned long _95;
signed char _96;
_Bool _97;
_Bool _98;
char * _99;
unsigned long _100;
unsigned long _101;
unsigned long _102;
signed char * _103;
signed char _104;
_Bool _105;
unsigned long _106;
signed char _107;
_Bool _108;
_Bool _109;
<bb 2>:
foo = {};
_9 = &foo[0];
_10 = (unsigned long) _9;
_11 = _10 >> 3;
_12 = _11 + 17592186044416;
_13 = (signed char *) _12;
_14 = *_13;
_15 = _14 != 0;
_16 = _10 & 7;
_17 = (signed char) _16;
_18 = _17 >= _14;
_19 = _15 & _18;
if (_19 != 0)
goto <bb 5>;
else
goto <bb 4>;
<bb 5>:
__asan_report_store1 (_10);
<bb 4>:
foo[0] = 116;
_20 = &foo[1];
_21 = (unsigned long) _20;
_22 = _21 >> 3;
_23 = _22 + 17592186044416;
_24 = (signed char *) _23;
_25 = *_24;
_26 = _25 != 0;
_27 = _21 & 7;
_28 = (signed char) _27;
_29 = _28 >= _25;
_30 = _26 & _29;
if (_30 != 0)
goto <bb 7>;
else
goto <bb 6>;
<bb 7>:
__asan_report_store1 (_21);
<bb 6>:
foo[1] = 101;
_31 = &foo[2];
_32 = (unsigned long) _31;
_33 = _32 >> 3;
_34 = _33 + 17592186044416;
_35 = (signed char *) _34;
_36 = *_35;
_37 = _36 != 0;
_38 = _32 & 7;
_39 = (signed char) _38;
_40 = _39 >= _36;
_41 = _37 & _40;
if (_41 != 0)
goto <bb 9>;
else
goto <bb 8>;
<bb 9>:
__asan_report_store1 (_32);
<bb 8>:
foo[2] = 115;
_42 = &foo[3];
_43 = (unsigned long) _42;
_44 = _43 >> 3;
_45 = _44 + 17592186044416;
_46 = (signed char *) _45;
_47 = *_46;
_48 = _47 != 0;
_49 = _43 & 7;
_50 = (signed char) _49;
_51 = _50 >= _47;
_52 = _48 & _51;
if (_52 != 0)
goto <bb 11>;
else
goto <bb 10>;
<bb 11>:
__asan_report_store1 (_43);
<bb 10>:
foo[3] = 116;
_53 = (char *) &foo;
_54 = (unsigned long) _53;
_55 = _54 >> 3;
_56 = _55 + 17592186044416;
_57 = (signed char *) _56;
_58 = *_57;
_59 = _58 != 0;
_60 = _54 & 7;
_61 = (signed char) _60;
_62 = _61 >= _58;
_63 = _59 & _62;
if (_63 != 0)
goto <bb 13>;
else
goto <bb 12>;
<bb 13>:
__asan_report_load1 (_54);
<bb 12>:
_1 = __builtin_strlen (&foo);
_64 = _53 + _1;
_65 = (unsigned long) _64;
_66 = _65 >> 3;
_67 = _66 + 17592186044416;
_68 = (signed char *) _67;
_69 = *_68;
_70 = _69 != 0;
_71 = _65 & 7;
_72 = (signed char) _71;
_73 = _72 >= _69;
_74 = _70 & _73;
if (_74 != 0)
goto <bb 15>;
else
goto <bb 14>;
<bb 15>:
__asan_report_load1 (_65);
<bb 14>:
l_2 = (int) _1;
n_3 = 10;
_4 = n_3 + -4;
_5 = (long unsigned int) _4;
_6 = l_2 + 1;
_7 = &foo[_6];
if (_5 != 0)
goto <bb 17>;
else
goto <bb 16>;
<bb 17>:
_75 = (unsigned long) _7;
_76 = _75 >> 3;
_77 = _76 + 17592186044416;
_78 = (signed char *) _77;
_79 = *_78;
_80 = _79 != 0;
_81 = _75 & 7;
_82 = (signed char) _81;
_83 = _82 >= _79;
_84 = _80 & _83;
_85 = _5;
_86 = _85 - 1;
_87 = _7;
_88 = _87 + _86;
_89 = (unsigned long) _88;
_90 = _89 >> 3;
_91 = _90 + 17592186044416;
_92 = (signed char *) _91;
_93 = *_92;
_94 = _93 != 0;
_95 = _89 & 7;
_96 = (signed char) _95;
_97 = _96 >= _93;
_98 = _94 & _97;
if (_98 != 0)
goto <bb 21>;
else
goto <bb 20>;
<bb 21>:
__asan_report_store1 (_89);
<bb 20>:
if (_84 != 0)
goto <bb 19>;
else
goto <bb 18>;
<bb 19>:
__asan_report_store1 (_75);
<bb 18>:
<bb 16>:
__builtin_memset (_7, 0, _5);
_99 = &foo[11];
_100 = (unsigned long) _99;
_101 = _100 >> 3;
_102 = _101 + 17592186044416;
_103 = (signed char *) _102;
_104 = *_103;
_105 = _104 != 0;
_106 = _100 & 7;
_107 = (signed char) _106;
_108 = _107 >= _104;
_109 = _105 & _108;
if (_109 != 0)
goto <bb 23>;
else
goto <bb 22>;
<bb 23>:
__asan_report_store1 (_100);
<bb 22>:
__sync_fetch_and_add_1 (&foo[11], 1);
_8 = l_2;
foo ={v} {CLOBBER};
<L1>:
return _8;
}
;; Function _GLOBAL__sub_I_00099_0_foo (_GLOBAL__sub_I_00099_0_foo, funcdef_no=1, decl_uid=1752, cgraph_uid=4)
_GLOBAL__sub_I_00099_0_foo ()
{
<bb 2>:
__asan_init ();
return;
}
gcc/
* gimple.h (is_gimple_builtin_call): Declare ...
* gimple.c (is_gimple_builtin_call): ... New public function.
* asan.c (insert_if_then_before_iter, instrument_mem_region_access,
instrument_strlen_call, maybe_instrument_builtin_call,
instrument_call): New static functions.
(create_cond_insert_point): Renamed
create_cond_insert_point_before_iter into this. Add a new
parameter to decide whether to insert the condition before or
after the statement iterator.
(build_check_stmt): Adjust for the new create_cond_insert_point.
Add a new parameter to decide whether to add the instrumentation
code before or after the statement iterator.
(instrument_assignment): Factorize from ...
(transform_statements): ... here. Use maybe_instrument_call to
instrument builtin function calls as well.
(instrument_derefs): Adjust for the new parameter of
build_check_stmt. Fix detection of bit-field access.
From-SVN: r193440
2012-11-12 16:53:25 +01:00
|
|
|
}
|
[asan] Avoid instrumenting duplicated memory access in the same basic block
Like what Address Sanitizer does in LLVM, this patch avoids instrumented
duplicated memory accesses in the same basic blocks.
The approach taken is very conservative, to keep the pass simple, for
a start.
A memory access is considered to be a pair made of an expression tree
representing the beginning of the memory region that is accessed and
a the size of the access, in byte. For now that size is either 1, 2,
4, 8 or 16 bytes.
The patch builds a hash table of the memory accesses that have been
instrumented in the current basic block. Then it walks the gimple
statements of the current basic block. For each statement, it tests
if the memory regions it references have already been instrumented.
If not, the statement is instrumented and each memory references that
are actually instrumented are added to the hash table. When a memory
region is accessed (usually through builtin functions like memset),
then what gets added to the hash table is actually two memory
accesses: one for the beginning of the region, and the other for the
its end.
When the patch crosses a function call that is not a built-in function
that we ought to instrument, the hash table is cleared, because that
function call can possibly e.g free some memory that was instrumented.
Likewise, when a new basic block is visited, the hash table is
cleared. I guess we could be smarter than just unconditionally
clearing the hash table in this later case, but this is what asan@llvm
does, and for now, I thought starting in a conservative manner might
have some value.
The hash table is destroyed at the end of the pass.
Bootstrapped and tested against trunk on x86-64-unknown-linux-gnu.
gcc/
* Makefile.in (asan.o): Add new dependency on hash-table.h
* asan.c (struct asan_mem_ref, struct mem_ref_hasher): New types.
(asan_mem_ref_init, asan_mem_ref_get_end, get_mem_ref_hash_table)
(has_stmt_been_instrumented_p, empty_mem_ref_hash_table)
(free_mem_ref_resources, has_mem_ref_been_instrumented)
(has_stmt_been_instrumented_p, update_mem_ref_hash_table)
(get_mem_ref_of_assignment): New functions.
(get_mem_refs_of_builtin_call): Extract from
instrument_builtin_call and tweak a little bit to make it fit with
the new signature.
(instrument_builtin_call): Use the new
get_mem_refs_of_builtin_call. Use gimple_call_builtin_p instead
of is_gimple_builtin_call.
(instrument_derefs, instrument_mem_region_access): Insert the
instrumented memory reference into the hash table.
(maybe_instrument_assignment): Renamed instrument_assignment into
this, and change it to advance the iterator when instrumentation
actually happened and return true in that case. This makes it
homogeneous with maybe_instrument_assignment, and thus give a
chance to callers to be more 'regular'.
(transform_statements): Clear the memory reference hash table
whenever we enter a new BB, when we cross a function call, or when
we are done transforming statements. Use
maybe_instrument_assignment instead of instrumentation. No more
need to special case maybe_instrument_assignment and advance the
iterator after calling it; it's now handled just like
maybe_instrument_call. Update comment.
gcc/testsuite/
* c-c++-common/asan/no-redundant-instrumentation-1.c: New test.
* testsuite/c-c++-common/asan/no-redundant-instrumentation-2.c: Likewise.
* testsuite/c-c++-common/asan/no-redundant-instrumentation-3.c: Likewise.
* testsuite/c-c++-common/asan/inc.c: Likewise.
From-SVN: r196008
2013-02-13 11:32:26 +01:00
|
|
|
return iter_advanced_p;
|
Instrument built-in memory access function calls
This patch instruments many memory access patterns through builtins.
Basically, for a call like:
__builtin_memset (from, 0, n_bytes);
the patch would only instrument the accesses at the beginning and at
the end of the memory region [from, from + n_bytes]. This is the
strategy used by the llvm implementation of asan.
This instrumentation is done for all the memory access builtin
functions that expose a well specified memory region -- one that
explicitly states the number of bytes accessed in the region.
A special treatment is used for __builtin_strlen. The patch
instruments the access to the first byte of its argument, as well as
the access to the byte (of the argument) at the offset returned by
strlen.
For the __sync_* and __atomic* calls the patch instruments the access
to the bytes pointed to by the argument.
While doing this, I have added a new parameter to build_check_stmt to
decide whether to insert the instrumentation code before or after the
statement iterator. This allows us to do away with the
gsi_{next,prev} dance we were doing in the callers of this function.
Tested by running cc1 -fasan on variations of simple programs like:
int
foo ()
{
char foo[10] = {0};
foo[0] = 't';
foo[1] = 'e';
foo[2] = 's';
foo[3] = 't';
int l = __builtin_strlen (foo);
int n = sizeof (foo);
__builtin_memset (&foo[4], 0, n - 4);
__sync_fetch_and_add (&foo[11], 1);
return l;
}
and by starring at the gimple output which for this function is:
;; Function foo (foo, funcdef_no=0, decl_uid=1714, cgraph_uid=0)
foo ()
{
int n;
int l;
char foo[10];
int D.1725;
char * D.1724;
int D.1723;
long unsigned int D.1722;
int D.1721;
long unsigned int D.1720;
long unsigned int _1;
int _4;
long unsigned int _5;
int _6;
char * _7;
int _8;
char * _9;
unsigned long _10;
unsigned long _11;
unsigned long _12;
signed char * _13;
signed char _14;
_Bool _15;
unsigned long _16;
signed char _17;
_Bool _18;
_Bool _19;
char * _20;
unsigned long _21;
unsigned long _22;
unsigned long _23;
signed char * _24;
signed char _25;
_Bool _26;
unsigned long _27;
signed char _28;
_Bool _29;
_Bool _30;
char * _31;
unsigned long _32;
unsigned long _33;
unsigned long _34;
signed char * _35;
signed char _36;
_Bool _37;
unsigned long _38;
signed char _39;
_Bool _40;
_Bool _41;
char * _42;
unsigned long _43;
unsigned long _44;
unsigned long _45;
signed char * _46;
signed char _47;
_Bool _48;
unsigned long _49;
signed char _50;
_Bool _51;
_Bool _52;
char * _53;
unsigned long _54;
unsigned long _55;
unsigned long _56;
signed char * _57;
signed char _58;
_Bool _59;
unsigned long _60;
signed char _61;
_Bool _62;
_Bool _63;
char[10] * _64;
unsigned long _65;
unsigned long _66;
unsigned long _67;
signed char * _68;
signed char _69;
_Bool _70;
unsigned long _71;
signed char _72;
_Bool _73;
_Bool _74;
unsigned long _75;
unsigned long _76;
unsigned long _77;
signed char * _78;
signed char _79;
_Bool _80;
unsigned long _81;
signed char _82;
_Bool _83;
_Bool _84;
long unsigned int _85;
long unsigned int _86;
char * _87;
char * _88;
unsigned long _89;
unsigned long _90;
unsigned long _91;
signed char * _92;
signed char _93;
_Bool _94;
unsigned long _95;
signed char _96;
_Bool _97;
_Bool _98;
char * _99;
unsigned long _100;
unsigned long _101;
unsigned long _102;
signed char * _103;
signed char _104;
_Bool _105;
unsigned long _106;
signed char _107;
_Bool _108;
_Bool _109;
<bb 2>:
foo = {};
_9 = &foo[0];
_10 = (unsigned long) _9;
_11 = _10 >> 3;
_12 = _11 + 17592186044416;
_13 = (signed char *) _12;
_14 = *_13;
_15 = _14 != 0;
_16 = _10 & 7;
_17 = (signed char) _16;
_18 = _17 >= _14;
_19 = _15 & _18;
if (_19 != 0)
goto <bb 5>;
else
goto <bb 4>;
<bb 5>:
__asan_report_store1 (_10);
<bb 4>:
foo[0] = 116;
_20 = &foo[1];
_21 = (unsigned long) _20;
_22 = _21 >> 3;
_23 = _22 + 17592186044416;
_24 = (signed char *) _23;
_25 = *_24;
_26 = _25 != 0;
_27 = _21 & 7;
_28 = (signed char) _27;
_29 = _28 >= _25;
_30 = _26 & _29;
if (_30 != 0)
goto <bb 7>;
else
goto <bb 6>;
<bb 7>:
__asan_report_store1 (_21);
<bb 6>:
foo[1] = 101;
_31 = &foo[2];
_32 = (unsigned long) _31;
_33 = _32 >> 3;
_34 = _33 + 17592186044416;
_35 = (signed char *) _34;
_36 = *_35;
_37 = _36 != 0;
_38 = _32 & 7;
_39 = (signed char) _38;
_40 = _39 >= _36;
_41 = _37 & _40;
if (_41 != 0)
goto <bb 9>;
else
goto <bb 8>;
<bb 9>:
__asan_report_store1 (_32);
<bb 8>:
foo[2] = 115;
_42 = &foo[3];
_43 = (unsigned long) _42;
_44 = _43 >> 3;
_45 = _44 + 17592186044416;
_46 = (signed char *) _45;
_47 = *_46;
_48 = _47 != 0;
_49 = _43 & 7;
_50 = (signed char) _49;
_51 = _50 >= _47;
_52 = _48 & _51;
if (_52 != 0)
goto <bb 11>;
else
goto <bb 10>;
<bb 11>:
__asan_report_store1 (_43);
<bb 10>:
foo[3] = 116;
_53 = (char *) &foo;
_54 = (unsigned long) _53;
_55 = _54 >> 3;
_56 = _55 + 17592186044416;
_57 = (signed char *) _56;
_58 = *_57;
_59 = _58 != 0;
_60 = _54 & 7;
_61 = (signed char) _60;
_62 = _61 >= _58;
_63 = _59 & _62;
if (_63 != 0)
goto <bb 13>;
else
goto <bb 12>;
<bb 13>:
__asan_report_load1 (_54);
<bb 12>:
_1 = __builtin_strlen (&foo);
_64 = _53 + _1;
_65 = (unsigned long) _64;
_66 = _65 >> 3;
_67 = _66 + 17592186044416;
_68 = (signed char *) _67;
_69 = *_68;
_70 = _69 != 0;
_71 = _65 & 7;
_72 = (signed char) _71;
_73 = _72 >= _69;
_74 = _70 & _73;
if (_74 != 0)
goto <bb 15>;
else
goto <bb 14>;
<bb 15>:
__asan_report_load1 (_65);
<bb 14>:
l_2 = (int) _1;
n_3 = 10;
_4 = n_3 + -4;
_5 = (long unsigned int) _4;
_6 = l_2 + 1;
_7 = &foo[_6];
if (_5 != 0)
goto <bb 17>;
else
goto <bb 16>;
<bb 17>:
_75 = (unsigned long) _7;
_76 = _75 >> 3;
_77 = _76 + 17592186044416;
_78 = (signed char *) _77;
_79 = *_78;
_80 = _79 != 0;
_81 = _75 & 7;
_82 = (signed char) _81;
_83 = _82 >= _79;
_84 = _80 & _83;
_85 = _5;
_86 = _85 - 1;
_87 = _7;
_88 = _87 + _86;
_89 = (unsigned long) _88;
_90 = _89 >> 3;
_91 = _90 + 17592186044416;
_92 = (signed char *) _91;
_93 = *_92;
_94 = _93 != 0;
_95 = _89 & 7;
_96 = (signed char) _95;
_97 = _96 >= _93;
_98 = _94 & _97;
if (_98 != 0)
goto <bb 21>;
else
goto <bb 20>;
<bb 21>:
__asan_report_store1 (_89);
<bb 20>:
if (_84 != 0)
goto <bb 19>;
else
goto <bb 18>;
<bb 19>:
__asan_report_store1 (_75);
<bb 18>:
<bb 16>:
__builtin_memset (_7, 0, _5);
_99 = &foo[11];
_100 = (unsigned long) _99;
_101 = _100 >> 3;
_102 = _101 + 17592186044416;
_103 = (signed char *) _102;
_104 = *_103;
_105 = _104 != 0;
_106 = _100 & 7;
_107 = (signed char) _106;
_108 = _107 >= _104;
_109 = _105 & _108;
if (_109 != 0)
goto <bb 23>;
else
goto <bb 22>;
<bb 23>:
__asan_report_store1 (_100);
<bb 22>:
__sync_fetch_and_add_1 (&foo[11], 1);
_8 = l_2;
foo ={v} {CLOBBER};
<L1>:
return _8;
}
;; Function _GLOBAL__sub_I_00099_0_foo (_GLOBAL__sub_I_00099_0_foo, funcdef_no=1, decl_uid=1752, cgraph_uid=4)
_GLOBAL__sub_I_00099_0_foo ()
{
<bb 2>:
__asan_init ();
return;
}
gcc/
* gimple.h (is_gimple_builtin_call): Declare ...
* gimple.c (is_gimple_builtin_call): ... New public function.
* asan.c (insert_if_then_before_iter, instrument_mem_region_access,
instrument_strlen_call, maybe_instrument_builtin_call,
instrument_call): New static functions.
(create_cond_insert_point): Renamed
create_cond_insert_point_before_iter into this. Add a new
parameter to decide whether to insert the condition before or
after the statement iterator.
(build_check_stmt): Adjust for the new create_cond_insert_point.
Add a new parameter to decide whether to add the instrumentation
code before or after the statement iterator.
(instrument_assignment): Factorize from ...
(transform_statements): ... here. Use maybe_instrument_call to
instrument builtin function calls as well.
(instrument_derefs): Adjust for the new parameter of
build_check_stmt. Fix detection of bit-field access.
From-SVN: r193440
2012-11-12 16:53:25 +01:00
|
|
|
}
|
|
|
|
|
|
|
|
/* Instrument the assignment statement ITER if it is subject to
|
[asan] Avoid instrumenting duplicated memory access in the same basic block
Like what Address Sanitizer does in LLVM, this patch avoids instrumented
duplicated memory accesses in the same basic blocks.
The approach taken is very conservative, to keep the pass simple, for
a start.
A memory access is considered to be a pair made of an expression tree
representing the beginning of the memory region that is accessed and
a the size of the access, in byte. For now that size is either 1, 2,
4, 8 or 16 bytes.
The patch builds a hash table of the memory accesses that have been
instrumented in the current basic block. Then it walks the gimple
statements of the current basic block. For each statement, it tests
if the memory regions it references have already been instrumented.
If not, the statement is instrumented and each memory references that
are actually instrumented are added to the hash table. When a memory
region is accessed (usually through builtin functions like memset),
then what gets added to the hash table is actually two memory
accesses: one for the beginning of the region, and the other for the
its end.
When the patch crosses a function call that is not a built-in function
that we ought to instrument, the hash table is cleared, because that
function call can possibly e.g free some memory that was instrumented.
Likewise, when a new basic block is visited, the hash table is
cleared. I guess we could be smarter than just unconditionally
clearing the hash table in this later case, but this is what asan@llvm
does, and for now, I thought starting in a conservative manner might
have some value.
The hash table is destroyed at the end of the pass.
Bootstrapped and tested against trunk on x86-64-unknown-linux-gnu.
gcc/
* Makefile.in (asan.o): Add new dependency on hash-table.h
* asan.c (struct asan_mem_ref, struct mem_ref_hasher): New types.
(asan_mem_ref_init, asan_mem_ref_get_end, get_mem_ref_hash_table)
(has_stmt_been_instrumented_p, empty_mem_ref_hash_table)
(free_mem_ref_resources, has_mem_ref_been_instrumented)
(has_stmt_been_instrumented_p, update_mem_ref_hash_table)
(get_mem_ref_of_assignment): New functions.
(get_mem_refs_of_builtin_call): Extract from
instrument_builtin_call and tweak a little bit to make it fit with
the new signature.
(instrument_builtin_call): Use the new
get_mem_refs_of_builtin_call. Use gimple_call_builtin_p instead
of is_gimple_builtin_call.
(instrument_derefs, instrument_mem_region_access): Insert the
instrumented memory reference into the hash table.
(maybe_instrument_assignment): Renamed instrument_assignment into
this, and change it to advance the iterator when instrumentation
actually happened and return true in that case. This makes it
homogeneous with maybe_instrument_assignment, and thus give a
chance to callers to be more 'regular'.
(transform_statements): Clear the memory reference hash table
whenever we enter a new BB, when we cross a function call, or when
we are done transforming statements. Use
maybe_instrument_assignment instead of instrumentation. No more
need to special case maybe_instrument_assignment and advance the
iterator after calling it; it's now handled just like
maybe_instrument_call. Update comment.
gcc/testsuite/
* c-c++-common/asan/no-redundant-instrumentation-1.c: New test.
* testsuite/c-c++-common/asan/no-redundant-instrumentation-2.c: Likewise.
* testsuite/c-c++-common/asan/no-redundant-instrumentation-3.c: Likewise.
* testsuite/c-c++-common/asan/inc.c: Likewise.
From-SVN: r196008
2013-02-13 11:32:26 +01:00
|
|
|
instrumentation. Return TRUE iff instrumentation actually
|
|
|
|
happened. In that case, the iterator ITER is advanced to the next
|
|
|
|
logical expression following the one initially pointed to by ITER,
|
|
|
|
and the relevant memory reference that which access has been
|
|
|
|
instrumented is added to the memory references hash table. */
|
Instrument built-in memory access function calls
This patch instruments many memory access patterns through builtins.
Basically, for a call like:
__builtin_memset (from, 0, n_bytes);
the patch would only instrument the accesses at the beginning and at
the end of the memory region [from, from + n_bytes]. This is the
strategy used by the llvm implementation of asan.
This instrumentation is done for all the memory access builtin
functions that expose a well specified memory region -- one that
explicitly states the number of bytes accessed in the region.
A special treatment is used for __builtin_strlen. The patch
instruments the access to the first byte of its argument, as well as
the access to the byte (of the argument) at the offset returned by
strlen.
For the __sync_* and __atomic* calls the patch instruments the access
to the bytes pointed to by the argument.
While doing this, I have added a new parameter to build_check_stmt to
decide whether to insert the instrumentation code before or after the
statement iterator. This allows us to do away with the
gsi_{next,prev} dance we were doing in the callers of this function.
Tested by running cc1 -fasan on variations of simple programs like:
int
foo ()
{
char foo[10] = {0};
foo[0] = 't';
foo[1] = 'e';
foo[2] = 's';
foo[3] = 't';
int l = __builtin_strlen (foo);
int n = sizeof (foo);
__builtin_memset (&foo[4], 0, n - 4);
__sync_fetch_and_add (&foo[11], 1);
return l;
}
and by starring at the gimple output which for this function is:
;; Function foo (foo, funcdef_no=0, decl_uid=1714, cgraph_uid=0)
foo ()
{
int n;
int l;
char foo[10];
int D.1725;
char * D.1724;
int D.1723;
long unsigned int D.1722;
int D.1721;
long unsigned int D.1720;
long unsigned int _1;
int _4;
long unsigned int _5;
int _6;
char * _7;
int _8;
char * _9;
unsigned long _10;
unsigned long _11;
unsigned long _12;
signed char * _13;
signed char _14;
_Bool _15;
unsigned long _16;
signed char _17;
_Bool _18;
_Bool _19;
char * _20;
unsigned long _21;
unsigned long _22;
unsigned long _23;
signed char * _24;
signed char _25;
_Bool _26;
unsigned long _27;
signed char _28;
_Bool _29;
_Bool _30;
char * _31;
unsigned long _32;
unsigned long _33;
unsigned long _34;
signed char * _35;
signed char _36;
_Bool _37;
unsigned long _38;
signed char _39;
_Bool _40;
_Bool _41;
char * _42;
unsigned long _43;
unsigned long _44;
unsigned long _45;
signed char * _46;
signed char _47;
_Bool _48;
unsigned long _49;
signed char _50;
_Bool _51;
_Bool _52;
char * _53;
unsigned long _54;
unsigned long _55;
unsigned long _56;
signed char * _57;
signed char _58;
_Bool _59;
unsigned long _60;
signed char _61;
_Bool _62;
_Bool _63;
char[10] * _64;
unsigned long _65;
unsigned long _66;
unsigned long _67;
signed char * _68;
signed char _69;
_Bool _70;
unsigned long _71;
signed char _72;
_Bool _73;
_Bool _74;
unsigned long _75;
unsigned long _76;
unsigned long _77;
signed char * _78;
signed char _79;
_Bool _80;
unsigned long _81;
signed char _82;
_Bool _83;
_Bool _84;
long unsigned int _85;
long unsigned int _86;
char * _87;
char * _88;
unsigned long _89;
unsigned long _90;
unsigned long _91;
signed char * _92;
signed char _93;
_Bool _94;
unsigned long _95;
signed char _96;
_Bool _97;
_Bool _98;
char * _99;
unsigned long _100;
unsigned long _101;
unsigned long _102;
signed char * _103;
signed char _104;
_Bool _105;
unsigned long _106;
signed char _107;
_Bool _108;
_Bool _109;
<bb 2>:
foo = {};
_9 = &foo[0];
_10 = (unsigned long) _9;
_11 = _10 >> 3;
_12 = _11 + 17592186044416;
_13 = (signed char *) _12;
_14 = *_13;
_15 = _14 != 0;
_16 = _10 & 7;
_17 = (signed char) _16;
_18 = _17 >= _14;
_19 = _15 & _18;
if (_19 != 0)
goto <bb 5>;
else
goto <bb 4>;
<bb 5>:
__asan_report_store1 (_10);
<bb 4>:
foo[0] = 116;
_20 = &foo[1];
_21 = (unsigned long) _20;
_22 = _21 >> 3;
_23 = _22 + 17592186044416;
_24 = (signed char *) _23;
_25 = *_24;
_26 = _25 != 0;
_27 = _21 & 7;
_28 = (signed char) _27;
_29 = _28 >= _25;
_30 = _26 & _29;
if (_30 != 0)
goto <bb 7>;
else
goto <bb 6>;
<bb 7>:
__asan_report_store1 (_21);
<bb 6>:
foo[1] = 101;
_31 = &foo[2];
_32 = (unsigned long) _31;
_33 = _32 >> 3;
_34 = _33 + 17592186044416;
_35 = (signed char *) _34;
_36 = *_35;
_37 = _36 != 0;
_38 = _32 & 7;
_39 = (signed char) _38;
_40 = _39 >= _36;
_41 = _37 & _40;
if (_41 != 0)
goto <bb 9>;
else
goto <bb 8>;
<bb 9>:
__asan_report_store1 (_32);
<bb 8>:
foo[2] = 115;
_42 = &foo[3];
_43 = (unsigned long) _42;
_44 = _43 >> 3;
_45 = _44 + 17592186044416;
_46 = (signed char *) _45;
_47 = *_46;
_48 = _47 != 0;
_49 = _43 & 7;
_50 = (signed char) _49;
_51 = _50 >= _47;
_52 = _48 & _51;
if (_52 != 0)
goto <bb 11>;
else
goto <bb 10>;
<bb 11>:
__asan_report_store1 (_43);
<bb 10>:
foo[3] = 116;
_53 = (char *) &foo;
_54 = (unsigned long) _53;
_55 = _54 >> 3;
_56 = _55 + 17592186044416;
_57 = (signed char *) _56;
_58 = *_57;
_59 = _58 != 0;
_60 = _54 & 7;
_61 = (signed char) _60;
_62 = _61 >= _58;
_63 = _59 & _62;
if (_63 != 0)
goto <bb 13>;
else
goto <bb 12>;
<bb 13>:
__asan_report_load1 (_54);
<bb 12>:
_1 = __builtin_strlen (&foo);
_64 = _53 + _1;
_65 = (unsigned long) _64;
_66 = _65 >> 3;
_67 = _66 + 17592186044416;
_68 = (signed char *) _67;
_69 = *_68;
_70 = _69 != 0;
_71 = _65 & 7;
_72 = (signed char) _71;
_73 = _72 >= _69;
_74 = _70 & _73;
if (_74 != 0)
goto <bb 15>;
else
goto <bb 14>;
<bb 15>:
__asan_report_load1 (_65);
<bb 14>:
l_2 = (int) _1;
n_3 = 10;
_4 = n_3 + -4;
_5 = (long unsigned int) _4;
_6 = l_2 + 1;
_7 = &foo[_6];
if (_5 != 0)
goto <bb 17>;
else
goto <bb 16>;
<bb 17>:
_75 = (unsigned long) _7;
_76 = _75 >> 3;
_77 = _76 + 17592186044416;
_78 = (signed char *) _77;
_79 = *_78;
_80 = _79 != 0;
_81 = _75 & 7;
_82 = (signed char) _81;
_83 = _82 >= _79;
_84 = _80 & _83;
_85 = _5;
_86 = _85 - 1;
_87 = _7;
_88 = _87 + _86;
_89 = (unsigned long) _88;
_90 = _89 >> 3;
_91 = _90 + 17592186044416;
_92 = (signed char *) _91;
_93 = *_92;
_94 = _93 != 0;
_95 = _89 & 7;
_96 = (signed char) _95;
_97 = _96 >= _93;
_98 = _94 & _97;
if (_98 != 0)
goto <bb 21>;
else
goto <bb 20>;
<bb 21>:
__asan_report_store1 (_89);
<bb 20>:
if (_84 != 0)
goto <bb 19>;
else
goto <bb 18>;
<bb 19>:
__asan_report_store1 (_75);
<bb 18>:
<bb 16>:
__builtin_memset (_7, 0, _5);
_99 = &foo[11];
_100 = (unsigned long) _99;
_101 = _100 >> 3;
_102 = _101 + 17592186044416;
_103 = (signed char *) _102;
_104 = *_103;
_105 = _104 != 0;
_106 = _100 & 7;
_107 = (signed char) _106;
_108 = _107 >= _104;
_109 = _105 & _108;
if (_109 != 0)
goto <bb 23>;
else
goto <bb 22>;
<bb 23>:
__asan_report_store1 (_100);
<bb 22>:
__sync_fetch_and_add_1 (&foo[11], 1);
_8 = l_2;
foo ={v} {CLOBBER};
<L1>:
return _8;
}
;; Function _GLOBAL__sub_I_00099_0_foo (_GLOBAL__sub_I_00099_0_foo, funcdef_no=1, decl_uid=1752, cgraph_uid=4)
_GLOBAL__sub_I_00099_0_foo ()
{
<bb 2>:
__asan_init ();
return;
}
gcc/
* gimple.h (is_gimple_builtin_call): Declare ...
* gimple.c (is_gimple_builtin_call): ... New public function.
* asan.c (insert_if_then_before_iter, instrument_mem_region_access,
instrument_strlen_call, maybe_instrument_builtin_call,
instrument_call): New static functions.
(create_cond_insert_point): Renamed
create_cond_insert_point_before_iter into this. Add a new
parameter to decide whether to insert the condition before or
after the statement iterator.
(build_check_stmt): Adjust for the new create_cond_insert_point.
Add a new parameter to decide whether to add the instrumentation
code before or after the statement iterator.
(instrument_assignment): Factorize from ...
(transform_statements): ... here. Use maybe_instrument_call to
instrument builtin function calls as well.
(instrument_derefs): Adjust for the new parameter of
build_check_stmt. Fix detection of bit-field access.
From-SVN: r193440
2012-11-12 16:53:25 +01:00
|
|
|
|
[asan] Avoid instrumenting duplicated memory access in the same basic block
Like what Address Sanitizer does in LLVM, this patch avoids instrumented
duplicated memory accesses in the same basic blocks.
The approach taken is very conservative, to keep the pass simple, for
a start.
A memory access is considered to be a pair made of an expression tree
representing the beginning of the memory region that is accessed and
a the size of the access, in byte. For now that size is either 1, 2,
4, 8 or 16 bytes.
The patch builds a hash table of the memory accesses that have been
instrumented in the current basic block. Then it walks the gimple
statements of the current basic block. For each statement, it tests
if the memory regions it references have already been instrumented.
If not, the statement is instrumented and each memory references that
are actually instrumented are added to the hash table. When a memory
region is accessed (usually through builtin functions like memset),
then what gets added to the hash table is actually two memory
accesses: one for the beginning of the region, and the other for the
its end.
When the patch crosses a function call that is not a built-in function
that we ought to instrument, the hash table is cleared, because that
function call can possibly e.g free some memory that was instrumented.
Likewise, when a new basic block is visited, the hash table is
cleared. I guess we could be smarter than just unconditionally
clearing the hash table in this later case, but this is what asan@llvm
does, and for now, I thought starting in a conservative manner might
have some value.
The hash table is destroyed at the end of the pass.
Bootstrapped and tested against trunk on x86-64-unknown-linux-gnu.
gcc/
* Makefile.in (asan.o): Add new dependency on hash-table.h
* asan.c (struct asan_mem_ref, struct mem_ref_hasher): New types.
(asan_mem_ref_init, asan_mem_ref_get_end, get_mem_ref_hash_table)
(has_stmt_been_instrumented_p, empty_mem_ref_hash_table)
(free_mem_ref_resources, has_mem_ref_been_instrumented)
(has_stmt_been_instrumented_p, update_mem_ref_hash_table)
(get_mem_ref_of_assignment): New functions.
(get_mem_refs_of_builtin_call): Extract from
instrument_builtin_call and tweak a little bit to make it fit with
the new signature.
(instrument_builtin_call): Use the new
get_mem_refs_of_builtin_call. Use gimple_call_builtin_p instead
of is_gimple_builtin_call.
(instrument_derefs, instrument_mem_region_access): Insert the
instrumented memory reference into the hash table.
(maybe_instrument_assignment): Renamed instrument_assignment into
this, and change it to advance the iterator when instrumentation
actually happened and return true in that case. This makes it
homogeneous with maybe_instrument_assignment, and thus give a
chance to callers to be more 'regular'.
(transform_statements): Clear the memory reference hash table
whenever we enter a new BB, when we cross a function call, or when
we are done transforming statements. Use
maybe_instrument_assignment instead of instrumentation. No more
need to special case maybe_instrument_assignment and advance the
iterator after calling it; it's now handled just like
maybe_instrument_call. Update comment.
gcc/testsuite/
* c-c++-common/asan/no-redundant-instrumentation-1.c: New test.
* testsuite/c-c++-common/asan/no-redundant-instrumentation-2.c: Likewise.
* testsuite/c-c++-common/asan/no-redundant-instrumentation-3.c: Likewise.
* testsuite/c-c++-common/asan/inc.c: Likewise.
From-SVN: r196008
2013-02-13 11:32:26 +01:00
|
|
|
static bool
|
|
|
|
maybe_instrument_assignment (gimple_stmt_iterator *iter)
|
Instrument built-in memory access function calls
This patch instruments many memory access patterns through builtins.
Basically, for a call like:
__builtin_memset (from, 0, n_bytes);
the patch would only instrument the accesses at the beginning and at
the end of the memory region [from, from + n_bytes]. This is the
strategy used by the llvm implementation of asan.
This instrumentation is done for all the memory access builtin
functions that expose a well specified memory region -- one that
explicitly states the number of bytes accessed in the region.
A special treatment is used for __builtin_strlen. The patch
instruments the access to the first byte of its argument, as well as
the access to the byte (of the argument) at the offset returned by
strlen.
For the __sync_* and __atomic* calls the patch instruments the access
to the bytes pointed to by the argument.
While doing this, I have added a new parameter to build_check_stmt to
decide whether to insert the instrumentation code before or after the
statement iterator. This allows us to do away with the
gsi_{next,prev} dance we were doing in the callers of this function.
Tested by running cc1 -fasan on variations of simple programs like:
int
foo ()
{
char foo[10] = {0};
foo[0] = 't';
foo[1] = 'e';
foo[2] = 's';
foo[3] = 't';
int l = __builtin_strlen (foo);
int n = sizeof (foo);
__builtin_memset (&foo[4], 0, n - 4);
__sync_fetch_and_add (&foo[11], 1);
return l;
}
and by starring at the gimple output which for this function is:
;; Function foo (foo, funcdef_no=0, decl_uid=1714, cgraph_uid=0)
foo ()
{
int n;
int l;
char foo[10];
int D.1725;
char * D.1724;
int D.1723;
long unsigned int D.1722;
int D.1721;
long unsigned int D.1720;
long unsigned int _1;
int _4;
long unsigned int _5;
int _6;
char * _7;
int _8;
char * _9;
unsigned long _10;
unsigned long _11;
unsigned long _12;
signed char * _13;
signed char _14;
_Bool _15;
unsigned long _16;
signed char _17;
_Bool _18;
_Bool _19;
char * _20;
unsigned long _21;
unsigned long _22;
unsigned long _23;
signed char * _24;
signed char _25;
_Bool _26;
unsigned long _27;
signed char _28;
_Bool _29;
_Bool _30;
char * _31;
unsigned long _32;
unsigned long _33;
unsigned long _34;
signed char * _35;
signed char _36;
_Bool _37;
unsigned long _38;
signed char _39;
_Bool _40;
_Bool _41;
char * _42;
unsigned long _43;
unsigned long _44;
unsigned long _45;
signed char * _46;
signed char _47;
_Bool _48;
unsigned long _49;
signed char _50;
_Bool _51;
_Bool _52;
char * _53;
unsigned long _54;
unsigned long _55;
unsigned long _56;
signed char * _57;
signed char _58;
_Bool _59;
unsigned long _60;
signed char _61;
_Bool _62;
_Bool _63;
char[10] * _64;
unsigned long _65;
unsigned long _66;
unsigned long _67;
signed char * _68;
signed char _69;
_Bool _70;
unsigned long _71;
signed char _72;
_Bool _73;
_Bool _74;
unsigned long _75;
unsigned long _76;
unsigned long _77;
signed char * _78;
signed char _79;
_Bool _80;
unsigned long _81;
signed char _82;
_Bool _83;
_Bool _84;
long unsigned int _85;
long unsigned int _86;
char * _87;
char * _88;
unsigned long _89;
unsigned long _90;
unsigned long _91;
signed char * _92;
signed char _93;
_Bool _94;
unsigned long _95;
signed char _96;
_Bool _97;
_Bool _98;
char * _99;
unsigned long _100;
unsigned long _101;
unsigned long _102;
signed char * _103;
signed char _104;
_Bool _105;
unsigned long _106;
signed char _107;
_Bool _108;
_Bool _109;
<bb 2>:
foo = {};
_9 = &foo[0];
_10 = (unsigned long) _9;
_11 = _10 >> 3;
_12 = _11 + 17592186044416;
_13 = (signed char *) _12;
_14 = *_13;
_15 = _14 != 0;
_16 = _10 & 7;
_17 = (signed char) _16;
_18 = _17 >= _14;
_19 = _15 & _18;
if (_19 != 0)
goto <bb 5>;
else
goto <bb 4>;
<bb 5>:
__asan_report_store1 (_10);
<bb 4>:
foo[0] = 116;
_20 = &foo[1];
_21 = (unsigned long) _20;
_22 = _21 >> 3;
_23 = _22 + 17592186044416;
_24 = (signed char *) _23;
_25 = *_24;
_26 = _25 != 0;
_27 = _21 & 7;
_28 = (signed char) _27;
_29 = _28 >= _25;
_30 = _26 & _29;
if (_30 != 0)
goto <bb 7>;
else
goto <bb 6>;
<bb 7>:
__asan_report_store1 (_21);
<bb 6>:
foo[1] = 101;
_31 = &foo[2];
_32 = (unsigned long) _31;
_33 = _32 >> 3;
_34 = _33 + 17592186044416;
_35 = (signed char *) _34;
_36 = *_35;
_37 = _36 != 0;
_38 = _32 & 7;
_39 = (signed char) _38;
_40 = _39 >= _36;
_41 = _37 & _40;
if (_41 != 0)
goto <bb 9>;
else
goto <bb 8>;
<bb 9>:
__asan_report_store1 (_32);
<bb 8>:
foo[2] = 115;
_42 = &foo[3];
_43 = (unsigned long) _42;
_44 = _43 >> 3;
_45 = _44 + 17592186044416;
_46 = (signed char *) _45;
_47 = *_46;
_48 = _47 != 0;
_49 = _43 & 7;
_50 = (signed char) _49;
_51 = _50 >= _47;
_52 = _48 & _51;
if (_52 != 0)
goto <bb 11>;
else
goto <bb 10>;
<bb 11>:
__asan_report_store1 (_43);
<bb 10>:
foo[3] = 116;
_53 = (char *) &foo;
_54 = (unsigned long) _53;
_55 = _54 >> 3;
_56 = _55 + 17592186044416;
_57 = (signed char *) _56;
_58 = *_57;
_59 = _58 != 0;
_60 = _54 & 7;
_61 = (signed char) _60;
_62 = _61 >= _58;
_63 = _59 & _62;
if (_63 != 0)
goto <bb 13>;
else
goto <bb 12>;
<bb 13>:
__asan_report_load1 (_54);
<bb 12>:
_1 = __builtin_strlen (&foo);
_64 = _53 + _1;
_65 = (unsigned long) _64;
_66 = _65 >> 3;
_67 = _66 + 17592186044416;
_68 = (signed char *) _67;
_69 = *_68;
_70 = _69 != 0;
_71 = _65 & 7;
_72 = (signed char) _71;
_73 = _72 >= _69;
_74 = _70 & _73;
if (_74 != 0)
goto <bb 15>;
else
goto <bb 14>;
<bb 15>:
__asan_report_load1 (_65);
<bb 14>:
l_2 = (int) _1;
n_3 = 10;
_4 = n_3 + -4;
_5 = (long unsigned int) _4;
_6 = l_2 + 1;
_7 = &foo[_6];
if (_5 != 0)
goto <bb 17>;
else
goto <bb 16>;
<bb 17>:
_75 = (unsigned long) _7;
_76 = _75 >> 3;
_77 = _76 + 17592186044416;
_78 = (signed char *) _77;
_79 = *_78;
_80 = _79 != 0;
_81 = _75 & 7;
_82 = (signed char) _81;
_83 = _82 >= _79;
_84 = _80 & _83;
_85 = _5;
_86 = _85 - 1;
_87 = _7;
_88 = _87 + _86;
_89 = (unsigned long) _88;
_90 = _89 >> 3;
_91 = _90 + 17592186044416;
_92 = (signed char *) _91;
_93 = *_92;
_94 = _93 != 0;
_95 = _89 & 7;
_96 = (signed char) _95;
_97 = _96 >= _93;
_98 = _94 & _97;
if (_98 != 0)
goto <bb 21>;
else
goto <bb 20>;
<bb 21>:
__asan_report_store1 (_89);
<bb 20>:
if (_84 != 0)
goto <bb 19>;
else
goto <bb 18>;
<bb 19>:
__asan_report_store1 (_75);
<bb 18>:
<bb 16>:
__builtin_memset (_7, 0, _5);
_99 = &foo[11];
_100 = (unsigned long) _99;
_101 = _100 >> 3;
_102 = _101 + 17592186044416;
_103 = (signed char *) _102;
_104 = *_103;
_105 = _104 != 0;
_106 = _100 & 7;
_107 = (signed char) _106;
_108 = _107 >= _104;
_109 = _105 & _108;
if (_109 != 0)
goto <bb 23>;
else
goto <bb 22>;
<bb 23>:
__asan_report_store1 (_100);
<bb 22>:
__sync_fetch_and_add_1 (&foo[11], 1);
_8 = l_2;
foo ={v} {CLOBBER};
<L1>:
return _8;
}
;; Function _GLOBAL__sub_I_00099_0_foo (_GLOBAL__sub_I_00099_0_foo, funcdef_no=1, decl_uid=1752, cgraph_uid=4)
_GLOBAL__sub_I_00099_0_foo ()
{
<bb 2>:
__asan_init ();
return;
}
gcc/
* gimple.h (is_gimple_builtin_call): Declare ...
* gimple.c (is_gimple_builtin_call): ... New public function.
* asan.c (insert_if_then_before_iter, instrument_mem_region_access,
instrument_strlen_call, maybe_instrument_builtin_call,
instrument_call): New static functions.
(create_cond_insert_point): Renamed
create_cond_insert_point_before_iter into this. Add a new
parameter to decide whether to insert the condition before or
after the statement iterator.
(build_check_stmt): Adjust for the new create_cond_insert_point.
Add a new parameter to decide whether to add the instrumentation
code before or after the statement iterator.
(instrument_assignment): Factorize from ...
(transform_statements): ... here. Use maybe_instrument_call to
instrument builtin function calls as well.
(instrument_derefs): Adjust for the new parameter of
build_check_stmt. Fix detection of bit-field access.
From-SVN: r193440
2012-11-12 16:53:25 +01:00
|
|
|
{
|
2015-09-20 02:52:59 +02:00
|
|
|
gimple *s = gsi_stmt (*iter);
|
Instrument built-in memory access function calls
This patch instruments many memory access patterns through builtins.
Basically, for a call like:
__builtin_memset (from, 0, n_bytes);
the patch would only instrument the accesses at the beginning and at
the end of the memory region [from, from + n_bytes]. This is the
strategy used by the llvm implementation of asan.
This instrumentation is done for all the memory access builtin
functions that expose a well specified memory region -- one that
explicitly states the number of bytes accessed in the region.
A special treatment is used for __builtin_strlen. The patch
instruments the access to the first byte of its argument, as well as
the access to the byte (of the argument) at the offset returned by
strlen.
For the __sync_* and __atomic* calls the patch instruments the access
to the bytes pointed to by the argument.
While doing this, I have added a new parameter to build_check_stmt to
decide whether to insert the instrumentation code before or after the
statement iterator. This allows us to do away with the
gsi_{next,prev} dance we were doing in the callers of this function.
Tested by running cc1 -fasan on variations of simple programs like:
int
foo ()
{
char foo[10] = {0};
foo[0] = 't';
foo[1] = 'e';
foo[2] = 's';
foo[3] = 't';
int l = __builtin_strlen (foo);
int n = sizeof (foo);
__builtin_memset (&foo[4], 0, n - 4);
__sync_fetch_and_add (&foo[11], 1);
return l;
}
and by starring at the gimple output which for this function is:
;; Function foo (foo, funcdef_no=0, decl_uid=1714, cgraph_uid=0)
foo ()
{
int n;
int l;
char foo[10];
int D.1725;
char * D.1724;
int D.1723;
long unsigned int D.1722;
int D.1721;
long unsigned int D.1720;
long unsigned int _1;
int _4;
long unsigned int _5;
int _6;
char * _7;
int _8;
char * _9;
unsigned long _10;
unsigned long _11;
unsigned long _12;
signed char * _13;
signed char _14;
_Bool _15;
unsigned long _16;
signed char _17;
_Bool _18;
_Bool _19;
char * _20;
unsigned long _21;
unsigned long _22;
unsigned long _23;
signed char * _24;
signed char _25;
_Bool _26;
unsigned long _27;
signed char _28;
_Bool _29;
_Bool _30;
char * _31;
unsigned long _32;
unsigned long _33;
unsigned long _34;
signed char * _35;
signed char _36;
_Bool _37;
unsigned long _38;
signed char _39;
_Bool _40;
_Bool _41;
char * _42;
unsigned long _43;
unsigned long _44;
unsigned long _45;
signed char * _46;
signed char _47;
_Bool _48;
unsigned long _49;
signed char _50;
_Bool _51;
_Bool _52;
char * _53;
unsigned long _54;
unsigned long _55;
unsigned long _56;
signed char * _57;
signed char _58;
_Bool _59;
unsigned long _60;
signed char _61;
_Bool _62;
_Bool _63;
char[10] * _64;
unsigned long _65;
unsigned long _66;
unsigned long _67;
signed char * _68;
signed char _69;
_Bool _70;
unsigned long _71;
signed char _72;
_Bool _73;
_Bool _74;
unsigned long _75;
unsigned long _76;
unsigned long _77;
signed char * _78;
signed char _79;
_Bool _80;
unsigned long _81;
signed char _82;
_Bool _83;
_Bool _84;
long unsigned int _85;
long unsigned int _86;
char * _87;
char * _88;
unsigned long _89;
unsigned long _90;
unsigned long _91;
signed char * _92;
signed char _93;
_Bool _94;
unsigned long _95;
signed char _96;
_Bool _97;
_Bool _98;
char * _99;
unsigned long _100;
unsigned long _101;
unsigned long _102;
signed char * _103;
signed char _104;
_Bool _105;
unsigned long _106;
signed char _107;
_Bool _108;
_Bool _109;
<bb 2>:
foo = {};
_9 = &foo[0];
_10 = (unsigned long) _9;
_11 = _10 >> 3;
_12 = _11 + 17592186044416;
_13 = (signed char *) _12;
_14 = *_13;
_15 = _14 != 0;
_16 = _10 & 7;
_17 = (signed char) _16;
_18 = _17 >= _14;
_19 = _15 & _18;
if (_19 != 0)
goto <bb 5>;
else
goto <bb 4>;
<bb 5>:
__asan_report_store1 (_10);
<bb 4>:
foo[0] = 116;
_20 = &foo[1];
_21 = (unsigned long) _20;
_22 = _21 >> 3;
_23 = _22 + 17592186044416;
_24 = (signed char *) _23;
_25 = *_24;
_26 = _25 != 0;
_27 = _21 & 7;
_28 = (signed char) _27;
_29 = _28 >= _25;
_30 = _26 & _29;
if (_30 != 0)
goto <bb 7>;
else
goto <bb 6>;
<bb 7>:
__asan_report_store1 (_21);
<bb 6>:
foo[1] = 101;
_31 = &foo[2];
_32 = (unsigned long) _31;
_33 = _32 >> 3;
_34 = _33 + 17592186044416;
_35 = (signed char *) _34;
_36 = *_35;
_37 = _36 != 0;
_38 = _32 & 7;
_39 = (signed char) _38;
_40 = _39 >= _36;
_41 = _37 & _40;
if (_41 != 0)
goto <bb 9>;
else
goto <bb 8>;
<bb 9>:
__asan_report_store1 (_32);
<bb 8>:
foo[2] = 115;
_42 = &foo[3];
_43 = (unsigned long) _42;
_44 = _43 >> 3;
_45 = _44 + 17592186044416;
_46 = (signed char *) _45;
_47 = *_46;
_48 = _47 != 0;
_49 = _43 & 7;
_50 = (signed char) _49;
_51 = _50 >= _47;
_52 = _48 & _51;
if (_52 != 0)
goto <bb 11>;
else
goto <bb 10>;
<bb 11>:
__asan_report_store1 (_43);
<bb 10>:
foo[3] = 116;
_53 = (char *) &foo;
_54 = (unsigned long) _53;
_55 = _54 >> 3;
_56 = _55 + 17592186044416;
_57 = (signed char *) _56;
_58 = *_57;
_59 = _58 != 0;
_60 = _54 & 7;
_61 = (signed char) _60;
_62 = _61 >= _58;
_63 = _59 & _62;
if (_63 != 0)
goto <bb 13>;
else
goto <bb 12>;
<bb 13>:
__asan_report_load1 (_54);
<bb 12>:
_1 = __builtin_strlen (&foo);
_64 = _53 + _1;
_65 = (unsigned long) _64;
_66 = _65 >> 3;
_67 = _66 + 17592186044416;
_68 = (signed char *) _67;
_69 = *_68;
_70 = _69 != 0;
_71 = _65 & 7;
_72 = (signed char) _71;
_73 = _72 >= _69;
_74 = _70 & _73;
if (_74 != 0)
goto <bb 15>;
else
goto <bb 14>;
<bb 15>:
__asan_report_load1 (_65);
<bb 14>:
l_2 = (int) _1;
n_3 = 10;
_4 = n_3 + -4;
_5 = (long unsigned int) _4;
_6 = l_2 + 1;
_7 = &foo[_6];
if (_5 != 0)
goto <bb 17>;
else
goto <bb 16>;
<bb 17>:
_75 = (unsigned long) _7;
_76 = _75 >> 3;
_77 = _76 + 17592186044416;
_78 = (signed char *) _77;
_79 = *_78;
_80 = _79 != 0;
_81 = _75 & 7;
_82 = (signed char) _81;
_83 = _82 >= _79;
_84 = _80 & _83;
_85 = _5;
_86 = _85 - 1;
_87 = _7;
_88 = _87 + _86;
_89 = (unsigned long) _88;
_90 = _89 >> 3;
_91 = _90 + 17592186044416;
_92 = (signed char *) _91;
_93 = *_92;
_94 = _93 != 0;
_95 = _89 & 7;
_96 = (signed char) _95;
_97 = _96 >= _93;
_98 = _94 & _97;
if (_98 != 0)
goto <bb 21>;
else
goto <bb 20>;
<bb 21>:
__asan_report_store1 (_89);
<bb 20>:
if (_84 != 0)
goto <bb 19>;
else
goto <bb 18>;
<bb 19>:
__asan_report_store1 (_75);
<bb 18>:
<bb 16>:
__builtin_memset (_7, 0, _5);
_99 = &foo[11];
_100 = (unsigned long) _99;
_101 = _100 >> 3;
_102 = _101 + 17592186044416;
_103 = (signed char *) _102;
_104 = *_103;
_105 = _104 != 0;
_106 = _100 & 7;
_107 = (signed char) _106;
_108 = _107 >= _104;
_109 = _105 & _108;
if (_109 != 0)
goto <bb 23>;
else
goto <bb 22>;
<bb 23>:
__asan_report_store1 (_100);
<bb 22>:
__sync_fetch_and_add_1 (&foo[11], 1);
_8 = l_2;
foo ={v} {CLOBBER};
<L1>:
return _8;
}
;; Function _GLOBAL__sub_I_00099_0_foo (_GLOBAL__sub_I_00099_0_foo, funcdef_no=1, decl_uid=1752, cgraph_uid=4)
_GLOBAL__sub_I_00099_0_foo ()
{
<bb 2>:
__asan_init ();
return;
}
gcc/
* gimple.h (is_gimple_builtin_call): Declare ...
* gimple.c (is_gimple_builtin_call): ... New public function.
* asan.c (insert_if_then_before_iter, instrument_mem_region_access,
instrument_strlen_call, maybe_instrument_builtin_call,
instrument_call): New static functions.
(create_cond_insert_point): Renamed
create_cond_insert_point_before_iter into this. Add a new
parameter to decide whether to insert the condition before or
after the statement iterator.
(build_check_stmt): Adjust for the new create_cond_insert_point.
Add a new parameter to decide whether to add the instrumentation
code before or after the statement iterator.
(instrument_assignment): Factorize from ...
(transform_statements): ... here. Use maybe_instrument_call to
instrument builtin function calls as well.
(instrument_derefs): Adjust for the new parameter of
build_check_stmt. Fix detection of bit-field access.
From-SVN: r193440
2012-11-12 16:53:25 +01:00
|
|
|
|
|
|
|
gcc_assert (gimple_assign_single_p (s));
|
|
|
|
|
[asan] Avoid instrumenting duplicated memory access in the same basic block
Like what Address Sanitizer does in LLVM, this patch avoids instrumented
duplicated memory accesses in the same basic blocks.
The approach taken is very conservative, to keep the pass simple, for
a start.
A memory access is considered to be a pair made of an expression tree
representing the beginning of the memory region that is accessed and
a the size of the access, in byte. For now that size is either 1, 2,
4, 8 or 16 bytes.
The patch builds a hash table of the memory accesses that have been
instrumented in the current basic block. Then it walks the gimple
statements of the current basic block. For each statement, it tests
if the memory regions it references have already been instrumented.
If not, the statement is instrumented and each memory references that
are actually instrumented are added to the hash table. When a memory
region is accessed (usually through builtin functions like memset),
then what gets added to the hash table is actually two memory
accesses: one for the beginning of the region, and the other for the
its end.
When the patch crosses a function call that is not a built-in function
that we ought to instrument, the hash table is cleared, because that
function call can possibly e.g free some memory that was instrumented.
Likewise, when a new basic block is visited, the hash table is
cleared. I guess we could be smarter than just unconditionally
clearing the hash table in this later case, but this is what asan@llvm
does, and for now, I thought starting in a conservative manner might
have some value.
The hash table is destroyed at the end of the pass.
Bootstrapped and tested against trunk on x86-64-unknown-linux-gnu.
gcc/
* Makefile.in (asan.o): Add new dependency on hash-table.h
* asan.c (struct asan_mem_ref, struct mem_ref_hasher): New types.
(asan_mem_ref_init, asan_mem_ref_get_end, get_mem_ref_hash_table)
(has_stmt_been_instrumented_p, empty_mem_ref_hash_table)
(free_mem_ref_resources, has_mem_ref_been_instrumented)
(has_stmt_been_instrumented_p, update_mem_ref_hash_table)
(get_mem_ref_of_assignment): New functions.
(get_mem_refs_of_builtin_call): Extract from
instrument_builtin_call and tweak a little bit to make it fit with
the new signature.
(instrument_builtin_call): Use the new
get_mem_refs_of_builtin_call. Use gimple_call_builtin_p instead
of is_gimple_builtin_call.
(instrument_derefs, instrument_mem_region_access): Insert the
instrumented memory reference into the hash table.
(maybe_instrument_assignment): Renamed instrument_assignment into
this, and change it to advance the iterator when instrumentation
actually happened and return true in that case. This makes it
homogeneous with maybe_instrument_assignment, and thus give a
chance to callers to be more 'regular'.
(transform_statements): Clear the memory reference hash table
whenever we enter a new BB, when we cross a function call, or when
we are done transforming statements. Use
maybe_instrument_assignment instead of instrumentation. No more
need to special case maybe_instrument_assignment and advance the
iterator after calling it; it's now handled just like
maybe_instrument_call. Update comment.
gcc/testsuite/
* c-c++-common/asan/no-redundant-instrumentation-1.c: New test.
* testsuite/c-c++-common/asan/no-redundant-instrumentation-2.c: Likewise.
* testsuite/c-c++-common/asan/no-redundant-instrumentation-3.c: Likewise.
* testsuite/c-c++-common/asan/inc.c: Likewise.
From-SVN: r196008
2013-02-13 11:32:26 +01:00
|
|
|
tree ref_expr = NULL_TREE;
|
|
|
|
bool is_store, is_instrumented = false;
|
|
|
|
|
2012-12-03 14:57:29 +01:00
|
|
|
if (gimple_store_p (s))
|
[asan] Avoid instrumenting duplicated memory access in the same basic block
Like what Address Sanitizer does in LLVM, this patch avoids instrumented
duplicated memory accesses in the same basic blocks.
The approach taken is very conservative, to keep the pass simple, for
a start.
A memory access is considered to be a pair made of an expression tree
representing the beginning of the memory region that is accessed and
a the size of the access, in byte. For now that size is either 1, 2,
4, 8 or 16 bytes.
The patch builds a hash table of the memory accesses that have been
instrumented in the current basic block. Then it walks the gimple
statements of the current basic block. For each statement, it tests
if the memory regions it references have already been instrumented.
If not, the statement is instrumented and each memory references that
are actually instrumented are added to the hash table. When a memory
region is accessed (usually through builtin functions like memset),
then what gets added to the hash table is actually two memory
accesses: one for the beginning of the region, and the other for the
its end.
When the patch crosses a function call that is not a built-in function
that we ought to instrument, the hash table is cleared, because that
function call can possibly e.g free some memory that was instrumented.
Likewise, when a new basic block is visited, the hash table is
cleared. I guess we could be smarter than just unconditionally
clearing the hash table in this later case, but this is what asan@llvm
does, and for now, I thought starting in a conservative manner might
have some value.
The hash table is destroyed at the end of the pass.
Bootstrapped and tested against trunk on x86-64-unknown-linux-gnu.
gcc/
* Makefile.in (asan.o): Add new dependency on hash-table.h
* asan.c (struct asan_mem_ref, struct mem_ref_hasher): New types.
(asan_mem_ref_init, asan_mem_ref_get_end, get_mem_ref_hash_table)
(has_stmt_been_instrumented_p, empty_mem_ref_hash_table)
(free_mem_ref_resources, has_mem_ref_been_instrumented)
(has_stmt_been_instrumented_p, update_mem_ref_hash_table)
(get_mem_ref_of_assignment): New functions.
(get_mem_refs_of_builtin_call): Extract from
instrument_builtin_call and tweak a little bit to make it fit with
the new signature.
(instrument_builtin_call): Use the new
get_mem_refs_of_builtin_call. Use gimple_call_builtin_p instead
of is_gimple_builtin_call.
(instrument_derefs, instrument_mem_region_access): Insert the
instrumented memory reference into the hash table.
(maybe_instrument_assignment): Renamed instrument_assignment into
this, and change it to advance the iterator when instrumentation
actually happened and return true in that case. This makes it
homogeneous with maybe_instrument_assignment, and thus give a
chance to callers to be more 'regular'.
(transform_statements): Clear the memory reference hash table
whenever we enter a new BB, when we cross a function call, or when
we are done transforming statements. Use
maybe_instrument_assignment instead of instrumentation. No more
need to special case maybe_instrument_assignment and advance the
iterator after calling it; it's now handled just like
maybe_instrument_call. Update comment.
gcc/testsuite/
* c-c++-common/asan/no-redundant-instrumentation-1.c: New test.
* testsuite/c-c++-common/asan/no-redundant-instrumentation-2.c: Likewise.
* testsuite/c-c++-common/asan/no-redundant-instrumentation-3.c: Likewise.
* testsuite/c-c++-common/asan/inc.c: Likewise.
From-SVN: r196008
2013-02-13 11:32:26 +01:00
|
|
|
{
|
|
|
|
ref_expr = gimple_assign_lhs (s);
|
|
|
|
is_store = true;
|
|
|
|
instrument_derefs (iter, ref_expr,
|
|
|
|
gimple_location (s),
|
|
|
|
is_store);
|
|
|
|
is_instrumented = true;
|
|
|
|
}
|
2015-06-01 14:37:26 +02:00
|
|
|
|
2012-12-03 14:57:29 +01:00
|
|
|
if (gimple_assign_load_p (s))
|
[asan] Avoid instrumenting duplicated memory access in the same basic block
Like what Address Sanitizer does in LLVM, this patch avoids instrumented
duplicated memory accesses in the same basic blocks.
The approach taken is very conservative, to keep the pass simple, for
a start.
A memory access is considered to be a pair made of an expression tree
representing the beginning of the memory region that is accessed and
a the size of the access, in byte. For now that size is either 1, 2,
4, 8 or 16 bytes.
The patch builds a hash table of the memory accesses that have been
instrumented in the current basic block. Then it walks the gimple
statements of the current basic block. For each statement, it tests
if the memory regions it references have already been instrumented.
If not, the statement is instrumented and each memory references that
are actually instrumented are added to the hash table. When a memory
region is accessed (usually through builtin functions like memset),
then what gets added to the hash table is actually two memory
accesses: one for the beginning of the region, and the other for the
its end.
When the patch crosses a function call that is not a built-in function
that we ought to instrument, the hash table is cleared, because that
function call can possibly e.g free some memory that was instrumented.
Likewise, when a new basic block is visited, the hash table is
cleared. I guess we could be smarter than just unconditionally
clearing the hash table in this later case, but this is what asan@llvm
does, and for now, I thought starting in a conservative manner might
have some value.
The hash table is destroyed at the end of the pass.
Bootstrapped and tested against trunk on x86-64-unknown-linux-gnu.
gcc/
* Makefile.in (asan.o): Add new dependency on hash-table.h
* asan.c (struct asan_mem_ref, struct mem_ref_hasher): New types.
(asan_mem_ref_init, asan_mem_ref_get_end, get_mem_ref_hash_table)
(has_stmt_been_instrumented_p, empty_mem_ref_hash_table)
(free_mem_ref_resources, has_mem_ref_been_instrumented)
(has_stmt_been_instrumented_p, update_mem_ref_hash_table)
(get_mem_ref_of_assignment): New functions.
(get_mem_refs_of_builtin_call): Extract from
instrument_builtin_call and tweak a little bit to make it fit with
the new signature.
(instrument_builtin_call): Use the new
get_mem_refs_of_builtin_call. Use gimple_call_builtin_p instead
of is_gimple_builtin_call.
(instrument_derefs, instrument_mem_region_access): Insert the
instrumented memory reference into the hash table.
(maybe_instrument_assignment): Renamed instrument_assignment into
this, and change it to advance the iterator when instrumentation
actually happened and return true in that case. This makes it
homogeneous with maybe_instrument_assignment, and thus give a
chance to callers to be more 'regular'.
(transform_statements): Clear the memory reference hash table
whenever we enter a new BB, when we cross a function call, or when
we are done transforming statements. Use
maybe_instrument_assignment instead of instrumentation. No more
need to special case maybe_instrument_assignment and advance the
iterator after calling it; it's now handled just like
maybe_instrument_call. Update comment.
gcc/testsuite/
* c-c++-common/asan/no-redundant-instrumentation-1.c: New test.
* testsuite/c-c++-common/asan/no-redundant-instrumentation-2.c: Likewise.
* testsuite/c-c++-common/asan/no-redundant-instrumentation-3.c: Likewise.
* testsuite/c-c++-common/asan/inc.c: Likewise.
From-SVN: r196008
2013-02-13 11:32:26 +01:00
|
|
|
{
|
|
|
|
ref_expr = gimple_assign_rhs1 (s);
|
|
|
|
is_store = false;
|
|
|
|
instrument_derefs (iter, ref_expr,
|
|
|
|
gimple_location (s),
|
|
|
|
is_store);
|
|
|
|
is_instrumented = true;
|
|
|
|
}
|
|
|
|
|
|
|
|
if (is_instrumented)
|
|
|
|
gsi_next (iter);
|
|
|
|
|
|
|
|
return is_instrumented;
|
Instrument built-in memory access function calls
This patch instruments many memory access patterns through builtins.
Basically, for a call like:
__builtin_memset (from, 0, n_bytes);
the patch would only instrument the accesses at the beginning and at
the end of the memory region [from, from + n_bytes]. This is the
strategy used by the llvm implementation of asan.
This instrumentation is done for all the memory access builtin
functions that expose a well specified memory region -- one that
explicitly states the number of bytes accessed in the region.
A special treatment is used for __builtin_strlen. The patch
instruments the access to the first byte of its argument, as well as
the access to the byte (of the argument) at the offset returned by
strlen.
For the __sync_* and __atomic* calls the patch instruments the access
to the bytes pointed to by the argument.
While doing this, I have added a new parameter to build_check_stmt to
decide whether to insert the instrumentation code before or after the
statement iterator. This allows us to do away with the
gsi_{next,prev} dance we were doing in the callers of this function.
Tested by running cc1 -fasan on variations of simple programs like:
int
foo ()
{
char foo[10] = {0};
foo[0] = 't';
foo[1] = 'e';
foo[2] = 's';
foo[3] = 't';
int l = __builtin_strlen (foo);
int n = sizeof (foo);
__builtin_memset (&foo[4], 0, n - 4);
__sync_fetch_and_add (&foo[11], 1);
return l;
}
and by starring at the gimple output which for this function is:
;; Function foo (foo, funcdef_no=0, decl_uid=1714, cgraph_uid=0)
foo ()
{
int n;
int l;
char foo[10];
int D.1725;
char * D.1724;
int D.1723;
long unsigned int D.1722;
int D.1721;
long unsigned int D.1720;
long unsigned int _1;
int _4;
long unsigned int _5;
int _6;
char * _7;
int _8;
char * _9;
unsigned long _10;
unsigned long _11;
unsigned long _12;
signed char * _13;
signed char _14;
_Bool _15;
unsigned long _16;
signed char _17;
_Bool _18;
_Bool _19;
char * _20;
unsigned long _21;
unsigned long _22;
unsigned long _23;
signed char * _24;
signed char _25;
_Bool _26;
unsigned long _27;
signed char _28;
_Bool _29;
_Bool _30;
char * _31;
unsigned long _32;
unsigned long _33;
unsigned long _34;
signed char * _35;
signed char _36;
_Bool _37;
unsigned long _38;
signed char _39;
_Bool _40;
_Bool _41;
char * _42;
unsigned long _43;
unsigned long _44;
unsigned long _45;
signed char * _46;
signed char _47;
_Bool _48;
unsigned long _49;
signed char _50;
_Bool _51;
_Bool _52;
char * _53;
unsigned long _54;
unsigned long _55;
unsigned long _56;
signed char * _57;
signed char _58;
_Bool _59;
unsigned long _60;
signed char _61;
_Bool _62;
_Bool _63;
char[10] * _64;
unsigned long _65;
unsigned long _66;
unsigned long _67;
signed char * _68;
signed char _69;
_Bool _70;
unsigned long _71;
signed char _72;
_Bool _73;
_Bool _74;
unsigned long _75;
unsigned long _76;
unsigned long _77;
signed char * _78;
signed char _79;
_Bool _80;
unsigned long _81;
signed char _82;
_Bool _83;
_Bool _84;
long unsigned int _85;
long unsigned int _86;
char * _87;
char * _88;
unsigned long _89;
unsigned long _90;
unsigned long _91;
signed char * _92;
signed char _93;
_Bool _94;
unsigned long _95;
signed char _96;
_Bool _97;
_Bool _98;
char * _99;
unsigned long _100;
unsigned long _101;
unsigned long _102;
signed char * _103;
signed char _104;
_Bool _105;
unsigned long _106;
signed char _107;
_Bool _108;
_Bool _109;
<bb 2>:
foo = {};
_9 = &foo[0];
_10 = (unsigned long) _9;
_11 = _10 >> 3;
_12 = _11 + 17592186044416;
_13 = (signed char *) _12;
_14 = *_13;
_15 = _14 != 0;
_16 = _10 & 7;
_17 = (signed char) _16;
_18 = _17 >= _14;
_19 = _15 & _18;
if (_19 != 0)
goto <bb 5>;
else
goto <bb 4>;
<bb 5>:
__asan_report_store1 (_10);
<bb 4>:
foo[0] = 116;
_20 = &foo[1];
_21 = (unsigned long) _20;
_22 = _21 >> 3;
_23 = _22 + 17592186044416;
_24 = (signed char *) _23;
_25 = *_24;
_26 = _25 != 0;
_27 = _21 & 7;
_28 = (signed char) _27;
_29 = _28 >= _25;
_30 = _26 & _29;
if (_30 != 0)
goto <bb 7>;
else
goto <bb 6>;
<bb 7>:
__asan_report_store1 (_21);
<bb 6>:
foo[1] = 101;
_31 = &foo[2];
_32 = (unsigned long) _31;
_33 = _32 >> 3;
_34 = _33 + 17592186044416;
_35 = (signed char *) _34;
_36 = *_35;
_37 = _36 != 0;
_38 = _32 & 7;
_39 = (signed char) _38;
_40 = _39 >= _36;
_41 = _37 & _40;
if (_41 != 0)
goto <bb 9>;
else
goto <bb 8>;
<bb 9>:
__asan_report_store1 (_32);
<bb 8>:
foo[2] = 115;
_42 = &foo[3];
_43 = (unsigned long) _42;
_44 = _43 >> 3;
_45 = _44 + 17592186044416;
_46 = (signed char *) _45;
_47 = *_46;
_48 = _47 != 0;
_49 = _43 & 7;
_50 = (signed char) _49;
_51 = _50 >= _47;
_52 = _48 & _51;
if (_52 != 0)
goto <bb 11>;
else
goto <bb 10>;
<bb 11>:
__asan_report_store1 (_43);
<bb 10>:
foo[3] = 116;
_53 = (char *) &foo;
_54 = (unsigned long) _53;
_55 = _54 >> 3;
_56 = _55 + 17592186044416;
_57 = (signed char *) _56;
_58 = *_57;
_59 = _58 != 0;
_60 = _54 & 7;
_61 = (signed char) _60;
_62 = _61 >= _58;
_63 = _59 & _62;
if (_63 != 0)
goto <bb 13>;
else
goto <bb 12>;
<bb 13>:
__asan_report_load1 (_54);
<bb 12>:
_1 = __builtin_strlen (&foo);
_64 = _53 + _1;
_65 = (unsigned long) _64;
_66 = _65 >> 3;
_67 = _66 + 17592186044416;
_68 = (signed char *) _67;
_69 = *_68;
_70 = _69 != 0;
_71 = _65 & 7;
_72 = (signed char) _71;
_73 = _72 >= _69;
_74 = _70 & _73;
if (_74 != 0)
goto <bb 15>;
else
goto <bb 14>;
<bb 15>:
__asan_report_load1 (_65);
<bb 14>:
l_2 = (int) _1;
n_3 = 10;
_4 = n_3 + -4;
_5 = (long unsigned int) _4;
_6 = l_2 + 1;
_7 = &foo[_6];
if (_5 != 0)
goto <bb 17>;
else
goto <bb 16>;
<bb 17>:
_75 = (unsigned long) _7;
_76 = _75 >> 3;
_77 = _76 + 17592186044416;
_78 = (signed char *) _77;
_79 = *_78;
_80 = _79 != 0;
_81 = _75 & 7;
_82 = (signed char) _81;
_83 = _82 >= _79;
_84 = _80 & _83;
_85 = _5;
_86 = _85 - 1;
_87 = _7;
_88 = _87 + _86;
_89 = (unsigned long) _88;
_90 = _89 >> 3;
_91 = _90 + 17592186044416;
_92 = (signed char *) _91;
_93 = *_92;
_94 = _93 != 0;
_95 = _89 & 7;
_96 = (signed char) _95;
_97 = _96 >= _93;
_98 = _94 & _97;
if (_98 != 0)
goto <bb 21>;
else
goto <bb 20>;
<bb 21>:
__asan_report_store1 (_89);
<bb 20>:
if (_84 != 0)
goto <bb 19>;
else
goto <bb 18>;
<bb 19>:
__asan_report_store1 (_75);
<bb 18>:
<bb 16>:
__builtin_memset (_7, 0, _5);
_99 = &foo[11];
_100 = (unsigned long) _99;
_101 = _100 >> 3;
_102 = _101 + 17592186044416;
_103 = (signed char *) _102;
_104 = *_103;
_105 = _104 != 0;
_106 = _100 & 7;
_107 = (signed char) _106;
_108 = _107 >= _104;
_109 = _105 & _108;
if (_109 != 0)
goto <bb 23>;
else
goto <bb 22>;
<bb 23>:
__asan_report_store1 (_100);
<bb 22>:
__sync_fetch_and_add_1 (&foo[11], 1);
_8 = l_2;
foo ={v} {CLOBBER};
<L1>:
return _8;
}
;; Function _GLOBAL__sub_I_00099_0_foo (_GLOBAL__sub_I_00099_0_foo, funcdef_no=1, decl_uid=1752, cgraph_uid=4)
_GLOBAL__sub_I_00099_0_foo ()
{
<bb 2>:
__asan_init ();
return;
}
gcc/
* gimple.h (is_gimple_builtin_call): Declare ...
* gimple.c (is_gimple_builtin_call): ... New public function.
* asan.c (insert_if_then_before_iter, instrument_mem_region_access,
instrument_strlen_call, maybe_instrument_builtin_call,
instrument_call): New static functions.
(create_cond_insert_point): Renamed
create_cond_insert_point_before_iter into this. Add a new
parameter to decide whether to insert the condition before or
after the statement iterator.
(build_check_stmt): Adjust for the new create_cond_insert_point.
Add a new parameter to decide whether to add the instrumentation
code before or after the statement iterator.
(instrument_assignment): Factorize from ...
(transform_statements): ... here. Use maybe_instrument_call to
instrument builtin function calls as well.
(instrument_derefs): Adjust for the new parameter of
build_check_stmt. Fix detection of bit-field access.
From-SVN: r193440
2012-11-12 16:53:25 +01:00
|
|
|
}
|
|
|
|
|
|
|
|
/* Instrument the function call pointed to by the iterator ITER, if it
|
|
|
|
is subject to instrumentation. At the moment, the only function
|
|
|
|
calls that are instrumented are some built-in functions that access
|
|
|
|
memory. Look at instrument_builtin_call to learn more.
|
|
|
|
|
|
|
|
Upon completion return TRUE iff *ITER was advanced to the statement
|
|
|
|
following the one it was originally pointing to. */
|
|
|
|
|
|
|
|
static bool
|
|
|
|
maybe_instrument_call (gimple_stmt_iterator *iter)
|
|
|
|
{
|
2015-09-20 02:52:59 +02:00
|
|
|
gimple *stmt = gsi_stmt (*iter);
|
[asan] Avoid instrumenting duplicated memory access in the same basic block
Like what Address Sanitizer does in LLVM, this patch avoids instrumented
duplicated memory accesses in the same basic blocks.
The approach taken is very conservative, to keep the pass simple, for
a start.
A memory access is considered to be a pair made of an expression tree
representing the beginning of the memory region that is accessed and
a the size of the access, in byte. For now that size is either 1, 2,
4, 8 or 16 bytes.
The patch builds a hash table of the memory accesses that have been
instrumented in the current basic block. Then it walks the gimple
statements of the current basic block. For each statement, it tests
if the memory regions it references have already been instrumented.
If not, the statement is instrumented and each memory references that
are actually instrumented are added to the hash table. When a memory
region is accessed (usually through builtin functions like memset),
then what gets added to the hash table is actually two memory
accesses: one for the beginning of the region, and the other for the
its end.
When the patch crosses a function call that is not a built-in function
that we ought to instrument, the hash table is cleared, because that
function call can possibly e.g free some memory that was instrumented.
Likewise, when a new basic block is visited, the hash table is
cleared. I guess we could be smarter than just unconditionally
clearing the hash table in this later case, but this is what asan@llvm
does, and for now, I thought starting in a conservative manner might
have some value.
The hash table is destroyed at the end of the pass.
Bootstrapped and tested against trunk on x86-64-unknown-linux-gnu.
gcc/
* Makefile.in (asan.o): Add new dependency on hash-table.h
* asan.c (struct asan_mem_ref, struct mem_ref_hasher): New types.
(asan_mem_ref_init, asan_mem_ref_get_end, get_mem_ref_hash_table)
(has_stmt_been_instrumented_p, empty_mem_ref_hash_table)
(free_mem_ref_resources, has_mem_ref_been_instrumented)
(has_stmt_been_instrumented_p, update_mem_ref_hash_table)
(get_mem_ref_of_assignment): New functions.
(get_mem_refs_of_builtin_call): Extract from
instrument_builtin_call and tweak a little bit to make it fit with
the new signature.
(instrument_builtin_call): Use the new
get_mem_refs_of_builtin_call. Use gimple_call_builtin_p instead
of is_gimple_builtin_call.
(instrument_derefs, instrument_mem_region_access): Insert the
instrumented memory reference into the hash table.
(maybe_instrument_assignment): Renamed instrument_assignment into
this, and change it to advance the iterator when instrumentation
actually happened and return true in that case. This makes it
homogeneous with maybe_instrument_assignment, and thus give a
chance to callers to be more 'regular'.
(transform_statements): Clear the memory reference hash table
whenever we enter a new BB, when we cross a function call, or when
we are done transforming statements. Use
maybe_instrument_assignment instead of instrumentation. No more
need to special case maybe_instrument_assignment and advance the
iterator after calling it; it's now handled just like
maybe_instrument_call. Update comment.
gcc/testsuite/
* c-c++-common/asan/no-redundant-instrumentation-1.c: New test.
* testsuite/c-c++-common/asan/no-redundant-instrumentation-2.c: Likewise.
* testsuite/c-c++-common/asan/no-redundant-instrumentation-3.c: Likewise.
* testsuite/c-c++-common/asan/inc.c: Likewise.
From-SVN: r196008
2013-02-13 11:32:26 +01:00
|
|
|
bool is_builtin = gimple_call_builtin_p (stmt, BUILT_IN_NORMAL);
|
|
|
|
|
|
|
|
if (is_builtin && instrument_builtin_call (iter))
|
2012-12-11 11:26:56 +01:00
|
|
|
return true;
|
[asan] Avoid instrumenting duplicated memory access in the same basic block
Like what Address Sanitizer does in LLVM, this patch avoids instrumented
duplicated memory accesses in the same basic blocks.
The approach taken is very conservative, to keep the pass simple, for
a start.
A memory access is considered to be a pair made of an expression tree
representing the beginning of the memory region that is accessed and
a the size of the access, in byte. For now that size is either 1, 2,
4, 8 or 16 bytes.
The patch builds a hash table of the memory accesses that have been
instrumented in the current basic block. Then it walks the gimple
statements of the current basic block. For each statement, it tests
if the memory regions it references have already been instrumented.
If not, the statement is instrumented and each memory references that
are actually instrumented are added to the hash table. When a memory
region is accessed (usually through builtin functions like memset),
then what gets added to the hash table is actually two memory
accesses: one for the beginning of the region, and the other for the
its end.
When the patch crosses a function call that is not a built-in function
that we ought to instrument, the hash table is cleared, because that
function call can possibly e.g free some memory that was instrumented.
Likewise, when a new basic block is visited, the hash table is
cleared. I guess we could be smarter than just unconditionally
clearing the hash table in this later case, but this is what asan@llvm
does, and for now, I thought starting in a conservative manner might
have some value.
The hash table is destroyed at the end of the pass.
Bootstrapped and tested against trunk on x86-64-unknown-linux-gnu.
gcc/
* Makefile.in (asan.o): Add new dependency on hash-table.h
* asan.c (struct asan_mem_ref, struct mem_ref_hasher): New types.
(asan_mem_ref_init, asan_mem_ref_get_end, get_mem_ref_hash_table)
(has_stmt_been_instrumented_p, empty_mem_ref_hash_table)
(free_mem_ref_resources, has_mem_ref_been_instrumented)
(has_stmt_been_instrumented_p, update_mem_ref_hash_table)
(get_mem_ref_of_assignment): New functions.
(get_mem_refs_of_builtin_call): Extract from
instrument_builtin_call and tweak a little bit to make it fit with
the new signature.
(instrument_builtin_call): Use the new
get_mem_refs_of_builtin_call. Use gimple_call_builtin_p instead
of is_gimple_builtin_call.
(instrument_derefs, instrument_mem_region_access): Insert the
instrumented memory reference into the hash table.
(maybe_instrument_assignment): Renamed instrument_assignment into
this, and change it to advance the iterator when instrumentation
actually happened and return true in that case. This makes it
homogeneous with maybe_instrument_assignment, and thus give a
chance to callers to be more 'regular'.
(transform_statements): Clear the memory reference hash table
whenever we enter a new BB, when we cross a function call, or when
we are done transforming statements. Use
maybe_instrument_assignment instead of instrumentation. No more
need to special case maybe_instrument_assignment and advance the
iterator after calling it; it's now handled just like
maybe_instrument_call. Update comment.
gcc/testsuite/
* c-c++-common/asan/no-redundant-instrumentation-1.c: New test.
* testsuite/c-c++-common/asan/no-redundant-instrumentation-2.c: Likewise.
* testsuite/c-c++-common/asan/no-redundant-instrumentation-3.c: Likewise.
* testsuite/c-c++-common/asan/inc.c: Likewise.
From-SVN: r196008
2013-02-13 11:32:26 +01:00
|
|
|
|
2012-12-11 11:26:56 +01:00
|
|
|
if (gimple_call_noreturn_p (stmt))
|
|
|
|
{
|
|
|
|
if (is_builtin)
|
|
|
|
{
|
|
|
|
tree callee = gimple_call_fndecl (stmt);
|
|
|
|
switch (DECL_FUNCTION_CODE (callee))
|
|
|
|
{
|
|
|
|
case BUILT_IN_UNREACHABLE:
|
|
|
|
case BUILT_IN_TRAP:
|
|
|
|
/* Don't instrument these. */
|
|
|
|
return false;
|
2014-09-24 19:23:56 +02:00
|
|
|
default:
|
|
|
|
break;
|
2012-12-11 11:26:56 +01:00
|
|
|
}
|
|
|
|
}
|
|
|
|
tree decl = builtin_decl_implicit (BUILT_IN_ASAN_HANDLE_NO_RETURN);
|
2015-09-20 02:52:59 +02:00
|
|
|
gimple *g = gimple_build_call (decl, 0);
|
2012-12-11 11:26:56 +01:00
|
|
|
gimple_set_location (g, gimple_location (stmt));
|
|
|
|
gsi_insert_before (iter, g, GSI_SAME_STMT);
|
|
|
|
}
|
2016-02-04 12:50:40 +01:00
|
|
|
|
2016-04-08 12:46:13 +02:00
|
|
|
bool instrumented = false;
|
2016-02-04 12:50:40 +01:00
|
|
|
if (gimple_store_p (stmt))
|
|
|
|
{
|
|
|
|
tree ref_expr = gimple_call_lhs (stmt);
|
|
|
|
instrument_derefs (iter, ref_expr,
|
|
|
|
gimple_location (stmt),
|
|
|
|
/*is_store=*/true);
|
|
|
|
|
2016-04-08 12:46:13 +02:00
|
|
|
instrumented = true;
|
2016-02-04 12:50:40 +01:00
|
|
|
}
|
|
|
|
|
2016-04-08 12:46:13 +02:00
|
|
|
/* Walk through gimple_call arguments and check them id needed. */
|
|
|
|
unsigned args_num = gimple_call_num_args (stmt);
|
|
|
|
for (unsigned i = 0; i < args_num; ++i)
|
|
|
|
{
|
|
|
|
tree arg = gimple_call_arg (stmt, i);
|
|
|
|
/* If ARG is not a non-aggregate register variable, compiler in general
|
|
|
|
creates temporary for it and pass it as argument to gimple call.
|
|
|
|
But in some cases, e.g. when we pass by value a small structure that
|
|
|
|
fits to register, compiler can avoid extra overhead by pulling out
|
|
|
|
these temporaries. In this case, we should check the argument. */
|
|
|
|
if (!is_gimple_reg (arg) && !is_gimple_min_invariant (arg))
|
|
|
|
{
|
|
|
|
instrument_derefs (iter, arg,
|
|
|
|
gimple_location (stmt),
|
|
|
|
/*is_store=*/false);
|
|
|
|
instrumented = true;
|
|
|
|
}
|
|
|
|
}
|
|
|
|
if (instrumented)
|
|
|
|
gsi_next (iter);
|
|
|
|
return instrumented;
|
2012-11-12 16:51:13 +01:00
|
|
|
}
|
|
|
|
|
[asan] Avoid instrumenting duplicated memory access in the same basic block
Like what Address Sanitizer does in LLVM, this patch avoids instrumented
duplicated memory accesses in the same basic blocks.
The approach taken is very conservative, to keep the pass simple, for
a start.
A memory access is considered to be a pair made of an expression tree
representing the beginning of the memory region that is accessed and
a the size of the access, in byte. For now that size is either 1, 2,
4, 8 or 16 bytes.
The patch builds a hash table of the memory accesses that have been
instrumented in the current basic block. Then it walks the gimple
statements of the current basic block. For each statement, it tests
if the memory regions it references have already been instrumented.
If not, the statement is instrumented and each memory references that
are actually instrumented are added to the hash table. When a memory
region is accessed (usually through builtin functions like memset),
then what gets added to the hash table is actually two memory
accesses: one for the beginning of the region, and the other for the
its end.
When the patch crosses a function call that is not a built-in function
that we ought to instrument, the hash table is cleared, because that
function call can possibly e.g free some memory that was instrumented.
Likewise, when a new basic block is visited, the hash table is
cleared. I guess we could be smarter than just unconditionally
clearing the hash table in this later case, but this is what asan@llvm
does, and for now, I thought starting in a conservative manner might
have some value.
The hash table is destroyed at the end of the pass.
Bootstrapped and tested against trunk on x86-64-unknown-linux-gnu.
gcc/
* Makefile.in (asan.o): Add new dependency on hash-table.h
* asan.c (struct asan_mem_ref, struct mem_ref_hasher): New types.
(asan_mem_ref_init, asan_mem_ref_get_end, get_mem_ref_hash_table)
(has_stmt_been_instrumented_p, empty_mem_ref_hash_table)
(free_mem_ref_resources, has_mem_ref_been_instrumented)
(has_stmt_been_instrumented_p, update_mem_ref_hash_table)
(get_mem_ref_of_assignment): New functions.
(get_mem_refs_of_builtin_call): Extract from
instrument_builtin_call and tweak a little bit to make it fit with
the new signature.
(instrument_builtin_call): Use the new
get_mem_refs_of_builtin_call. Use gimple_call_builtin_p instead
of is_gimple_builtin_call.
(instrument_derefs, instrument_mem_region_access): Insert the
instrumented memory reference into the hash table.
(maybe_instrument_assignment): Renamed instrument_assignment into
this, and change it to advance the iterator when instrumentation
actually happened and return true in that case. This makes it
homogeneous with maybe_instrument_assignment, and thus give a
chance to callers to be more 'regular'.
(transform_statements): Clear the memory reference hash table
whenever we enter a new BB, when we cross a function call, or when
we are done transforming statements. Use
maybe_instrument_assignment instead of instrumentation. No more
need to special case maybe_instrument_assignment and advance the
iterator after calling it; it's now handled just like
maybe_instrument_call. Update comment.
gcc/testsuite/
* c-c++-common/asan/no-redundant-instrumentation-1.c: New test.
* testsuite/c-c++-common/asan/no-redundant-instrumentation-2.c: Likewise.
* testsuite/c-c++-common/asan/no-redundant-instrumentation-3.c: Likewise.
* testsuite/c-c++-common/asan/inc.c: Likewise.
From-SVN: r196008
2013-02-13 11:32:26 +01:00
|
|
|
/* Walk each instruction of all basic block and instrument those that
|
|
|
|
represent memory references: loads, stores, or function calls.
|
|
|
|
In a given basic block, this function avoids instrumenting memory
|
|
|
|
references that have already been instrumented. */
|
2012-11-12 16:51:13 +01:00
|
|
|
|
|
|
|
static void
|
|
|
|
transform_statements (void)
|
|
|
|
{
|
2013-02-13 21:47:39 +01:00
|
|
|
basic_block bb, last_bb = NULL;
|
2012-11-12 16:51:13 +01:00
|
|
|
gimple_stmt_iterator i;
|
Eliminate last_basic_block macro.
* basic-block.h (last_basic_block): Eliminate macro.
* asan.c (transform_statements): Eliminate use of last_basic_block
in favor of last_basic_block_for_fn, in order to make use of cfun
explicit.
* bb-reorder.c (copy_bb, reorder_basic_blocks): Likewise.
* bt-load.c (compute_defs_uses_and_gen, compute_kill, compute_out,
link_btr_uses, build_btr_def_use_webs, migrate_btr_defs): Likewise.
* cfg.c (compact_blocks): Likewise.
* cfganal.c (mark_dfs_back_edges,
control_dependences::control_dependences, post_order_compute,
pre_and_rev_post_order_compute_fn, dfs_enumerate_from, compute_idf,
single_pred_before_succ_order): Likewise.
* cfgbuild.c (make_edges): Likewise.
* cfgexpand.c (add_scope_conflicts, gimple_expand_cfg): Likewise.
* cfghooks.c (verify_flow_info): Likewise.
* cfgloop.c (verify_loop_structure): Likewise.
* cfgloopanal.c (just_once_each_iteration_p,
mark_irreducible_loops): Likewise.
* cfgloopmanip.c (fix_bb_placements, remove_path,
update_dominators_in_loop): Likewise.
* cfgrtl.c (create_basic_block_structure, rtl_create_basic_block,
break_superblocks, rtl_flow_call_edges_add): Likewise.
* config/epiphany/resolve-sw-modes.c (resolve_sw_modes): Likewise.
* config/frv/frv.c (frv_optimize_membar): Likewise.
* config/mips/mips.c (r10k_insert_cache_barriers): Likewise.
* config/spu/spu.c (spu_machine_dependent_reorg): Likewise.
* cprop.c (compute_local_properties, find_implicit_sets,
bypass_conditional_jumps, one_cprop_pass): Likewise.
* cse.c (cse_main): Likewise.
* df-core.c (rest_of_handle_df_initialize, df_worklist_dataflow,
df_analyze, df_grow_bb_info, df_compact_blocks): Likewise.
* df-problems.c (df_lr_verify_solution_start,
df_live_verify_solution_start, df_md_local_compute): Likewise.
* dominance.c (init_dom_info, calc_dfs_tree_nonrec, calc_dfs_tree,
calc_idoms): Likewise.
* domwalk.c (dom_walker::walk): Likewise.
* dse.c (dse_step0, dse_step3): Likewise.
* function.c (epilogue_done): Likewise.
* gcse.c (alloc_gcse_mem, compute_local_properties,
prune_insertions_deletions, compute_pre_data,
pre_expr_reaches_here_p, one_pre_gcse_pass,
compute_code_hoist_vbeinout, should_hoist_expr_to_dom, hoist_code,
one_code_hoisting_pass): Likewise.
* graph.c (draw_cfg_nodes_no_loops): Likewise.
* graphite-sese-to-poly.c (build_scop_bbs): Likewise.
* haifa-sched.c (unlink_bb_notes): Likewise.
* ipa-split.c (execute_split_functions): Likewise.
* ira-build.c (create_loop_tree_nodes,
remove_unnecessary_regions): Likewise.
* ira-emit.c (ira_emit): Likewise.
* ira.c (find_moveable_pseudos, ira): Likewise.
* lcm.c (compute_antinout_edge, compute_laterin,
compute_insert_delete, pre_edge_lcm, compute_available,
compute_nearerout, compute_rev_insert_delete,
pre_edge_rev_lcm): Likewise.
* loop-unroll.c (opt_info_start_duplication,
apply_opt_in_copies): Likewise.
* lower-subreg.c (decompose_multiword_subregs): Likewise.
* lra-lives.c (lra_create_live_ranges): Likewise.
* lra.c (lra): Likewise.
* mode-switching.c (optimize_mode_switching): Likewise.
* recog.c (split_all_insns): Likewise.
* regcprop.c (copyprop_hardreg_forward): Likewise.
* regrename.c (regrename_analyze): Likewise.
* reload1.c (reload): Likewise.
* resource.c (init_resource_info): Likewise.
* sched-rgn.c (haifa_find_rgns, extend_rgns, compute_trg_info,
realloc_bb_state_array, schedule_region, extend_regions): Likewise.
* sel-sched-ir.c (sel_extend_global_bb_info, extend_region_bb_info,
recompute_rev_top_order, sel_init_pipelining,
make_regions_from_the_rest): Likewise.
* store-motion.c (remove_reachable_equiv_notes,build_store_vectors)
Likewise.
* tracer.c (tail_duplicate): Likewise.
* trans-mem.c (tm_region_init, get_bb_regions_instrumented): Likewise.
* tree-cfg.c (create_bb, cleanup_dead_labels, gimple_dump_cfg,
gimple_flow_call_edges_add): Likewise.
* tree-cfgcleanup.c (split_bbs_on_noreturn_calls,
cleanup_tree_cfg_1): Likewise.
* tree-complex.c (tree_lower_complex): Likewise.
* tree-inline.c (copy_cfg_body): Likewise.
* tree-into-ssa.c (mark_phi_for_rewrite, rewrite_into_ssa,
prepare_def_site_for, update_ssa): Likewise.
* tree-ssa-dce.c (tree_dce_init, perform_tree_ssa_dce): Likewise.
* tree-ssa-dom.c (record_edge_info): Likewise.
* tree-ssa-live.c (new_tree_live_info, live_worklist): Likewise.
* tree-ssa-loop-im.c (fill_always_executed_in_1): Likewise.
* tree-ssa-loop-manip.c (copy_phi_node_args
gimple_duplicate_loop_to_header_edge): Likewise.
* tree-ssa-pre.c (compute_antic): Likewise.
* tree-ssa-propagate.c (ssa_prop_init): Likewise.
* tree-ssa-reassoc.c (init_reassoc): Likewise.
* tree-ssa-sccvn.c (init_scc_vn): Likewise.
* tree-ssa-tail-merge.c (init_worklist): Likewise.
* tree-ssa-uncprop.c (associate_equivalences_with_edges): Likewise.
* tree-stdarg.c (reachable_at_most_once): Likewise.
* tree-vrp.c (find_assert_locations): Likewise.
* var-tracking.c (vt_find_locations): Likewise.
From-SVN: r205826
2013-12-09 21:44:49 +01:00
|
|
|
int saved_last_basic_block = last_basic_block_for_fn (cfun);
|
2012-11-12 16:51:13 +01:00
|
|
|
|
Eliminate FOR_EACH_BB macro.
gcc/
* basic-block.h (FOR_EACH_BB): Eliminate macro.
* asan.c (transform_statements, execute_sanopt): Eliminate
use of FOR_EACH_BB in favor of FOR_EACH_BB_FN, to make use of cfun
explicit.
* auto-inc-dec.c (rest_of_handle_auto_inc_dec): Likewise.
* bb-reorder.c (find_rarely_executed_basic_blocks_and_crossing_edges,
set_edge_can_fallthru_flag, fix_up_fall_thru_edges,
fix_crossing_unconditional_branches, add_reg_crossing_jump_notes,
insert_section_boundary_note, rest_of_handle_reorder_blocks,
duplicate_computed_gotos): Likewise.
* cfg.c (clear_edges, compact_blocks, brief_dump_cfg): Likewise.
* cfganal.c (find_unreachable_blocks, add_noreturn_fake_exit_edges,
compute_dominance_frontiers_1, single_pred_before_succ_order): Likewise.
* cfgbuild.c (find_many_sub_basic_blocks): Likewise.
* cfgcleanup.c (try_optimize_cfg, delete_dead_jumptables): Likewise.
* cfgexpand.c (add_scope_conflicts, discover_nonconstant_array_refs):
Likewise.
* cfgloop.c (flow_loops_cfg_dump, get_loop_body, record_loop_exits,
verify_loop_structure): Likewise.
* cfgloopanal.c (mark_loop_exit_edges): Likewise.
* cfgrtl.c (compute_bb_for_insn, find_partition_fixes,
verify_hot_cold_block_grouping, purge_all_dead_edges,
fixup_abnormal_edges, record_effective_endpoints,
outof_cfg_layout_mode, fixup_reorder_chain, force_one_exit_fallthru,
break_superblocks): Likewise.
* cgraphbuild.c (build_cgraph_edges, rebuild_cgraph_edges,
cgraph_rebuild_references): Likewise.
* combine-stack-adj.c (combine_stack_adjustments): Likewise.
* combine.c (delete_noop_moves, create_log_links,
combine_instructions): Likewise.
* config/arm/arm.c (thumb1_reorg, thumb2_reorg): Likewise.
* config/bfin/bfin.c (bfin_gen_bundles, reorder_var_tracking_notes):
Likewise.
* config/c6x/c6x.c (c6x_gen_bundles, conditionalize_after_sched,
c6x_reorg): Likewise.
* config/epiphany/resolve-sw-modes.c (resolve_sw_modes): Likewise.
* config/frv/frv.c (frv_optimize_membar): Likewise.
* config/i386/i386.c (ix86_finalize_stack_realign_flags): Likewise.
* config/ia64/ia64.c (ia64_reorg): Likewise.
* config/mips/mips.c (mips_annotate_pic_calls): Likewise.
* config/picochip/picochip.c (reorder_var_tracking_notes): Likewise.
* config/rs6000/rs6000.c (rs6000_alloc_sdmode_stack_slot): Likewise.
* config/s390/s390.c (s390_regs_ever_clobbered): Likewise.
* config/sh/sh_treg_combine.cc (sh_treg_combine::execute): Likewise.
* config/spu/spu.c (spu_machine_dependent_reorg): Likewise.
* config/tilegx/tilegx.c (tilegx_gen_bundles,
reorder_var_tracking_notes): Likewise.
* config/tilepro/tilepro.c (tilepro_gen_bundles,
reorder_var_tracking_notes): Likewise.
* coverage.c (coverage_compute_cfg_checksum): Likewise.
* cprop.c (compute_hash_table_work, compute_cprop_data,
local_cprop_pass, find_implicit_sets): Likewise.
* cse.c (cse_condition_code_reg): Likewise.
* dce.c (prescan_insns_for_dce): Likewise.
* df-core.c (df_compact_blocks): Likewise.
* df-problems.c (df_word_lr_alloc): Likewise.
* df-scan.c (df_scan_start_dump, df_scan_blocks, df_insn_rescan_all,
df_update_entry_exit_and_calls): Likewise.
* dominance.c (calculate_dominance_info, verify_dominators,
debug_dominance_info): Likewise.
* dse.c (dse_step5_nospill): Likewise.
* except.c (finish_eh_generation): Likewise.
* final.c (compute_alignments): Likewise.
* function.c (thread_prologue_and_epilogue_insns,
rest_of_match_asm_constraints): Likewise.
* gcse.c (compute_hash_table_work, prune_expressions,
compute_pre_data, compute_code_hoist_vbeinout, hoist_code,
calculate_bb_reg_pressure, compute_ld_motion_mems): Likewise.
* gimple-iterator.c (gsi_commit_edge_inserts): Likewise.
* gimple-ssa-isolate-paths.c (find_implicit_erroneous_behaviour,
find_explicit_erroneous_behaviour): Likewise.
* graphite-sese-to-poly.c (rewrite_reductions_out_of_ssa,
rewrite_cross_bb_scalar_deps_out_of_ssa): Likewise.
* haifa-sched.c (haifa_sched_init): Likewise.
* hw-doloop.c (discover_loops, set_bb_indices, reorder_loops):
Likewise.
* ifcvt.c (if_convert): Likewise.
* init-regs.c (initialize_uninitialized_regs): Likewise.
* ipa-prop.c (ipcp_transform_function): Likewise.
* ipa-pure-const.c (analyze_function): Likewise.
* ipa-split.c (find_split_points, execute_split_functions): Likewise.
* ira-build.c (form_loop_tree): Likewise.
* ira-costs.c (find_costs_and_classes): Likewise.
* ira-emit.c (emit_moves, add_ranges_and_copies, ira_emit): Likewise.
* ira.c (decrease_live_ranges_number, compute_regs_asm_clobbered,
mark_elimination, update_equiv_regs, find_moveable_pseudos,
split_live_ranges_for_shrink_wrap, allocate_initial_values): Likewise.
* jump.c (mark_all_labels): Likewise.
* lcm.c (compute_laterin, compute_insert_delete, compute_available,
compute_nearerout, compute_rev_insert_delete): Likewise.
* loop-init.c (fix_loop_structure): Likewise.
* loop-invariant.c (calculate_loop_reg_pressure): Likewise.
* lower-subreg.c (decompose_multiword_subregs,
decompose_multiword_subregs): Likewise.
* lra-assigns.c (assign_by_spills): Likewise.
* lra-coalesce.c (lra_coalesce): Likewise.
* lra-constraints.c (lra_inheritance, remove_inheritance_pseudos):
Likewise.
* lra-eliminations.c (lra_init_elimination): Likewise.
* lra-spills.c (assign_spill_hard_regs, spill_pseudos,
lra_final_code_change): Likewise.
* lra.c (remove_scratches, check_rtl, has_nonexceptional_receiver,
update_inc_notes): Likewise.
* mcf.c (adjust_cfg_counts): Likewise.
* mode-switching.c (optimize_mode_switching): Likewise.
* modulo-sched.c (rest_of_handle_sms): Likewise.
* omp-low.c (optimize_omp_library_calls, expand_omp_taskreg,
expand_omp_target): Likewise.
* postreload-gcse.c (alloc_mem, compute_hash_table): Likewise.
* postreload.c (reload_cse_regs_1): Likewise.
* predict.c (strip_predict_hints, tree_bb_level_predictions,
tree_estimate_probability, expensive_function_p,
estimate_bb_frequencies, compute_function_frequency): Likewise.
* profile.c (is_inconsistent, compute_branch_probabilities,
branch_prob): Likewise.
* ree.c (find_removable_extensions): Likewise.
* reg-stack.c (compensate_edges, convert_regs, reg_to_stack): Likewise.
* regcprop.c (copyprop_hardreg_forward): Likewise.
* reginfo.c (init_subregs_of_mode): Likewise.
* regrename.c (regrename_analyze): Likewise.
* regstat.c (regstat_compute_ri, regstat_compute_calls_crossed):
Likewise.
* reload1.c (has_nonexceptional_receiver, reload,
calculate_elim_costs_all_insns): Likewise.
* resource.c (init_resource_info, free_resource_info): Likewise.
* sched-ebb.c (schedule_ebbs): Likewise.
* sched-rgn.c (is_cfg_nonregular, find_single_block_region,
haifa_find_rgns, sched_rgn_local_init): Likewise.
* sel-sched-dump.c (sel_dump_cfg_2): Likewise.
* sel-sched-ir.c (init_lv_sets, free_lv_sets,
make_regions_from_the_rest): Likewise.
* sese.c (build_sese_loop_nests, sese_build_liveouts): Likewise.
* stack-ptr-mod.c (notice_stack_pointer_modification): Likewise.
* store-motion.c (compute_store_table, build_store_vectors,
one_store_motion_pass): Likewise.
* tracer.c (tail_duplicate): Likewise.
* trans-mem.c (compute_transaction_bits): Likewise.
* tree-call-cdce.c (tree_call_cdce): Likewise.
* tree-cfg.c (replace_loop_annotate, factor_computed_gotos,
fold_cond_expr_cond, make_edges, assign_discriminators,
make_abnormal_goto_edges, cleanup_dead_labels, group_case_labels,
dump_cfg_stats, gimple_verify_flow_info, print_loop,
execute_fixup_cfg): Likewise.
* tree-cfgcleanup.c (cleanup_tree_cfg_1, merge_phi_nodes): Likewise.
* tree-complex.c (init_dont_simulate_again, tree_lower_complex):
Likewise.
* tree-dfa.c (collect_dfa_stats, dump_enumerated_decls): Likewise.
* tree-eh.c (execute_lower_resx, execute_lower_eh_dispatch,
mark_reachable_handlers): Likewise.
* tree-emutls.c (lower_emutls_function_body): Likewise.
* tree-if-conv.c (main_tree_if_conversion): Likewise.
* tree-inline.c (optimize_inline_calls): Likewise.
* tree-into-ssa.c (rewrite_into_ssa, update_ssa): Likewise.
* tree-nrv.c (tree_nrv, execute_return_slot_opt): Likewise.
* tree-object-size.c (compute_object_sizes): Likewise.
* tree-outof-ssa.c (eliminate_useless_phis, rewrite_trees,
insert_backedge_copies, tree_profiling): Likewise.
* tree-scalar-evolution.c (scev_const_prop): Likewise.
* tree-sra.c (scan_function, sra_modify_function_body,
propagate_dereference_distances, ipa_sra_modify_function_body,
convert_callers): Likewise.
* tree-ssa-ccp.c (ccp_initialize, execute_fold_all_builtins): Likewise.
* tree-ssa-coalesce.c (build_ssa_conflict_graph): Likewise.
create_outofssa_var_map, coalesce_partitions): Likewise.
* tree-ssa-copy.c (init_copy_prop): Likewise.
* tree-ssa-copyrename.c (rename_ssa_copies): Likewise.
* tree-ssa-dce.c (find_obviously_necessary_stmts,
eliminate_unnecessary_stmts): Likewise.
* tree-ssa-dom.c (free_all_edge_infos, tree_ssa_dominator_optimize):
Likewise.
* tree-ssa-forwprop.c (ssa_forward_propagate_and_combine): Likewise.
* tree-ssa-live.c (clear_unused_block_pointer, remove_unused_locals,
new_tree_live_info, calculate_live_on_exit, dump_live_info,
analyze_memory_references, fill_always_executed_in,
tree_ssa_lim_finalize): Likewise.
* tree-ssa-loop-manip.c (find_uses_to_rename, verify_loop_closed_ssa):
Likewise.
* tree-ssa-math-opts.c (execute_cse_reciprocals, execute_cse_sincos,
execute_optimize_bswap, execute_optimize_widening_mul): Likewise.
* tree-ssa-propagate.c (substitute_and_fold): Likewise.
* tree-ssa-structalias.c (compute_points_to_sets): Likewise.
* tree-ssa-tail-merge.c (find_same_succ, reset_cluster_vectors):
Likewise.
* tree-ssa-ter.c (find_replaceable_exprs): Likewise.
* tree-ssa-threadupdate.c (thread_through_all_blocks): Likewise.
* tree-ssa-uncprop.c (associate_equivalences_with_edges,
tree_ssa_uncprop): Likewise.
* tree-ssa-uninit.c (warn_uninitialized_vars,
execute_late_warn_uninitialized): Likewise.
* tree-ssa.c (verify_ssa, execute_update_addresses_taken): Likewise.
* tree-stdarg.c (check_all_va_list_escapes, execute_optimize_stdarg):
Likewise.
* tree-switch-conversion.c (do_switchconv): Likewise.
* tree-vect-generic.c (expand_vector_operations): Likewise.
* tree-vectorizer.c (adjust_simduid_builtins, note_simd_array_uses,
execute_vect_slp): Likewise.
* tree-vrp.c (check_all_array_refs, remove_range_assertions,
vrp_initialize, identify_jump_threads, instrument_memory_accesses):
Likewise.
* ubsan.c (ubsan_pass): Likewise.
* value-prof.c (verify_histograms, gimple_value_profile_transformations,
gimple_find_values_to_profile): Likewise.
* var-tracking.c (vt_find_locations, dump_dataflow_sets, vt_emit_notes,
vt_initialize, delete_debug_insns, vt_finalize): Likewise.
gcc/testsuite/
* g++.dg/plugin/selfassign.c (execute_warn_self_assign): Eliminate
use of FOR_EACH_BB in favor of FOR_EACH_BB_FN, to make use of cfun
explicit.
* gcc.dg/plugin/selfassign.c (execute_warn_self_assign): Likewise.
From-SVN: r205828
2013-12-09 22:06:06 +01:00
|
|
|
FOR_EACH_BB_FN (bb, cfun)
|
2012-11-12 16:51:13 +01:00
|
|
|
{
|
2013-02-13 21:47:39 +01:00
|
|
|
basic_block prev_bb = bb;
|
[asan] Avoid instrumenting duplicated memory access in the same basic block
Like what Address Sanitizer does in LLVM, this patch avoids instrumenting
duplicated memory accesses in the same basic block.
The approach taken is very conservative, to keep the pass simple, for
a start.
A memory access is considered to be a pair made of an expression tree
representing the beginning of the memory region that is accessed and
the size of the access, in bytes.  For now that size is either 1, 2,
4, 8 or 16 bytes.
The patch builds a hash table of the memory accesses that have been
instrumented in the current basic block. Then it walks the gimple
statements of the current basic block. For each statement, it tests
if the memory regions it references have already been instrumented.
If not, the statement is instrumented and each memory references that
are actually instrumented are added to the hash table. When a memory
region is accessed (usually through builtin functions like memset),
then what gets added to the hash table is actually two memory
accesses: one for the beginning of the region, and the other for the
its end.
When the patch crosses a function call that is not a built-in function
that we ought to instrument, the hash table is cleared, because that
function call can possibly e.g. free some memory that was instrumented.
Likewise, when a new basic block is visited, the hash table is
cleared. I guess we could be smarter than just unconditionally
clearing the hash table in this latter case, but this is what asan@llvm
does, and for now, I thought starting in a conservative manner might
have some value.
The hash table is destroyed at the end of the pass.
Bootstrapped and tested against trunk on x86-64-unknown-linux-gnu.
gcc/
* Makefile.in (asan.o): Add new dependency on hash-table.h
* asan.c (struct asan_mem_ref, struct mem_ref_hasher): New types.
(asan_mem_ref_init, asan_mem_ref_get_end, get_mem_ref_hash_table)
(has_stmt_been_instrumented_p, empty_mem_ref_hash_table)
(free_mem_ref_resources, has_mem_ref_been_instrumented)
(has_stmt_been_instrumented_p, update_mem_ref_hash_table)
(get_mem_ref_of_assignment): New functions.
(get_mem_refs_of_builtin_call): Extract from
instrument_builtin_call and tweak a little bit to make it fit with
the new signature.
(instrument_builtin_call): Use the new
get_mem_refs_of_builtin_call. Use gimple_call_builtin_p instead
of is_gimple_builtin_call.
(instrument_derefs, instrument_mem_region_access): Insert the
instrumented memory reference into the hash table.
(maybe_instrument_assignment): Renamed instrument_assignment into
this, and change it to advance the iterator when instrumentation
actually happened and return true in that case. This makes it
homogeneous with maybe_instrument_call, and thus gives a
chance to callers to be more 'regular'.
(transform_statements): Clear the memory reference hash table
whenever we enter a new BB, when we cross a function call, or when
we are done transforming statements. Use
maybe_instrument_assignment instead of instrument_assignment.  No more
need to special case maybe_instrument_assignment and advance the
iterator after calling it; it's now handled just like
maybe_instrument_call. Update comment.
gcc/testsuite/
* c-c++-common/asan/no-redundant-instrumentation-1.c: New test.
* c-c++-common/asan/no-redundant-instrumentation-2.c: Likewise.
* c-c++-common/asan/no-redundant-instrumentation-3.c: Likewise.
* c-c++-common/asan/inc.c: Likewise.
From-SVN: r196008
2013-02-13 11:32:26 +01:00
|
|
|
|
2012-11-12 16:51:13 +01:00
|
|
|
if (bb->index >= saved_last_basic_block) continue;
|
2013-02-13 21:47:39 +01:00
|
|
|
|
|
|
|
/* Flush the mem ref hash table, if current bb doesn't have
|
|
|
|
exactly one predecessor, or if that predecessor (skipping
|
|
|
|
over asan created basic blocks) isn't the last processed
|
|
|
|
basic block. Thus we effectively flush on extended basic
|
|
|
|
block boundaries. */
|
|
|
|
while (single_pred_p (prev_bb))
|
|
|
|
{
|
|
|
|
prev_bb = single_pred (prev_bb);
|
|
|
|
if (prev_bb->index < saved_last_basic_block)
|
|
|
|
break;
|
|
|
|
}
|
|
|
|
if (prev_bb != last_bb)
|
|
|
|
empty_mem_ref_hash_table ();
|
|
|
|
last_bb = bb;
|
|
|
|
|
Instrument built-in memory access function calls
This patch instruments many memory access patterns through builtins.
Basically, for a call like:
__builtin_memset (from, 0, n_bytes);
the patch would only instrument the accesses at the beginning and at
the end of the memory region [from, from + n_bytes]. This is the
strategy used by the llvm implementation of asan.
This instrumentation is done for all the memory access builtin
functions that expose a well specified memory region -- one that
explicitly states the number of bytes accessed in the region.
A special treatment is used for __builtin_strlen. The patch
instruments the access to the first byte of its argument, as well as
the access to the byte (of the argument) at the offset returned by
strlen.
For the __sync_* and __atomic* calls the patch instruments the access
to the bytes pointed to by the argument.
While doing this, I have added a new parameter to build_check_stmt to
decide whether to insert the instrumentation code before or after the
statement iterator. This allows us to do away with the
gsi_{next,prev} dance we were doing in the callers of this function.
Tested by running cc1 -fasan on variations of simple programs like:
int
foo ()
{
char foo[10] = {0};
foo[0] = 't';
foo[1] = 'e';
foo[2] = 's';
foo[3] = 't';
int l = __builtin_strlen (foo);
int n = sizeof (foo);
__builtin_memset (&foo[4], 0, n - 4);
__sync_fetch_and_add (&foo[11], 1);
return l;
}
and by staring at the gimple output which for this function is:
;; Function foo (foo, funcdef_no=0, decl_uid=1714, cgraph_uid=0)
foo ()
{
int n;
int l;
char foo[10];
int D.1725;
char * D.1724;
int D.1723;
long unsigned int D.1722;
int D.1721;
long unsigned int D.1720;
long unsigned int _1;
int _4;
long unsigned int _5;
int _6;
char * _7;
int _8;
char * _9;
unsigned long _10;
unsigned long _11;
unsigned long _12;
signed char * _13;
signed char _14;
_Bool _15;
unsigned long _16;
signed char _17;
_Bool _18;
_Bool _19;
char * _20;
unsigned long _21;
unsigned long _22;
unsigned long _23;
signed char * _24;
signed char _25;
_Bool _26;
unsigned long _27;
signed char _28;
_Bool _29;
_Bool _30;
char * _31;
unsigned long _32;
unsigned long _33;
unsigned long _34;
signed char * _35;
signed char _36;
_Bool _37;
unsigned long _38;
signed char _39;
_Bool _40;
_Bool _41;
char * _42;
unsigned long _43;
unsigned long _44;
unsigned long _45;
signed char * _46;
signed char _47;
_Bool _48;
unsigned long _49;
signed char _50;
_Bool _51;
_Bool _52;
char * _53;
unsigned long _54;
unsigned long _55;
unsigned long _56;
signed char * _57;
signed char _58;
_Bool _59;
unsigned long _60;
signed char _61;
_Bool _62;
_Bool _63;
char[10] * _64;
unsigned long _65;
unsigned long _66;
unsigned long _67;
signed char * _68;
signed char _69;
_Bool _70;
unsigned long _71;
signed char _72;
_Bool _73;
_Bool _74;
unsigned long _75;
unsigned long _76;
unsigned long _77;
signed char * _78;
signed char _79;
_Bool _80;
unsigned long _81;
signed char _82;
_Bool _83;
_Bool _84;
long unsigned int _85;
long unsigned int _86;
char * _87;
char * _88;
unsigned long _89;
unsigned long _90;
unsigned long _91;
signed char * _92;
signed char _93;
_Bool _94;
unsigned long _95;
signed char _96;
_Bool _97;
_Bool _98;
char * _99;
unsigned long _100;
unsigned long _101;
unsigned long _102;
signed char * _103;
signed char _104;
_Bool _105;
unsigned long _106;
signed char _107;
_Bool _108;
_Bool _109;
<bb 2>:
foo = {};
_9 = &foo[0];
_10 = (unsigned long) _9;
_11 = _10 >> 3;
_12 = _11 + 17592186044416;
_13 = (signed char *) _12;
_14 = *_13;
_15 = _14 != 0;
_16 = _10 & 7;
_17 = (signed char) _16;
_18 = _17 >= _14;
_19 = _15 & _18;
if (_19 != 0)
goto <bb 5>;
else
goto <bb 4>;
<bb 5>:
__asan_report_store1 (_10);
<bb 4>:
foo[0] = 116;
_20 = &foo[1];
_21 = (unsigned long) _20;
_22 = _21 >> 3;
_23 = _22 + 17592186044416;
_24 = (signed char *) _23;
_25 = *_24;
_26 = _25 != 0;
_27 = _21 & 7;
_28 = (signed char) _27;
_29 = _28 >= _25;
_30 = _26 & _29;
if (_30 != 0)
goto <bb 7>;
else
goto <bb 6>;
<bb 7>:
__asan_report_store1 (_21);
<bb 6>:
foo[1] = 101;
_31 = &foo[2];
_32 = (unsigned long) _31;
_33 = _32 >> 3;
_34 = _33 + 17592186044416;
_35 = (signed char *) _34;
_36 = *_35;
_37 = _36 != 0;
_38 = _32 & 7;
_39 = (signed char) _38;
_40 = _39 >= _36;
_41 = _37 & _40;
if (_41 != 0)
goto <bb 9>;
else
goto <bb 8>;
<bb 9>:
__asan_report_store1 (_32);
<bb 8>:
foo[2] = 115;
_42 = &foo[3];
_43 = (unsigned long) _42;
_44 = _43 >> 3;
_45 = _44 + 17592186044416;
_46 = (signed char *) _45;
_47 = *_46;
_48 = _47 != 0;
_49 = _43 & 7;
_50 = (signed char) _49;
_51 = _50 >= _47;
_52 = _48 & _51;
if (_52 != 0)
goto <bb 11>;
else
goto <bb 10>;
<bb 11>:
__asan_report_store1 (_43);
<bb 10>:
foo[3] = 116;
_53 = (char *) &foo;
_54 = (unsigned long) _53;
_55 = _54 >> 3;
_56 = _55 + 17592186044416;
_57 = (signed char *) _56;
_58 = *_57;
_59 = _58 != 0;
_60 = _54 & 7;
_61 = (signed char) _60;
_62 = _61 >= _58;
_63 = _59 & _62;
if (_63 != 0)
goto <bb 13>;
else
goto <bb 12>;
<bb 13>:
__asan_report_load1 (_54);
<bb 12>:
_1 = __builtin_strlen (&foo);
_64 = _53 + _1;
_65 = (unsigned long) _64;
_66 = _65 >> 3;
_67 = _66 + 17592186044416;
_68 = (signed char *) _67;
_69 = *_68;
_70 = _69 != 0;
_71 = _65 & 7;
_72 = (signed char) _71;
_73 = _72 >= _69;
_74 = _70 & _73;
if (_74 != 0)
goto <bb 15>;
else
goto <bb 14>;
<bb 15>:
__asan_report_load1 (_65);
<bb 14>:
l_2 = (int) _1;
n_3 = 10;
_4 = n_3 + -4;
_5 = (long unsigned int) _4;
_6 = l_2 + 1;
_7 = &foo[_6];
if (_5 != 0)
goto <bb 17>;
else
goto <bb 16>;
<bb 17>:
_75 = (unsigned long) _7;
_76 = _75 >> 3;
_77 = _76 + 17592186044416;
_78 = (signed char *) _77;
_79 = *_78;
_80 = _79 != 0;
_81 = _75 & 7;
_82 = (signed char) _81;
_83 = _82 >= _79;
_84 = _80 & _83;
_85 = _5;
_86 = _85 - 1;
_87 = _7;
_88 = _87 + _86;
_89 = (unsigned long) _88;
_90 = _89 >> 3;
_91 = _90 + 17592186044416;
_92 = (signed char *) _91;
_93 = *_92;
_94 = _93 != 0;
_95 = _89 & 7;
_96 = (signed char) _95;
_97 = _96 >= _93;
_98 = _94 & _97;
if (_98 != 0)
goto <bb 21>;
else
goto <bb 20>;
<bb 21>:
__asan_report_store1 (_89);
<bb 20>:
if (_84 != 0)
goto <bb 19>;
else
goto <bb 18>;
<bb 19>:
__asan_report_store1 (_75);
<bb 18>:
<bb 16>:
__builtin_memset (_7, 0, _5);
_99 = &foo[11];
_100 = (unsigned long) _99;
_101 = _100 >> 3;
_102 = _101 + 17592186044416;
_103 = (signed char *) _102;
_104 = *_103;
_105 = _104 != 0;
_106 = _100 & 7;
_107 = (signed char) _106;
_108 = _107 >= _104;
_109 = _105 & _108;
if (_109 != 0)
goto <bb 23>;
else
goto <bb 22>;
<bb 23>:
__asan_report_store1 (_100);
<bb 22>:
__sync_fetch_and_add_1 (&foo[11], 1);
_8 = l_2;
foo ={v} {CLOBBER};
<L1>:
return _8;
}
;; Function _GLOBAL__sub_I_00099_0_foo (_GLOBAL__sub_I_00099_0_foo, funcdef_no=1, decl_uid=1752, cgraph_uid=4)
_GLOBAL__sub_I_00099_0_foo ()
{
<bb 2>:
__asan_init ();
return;
}
gcc/
* gimple.h (is_gimple_builtin_call): Declare ...
* gimple.c (is_gimple_builtin_call): ... New public function.
* asan.c (insert_if_then_before_iter, instrument_mem_region_access,
instrument_strlen_call, maybe_instrument_builtin_call,
instrument_call): New static functions.
(create_cond_insert_point): Renamed
create_cond_insert_point_before_iter into this. Add a new
parameter to decide whether to insert the condition before or
after the statement iterator.
(build_check_stmt): Adjust for the new create_cond_insert_point.
Add a new parameter to decide whether to add the instrumentation
code before or after the statement iterator.
(instrument_assignment): Factorize from ...
(transform_statements): ... here. Use maybe_instrument_call to
instrument builtin function calls as well.
(instrument_derefs): Adjust for the new parameter of
build_check_stmt. Fix detection of bit-field access.
From-SVN: r193440
2012-11-12 16:53:25 +01:00
|
|
|
for (i = gsi_start_bb (bb); !gsi_end_p (i);)
|
2012-11-12 17:18:59 +01:00
|
|
|
{
|
2015-09-20 02:52:59 +02:00
|
|
|
gimple *s = gsi_stmt (i);
|
Instrument built-in memory access function calls
This patch instruments many memory access patterns through builtins.
Basically, for a call like:
__builtin_memset (from, 0, n_bytes);
the patch would only instrument the accesses at the beginning and at
the end of the memory region [from, from + n_bytes]. This is the
strategy used by the llvm implementation of asan.
This instrumentation is done for all the memory access builtin
functions that expose a well specified memory region -- one that
explicitly states the number of bytes accessed in the region.
A special treatment is used for __builtin_strlen. The patch
instruments the access to the first byte of its argument, as well as
the access to the byte (of the argument) at the offset returned by
strlen.
For the __sync_* and __atomic* calls the patch instruments the access
to the bytes pointed to by the argument.
While doing this, I have added a new parameter to build_check_stmt to
decide whether to insert the instrumentation code before or after the
statement iterator. This allows us to do away with the
gsi_{next,prev} dance we were doing in the callers of this function.
Tested by running cc1 -fasan on variations of simple programs like:
int
foo ()
{
char foo[10] = {0};
foo[0] = 't';
foo[1] = 'e';
foo[2] = 's';
foo[3] = 't';
int l = __builtin_strlen (foo);
int n = sizeof (foo);
__builtin_memset (&foo[4], 0, n - 4);
__sync_fetch_and_add (&foo[11], 1);
return l;
}
and by staring at the gimple output which for this function is:
;; Function foo (foo, funcdef_no=0, decl_uid=1714, cgraph_uid=0)
foo ()
{
int n;
int l;
char foo[10];
int D.1725;
char * D.1724;
int D.1723;
long unsigned int D.1722;
int D.1721;
long unsigned int D.1720;
long unsigned int _1;
int _4;
long unsigned int _5;
int _6;
char * _7;
int _8;
char * _9;
unsigned long _10;
unsigned long _11;
unsigned long _12;
signed char * _13;
signed char _14;
_Bool _15;
unsigned long _16;
signed char _17;
_Bool _18;
_Bool _19;
char * _20;
unsigned long _21;
unsigned long _22;
unsigned long _23;
signed char * _24;
signed char _25;
_Bool _26;
unsigned long _27;
signed char _28;
_Bool _29;
_Bool _30;
char * _31;
unsigned long _32;
unsigned long _33;
unsigned long _34;
signed char * _35;
signed char _36;
_Bool _37;
unsigned long _38;
signed char _39;
_Bool _40;
_Bool _41;
char * _42;
unsigned long _43;
unsigned long _44;
unsigned long _45;
signed char * _46;
signed char _47;
_Bool _48;
unsigned long _49;
signed char _50;
_Bool _51;
_Bool _52;
char * _53;
unsigned long _54;
unsigned long _55;
unsigned long _56;
signed char * _57;
signed char _58;
_Bool _59;
unsigned long _60;
signed char _61;
_Bool _62;
_Bool _63;
char[10] * _64;
unsigned long _65;
unsigned long _66;
unsigned long _67;
signed char * _68;
signed char _69;
_Bool _70;
unsigned long _71;
signed char _72;
_Bool _73;
_Bool _74;
unsigned long _75;
unsigned long _76;
unsigned long _77;
signed char * _78;
signed char _79;
_Bool _80;
unsigned long _81;
signed char _82;
_Bool _83;
_Bool _84;
long unsigned int _85;
long unsigned int _86;
char * _87;
char * _88;
unsigned long _89;
unsigned long _90;
unsigned long _91;
signed char * _92;
signed char _93;
_Bool _94;
unsigned long _95;
signed char _96;
_Bool _97;
_Bool _98;
char * _99;
unsigned long _100;
unsigned long _101;
unsigned long _102;
signed char * _103;
signed char _104;
_Bool _105;
unsigned long _106;
signed char _107;
_Bool _108;
_Bool _109;
<bb 2>:
foo = {};
_9 = &foo[0];
_10 = (unsigned long) _9;
_11 = _10 >> 3;
_12 = _11 + 17592186044416;
_13 = (signed char *) _12;
_14 = *_13;
_15 = _14 != 0;
_16 = _10 & 7;
_17 = (signed char) _16;
_18 = _17 >= _14;
_19 = _15 & _18;
if (_19 != 0)
goto <bb 5>;
else
goto <bb 4>;
<bb 5>:
__asan_report_store1 (_10);
<bb 4>:
foo[0] = 116;
_20 = &foo[1];
_21 = (unsigned long) _20;
_22 = _21 >> 3;
_23 = _22 + 17592186044416;
_24 = (signed char *) _23;
_25 = *_24;
_26 = _25 != 0;
_27 = _21 & 7;
_28 = (signed char) _27;
_29 = _28 >= _25;
_30 = _26 & _29;
if (_30 != 0)
goto <bb 7>;
else
goto <bb 6>;
<bb 7>:
__asan_report_store1 (_21);
<bb 6>:
foo[1] = 101;
_31 = &foo[2];
_32 = (unsigned long) _31;
_33 = _32 >> 3;
_34 = _33 + 17592186044416;
_35 = (signed char *) _34;
_36 = *_35;
_37 = _36 != 0;
_38 = _32 & 7;
_39 = (signed char) _38;
_40 = _39 >= _36;
_41 = _37 & _40;
if (_41 != 0)
goto <bb 9>;
else
goto <bb 8>;
<bb 9>:
__asan_report_store1 (_32);
<bb 8>:
foo[2] = 115;
_42 = &foo[3];
_43 = (unsigned long) _42;
_44 = _43 >> 3;
_45 = _44 + 17592186044416;
_46 = (signed char *) _45;
_47 = *_46;
_48 = _47 != 0;
_49 = _43 & 7;
_50 = (signed char) _49;
_51 = _50 >= _47;
_52 = _48 & _51;
if (_52 != 0)
goto <bb 11>;
else
goto <bb 10>;
<bb 11>:
__asan_report_store1 (_43);
<bb 10>:
foo[3] = 116;
_53 = (char *) &foo;
_54 = (unsigned long) _53;
_55 = _54 >> 3;
_56 = _55 + 17592186044416;
_57 = (signed char *) _56;
_58 = *_57;
_59 = _58 != 0;
_60 = _54 & 7;
_61 = (signed char) _60;
_62 = _61 >= _58;
_63 = _59 & _62;
if (_63 != 0)
goto <bb 13>;
else
goto <bb 12>;
<bb 13>:
__asan_report_load1 (_54);
<bb 12>:
_1 = __builtin_strlen (&foo);
_64 = _53 + _1;
_65 = (unsigned long) _64;
_66 = _65 >> 3;
_67 = _66 + 17592186044416;
_68 = (signed char *) _67;
_69 = *_68;
_70 = _69 != 0;
_71 = _65 & 7;
_72 = (signed char) _71;
_73 = _72 >= _69;
_74 = _70 & _73;
if (_74 != 0)
goto <bb 15>;
else
goto <bb 14>;
<bb 15>:
__asan_report_load1 (_65);
<bb 14>:
l_2 = (int) _1;
n_3 = 10;
_4 = n_3 + -4;
_5 = (long unsigned int) _4;
_6 = l_2 + 1;
_7 = &foo[_6];
if (_5 != 0)
goto <bb 17>;
else
goto <bb 16>;
<bb 17>:
_75 = (unsigned long) _7;
_76 = _75 >> 3;
_77 = _76 + 17592186044416;
_78 = (signed char *) _77;
_79 = *_78;
_80 = _79 != 0;
_81 = _75 & 7;
_82 = (signed char) _81;
_83 = _82 >= _79;
_84 = _80 & _83;
_85 = _5;
_86 = _85 - 1;
_87 = _7;
_88 = _87 + _86;
_89 = (unsigned long) _88;
_90 = _89 >> 3;
_91 = _90 + 17592186044416;
_92 = (signed char *) _91;
_93 = *_92;
_94 = _93 != 0;
_95 = _89 & 7;
_96 = (signed char) _95;
_97 = _96 >= _93;
_98 = _94 & _97;
if (_98 != 0)
goto <bb 21>;
else
goto <bb 20>;
<bb 21>:
__asan_report_store1 (_89);
<bb 20>:
if (_84 != 0)
goto <bb 19>;
else
goto <bb 18>;
<bb 19>:
__asan_report_store1 (_75);
<bb 18>:
<bb 16>:
__builtin_memset (_7, 0, _5);
_99 = &foo[11];
_100 = (unsigned long) _99;
_101 = _100 >> 3;
_102 = _101 + 17592186044416;
_103 = (signed char *) _102;
_104 = *_103;
_105 = _104 != 0;
_106 = _100 & 7;
_107 = (signed char) _106;
_108 = _107 >= _104;
_109 = _105 & _108;
if (_109 != 0)
goto <bb 23>;
else
goto <bb 22>;
<bb 23>:
__asan_report_store1 (_100);
<bb 22>:
__sync_fetch_and_add_1 (&foo[11], 1);
_8 = l_2;
foo ={v} {CLOBBER};
<L1>:
return _8;
}
;; Function _GLOBAL__sub_I_00099_0_foo (_GLOBAL__sub_I_00099_0_foo, funcdef_no=1, decl_uid=1752, cgraph_uid=4)
_GLOBAL__sub_I_00099_0_foo ()
{
<bb 2>:
__asan_init ();
return;
}
gcc/
* gimple.h (is_gimple_builtin_call): Declare ...
* gimple.c (is_gimple_builtin_call): ... New public function.
* asan.c (insert_if_then_before_iter, instrument_mem_region_access,
instrument_strlen_call, maybe_instrument_builtin_call,
instrument_call): New static functions.
(create_cond_insert_point): Renamed
create_cond_insert_point_before_iter into this. Add a new
parameter to decide whether to insert the condition before or
after the statement iterator.
(build_check_stmt): Adjust for the new create_cond_insert_point.
Add a new parameter to decide whether to add the instrumentation
code before or after the statement iterator.
(instrument_assignment): Factorize from ...
(transform_statements): ... here. Use maybe_instrument_call to
instrument builtin function calls as well.
(instrument_derefs): Adjust for the new parameter of
build_check_stmt. Fix detection of bit-field access.
From-SVN: r193440
2012-11-12 16:53:25 +01:00
|
|
|
|
[asan] Avoid instrumenting duplicated memory access in the same basic block
Like what Address Sanitizer does in LLVM, this patch avoids instrumenting
duplicated memory accesses in the same basic blocks.
The approach taken is very conservative, to keep the pass simple, for
a start.
A memory access is considered to be a pair made of an expression tree
representing the beginning of the memory region that is accessed and
the size of the access, in bytes.  For now that size is either 1, 2,
4, 8 or 16 bytes.
The patch builds a hash table of the memory accesses that have been
instrumented in the current basic block.  Then it walks the gimple
statements of the current basic block.  For each statement, it tests
if the memory regions it references have already been instrumented.
If not, the statement is instrumented and each memory reference that
is actually instrumented is added to the hash table.  When a memory
region is accessed (usually through builtin functions like memset),
then what gets added to the hash table is actually two memory
accesses: one for the beginning of the region, and the other for
its end.
When the patch crosses a function call that is not a built-in function
that we ought to instrument, the hash table is cleared, because that
function call can possibly e.g. free some memory that was instrumented.
Likewise, when a new basic block is visited, the hash table is
cleared.  I guess we could be smarter than just unconditionally
clearing the hash table in this latter case, but this is what asan@llvm
does, and for now, I thought starting in a conservative manner might
have some value.
The hash table is destroyed at the end of the pass.
Bootstrapped and tested against trunk on x86-64-unknown-linux-gnu.
gcc/
* Makefile.in (asan.o): Add new dependency on hash-table.h
* asan.c (struct asan_mem_ref, struct mem_ref_hasher): New types.
(asan_mem_ref_init, asan_mem_ref_get_end, get_mem_ref_hash_table)
(has_stmt_been_instrumented_p, empty_mem_ref_hash_table)
(free_mem_ref_resources, has_mem_ref_been_instrumented)
(has_stmt_been_instrumented_p, update_mem_ref_hash_table)
(get_mem_ref_of_assignment): New functions.
(get_mem_refs_of_builtin_call): Extract from
instrument_builtin_call and tweak a little bit to make it fit with
the new signature.
(instrument_builtin_call): Use the new
get_mem_refs_of_builtin_call. Use gimple_call_builtin_p instead
of is_gimple_builtin_call.
(instrument_derefs, instrument_mem_region_access): Insert the
instrumented memory reference into the hash table.
(maybe_instrument_assignment): Renamed instrument_assignment into
this, and change it to advance the iterator when instrumentation
actually happened and return true in that case.  This makes it
homogeneous with maybe_instrument_call, and thus gives a
chance to callers to be more 'regular'.
(transform_statements): Clear the memory reference hash table
whenever we enter a new BB, when we cross a function call, or when
we are done transforming statements.  Use
maybe_instrument_assignment instead of instrument_assignment.  No more
need to special case maybe_instrument_assignment and advance the
iterator after calling it; it's now handled just like
maybe_instrument_call.  Update comment.
gcc/testsuite/
* c-c++-common/asan/no-redundant-instrumentation-1.c: New test.
* testsuite/c-c++-common/asan/no-redundant-instrumentation-2.c: Likewise.
* testsuite/c-c++-common/asan/no-redundant-instrumentation-3.c: Likewise.
* testsuite/c-c++-common/asan/inc.c: Likewise.
From-SVN: r196008
2013-02-13 11:32:26 +01:00
|
|
|
if (has_stmt_been_instrumented_p (s))
|
|
|
|
gsi_next (&i);
|
|
|
|
else if (gimple_assign_single_p (s)
|
2014-09-18 16:08:28 +02:00
|
|
|
&& !gimple_clobber_p (s)
|
[asan] Avoid instrumenting duplicated memory access in the same basic block
Like what Address Sanitizer does in LLVM, this patch avoids instrumenting
duplicated memory accesses in the same basic blocks.
The approach taken is very conservative, to keep the pass simple, for
a start.
A memory access is considered to be a pair made of an expression tree
representing the beginning of the memory region that is accessed and
the size of the access, in bytes.  For now that size is either 1, 2,
4, 8 or 16 bytes.
The patch builds a hash table of the memory accesses that have been
instrumented in the current basic block.  Then it walks the gimple
statements of the current basic block.  For each statement, it tests
if the memory regions it references have already been instrumented.
If not, the statement is instrumented and each memory reference that
is actually instrumented is added to the hash table.  When a memory
region is accessed (usually through builtin functions like memset),
then what gets added to the hash table is actually two memory
accesses: one for the beginning of the region, and the other for
its end.
When the patch crosses a function call that is not a built-in function
that we ought to instrument, the hash table is cleared, because that
function call can possibly e.g. free some memory that was instrumented.
Likewise, when a new basic block is visited, the hash table is
cleared.  I guess we could be smarter than just unconditionally
clearing the hash table in this latter case, but this is what asan@llvm
does, and for now, I thought starting in a conservative manner might
have some value.
The hash table is destroyed at the end of the pass.
Bootstrapped and tested against trunk on x86-64-unknown-linux-gnu.
gcc/
* Makefile.in (asan.o): Add new dependency on hash-table.h
* asan.c (struct asan_mem_ref, struct mem_ref_hasher): New types.
(asan_mem_ref_init, asan_mem_ref_get_end, get_mem_ref_hash_table)
(has_stmt_been_instrumented_p, empty_mem_ref_hash_table)
(free_mem_ref_resources, has_mem_ref_been_instrumented)
(has_stmt_been_instrumented_p, update_mem_ref_hash_table)
(get_mem_ref_of_assignment): New functions.
(get_mem_refs_of_builtin_call): Extract from
instrument_builtin_call and tweak a little bit to make it fit with
the new signature.
(instrument_builtin_call): Use the new
get_mem_refs_of_builtin_call. Use gimple_call_builtin_p instead
of is_gimple_builtin_call.
(instrument_derefs, instrument_mem_region_access): Insert the
instrumented memory reference into the hash table.
(maybe_instrument_assignment): Renamed instrument_assignment into
this, and change it to advance the iterator when instrumentation
actually happened and return true in that case.  This makes it
homogeneous with maybe_instrument_call, and thus gives a
chance to callers to be more 'regular'.
(transform_statements): Clear the memory reference hash table
whenever we enter a new BB, when we cross a function call, or when
we are done transforming statements.  Use
maybe_instrument_assignment instead of instrument_assignment.  No more
need to special case maybe_instrument_assignment and advance the
iterator after calling it; it's now handled just like
maybe_instrument_call.  Update comment.
gcc/testsuite/
* c-c++-common/asan/no-redundant-instrumentation-1.c: New test.
* testsuite/c-c++-common/asan/no-redundant-instrumentation-2.c: Likewise.
* testsuite/c-c++-common/asan/no-redundant-instrumentation-3.c: Likewise.
* testsuite/c-c++-common/asan/inc.c: Likewise.
From-SVN: r196008
2013-02-13 11:32:26 +01:00
|
|
|
&& maybe_instrument_assignment (&i))
|
|
|
|
/* Nothing to do as maybe_instrument_assignment advanced
|
|
|
|
the iterator I. */;
|
|
|
|
else if (is_gimple_call (s) && maybe_instrument_call (&i))
|
|
|
|
/* Nothing to do as maybe_instrument_call
|
|
|
|
advanced the iterator I. */;
|
|
|
|
else
|
Instrument built-in memory access function calls
This patch instruments many memory access patterns through builtins.
Basically, for a call like:
__builtin_memset (from, 0, n_bytes);
the patch would only instrument the accesses at the beginning and at
the end of the memory region [from, from + n_bytes]. This is the
strategy used by the llvm implementation of asan.
This instrumentation is done for all the memory access builtin
functions that expose a well specified memory region -- one that
explicitly states the number of bytes accessed in the region.
A special treatment is used for __builtin_strlen. The patch
instruments the access to the first byte of its argument, as well as
the access to the byte (of the argument) at the offset returned by
strlen.
For the __sync_* and __atomic* calls the patch instruments the access
to the bytes pointed to by the argument.
While doing this, I have added a new parameter to build_check_stmt to
decide whether to insert the instrumentation code before or after the
statement iterator. This allows us to do away with the
gsi_{next,prev} dance we were doing in the callers of this function.
Tested by running cc1 -fasan on variations of simple programs like:
int
foo ()
{
char foo[10] = {0};
foo[0] = 't';
foo[1] = 'e';
foo[2] = 's';
foo[3] = 't';
int l = __builtin_strlen (foo);
int n = sizeof (foo);
__builtin_memset (&foo[4], 0, n - 4);
__sync_fetch_and_add (&foo[11], 1);
return l;
}
and by staring at the gimple output which for this function is:
;; Function foo (foo, funcdef_no=0, decl_uid=1714, cgraph_uid=0)
foo ()
{
int n;
int l;
char foo[10];
int D.1725;
char * D.1724;
int D.1723;
long unsigned int D.1722;
int D.1721;
long unsigned int D.1720;
long unsigned int _1;
int _4;
long unsigned int _5;
int _6;
char * _7;
int _8;
char * _9;
unsigned long _10;
unsigned long _11;
unsigned long _12;
signed char * _13;
signed char _14;
_Bool _15;
unsigned long _16;
signed char _17;
_Bool _18;
_Bool _19;
char * _20;
unsigned long _21;
unsigned long _22;
unsigned long _23;
signed char * _24;
signed char _25;
_Bool _26;
unsigned long _27;
signed char _28;
_Bool _29;
_Bool _30;
char * _31;
unsigned long _32;
unsigned long _33;
unsigned long _34;
signed char * _35;
signed char _36;
_Bool _37;
unsigned long _38;
signed char _39;
_Bool _40;
_Bool _41;
char * _42;
unsigned long _43;
unsigned long _44;
unsigned long _45;
signed char * _46;
signed char _47;
_Bool _48;
unsigned long _49;
signed char _50;
_Bool _51;
_Bool _52;
char * _53;
unsigned long _54;
unsigned long _55;
unsigned long _56;
signed char * _57;
signed char _58;
_Bool _59;
unsigned long _60;
signed char _61;
_Bool _62;
_Bool _63;
char[10] * _64;
unsigned long _65;
unsigned long _66;
unsigned long _67;
signed char * _68;
signed char _69;
_Bool _70;
unsigned long _71;
signed char _72;
_Bool _73;
_Bool _74;
unsigned long _75;
unsigned long _76;
unsigned long _77;
signed char * _78;
signed char _79;
_Bool _80;
unsigned long _81;
signed char _82;
_Bool _83;
_Bool _84;
long unsigned int _85;
long unsigned int _86;
char * _87;
char * _88;
unsigned long _89;
unsigned long _90;
unsigned long _91;
signed char * _92;
signed char _93;
_Bool _94;
unsigned long _95;
signed char _96;
_Bool _97;
_Bool _98;
char * _99;
unsigned long _100;
unsigned long _101;
unsigned long _102;
signed char * _103;
signed char _104;
_Bool _105;
unsigned long _106;
signed char _107;
_Bool _108;
_Bool _109;
<bb 2>:
foo = {};
_9 = &foo[0];
_10 = (unsigned long) _9;
_11 = _10 >> 3;
_12 = _11 + 17592186044416;
_13 = (signed char *) _12;
_14 = *_13;
_15 = _14 != 0;
_16 = _10 & 7;
_17 = (signed char) _16;
_18 = _17 >= _14;
_19 = _15 & _18;
if (_19 != 0)
goto <bb 5>;
else
goto <bb 4>;
<bb 5>:
__asan_report_store1 (_10);
<bb 4>:
foo[0] = 116;
_20 = &foo[1];
_21 = (unsigned long) _20;
_22 = _21 >> 3;
_23 = _22 + 17592186044416;
_24 = (signed char *) _23;
_25 = *_24;
_26 = _25 != 0;
_27 = _21 & 7;
_28 = (signed char) _27;
_29 = _28 >= _25;
_30 = _26 & _29;
if (_30 != 0)
goto <bb 7>;
else
goto <bb 6>;
<bb 7>:
__asan_report_store1 (_21);
<bb 6>:
foo[1] = 101;
_31 = &foo[2];
_32 = (unsigned long) _31;
_33 = _32 >> 3;
_34 = _33 + 17592186044416;
_35 = (signed char *) _34;
_36 = *_35;
_37 = _36 != 0;
_38 = _32 & 7;
_39 = (signed char) _38;
_40 = _39 >= _36;
_41 = _37 & _40;
if (_41 != 0)
goto <bb 9>;
else
goto <bb 8>;
<bb 9>:
__asan_report_store1 (_32);
<bb 8>:
foo[2] = 115;
_42 = &foo[3];
_43 = (unsigned long) _42;
_44 = _43 >> 3;
_45 = _44 + 17592186044416;
_46 = (signed char *) _45;
_47 = *_46;
_48 = _47 != 0;
_49 = _43 & 7;
_50 = (signed char) _49;
_51 = _50 >= _47;
_52 = _48 & _51;
if (_52 != 0)
goto <bb 11>;
else
goto <bb 10>;
<bb 11>:
__asan_report_store1 (_43);
<bb 10>:
foo[3] = 116;
_53 = (char *) &foo;
_54 = (unsigned long) _53;
_55 = _54 >> 3;
_56 = _55 + 17592186044416;
_57 = (signed char *) _56;
_58 = *_57;
_59 = _58 != 0;
_60 = _54 & 7;
_61 = (signed char) _60;
_62 = _61 >= _58;
_63 = _59 & _62;
if (_63 != 0)
goto <bb 13>;
else
goto <bb 12>;
<bb 13>:
__asan_report_load1 (_54);
<bb 12>:
_1 = __builtin_strlen (&foo);
_64 = _53 + _1;
_65 = (unsigned long) _64;
_66 = _65 >> 3;
_67 = _66 + 17592186044416;
_68 = (signed char *) _67;
_69 = *_68;
_70 = _69 != 0;
_71 = _65 & 7;
_72 = (signed char) _71;
_73 = _72 >= _69;
_74 = _70 & _73;
if (_74 != 0)
goto <bb 15>;
else
goto <bb 14>;
<bb 15>:
__asan_report_load1 (_65);
<bb 14>:
l_2 = (int) _1;
n_3 = 10;
_4 = n_3 + -4;
_5 = (long unsigned int) _4;
_6 = l_2 + 1;
_7 = &foo[_6];
if (_5 != 0)
goto <bb 17>;
else
goto <bb 16>;
<bb 17>:
_75 = (unsigned long) _7;
_76 = _75 >> 3;
_77 = _76 + 17592186044416;
_78 = (signed char *) _77;
_79 = *_78;
_80 = _79 != 0;
_81 = _75 & 7;
_82 = (signed char) _81;
_83 = _82 >= _79;
_84 = _80 & _83;
_85 = _5;
_86 = _85 - 1;
_87 = _7;
_88 = _87 + _86;
_89 = (unsigned long) _88;
_90 = _89 >> 3;
_91 = _90 + 17592186044416;
_92 = (signed char *) _91;
_93 = *_92;
_94 = _93 != 0;
_95 = _89 & 7;
_96 = (signed char) _95;
_97 = _96 >= _93;
_98 = _94 & _97;
if (_98 != 0)
goto <bb 21>;
else
goto <bb 20>;
<bb 21>:
__asan_report_store1 (_89);
<bb 20>:
if (_84 != 0)
goto <bb 19>;
else
goto <bb 18>;
<bb 19>:
__asan_report_store1 (_75);
<bb 18>:
<bb 16>:
__builtin_memset (_7, 0, _5);
_99 = &foo[11];
_100 = (unsigned long) _99;
_101 = _100 >> 3;
_102 = _101 + 17592186044416;
_103 = (signed char *) _102;
_104 = *_103;
_105 = _104 != 0;
_106 = _100 & 7;
_107 = (signed char) _106;
_108 = _107 >= _104;
_109 = _105 & _108;
if (_109 != 0)
goto <bb 23>;
else
goto <bb 22>;
<bb 23>:
__asan_report_store1 (_100);
<bb 22>:
__sync_fetch_and_add_1 (&foo[11], 1);
_8 = l_2;
foo ={v} {CLOBBER};
<L1>:
return _8;
}
;; Function _GLOBAL__sub_I_00099_0_foo (_GLOBAL__sub_I_00099_0_foo, funcdef_no=1, decl_uid=1752, cgraph_uid=4)
_GLOBAL__sub_I_00099_0_foo ()
{
<bb 2>:
__asan_init ();
return;
}
gcc/
* gimple.h (is_gimple_builtin_call): Declare ...
* gimple.c (is_gimple_builtin_call): ... New public function.
* asan.c (insert_if_then_before_iter, instrument_mem_region_access,
instrument_strlen_call, maybe_instrument_builtin_call,
instrument_call): New static functions.
(create_cond_insert_point): Renamed
create_cond_insert_point_before_iter into this. Add a new
parameter to decide whether to insert the condition before or
after the statement iterator.
(build_check_stmt): Adjust for the new create_cond_insert_point.
Add a new parameter to decide whether to add the instrumentation
code before or after the statement iterator.
(instrument_assignment): Factorize from ...
(transform_statements): ... here. Use maybe_instrument_call to
instrument builtin function calls as well.
(instrument_derefs): Adjust for the new parameter of
build_check_stmt. Fix detection of bit-field access.
From-SVN: r193440
2012-11-12 16:53:25 +01:00
|
|
|
{
|
[asan] Avoid instrumenting duplicated memory access in the same basic block
Like what Address Sanitizer does in LLVM, this patch avoids instrumenting
duplicated memory accesses in the same basic blocks.
The approach taken is very conservative, to keep the pass simple, for
a start.
A memory access is considered to be a pair made of an expression tree
representing the beginning of the memory region that is accessed and
the size of the access, in bytes.  For now that size is either 1, 2,
4, 8 or 16 bytes.
The patch builds a hash table of the memory accesses that have been
instrumented in the current basic block.  Then it walks the gimple
statements of the current basic block.  For each statement, it tests
if the memory regions it references have already been instrumented.
If not, the statement is instrumented and each memory reference that
is actually instrumented is added to the hash table.  When a memory
region is accessed (usually through builtin functions like memset),
then what gets added to the hash table is actually two memory
accesses: one for the beginning of the region, and the other for
its end.
When the patch crosses a function call that is not a built-in function
that we ought to instrument, the hash table is cleared, because that
function call can possibly e.g. free some memory that was instrumented.
Likewise, when a new basic block is visited, the hash table is
cleared.  I guess we could be smarter than just unconditionally
clearing the hash table in this latter case, but this is what asan@llvm
does, and for now, I thought starting in a conservative manner might
have some value.
The hash table is destroyed at the end of the pass.
Bootstrapped and tested against trunk on x86-64-unknown-linux-gnu.
gcc/
* Makefile.in (asan.o): Add new dependency on hash-table.h
* asan.c (struct asan_mem_ref, struct mem_ref_hasher): New types.
(asan_mem_ref_init, asan_mem_ref_get_end, get_mem_ref_hash_table)
(has_stmt_been_instrumented_p, empty_mem_ref_hash_table)
(free_mem_ref_resources, has_mem_ref_been_instrumented)
(has_stmt_been_instrumented_p, update_mem_ref_hash_table)
(get_mem_ref_of_assignment): New functions.
(get_mem_refs_of_builtin_call): Extract from
instrument_builtin_call and tweak a little bit to make it fit with
the new signature.
(instrument_builtin_call): Use the new
get_mem_refs_of_builtin_call. Use gimple_call_builtin_p instead
of is_gimple_builtin_call.
(instrument_derefs, instrument_mem_region_access): Insert the
instrumented memory reference into the hash table.
(maybe_instrument_assignment): Renamed instrument_assignment into
this, and change it to advance the iterator when instrumentation
actually happened and return true in that case.  This makes it
homogeneous with maybe_instrument_call, and thus gives a
chance to callers to be more 'regular'.
(transform_statements): Clear the memory reference hash table
whenever we enter a new BB, when we cross a function call, or when
we are done transforming statements.  Use
maybe_instrument_assignment instead of instrument_assignment.  No more
need to special case maybe_instrument_assignment and advance the
iterator after calling it; it's now handled just like
maybe_instrument_call.  Update comment.
gcc/testsuite/
* c-c++-common/asan/no-redundant-instrumentation-1.c: New test.
* testsuite/c-c++-common/asan/no-redundant-instrumentation-2.c: Likewise.
* testsuite/c-c++-common/asan/no-redundant-instrumentation-3.c: Likewise.
* testsuite/c-c++-common/asan/inc.c: Likewise.
From-SVN: r196008
2013-02-13 11:32:26 +01:00
|
|
|
/* No instrumentation happened.
|
|
|
|
|
2013-02-13 21:47:39 +01:00
|
|
|
If the current instruction is a function call that
|
|
|
|
might free something, let's forget about the memory
|
|
|
|
references that got instrumented. Otherwise we might
|
2016-11-07 11:23:38 +01:00
|
|
|
miss some instrumentation opportunities. Do the same
|
|
|
|
for a ASAN_MARK poisoning internal function. */
|
|
|
|
if (is_gimple_call (s)
|
2016-12-13 10:14:47 +01:00
|
|
|
&& (!nonfreeing_call_p (s)
|
|
|
|
|| asan_mark_p (s, ASAN_MARK_POISON)))
|
[asan] Avoid instrumenting duplicated memory access in the same basic block
Like what Address Sanitizer does in LLVM, this patch avoids instrumenting
duplicated memory accesses in the same basic blocks.
The approach taken is very conservative, to keep the pass simple, for
a start.
A memory access is considered to be a pair made of an expression tree
representing the beginning of the memory region that is accessed and
the size of the access, in bytes.  For now that size is either 1, 2,
4, 8 or 16 bytes.
The patch builds a hash table of the memory accesses that have been
instrumented in the current basic block.  Then it walks the gimple
statements of the current basic block.  For each statement, it tests
if the memory regions it references have already been instrumented.
If not, the statement is instrumented and each memory reference that
is actually instrumented is added to the hash table.  When a memory
region is accessed (usually through builtin functions like memset),
then what gets added to the hash table is actually two memory
accesses: one for the beginning of the region, and the other for
its end.
When the patch crosses a function call that is not a built-in function
that we ought to instrument, the hash table is cleared, because that
function call can possibly e.g. free some memory that was instrumented.
Likewise, when a new basic block is visited, the hash table is
cleared.  I guess we could be smarter than just unconditionally
clearing the hash table in this latter case, but this is what asan@llvm
does, and for now, I thought starting in a conservative manner might
have some value.
The hash table is destroyed at the end of the pass.
Bootstrapped and tested against trunk on x86-64-unknown-linux-gnu.
gcc/
* Makefile.in (asan.o): Add new dependency on hash-table.h
* asan.c (struct asan_mem_ref, struct mem_ref_hasher): New types.
(asan_mem_ref_init, asan_mem_ref_get_end, get_mem_ref_hash_table)
(has_stmt_been_instrumented_p, empty_mem_ref_hash_table)
(free_mem_ref_resources, has_mem_ref_been_instrumented)
(has_stmt_been_instrumented_p, update_mem_ref_hash_table)
(get_mem_ref_of_assignment): New functions.
(get_mem_refs_of_builtin_call): Extract from
instrument_builtin_call and tweak a little bit to make it fit with
the new signature.
(instrument_builtin_call): Use the new
get_mem_refs_of_builtin_call. Use gimple_call_builtin_p instead
of is_gimple_builtin_call.
(instrument_derefs, instrument_mem_region_access): Insert the
instrumented memory reference into the hash table.
(maybe_instrument_assignment): Renamed instrument_assignment into
this, and change it to advance the iterator when instrumentation
actually happened and return true in that case.  This makes it
homogeneous with maybe_instrument_call, and thus gives a
chance to callers to be more 'regular'.
(transform_statements): Clear the memory reference hash table
whenever we enter a new BB, when we cross a function call, or when
we are done transforming statements.  Use
maybe_instrument_assignment instead of instrument_assignment.  No more
need to special case maybe_instrument_assignment and advance the
iterator after calling it; it's now handled just like
maybe_instrument_call.  Update comment.
gcc/testsuite/
* c-c++-common/asan/no-redundant-instrumentation-1.c: New test.
* testsuite/c-c++-common/asan/no-redundant-instrumentation-2.c: Likewise.
* testsuite/c-c++-common/asan/no-redundant-instrumentation-3.c: Likewise.
* testsuite/c-c++-common/asan/inc.c: Likewise.
From-SVN: r196008
2013-02-13 11:32:26 +01:00
|
|
|
empty_mem_ref_hash_table ();
|
|
|
|
|
|
|
|
gsi_next (&i);
|
Instrument built-in memory access function calls
This patch instruments many memory access patterns through builtins.
Basically, for a call like:
__builtin_memset (from, 0, n_bytes);
the patch would only instrument the accesses at the beginning and at
the end of the memory region [from, from + n_bytes]. This is the
strategy used by the llvm implementation of asan.
This instrumentation is done for all the memory access builtin
functions that expose a well specified memory region -- one that
explicitly states the number of bytes accessed in the region.
A special treatment is used for __builtin_strlen. The patch
instruments the access to the first byte of its argument, as well as
the access to the byte (of the argument) at the offset returned by
strlen.
For the __sync_* and __atomic* calls the patch instruments the access
to the bytes pointed to by the argument.
While doing this, I have added a new parameter to build_check_stmt to
decide whether to insert the instrumentation code before or after the
statement iterator. This allows us to do away with the
gsi_{next,prev} dance we were doing in the callers of this function.
Tested by running cc1 -fasan on variations of simple programs like:
int
foo ()
{
char foo[10] = {0};
foo[0] = 't';
foo[1] = 'e';
foo[2] = 's';
foo[3] = 't';
int l = __builtin_strlen (foo);
int n = sizeof (foo);
__builtin_memset (&foo[4], 0, n - 4);
__sync_fetch_and_add (&foo[11], 1);
return l;
}
and by staring at the gimple output which for this function is:
;; Function foo (foo, funcdef_no=0, decl_uid=1714, cgraph_uid=0)
foo ()
{
int n;
int l;
char foo[10];
int D.1725;
char * D.1724;
int D.1723;
long unsigned int D.1722;
int D.1721;
long unsigned int D.1720;
long unsigned int _1;
int _4;
long unsigned int _5;
int _6;
char * _7;
int _8;
char * _9;
unsigned long _10;
unsigned long _11;
unsigned long _12;
signed char * _13;
signed char _14;
_Bool _15;
unsigned long _16;
signed char _17;
_Bool _18;
_Bool _19;
char * _20;
unsigned long _21;
unsigned long _22;
unsigned long _23;
signed char * _24;
signed char _25;
_Bool _26;
unsigned long _27;
signed char _28;
_Bool _29;
_Bool _30;
char * _31;
unsigned long _32;
unsigned long _33;
unsigned long _34;
signed char * _35;
signed char _36;
_Bool _37;
unsigned long _38;
signed char _39;
_Bool _40;
_Bool _41;
char * _42;
unsigned long _43;
unsigned long _44;
unsigned long _45;
signed char * _46;
signed char _47;
_Bool _48;
unsigned long _49;
signed char _50;
_Bool _51;
_Bool _52;
char * _53;
unsigned long _54;
unsigned long _55;
unsigned long _56;
signed char * _57;
signed char _58;
_Bool _59;
unsigned long _60;
signed char _61;
_Bool _62;
_Bool _63;
char[10] * _64;
unsigned long _65;
unsigned long _66;
unsigned long _67;
signed char * _68;
signed char _69;
_Bool _70;
unsigned long _71;
signed char _72;
_Bool _73;
_Bool _74;
unsigned long _75;
unsigned long _76;
unsigned long _77;
signed char * _78;
signed char _79;
_Bool _80;
unsigned long _81;
signed char _82;
_Bool _83;
_Bool _84;
long unsigned int _85;
long unsigned int _86;
char * _87;
char * _88;
unsigned long _89;
unsigned long _90;
unsigned long _91;
signed char * _92;
signed char _93;
_Bool _94;
unsigned long _95;
signed char _96;
_Bool _97;
_Bool _98;
char * _99;
unsigned long _100;
unsigned long _101;
unsigned long _102;
signed char * _103;
signed char _104;
_Bool _105;
unsigned long _106;
signed char _107;
_Bool _108;
_Bool _109;
<bb 2>:
foo = {};
_9 = &foo[0];
_10 = (unsigned long) _9;
_11 = _10 >> 3;
_12 = _11 + 17592186044416;
_13 = (signed char *) _12;
_14 = *_13;
_15 = _14 != 0;
_16 = _10 & 7;
_17 = (signed char) _16;
_18 = _17 >= _14;
_19 = _15 & _18;
if (_19 != 0)
goto <bb 5>;
else
goto <bb 4>;
<bb 5>:
__asan_report_store1 (_10);
<bb 4>:
foo[0] = 116;
_20 = &foo[1];
_21 = (unsigned long) _20;
_22 = _21 >> 3;
_23 = _22 + 17592186044416;
_24 = (signed char *) _23;
_25 = *_24;
_26 = _25 != 0;
_27 = _21 & 7;
_28 = (signed char) _27;
_29 = _28 >= _25;
_30 = _26 & _29;
if (_30 != 0)
goto <bb 7>;
else
goto <bb 6>;
<bb 7>:
__asan_report_store1 (_21);
<bb 6>:
foo[1] = 101;
_31 = &foo[2];
_32 = (unsigned long) _31;
_33 = _32 >> 3;
_34 = _33 + 17592186044416;
_35 = (signed char *) _34;
_36 = *_35;
_37 = _36 != 0;
_38 = _32 & 7;
_39 = (signed char) _38;
_40 = _39 >= _36;
_41 = _37 & _40;
if (_41 != 0)
goto <bb 9>;
else
goto <bb 8>;
<bb 9>:
__asan_report_store1 (_32);
<bb 8>:
foo[2] = 115;
_42 = &foo[3];
_43 = (unsigned long) _42;
_44 = _43 >> 3;
_45 = _44 + 17592186044416;
_46 = (signed char *) _45;
_47 = *_46;
_48 = _47 != 0;
_49 = _43 & 7;
_50 = (signed char) _49;
_51 = _50 >= _47;
_52 = _48 & _51;
if (_52 != 0)
goto <bb 11>;
else
goto <bb 10>;
<bb 11>:
__asan_report_store1 (_43);
<bb 10>:
foo[3] = 116;
_53 = (char *) &foo;
_54 = (unsigned long) _53;
_55 = _54 >> 3;
_56 = _55 + 17592186044416;
_57 = (signed char *) _56;
_58 = *_57;
_59 = _58 != 0;
_60 = _54 & 7;
_61 = (signed char) _60;
_62 = _61 >= _58;
_63 = _59 & _62;
if (_63 != 0)
goto <bb 13>;
else
goto <bb 12>;
<bb 13>:
__asan_report_load1 (_54);
<bb 12>:
_1 = __builtin_strlen (&foo);
_64 = _53 + _1;
_65 = (unsigned long) _64;
_66 = _65 >> 3;
_67 = _66 + 17592186044416;
_68 = (signed char *) _67;
_69 = *_68;
_70 = _69 != 0;
_71 = _65 & 7;
_72 = (signed char) _71;
_73 = _72 >= _69;
_74 = _70 & _73;
if (_74 != 0)
goto <bb 15>;
else
goto <bb 14>;
<bb 15>:
__asan_report_load1 (_65);
<bb 14>:
l_2 = (int) _1;
n_3 = 10;
_4 = n_3 + -4;
_5 = (long unsigned int) _4;
_6 = l_2 + 1;
_7 = &foo[_6];
if (_5 != 0)
goto <bb 17>;
else
goto <bb 16>;
<bb 17>:
_75 = (unsigned long) _7;
_76 = _75 >> 3;
_77 = _76 + 17592186044416;
_78 = (signed char *) _77;
_79 = *_78;
_80 = _79 != 0;
_81 = _75 & 7;
_82 = (signed char) _81;
_83 = _82 >= _79;
_84 = _80 & _83;
_85 = _5;
_86 = _85 - 1;
_87 = _7;
_88 = _87 + _86;
_89 = (unsigned long) _88;
_90 = _89 >> 3;
_91 = _90 + 17592186044416;
_92 = (signed char *) _91;
_93 = *_92;
_94 = _93 != 0;
_95 = _89 & 7;
_96 = (signed char) _95;
_97 = _96 >= _93;
_98 = _94 & _97;
if (_98 != 0)
goto <bb 21>;
else
goto <bb 20>;
<bb 21>:
__asan_report_store1 (_89);
<bb 20>:
if (_84 != 0)
goto <bb 19>;
else
goto <bb 18>;
<bb 19>:
__asan_report_store1 (_75);
<bb 18>:
<bb 16>:
__builtin_memset (_7, 0, _5);
_99 = &foo[11];
_100 = (unsigned long) _99;
_101 = _100 >> 3;
_102 = _101 + 17592186044416;
_103 = (signed char *) _102;
_104 = *_103;
_105 = _104 != 0;
_106 = _100 & 7;
_107 = (signed char) _106;
_108 = _107 >= _104;
_109 = _105 & _108;
if (_109 != 0)
goto <bb 23>;
else
goto <bb 22>;
<bb 23>:
__asan_report_store1 (_100);
<bb 22>:
__sync_fetch_and_add_1 (&foo[11], 1);
_8 = l_2;
foo ={v} {CLOBBER};
<L1>:
return _8;
}
;; Function _GLOBAL__sub_I_00099_0_foo (_GLOBAL__sub_I_00099_0_foo, funcdef_no=1, decl_uid=1752, cgraph_uid=4)
_GLOBAL__sub_I_00099_0_foo ()
{
<bb 2>:
__asan_init ();
return;
}
gcc/
* gimple.h (is_gimple_builtin_call): Declare ...
* gimple.c (is_gimple_builtin_call): ... New public function.
* asan.c (insert_if_then_before_iter, instrument_mem_region_access,
instrument_strlen_call, maybe_instrument_builtin_call,
instrument_call): New static functions.
(create_cond_insert_point): Renamed
create_cond_insert_point_before_iter into this. Add a new
parameter to decide whether to insert the condition before or
after the statement iterator.
(build_check_stmt): Adjust for the new create_cond_insert_point.
Add a new parameter to decide whether to add the instrumentation
code before or after the statement iterator.
(instrument_assignment): Factorize from ...
(transform_statements): ... here. Use maybe_instrument_call to
instrument builtin function calls as well.
(instrument_derefs): Adjust for the new parameter of
build_check_stmt. Fix detection of bit-field access.
From-SVN: r193440
2012-11-12 16:53:25 +01:00
|
|
|
}
|
2012-11-12 17:18:59 +01:00
|
|
|
}
|
2012-11-12 16:51:13 +01:00
|
|
|
}
|
[asan] Avoid instrumenting duplicated memory access in the same basic block
Like what Address Sanitizer does in LLVM, this patch avoids instrumenting
duplicated memory accesses in the same basic block.
The approach taken is very conservative, to keep the pass simple, for
a start.
A memory access is considered to be a pair made of an expression tree
representing the beginning of the memory region that is accessed and
the size of the access, in bytes.  For now that size is either 1, 2,
4, 8 or 16 bytes.
The patch builds a hash table of the memory accesses that have been
instrumented in the current basic block. Then it walks the gimple
statements of the current basic block. For each statement, it tests
if the memory regions it references have already been instrumented.
If not, the statement is instrumented and each memory references that
are actually instrumented are added to the hash table. When a memory
region is accessed (usually through builtin functions like memset),
then what gets added to the hash table is actually two memory
accesses: one for the beginning of the region, and the other for
its end.
When the patch crosses a function call that is not a built-in function
that we ought to instrument, the hash table is cleared, because that
function call can possibly, e.g., free some memory that was instrumented.
Likewise, when a new basic block is visited, the hash table is
cleared. I guess we could be smarter than just unconditionally
clearing the hash table in this latter case, but this is what asan@llvm
does, and for now, I thought starting in a conservative manner might
have some value.
The hash table is destroyed at the end of the pass.
Bootstrapped and tested against trunk on x86-64-unknown-linux-gnu.
gcc/
* Makefile.in (asan.o): Add new dependency on hash-table.h
* asan.c (struct asan_mem_ref, struct mem_ref_hasher): New types.
(asan_mem_ref_init, asan_mem_ref_get_end, get_mem_ref_hash_table)
(has_stmt_been_instrumented_p, empty_mem_ref_hash_table)
(free_mem_ref_resources, has_mem_ref_been_instrumented)
(has_stmt_been_instrumented_p, update_mem_ref_hash_table)
(get_mem_ref_of_assignment): New functions.
(get_mem_refs_of_builtin_call): Extract from
instrument_builtin_call and tweak a little bit to make it fit with
the new signature.
(instrument_builtin_call): Use the new
get_mem_refs_of_builtin_call. Use gimple_call_builtin_p instead
of is_gimple_builtin_call.
(instrument_derefs, instrument_mem_region_access): Insert the
instrumented memory reference into the hash table.
(maybe_instrument_assignment): Renamed instrument_assignment into
this, and change it to advance the iterator when instrumentation
actually happened and return true in that case. This makes it
homogeneous with maybe_instrument_call, and thus give a
chance to callers to be more 'regular'.
(transform_statements): Clear the memory reference hash table
whenever we enter a new BB, when we cross a function call, or when
we are done transforming statements. Use
maybe_instrument_assignment instead of instrument_assignment.  No more
need to special case maybe_instrument_assignment and advance the
iterator after calling it; it's now handled just like
maybe_instrument_call. Update comment.
gcc/testsuite/
* c-c++-common/asan/no-redundant-instrumentation-1.c: New test.
* testsuite/c-c++-common/asan/no-redundant-instrumentation-2.c: Likewise.
* testsuite/c-c++-common/asan/no-redundant-instrumentation-3.c: Likewise.
* testsuite/c-c++-common/asan/inc.c: Likewise.
From-SVN: r196008
2013-02-13 11:32:26 +01:00
|
|
|
free_mem_ref_resources ();
|
2012-11-12 16:51:13 +01:00
|
|
|
}
|
|
|
|
|
2013-11-22 21:04:45 +01:00
|
|
|
/* Build
|
|
|
|
__asan_before_dynamic_init (module_name)
|
|
|
|
or
|
|
|
|
__asan_after_dynamic_init ()
|
|
|
|
call. */
|
|
|
|
|
|
|
|
tree
|
|
|
|
asan_dynamic_init_call (bool after_p)
|
|
|
|
{
|
2016-07-21 18:17:58 +02:00
|
|
|
if (shadow_ptr_types[0] == NULL_TREE)
|
|
|
|
asan_init_shadow_ptr_types ();
|
|
|
|
|
2013-11-22 21:04:45 +01:00
|
|
|
tree fn = builtin_decl_implicit (after_p
|
|
|
|
? BUILT_IN_ASAN_AFTER_DYNAMIC_INIT
|
|
|
|
: BUILT_IN_ASAN_BEFORE_DYNAMIC_INIT);
|
|
|
|
tree module_name_cst = NULL_TREE;
|
|
|
|
if (!after_p)
|
|
|
|
{
|
|
|
|
pretty_printer module_name_pp;
|
|
|
|
pp_string (&module_name_pp, main_input_filename);
|
|
|
|
|
|
|
|
module_name_cst = asan_pp_string (&module_name_pp);
|
|
|
|
module_name_cst = fold_convert (const_ptr_type_node,
|
|
|
|
module_name_cst);
|
|
|
|
}
|
|
|
|
|
|
|
|
return build_call_expr (fn, after_p ? 0 : 1, module_name_cst);
|
|
|
|
}
|
|
|
|
|
Implement protection of global variables
This patch implements the protection of global variables. See the
comments appended to the beginning of the asan.c file.
* varasm.c: Include asan.h.
(assemble_noswitch_variable): Grow size by asan_red_zone_size
if decl is asan protected.
(place_block_symbol): Likewise.
(assemble_variable): If decl is asan protected, increase
DECL_ALIGN if needed, and for decls emitted using
assemble_variable_contents append padding zeros after it.
* Makefile.in (varasm.o): Depend on asan.h.
* asan.c: Include output.h.
(asan_pp, asan_pp_initialized, asan_ctor_statements): New variables.
(asan_pp_initialize, asan_pp_string): New functions.
(asan_emit_stack_protection): Use asan_pp{,_initialized}
instead of local pp{,_initialized} vars, use asan_pp_initialize
and asan_pp_string helpers.
(asan_needs_local_alias, asan_protect_global,
asan_global_struct, asan_add_global): New functions.
(asan_finish_file): Protect global vars that can be protected. Use
asan_ctor_statements instead of ctor_statements
* asan.h (asan_protect_global): New prototype.
(asan_red_zone_size): New inline function.
Co-Authored-By: Wei Mi <wmi@google.com>
From-SVN: r193437
2012-11-12 16:52:42 +01:00
|
|
|
/* Build
|
|
|
|
struct __asan_global
|
|
|
|
{
|
|
|
|
const void *__beg;
|
|
|
|
uptr __size;
|
|
|
|
uptr __size_with_redzone;
|
|
|
|
const void *__name;
|
2013-11-04 22:33:31 +01:00
|
|
|
const void *__module_name;
|
Implement protection of global variables
This patch implements the protection of global variables. See the
comments appended to the beginning of the asan.c file.
* varasm.c: Include asan.h.
(assemble_noswitch_variable): Grow size by asan_red_zone_size
if decl is asan protected.
(place_block_symbol): Likewise.
(assemble_variable): If decl is asan protected, increase
DECL_ALIGN if needed, and for decls emitted using
assemble_variable_contents append padding zeros after it.
* Makefile.in (varasm.o): Depend on asan.h.
* asan.c: Include output.h.
(asan_pp, asan_pp_initialized, asan_ctor_statements): New variables.
(asan_pp_initialize, asan_pp_string): New functions.
(asan_emit_stack_protection): Use asan_pp{,_initialized}
instead of local pp{,_initialized} vars, use asan_pp_initialize
and asan_pp_string helpers.
(asan_needs_local_alias, asan_protect_global,
asan_global_struct, asan_add_global): New functions.
(asan_finish_file): Protect global vars that can be protected. Use
asan_ctor_statements instead of ctor_statements
* asan.h (asan_protect_global): New prototype.
(asan_red_zone_size): New inline function.
Co-Authored-By: Wei Mi <wmi@google.com>
From-SVN: r193437
2012-11-12 16:52:42 +01:00
|
|
|
uptr __has_dynamic_init;
|
2014-09-23 19:59:53 +02:00
|
|
|
__asan_global_source_location *__location;
|
2016-11-08 23:15:35 +01:00
|
|
|
char *__odr_indicator;
|
Implement protection of global variables
This patch implements the protection of global variables. See the
comments appended to the beginning of the asan.c file.
* varasm.c: Include asan.h.
(assemble_noswitch_variable): Grow size by asan_red_zone_size
if decl is asan protected.
(place_block_symbol): Likewise.
(assemble_variable): If decl is asan protected, increase
DECL_ALIGN if needed, and for decls emitted using
assemble_variable_contents append padding zeros after it.
* Makefile.in (varasm.o): Depend on asan.h.
* asan.c: Include output.h.
(asan_pp, asan_pp_initialized, asan_ctor_statements): New variables.
(asan_pp_initialize, asan_pp_string): New functions.
(asan_emit_stack_protection): Use asan_pp{,_initialized}
instead of local pp{,_initialized} vars, use asan_pp_initialize
and asan_pp_string helpers.
(asan_needs_local_alias, asan_protect_global,
asan_global_struct, asan_add_global): New functions.
(asan_finish_file): Protect global vars that can be protected. Use
asan_ctor_statements instead of ctor_statements
* asan.h (asan_protect_global): New prototype.
(asan_red_zone_size): New inline function.
Co-Authored-By: Wei Mi <wmi@google.com>
From-SVN: r193437
2012-11-12 16:52:42 +01:00
|
|
|
} type. */
|
|
|
|
|
|
|
|
static tree
|
|
|
|
asan_global_struct (void)
|
|
|
|
{
|
2016-12-02 08:39:27 +01:00
|
|
|
static const char *field_names[]
|
Implement protection of global variables
This patch implements the protection of global variables. See the
comments appended to the beginning of the asan.c file.
* varasm.c: Include asan.h.
(assemble_noswitch_variable): Grow size by asan_red_zone_size
if decl is asan protected.
(place_block_symbol): Likewise.
(assemble_variable): If decl is asan protected, increase
DECL_ALIGN if needed, and for decls emitted using
assemble_variable_contents append padding zeros after it.
* Makefile.in (varasm.o): Depend on asan.h.
* asan.c: Include output.h.
(asan_pp, asan_pp_initialized, asan_ctor_statements): New variables.
(asan_pp_initialize, asan_pp_string): New functions.
(asan_emit_stack_protection): Use asan_pp{,_initialized}
instead of local pp{,_initialized} vars, use asan_pp_initialize
and asan_pp_string helpers.
(asan_needs_local_alias, asan_protect_global,
asan_global_struct, asan_add_global): New functions.
(asan_finish_file): Protect global vars that can be protected. Use
asan_ctor_statements instead of ctor_statements
* asan.h (asan_protect_global): New prototype.
(asan_red_zone_size): New inline function.
Co-Authored-By: Wei Mi <wmi@google.com>
From-SVN: r193437
2012-11-12 16:52:42 +01:00
|
|
|
= { "__beg", "__size", "__size_with_redzone",
|
2016-12-02 08:39:27 +01:00
|
|
|
"__name", "__module_name", "__has_dynamic_init", "__location",
|
|
|
|
"__odr_indicator" };
|
|
|
|
tree fields[ARRAY_SIZE (field_names)], ret;
|
|
|
|
unsigned i;
|
Implement protection of global variables
This patch implements the protection of global variables. See the
comments appended to the beginning of the asan.c file.
* varasm.c: Include asan.h.
(assemble_noswitch_variable): Grow size by asan_red_zone_size
if decl is asan protected.
(place_block_symbol): Likewise.
(assemble_variable): If decl is asan protected, increase
DECL_ALIGN if needed, and for decls emitted using
assemble_variable_contents append padding zeros after it.
* Makefile.in (varasm.o): Depend on asan.h.
* asan.c: Include output.h.
(asan_pp, asan_pp_initialized, asan_ctor_statements): New variables.
(asan_pp_initialize, asan_pp_string): New functions.
(asan_emit_stack_protection): Use asan_pp{,_initialized}
instead of local pp{,_initialized} vars, use asan_pp_initialize
and asan_pp_string helpers.
(asan_needs_local_alias, asan_protect_global,
asan_global_struct, asan_add_global): New functions.
(asan_finish_file): Protect global vars that can be protected. Use
asan_ctor_statements instead of ctor_statements
* asan.h (asan_protect_global): New prototype.
(asan_red_zone_size): New inline function.
Co-Authored-By: Wei Mi <wmi@google.com>
From-SVN: r193437
2012-11-12 16:52:42 +01:00
|
|
|
|
|
|
|
ret = make_node (RECORD_TYPE);
|
2016-12-02 08:39:27 +01:00
|
|
|
for (i = 0; i < ARRAY_SIZE (field_names); i++)
|
Implement protection of global variables
This patch implements the protection of global variables. See the
comments appended to the beginning of the asan.c file.
* varasm.c: Include asan.h.
(assemble_noswitch_variable): Grow size by asan_red_zone_size
if decl is asan protected.
(place_block_symbol): Likewise.
(assemble_variable): If decl is asan protected, increase
DECL_ALIGN if needed, and for decls emitted using
assemble_variable_contents append padding zeros after it.
* Makefile.in (varasm.o): Depend on asan.h.
* asan.c: Include output.h.
(asan_pp, asan_pp_initialized, asan_ctor_statements): New variables.
(asan_pp_initialize, asan_pp_string): New functions.
(asan_emit_stack_protection): Use asan_pp{,_initialized}
instead of local pp{,_initialized} vars, use asan_pp_initialize
and asan_pp_string helpers.
(asan_needs_local_alias, asan_protect_global,
asan_global_struct, asan_add_global): New functions.
(asan_finish_file): Protect global vars that can be protected. Use
asan_ctor_statements instead of ctor_statements
* asan.h (asan_protect_global): New prototype.
(asan_red_zone_size): New inline function.
Co-Authored-By: Wei Mi <wmi@google.com>
From-SVN: r193437
2012-11-12 16:52:42 +01:00
|
|
|
{
|
|
|
|
fields[i]
|
|
|
|
= build_decl (UNKNOWN_LOCATION, FIELD_DECL,
|
|
|
|
get_identifier (field_names[i]),
|
|
|
|
(i == 0 || i == 3) ? const_ptr_type_node
|
2013-08-30 18:12:58 +02:00
|
|
|
: pointer_sized_int_node);
|
Implement protection of global variables
This patch implements the protection of global variables. See the
comments appended to the beginning of the asan.c file.
* varasm.c: Include asan.h.
(assemble_noswitch_variable): Grow size by asan_red_zone_size
if decl is asan protected.
(place_block_symbol): Likewise.
(assemble_variable): If decl is asan protected, increase
DECL_ALIGN if needed, and for decls emitted using
assemble_variable_contents append padding zeros after it.
* Makefile.in (varasm.o): Depend on asan.h.
* asan.c: Include output.h.
(asan_pp, asan_pp_initialized, asan_ctor_statements): New variables.
(asan_pp_initialize, asan_pp_string): New functions.
(asan_emit_stack_protection): Use asan_pp{,_initialized}
instead of local pp{,_initialized} vars, use asan_pp_initialize
and asan_pp_string helpers.
(asan_needs_local_alias, asan_protect_global,
asan_global_struct, asan_add_global): New functions.
(asan_finish_file): Protect global vars that can be protected. Use
asan_ctor_statements instead of ctor_statements
* asan.h (asan_protect_global): New prototype.
(asan_red_zone_size): New inline function.
Co-Authored-By: Wei Mi <wmi@google.com>
From-SVN: r193437
2012-11-12 16:52:42 +01:00
|
|
|
DECL_CONTEXT (fields[i]) = ret;
|
|
|
|
if (i)
|
|
|
|
DECL_CHAIN (fields[i - 1]) = fields[i];
|
|
|
|
}
|
2014-11-18 15:55:44 +01:00
|
|
|
tree type_decl = build_decl (input_location, TYPE_DECL,
|
|
|
|
get_identifier ("__asan_global"), ret);
|
|
|
|
DECL_IGNORED_P (type_decl) = 1;
|
|
|
|
DECL_ARTIFICIAL (type_decl) = 1;
|
Implement protection of global variables
This patch implements the protection of global variables. See the
comments appended to the beginning of the asan.c file.
* varasm.c: Include asan.h.
(assemble_noswitch_variable): Grow size by asan_red_zone_size
if decl is asan protected.
(place_block_symbol): Likewise.
(assemble_variable): If decl is asan protected, increase
DECL_ALIGN if needed, and for decls emitted using
assemble_variable_contents append padding zeros after it.
* Makefile.in (varasm.o): Depend on asan.h.
* asan.c: Include output.h.
(asan_pp, asan_pp_initialized, asan_ctor_statements): New variables.
(asan_pp_initialize, asan_pp_string): New functions.
(asan_emit_stack_protection): Use asan_pp{,_initialized}
instead of local pp{,_initialized} vars, use asan_pp_initialize
and asan_pp_string helpers.
(asan_needs_local_alias, asan_protect_global,
asan_global_struct, asan_add_global): New functions.
(asan_finish_file): Protect global vars that can be protected. Use
asan_ctor_statements instead of ctor_statements
* asan.h (asan_protect_global): New prototype.
(asan_red_zone_size): New inline function.
Co-Authored-By: Wei Mi <wmi@google.com>
From-SVN: r193437
2012-11-12 16:52:42 +01:00
|
|
|
TYPE_FIELDS (ret) = fields[0];
|
2014-11-18 15:55:44 +01:00
|
|
|
TYPE_NAME (ret) = type_decl;
|
|
|
|
TYPE_STUB_DECL (ret) = type_decl;
|
Implement protection of global variables
This patch implements the protection of global variables. See the
comments appended to the beginning of the asan.c file.
* varasm.c: Include asan.h.
(assemble_noswitch_variable): Grow size by asan_red_zone_size
if decl is asan protected.
(place_block_symbol): Likewise.
(assemble_variable): If decl is asan protected, increase
DECL_ALIGN if needed, and for decls emitted using
assemble_variable_contents append padding zeros after it.
* Makefile.in (varasm.o): Depend on asan.h.
* asan.c: Include output.h.
(asan_pp, asan_pp_initialized, asan_ctor_statements): New variables.
(asan_pp_initialize, asan_pp_string): New functions.
(asan_emit_stack_protection): Use asan_pp{,_initialized}
instead of local pp{,_initialized} vars, use asan_pp_initialize
and asan_pp_string helpers.
(asan_needs_local_alias, asan_protect_global,
asan_global_struct, asan_add_global): New functions.
(asan_finish_file): Protect global vars that can be protected. Use
asan_ctor_statements instead of ctor_statements
* asan.h (asan_protect_global): New prototype.
(asan_red_zone_size): New inline function.
Co-Authored-By: Wei Mi <wmi@google.com>
From-SVN: r193437
2012-11-12 16:52:42 +01:00
|
|
|
layout_type (ret);
|
|
|
|
return ret;
|
|
|
|
}
|
|
|
|
|
2016-12-02 08:39:27 +01:00
|
|
|
/* Create and return odr indicator symbol for DECL.
|
|
|
|
TYPE is __asan_global struct type as returned by asan_global_struct. */
|
|
|
|
|
|
|
|
static tree
|
|
|
|
create_odr_indicator (tree decl, tree type)
|
|
|
|
{
|
|
|
|
char *name;
|
|
|
|
tree uptr = TREE_TYPE (DECL_CHAIN (TYPE_FIELDS (type)));
|
|
|
|
tree decl_name
|
|
|
|
= (HAS_DECL_ASSEMBLER_NAME_P (decl) ? DECL_ASSEMBLER_NAME (decl)
|
|
|
|
: DECL_NAME (decl));
|
|
|
|
/* DECL_NAME theoretically might be NULL. Bail out with 0 in this case. */
|
|
|
|
if (decl_name == NULL_TREE)
|
|
|
|
return build_int_cst (uptr, 0);
|
|
|
|
size_t len = strlen (IDENTIFIER_POINTER (decl_name)) + sizeof ("__odr_asan_");
|
|
|
|
name = XALLOCAVEC (char, len);
|
|
|
|
snprintf (name, len, "__odr_asan_%s", IDENTIFIER_POINTER (decl_name));
|
|
|
|
#ifndef NO_DOT_IN_LABEL
|
|
|
|
name[sizeof ("__odr_asan") - 1] = '.';
|
|
|
|
#elif !defined(NO_DOLLAR_IN_LABEL)
|
|
|
|
name[sizeof ("__odr_asan") - 1] = '$';
|
|
|
|
#endif
|
|
|
|
tree var = build_decl (UNKNOWN_LOCATION, VAR_DECL, get_identifier (name),
|
|
|
|
char_type_node);
|
|
|
|
TREE_ADDRESSABLE (var) = 1;
|
|
|
|
TREE_READONLY (var) = 0;
|
|
|
|
TREE_THIS_VOLATILE (var) = 1;
|
|
|
|
DECL_GIMPLE_REG_P (var) = 0;
|
|
|
|
DECL_ARTIFICIAL (var) = 1;
|
|
|
|
DECL_IGNORED_P (var) = 1;
|
|
|
|
TREE_STATIC (var) = 1;
|
|
|
|
TREE_PUBLIC (var) = 1;
|
|
|
|
DECL_VISIBILITY (var) = DECL_VISIBILITY (decl);
|
|
|
|
DECL_VISIBILITY_SPECIFIED (var) = DECL_VISIBILITY_SPECIFIED (decl);
|
|
|
|
|
|
|
|
TREE_USED (var) = 1;
|
|
|
|
tree ctor = build_constructor_va (TREE_TYPE (var), 1, NULL_TREE,
|
|
|
|
build_int_cst (unsigned_type_node, 0));
|
|
|
|
TREE_CONSTANT (ctor) = 1;
|
|
|
|
TREE_STATIC (ctor) = 1;
|
|
|
|
DECL_INITIAL (var) = ctor;
|
|
|
|
DECL_ATTRIBUTES (var) = tree_cons (get_identifier ("asan odr indicator"),
|
|
|
|
NULL, DECL_ATTRIBUTES (var));
|
|
|
|
make_decl_rtl (var);
|
|
|
|
varpool_node::finalize_decl (var);
|
|
|
|
return fold_convert (uptr, build_fold_addr_expr (var));
|
|
|
|
}
|
|
|
|
|
|
|
|
/* Return true if DECL, a global var, might be overridden and needs
|
|
|
|
an additional odr indicator symbol. */
|
|
|
|
|
|
|
|
static bool
|
|
|
|
asan_needs_odr_indicator_p (tree decl)
|
|
|
|
{
|
2017-01-13 11:08:50 +01:00
|
|
|
/* Don't emit ODR indicators for kernel because:
|
|
|
|
a) Kernel is written in C thus doesn't need ODR indicators.
|
|
|
|
b) Some kernel code may have assumptions about symbols containing specific
|
|
|
|
patterns in their names. Since ODR indicators contain original names
|
|
|
|
of symbols they are emitted for, these assumptions would be broken for
|
|
|
|
ODR indicator symbols. */
|
|
|
|
return (!(flag_sanitize & SANITIZE_KERNEL_ADDRESS)
|
|
|
|
&& !DECL_ARTIFICIAL (decl)
|
|
|
|
&& !DECL_WEAK (decl)
|
|
|
|
&& TREE_PUBLIC (decl));
|
2016-12-02 08:39:27 +01:00
|
|
|
}
|
|
|
|
|
Implement protection of global variables
This patch implements the protection of global variables. See the
comments appended to the beginning of the asan.c file.
* varasm.c: Include asan.h.
(assemble_noswitch_variable): Grow size by asan_red_zone_size
if decl is asan protected.
(place_block_symbol): Likewise.
(assemble_variable): If decl is asan protected, increase
DECL_ALIGN if needed, and for decls emitted using
assemble_variable_contents append padding zeros after it.
* Makefile.in (varasm.o): Depend on asan.h.
* asan.c: Include output.h.
(asan_pp, asan_pp_initialized, asan_ctor_statements): New variables.
(asan_pp_initialize, asan_pp_string): New functions.
(asan_emit_stack_protection): Use asan_pp{,_initialized}
instead of local pp{,_initialized} vars, use asan_pp_initialize
and asan_pp_string helpers.
(asan_needs_local_alias, asan_protect_global,
asan_global_struct, asan_add_global): New functions.
(asan_finish_file): Protect global vars that can be protected. Use
asan_ctor_statements instead of ctor_statements
* asan.h (asan_protect_global): New prototype.
(asan_red_zone_size): New inline function.
Co-Authored-By: Wei Mi <wmi@google.com>
From-SVN: r193437
2012-11-12 16:52:42 +01:00
|
|
|
/* Append description of a single global DECL into vector V.
|
|
|
|
TYPE is __asan_global struct type as returned by asan_global_struct. */
|
|
|
|
|
|
|
|
static void
|
This patch rewrites the old VEC macro-based interface into a new one based on the template class 'vec'.
This patch rewrites the old VEC macro-based interface into a new one
based on the template class 'vec'. The user-visible changes are
described in http://gcc.gnu.org/wiki/cxx-conversion/cxx-vec.
I have tested the patch pretty extensively:
- Regular bootstraps on x86_64, ppc, ia64, sparc and hppa.
- Bootstraps with --enable-checking=release
- Bootstraps with --enable-checking=gc,gcac
- Basic builds on all targets (using contrib/config-list.mk).
We no longer access the vectors via VEC_* macros. The pattern is
"VEC_operation (T, A, V, args)" becomes "V.operation (args)".
The only thing I could not do is create proper ctors and dtors for the
vec class. Since these vectors are stored in unions, we
have to keep them as PODs (C++03 does not allow non-PODs in unions).
This means that creation and destruction must be explicit. There is a
new method vec<type, allocation, layout>::create() and another vec<type,
allocation, layout>::destroy() to allocate the internal vector.
For vectors that must be pointers, there is a family of free functions
that implement the operations that need to tolerate NULL vectors.
These functions all start with the prefix 'vec_safe_'. See the wiki
page for details.
The gengtype change removes the special handling for VEC() that used
to exist in gengtype. Additionally, it allows gengtype to recognize
templates of more than one argument and introduces the concept of an
undefined type (useful for template arguments that may or may not be
types).
When a TYPE_UNDEFINED is reached, gengtype will ignore it if it
happens inside a type marked with GTY((user)). Otherwise, it will
emit an error.
Finally, gengtype rejects root types marked GTY((user)) that are not
first class pointers.
2012-11-16 Diego Novillo <dnovillo@google.com>
VEC API overhaul (http://gcc.gnu.org/wiki/cxx-conversion/cxx-vec)
* vec.c (register_overhead): Convert it into
member function of vec_prefix.
(release_overhead): Likewise.
(calculate_allocation): Likewise.
(vec_heap_free): Remove.
(vec_gc_o_reserve_1): Remove.
(vec_heap_o_reserve_1): Remove.
(vec_stack_o_reserve_1): Remove.
(vec_stack_o_reserve_exact): Remove.
(register_stack_vec): New.
(stack_vec_register_index): New.
(unregister_stack_vec): New.
(vec_assert_fail): Remove.
* vec.h: Conditionally include ggc.h. Document conditional
hackery.
Update top-level documentation.
(ALONE_VEC_CHECK_INFO): Remove.
(VEC_CHECK_INFO): Remove.
(ALONE_VEC_CHECK_DECL): Remove.
(VEC_CHECK_DECL): Remove.
(ALONE_VEC_CHECK_PASS): Remove.
(VEC_CHECK_PASS): Remove.
(VEC_ASSERT): Remove.
(vec_prefix): Add friends va_gc, va_gc_atomic, va_heap and
va_stack.
Mark fields alloc_ and num_ as protected.
(struct vec_t): Remove. Remove all function members.
(struct vl_embed): Declare.
(struct vl_ptr): Declare.
(free): Remove.
(reserve_exact): Remove.
(reserve): Remove.
(safe_splice): Remove.
(safe_push): Remove.
(safe_grow): Remove.
(safe_grow_cleared): Remove.
(safe_insert): Remove.
(DEF_VEC_I): Remove.
(DEF_VEC_ALLOC_I): Remove.
(DEF_VEC_P): Remove.
(DEF_VEC_ALLOC_P): Remove.
(DEF_VEC_O): Remove.
(DEF_VEC_ALLOC_O): Remove.
(DEF_VEC_ALLOC_P_STACK): Remove.
(DEF_VEC_ALLOC_O_STACK): Remove.
(DEF_VEC_ALLOC_I_STACK): Remove.
(DEF_VEC_A): Remove.
(DEF_VEC_ALLOC_A): Remove.
(vec_stack_p_reserve_exact_1): Remove.
(vec_stack_o_reserve): Remove.
(vec_stack_o_reserve_exact): Remove.
(VEC_length): Remove.
(VEC_empty): Remove.
(VEC_address): Remove.
(vec_address): Remove.
(VEC_last): Remove.
(VEC_index): Remove.
(VEC_iterate): Remove.
(VEC_embedded_size): Remove.
(VEC_embedded_init): Remove.
(VEC_free): Remove.
(VEC_copy): Remove.
(VEC_space): Remove.
(VEC_reserve): Remove.
(VEC_reserve_exact): Remove.
(VEC_splice): Remove.
(VEC_safe_splice): Remove.
(VEC_quick_push): Remove.
(VEC_safe_push): Remove.
(VEC_pop): Remove.
(VEC_truncate): Remove.
(VEC_safe_grow): Remove.
(VEC_replace): Remove.
(VEC_quick_insert): Remove.
(VEC_safe_insert): Remove.
(VEC_ordered_remove): Remove.
(VEC_unordered_remove): Remove.
(VEC_block_remove): Remove.
(VEC_lower_bound): Remove.
(VEC_alloc): Remove.
(VEC_qsort): Remove.
(va_heap): Declare.
(va_heap::default_layout): New typedef to vl_ptr.
(va_heap::reserve): New.
(va_heap::release): New.
(va_gc): Declare.
(va_gc::default_layout): New typedef to vl_embed.
(va_gc::reserve): New.
(va_gc::release): New.
(va_gc_atomic): Declare. Inherit from va_gc.
(va_stack): Declare.
(va_stack::default_layout): New typedef to vl_ptr.
(va_stack::alloc): New.
(va_stack::reserve): New.
(va_stack::release): New.
(register_stack_vec): Declare.
(stack_vec_register_index): Declare.
(unregister_stack_vec): Declare.
(vec<T, A = va_heap, L = typename A::default_layout>): Declare
empty vec template.
(vec<T, A, vl_embed>): Partial specialization for embedded
layout.
(vec<T, A, vl_embed>::allocated): New.
(vec<T, A, vl_embed>::length): New.
(vec<T, A, vl_embed>::is_empty): New.
(vec<T, A, vl_embed>::address): New.
(vec<T, A, vl_embed>::operator[]): New.
(vec<T, A, vl_embed>::last New.
(vec<T, A, vl_embed>::space): New.
(vec<T, A, vl_embed>::iterate): New.
(vec<T, A, vl_embed>::iterate): New.
(vec<T, A, vl_embed>::copy): New.
(vec<T, A, vl_embed>::splice): New.
(vec<T, A, vl_embed>::quick_push New.
(vec<T, A, vl_embed>::pop New.
(vec<T, A, vl_embed>::truncate): New.
(vec<T, A, vl_embed>::quick_insert): New.
(vec<T, A, vl_embed>::ordered_remove): New.
(vec<T, A, vl_embed>::unordered_remove): New.
(vec<T, A, vl_embed>::block_remove): New.
(vec<T, A, vl_embed>::qsort): New.
(vec<T, A, vl_embed>::lower_bound): New.
(vec<T, A, vl_embed>::embedded_size): New.
(vec<T, A, vl_embed>::embedded_init): New.
(vec<T, A, vl_embed>::quick_grow): New.
(vec<T, A, vl_embed>::quick_grow_cleared): New.
(vec_safe_space): New.
(vec_safe_length): New.
(vec_safe_address): New.
(vec_safe_is_empty): New.
(vec_safe_reserve): New.
(vec_safe_reserve_exact): New.
(vec_alloc): New.
(vec_free): New.
(vec_safe_grow): New.
(vec_safe_grow_cleared): New.
(vec_safe_iterate): New.
(vec_safe_push): New.
(vec_safe_insert): New.
(vec_safe_truncate): New.
(vec_safe_copy): New.
(vec_safe_splice): New.
(vec<T, A, vl_ptr>): New partial specialization for the space
efficient layout.
(vec<T, A, vl_ptr>::exists): New.
(vec<T, A, vl_ptr>::is_empty): New.
(vec<T, A, vl_ptr>::length): New.
(vec<T, A, vl_ptr>::address): New.
(vec<T, A, vl_ptr>::operator[]): New.
(vec<T, A, vl_ptr>::operator!=): New.
(vec<T, A, vl_ptr>::operator==): New.
(vec<T, A, vl_ptr>::last): New.
(vec<T, A, vl_ptr>::space): New.
(vec<T, A, vl_ptr>::iterate): New.
(vec<T, A, vl_ptr>::copy): New.
(vec<T, A, vl_ptr>::reserve): New.
(vec<T, A, vl_ptr>::reserve_exact): New.
(vec<T, A, vl_ptr>::splice): New.
(vec<T, A, vl_ptr>::safe_splice): New.
(vec<T, A, vl_ptr>::quick_push): New.
(vec<T, A, vl_ptr>::safe_push): New.
(vec<T, A, vl_ptr>::pop): New.
(vec<T, A, vl_ptr>::truncate): New.
(vec<T, A, vl_ptr>::safe_grow): New.
(vec<T, A, vl_ptr>::safe_grow_cleared): New.
(vec<T, A, vl_ptr>::quick_grow): New.
(vec<T, A, vl_ptr>::quick_grow_cleared): New.
(vec<T, A, vl_ptr>::quick_insert): New.
(vec<T, A, vl_ptr>::safe_insert): New.
(vec<T, A, vl_ptr>::ordered_remove): New.
(vec<T, A, vl_ptr>::unordered_remove): New.
(vec<T, A, vl_ptr>::block_remove): New.
(vec<T, A, vl_ptr>::qsort): New.
(vec<T, A, vl_ptr>::lower_bound): New.
(vec_stack_alloc): Define.
(FOR_EACH_VEC_SAFE_ELT): Define.
* vecir.h: Remove. Update all users.
* vecprim.h: Remove. Update all users.
Move uchar to coretypes.h.
* Makefile.in (VEC_H): Add $(GGC_H).
Remove vecir.h and vecprim.h dependencies everywhere.
2012-11-16 Diego Novillo <dnovillo@google.com>
* gengtype-lex.l (VEC): Remove.
Add characters in the set [\!\>\.-].
* gengtype-parse.c (token_names): Remove "VEC".
(require_template_declaration): Remove handling of VEC_TOKEN.
(type): Likewise.
Call create_user_defined_type when parsing GTY((user)).
* gengtype-state.c (type_lineloc): handle TYPE_UNDEFINED.
(write_state_undefined_type): New.
(write_state_type): Call write_state_undefined_type for
TYPE_UNDEFINED.
(read_state_type): Call read_state_undefined_type for
TYPE_UNDEFINED.
* gengtype.c (dbgprint_count_type_at): Handle TYPE_UNDEFINED.
(create_user_defined_type): Make extern.
(type_for_name): Factor out of resolve_typedef.
(create_undefined_type): New
(resolve_typedef): Call it when we cannot find a previous
typedef and the type is not a template.
(find_structure): Accept TYPE_UNDEFINED.
(set_gc_used_type): Add argument ALLOWED_UNDEFINED_TYPES,
default to false.
Emit an error for TYPE_UNDEFINED unless LEVEL is GC_UNUSED or
ALLOWED_UNDEFINED_TYPES is set.
Set ALLOWED_UNDEFINED_TYPES to true for TYPE_USER_STRUCT.
(filter_type_name): Accept templates with more than one
argument.
(output_mangled_typename): Handle TYPE_UNDEFINED
(walk_type): Likewise.
(write_types_process_field): Likewise.
(write_func_for_structure): If CHAIN_NEXT is set, ORIG_S
should not be a user-defined type.
(write_types_local_user_process_field): Handle TYPE_ARRAY,
TYPE_NONE and TYPE_UNDEFINED.
(write_types_local_process_field): Likewise.
(contains_scalar_p): Return 0 for TYPE_USER_STRUCT.
(write_root): Reject user-defined types that are not pointers.
Handle TYPE_NONE, TYPE_UNDEFINED, TYPE_UNION, TYPE_LANG_STRUCT
and TYPE_PARAM_STRUCT.
(output_typename): Handle TYPE_NONE, TYPE_UNDEFINED, and
TYPE_ARRAY.
(dump_typekind): Handle TYPE_UNDEFINED.
* gengtype.h (enum typekind): Add TYPE_UNDEFINED.
(create_user_defined_type): Declare.
(enum gty_token): Remove VEC_TOKEN.
2012-11-16 Diego Novillo <dnovillo@google.com>
Adjust for new vec API (http://gcc.gnu.org/wiki/cxx-conversion/cxx-vec)
* coretypes.h (uchar): Define.
* alias.c: Use new vec API in vec.h.
* asan.c: Likewise.
* attribs.c: Likewise.
* basic-block.h: Likewise.
* bb-reorder.c: Likewise.
* builtins.c: Likewise.
* calls.c: Likewise.
* cfg.c: Likewise.
* cfganal.c: Likewise.
* cfgcleanup.c: Likewise.
* cfgexpand.c: Likewise.
* cfghooks.c: Likewise.
* cfghooks.h: Likewise.
* cfgloop.c: Likewise.
* cfgloop.h: Likewise.
* cfgloopanal.c: Likewise.
* cfgloopmanip.c: Likewise.
* cfgrtl.c: Likewise.
* cgraph.c: Likewise.
* cgraph.h: Likewise.
* cgraphclones.c: Likewise.
* cgraphunit.c: Likewise.
* combine.c: Likewise.
* compare-elim.c: Likewise.
* coverage.c: Likewise.
* cprop.c: Likewise.
* data-streamer.h: Likewise.
* dbxout.c: Likewise.
* dce.c: Likewise.
* df-core.c: Likewise.
* df-problems.c: Likewise.
* df-scan.c: Likewise.
* dominance.c: Likewise.
* domwalk.c: Likewise.
* domwalk.h: Likewise.
* dse.c: Likewise.
* dwarf2cfi.c: Likewise.
* dwarf2out.c: Likewise.
* dwarf2out.h: Likewise.
* emit-rtl.c: Likewise.
* except.c: Likewise.
* except.h: Likewise.
* expr.c: Likewise.
* expr.h: Likewise.
* final.c: Likewise.
* fold-const.c: Likewise.
* function.c: Likewise.
* function.h: Likewise.
* fwprop.c: Likewise.
* gcc.c: Likewise.
* gcse.c: Likewise.
* genattr.c: Likewise.
* genattrtab.c: Likewise.
* genautomata.c: Likewise.
* genextract.c: Likewise.
* genopinit.c: Likewise
* ggc-common.c: Likewise.
* ggc.h: Likewise.
* gimple-low.c: Likewise.
* gimple-ssa-strength-reduction.c: Likewise.
* gimple-streamer-in.c: Likewise.
* gimple.c: Likewise.
* gimple.h: Likewise.
* gimplify.c: Likewise.
* graph.c: Likewise.
* graphds.c: Likewise.
* graphds.h: Likewise.
* graphite-blocking.c: Likewise.
* graphite-clast-to-gimple.c: Likewise.
* graphite-dependences.c: Likewise.
* graphite-interchange.c: Likewise.
* graphite-optimize-isl.c: Likewise.
* graphite-poly.c: Likewise.
* graphite-poly.h: Likewise.
* graphite-scop-detection.c: Likewise.
* graphite-scop-detection.h: Likewise.
* graphite-sese-to-poly.c: Likewise.
* graphite.c: Likewise.
* godump.c: Likewise.
* haifa-sched.c: Likewise.
* hw-doloop.c: Likewise.
* hw-doloop.h: Likewise.
* ifcvt.c: Likewise.
* insn-addr.h: Likewise.
* ipa-cp.c: Likewise.
* ipa-inline-analysis.c: Likewise.
* ipa-inline-transform.c: Likewise.
* ipa-inline.c: Likewise.
* ipa-inline.h: Likewise.
* ipa-prop.c: Likewise.
* ipa-prop.h: Likewise.
* ipa-pure-const.c: Likewise.
* ipa-ref-inline.h: Likewise.
* ipa-ref.c: Likewise.
* ipa-ref.h: Likewise.
* ipa-reference.c: Likewise.
* ipa-split.c: Likewise.
* ipa-utils.c: Likewise.
* ipa-utils.h: Likewise.
* ipa.c: Likewise.
* ira-build.c: Likewise.
* ira-color.c: Likewise.
* ira-emit.c: Likewise.
* ira-int.h: Likewise.
* ira.c: Likewise.
* loop-invariant.c: Likewise.
* loop-unroll.c: Likewise.
* lower-subreg.c: Likewise.
* lra-lives.c: Likewise.
* lra.c: Likewise.
* lto-cgraph.c: Likewise.
* lto-section-out.c: Likewise.
* lto-streamer-in.c: Likewise.
* lto-streamer-out.c: Likewise.
* lto-streamer.h: Likewise.
* lto-symtab.c: Likewise.
* mcf.c: Likewise.
* modulo-sched.c: Likewise.
* omp-low.c: Likewise.
* opts-common.c: Likewise.
* opts-global.c: Likewise.
* opts.c: Likewise.
* opts.h: Likewise.
* passes.c: Likewise.
* predict.c: Likewise.
* print-tree.c: Likewise.
* profile.c: Likewise.
* profile.h: Likewise.
* read-rtl.c: Likewise.
* ree.c: Likewise.
* reg-stack.c: Likewise.
* regrename.c: Likewise.
* regrename.h: Likewise.
* reload.c: Likewise.
* reload.h: Likewise.
* reload1.c: Likewise.
* rtl.h: Likewise.
* sched-deps.c: Likewise.
* sched-int.h: Likewise.
* sdbout.c: Likewise.
* sel-sched-dump.c: Likewise.
* sel-sched-ir.c: Likewise.
* sel-sched-ir.h: Likewise.
* sel-sched.c: Likewise.
* sese.c: Likewise.
* sese.h: Likewise.
* statistics.h: Likewise.
* stmt.c: Likewise.
* stor-layout.c: Likewise.
* store-motion.c: Likewise.
* tlink.c: Likewise.
* toplev.c: Likewise.
* trans-mem.c: Likewise.
* tree-browser.c: Likewise.
* tree-call-cdce.c: Likewise.
* tree-cfg.c: Likewise.
* tree-cfgcleanup.c: Likewise.
* tree-chrec.c: Likewise.
* tree-chrec.h: Likewise.
* tree-complex.c: Likewise.
* tree-data-ref.c: Likewise.
* tree-data-ref.h: Likewise.
* tree-dfa.c: Likewise.
* tree-diagnostic.c: Likewise.
* tree-dump.c: Likewise.
* tree-eh.c: Likewise.
* tree-emutls.c: Likewise.
* tree-flow.h: Likewise.
* tree-if-conv.c: Likewise.
* tree-inline.c: Likewise.
* tree-inline.h: Likewise.
* tree-into-ssa.c: Likewise.
* tree-iterator.c: Likewise.
* tree-loop-distribution.c: Likewise.
* tree-mudflap.c: Likewise.
* tree-optimize.c: Likewise.
* tree-outof-ssa.c: Likewise.
* tree-parloops.c: Likewise.
* tree-phinodes.c: Likewise.
* tree-predcom.c: Likewise.
* tree-pretty-print.c: Likewise.
* tree-scalar-evolution.c: Likewise.
* tree-sra.c: Likewise.
* tree-ssa-address.c: Likewise.
* tree-ssa-alias.c: Likewise.
* tree-ssa-ccp.c: Likewise.
* tree-ssa-coalesce.c: Likewise.
* tree-ssa-dce.c: Likewise.
* tree-ssa-dom.c: Likewise.
* tree-ssa-forwprop.c: Likewise.
* tree-ssa-live.c: Likewise.
* tree-ssa-live.h: Likewise.
* tree-ssa-loop-im.c: Likewise.
* tree-ssa-loop-ivcanon.c: Likewise.
* tree-ssa-loop-ivopts.c: Likewise.
* tree-ssa-loop-manip.c: Likewise.
* tree-ssa-loop-niter.c: Likewise.
* tree-ssa-loop-prefetch.c: Likewise.
* tree-ssa-math-opts.c: Likewise.
* tree-ssa-operands.c: Likewise.
* tree-ssa-phiopt.c: Likewise.
* tree-ssa-phiprop.c: Likewise.
* tree-ssa-pre.c: Likewise.
* tree-ssa-propagate.c: Likewise.
* tree-ssa-reassoc.c: Likewise.
* tree-ssa-sccvn.c: Likewise.
* tree-ssa-sccvn.h: Likewise.
* tree-ssa-strlen.c: Likewise.
* tree-ssa-structalias.c: Likewise.
* tree-ssa-tail-merge.c: Likewise.
* tree-ssa-threadedge.c: Likewise.
* tree-ssa-threadupdate.c: Likewise.
* tree-ssa-uncprop.c: Likewise.
* tree-ssa-uninit.c: Likewise.
* tree-ssa.c: Likewise.
* tree-ssanames.c: Likewise.
* tree-stdarg.c: Likewise.
* tree-streamer-in.c: Likewise.
* tree-streamer-out.c: Likewise.
* tree-streamer.c: Likewise.
* tree-streamer.h: Likewise.
* tree-switch-conversion.c: Likewise.
* tree-vect-data-refs.c: Likewise.
* tree-vect-generic.c: Likewise.
* tree-vect-loop-manip.c: Likewise.
* tree-vect-loop.c: Likewise.
* tree-vect-patterns.c: Likewise.
* tree-vect-slp.c: Likewise.
* tree-vect-stmts.c: Likewise.
* tree-vectorizer.c: Likewise.
* tree-vectorizer.h: Likewise.
* tree-vrp.c: Likewise.
* tree.c: Likewise.
* tree.h: Likewise.
* value-prof.c: Likewise.
* value-prof.h: Likewise.
* var-tracking.c: Likewise.
* varasm.c: Likewise.
* varpool.c: Likewise.
* vmsdbgout.c: Likewise.
* config/bfin/bfin.c: Likewise.
* config/c6x/c6x.c: Likewise.
* config/darwin.c: Likewise.
* config/i386/i386.c: Likewise.
* config/ia64/ia64.c: Likewise.
* config/mep/mep.c: Likewise.
* config/mips/mips.c: Likewise.
* config/pa/pa.c: Likewise.
* config/rs6000/rs6000-c.c: Likewise.
* config/rs6000/rs6000.c: Likewise.
* config/rx/rx.c: Likewise.
* config/spu/spu-c.c: Likewise.
* config/vms/vms.c: Likewise.
* config/vxworks.c: Likewise.
* config/epiphany/resolve-sw-modes.c: Likewise.
From-SVN: r193595
2012-11-18 03:54:30 +01:00
|
|
|
asan_add_global (tree decl, tree type, vec<constructor_elt, va_gc> *v)
|
Implement protection of global variables
This patch implements the protection of global variables. See the
comments appended to the beginning of the asan.c file.
* varasm.c: Include asan.h.
(assemble_noswitch_variable): Grow size by asan_red_zone_size
if decl is asan protected.
(place_block_symbol): Likewise.
(assemble_variable): If decl is asan protected, increase
DECL_ALIGN if needed, and for decls emitted using
assemble_variable_contents append padding zeros after it.
* Makefile.in (varasm.o): Depend on asan.h.
* asan.c: Include output.h.
(asan_pp, asan_pp_initialized, asan_ctor_statements): New variables.
(asan_pp_initialize, asan_pp_string): New functions.
(asan_emit_stack_protection): Use asan_pp{,_initialized}
instead of local pp{,_initialized} vars, use asan_pp_initialize
and asan_pp_string helpers.
(asan_needs_local_alias, asan_protect_global,
asan_global_struct, asan_add_global): New functions.
(asan_finish_file): Protect global vars that can be protected. Use
asan_ctor_statements instead of ctor_statements
* asan.h (asan_protect_global): New prototype.
(asan_red_zone_size): New inline function.
Co-Authored-By: Wei Mi <wmi@google.com>
From-SVN: r193437
2012-11-12 16:52:42 +01:00
|
|
|
{
|
|
|
|
tree init, uptr = TREE_TYPE (DECL_CHAIN (TYPE_FIELDS (type)));
|
|
|
|
unsigned HOST_WIDE_INT size;
|
2013-11-04 22:33:31 +01:00
|
|
|
tree str_cst, module_name_cst, refdecl = decl;
|
This patch rewrites the old VEC macro-based interface into a new one based on the template class 'vec'.
This patch rewrites the old VEC macro-based interface into a new one
based on the template class 'vec'. The user-visible changes are
described in http://gcc.gnu.org/wiki/cxx-conversion/cxx-vec.
I have tested the patch pretty extensively:
- Regular bootstraps on x86_64, ppc, ia64, sparc and hppa.
- Bootstraps with --enable-checking=release
- Bootstraps with --enable-checking=gc,gcac
- Basic builds on all targets (using contrib/config-list.mk).
We no longer access the vectors via VEC_* macros. The pattern is
"VEC_operation (T, A, V, args)" becomes "V.operation (args)".
The only thing I could not do is create proper ctors and dtors for the
vec class. Since these vectors are stored in unions, we
have to keep them as PODs (C++03 does not allow non-PODs in unions).
This means that creation and destruction must be explicit. There is a
new method vec<type, allocation, layout>::create() and another vec<type,
allocation, layout>::destroy() to allocate the internal vector.
For vectors that must be pointers, there is a family of free functions
that implement the operations that need to tolerate NULL vectors.
These functions all start with the prefix 'vec_safe_'. See the wiki
page for details.
The gengtype change removes the special handling for VEC() that used
to exist in gengtype. Additionally, it allows gengtype to recognize
templates of more than one argument and introduces the concept of an
undefined type (useful for template arguments that may or may not be
types).
When a TYPE_UNDEFINED is reached, gengtype will ignore it if it
happens inside a type marked with GTY((user)). Otherwise, it will
emit an error.
Finally, gengtype rejects root types marked GTY((user)) that are not
first class pointers.
2012-11-16 Diego Novillo <dnovillo@google.com>
VEC API overhaul (http://gcc.gnu.org/wiki/cxx-conversion/cxx-vec)
* vec.c (register_overhead): Convert it into
member function of vec_prefix.
(release_overhead): Likewise.
(calculate_allocation): Likewise.
(vec_heap_free): Remove.
(vec_gc_o_reserve_1): Remove.
(vec_heap_o_reserve_1): Remove.
(vec_stack_o_reserve_1): Remove.
(vec_stack_o_reserve_exact): Remove.
(register_stack_vec): New.
(stack_vec_register_index): New.
(unregister_stack_vec): New.
(vec_assert_fail): Remove.
* vec.h: Conditionally include ggc.h. Document conditional
hackery.
Update top-level documentation.
(ALONE_VEC_CHECK_INFO): Remove.
(VEC_CHECK_INFO): Remove.
(ALONE_VEC_CHECK_DECL): Remove.
(VEC_CHECK_DECL): Remove.
(ALONE_VEC_CHECK_PASS): Remove.
(VEC_CHECK_PASS): Remove.
(VEC_ASSERT): Remove.
(vec_prefix): Add friends va_gc, va_gc_atomic, va_heap and
va_stack.
Mark fields alloc_ and num_ as protected.
(struct vec_t): Remove. Remove all function members.
(struct vl_embed): Declare.
(struct vl_ptr): Declare.
(free): Remove.
(reserve_exact): Remove.
(reserve): Remove.
(safe_splice): Remove.
(safe_push): Remove.
(safe_grow): Remove.
(safe_grow_cleared): Remove.
(safe_insert): Remove.
(DEF_VEC_I): Remove.
(DEF_VEC_ALLOC_I): Remove.
(DEF_VEC_P): Remove.
(DEF_VEC_ALLOC_P): Remove.
(DEF_VEC_O): Remove.
(DEF_VEC_ALLOC_O): Remove.
(DEF_VEC_ALLOC_P_STACK): Remove.
(DEF_VEC_ALLOC_O_STACK): Remove.
(DEF_VEC_ALLOC_I_STACK): Remove.
(DEF_VEC_A): Remove.
(DEF_VEC_ALLOC_A): Remove.
(vec_stack_p_reserve_exact_1): Remove.
(vec_stack_o_reserve): Remove.
(vec_stack_o_reserve_exact): Remove.
(VEC_length): Remove.
(VEC_empty): Remove.
(VEC_address): Remove.
(vec_address): Remove.
(VEC_last): Remove.
(VEC_index): Remove.
(VEC_iterate): Remove.
(VEC_embedded_size): Remove.
(VEC_embedded_init): Remove.
(VEC_free): Remove.
(VEC_copy): Remove.
(VEC_space): Remove.
(VEC_reserve): Remove.
(VEC_reserve_exact): Remove.
(VEC_splice): Remove.
(VEC_safe_splice): Remove.
(VEC_quick_push): Remove.
(VEC_safe_push): Remove.
(VEC_pop): Remove.
(VEC_truncate): Remove.
(VEC_safe_grow): Remove.
(VEC_replace): Remove.
(VEC_quick_insert): Remove.
(VEC_safe_insert): Remove.
(VEC_ordered_remove): Remove.
(VEC_unordered_remove): Remove.
(VEC_block_remove): Remove.
(VEC_lower_bound): Remove.
(VEC_alloc): Remove.
(VEC_qsort): Remove.
(va_heap): Declare.
(va_heap::default_layout): New typedef to vl_ptr.
(va_heap::reserve): New.
(va_heap::release): New.
(va_gc): Declare.
(va_gc::default_layout): New typedef to vl_embed.
(va_gc::reserve): New.
(va_gc::release): New.
(va_gc_atomic): Declare. Inherit from va_gc.
(va_stack): Declare.
(va_stack::default_layout): New typedef to vl_ptr.
(va_stack::alloc): New.
(va_stack::reserve): New.
(va_stack::release): New.
(register_stack_vec): Declare.
(stack_vec_register_index): Declare.
(unregister_stack_vec): Declare.
(vec<T, A = va_heap, L = typename A::default_layout>): Declare
empty vec template.
(vec<T, A, vl_embed>): Partial specialization for embedded
layout.
(vec<T, A, vl_embed>::allocated): New.
(vec<T, A, vl_embed>::length): New.
(vec<T, A, vl_embed>::is_empty): New.
(vec<T, A, vl_embed>::address): New.
(vec<T, A, vl_embed>::operator[]): New.
(vec<T, A, vl_embed>::last New.
(vec<T, A, vl_embed>::space): New.
(vec<T, A, vl_embed>::iterate): New.
(vec<T, A, vl_embed>::iterate): New.
(vec<T, A, vl_embed>::copy): New.
(vec<T, A, vl_embed>::splice): New.
(vec<T, A, vl_embed>::quick_push New.
(vec<T, A, vl_embed>::pop New.
(vec<T, A, vl_embed>::truncate): New.
(vec<T, A, vl_embed>::quick_insert): New.
(vec<T, A, vl_embed>::ordered_remove): New.
(vec<T, A, vl_embed>::unordered_remove): New.
(vec<T, A, vl_embed>::block_remove): New.
(vec<T, A, vl_embed>::qsort): New.
(vec<T, A, vl_embed>::lower_bound): New.
(vec<T, A, vl_embed>::embedded_size): New.
(vec<T, A, vl_embed>::embedded_init): New.
(vec<T, A, vl_embed>::quick_grow): New.
(vec<T, A, vl_embed>::quick_grow_cleared): New.
(vec_safe_space): New.
(vec_safe_length): New.
(vec_safe_address): New.
(vec_safe_is_empty): New.
(vec_safe_reserve): New.
(vec_safe_reserve_exact): New.
(vec_alloc): New.
(vec_free): New.
(vec_safe_grow): New.
(vec_safe_grow_cleared): New.
(vec_safe_iterate): New.
(vec_safe_push): New.
(vec_safe_insert): New.
(vec_safe_truncate): New.
(vec_safe_copy): New.
(vec_safe_splice): New.
(vec<T, A, vl_ptr>): New partial specialization for the space
efficient layout.
(vec<T, A, vl_ptr>::exists): New.
(vec<T, A, vl_ptr>::is_empty): New.
(vec<T, A, vl_ptr>::length): New.
(vec<T, A, vl_ptr>::address): New.
(vec<T, A, vl_ptr>::operator[]): New.
(vec<T, A, vl_ptr>::operator!=): New.
(vec<T, A, vl_ptr>::operator==): New.
(vec<T, A, vl_ptr>::last): New.
(vec<T, A, vl_ptr>::space): New.
(vec<T, A, vl_ptr>::iterate): New.
(vec<T, A, vl_ptr>::copy): New.
(vec<T, A, vl_ptr>::reserve): New.
(vec<T, A, vl_ptr>::reserve_exact): New.
(vec<T, A, vl_ptr>::splice): New.
(vec<T, A, vl_ptr>::safe_splice): New.
(vec<T, A, vl_ptr>::quick_push): New.
(vec<T, A, vl_ptr>::safe_push): New.
(vec<T, A, vl_ptr>::pop): New.
(vec<T, A, vl_ptr>::truncate): New.
(vec<T, A, vl_ptr>::safe_grow): New.
(vec<T, A, vl_ptr>::safe_grow_cleared): New.
(vec<T, A, vl_ptr>::quick_grow): New.
(vec<T, A, vl_ptr>::quick_grow_cleared): New.
(vec<T, A, vl_ptr>::quick_insert): New.
(vec<T, A, vl_ptr>::safe_insert): New.
(vec<T, A, vl_ptr>::ordered_remove): New.
(vec<T, A, vl_ptr>::unordered_remove): New.
(vec<T, A, vl_ptr>::block_remove): New.
(vec<T, A, vl_ptr>::qsort): New.
(vec<T, A, vl_ptr>::lower_bound): New.
(vec_stack_alloc): Define.
(FOR_EACH_VEC_SAFE_ELT): Define.
* vecir.h: Remove. Update all users.
* vecprim.h: Remove. Update all users.
Move uchar to coretypes.h.
* Makefile.in (VEC_H): Add $(GGC_H).
Remove vecir.h and vecprim.h dependencies everywhere.
2012-11-16 Diego Novillo <dnovillo@google.com>
* gengtype-lex.l (VEC): Remove.
Add characters in the set [\!\>\.-].
* gengtype-parse.c (token_names): Remove "VEC".
(require_template_declaration): Remove handling of VEC_TOKEN.
(type): Likewise.
Call create_user_defined_type when parsing GTY((user)).
* gengtype-state.c (type_lineloc): handle TYPE_UNDEFINED.
(write_state_undefined_type): New.
(write_state_type): Call write_state_undefined_type for
TYPE_UNDEFINED.
(read_state_type): Call read_state_undefined_type for
TYPE_UNDEFINED.
* gengtype.c (dbgprint_count_type_at): Handle TYPE_UNDEFINED.
(create_user_defined_type): Make extern.
(type_for_name): Factor out of resolve_typedef.
(create_undefined_type): New
(resolve_typedef): Call it when we cannot find a previous
typedef and the type is not a template.
(find_structure): Accept TYPE_UNDEFINED.
(set_gc_used_type): Add argument ALLOWED_UNDEFINED_TYPES,
default to false.
Emit an error for TYPE_UNDEFINED unless LEVEL is GC_UNUSED or
ALLOWED_UNDEFINED_TYPES is set.
Set ALLOWED_UNDEFINED_TYPES to true for TYPE_USER_STRUCT.
(filter_type_name): Accept templates with more than one
argument.
(output_mangled_typename): Handle TYPE_UNDEFINED
(walk_type): Likewise.
(write_types_process_field): Likewise.
(write_func_for_structure): If CHAIN_NEXT is set, ORIG_S
should not be a user-defined type.
(write_types_local_user_process_field): Handle TYPE_ARRAY,
TYPE_NONE and TYPE_UNDEFINED.
(write_types_local_process_field): Likewise.
(contains_scalar_p): Return 0 for TYPE_USER_STRUCT.
(write_root): Reject user-defined types that are not pointers.
Handle TYPE_NONE, TYPE_UNDEFINED, TYPE_UNION, TYPE_LANG_STRUCT
and TYPE_PARAM_STRUCT.
(output_typename): Handle TYPE_NONE, TYPE_UNDEFINED, and
TYPE_ARRAY.
(dump_typekind): Handle TYPE_UNDEFINED.
* gengtype.h (enum typekind): Add TYPE_UNDEFINED.
(create_user_defined_type): Declare.
(enum gty_token): Remove VEC_TOKEN.
2012-11-16 Diego Novillo <dnovillo@google.com>
Adjust for new vec API (http://gcc.gnu.org/wiki/cxx-conversion/cxx-vec)
* coretypes.h (uchar): Define.
* alias.c: Use new vec API in vec.h.
* asan.c: Likewise.
* attribs.c: Likewise.
* basic-block.h: Likewise.
* bb-reorder.c: Likewise.
* builtins.c: Likewise.
* calls.c: Likewise.
* cfg.c: Likewise.
* cfganal.c: Likewise.
* cfgcleanup.c: Likewise.
* cfgexpand.c: Likewise.
* cfghooks.c: Likewise.
* cfghooks.h: Likewise.
* cfgloop.c: Likewise.
* cfgloop.h: Likewise.
* cfgloopanal.c: Likewise.
* cfgloopmanip.c: Likewise.
* cfgrtl.c: Likewise.
* cgraph.c: Likewise.
* cgraph.h: Likewise.
* cgraphclones.c: Likewise.
* cgraphunit.c: Likewise.
* combine.c: Likewise.
* compare-elim.c: Likewise.
* coverage.c: Likewise.
* cprop.c: Likewise.
* data-streamer.h: Likewise.
* dbxout.c: Likewise.
* dce.c: Likewise.
* df-core.c: Likewise.
* df-problems.c: Likewise.
* df-scan.c: Likewise.
* dominance.c: Likewise.
* domwalk.c: Likewise.
* domwalk.h: Likewise.
* dse.c: Likewise.
* dwarf2cfi.c: Likewise.
* dwarf2out.c: Likewise.
* dwarf2out.h: Likewise.
* emit-rtl.c: Likewise.
* except.c: Likewise.
* except.h: Likewise.
* expr.c: Likewise.
* expr.h: Likewise.
* final.c: Likewise.
* fold-const.c: Likewise.
* function.c: Likewise.
* function.h: Likewise.
* fwprop.c: Likewise.
* gcc.c: Likewise.
* gcse.c: Likewise.
* genattr.c: Likewise.
* genattrtab.c: Likewise.
* genautomata.c: Likewise.
* genextract.c: Likewise.
* genopinit.c: Likewise
* ggc-common.c: Likewise.
* ggc.h: Likewise.
* gimple-low.c: Likewise.
* gimple-ssa-strength-reduction.c: Likewise.
* gimple-streamer-in.c: Likewise.
* gimple.c: Likewise.
* gimple.h: Likewise.
* gimplify.c: Likewise.
* graph.c: Likewise.
* graphds.c: Likewise.
* graphds.h: Likewise.
* graphite-blocking.c: Likewise.
* graphite-clast-to-gimple.c: Likewise.
* graphite-dependences.c: Likewise.
* graphite-interchange.c: Likewise.
* graphite-optimize-isl.c: Likewise.
* graphite-poly.c: Likewise.
* graphite-poly.h: Likewise.
* graphite-scop-detection.c: Likewise.
* graphite-scop-detection.h: Likewise.
* graphite-sese-to-poly.c: Likewise.
* graphite.c: Likewise.
* godump.c: Likewise.
* haifa-sched.c: Likewise.
* hw-doloop.c: Likewise.
* hw-doloop.h: Likewise.
* ifcvt.c: Likewise.
* insn-addr.h: Likewise.
* ipa-cp.c: Likewise.
* ipa-inline-analysis.c: Likewise.
* ipa-inline-transform.c: Likewise.
* ipa-inline.c: Likewise.
* ipa-inline.h: Likewise.
* ipa-prop.c: Likewise.
* ipa-prop.h: Likewise.
* ipa-pure-const.c: Likewise.
* ipa-ref-inline.h: Likewise.
* ipa-ref.c: Likewise.
* ipa-ref.h: Likewise.
* ipa-reference.c: Likewise.
* ipa-split.c: Likewise.
* ipa-utils.c: Likewise.
* ipa-utils.h: Likewise.
* ipa.c: Likewise.
* ira-build.c: Likewise.
* ira-color.c: Likewise.
* ira-emit.c: Likewise.
* ira-int.h: Likewise.
* ira.c: Likewise.
* loop-invariant.c: Likewise.
* loop-unroll.c: Likewise.
* lower-subreg.c: Likewise.
* lra-lives.c: Likewise.
* lra.c: Likewise.
* lto-cgraph.c: Likewise.
* lto-section-out.c: Likewise.
* lto-streamer-in.c: Likewise.
* lto-streamer-out.c: Likewise.
* lto-streamer.h: Likewise.
* lto-symtab.c: Likewise.
* mcf.c: Likewise.
* modulo-sched.c: Likewise.
* omp-low.c: Likewise.
* opts-common.c: Likewise.
* opts-global.c: Likewise.
* opts.c: Likewise.
* opts.h: Likewise.
* passes.c: Likewise.
* predict.c: Likewise.
* print-tree.c: Likewise.
* profile.c: Likewise.
* profile.h: Likewise.
* read-rtl.c: Likewise.
* ree.c: Likewise.
* reg-stack.c: Likewise.
* regrename.c: Likewise.
* regrename.h: Likewise.
* reload.c: Likewise.
* reload.h: Likewise.
* reload1.c: Likewise.
* rtl.h: Likewise.
* sched-deps.c: Likewise.
* sched-int.h: Likewise.
* sdbout.c: Likewise.
* sel-sched-dump.c: Likewise.
* sel-sched-ir.c: Likewise.
* sel-sched-ir.h: Likewise.
* sel-sched.c: Likewise.
* sese.c: Likewise.
* sese.h: Likewise.
* statistics.h: Likewise.
* stmt.c: Likewise.
* stor-layout.c: Likewise.
* store-motion.c: Likewise.
* tlink.c: Likewise.
* toplev.c: Likewise.
* trans-mem.c: Likewise.
* tree-browser.c: Likewise.
* tree-call-cdce.c: Likewise.
* tree-cfg.c: Likewise.
* tree-cfgcleanup.c: Likewise.
* tree-chrec.c: Likewise.
* tree-chrec.h: Likewise.
* tree-complex.c: Likewise.
* tree-data-ref.c: Likewise.
* tree-data-ref.h: Likewise.
* tree-dfa.c: Likewise.
* tree-diagnostic.c: Likewise.
* tree-dump.c: Likewise.
* tree-eh.c: Likewise.
* tree-emutls.c: Likewise.
* tree-flow.h: Likewise.
* tree-if-conv.c: Likewise.
* tree-inline.c: Likewise.
* tree-inline.h: Likewise.
* tree-into-ssa.c: Likewise.
* tree-iterator.c: Likewise.
* tree-loop-distribution.c: Likewise.
* tree-mudflap.c: Likewise.
* tree-optimize.c: Likewise.
* tree-outof-ssa.c: Likewise.
* tree-parloops.c: Likewise.
* tree-phinodes.c: Likewise.
* tree-predcom.c: Likewise.
* tree-pretty-print.c: Likewise.
* tree-scalar-evolution.c: Likewise.
* tree-sra.c: Likewise.
* tree-ssa-address.c: Likewise.
* tree-ssa-alias.c: Likewise.
* tree-ssa-ccp.c: Likewise.
* tree-ssa-coalesce.c: Likewise.
* tree-ssa-dce.c: Likewise.
* tree-ssa-dom.c: Likewise.
* tree-ssa-forwprop.c: Likewise.
* tree-ssa-live.c: Likewise.
* tree-ssa-live.h: Likewise.
* tree-ssa-loop-im.c: Likewise.
* tree-ssa-loop-ivcanon.c: Likewise.
* tree-ssa-loop-ivopts.c: Likewise.
* tree-ssa-loop-manip.c: Likewise.
* tree-ssa-loop-niter.c: Likewise.
* tree-ssa-loop-prefetch.c: Likewise.
* tree-ssa-math-opts.c: Likewise.
* tree-ssa-operands.c: Likewise.
* tree-ssa-phiopt.c: Likewise.
* tree-ssa-phiprop.c: Likewise.
* tree-ssa-pre.c: Likewise.
* tree-ssa-propagate.c: Likewise.
* tree-ssa-reassoc.c: Likewise.
* tree-ssa-sccvn.c: Likewise.
* tree-ssa-sccvn.h: Likewise.
* tree-ssa-strlen.c: Likewise.
* tree-ssa-structalias.c: Likewise.
* tree-ssa-tail-merge.c: Likewise.
* tree-ssa-threadedge.c: Likewise.
* tree-ssa-threadupdate.c: Likewise.
* tree-ssa-uncprop.c: Likewise.
* tree-ssa-uninit.c: Likewise.
* tree-ssa.c: Likewise.
* tree-ssanames.c: Likewise.
* tree-stdarg.c: Likewise.
* tree-streamer-in.c: Likewise.
* tree-streamer-out.c: Likewise.
* tree-streamer.c: Likewise.
* tree-streamer.h: Likewise.
* tree-switch-conversion.c: Likewise.
* tree-vect-data-refs.c: Likewise.
* tree-vect-generic.c: Likewise.
* tree-vect-loop-manip.c: Likewise.
* tree-vect-loop.c: Likewise.
* tree-vect-patterns.c: Likewise.
* tree-vect-slp.c: Likewise.
* tree-vect-stmts.c: Likewise.
* tree-vectorizer.c: Likewise.
* tree-vectorizer.h: Likewise.
* tree-vrp.c: Likewise.
* tree.c: Likewise.
* tree.h: Likewise.
* value-prof.c: Likewise.
* value-prof.h: Likewise.
* var-tracking.c: Likewise.
* varasm.c: Likewise.
* varpool.c: Likewise.
* vmsdbgout.c: Likewise.
* config/bfin/bfin.c: Likewise.
* config/c6x/c6x.c: Likewise.
* config/darwin.c: Likewise.
* config/i386/i386.c: Likewise.
* config/ia64/ia64.c: Likewise.
* config/mep/mep.c: Likewise.
* config/mips/mips.c: Likewise.
* config/pa/pa.c: Likewise.
* config/rs6000/rs6000-c.c: Likewise.
* config/rs6000/rs6000.c: Likewise.
* config/rx/rx.c: Likewise.
* config/spu/spu-c.c: Likewise.
* config/vms/vms.c: Likewise.
* config/vxworks.c: Likewise.
* config/epiphany/resolve-sw-modes.c: Likewise.
From-SVN: r193595
2012-11-18 03:54:30 +01:00
|
|
|
vec<constructor_elt, va_gc> *vinner = NULL;
|
Implement protection of global variables
This patch implements the protection of global variables. See the
comments appended to the beginning of the asan.c file.
* varasm.c: Include asan.h.
(assemble_noswitch_variable): Grow size by asan_red_zone_size
if decl is asan protected.
(place_block_symbol): Likewise.
(assemble_variable): If decl is asan protected, increase
DECL_ALIGN if needed, and for decls emitted using
assemble_variable_contents append padding zeros after it.
* Makefile.in (varasm.o): Depend on asan.h.
* asan.c: Include output.h.
(asan_pp, asan_pp_initialized, asan_ctor_statements): New variables.
(asan_pp_initialize, asan_pp_string): New functions.
(asan_emit_stack_protection): Use asan_pp{,_initialized}
instead of local pp{,_initialized} vars, use asan_pp_initialize
and asan_pp_string helpers.
(asan_needs_local_alias, asan_protect_global,
asan_global_struct, asan_add_global): New functions.
(asan_finish_file): Protect global vars that can be protected. Use
asan_ctor_statements instead of ctor_statements
* asan.h (asan_protect_global): New prototype.
(asan_red_zone_size): New inline function.
Co-Authored-By: Wei Mi <wmi@google.com>
From-SVN: r193437
2012-11-12 16:52:42 +01:00
|
|
|
|
2013-11-04 22:33:31 +01:00
|
|
|
pretty_printer asan_pp, module_name_pp;
|
Implement protection of global variables
This patch implements the protection of global variables. See the
comments appended to the beginning of the asan.c file.
* varasm.c: Include asan.h.
(assemble_noswitch_variable): Grow size by asan_red_zone_size
if decl is asan protected.
(place_block_symbol): Likewise.
(assemble_variable): If decl is asan protected, increase
DECL_ALIGN if needed, and for decls emitted using
assemble_variable_contents append padding zeros after it.
* Makefile.in (varasm.o): Depend on asan.h.
* asan.c: Include output.h.
(asan_pp, asan_pp_initialized, asan_ctor_statements): New variables.
(asan_pp_initialize, asan_pp_string): New functions.
(asan_emit_stack_protection): Use asan_pp{,_initialized}
instead of local pp{,_initialized} vars, use asan_pp_initialize
and asan_pp_string helpers.
(asan_needs_local_alias, asan_protect_global,
asan_global_struct, asan_add_global): New functions.
(asan_finish_file): Protect global vars that can be protected. Use
asan_ctor_statements instead of ctor_statements
* asan.h (asan_protect_global): New prototype.
(asan_red_zone_size): New inline function.
Co-Authored-By: Wei Mi <wmi@google.com>
From-SVN: r193437
2012-11-12 16:52:42 +01:00
|
|
|
|
|
|
|
if (DECL_NAME (decl))
|
2013-08-05 06:00:57 +02:00
|
|
|
pp_tree_identifier (&asan_pp, DECL_NAME (decl));
|
Implement protection of global variables
This patch implements the protection of global variables. See the
comments appended to the beginning of the asan.c file.
* varasm.c: Include asan.h.
(assemble_noswitch_variable): Grow size by asan_red_zone_size
if decl is asan protected.
(place_block_symbol): Likewise.
(assemble_variable): If decl is asan protected, increase
DECL_ALIGN if needed, and for decls emitted using
assemble_variable_contents append padding zeros after it.
* Makefile.in (varasm.o): Depend on asan.h.
* asan.c: Include output.h.
(asan_pp, asan_pp_initialized, asan_ctor_statements): New variables.
(asan_pp_initialize, asan_pp_string): New functions.
(asan_emit_stack_protection): Use asan_pp{,_initialized}
instead of local pp{,_initialized} vars, use asan_pp_initialize
and asan_pp_string helpers.
(asan_needs_local_alias, asan_protect_global,
asan_global_struct, asan_add_global): New functions.
(asan_finish_file): Protect global vars that can be protected. Use
asan_ctor_statements instead of ctor_statements
* asan.h (asan_protect_global): New prototype.
(asan_red_zone_size): New inline function.
Co-Authored-By: Wei Mi <wmi@google.com>
From-SVN: r193437
2012-11-12 16:52:42 +01:00
|
|
|
else
|
|
|
|
pp_string (&asan_pp, "<unknown>");
|
2013-08-05 07:16:29 +02:00
|
|
|
str_cst = asan_pp_string (&asan_pp);
|
Implement protection of global variables
This patch implements the protection of global variables. See the
comments appended to the beginning of the asan.c file.
* varasm.c: Include asan.h.
(assemble_noswitch_variable): Grow size by asan_red_zone_size
if decl is asan protected.
(place_block_symbol): Likewise.
(assemble_variable): If decl is asan protected, increase
DECL_ALIGN if needed, and for decls emitted using
assemble_variable_contents append padding zeros after it.
* Makefile.in (varasm.o): Depend on asan.h.
* asan.c: Include output.h.
(asan_pp, asan_pp_initialized, asan_ctor_statements): New variables.
(asan_pp_initialize, asan_pp_string): New functions.
(asan_emit_stack_protection): Use asan_pp{,_initialized}
instead of local pp{,_initialized} vars, use asan_pp_initialize
and asan_pp_string helpers.
(asan_needs_local_alias, asan_protect_global,
asan_global_struct, asan_add_global): New functions.
(asan_finish_file): Protect global vars that can be protected. Use
asan_ctor_statements instead of ctor_statements
* asan.h (asan_protect_global): New prototype.
(asan_red_zone_size): New inline function.
Co-Authored-By: Wei Mi <wmi@google.com>
From-SVN: r193437
2012-11-12 16:52:42 +01:00
|
|
|
|
2017-01-23 10:12:29 +01:00
|
|
|
pp_string (&module_name_pp, main_input_filename);
|
2013-11-04 22:33:31 +01:00
|
|
|
module_name_cst = asan_pp_string (&module_name_pp);
|
|
|
|
|
Implement protection of global variables
This patch implements the protection of global variables. See the
comments appended to the beginning of the asan.c file.
* varasm.c: Include asan.h.
(assemble_noswitch_variable): Grow size by asan_red_zone_size
if decl is asan protected.
(place_block_symbol): Likewise.
(assemble_variable): If decl is asan protected, increase
DECL_ALIGN if needed, and for decls emitted using
assemble_variable_contents append padding zeros after it.
* Makefile.in (varasm.o): Depend on asan.h.
* asan.c: Include output.h.
(asan_pp, asan_pp_initialized, asan_ctor_statements): New variables.
(asan_pp_initialize, asan_pp_string): New functions.
(asan_emit_stack_protection): Use asan_pp{,_initialized}
instead of local pp{,_initialized} vars, use asan_pp_initialize
and asan_pp_string helpers.
(asan_needs_local_alias, asan_protect_global,
asan_global_struct, asan_add_global): New functions.
(asan_finish_file): Protect global vars that can be protected. Use
asan_ctor_statements instead of ctor_statements
* asan.h (asan_protect_global): New prototype.
(asan_red_zone_size): New inline function.
Co-Authored-By: Wei Mi <wmi@google.com>
From-SVN: r193437
2012-11-12 16:52:42 +01:00
|
|
|
if (asan_needs_local_alias (decl))
|
|
|
|
{
|
|
|
|
char buf[20];
|
This patch rewrites the old VEC macro-based interface into a new one based on the template class 'vec'.
This patch rewrites the old VEC macro-based interface into a new one
based on the template class 'vec'. The user-visible changes are
described in http://gcc.gnu.org/wiki/cxx-conversion/cxx-vec.
I have tested the patch pretty extensively:
- Regular bootstraps on x86_64, ppc, ia64, sparc and hppa.
- Bootstraps with --enable-checking=release
- Bootstraps with --enable-checking=gc,gcac
- Basic builds on all targets (using contrib/config-list.mk).
We no longer access the vectors via VEC_* macros. The pattern is
"VEC_operation (T, A, V, args)" becomes "V.operation (args)".
The only thing I could not do is create proper ctors and dtors for the
vec class. Since these vectors are stored in unions, we
have to keep them as PODs (C++03 does not allow non-PODs in unions).
This means that creation and destruction must be explicit. There is a
new method vec<type, allocation, layout>::create() and another vec<type,
allocation, layout>::destroy() to allocate the internal vector.
For vectors that must be pointers, there is a family of free functions
that implement the operations that need to tolerate NULL vectors.
These functions all start with the prefix 'vec_safe_'. See the wiki
page for details.
The gengtype change removes the special handling for VEC() that used
to exist in gengtype. Additionally, it allows gengtype to recognize
templates of more than one argument and introduces the concept of an
undefined type (useful for template arguments that may or may not be
types).
When a TYPE_UNDEFINED is reached, gengtype will ignore it if it
happens inside a type marked with GTY((user)). Otherwise, it will
emit an error.
Finally, gengtype rejects root types marked GTY((user)) that are not
first class pointers.
2012-11-16 Diego Novillo <dnovillo@google.com>
VEC API overhaul (http://gcc.gnu.org/wiki/cxx-conversion/cxx-vec)
* vec.c (register_overhead): Convert it into
member function of vec_prefix.
(release_overhead): Likewise.
(calculate_allocation): Likewise.
(vec_heap_free): Remove.
(vec_gc_o_reserve_1): Remove.
(vec_heap_o_reserve_1): Remove.
(vec_stack_o_reserve_1): Remove.
(vec_stack_o_reserve_exact): Remove.
(register_stack_vec): New.
(stack_vec_register_index): New.
(unregister_stack_vec): New.
(vec_assert_fail): Remove.
* vec.h: Conditionally include ggc.h. Document conditional
hackery.
Update top-level documentation.
(ALONE_VEC_CHECK_INFO): Remove.
(VEC_CHECK_INFO): Remove.
(ALONE_VEC_CHECK_DECL): Remove.
(VEC_CHECK_DECL): Remove.
(ALONE_VEC_CHECK_PASS): Remove.
(VEC_CHECK_PASS): Remove.
(VEC_ASSERT): Remove.
(vec_prefix): Add friends va_gc, va_gc_atomic, va_heap and
va_stack.
Mark fields alloc_ and num_ as protected.
(struct vec_t): Remove. Remove all function members.
(struct vl_embed): Declare.
(struct vl_ptr): Declare.
(free): Remove.
(reserve_exact): Remove.
(reserve): Remove.
(safe_splice): Remove.
(safe_push): Remove.
(safe_grow): Remove.
(safe_grow_cleared): Remove.
(safe_insert): Remove.
(DEF_VEC_I): Remove.
(DEF_VEC_ALLOC_I): Remove.
(DEF_VEC_P): Remove.
(DEF_VEC_ALLOC_P): Remove.
(DEF_VEC_O): Remove.
(DEF_VEC_ALLOC_O): Remove.
(DEF_VEC_ALLOC_P_STACK): Remove.
(DEF_VEC_ALLOC_O_STACK): Remove.
(DEF_VEC_ALLOC_I_STACK): Remove.
(DEF_VEC_A): Remove.
(DEF_VEC_ALLOC_A): Remove.
(vec_stack_p_reserve_exact_1): Remove.
(vec_stack_o_reserve): Remove.
(vec_stack_o_reserve_exact): Remove.
(VEC_length): Remove.
(VEC_empty): Remove.
(VEC_address): Remove.
(vec_address): Remove.
(VEC_last): Remove.
(VEC_index): Remove.
(VEC_iterate): Remove.
(VEC_embedded_size): Remove.
(VEC_embedded_init): Remove.
(VEC_free): Remove.
(VEC_copy): Remove.
(VEC_space): Remove.
(VEC_reserve): Remove.
(VEC_reserve_exact): Remove.
(VEC_splice): Remove.
(VEC_safe_splice): Remove.
(VEC_quick_push): Remove.
(VEC_safe_push): Remove.
(VEC_pop): Remove.
(VEC_truncate): Remove.
(VEC_safe_grow): Remove.
(VEC_replace): Remove.
(VEC_quick_insert): Remove.
(VEC_safe_insert): Remove.
(VEC_ordered_remove): Remove.
(VEC_unordered_remove): Remove.
(VEC_block_remove): Remove.
(VEC_lower_bound): Remove.
(VEC_alloc): Remove.
(VEC_qsort): Remove.
(va_heap): Declare.
(va_heap::default_layout): New typedef to vl_ptr.
(va_heap::reserve): New.
(va_heap::release): New.
(va_gc): Declare.
(va_gc::default_layout): New typedef to vl_embed.
(va_gc::reserve): New.
(va_gc::release): New.
(va_gc_atomic): Declare. Inherit from va_gc.
(va_stack): Declare.
(va_stack::default_layout): New typedef to vl_ptr.
(va_stack::alloc): New.
(va_stack::reserve): New.
(va_stack::release): New.
(register_stack_vec): Declare.
(stack_vec_register_index): Declare.
(unregister_stack_vec): Declare.
(vec<T, A = va_heap, L = typename A::default_layout>): Declare
empty vec template.
(vec<T, A, vl_embed>): Partial specialization for embedded
layout.
(vec<T, A, vl_embed>::allocated): New.
(vec<T, A, vl_embed>::length): New.
(vec<T, A, vl_embed>::is_empty): New.
(vec<T, A, vl_embed>::address): New.
(vec<T, A, vl_embed>::operator[]): New.
(vec<T, A, vl_embed>::last New.
(vec<T, A, vl_embed>::space): New.
(vec<T, A, vl_embed>::iterate): New.
(vec<T, A, vl_embed>::iterate): New.
(vec<T, A, vl_embed>::copy): New.
(vec<T, A, vl_embed>::splice): New.
(vec<T, A, vl_embed>::quick_push New.
(vec<T, A, vl_embed>::pop New.
(vec<T, A, vl_embed>::truncate): New.
(vec<T, A, vl_embed>::quick_insert): New.
(vec<T, A, vl_embed>::ordered_remove): New.
(vec<T, A, vl_embed>::unordered_remove): New.
(vec<T, A, vl_embed>::block_remove): New.
(vec<T, A, vl_embed>::qsort): New.
(vec<T, A, vl_embed>::lower_bound): New.
(vec<T, A, vl_embed>::embedded_size): New.
(vec<T, A, vl_embed>::embedded_init): New.
(vec<T, A, vl_embed>::quick_grow): New.
(vec<T, A, vl_embed>::quick_grow_cleared): New.
(vec_safe_space): New.
(vec_safe_length): New.
(vec_safe_address): New.
(vec_safe_is_empty): New.
(vec_safe_reserve): New.
(vec_safe_reserve_exact): New.
(vec_alloc): New.
(vec_free): New.
(vec_safe_grow): New.
(vec_safe_grow_cleared): New.
(vec_safe_iterate): New.
(vec_safe_push): New.
(vec_safe_insert): New.
(vec_safe_truncate): New.
(vec_safe_copy): New.
(vec_safe_splice): New.
(vec<T, A, vl_ptr>): New partial specialization for the space
efficient layout.
(vec<T, A, vl_ptr>::exists): New.
(vec<T, A, vl_ptr>::is_empty): New.
(vec<T, A, vl_ptr>::length): New.
(vec<T, A, vl_ptr>::address): New.
(vec<T, A, vl_ptr>::operator[]): New.
(vec<T, A, vl_ptr>::operator!=): New.
(vec<T, A, vl_ptr>::operator==): New.
(vec<T, A, vl_ptr>::last): New.
(vec<T, A, vl_ptr>::space): New.
(vec<T, A, vl_ptr>::iterate): New.
(vec<T, A, vl_ptr>::copy): New.
(vec<T, A, vl_ptr>::reserve): New.
(vec<T, A, vl_ptr>::reserve_exact): New.
(vec<T, A, vl_ptr>::splice): New.
(vec<T, A, vl_ptr>::safe_splice): New.
(vec<T, A, vl_ptr>::quick_push): New.
(vec<T, A, vl_ptr>::safe_push): New.
(vec<T, A, vl_ptr>::pop): New.
(vec<T, A, vl_ptr>::truncate): New.
(vec<T, A, vl_ptr>::safe_grow): New.
(vec<T, A, vl_ptr>::safe_grow_cleared): New.
(vec<T, A, vl_ptr>::quick_grow): New.
(vec<T, A, vl_ptr>::quick_grow_cleared): New.
(vec<T, A, vl_ptr>::quick_insert): New.
(vec<T, A, vl_ptr>::safe_insert): New.
(vec<T, A, vl_ptr>::ordered_remove): New.
(vec<T, A, vl_ptr>::unordered_remove): New.
(vec<T, A, vl_ptr>::block_remove): New.
(vec<T, A, vl_ptr>::qsort): New.
(vec<T, A, vl_ptr>::lower_bound): New.
(vec_stack_alloc): Define.
(FOR_EACH_VEC_SAFE_ELT): Define.
* vecir.h: Remove. Update all users.
* vecprim.h: Remove. Update all users.
Move uchar to coretypes.h.
* Makefile.in (VEC_H): Add $(GGC_H).
Remove vecir.h and vecprim.h dependencies everywhere.
2012-11-16 Diego Novillo <dnovillo@google.com>
* gengtype-lex.l (VEC): Remove.
Add characters in the set [\!\>\.-].
* gengtype-parse.c (token_names): Remove "VEC".
(require_template_declaration): Remove handling of VEC_TOKEN.
(type): Likewise.
Call create_user_defined_type when parsing GTY((user)).
* gengtype-state.c (type_lineloc): handle TYPE_UNDEFINED.
(write_state_undefined_type): New.
(write_state_type): Call write_state_undefined_type for
TYPE_UNDEFINED.
(read_state_type): Call read_state_undefined_type for
TYPE_UNDEFINED.
* gengtype.c (dbgprint_count_type_at): Handle TYPE_UNDEFINED.
(create_user_defined_type): Make extern.
(type_for_name): Factor out of resolve_typedef.
(create_undefined_type): New
(resolve_typedef): Call it when we cannot find a previous
typedef and the type is not a template.
(find_structure): Accept TYPE_UNDEFINED.
(set_gc_used_type): Add argument ALLOWED_UNDEFINED_TYPES,
default to false.
Emit an error for TYPE_UNDEFINED unless LEVEL is GC_UNUSED or
ALLOWED_UNDEFINED_TYPES is set.
Set ALLOWED_UNDEFINED_TYPES to true for TYPE_USER_STRUCT.
(filter_type_name): Accept templates with more than one
argument.
(output_mangled_typename): Handle TYPE_UNDEFINED
(walk_type): Likewise.
(write_types_process_field): Likewise.
(write_func_for_structure): If CHAIN_NEXT is set, ORIG_S
should not be a user-defined type.
(write_types_local_user_process_field): Handle TYPE_ARRAY,
TYPE_NONE and TYPE_UNDEFINED.
(write_types_local_process_field): Likewise.
(contains_scalar_p): Return 0 for TYPE_USER_STRUCT.
(write_root): Reject user-defined types that are not pointers.
Handle TYPE_NONE, TYPE_UNDEFINED, TYPE_UNION, TYPE_LANG_STRUCT
and TYPE_PARAM_STRUCT.
(output_typename): Handle TYPE_NONE, TYPE_UNDEFINED, and
TYPE_ARRAY.
(dump_typekind): Handle TYPE_UNDEFINED.
* gengtype.h (enum typekind): Add TYPE_UNDEFINED.
(create_user_defined_type): Declare.
(enum gty_token): Remove VEC_TOKEN.
2012-11-16 Diego Novillo <dnovillo@google.com>
Adjust for new vec API (http://gcc.gnu.org/wiki/cxx-conversion/cxx-vec)
* coretypes.h (uchar): Define.
* alias.c: Use new vec API in vec.h.
* asan.c: Likewise.
* attribs.c: Likewise.
* basic-block.h: Likewise.
* bb-reorder.c: Likewise.
* builtins.c: Likewise.
* calls.c: Likewise.
* cfg.c: Likewise.
* cfganal.c: Likewise.
* cfgcleanup.c: Likewise.
* cfgexpand.c: Likewise.
* cfghooks.c: Likewise.
* cfghooks.h: Likewise.
* cfgloop.c: Likewise.
* cfgloop.h: Likewise.
* cfgloopanal.c: Likewise.
* cfgloopmanip.c: Likewise.
* cfgrtl.c: Likewise.
* cgraph.c: Likewise.
* cgraph.h: Likewise.
* cgraphclones.c: Likewise.
* cgraphunit.c: Likewise.
* combine.c: Likewise.
* compare-elim.c: Likewise.
* coverage.c: Likewise.
* cprop.c: Likewise.
* data-streamer.h: Likewise.
* dbxout.c: Likewise.
* dce.c: Likewise.
* df-core.c: Likewise.
* df-problems.c: Likewise.
* df-scan.c: Likewise.
* dominance.c: Likewise.
* domwalk.c: Likewise.
* domwalk.h: Likewise.
* dse.c: Likewise.
* dwarf2cfi.c: Likewise.
* dwarf2out.c: Likewise.
* dwarf2out.h: Likewise.
* emit-rtl.c: Likewise.
* except.c: Likewise.
* except.h: Likewise.
* expr.c: Likewise.
* expr.h: Likewise.
* final.c: Likewise.
* fold-const.c: Likewise.
* function.c: Likewise.
* function.h: Likewise.
* fwprop.c: Likewise.
* gcc.c: Likewise.
* gcse.c: Likewise.
* genattr.c: Likewise.
* genattrtab.c: Likewise.
* genautomata.c: Likewise.
* genextract.c: Likewise.
* genopinit.c: Likewise
* ggc-common.c: Likewise.
* ggc.h: Likewise.
* gimple-low.c: Likewise.
* gimple-ssa-strength-reduction.c: Likewise.
* gimple-streamer-in.c: Likewise.
* gimple.c: Likewise.
* gimple.h: Likewise.
* gimplify.c: Likewise.
* graph.c: Likewise.
* graphds.c: Likewise.
* graphds.h: Likewise.
* graphite-blocking.c: Likewise.
* graphite-clast-to-gimple.c: Likewise.
* graphite-dependences.c: Likewise.
* graphite-interchange.c: Likewise.
* graphite-optimize-isl.c: Likewise.
* graphite-poly.c: Likewise.
* graphite-poly.h: Likewise.
* graphite-scop-detection.c: Likewise.
* graphite-scop-detection.h: Likewise.
* graphite-sese-to-poly.c: Likewise.
* graphite.c: Likewise.
* godump.c: Likewise.
* haifa-sched.c: Likewise.
* hw-doloop.c: Likewise.
* hw-doloop.h: Likewise.
* ifcvt.c: Likewise.
* insn-addr.h: Likewise.
* ipa-cp.c: Likewise.
* ipa-inline-analysis.c: Likewise.
* ipa-inline-transform.c: Likewise.
* ipa-inline.c: Likewise.
* ipa-inline.h: Likewise.
* ipa-prop.c: Likewise.
* ipa-prop.h: Likewise.
* ipa-pure-const.c: Likewise.
* ipa-ref-inline.h: Likewise.
* ipa-ref.c: Likewise.
* ipa-ref.h: Likewise.
* ipa-reference.c: Likewise.
* ipa-split.c: Likewise.
* ipa-utils.c: Likewise.
* ipa-utils.h: Likewise.
* ipa.c: Likewise.
* ira-build.c: Likewise.
* ira-color.c: Likewise.
* ira-emit.c: Likewise.
* ira-int.h: Likewise.
* ira.c: Likewise.
* loop-invariant.c: Likewise.
* loop-unroll.c: Likewise.
* lower-subreg.c: Likewise.
* lra-lives.c: Likewise.
* lra.c: Likewise.
* lto-cgraph.c: Likewise.
* lto-section-out.c: Likewise.
* lto-streamer-in.c: Likewise.
* lto-streamer-out.c: Likewise.
* lto-streamer.h: Likewise.
* lto-symtab.c: Likewise.
* mcf.c: Likewise.
* modulo-sched.c: Likewise.
* omp-low.c: Likewise.
* opts-common.c: Likewise.
* opts-global.c: Likewise.
* opts.c: Likewise.
* opts.h: Likewise.
* passes.c: Likewise.
* predict.c: Likewise.
* print-tree.c: Likewise.
* profile.c: Likewise.
* profile.h: Likewise.
* read-rtl.c: Likewise.
* ree.c: Likewise.
* reg-stack.c: Likewise.
* regrename.c: Likewise.
* regrename.h: Likewise.
* reload.c: Likewise.
* reload.h: Likewise.
* reload1.c: Likewise.
* rtl.h: Likewise.
* sched-deps.c: Likewise.
* sched-int.h: Likewise.
* sdbout.c: Likewise.
* sel-sched-dump.c: Likewise.
* sel-sched-ir.c: Likewise.
* sel-sched-ir.h: Likewise.
* sel-sched.c: Likewise.
* sese.c: Likewise.
* sese.h: Likewise.
* statistics.h: Likewise.
* stmt.c: Likewise.
* stor-layout.c: Likewise.
* store-motion.c: Likewise.
* tlink.c: Likewise.
* toplev.c: Likewise.
* trans-mem.c: Likewise.
* tree-browser.c: Likewise.
* tree-call-cdce.c: Likewise.
* tree-cfg.c: Likewise.
* tree-cfgcleanup.c: Likewise.
* tree-chrec.c: Likewise.
* tree-chrec.h: Likewise.
* tree-complex.c: Likewise.
* tree-data-ref.c: Likewise.
* tree-data-ref.h: Likewise.
* tree-dfa.c: Likewise.
* tree-diagnostic.c: Likewise.
* tree-dump.c: Likewise.
* tree-eh.c: Likewise.
* tree-emutls.c: Likewise.
* tree-flow.h: Likewise.
* tree-if-conv.c: Likewise.
* tree-inline.c: Likewise.
* tree-inline.h: Likewise.
* tree-into-ssa.c: Likewise.
* tree-iterator.c: Likewise.
* tree-loop-distribution.c: Likewise.
* tree-mudflap.c: Likewise.
* tree-optimize.c: Likewise.
* tree-outof-ssa.c: Likewise.
* tree-parloops.c: Likewise.
* tree-phinodes.c: Likewise.
* tree-predcom.c: Likewise.
* tree-pretty-print.c: Likewise.
* tree-scalar-evolution.c: Likewise.
* tree-sra.c: Likewise.
* tree-ssa-address.c: Likewise.
* tree-ssa-alias.c: Likewise.
* tree-ssa-ccp.c: Likewise.
* tree-ssa-coalesce.c: Likewise.
* tree-ssa-dce.c: Likewise.
* tree-ssa-dom.c: Likewise.
* tree-ssa-forwprop.c: Likewise.
* tree-ssa-live.c: Likewise.
* tree-ssa-live.h: Likewise.
* tree-ssa-loop-im.c: Likewise.
* tree-ssa-loop-ivcanon.c: Likewise.
* tree-ssa-loop-ivopts.c: Likewise.
* tree-ssa-loop-manip.c: Likewise.
* tree-ssa-loop-niter.c: Likewise.
* tree-ssa-loop-prefetch.c: Likewise.
* tree-ssa-math-opts.c: Likewise.
* tree-ssa-operands.c: Likewise.
* tree-ssa-phiopt.c: Likewise.
* tree-ssa-phiprop.c: Likewise.
* tree-ssa-pre.c: Likewise.
* tree-ssa-propagate.c: Likewise.
* tree-ssa-reassoc.c: Likewise.
* tree-ssa-sccvn.c: Likewise.
* tree-ssa-sccvn.h: Likewise.
* tree-ssa-strlen.c: Likewise.
* tree-ssa-structalias.c: Likewise.
* tree-ssa-tail-merge.c: Likewise.
* tree-ssa-threadedge.c: Likewise.
* tree-ssa-threadupdate.c: Likewise.
* tree-ssa-uncprop.c: Likewise.
* tree-ssa-uninit.c: Likewise.
* tree-ssa.c: Likewise.
* tree-ssanames.c: Likewise.
* tree-stdarg.c: Likewise.
* tree-streamer-in.c: Likewise.
* tree-streamer-out.c: Likewise.
* tree-streamer.c: Likewise.
* tree-streamer.h: Likewise.
* tree-switch-conversion.c: Likewise.
* tree-vect-data-refs.c: Likewise.
* tree-vect-generic.c: Likewise.
* tree-vect-loop-manip.c: Likewise.
* tree-vect-loop.c: Likewise.
* tree-vect-patterns.c: Likewise.
* tree-vect-slp.c: Likewise.
* tree-vect-stmts.c: Likewise.
* tree-vectorizer.c: Likewise.
* tree-vectorizer.h: Likewise.
* tree-vrp.c: Likewise.
* tree.c: Likewise.
* tree.h: Likewise.
* value-prof.c: Likewise.
* value-prof.h: Likewise.
* var-tracking.c: Likewise.
* varasm.c: Likewise.
* varpool.c: Likewise.
* vmsdbgout.c: Likewise.
* config/bfin/bfin.c: Likewise.
* config/c6x/c6x.c: Likewise.
* config/darwin.c: Likewise.
* config/i386/i386.c: Likewise.
* config/ia64/ia64.c: Likewise.
* config/mep/mep.c: Likewise.
* config/mips/mips.c: Likewise.
* config/pa/pa.c: Likewise.
* config/rs6000/rs6000-c.c: Likewise.
* config/rs6000/rs6000.c: Likewise.
* config/rx/rx.c: Likewise.
* config/spu/spu-c.c: Likewise.
* config/vms/vms.c: Likewise.
* config/vxworks.c: Likewise.
* config/epiphany/resolve-sw-modes.c: Likewise.
From-SVN: r193595
2012-11-18 03:54:30 +01:00
|
|
|
ASM_GENERATE_INTERNAL_LABEL (buf, "LASAN", vec_safe_length (v) + 1);
|
Implement protection of global variables
This patch implements the protection of global variables. See the
comments appended to the beginning of the asan.c file.
* varasm.c: Include asan.h.
(assemble_noswitch_variable): Grow size by asan_red_zone_size
if decl is asan protected.
(place_block_symbol): Likewise.
(assemble_variable): If decl is asan protected, increase
DECL_ALIGN if needed, and for decls emitted using
assemble_variable_contents append padding zeros after it.
* Makefile.in (varasm.o): Depend on asan.h.
* asan.c: Include output.h.
(asan_pp, asan_pp_initialized, asan_ctor_statements): New variables.
(asan_pp_initialize, asan_pp_string): New functions.
(asan_emit_stack_protection): Use asan_pp{,_initialized}
instead of local pp{,_initialized} vars, use asan_pp_initialize
and asan_pp_string helpers.
(asan_needs_local_alias, asan_protect_global,
asan_global_struct, asan_add_global): New functions.
(asan_finish_file): Protect global vars that can be protected. Use
asan_ctor_statements instead of ctor_statements
* asan.h (asan_protect_global): New prototype.
(asan_red_zone_size): New inline function.
Co-Authored-By: Wei Mi <wmi@google.com>
From-SVN: r193437
2012-11-12 16:52:42 +01:00
|
|
|
refdecl = build_decl (DECL_SOURCE_LOCATION (decl),
|
|
|
|
VAR_DECL, get_identifier (buf), TREE_TYPE (decl));
|
|
|
|
TREE_ADDRESSABLE (refdecl) = TREE_ADDRESSABLE (decl);
|
|
|
|
TREE_READONLY (refdecl) = TREE_READONLY (decl);
|
|
|
|
TREE_THIS_VOLATILE (refdecl) = TREE_THIS_VOLATILE (decl);
|
|
|
|
DECL_GIMPLE_REG_P (refdecl) = DECL_GIMPLE_REG_P (decl);
|
|
|
|
DECL_ARTIFICIAL (refdecl) = DECL_ARTIFICIAL (decl);
|
|
|
|
DECL_IGNORED_P (refdecl) = DECL_IGNORED_P (decl);
|
|
|
|
TREE_STATIC (refdecl) = 1;
|
|
|
|
TREE_PUBLIC (refdecl) = 0;
|
|
|
|
TREE_USED (refdecl) = 1;
|
|
|
|
assemble_alias (refdecl, DECL_ASSEMBLER_NAME (decl));
|
|
|
|
}
|
|
|
|
|
2016-12-02 08:39:27 +01:00
|
|
|
tree odr_indicator_ptr
|
|
|
|
= (asan_needs_odr_indicator_p (decl) ? create_odr_indicator (decl, type)
|
|
|
|
: build_int_cst (uptr, 0));
|
Implement protection of global variables
This patch implements the protection of global variables. See the
comments appended to the beginning of the asan.c file.
* varasm.c: Include asan.h.
(assemble_noswitch_variable): Grow size by asan_red_zone_size
if decl is asan protected.
(place_block_symbol): Likewise.
(assemble_variable): If decl is asan protected, increase
DECL_ALIGN if needed, and for decls emitted using
assemble_variable_contents append padding zeros after it.
* Makefile.in (varasm.o): Depend on asan.h.
* asan.c: Include output.h.
(asan_pp, asan_pp_initialized, asan_ctor_statements): New variables.
(asan_pp_initialize, asan_pp_string): New functions.
(asan_emit_stack_protection): Use asan_pp{,_initialized}
instead of local pp{,_initialized} vars, use asan_pp_initialize
and asan_pp_string helpers.
(asan_needs_local_alias, asan_protect_global,
asan_global_struct, asan_add_global): New functions.
(asan_finish_file): Protect global vars that can be protected. Use
asan_ctor_statements instead of ctor_statements
* asan.h (asan_protect_global): New prototype.
(asan_red_zone_size): New inline function.
Co-Authored-By: Wei Mi <wmi@google.com>
From-SVN: r193437
2012-11-12 16:52:42 +01:00
|
|
|
CONSTRUCTOR_APPEND_ELT (vinner, NULL_TREE,
|
|
|
|
fold_convert (const_ptr_type_node,
|
|
|
|
build_fold_addr_expr (refdecl)));
|
decl.c, [...]: Replace tree_low_cst (..., 1) with tree_to_uhwi throughout.
gcc/ada/
* gcc-interface/decl.c, gcc-interface/utils.c, gcc-interface/utils2.c:
Replace tree_low_cst (..., 1) with tree_to_uhwi throughout.
gcc/c-family/
* c-common.c, c-cppbuiltin.c: Replace tree_low_cst (..., 1) with
tree_to_uhwi throughout.
gcc/c/
* c-decl.c, c-typeck.c: Replace tree_low_cst (..., 1) with
tree_to_uhwi throughout.
gcc/cp/
* call.c, class.c, decl.c, error.c: Replace tree_low_cst (..., 1) with
tree_to_uhwi throughout.
gcc/objc/
* objc-encoding.c: Replace tree_low_cst (..., 1) with tree_to_uhwi
throughout.
gcc/
* alias.c, asan.c, builtins.c, cfgexpand.c, cgraph.c,
config/aarch64/aarch64.c, config/alpha/predicates.md,
config/arm/arm.c, config/darwin.c, config/epiphany/epiphany.c,
config/i386/i386.c, config/iq2000/iq2000.c, config/m32c/m32c-pragma.c,
config/mep/mep-pragma.c, config/mips/mips.c,
config/picochip/picochip.c, config/rs6000/rs6000.c, cppbuiltin.c,
dbxout.c, dwarf2out.c, emit-rtl.c, except.c, expr.c, fold-const.c,
function.c, gimple-fold.c, godump.c, ipa-cp.c, ipa-prop.c, omp-low.c,
predict.c, sdbout.c, stor-layout.c, trans-mem.c, tree-object-size.c,
tree-sra.c, tree-ssa-ccp.c, tree-ssa-forwprop.c,
tree-ssa-loop-ivcanon.c, tree-ssa-loop-ivopts.c, tree-ssa-loop-niter.c,
tree-ssa-loop-prefetch.c, tree-ssa-strlen.c, tree-stdarg.c,
tree-switch-conversion.c, tree-vect-generic.c, tree-vect-loop.c,
tree-vect-patterns.c, tree-vrp.c, tree.c, tsan.c, ubsan.c, varasm.c:
Replace tree_low_cst (..., 1) with tree_to_uhwi throughout.
From-SVN: r204961
2013-11-18 15:52:19 +01:00
|
|
|
size = tree_to_uhwi (DECL_SIZE_UNIT (decl));
|
Implement protection of global variables
This patch implements the protection of global variables. See the
comments appended to the beginning of the asan.c file.
* varasm.c: Include asan.h.
(assemble_noswitch_variable): Grow size by asan_red_zone_size
if decl is asan protected.
(place_block_symbol): Likewise.
(assemble_variable): If decl is asan protected, increase
DECL_ALIGN if needed, and for decls emitted using
assemble_variable_contents append padding zeros after it.
* Makefile.in (varasm.o): Depend on asan.h.
* asan.c: Include output.h.
(asan_pp, asan_pp_initialized, asan_ctor_statements): New variables.
(asan_pp_initialize, asan_pp_string): New functions.
(asan_emit_stack_protection): Use asan_pp{,_initialized}
instead of local pp{,_initialized} vars, use asan_pp_initialize
and asan_pp_string helpers.
(asan_needs_local_alias, asan_protect_global,
asan_global_struct, asan_add_global): New functions.
(asan_finish_file): Protect global vars that can be protected. Use
asan_ctor_statements instead of ctor_statements
* asan.h (asan_protect_global): New prototype.
(asan_red_zone_size): New inline function.
Co-Authored-By: Wei Mi <wmi@google.com>
From-SVN: r193437
2012-11-12 16:52:42 +01:00
|
|
|
CONSTRUCTOR_APPEND_ELT (vinner, NULL_TREE, build_int_cst (uptr, size));
|
|
|
|
size += asan_red_zone_size (size);
|
|
|
|
CONSTRUCTOR_APPEND_ELT (vinner, NULL_TREE, build_int_cst (uptr, size));
|
|
|
|
CONSTRUCTOR_APPEND_ELT (vinner, NULL_TREE,
|
|
|
|
fold_convert (const_ptr_type_node, str_cst));
|
2013-11-04 22:33:31 +01:00
|
|
|
CONSTRUCTOR_APPEND_ELT (vinner, NULL_TREE,
|
|
|
|
fold_convert (const_ptr_type_node, module_name_cst));
|
2014-07-24 14:25:27 +02:00
|
|
|
varpool_node *vnode = varpool_node::get (decl);
|
2013-11-22 21:04:45 +01:00
|
|
|
int has_dynamic_init = vnode ? vnode->dynamically_initialized : 0;
|
|
|
|
CONSTRUCTOR_APPEND_ELT (vinner, NULL_TREE,
|
|
|
|
build_int_cst (uptr, has_dynamic_init));
|
2014-10-06 09:44:13 +02:00
|
|
|
tree locptr = NULL_TREE;
|
|
|
|
location_t loc = DECL_SOURCE_LOCATION (decl);
|
|
|
|
expanded_location xloc = expand_location (loc);
|
|
|
|
if (xloc.file != NULL)
|
|
|
|
{
|
|
|
|
static int lasanloccnt = 0;
|
|
|
|
char buf[25];
|
|
|
|
ASM_GENERATE_INTERNAL_LABEL (buf, "LASANLOC", ++lasanloccnt);
|
|
|
|
tree var = build_decl (UNKNOWN_LOCATION, VAR_DECL, get_identifier (buf),
|
|
|
|
ubsan_get_source_location_type ());
|
|
|
|
TREE_STATIC (var) = 1;
|
|
|
|
TREE_PUBLIC (var) = 0;
|
|
|
|
DECL_ARTIFICIAL (var) = 1;
|
|
|
|
DECL_IGNORED_P (var) = 1;
|
|
|
|
pretty_printer filename_pp;
|
|
|
|
pp_string (&filename_pp, xloc.file);
|
|
|
|
tree str = asan_pp_string (&filename_pp);
|
|
|
|
tree ctor = build_constructor_va (TREE_TYPE (var), 3,
|
|
|
|
NULL_TREE, str, NULL_TREE,
|
|
|
|
build_int_cst (unsigned_type_node,
|
|
|
|
xloc.line), NULL_TREE,
|
|
|
|
build_int_cst (unsigned_type_node,
|
|
|
|
xloc.column));
|
|
|
|
TREE_CONSTANT (ctor) = 1;
|
|
|
|
TREE_STATIC (ctor) = 1;
|
|
|
|
DECL_INITIAL (var) = ctor;
|
|
|
|
varpool_node::finalize_decl (var);
|
|
|
|
locptr = fold_convert (uptr, build_fold_addr_expr (var));
|
|
|
|
}
|
|
|
|
else
|
|
|
|
locptr = build_int_cst (uptr, 0);
|
|
|
|
CONSTRUCTOR_APPEND_ELT (vinner, NULL_TREE, locptr);
|
2016-12-02 08:39:27 +01:00
|
|
|
CONSTRUCTOR_APPEND_ELT (vinner, NULL_TREE, odr_indicator_ptr);
|
Implement protection of global variables
This patch implements the protection of global variables. See the
comments appended to the beginning of the asan.c file.
* varasm.c: Include asan.h.
(assemble_noswitch_variable): Grow size by asan_red_zone_size
if decl is asan protected.
(place_block_symbol): Likewise.
(assemble_variable): If decl is asan protected, increase
DECL_ALIGN if needed, and for decls emitted using
assemble_variable_contents append padding zeros after it.
* Makefile.in (varasm.o): Depend on asan.h.
* asan.c: Include output.h.
(asan_pp, asan_pp_initialized, asan_ctor_statements): New variables.
(asan_pp_initialize, asan_pp_string): New functions.
(asan_emit_stack_protection): Use asan_pp{,_initialized}
instead of local pp{,_initialized} vars, use asan_pp_initialize
and asan_pp_string helpers.
(asan_needs_local_alias, asan_protect_global,
asan_global_struct, asan_add_global): New functions.
(asan_finish_file): Protect global vars that can be protected. Use
asan_ctor_statements instead of ctor_statements
* asan.h (asan_protect_global): New prototype.
(asan_red_zone_size): New inline function.
Co-Authored-By: Wei Mi <wmi@google.com>
From-SVN: r193437
2012-11-12 16:52:42 +01:00
|
|
|
init = build_constructor (type, vinner);
|
|
|
|
CONSTRUCTOR_APPEND_ELT (v, NULL_TREE, init);
|
|
|
|
}
|
|
|
|
|
2012-12-03 18:28:10 +01:00
|
|
|
/* Initialize sanitizer.def builtins if the FE hasn't initialized them.  */

void
initialize_sanitizer_builtins (void)
{
  /* Scratch decl written by the DEF_SANITIZER_BUILTIN macro below.  */
  tree decl;

  /* If the FE already registered the sanitizer builtins, there is
     nothing to do.  */
  if (builtin_decl_implicit_p (BUILT_IN_ASAN_INIT))
    return;

  /* Function types shared by many sanitizer builtins.  The names mirror
     the BT_FN_* spellings used in sanitizer.def.  */
  tree BT_FN_VOID = build_function_type_list (void_type_node, NULL_TREE);
  tree BT_FN_VOID_PTR
    = build_function_type_list (void_type_node, ptr_type_node, NULL_TREE);
  tree BT_FN_VOID_CONST_PTR
    = build_function_type_list (void_type_node, const_ptr_type_node, NULL_TREE);
  tree BT_FN_VOID_PTR_PTR
    = build_function_type_list (void_type_node, ptr_type_node,
				ptr_type_node, NULL_TREE);
  tree BT_FN_VOID_PTR_PTR_PTR
    = build_function_type_list (void_type_node, ptr_type_node,
				ptr_type_node, ptr_type_node, NULL_TREE);
  tree BT_FN_VOID_PTR_PTRMODE
    = build_function_type_list (void_type_node, ptr_type_node,
				pointer_sized_int_node, NULL_TREE);
  tree BT_FN_VOID_INT
    = build_function_type_list (void_type_node, integer_type_node, NULL_TREE);
  tree BT_FN_SIZE_CONST_PTR_INT
    = build_function_type_list (size_type_node, const_ptr_type_node,
				integer_type_node, NULL_TREE);
  /* Families of types parameterized by access size: index i corresponds
     to an integer of 2^i bytes (1, 2, 4, 8, 16).  */
  tree BT_FN_BOOL_VPTR_PTR_IX_INT_INT[5];
  tree BT_FN_IX_CONST_VPTR_INT[5];
  tree BT_FN_IX_VPTR_IX_INT[5];
  tree BT_FN_VOID_VPTR_IX_INT[5];
  /* volatile void *.  */
  tree vptr
    = build_pointer_type (build_qualified_type (void_type_node,
						TYPE_QUAL_VOLATILE));
  /* const volatile void *.  */
  tree cvptr
    = build_pointer_type (build_qualified_type (void_type_node,
						TYPE_QUAL_VOLATILE
						|TYPE_QUAL_CONST));
  tree boolt
    = lang_hooks.types.type_for_size (BOOL_TYPE_SIZE, 1);
  int i;
  for (i = 0; i < 5; i++)
    {
      /* Unsigned integer of BITS_PER_UNIT * 2^i bits for this size class.  */
      tree ix = build_nonstandard_integer_type (BITS_PER_UNIT * (1 << i), 1);
      BT_FN_BOOL_VPTR_PTR_IX_INT_INT[i]
	= build_function_type_list (boolt, vptr, ptr_type_node, ix,
				    integer_type_node, integer_type_node,
				    NULL_TREE);
      BT_FN_IX_CONST_VPTR_INT[i]
	= build_function_type_list (ix, cvptr, integer_type_node, NULL_TREE);
      BT_FN_IX_VPTR_IX_INT[i]
	= build_function_type_list (ix, vptr, ix, integer_type_node,
				    NULL_TREE);
      BT_FN_VOID_VPTR_IX_INT[i]
	= build_function_type_list (void_type_node, vptr, ix,
				    integer_type_node, NULL_TREE);
    }
  /* Per-size aliases so sanitizer.def can refer to concrete widths.  */
#define BT_FN_BOOL_VPTR_PTR_I1_INT_INT BT_FN_BOOL_VPTR_PTR_IX_INT_INT[0]
#define BT_FN_I1_CONST_VPTR_INT BT_FN_IX_CONST_VPTR_INT[0]
#define BT_FN_I1_VPTR_I1_INT BT_FN_IX_VPTR_IX_INT[0]
#define BT_FN_VOID_VPTR_I1_INT BT_FN_VOID_VPTR_IX_INT[0]
#define BT_FN_BOOL_VPTR_PTR_I2_INT_INT BT_FN_BOOL_VPTR_PTR_IX_INT_INT[1]
#define BT_FN_I2_CONST_VPTR_INT BT_FN_IX_CONST_VPTR_INT[1]
#define BT_FN_I2_VPTR_I2_INT BT_FN_IX_VPTR_IX_INT[1]
#define BT_FN_VOID_VPTR_I2_INT BT_FN_VOID_VPTR_IX_INT[1]
#define BT_FN_BOOL_VPTR_PTR_I4_INT_INT BT_FN_BOOL_VPTR_PTR_IX_INT_INT[2]
#define BT_FN_I4_CONST_VPTR_INT BT_FN_IX_CONST_VPTR_INT[2]
#define BT_FN_I4_VPTR_I4_INT BT_FN_IX_VPTR_IX_INT[2]
#define BT_FN_VOID_VPTR_I4_INT BT_FN_VOID_VPTR_IX_INT[2]
#define BT_FN_BOOL_VPTR_PTR_I8_INT_INT BT_FN_BOOL_VPTR_PTR_IX_INT_INT[3]
#define BT_FN_I8_CONST_VPTR_INT BT_FN_IX_CONST_VPTR_INT[3]
#define BT_FN_I8_VPTR_I8_INT BT_FN_IX_VPTR_IX_INT[3]
#define BT_FN_VOID_VPTR_I8_INT BT_FN_VOID_VPTR_IX_INT[3]
#define BT_FN_BOOL_VPTR_PTR_I16_INT_INT BT_FN_BOOL_VPTR_PTR_IX_INT_INT[4]
#define BT_FN_I16_CONST_VPTR_INT BT_FN_IX_CONST_VPTR_INT[4]
#define BT_FN_I16_VPTR_I16_INT BT_FN_IX_VPTR_IX_INT[4]
#define BT_FN_VOID_VPTR_I16_INT BT_FN_VOID_VPTR_IX_INT[4]
  /* Map the ATTR_* names used by sanitizer.def onto ECF_* flag masks.
     Each is #undef'ed first in case builtins.def machinery defined it.  */
#undef ATTR_NOTHROW_LEAF_LIST
#define ATTR_NOTHROW_LEAF_LIST ECF_NOTHROW | ECF_LEAF
#undef ATTR_TMPURE_NOTHROW_LEAF_LIST
#define ATTR_TMPURE_NOTHROW_LEAF_LIST ECF_TM_PURE | ATTR_NOTHROW_LEAF_LIST
#undef ATTR_NORETURN_NOTHROW_LEAF_LIST
#define ATTR_NORETURN_NOTHROW_LEAF_LIST ECF_NORETURN | ATTR_NOTHROW_LEAF_LIST
#undef ATTR_CONST_NORETURN_NOTHROW_LEAF_LIST
#define ATTR_CONST_NORETURN_NOTHROW_LEAF_LIST \
  ECF_CONST | ATTR_NORETURN_NOTHROW_LEAF_LIST
#undef ATTR_TMPURE_NORETURN_NOTHROW_LEAF_LIST
#define ATTR_TMPURE_NORETURN_NOTHROW_LEAF_LIST \
  ECF_TM_PURE | ATTR_NORETURN_NOTHROW_LEAF_LIST
#undef ATTR_COLD_NOTHROW_LEAF_LIST
#define ATTR_COLD_NOTHROW_LEAF_LIST \
  /* ECF_COLD missing */ ATTR_NOTHROW_LEAF_LIST
#undef ATTR_COLD_NORETURN_NOTHROW_LEAF_LIST
#define ATTR_COLD_NORETURN_NOTHROW_LEAF_LIST \
  /* ECF_COLD missing */ ATTR_NORETURN_NOTHROW_LEAF_LIST
#undef ATTR_COLD_CONST_NORETURN_NOTHROW_LEAF_LIST
#define ATTR_COLD_CONST_NORETURN_NOTHROW_LEAF_LIST \
  /* ECF_COLD missing */ ATTR_CONST_NORETURN_NOTHROW_LEAF_LIST
#undef ATTR_PURE_NOTHROW_LEAF_LIST
#define ATTR_PURE_NOTHROW_LEAF_LIST ECF_PURE | ATTR_NOTHROW_LEAF_LIST
  /* Stub entries in sanitizer.def expand to nothing here.  */
#undef DEF_BUILTIN_STUB
#define DEF_BUILTIN_STUB(ENUM, NAME)
  /* Register one builtin: create the decl, attach its ECF flags, and
     record it as the implicit builtin for ENUM.  */
#undef DEF_SANITIZER_BUILTIN
#define DEF_SANITIZER_BUILTIN(ENUM, NAME, TYPE, ATTRS) \
  decl = add_builtin_function ("__builtin_" NAME, TYPE, ENUM,		\
			       BUILT_IN_NORMAL, NAME, NULL_TREE);	\
  set_call_expr_flags (decl, ATTRS);					\
  set_builtin_decl (ENUM, decl, true);

#include "sanitizer.def"

  /* -fsanitize=object-size uses __builtin_object_size, but that might
     not be available for e.g. Fortran at this point.  We use
     DEF_SANITIZER_BUILTIN here only as a convenience macro.  */
  if ((flag_sanitize & SANITIZE_OBJECT_SIZE)
      && !builtin_decl_implicit_p (BUILT_IN_OBJECT_SIZE))
    DEF_SANITIZER_BUILTIN (BUILT_IN_OBJECT_SIZE, "object_size",
			   BT_FN_SIZE_CONST_PTR_INT,
			   ATTR_PURE_NOTHROW_LEAF_LIST)

#undef DEF_SANITIZER_BUILTIN
#undef DEF_BUILTIN_STUB
}
|
|
|
|
|
2012-12-10 13:14:36 +01:00
|
|
|
/* Called via htab_traverse. Count number of emitted
|
|
|
|
STRING_CSTs in the constant hash table. */
|
|
|
|
|
move many gc hashtab to hash_table
gcc/
* asan.c, cfgloop.c, cfgloop.h, cgraph.c, cgraph.h,
config/darwin.c, config/m32c/m32c.c, config/mep/mep.c,
config/mips/mips.c, config/rs6000/rs6000.c, dwarf2out.c,
function.c, function.h, gimple-ssa.h, libfuncs.h, optabs.c,
output.h, rtl.h, sese.c, symtab.c, tree-cfg.c, tree-dfa.c,
tree-ssa.c, varasm.c: Use hash-table instead of hashtab.
* doc/gty.texi (for_user): Document new option.
* gengtype.c (create_user_defined_type): Don't try to get a struct for
char.
(walk_type): Don't error out on for_user option.
(write_func_for_structure): Emit user marking routines if requested by
for_user option.
(write_local_func_for_structure): Likewise.
(main): Mark types with for_user option as used.
* ggc.h (gt_pch_nx): Add overload for unsigned int.
* hash-map.h (hash_map::hash_entry::pch_nx_helper): AddOverloads.
* hash-table.h (ggc_hasher): New struct.
(hash_table::create_ggc): New function.
(gt_pch_nx): New overload for hash_table.
java/
* class.c, decl.c, except.c, expr.c, java-tree.h, lang.c: Use
hash_table instead of hashtab.
objc/
* objc-act.c: use hash_table instead of hashtab.
cp/
* cp-gimplify.c, cp-tree.h, decl.c, mangle.c, name-lookup.c,
pt.c, semantics.c, tree.c, typeck2.c: Use hash_table instead of
hashtab.
fortran/
* trans-decl.c, trans.c, trans.h: Use hash_table instead of hashtab.
c-family/
* c-common.c: Use hash_table instead of hashtab.
From-SVN: r216127
2014-10-13 00:22:53 +02:00
|
|
|
int
|
|
|
|
count_string_csts (constant_descriptor_tree **slot,
|
|
|
|
unsigned HOST_WIDE_INT *data)
|
2012-12-10 13:14:36 +01:00
|
|
|
{
|
move many gc hashtab to hash_table
gcc/
* asan.c, cfgloop.c, cfgloop.h, cgraph.c, cgraph.h,
config/darwin.c, config/m32c/m32c.c, config/mep/mep.c,
config/mips/mips.c, config/rs6000/rs6000.c, dwarf2out.c,
function.c, function.h, gimple-ssa.h, libfuncs.h, optabs.c,
output.h, rtl.h, sese.c, symtab.c, tree-cfg.c, tree-dfa.c,
tree-ssa.c, varasm.c: Use hash-table instead of hashtab.
* doc/gty.texi (for_user): Document new option.
* gengtype.c (create_user_defined_type): Don't try to get a struct for
char.
(walk_type): Don't error out on for_user option.
(write_func_for_structure): Emit user marking routines if requested by
for_user option.
(write_local_func_for_structure): Likewise.
(main): Mark types with for_user option as used.
* ggc.h (gt_pch_nx): Add overload for unsigned int.
* hash-map.h (hash_map::hash_entry::pch_nx_helper): AddOverloads.
* hash-table.h (ggc_hasher): New struct.
(hash_table::create_ggc): New function.
(gt_pch_nx): New overload for hash_table.
java/
* class.c, decl.c, except.c, expr.c, java-tree.h, lang.c: Use
hash_table instead of hashtab.
objc/
* objc-act.c: use hash_table instead of hashtab.
cp/
* cp-gimplify.c, cp-tree.h, decl.c, mangle.c, name-lookup.c,
pt.c, semantics.c, tree.c, typeck2.c: Use hash_table instead of
hashtab.
fortran/
* trans-decl.c, trans.c, trans.h: Use hash_table instead of hashtab.
c-family/
* c-common.c: Use hash_table instead of hashtab.
From-SVN: r216127
2014-10-13 00:22:53 +02:00
|
|
|
struct constant_descriptor_tree *desc = *slot;
|
2012-12-10 13:14:36 +01:00
|
|
|
if (TREE_CODE (desc->value) == STRING_CST
|
|
|
|
&& TREE_ASM_WRITTEN (desc->value)
|
|
|
|
&& asan_protect_global (desc->value))
|
move many gc hashtab to hash_table
gcc/
* asan.c, cfgloop.c, cfgloop.h, cgraph.c, cgraph.h,
config/darwin.c, config/m32c/m32c.c, config/mep/mep.c,
config/mips/mips.c, config/rs6000/rs6000.c, dwarf2out.c,
function.c, function.h, gimple-ssa.h, libfuncs.h, optabs.c,
output.h, rtl.h, sese.c, symtab.c, tree-cfg.c, tree-dfa.c,
tree-ssa.c, varasm.c: Use hash-table instead of hashtab.
* doc/gty.texi (for_user): Document new option.
* gengtype.c (create_user_defined_type): Don't try to get a struct for
char.
(walk_type): Don't error out on for_user option.
(write_func_for_structure): Emit user marking routines if requested by
for_user option.
(write_local_func_for_structure): Likewise.
(main): Mark types with for_user option as used.
* ggc.h (gt_pch_nx): Add overload for unsigned int.
* hash-map.h (hash_map::hash_entry::pch_nx_helper): AddOverloads.
* hash-table.h (ggc_hasher): New struct.
(hash_table::create_ggc): New function.
(gt_pch_nx): New overload for hash_table.
java/
* class.c, decl.c, except.c, expr.c, java-tree.h, lang.c: Use
hash_table instead of hashtab.
objc/
* objc-act.c: use hash_table instead of hashtab.
cp/
* cp-gimplify.c, cp-tree.h, decl.c, mangle.c, name-lookup.c,
pt.c, semantics.c, tree.c, typeck2.c: Use hash_table instead of
hashtab.
fortran/
* trans-decl.c, trans.c, trans.h: Use hash_table instead of hashtab.
c-family/
* c-common.c: Use hash_table instead of hashtab.
From-SVN: r216127
2014-10-13 00:22:53 +02:00
|
|
|
++*data;
|
2012-12-10 13:14:36 +01:00
|
|
|
return 1;
|
|
|
|
}
|
|
|
|
|
|
|
|
/* Helper structure to pass two parameters to
   add_string_csts.  */

struct asan_add_string_csts_data
{
  /* The __asan_global struct type, as built by asan_global_struct.  */
  tree type;
  /* Constructor elements that asan_add_global appends to.  */
  vec<constructor_elt, va_gc> *v;
};
|
|
|
|
|
move many gc hashtab to hash_table
gcc/
* asan.c, cfgloop.c, cfgloop.h, cgraph.c, cgraph.h,
config/darwin.c, config/m32c/m32c.c, config/mep/mep.c,
config/mips/mips.c, config/rs6000/rs6000.c, dwarf2out.c,
function.c, function.h, gimple-ssa.h, libfuncs.h, optabs.c,
output.h, rtl.h, sese.c, symtab.c, tree-cfg.c, tree-dfa.c,
tree-ssa.c, varasm.c: Use hash-table instead of hashtab.
* doc/gty.texi (for_user): Document new option.
* gengtype.c (create_user_defined_type): Don't try to get a struct for
char.
(walk_type): Don't error out on for_user option.
(write_func_for_structure): Emit user marking routines if requested by
for_user option.
(write_local_func_for_structure): Likewise.
(main): Mark types with for_user option as used.
* ggc.h (gt_pch_nx): Add overload for unsigned int.
* hash-map.h (hash_map::hash_entry::pch_nx_helper): AddOverloads.
* hash-table.h (ggc_hasher): New struct.
(hash_table::create_ggc): New function.
(gt_pch_nx): New overload for hash_table.
java/
* class.c, decl.c, except.c, expr.c, java-tree.h, lang.c: Use
hash_table instead of hashtab.
objc/
* objc-act.c: use hash_table instead of hashtab.
cp/
* cp-gimplify.c, cp-tree.h, decl.c, mangle.c, name-lookup.c,
pt.c, semantics.c, tree.c, typeck2.c: Use hash_table instead of
hashtab.
fortran/
* trans-decl.c, trans.c, trans.h: Use hash_table instead of hashtab.
c-family/
* c-common.c: Use hash_table instead of hashtab.
From-SVN: r216127
2014-10-13 00:22:53 +02:00
|
|
|
/* Called via hash_table::traverse. Call asan_add_global
|
2012-12-10 13:14:36 +01:00
|
|
|
on emitted STRING_CSTs from the constant hash table. */
|
|
|
|
|
move many gc hashtab to hash_table
gcc/
* asan.c, cfgloop.c, cfgloop.h, cgraph.c, cgraph.h,
config/darwin.c, config/m32c/m32c.c, config/mep/mep.c,
config/mips/mips.c, config/rs6000/rs6000.c, dwarf2out.c,
function.c, function.h, gimple-ssa.h, libfuncs.h, optabs.c,
output.h, rtl.h, sese.c, symtab.c, tree-cfg.c, tree-dfa.c,
tree-ssa.c, varasm.c: Use hash-table instead of hashtab.
* doc/gty.texi (for_user): Document new option.
* gengtype.c (create_user_defined_type): Don't try to get a struct for
char.
(walk_type): Don't error out on for_user option.
(write_func_for_structure): Emit user marking routines if requested by
for_user option.
(write_local_func_for_structure): Likewise.
(main): Mark types with for_user option as used.
* ggc.h (gt_pch_nx): Add overload for unsigned int.
* hash-map.h (hash_map::hash_entry::pch_nx_helper): AddOverloads.
* hash-table.h (ggc_hasher): New struct.
(hash_table::create_ggc): New function.
(gt_pch_nx): New overload for hash_table.
java/
* class.c, decl.c, except.c, expr.c, java-tree.h, lang.c: Use
hash_table instead of hashtab.
objc/
* objc-act.c: use hash_table instead of hashtab.
cp/
* cp-gimplify.c, cp-tree.h, decl.c, mangle.c, name-lookup.c,
pt.c, semantics.c, tree.c, typeck2.c: Use hash_table instead of
hashtab.
fortran/
* trans-decl.c, trans.c, trans.h: Use hash_table instead of hashtab.
c-family/
* c-common.c: Use hash_table instead of hashtab.
From-SVN: r216127
2014-10-13 00:22:53 +02:00
|
|
|
int
|
|
|
|
add_string_csts (constant_descriptor_tree **slot,
|
|
|
|
asan_add_string_csts_data *aascd)
|
2012-12-10 13:14:36 +01:00
|
|
|
{
|
move many gc hashtab to hash_table
gcc/
* asan.c, cfgloop.c, cfgloop.h, cgraph.c, cgraph.h,
config/darwin.c, config/m32c/m32c.c, config/mep/mep.c,
config/mips/mips.c, config/rs6000/rs6000.c, dwarf2out.c,
function.c, function.h, gimple-ssa.h, libfuncs.h, optabs.c,
output.h, rtl.h, sese.c, symtab.c, tree-cfg.c, tree-dfa.c,
tree-ssa.c, varasm.c: Use hash-table instead of hashtab.
* doc/gty.texi (for_user): Document new option.
* gengtype.c (create_user_defined_type): Don't try to get a struct for
char.
(walk_type): Don't error out on for_user option.
(write_func_for_structure): Emit user marking routines if requested by
for_user option.
(write_local_func_for_structure): Likewise.
(main): Mark types with for_user option as used.
* ggc.h (gt_pch_nx): Add overload for unsigned int.
* hash-map.h (hash_map::hash_entry::pch_nx_helper): AddOverloads.
* hash-table.h (ggc_hasher): New struct.
(hash_table::create_ggc): New function.
(gt_pch_nx): New overload for hash_table.
java/
* class.c, decl.c, except.c, expr.c, java-tree.h, lang.c: Use
hash_table instead of hashtab.
objc/
* objc-act.c: use hash_table instead of hashtab.
cp/
* cp-gimplify.c, cp-tree.h, decl.c, mangle.c, name-lookup.c,
pt.c, semantics.c, tree.c, typeck2.c: Use hash_table instead of
hashtab.
fortran/
* trans-decl.c, trans.c, trans.h: Use hash_table instead of hashtab.
c-family/
* c-common.c: Use hash_table instead of hashtab.
From-SVN: r216127
2014-10-13 00:22:53 +02:00
|
|
|
struct constant_descriptor_tree *desc = *slot;
|
2012-12-10 13:14:36 +01:00
|
|
|
if (TREE_CODE (desc->value) == STRING_CST
|
|
|
|
&& TREE_ASM_WRITTEN (desc->value)
|
|
|
|
&& asan_protect_global (desc->value))
|
|
|
|
{
|
|
|
|
asan_add_global (SYMBOL_REF_DECL (XEXP (desc->rtl, 0)),
|
|
|
|
aascd->type, aascd->v);
|
|
|
|
}
|
|
|
|
return 1;
|
|
|
|
}
|
|
|
|
|
Implement protection of global variables
This patch implements the protection of global variables. See the
comments appended to the beginning of the asan.c file.
* varasm.c: Include asan.h.
(assemble_noswitch_variable): Grow size by asan_red_zone_size
if decl is asan protected.
(place_block_symbol): Likewise.
(assemble_variable): If decl is asan protected, increase
DECL_ALIGN if needed, and for decls emitted using
assemble_variable_contents append padding zeros after it.
* Makefile.in (varasm.o): Depend on asan.h.
* asan.c: Include output.h.
(asan_pp, asan_pp_initialized, asan_ctor_statements): New variables.
(asan_pp_initialize, asan_pp_string): New functions.
(asan_emit_stack_protection): Use asan_pp{,_initialized}
instead of local pp{,_initialized} vars, use asan_pp_initialize
and asan_pp_string helpers.
(asan_needs_local_alias, asan_protect_global,
asan_global_struct, asan_add_global): New functions.
(asan_finish_file): Protect global vars that can be protected. Use
asan_ctor_statements instead of ctor_statements
* asan.h (asan_protect_global): New prototype.
(asan_red_zone_size): New inline function.
Co-Authored-By: Wei Mi <wmi@google.com>
From-SVN: r193437
2012-11-12 16:52:42 +01:00
|
|
|
/* Statement list accumulating the calls emitted into the asan module
   constructor (e.g. __asan_init).  Needs to be GTY(()), because
   cgraph_build_static_cdtor may invoke ggc_collect.  */
static GTY(()) tree asan_ctor_statements;
|
|
|
|
|
2012-11-12 16:51:13 +01:00
|
|
|
/* Module-level instrumentation.
|
2013-11-04 22:33:31 +01:00
|
|
|
- Insert __asan_init_vN() into the list of CTORs.
|
2012-11-12 16:51:13 +01:00
|
|
|
- TODO: insert redzones around globals.
|
|
|
|
*/
|
|
|
|
|
|
|
|
void
|
|
|
|
asan_finish_file (void)
|
|
|
|
{
|
2013-12-06 11:40:53 +01:00
|
|
|
varpool_node *vnode;
|
Implement protection of global variables
This patch implements the protection of global variables. See the
comments appended to the beginning of the asan.c file.
* varasm.c: Include asan.h.
(assemble_noswitch_variable): Grow size by asan_red_zone_size
if decl is asan protected.
(place_block_symbol): Likewise.
(assemble_variable): If decl is asan protected, increase
DECL_ALIGN if needed, and for decls emitted using
assemble_variable_contents append padding zeros after it.
* Makefile.in (varasm.o): Depend on asan.h.
* asan.c: Include output.h.
(asan_pp, asan_pp_initialized, asan_ctor_statements): New variables.
(asan_pp_initialize, asan_pp_string): New functions.
(asan_emit_stack_protection): Use asan_pp{,_initialized}
instead of local pp{,_initialized} vars, use asan_pp_initialize
and asan_pp_string helpers.
(asan_needs_local_alias, asan_protect_global,
asan_global_struct, asan_add_global): New functions.
(asan_finish_file): Protect global vars that can be protected. Use
asan_ctor_statements instead of ctor_statements
* asan.h (asan_protect_global): New prototype.
(asan_red_zone_size): New inline function.
Co-Authored-By: Wei Mi <wmi@google.com>
From-SVN: r193437
2012-11-12 16:52:42 +01:00
|
|
|
unsigned HOST_WIDE_INT gcount = 0;
|
|
|
|
|
2012-12-10 13:14:36 +01:00
|
|
|
if (shadow_ptr_types[0] == NULL_TREE)
|
|
|
|
asan_init_shadow_ptr_types ();
|
|
|
|
/* Avoid instrumenting code in the asan ctors/dtors.
|
|
|
|
We don't need to insert padding after the description strings,
|
|
|
|
nor after .LASAN* array. */
|
2013-08-30 18:12:58 +02:00
|
|
|
flag_sanitize &= ~SANITIZE_ADDRESS;
|
2012-12-03 18:28:10 +01:00
|
|
|
|
2014-12-02 19:58:59 +01:00
|
|
|
/* For user-space we want asan constructors to run first.
|
|
|
|
Linux kernel does not support priorities other than default, and the only
|
|
|
|
other user of constructors is coverage. So we run with the default
|
|
|
|
priority. */
|
|
|
|
int priority = flag_sanitize & SANITIZE_USER_ADDRESS
|
|
|
|
? MAX_RESERVED_INIT_PRIORITY - 1 : DEFAULT_INIT_PRIORITY;
|
|
|
|
|
2014-10-03 16:17:28 +02:00
|
|
|
if (flag_sanitize & SANITIZE_USER_ADDRESS)
|
|
|
|
{
|
|
|
|
tree fn = builtin_decl_implicit (BUILT_IN_ASAN_INIT);
|
|
|
|
append_to_statement_list (build_call_expr (fn, 0), &asan_ctor_statements);
|
2015-10-21 09:40:54 +02:00
|
|
|
fn = builtin_decl_implicit (BUILT_IN_ASAN_VERSION_MISMATCH_CHECK);
|
|
|
|
append_to_statement_list (build_call_expr (fn, 0), &asan_ctor_statements);
|
2014-10-03 16:17:28 +02:00
|
|
|
}
|
Implement protection of global variables
This patch implements the protection of global variables. See the
comments appended to the beginning of the asan.c file.
* varasm.c: Include asan.h.
(assemble_noswitch_variable): Grow size by asan_red_zone_size
if decl is asan protected.
(place_block_symbol): Likewise.
(assemble_variable): If decl is asan protected, increase
DECL_ALIGN if needed, and for decls emitted using
assemble_variable_contents append padding zeros after it.
* Makefile.in (varasm.o): Depend on asan.h.
* asan.c: Include output.h.
(asan_pp, asan_pp_initialized, asan_ctor_statements): New variables.
(asan_pp_initialize, asan_pp_string): New functions.
(asan_emit_stack_protection): Use asan_pp{,_initialized}
instead of local pp{,_initialized} vars, use asan_pp_initialize
and asan_pp_string helpers.
(asan_needs_local_alias, asan_protect_global,
asan_global_struct, asan_add_global): New functions.
(asan_finish_file): Protect global vars that can be protected. Use
asan_ctor_statements instead of ctor_statements
* asan.h (asan_protect_global): New prototype.
(asan_red_zone_size): New inline function.
Co-Authored-By: Wei Mi <wmi@google.com>
From-SVN: r193437
2012-11-12 16:52:42 +01:00
|
|
|
FOR_EACH_DEFINED_VARIABLE (vnode)
|
2013-10-29 19:30:00 +01:00
|
|
|
if (TREE_ASM_WRITTEN (vnode->decl)
|
|
|
|
&& asan_protect_global (vnode->decl))
|
Implement protection of global variables
This patch implements the protection of global variables. See the
comments appended to the beginning of the asan.c file.
* varasm.c: Include asan.h.
(assemble_noswitch_variable): Grow size by asan_red_zone_size
if decl is asan protected.
(place_block_symbol): Likewise.
(assemble_variable): If decl is asan protected, increase
DECL_ALIGN if needed, and for decls emitted using
assemble_variable_contents append padding zeros after it.
* Makefile.in (varasm.o): Depend on asan.h.
* asan.c: Include output.h.
(asan_pp, asan_pp_initialized, asan_ctor_statements): New variables.
(asan_pp_initialize, asan_pp_string): New functions.
(asan_emit_stack_protection): Use asan_pp{,_initialized}
instead of local pp{,_initialized} vars, use asan_pp_initialize
and asan_pp_string helpers.
(asan_needs_local_alias, asan_protect_global,
asan_global_struct, asan_add_global): New functions.
(asan_finish_file): Protect global vars that can be protected. Use
asan_ctor_statements instead of ctor_statements
* asan.h (asan_protect_global): New prototype.
(asan_red_zone_size): New inline function.
Co-Authored-By: Wei Mi <wmi@google.com>
From-SVN: r193437
2012-11-12 16:52:42 +01:00
|
|
|
++gcount;
|
move many gc hashtab to hash_table
gcc/
* asan.c, cfgloop.c, cfgloop.h, cgraph.c, cgraph.h,
config/darwin.c, config/m32c/m32c.c, config/mep/mep.c,
config/mips/mips.c, config/rs6000/rs6000.c, dwarf2out.c,
function.c, function.h, gimple-ssa.h, libfuncs.h, optabs.c,
output.h, rtl.h, sese.c, symtab.c, tree-cfg.c, tree-dfa.c,
tree-ssa.c, varasm.c: Use hash-table instead of hashtab.
* doc/gty.texi (for_user): Document new option.
* gengtype.c (create_user_defined_type): Don't try to get a struct for
char.
(walk_type): Don't error out on for_user option.
(write_func_for_structure): Emit user marking routines if requested by
for_user option.
(write_local_func_for_structure): Likewise.
(main): Mark types with for_user option as used.
* ggc.h (gt_pch_nx): Add overload for unsigned int.
* hash-map.h (hash_map::hash_entry::pch_nx_helper): AddOverloads.
* hash-table.h (ggc_hasher): New struct.
(hash_table::create_ggc): New function.
(gt_pch_nx): New overload for hash_table.
java/
* class.c, decl.c, except.c, expr.c, java-tree.h, lang.c: Use
hash_table instead of hashtab.
objc/
* objc-act.c: use hash_table instead of hashtab.
cp/
* cp-gimplify.c, cp-tree.h, decl.c, mangle.c, name-lookup.c,
pt.c, semantics.c, tree.c, typeck2.c: Use hash_table instead of
hashtab.
fortran/
* trans-decl.c, trans.c, trans.h: Use hash_table instead of hashtab.
c-family/
* c-common.c: Use hash_table instead of hashtab.
From-SVN: r216127
2014-10-13 00:22:53 +02:00
|
|
|
hash_table<tree_descriptor_hasher> *const_desc_htab = constant_pool_htab ();
|
|
|
|
const_desc_htab->traverse<unsigned HOST_WIDE_INT *, count_string_csts>
|
|
|
|
(&gcount);
|
Implement protection of global variables
This patch implements the protection of global variables. See the
comments appended to the beginning of the asan.c file.
* varasm.c: Include asan.h.
(assemble_noswitch_variable): Grow size by asan_red_zone_size
if decl is asan protected.
(place_block_symbol): Likewise.
(assemble_variable): If decl is asan protected, increase
DECL_ALIGN if needed, and for decls emitted using
assemble_variable_contents append padding zeros after it.
* Makefile.in (varasm.o): Depend on asan.h.
* asan.c: Include output.h.
(asan_pp, asan_pp_initialized, asan_ctor_statements): New variables.
(asan_pp_initialize, asan_pp_string): New functions.
(asan_emit_stack_protection): Use asan_pp{,_initialized}
instead of local pp{,_initialized} vars, use asan_pp_initialize
and asan_pp_string helpers.
(asan_needs_local_alias, asan_protect_global,
asan_global_struct, asan_add_global): New functions.
(asan_finish_file): Protect global vars that can be protected. Use
asan_ctor_statements instead of ctor_statements
* asan.h (asan_protect_global): New prototype.
(asan_red_zone_size): New inline function.
Co-Authored-By: Wei Mi <wmi@google.com>
From-SVN: r193437
2012-11-12 16:52:42 +01:00
|
|
|
if (gcount)
|
|
|
|
{
|
2012-12-03 18:28:10 +01:00
|
|
|
tree type = asan_global_struct (), var, ctor;
|
Implement protection of global variables
This patch implements the protection of global variables. See the
comments appended to the beginning of the asan.c file.
* varasm.c: Include asan.h.
(assemble_noswitch_variable): Grow size by asan_red_zone_size
if decl is asan protected.
(place_block_symbol): Likewise.
(assemble_variable): If decl is asan protected, increase
DECL_ALIGN if needed, and for decls emitted using
assemble_variable_contents append padding zeros after it.
* Makefile.in (varasm.o): Depend on asan.h.
* asan.c: Include output.h.
(asan_pp, asan_pp_initialized, asan_ctor_statements): New variables.
(asan_pp_initialize, asan_pp_string): New functions.
(asan_emit_stack_protection): Use asan_pp{,_initialized}
instead of local pp{,_initialized} vars, use asan_pp_initialize
and asan_pp_string helpers.
(asan_needs_local_alias, asan_protect_global,
asan_global_struct, asan_add_global): New functions.
(asan_finish_file): Protect global vars that can be protected. Use
asan_ctor_statements instead of ctor_statements
* asan.h (asan_protect_global): New prototype.
(asan_red_zone_size): New inline function.
Co-Authored-By: Wei Mi <wmi@google.com>
From-SVN: r193437
2012-11-12 16:52:42 +01:00
|
|
|
tree dtor_statements = NULL_TREE;
|
This patch rewrites the old VEC macro-based interface into a new one based on the template class 'vec'.
This patch rewrites the old VEC macro-based interface into a new one
based on the template class 'vec'. The user-visible changes are
described in http://gcc.gnu.org/wiki/cxx-conversion/cxx-vec.
I have tested the patch pretty extensively:
- Regular bootstraps on x86_64, ppc, ia64, sparc and hppa.
- Bootstraps with --enable-checking=release
- Bootstraps with --enable-checking=gc,gcac
- Basic builds on all targets (using contrib/config-list.mk).
We no longer access the vectors via VEC_* macros. The pattern is
"VEC_operation (T, A, V, args)" becomes "V.operation (args)".
The only thing I could not do is create proper ctors and dtors for the
vec class. Since these vectors are stored in unions, we
have to keep them as PODs (C++03 does not allow non-PODs in unions).
This means that creation and destruction must be explicit. There is a
new method vec<type, allocation, layout>::create() and another vec<type,
allocation, layout>::destroy() to allocate the internal vector.
For vectors that must be pointers, there is a family of free functions
that implement the operations that need to tolerate NULL vectors.
These functions all start with the prefix 'vec_safe_'. See the wiki
page for details.
The gengtype change removes the special handling for VEC() that used
to exist in gengtype. Additionally, it allows gengtype to recognize
templates of more than one argument and introduces the concept of an
undefined type (useful for template arguments that may or may not be
types).
When a TYPE_UNDEFINED is reached, gengtype will ignore it if it
happens inside a type marked with GTY((user)). Otherwise, it will
emit an error.
Finally, gengtype rejects root types marked GTY((user)) that are not
first class pointers.
2012-11-16 Diego Novillo <dnovillo@google.com>
VEC API overhaul (http://gcc.gnu.org/wiki/cxx-conversion/cxx-vec)
* vec.c (register_overhead): Convert it into
member function of vec_prefix.
(release_overhead): Likewise.
(calculate_allocation): Likewise.
(vec_heap_free): Remove.
(vec_gc_o_reserve_1): Remove.
(vec_heap_o_reserve_1): Remove.
(vec_stack_o_reserve_1): Remove.
(vec_stack_o_reserve_exact): Remove.
(register_stack_vec): New.
(stack_vec_register_index): New.
(unregister_stack_vec): New.
(vec_assert_fail): Remove.
* vec.h: Conditionally include ggc.h. Document conditional
hackery.
Update top-level documentation.
(ALONE_VEC_CHECK_INFO): Remove.
(VEC_CHECK_INFO): Remove.
(ALONE_VEC_CHECK_DECL): Remove.
(VEC_CHECK_DECL): Remove.
(ALONE_VEC_CHECK_PASS): Remove.
(VEC_CHECK_PASS): Remove.
(VEC_ASSERT): Remove.
(vec_prefix): Add friends va_gc, va_gc_atomic, va_heap and
va_stack.
Mark fields alloc_ and num_ as protected.
(struct vec_t): Remove. Remove all function members.
(struct vl_embed): Declare.
(struct vl_ptr): Declare.
(free): Remove.
(reserve_exact): Remove.
(reserve): Remove.
(safe_splice): Remove.
(safe_push): Remove.
(safe_grow): Remove.
(safe_grow_cleared): Remove.
(safe_insert): Remove.
(DEF_VEC_I): Remove.
(DEF_VEC_ALLOC_I): Remove.
(DEF_VEC_P): Remove.
(DEF_VEC_ALLOC_P): Remove.
(DEF_VEC_O): Remove.
(DEF_VEC_ALLOC_O): Remove.
(DEF_VEC_ALLOC_P_STACK): Remove.
(DEF_VEC_ALLOC_O_STACK): Remove.
(DEF_VEC_ALLOC_I_STACK): Remove.
(DEF_VEC_A): Remove.
(DEF_VEC_ALLOC_A): Remove.
(vec_stack_p_reserve_exact_1): Remove.
(vec_stack_o_reserve): Remove.
(vec_stack_o_reserve_exact): Remove.
(VEC_length): Remove.
(VEC_empty): Remove.
(VEC_address): Remove.
(vec_address): Remove.
(VEC_last): Remove.
(VEC_index): Remove.
(VEC_iterate): Remove.
(VEC_embedded_size): Remove.
(VEC_embedded_init): Remove.
(VEC_free): Remove.
(VEC_copy): Remove.
(VEC_space): Remove.
(VEC_reserve): Remove.
(VEC_reserve_exact): Remove.
(VEC_splice): Remove.
(VEC_safe_splice): Remove.
(VEC_quick_push): Remove.
(VEC_safe_push): Remove.
(VEC_pop): Remove.
(VEC_truncate): Remove.
(VEC_safe_grow): Remove.
(VEC_replace): Remove.
(VEC_quick_insert): Remove.
(VEC_safe_insert): Remove.
(VEC_ordered_remove): Remove.
(VEC_unordered_remove): Remove.
(VEC_block_remove): Remove.
(VEC_lower_bound): Remove.
(VEC_alloc): Remove.
(VEC_qsort): Remove.
(va_heap): Declare.
(va_heap::default_layout): New typedef to vl_ptr.
(va_heap::reserve): New.
(va_heap::release): New.
(va_gc): Declare.
(va_gc::default_layout): New typedef to vl_embed.
(va_gc::reserve): New.
(va_gc::release): New.
(va_gc_atomic): Declare. Inherit from va_gc.
(va_stack): Declare.
(va_stack::default_layout): New typedef to vl_ptr.
(va_stack::alloc): New.
(va_stack::reserve): New.
(va_stack::release): New.
(register_stack_vec): Declare.
(stack_vec_register_index): Declare.
(unregister_stack_vec): Declare.
(vec<T, A = va_heap, L = typename A::default_layout>): Declare
empty vec template.
(vec<T, A, vl_embed>): Partial specialization for embedded
layout.
(vec<T, A, vl_embed>::allocated): New.
(vec<T, A, vl_embed>::length): New.
(vec<T, A, vl_embed>::is_empty): New.
(vec<T, A, vl_embed>::address): New.
(vec<T, A, vl_embed>::operator[]): New.
(vec<T, A, vl_embed>::last): New.
(vec<T, A, vl_embed>::space): New.
(vec<T, A, vl_embed>::iterate): New.
(vec<T, A, vl_embed>::iterate): New.
(vec<T, A, vl_embed>::copy): New.
(vec<T, A, vl_embed>::splice): New.
(vec<T, A, vl_embed>::quick_push): New.
(vec<T, A, vl_embed>::pop): New.
(vec<T, A, vl_embed>::truncate): New.
(vec<T, A, vl_embed>::quick_insert): New.
(vec<T, A, vl_embed>::ordered_remove): New.
(vec<T, A, vl_embed>::unordered_remove): New.
(vec<T, A, vl_embed>::block_remove): New.
(vec<T, A, vl_embed>::qsort): New.
(vec<T, A, vl_embed>::lower_bound): New.
(vec<T, A, vl_embed>::embedded_size): New.
(vec<T, A, vl_embed>::embedded_init): New.
(vec<T, A, vl_embed>::quick_grow): New.
(vec<T, A, vl_embed>::quick_grow_cleared): New.
(vec_safe_space): New.
(vec_safe_length): New.
(vec_safe_address): New.
(vec_safe_is_empty): New.
(vec_safe_reserve): New.
(vec_safe_reserve_exact): New.
(vec_alloc): New.
(vec_free): New.
(vec_safe_grow): New.
(vec_safe_grow_cleared): New.
(vec_safe_iterate): New.
(vec_safe_push): New.
(vec_safe_insert): New.
(vec_safe_truncate): New.
(vec_safe_copy): New.
(vec_safe_splice): New.
(vec<T, A, vl_ptr>): New partial specialization for the space
efficient layout.
(vec<T, A, vl_ptr>::exists): New.
(vec<T, A, vl_ptr>::is_empty): New.
(vec<T, A, vl_ptr>::length): New.
(vec<T, A, vl_ptr>::address): New.
(vec<T, A, vl_ptr>::operator[]): New.
(vec<T, A, vl_ptr>::operator!=): New.
(vec<T, A, vl_ptr>::operator==): New.
(vec<T, A, vl_ptr>::last): New.
(vec<T, A, vl_ptr>::space): New.
(vec<T, A, vl_ptr>::iterate): New.
(vec<T, A, vl_ptr>::copy): New.
(vec<T, A, vl_ptr>::reserve): New.
(vec<T, A, vl_ptr>::reserve_exact): New.
(vec<T, A, vl_ptr>::splice): New.
(vec<T, A, vl_ptr>::safe_splice): New.
(vec<T, A, vl_ptr>::quick_push): New.
(vec<T, A, vl_ptr>::safe_push): New.
(vec<T, A, vl_ptr>::pop): New.
(vec<T, A, vl_ptr>::truncate): New.
(vec<T, A, vl_ptr>::safe_grow): New.
(vec<T, A, vl_ptr>::safe_grow_cleared): New.
(vec<T, A, vl_ptr>::quick_grow): New.
(vec<T, A, vl_ptr>::quick_grow_cleared): New.
(vec<T, A, vl_ptr>::quick_insert): New.
(vec<T, A, vl_ptr>::safe_insert): New.
(vec<T, A, vl_ptr>::ordered_remove): New.
(vec<T, A, vl_ptr>::unordered_remove): New.
(vec<T, A, vl_ptr>::block_remove): New.
(vec<T, A, vl_ptr>::qsort): New.
(vec<T, A, vl_ptr>::lower_bound): New.
(vec_stack_alloc): Define.
(FOR_EACH_VEC_SAFE_ELT): Define.
* vecir.h: Remove. Update all users.
* vecprim.h: Remove. Update all users.
Move uchar to coretypes.h.
* Makefile.in (VEC_H): Add $(GGC_H).
Remove vecir.h and vecprim.h dependencies everywhere.
2012-11-16 Diego Novillo <dnovillo@google.com>
* gengtype-lex.l (VEC): Remove.
Add characters in the set [\!\>\.-].
* gengtype-parse.c (token_names): Remove "VEC".
(require_template_declaration): Remove handling of VEC_TOKEN.
(type): Likewise.
Call create_user_defined_type when parsing GTY((user)).
* gengtype-state.c (type_lineloc): handle TYPE_UNDEFINED.
(write_state_undefined_type): New.
(write_state_type): Call write_state_undefined_type for
TYPE_UNDEFINED.
(read_state_type): Call read_state_undefined_type for
TYPE_UNDEFINED.
* gengtype.c (dbgprint_count_type_at): Handle TYPE_UNDEFINED.
(create_user_defined_type): Make extern.
(type_for_name): Factor out of resolve_typedef.
(create_undefined_type): New.
(resolve_typedef): Call it when we cannot find a previous
typedef and the type is not a template.
(find_structure): Accept TYPE_UNDEFINED.
(set_gc_used_type): Add argument ALLOWED_UNDEFINED_TYPES,
default to false.
Emit an error for TYPE_UNDEFINED unless LEVEL is GC_UNUSED or
ALLOWED_UNDEFINED_TYPES is set.
Set ALLOWED_UNDEFINED_TYPES to true for TYPE_USER_STRUCT.
(filter_type_name): Accept templates with more than one
argument.
(output_mangled_typename): Handle TYPE_UNDEFINED.
(walk_type): Likewise.
(write_types_process_field): Likewise.
(write_func_for_structure): If CHAIN_NEXT is set, ORIG_S
should not be a user-defined type.
(write_types_local_user_process_field): Handle TYPE_ARRAY,
TYPE_NONE and TYPE_UNDEFINED.
(write_types_local_process_field): Likewise.
(contains_scalar_p): Return 0 for TYPE_USER_STRUCT.
(write_root): Reject user-defined types that are not pointers.
Handle TYPE_NONE, TYPE_UNDEFINED, TYPE_UNION, TYPE_LANG_STRUCT
and TYPE_PARAM_STRUCT.
(output_typename): Handle TYPE_NONE, TYPE_UNDEFINED, and
TYPE_ARRAY.
(dump_typekind): Handle TYPE_UNDEFINED.
* gengtype.h (enum typekind): Add TYPE_UNDEFINED.
(create_user_defined_type): Declare.
(enum gty_token): Remove VEC_TOKEN.
2012-11-16 Diego Novillo <dnovillo@google.com>
Adjust for new vec API (http://gcc.gnu.org/wiki/cxx-conversion/cxx-vec)
* coretypes.h (uchar): Define.
* alias.c: Use new vec API in vec.h.
* asan.c: Likewise.
* attribs.c: Likewise.
* basic-block.h: Likewise.
* bb-reorder.c: Likewise.
* builtins.c: Likewise.
* calls.c: Likewise.
* cfg.c: Likewise.
* cfganal.c: Likewise.
* cfgcleanup.c: Likewise.
* cfgexpand.c: Likewise.
* cfghooks.c: Likewise.
* cfghooks.h: Likewise.
* cfgloop.c: Likewise.
* cfgloop.h: Likewise.
* cfgloopanal.c: Likewise.
* cfgloopmanip.c: Likewise.
* cfgrtl.c: Likewise.
* cgraph.c: Likewise.
* cgraph.h: Likewise.
* cgraphclones.c: Likewise.
* cgraphunit.c: Likewise.
* combine.c: Likewise.
* compare-elim.c: Likewise.
* coverage.c: Likewise.
* cprop.c: Likewise.
* data-streamer.h: Likewise.
* dbxout.c: Likewise.
* dce.c: Likewise.
* df-core.c: Likewise.
* df-problems.c: Likewise.
* df-scan.c: Likewise.
* dominance.c: Likewise.
* domwalk.c: Likewise.
* domwalk.h: Likewise.
* dse.c: Likewise.
* dwarf2cfi.c: Likewise.
* dwarf2out.c: Likewise.
* dwarf2out.h: Likewise.
* emit-rtl.c: Likewise.
* except.c: Likewise.
* except.h: Likewise.
* expr.c: Likewise.
* expr.h: Likewise.
* final.c: Likewise.
* fold-const.c: Likewise.
* function.c: Likewise.
* function.h: Likewise.
* fwprop.c: Likewise.
* gcc.c: Likewise.
* gcse.c: Likewise.
* genattr.c: Likewise.
* genattrtab.c: Likewise.
* genautomata.c: Likewise.
* genextract.c: Likewise.
* genopinit.c: Likewise
* ggc-common.c: Likewise.
* ggc.h: Likewise.
* gimple-low.c: Likewise.
* gimple-ssa-strength-reduction.c: Likewise.
* gimple-streamer-in.c: Likewise.
* gimple.c: Likewise.
* gimple.h: Likewise.
* gimplify.c: Likewise.
* graph.c: Likewise.
* graphds.c: Likewise.
* graphds.h: Likewise.
* graphite-blocking.c: Likewise.
* graphite-clast-to-gimple.c: Likewise.
* graphite-dependences.c: Likewise.
* graphite-interchange.c: Likewise.
* graphite-optimize-isl.c: Likewise.
* graphite-poly.c: Likewise.
* graphite-poly.h: Likewise.
* graphite-scop-detection.c: Likewise.
* graphite-scop-detection.h: Likewise.
* graphite-sese-to-poly.c: Likewise.
* graphite.c: Likewise.
* godump.c: Likewise.
* haifa-sched.c: Likewise.
* hw-doloop.c: Likewise.
* hw-doloop.h: Likewise.
* ifcvt.c: Likewise.
* insn-addr.h: Likewise.
* ipa-cp.c: Likewise.
* ipa-inline-analysis.c: Likewise.
* ipa-inline-transform.c: Likewise.
* ipa-inline.c: Likewise.
* ipa-inline.h: Likewise.
* ipa-prop.c: Likewise.
* ipa-prop.h: Likewise.
* ipa-pure-const.c: Likewise.
* ipa-ref-inline.h: Likewise.
* ipa-ref.c: Likewise.
* ipa-ref.h: Likewise.
* ipa-reference.c: Likewise.
* ipa-split.c: Likewise.
* ipa-utils.c: Likewise.
* ipa-utils.h: Likewise.
* ipa.c: Likewise.
* ira-build.c: Likewise.
* ira-color.c: Likewise.
* ira-emit.c: Likewise.
* ira-int.h: Likewise.
* ira.c: Likewise.
* loop-invariant.c: Likewise.
* loop-unroll.c: Likewise.
* lower-subreg.c: Likewise.
* lra-lives.c: Likewise.
* lra.c: Likewise.
* lto-cgraph.c: Likewise.
* lto-section-out.c: Likewise.
* lto-streamer-in.c: Likewise.
* lto-streamer-out.c: Likewise.
* lto-streamer.h: Likewise.
* lto-symtab.c: Likewise.
* mcf.c: Likewise.
* modulo-sched.c: Likewise.
* omp-low.c: Likewise.
* opts-common.c: Likewise.
* opts-global.c: Likewise.
* opts.c: Likewise.
* opts.h: Likewise.
* passes.c: Likewise.
* predict.c: Likewise.
* print-tree.c: Likewise.
* profile.c: Likewise.
* profile.h: Likewise.
* read-rtl.c: Likewise.
* ree.c: Likewise.
* reg-stack.c: Likewise.
* regrename.c: Likewise.
* regrename.h: Likewise.
* reload.c: Likewise.
* reload.h: Likewise.
* reload1.c: Likewise.
* rtl.h: Likewise.
* sched-deps.c: Likewise.
* sched-int.h: Likewise.
* sdbout.c: Likewise.
* sel-sched-dump.c: Likewise.
* sel-sched-ir.c: Likewise.
* sel-sched-ir.h: Likewise.
* sel-sched.c: Likewise.
* sese.c: Likewise.
* sese.h: Likewise.
* statistics.h: Likewise.
* stmt.c: Likewise.
* stor-layout.c: Likewise.
* store-motion.c: Likewise.
* tlink.c: Likewise.
* toplev.c: Likewise.
* trans-mem.c: Likewise.
* tree-browser.c: Likewise.
* tree-call-cdce.c: Likewise.
* tree-cfg.c: Likewise.
* tree-cfgcleanup.c: Likewise.
* tree-chrec.c: Likewise.
* tree-chrec.h: Likewise.
* tree-complex.c: Likewise.
* tree-data-ref.c: Likewise.
* tree-data-ref.h: Likewise.
* tree-dfa.c: Likewise.
* tree-diagnostic.c: Likewise.
* tree-dump.c: Likewise.
* tree-eh.c: Likewise.
* tree-emutls.c: Likewise.
* tree-flow.h: Likewise.
* tree-if-conv.c: Likewise.
* tree-inline.c: Likewise.
* tree-inline.h: Likewise.
* tree-into-ssa.c: Likewise.
* tree-iterator.c: Likewise.
* tree-loop-distribution.c: Likewise.
* tree-mudflap.c: Likewise.
* tree-optimize.c: Likewise.
* tree-outof-ssa.c: Likewise.
* tree-parloops.c: Likewise.
* tree-phinodes.c: Likewise.
* tree-predcom.c: Likewise.
* tree-pretty-print.c: Likewise.
* tree-scalar-evolution.c: Likewise.
* tree-sra.c: Likewise.
* tree-ssa-address.c: Likewise.
* tree-ssa-alias.c: Likewise.
* tree-ssa-ccp.c: Likewise.
* tree-ssa-coalesce.c: Likewise.
* tree-ssa-dce.c: Likewise.
* tree-ssa-dom.c: Likewise.
* tree-ssa-forwprop.c: Likewise.
* tree-ssa-live.c: Likewise.
* tree-ssa-live.h: Likewise.
* tree-ssa-loop-im.c: Likewise.
* tree-ssa-loop-ivcanon.c: Likewise.
* tree-ssa-loop-ivopts.c: Likewise.
* tree-ssa-loop-manip.c: Likewise.
* tree-ssa-loop-niter.c: Likewise.
* tree-ssa-loop-prefetch.c: Likewise.
* tree-ssa-math-opts.c: Likewise.
* tree-ssa-operands.c: Likewise.
* tree-ssa-phiopt.c: Likewise.
* tree-ssa-phiprop.c: Likewise.
* tree-ssa-pre.c: Likewise.
* tree-ssa-propagate.c: Likewise.
* tree-ssa-reassoc.c: Likewise.
* tree-ssa-sccvn.c: Likewise.
* tree-ssa-sccvn.h: Likewise.
* tree-ssa-strlen.c: Likewise.
* tree-ssa-structalias.c: Likewise.
* tree-ssa-tail-merge.c: Likewise.
* tree-ssa-threadedge.c: Likewise.
* tree-ssa-threadupdate.c: Likewise.
* tree-ssa-uncprop.c: Likewise.
* tree-ssa-uninit.c: Likewise.
* tree-ssa.c: Likewise.
* tree-ssanames.c: Likewise.
* tree-stdarg.c: Likewise.
* tree-streamer-in.c: Likewise.
* tree-streamer-out.c: Likewise.
* tree-streamer.c: Likewise.
* tree-streamer.h: Likewise.
* tree-switch-conversion.c: Likewise.
* tree-vect-data-refs.c: Likewise.
* tree-vect-generic.c: Likewise.
* tree-vect-loop-manip.c: Likewise.
* tree-vect-loop.c: Likewise.
* tree-vect-patterns.c: Likewise.
* tree-vect-slp.c: Likewise.
* tree-vect-stmts.c: Likewise.
* tree-vectorizer.c: Likewise.
* tree-vectorizer.h: Likewise.
* tree-vrp.c: Likewise.
* tree.c: Likewise.
* tree.h: Likewise.
* value-prof.c: Likewise.
* value-prof.h: Likewise.
* var-tracking.c: Likewise.
* varasm.c: Likewise.
* varpool.c: Likewise.
* vmsdbgout.c: Likewise.
* config/bfin/bfin.c: Likewise.
* config/c6x/c6x.c: Likewise.
* config/darwin.c: Likewise.
* config/i386/i386.c: Likewise.
* config/ia64/ia64.c: Likewise.
* config/mep/mep.c: Likewise.
* config/mips/mips.c: Likewise.
* config/pa/pa.c: Likewise.
* config/rs6000/rs6000-c.c: Likewise.
* config/rs6000/rs6000.c: Likewise.
* config/rx/rx.c: Likewise.
* config/spu/spu-c.c: Likewise.
* config/vms/vms.c: Likewise.
* config/vxworks.c: Likewise.
* config/epiphany/resolve-sw-modes.c: Likewise.
From-SVN: r193595
2012-11-18 03:54:30 +01:00
|
|
|
vec<constructor_elt, va_gc> *v;
|
Implement protection of global variables
This patch implements the protection of global variables. See the
comments appended to the beginning of the asan.c file.
* varasm.c: Include asan.h.
(assemble_noswitch_variable): Grow size by asan_red_zone_size
if decl is asan protected.
(place_block_symbol): Likewise.
(assemble_variable): If decl is asan protected, increase
DECL_ALIGN if needed, and for decls emitted using
assemble_variable_contents append padding zeros after it.
* Makefile.in (varasm.o): Depend on asan.h.
* asan.c: Include output.h.
(asan_pp, asan_pp_initialized, asan_ctor_statements): New variables.
(asan_pp_initialize, asan_pp_string): New functions.
(asan_emit_stack_protection): Use asan_pp{,_initialized}
instead of local pp{,_initialized} vars, use asan_pp_initialize
and asan_pp_string helpers.
(asan_needs_local_alias, asan_protect_global,
asan_global_struct, asan_add_global): New functions.
(asan_finish_file): Protect global vars that can be protected. Use
asan_ctor_statements instead of ctor_statements.
* asan.h (asan_protect_global): New prototype.
(asan_red_zone_size): New inline function.
Co-Authored-By: Wei Mi <wmi@google.com>
From-SVN: r193437
2012-11-12 16:52:42 +01:00
|
|
|
char buf[20];
|
|
|
|
|
|
|
|
type = build_array_type_nelts (type, gcount);
|
|
|
|
ASM_GENERATE_INTERNAL_LABEL (buf, "LASAN", 0);
|
|
|
|
var = build_decl (UNKNOWN_LOCATION, VAR_DECL, get_identifier (buf),
|
|
|
|
type);
|
|
|
|
TREE_STATIC (var) = 1;
|
|
|
|
TREE_PUBLIC (var) = 0;
|
|
|
|
DECL_ARTIFICIAL (var) = 1;
|
|
|
|
DECL_IGNORED_P (var) = 1;
|
This patch rewrites the old VEC macro-based interface into a new one based on the template class 'vec'.
This patch rewrites the old VEC macro-based interface into a new one
based on the template class 'vec'. The user-visible changes are
described in http://gcc.gnu.org/wiki/cxx-conversion/cxx-vec.
I have tested the patch pretty extensively:
- Regular bootstraps on x86_64, ppc, ia64, sparc and hppa.
- Bootstraps with --enable-checking=release
- Bootstraps with --enable-checking=gc,gcac
- Basic builds on all targets (using contrib/config-list.mk).
We no longer access the vectors via VEC_* macros. The pattern is
"VEC_operation (T, A, V, args)" becomes "V.operation (args)".
The only thing I could not do is create proper ctors and dtors for the
vec class. Since these vectors are stored in unions, we
have to keep them as PODs (C++03 does not allow non-PODs in unions).
This means that creation and destruction must be explicit. There is a
new method vec<type, allocation, layout>::create() and another vec<type,
allocation, layout>::destroy() to allocate the internal vector.
For vectors that must be pointers, there is a family of free functions
that implement the operations that need to tolerate NULL vectors.
These functions all start with the prefix 'vec_safe_'. See the wiki
page for details.
The gengtype change removes the special handling for VEC() that used
to exist in gengtype. Additionally, it allows gengtype to recognize
templates of more than one argument and introduces the concept of an
undefined type (useful for template arguments that may or may not be
types).
When a TYPE_UNDEFINED is reached, gengtype will ignore it if it
happens inside a type marked with GTY((user)). Otherwise, it will
emit an error.
Finally, gengtype rejects root types marked GTY((user)) that are not
first class pointers.
2012-11-16 Diego Novillo <dnovillo@google.com>
VEC API overhaul (http://gcc.gnu.org/wiki/cxx-conversion/cxx-vec)
* vec.c (register_overhead): Convert it into
member function of vec_prefix.
(release_overhead): Likewise.
(calculate_allocation): Likewise.
(vec_heap_free): Remove.
(vec_gc_o_reserve_1): Remove.
(vec_heap_o_reserve_1): Remove.
(vec_stack_o_reserve_1): Remove.
(vec_stack_o_reserve_exact): Remove.
(register_stack_vec): New.
(stack_vec_register_index): New.
(unregister_stack_vec): New.
(vec_assert_fail): Remove.
* vec.h: Conditionally include ggc.h. Document conditional
hackery.
Update top-level documentation.
(ALONE_VEC_CHECK_INFO): Remove.
(VEC_CHECK_INFO): Remove.
(ALONE_VEC_CHECK_DECL): Remove.
(VEC_CHECK_DECL): Remove.
(ALONE_VEC_CHECK_PASS): Remove.
(VEC_CHECK_PASS): Remove.
(VEC_ASSERT): Remove.
(vec_prefix): Add friends va_gc, va_gc_atomic, va_heap and
va_stack.
Mark fields alloc_ and num_ as protected.
(struct vec_t): Remove. Remove all function members.
(struct vl_embed): Declare.
(struct vl_ptr): Declare.
(free): Remove.
(reserve_exact): Remove.
(reserve): Remove.
(safe_splice): Remove.
(safe_push): Remove.
(safe_grow): Remove.
(safe_grow_cleared): Remove.
(safe_insert): Remove.
(DEF_VEC_I): Remove.
(DEF_VEC_ALLOC_I): Remove.
(DEF_VEC_P): Remove.
(DEF_VEC_ALLOC_P): Remove.
(DEF_VEC_O): Remove.
(DEF_VEC_ALLOC_O): Remove.
(DEF_VEC_ALLOC_P_STACK): Remove.
(DEF_VEC_ALLOC_O_STACK): Remove.
(DEF_VEC_ALLOC_I_STACK): Remove.
(DEF_VEC_A): Remove.
(DEF_VEC_ALLOC_A): Remove.
(vec_stack_p_reserve_exact_1): Remove.
(vec_stack_o_reserve): Remove.
(vec_stack_o_reserve_exact): Remove.
(VEC_length): Remove.
(VEC_empty): Remove.
(VEC_address): Remove.
(vec_address): Remove.
(VEC_last): Remove.
(VEC_index): Remove.
(VEC_iterate): Remove.
(VEC_embedded_size): Remove.
(VEC_embedded_init): Remove.
(VEC_free): Remove.
(VEC_copy): Remove.
(VEC_space): Remove.
(VEC_reserve): Remove.
(VEC_reserve_exact): Remove.
(VEC_splice): Remove.
(VEC_safe_splice): Remove.
(VEC_quick_push): Remove.
(VEC_safe_push): Remove.
(VEC_pop): Remove.
(VEC_truncate): Remove.
(VEC_safe_grow): Remove.
(VEC_replace): Remove.
(VEC_quick_insert): Remove.
(VEC_safe_insert): Remove.
(VEC_ordered_remove): Remove.
(VEC_unordered_remove): Remove.
(VEC_block_remove): Remove.
(VEC_lower_bound): Remove.
(VEC_alloc): Remove.
(VEC_qsort): Remove.
(va_heap): Declare.
(va_heap::default_layout): New typedef to vl_ptr.
(va_heap::reserve): New.
(va_heap::release): New.
(va_gc): Declare.
(va_gc::default_layout): New typedef to vl_embed.
(va_gc::reserve): New.
(va_gc::release): New.
(va_gc_atomic): Declare. Inherit from va_gc.
(va_stack): Declare.
(va_stack::default_layout): New typedef to vl_ptr.
(va_stack::alloc): New.
(va_stack::reserve): New.
(va_stack::release): New.
(register_stack_vec): Declare.
(stack_vec_register_index): Declare.
(unregister_stack_vec): Declare.
(vec<T, A = va_heap, L = typename A::default_layout>): Declare
empty vec template.
(vec<T, A, vl_embed>): Partial specialization for embedded
layout.
(vec<T, A, vl_embed>::allocated): New.
(vec<T, A, vl_embed>::length): New.
(vec<T, A, vl_embed>::is_empty): New.
(vec<T, A, vl_embed>::address): New.
(vec<T, A, vl_embed>::operator[]): New.
(vec<T, A, vl_embed>::last): New.
(vec<T, A, vl_embed>::space): New.
(vec<T, A, vl_embed>::iterate): New.
(vec<T, A, vl_embed>::iterate): New.
(vec<T, A, vl_embed>::copy): New.
(vec<T, A, vl_embed>::splice): New.
(vec<T, A, vl_embed>::quick_push): New.
(vec<T, A, vl_embed>::pop): New.
(vec<T, A, vl_embed>::truncate): New.
(vec<T, A, vl_embed>::quick_insert): New.
(vec<T, A, vl_embed>::ordered_remove): New.
(vec<T, A, vl_embed>::unordered_remove): New.
(vec<T, A, vl_embed>::block_remove): New.
(vec<T, A, vl_embed>::qsort): New.
(vec<T, A, vl_embed>::lower_bound): New.
(vec<T, A, vl_embed>::embedded_size): New.
(vec<T, A, vl_embed>::embedded_init): New.
(vec<T, A, vl_embed>::quick_grow): New.
(vec<T, A, vl_embed>::quick_grow_cleared): New.
(vec_safe_space): New.
(vec_safe_length): New.
(vec_safe_address): New.
(vec_safe_is_empty): New.
(vec_safe_reserve): New.
(vec_safe_reserve_exact): New.
(vec_alloc): New.
(vec_free): New.
(vec_safe_grow): New.
(vec_safe_grow_cleared): New.
(vec_safe_iterate): New.
(vec_safe_push): New.
(vec_safe_insert): New.
(vec_safe_truncate): New.
(vec_safe_copy): New.
(vec_safe_splice): New.
(vec<T, A, vl_ptr>): New partial specialization for the space
efficient layout.
(vec<T, A, vl_ptr>::exists): New.
(vec<T, A, vl_ptr>::is_empty): New.
(vec<T, A, vl_ptr>::length): New.
(vec<T, A, vl_ptr>::address): New.
(vec<T, A, vl_ptr>::operator[]): New.
(vec<T, A, vl_ptr>::operator!=): New.
(vec<T, A, vl_ptr>::operator==): New.
(vec<T, A, vl_ptr>::last): New.
(vec<T, A, vl_ptr>::space): New.
(vec<T, A, vl_ptr>::iterate): New.
(vec<T, A, vl_ptr>::copy): New.
(vec<T, A, vl_ptr>::reserve): New.
(vec<T, A, vl_ptr>::reserve_exact): New.
(vec<T, A, vl_ptr>::splice): New.
(vec<T, A, vl_ptr>::safe_splice): New.
(vec<T, A, vl_ptr>::quick_push): New.
(vec<T, A, vl_ptr>::safe_push): New.
(vec<T, A, vl_ptr>::pop): New.
(vec<T, A, vl_ptr>::truncate): New.
(vec<T, A, vl_ptr>::safe_grow): New.
(vec<T, A, vl_ptr>::safe_grow_cleared): New.
(vec<T, A, vl_ptr>::quick_grow): New.
(vec<T, A, vl_ptr>::quick_grow_cleared): New.
(vec<T, A, vl_ptr>::quick_insert): New.
(vec<T, A, vl_ptr>::safe_insert): New.
(vec<T, A, vl_ptr>::ordered_remove): New.
(vec<T, A, vl_ptr>::unordered_remove): New.
(vec<T, A, vl_ptr>::block_remove): New.
(vec<T, A, vl_ptr>::qsort): New.
(vec<T, A, vl_ptr>::lower_bound): New.
(vec_stack_alloc): Define.
(FOR_EACH_VEC_SAFE_ELT): Define.
* vecir.h: Remove. Update all users.
* vecprim.h: Remove. Update all users.
Move uchar to coretypes.h.
* Makefile.in (VEC_H): Add $(GGC_H).
Remove vecir.h and vecprim.h dependencies everywhere.
2012-11-16 Diego Novillo <dnovillo@google.com>
* gengtype-lex.l (VEC): Remove.
Add characters in the set [\!\>\.-].
* gengtype-parse.c (token_names): Remove "VEC".
(require_template_declaration): Remove handling of VEC_TOKEN.
(type): Likewise.
Call create_user_defined_type when parsing GTY((user)).
* gengtype-state.c (type_lineloc): handle TYPE_UNDEFINED.
(write_state_undefined_type): New.
(write_state_type): Call write_state_undefined_type for
TYPE_UNDEFINED.
(read_state_type): Call read_state_undefined_type for
TYPE_UNDEFINED.
* gengtype.c (dbgprint_count_type_at): Handle TYPE_UNDEFINED.
(create_user_defined_type): Make extern.
(type_for_name): Factor out of resolve_typedef.
(create_undefined_type): New.
(resolve_typedef): Call it when we cannot find a previous
typedef and the type is not a template.
(find_structure): Accept TYPE_UNDEFINED.
(set_gc_used_type): Add argument ALLOWED_UNDEFINED_TYPES,
default to false.
Emit an error for TYPE_UNDEFINED unless LEVEL is GC_UNUSED or
ALLOWED_UNDEFINED_TYPES is set.
Set ALLOWED_UNDEFINED_TYPES to true for TYPE_USER_STRUCT.
(filter_type_name): Accept templates with more than one
argument.
(output_mangled_typename): Handle TYPE_UNDEFINED.
(walk_type): Likewise.
(write_types_process_field): Likewise.
(write_func_for_structure): If CHAIN_NEXT is set, ORIG_S
should not be a user-defined type.
(write_types_local_user_process_field): Handle TYPE_ARRAY,
TYPE_NONE and TYPE_UNDEFINED.
(write_types_local_process_field): Likewise.
(contains_scalar_p): Return 0 for TYPE_USER_STRUCT.
(write_root): Reject user-defined types that are not pointers.
Handle TYPE_NONE, TYPE_UNDEFINED, TYPE_UNION, TYPE_LANG_STRUCT
and TYPE_PARAM_STRUCT.
(output_typename): Handle TYPE_NONE, TYPE_UNDEFINED, and
TYPE_ARRAY.
(dump_typekind): Handle TYPE_UNDEFINED.
* gengtype.h (enum typekind): Add TYPE_UNDEFINED.
(create_user_defined_type): Declare.
(enum gty_token): Remove VEC_TOKEN.
2012-11-16 Diego Novillo <dnovillo@google.com>
Adjust for new vec API (http://gcc.gnu.org/wiki/cxx-conversion/cxx-vec)
* coretypes.h (uchar): Define.
* alias.c: Use new vec API in vec.h.
* asan.c: Likewise.
* attribs.c: Likewise.
* basic-block.h: Likewise.
* bb-reorder.c: Likewise.
* builtins.c: Likewise.
* calls.c: Likewise.
* cfg.c: Likewise.
* cfganal.c: Likewise.
* cfgcleanup.c: Likewise.
* cfgexpand.c: Likewise.
* cfghooks.c: Likewise.
* cfghooks.h: Likewise.
* cfgloop.c: Likewise.
* cfgloop.h: Likewise.
* cfgloopanal.c: Likewise.
* cfgloopmanip.c: Likewise.
* cfgrtl.c: Likewise.
* cgraph.c: Likewise.
* cgraph.h: Likewise.
* cgraphclones.c: Likewise.
* cgraphunit.c: Likewise.
* combine.c: Likewise.
* compare-elim.c: Likewise.
* coverage.c: Likewise.
* cprop.c: Likewise.
* data-streamer.h: Likewise.
* dbxout.c: Likewise.
* dce.c: Likewise.
* df-core.c: Likewise.
* df-problems.c: Likewise.
* df-scan.c: Likewise.
* dominance.c: Likewise.
* domwalk.c: Likewise.
* domwalk.h: Likewise.
* dse.c: Likewise.
* dwarf2cfi.c: Likewise.
* dwarf2out.c: Likewise.
* dwarf2out.h: Likewise.
* emit-rtl.c: Likewise.
* except.c: Likewise.
* except.h: Likewise.
* expr.c: Likewise.
* expr.h: Likewise.
* final.c: Likewise.
* fold-const.c: Likewise.
* function.c: Likewise.
* function.h: Likewise.
* fwprop.c: Likewise.
* gcc.c: Likewise.
* gcse.c: Likewise.
* genattr.c: Likewise.
* genattrtab.c: Likewise.
* genautomata.c: Likewise.
* genextract.c: Likewise.
* genopinit.c: Likewise
* ggc-common.c: Likewise.
* ggc.h: Likewise.
* gimple-low.c: Likewise.
* gimple-ssa-strength-reduction.c: Likewise.
* gimple-streamer-in.c: Likewise.
* gimple.c: Likewise.
* gimple.h: Likewise.
* gimplify.c: Likewise.
* graph.c: Likewise.
* graphds.c: Likewise.
* graphds.h: Likewise.
* graphite-blocking.c: Likewise.
* graphite-clast-to-gimple.c: Likewise.
* graphite-dependences.c: Likewise.
* graphite-interchange.c: Likewise.
* graphite-optimize-isl.c: Likewise.
* graphite-poly.c: Likewise.
* graphite-poly.h: Likewise.
* graphite-scop-detection.c: Likewise.
* graphite-scop-detection.h: Likewise.
* graphite-sese-to-poly.c: Likewise.
* graphite.c: Likewise.
* godump.c: Likewise.
* haifa-sched.c: Likewise.
* hw-doloop.c: Likewise.
* hw-doloop.h: Likewise.
* ifcvt.c: Likewise.
* insn-addr.h: Likewise.
* ipa-cp.c: Likewise.
* ipa-inline-analysis.c: Likewise.
* ipa-inline-transform.c: Likewise.
* ipa-inline.c: Likewise.
* ipa-inline.h: Likewise.
* ipa-prop.c: Likewise.
* ipa-prop.h: Likewise.
* ipa-pure-const.c: Likewise.
* ipa-ref-inline.h: Likewise.
* ipa-ref.c: Likewise.
* ipa-ref.h: Likewise.
* ipa-reference.c: Likewise.
* ipa-split.c: Likewise.
* ipa-utils.c: Likewise.
* ipa-utils.h: Likewise.
* ipa.c: Likewise.
* ira-build.c: Likewise.
* ira-color.c: Likewise.
* ira-emit.c: Likewise.
* ira-int.h: Likewise.
* ira.c: Likewise.
* loop-invariant.c: Likewise.
* loop-unroll.c: Likewise.
* lower-subreg.c: Likewise.
* lra-lives.c: Likewise.
* lra.c: Likewise.
* lto-cgraph.c: Likewise.
* lto-section-out.c: Likewise.
* lto-streamer-in.c: Likewise.
* lto-streamer-out.c: Likewise.
* lto-streamer.h: Likewise.
* lto-symtab.c: Likewise.
* mcf.c: Likewise.
* modulo-sched.c: Likewise.
* omp-low.c: Likewise.
* opts-common.c: Likewise.
* opts-global.c: Likewise.
* opts.c: Likewise.
* opts.h: Likewise.
* passes.c: Likewise.
* predict.c: Likewise.
* print-tree.c: Likewise.
* profile.c: Likewise.
* profile.h: Likewise.
* read-rtl.c: Likewise.
* ree.c: Likewise.
* reg-stack.c: Likewise.
* regrename.c: Likewise.
* regrename.h: Likewise.
* reload.c: Likewise.
* reload.h: Likewise.
* reload1.c: Likewise.
* rtl.h: Likewise.
* sched-deps.c: Likewise.
* sched-int.h: Likewise.
* sdbout.c: Likewise.
* sel-sched-dump.c: Likewise.
* sel-sched-ir.c: Likewise.
* sel-sched-ir.h: Likewise.
* sel-sched.c: Likewise.
* sese.c: Likewise.
* sese.h: Likewise.
* statistics.h: Likewise.
* stmt.c: Likewise.
* stor-layout.c: Likewise.
* store-motion.c: Likewise.
* tlink.c: Likewise.
* toplev.c: Likewise.
* trans-mem.c: Likewise.
* tree-browser.c: Likewise.
* tree-call-cdce.c: Likewise.
* tree-cfg.c: Likewise.
* tree-cfgcleanup.c: Likewise.
* tree-chrec.c: Likewise.
* tree-chrec.h: Likewise.
* tree-complex.c: Likewise.
* tree-data-ref.c: Likewise.
* tree-data-ref.h: Likewise.
* tree-dfa.c: Likewise.
* tree-diagnostic.c: Likewise.
* tree-dump.c: Likewise.
* tree-eh.c: Likewise.
* tree-emutls.c: Likewise.
* tree-flow.h: Likewise.
* tree-if-conv.c: Likewise.
* tree-inline.c: Likewise.
* tree-inline.h: Likewise.
* tree-into-ssa.c: Likewise.
* tree-iterator.c: Likewise.
* tree-loop-distribution.c: Likewise.
* tree-mudflap.c: Likewise.
* tree-optimize.c: Likewise.
* tree-outof-ssa.c: Likewise.
* tree-parloops.c: Likewise.
* tree-phinodes.c: Likewise.
* tree-predcom.c: Likewise.
* tree-pretty-print.c: Likewise.
* tree-scalar-evolution.c: Likewise.
* tree-sra.c: Likewise.
* tree-ssa-address.c: Likewise.
* tree-ssa-alias.c: Likewise.
* tree-ssa-ccp.c: Likewise.
* tree-ssa-coalesce.c: Likewise.
* tree-ssa-dce.c: Likewise.
* tree-ssa-dom.c: Likewise.
* tree-ssa-forwprop.c: Likewise.
* tree-ssa-live.c: Likewise.
* tree-ssa-live.h: Likewise.
* tree-ssa-loop-im.c: Likewise.
* tree-ssa-loop-ivcanon.c: Likewise.
* tree-ssa-loop-ivopts.c: Likewise.
* tree-ssa-loop-manip.c: Likewise.
* tree-ssa-loop-niter.c: Likewise.
* tree-ssa-loop-prefetch.c: Likewise.
* tree-ssa-math-opts.c: Likewise.
* tree-ssa-operands.c: Likewise.
* tree-ssa-phiopt.c: Likewise.
* tree-ssa-phiprop.c: Likewise.
* tree-ssa-pre.c: Likewise.
* tree-ssa-propagate.c: Likewise.
* tree-ssa-reassoc.c: Likewise.
* tree-ssa-sccvn.c: Likewise.
* tree-ssa-sccvn.h: Likewise.
* tree-ssa-strlen.c: Likewise.
* tree-ssa-structalias.c: Likewise.
* tree-ssa-tail-merge.c: Likewise.
* tree-ssa-threadedge.c: Likewise.
* tree-ssa-threadupdate.c: Likewise.
* tree-ssa-uncprop.c: Likewise.
* tree-ssa-uninit.c: Likewise.
* tree-ssa.c: Likewise.
* tree-ssanames.c: Likewise.
* tree-stdarg.c: Likewise.
* tree-streamer-in.c: Likewise.
* tree-streamer-out.c: Likewise.
* tree-streamer.c: Likewise.
* tree-streamer.h: Likewise.
* tree-switch-conversion.c: Likewise.
* tree-vect-data-refs.c: Likewise.
* tree-vect-generic.c: Likewise.
* tree-vect-loop-manip.c: Likewise.
* tree-vect-loop.c: Likewise.
* tree-vect-patterns.c: Likewise.
* tree-vect-slp.c: Likewise.
* tree-vect-stmts.c: Likewise.
* tree-vectorizer.c: Likewise.
* tree-vectorizer.h: Likewise.
* tree-vrp.c: Likewise.
* tree.c: Likewise.
* tree.h: Likewise.
* value-prof.c: Likewise.
* value-prof.h: Likewise.
* var-tracking.c: Likewise.
* varasm.c: Likewise.
* varpool.c: Likewise.
* vmsdbgout.c: Likewise.
* config/bfin/bfin.c: Likewise.
* config/c6x/c6x.c: Likewise.
* config/darwin.c: Likewise.
* config/i386/i386.c: Likewise.
* config/ia64/ia64.c: Likewise.
* config/mep/mep.c: Likewise.
* config/mips/mips.c: Likewise.
* config/pa/pa.c: Likewise.
* config/rs6000/rs6000-c.c: Likewise.
* config/rs6000/rs6000.c: Likewise.
* config/rx/rx.c: Likewise.
* config/spu/spu-c.c: Likewise.
* config/vms/vms.c: Likewise.
* config/vxworks.c: Likewise.
* config/epiphany/resolve-sw-modes.c: Likewise.
From-SVN: r193595
2012-11-18 03:54:30 +01:00
|
|
|
vec_alloc (v, gcount);
|
Implement protection of global variables
This patch implements the protection of global variables. See the
comments appended to the beginning of the asan.c file.
* varasm.c: Include asan.h.
(assemble_noswitch_variable): Grow size by asan_red_zone_size
if decl is asan protected.
(place_block_symbol): Likewise.
(assemble_variable): If decl is asan protected, increase
DECL_ALIGN if needed, and for decls emitted using
assemble_variable_contents append padding zeros after it.
* Makefile.in (varasm.o): Depend on asan.h.
* asan.c: Include output.h.
(asan_pp, asan_pp_initialized, asan_ctor_statements): New variables.
(asan_pp_initialize, asan_pp_string): New functions.
(asan_emit_stack_protection): Use asan_pp{,_initialized}
instead of local pp{,_initialized} vars, use asan_pp_initialize
and asan_pp_string helpers.
(asan_needs_local_alias, asan_protect_global,
asan_global_struct, asan_add_global): New functions.
(asan_finish_file): Protect global vars that can be protected. Use
asan_ctor_statements instead of ctor_statements.
* asan.h (asan_protect_global): New prototype.
(asan_red_zone_size): New inline function.
Co-Authored-By: Wei Mi <wmi@google.com>
From-SVN: r193437
2012-11-12 16:52:42 +01:00
|
|
|
FOR_EACH_DEFINED_VARIABLE (vnode)
|
2013-10-29 19:30:00 +01:00
|
|
|
if (TREE_ASM_WRITTEN (vnode->decl)
|
|
|
|
&& asan_protect_global (vnode->decl))
|
|
|
|
asan_add_global (vnode->decl, TREE_TYPE (type), v);
|
2012-12-10 13:14:36 +01:00
|
|
|
struct asan_add_string_csts_data aascd;
|
|
|
|
aascd.type = TREE_TYPE (type);
|
|
|
|
aascd.v = v;
|
move many gc hashtab to hash_table
gcc/
* asan.c, cfgloop.c, cfgloop.h, cgraph.c, cgraph.h,
config/darwin.c, config/m32c/m32c.c, config/mep/mep.c,
config/mips/mips.c, config/rs6000/rs6000.c, dwarf2out.c,
function.c, function.h, gimple-ssa.h, libfuncs.h, optabs.c,
output.h, rtl.h, sese.c, symtab.c, tree-cfg.c, tree-dfa.c,
tree-ssa.c, varasm.c: Use hash-table instead of hashtab.
* doc/gty.texi (for_user): Document new option.
* gengtype.c (create_user_defined_type): Don't try to get a struct for
char.
(walk_type): Don't error out on for_user option.
(write_func_for_structure): Emit user marking routines if requested by
for_user option.
(write_local_func_for_structure): Likewise.
(main): Mark types with for_user option as used.
* ggc.h (gt_pch_nx): Add overload for unsigned int.
* hash-map.h (hash_map::hash_entry::pch_nx_helper): AddOverloads.
* hash-table.h (ggc_hasher): New struct.
(hash_table::create_ggc): New function.
(gt_pch_nx): New overload for hash_table.
java/
* class.c, decl.c, except.c, expr.c, java-tree.h, lang.c: Use
hash_table instead of hashtab.
objc/
* objc-act.c: use hash_table instead of hashtab.
cp/
* cp-gimplify.c, cp-tree.h, decl.c, mangle.c, name-lookup.c,
pt.c, semantics.c, tree.c, typeck2.c: Use hash_table instead of
hashtab.
fortran/
* trans-decl.c, trans.c, trans.h: Use hash_table instead of hashtab.
c-family/
* c-common.c: Use hash_table instead of hashtab.
From-SVN: r216127
2014-10-13 00:22:53 +02:00
|
|
|
const_desc_htab->traverse<asan_add_string_csts_data *, add_string_csts>
|
|
|
|
(&aascd);
|
Implement protection of global variables
This patch implements the protection of global variables. See the
comments appended to the beginning of the asan.c file.
* varasm.c: Include asan.h.
(assemble_noswitch_variable): Grow size by asan_red_zone_size
if decl is asan protected.
(place_block_symbol): Likewise.
(assemble_variable): If decl is asan protected, increase
DECL_ALIGN if needed, and for decls emitted using
assemble_variable_contents append padding zeros after it.
* Makefile.in (varasm.o): Depend on asan.h.
* asan.c: Include output.h.
(asan_pp, asan_pp_initialized, asan_ctor_statements): New variables.
(asan_pp_initialize, asan_pp_string): New functions.
(asan_emit_stack_protection): Use asan_pp{,_initialized}
instead of local pp{,_initialized} vars, use asan_pp_initialize
and asan_pp_string helpers.
(asan_needs_local_alias, asan_protect_global,
asan_global_struct, asan_add_global): New functions.
(asan_finish_file): Protect global vars that can be protected. Use
asan_ctor_statements instead of ctor_statements
* asan.h (asan_protect_global): New prototype.
(asan_red_zone_size): New inline function.
Co-Authored-By: Wei Mi <wmi@google.com>
From-SVN: r193437
2012-11-12 16:52:42 +01:00
|
|
|
ctor = build_constructor (type, v);
|
|
|
|
TREE_CONSTANT (ctor) = 1;
|
|
|
|
TREE_STATIC (ctor) = 1;
|
|
|
|
DECL_INITIAL (var) = ctor;
|
2014-07-24 14:25:27 +02:00
|
|
|
varpool_node::finalize_decl (var);
|
Implement protection of global variables
This patch implements the protection of global variables. See the
comments appended to the beginning of the asan.c file.
* varasm.c: Include asan.h.
(assemble_noswitch_variable): Grow size by asan_red_zone_size
if decl is asan protected.
(place_block_symbol): Likewise.
(assemble_variable): If decl is asan protected, increase
DECL_ALIGN if needed, and for decls emitted using
assemble_variable_contents append padding zeros after it.
* Makefile.in (varasm.o): Depend on asan.h.
* asan.c: Include output.h.
(asan_pp, asan_pp_initialized, asan_ctor_statements): New variables.
(asan_pp_initialize, asan_pp_string): New functions.
(asan_emit_stack_protection): Use asan_pp{,_initialized}
instead of local pp{,_initialized} vars, use asan_pp_initialize
and asan_pp_string helpers.
(asan_needs_local_alias, asan_protect_global,
asan_global_struct, asan_add_global): New functions.
(asan_finish_file): Protect global vars that can be protected. Use
asan_ctor_statements instead of ctor_statements
* asan.h (asan_protect_global): New prototype.
(asan_red_zone_size): New inline function.
Co-Authored-By: Wei Mi <wmi@google.com>
From-SVN: r193437
2012-11-12 16:52:42 +01:00
|
|
|
|
2014-10-03 16:17:28 +02:00
|
|
|
tree fn = builtin_decl_implicit (BUILT_IN_ASAN_REGISTER_GLOBALS);
|
2013-08-30 18:12:58 +02:00
|
|
|
tree gcount_tree = build_int_cst (pointer_sized_int_node, gcount);
|
2012-12-03 18:28:10 +01:00
|
|
|
append_to_statement_list (build_call_expr (fn, 2,
|
Implement protection of global variables
This patch implements the protection of global variables. See the
comments appended to the beginning of the asan.c file.
* varasm.c: Include asan.h.
(assemble_noswitch_variable): Grow size by asan_red_zone_size
if decl is asan protected.
(place_block_symbol): Likewise.
(assemble_variable): If decl is asan protected, increase
DECL_ALIGN if needed, and for decls emitted using
assemble_variable_contents append padding zeros after it.
* Makefile.in (varasm.o): Depend on asan.h.
* asan.c: Include output.h.
(asan_pp, asan_pp_initialized, asan_ctor_statements): New variables.
(asan_pp_initialize, asan_pp_string): New functions.
(asan_emit_stack_protection): Use asan_pp{,_initialized}
instead of local pp{,_initialized} vars, use asan_pp_initialize
and asan_pp_string helpers.
(asan_needs_local_alias, asan_protect_global,
asan_global_struct, asan_add_global): New functions.
(asan_finish_file): Protect global vars that can be protected. Use
asan_ctor_statements instead of ctor_statements
* asan.h (asan_protect_global): New prototype.
(asan_red_zone_size): New inline function.
Co-Authored-By: Wei Mi <wmi@google.com>
From-SVN: r193437
2012-11-12 16:52:42 +01:00
|
|
|
build_fold_addr_expr (var),
|
2013-08-30 18:12:58 +02:00
|
|
|
gcount_tree),
|
Implement protection of global variables
This patch implements the protection of global variables. See the
comments appended to the beginning of the asan.c file.
* varasm.c: Include asan.h.
(assemble_noswitch_variable): Grow size by asan_red_zone_size
if decl is asan protected.
(place_block_symbol): Likewise.
(assemble_variable): If decl is asan protected, increase
DECL_ALIGN if needed, and for decls emitted using
assemble_variable_contents append padding zeros after it.
* Makefile.in (varasm.o): Depend on asan.h.
* asan.c: Include output.h.
(asan_pp, asan_pp_initialized, asan_ctor_statements): New variables.
(asan_pp_initialize, asan_pp_string): New functions.
(asan_emit_stack_protection): Use asan_pp{,_initialized}
instead of local pp{,_initialized} vars, use asan_pp_initialize
and asan_pp_string helpers.
(asan_needs_local_alias, asan_protect_global,
asan_global_struct, asan_add_global): New functions.
(asan_finish_file): Protect global vars that can be protected. Use
asan_ctor_statements instead of ctor_statements
* asan.h (asan_protect_global): New prototype.
(asan_red_zone_size): New inline function.
Co-Authored-By: Wei Mi <wmi@google.com>
From-SVN: r193437
2012-11-12 16:52:42 +01:00
|
|
|
&asan_ctor_statements);
|
|
|
|
|
2012-12-03 18:28:10 +01:00
|
|
|
fn = builtin_decl_implicit (BUILT_IN_ASAN_UNREGISTER_GLOBALS);
|
|
|
|
append_to_statement_list (build_call_expr (fn, 2,
|
Implement protection of global variables
This patch implements the protection of global variables. See the
comments appended to the beginning of the asan.c file.
* varasm.c: Include asan.h.
(assemble_noswitch_variable): Grow size by asan_red_zone_size
if decl is asan protected.
(place_block_symbol): Likewise.
(assemble_variable): If decl is asan protected, increase
DECL_ALIGN if needed, and for decls emitted using
assemble_variable_contents append padding zeros after it.
* Makefile.in (varasm.o): Depend on asan.h.
* asan.c: Include output.h.
(asan_pp, asan_pp_initialized, asan_ctor_statements): New variables.
(asan_pp_initialize, asan_pp_string): New functions.
(asan_emit_stack_protection): Use asan_pp{,_initialized}
instead of local pp{,_initialized} vars, use asan_pp_initialize
and asan_pp_string helpers.
(asan_needs_local_alias, asan_protect_global,
asan_global_struct, asan_add_global): New functions.
(asan_finish_file): Protect global vars that can be protected. Use
asan_ctor_statements instead of ctor_statements
* asan.h (asan_protect_global): New prototype.
(asan_red_zone_size): New inline function.
Co-Authored-By: Wei Mi <wmi@google.com>
From-SVN: r193437
2012-11-12 16:52:42 +01:00
|
|
|
build_fold_addr_expr (var),
|
2013-08-30 18:12:58 +02:00
|
|
|
gcount_tree),
|
Implement protection of global variables
This patch implements the protection of global variables. See the
comments appended to the beginning of the asan.c file.
* varasm.c: Include asan.h.
(assemble_noswitch_variable): Grow size by asan_red_zone_size
if decl is asan protected.
(place_block_symbol): Likewise.
(assemble_variable): If decl is asan protected, increase
DECL_ALIGN if needed, and for decls emitted using
assemble_variable_contents append padding zeros after it.
* Makefile.in (varasm.o): Depend on asan.h.
* asan.c: Include output.h.
(asan_pp, asan_pp_initialized, asan_ctor_statements): New variables.
(asan_pp_initialize, asan_pp_string): New functions.
(asan_emit_stack_protection): Use asan_pp{,_initialized}
instead of local pp{,_initialized} vars, use asan_pp_initialize
and asan_pp_string helpers.
(asan_needs_local_alias, asan_protect_global,
asan_global_struct, asan_add_global): New functions.
(asan_finish_file): Protect global vars that can be protected. Use
asan_ctor_statements instead of ctor_statements
* asan.h (asan_protect_global): New prototype.
(asan_red_zone_size): New inline function.
Co-Authored-By: Wei Mi <wmi@google.com>
From-SVN: r193437
2012-11-12 16:52:42 +01:00
|
|
|
&dtor_statements);
|
2014-12-02 19:58:59 +01:00
|
|
|
cgraph_build_static_cdtor ('D', dtor_statements, priority);
|
Implement protection of global variables
This patch implements the protection of global variables. See the
comments appended to the beginning of the asan.c file.
* varasm.c: Include asan.h.
(assemble_noswitch_variable): Grow size by asan_red_zone_size
if decl is asan protected.
(place_block_symbol): Likewise.
(assemble_variable): If decl is asan protected, increase
DECL_ALIGN if needed, and for decls emitted using
assemble_variable_contents append padding zeros after it.
* Makefile.in (varasm.o): Depend on asan.h.
* asan.c: Include output.h.
(asan_pp, asan_pp_initialized, asan_ctor_statements): New variables.
(asan_pp_initialize, asan_pp_string): New functions.
(asan_emit_stack_protection): Use asan_pp{,_initialized}
instead of local pp{,_initialized} vars, use asan_pp_initialize
and asan_pp_string helpers.
(asan_needs_local_alias, asan_protect_global,
asan_global_struct, asan_add_global): New functions.
(asan_finish_file): Protect global vars that can be protected. Use
asan_ctor_statements instead of ctor_statements
* asan.h (asan_protect_global): New prototype.
(asan_red_zone_size): New inline function.
Co-Authored-By: Wei Mi <wmi@google.com>
From-SVN: r193437
2012-11-12 16:52:42 +01:00
|
|
|
}
|
2014-10-03 16:17:28 +02:00
|
|
|
if (asan_ctor_statements)
|
2014-12-02 19:58:59 +01:00
|
|
|
cgraph_build_static_cdtor ('I', asan_ctor_statements, priority);
|
2013-08-30 18:12:58 +02:00
|
|
|
flag_sanitize |= SANITIZE_ADDRESS;
|
Emit GIMPLE directly instead of gimplifying GENERIC.
This patch cleanups the instrumentation code generation by emitting
GIMPLE directly, as opposed to emitting GENERIC tree and then
gimplifying them. It also does some cleanups here and there
* Makefile.in (GTFILES): Add $(srcdir)/asan.c.
(asan.o): Update the dependencies of asan.o.
* asan.c (tm.h, tree.h, tm_p.h, basic-block.h, flags.h
function.h, tree-inline.h, tree-dump.h, diagnostic.h, demangle.h,
langhooks.h, ggc.h, cgraph.h, gimple.h): Remove these unused but
included headers.
(shadow_ptr_types): New variable.
(report_error_func): Change is_store argument to bool, don't append
newline to function name.
(PROB_VERY_UNLIKELY, PROB_ALWAYS): Define.
(build_check_stmt): Change is_store argument to bool. Emit GIMPLE
directly instead of creating trees and gimplifying them. Mark
the error reporting function as very unlikely.
(instrument_derefs): Change is_store argument to bool. Use
int_size_in_bytes to compute size_in_bytes, simplify size check.
Use build_fold_addr_expr instead of build_addr.
(transform_statements): Adjust instrument_derefs caller.
Use gimple_assign_single_p as stmt test. Don't look at MEM refs
in rhs2.
(asan_init_shadow_ptr_types): New function.
(asan_instrument): Don't push/pop gimplify context.
Call asan_init_shadow_ptr_types if not yet initialized.
* asan.h (ASAN_SHADOW_SHIFT): Adjust comment.
Co-Authored-By: Dodji Seketeli <dodji@redhat.com>
Co-Authored-By: Xinliang David Li <davidxl@google.com>
From-SVN: r193434
2012-11-12 16:51:53 +01:00
|
|
|
}
|
|
|
|
|
2016-11-07 11:23:38 +01:00
|
|
|
/* Poison or unpoison (depending on IS_CLOBBER variable) shadow memory based
|
|
|
|
on SHADOW address. Newly added statements will be added to ITER with
|
|
|
|
given location LOC. We mark SIZE bytes in shadow memory, where
|
|
|
|
LAST_CHUNK_SIZE is greater than zero in situation where we are at the
|
|
|
|
end of a variable. */
|
|
|
|
|
|
|
|
static void
|
|
|
|
asan_store_shadow_bytes (gimple_stmt_iterator *iter, location_t loc,
|
|
|
|
tree shadow,
|
|
|
|
unsigned HOST_WIDE_INT base_addr_offset,
|
|
|
|
bool is_clobber, unsigned size,
|
|
|
|
unsigned last_chunk_size)
|
|
|
|
{
|
|
|
|
tree shadow_ptr_type;
|
|
|
|
|
|
|
|
switch (size)
|
|
|
|
{
|
|
|
|
case 1:
|
|
|
|
shadow_ptr_type = shadow_ptr_types[0];
|
|
|
|
break;
|
|
|
|
case 2:
|
|
|
|
shadow_ptr_type = shadow_ptr_types[1];
|
|
|
|
break;
|
|
|
|
case 4:
|
|
|
|
shadow_ptr_type = shadow_ptr_types[2];
|
|
|
|
break;
|
|
|
|
default:
|
|
|
|
gcc_unreachable ();
|
|
|
|
}
|
|
|
|
|
|
|
|
unsigned char c = (char) is_clobber ? ASAN_STACK_MAGIC_USE_AFTER_SCOPE : 0;
|
|
|
|
unsigned HOST_WIDE_INT val = 0;
|
|
|
|
for (unsigned i = 0; i < size; ++i)
|
|
|
|
{
|
|
|
|
unsigned char shadow_c = c;
|
|
|
|
if (i == size - 1 && last_chunk_size && !is_clobber)
|
|
|
|
shadow_c = last_chunk_size;
|
|
|
|
val |= (unsigned HOST_WIDE_INT) shadow_c << (BITS_PER_UNIT * i);
|
|
|
|
}
|
|
|
|
|
|
|
|
/* Handle last chunk in unpoisoning. */
|
|
|
|
tree magic = build_int_cst (TREE_TYPE (shadow_ptr_type), val);
|
|
|
|
|
|
|
|
tree dest = build2 (MEM_REF, TREE_TYPE (shadow_ptr_type), shadow,
|
|
|
|
build_int_cst (shadow_ptr_type, base_addr_offset));
|
|
|
|
|
|
|
|
gimple *g = gimple_build_assign (dest, magic);
|
|
|
|
gimple_set_location (g, loc);
|
|
|
|
gsi_insert_after (iter, g, GSI_NEW_STMT);
|
|
|
|
}
|
|
|
|
|
|
|
|
/* Expand the ASAN_MARK builtins. */
|
|
|
|
|
|
|
|
bool
|
|
|
|
asan_expand_mark_ifn (gimple_stmt_iterator *iter)
|
|
|
|
{
|
|
|
|
gimple *g = gsi_stmt (*iter);
|
|
|
|
location_t loc = gimple_location (g);
|
2016-12-13 10:14:47 +01:00
|
|
|
HOST_WIDE_INT flag = tree_to_shwi (gimple_call_arg (g, 0));
|
|
|
|
bool is_poison = ((asan_mark_flags)flag) == ASAN_MARK_POISON;
|
2016-11-07 11:23:38 +01:00
|
|
|
|
|
|
|
tree base = gimple_call_arg (g, 1);
|
|
|
|
gcc_checking_assert (TREE_CODE (base) == ADDR_EXPR);
|
|
|
|
tree decl = TREE_OPERAND (base, 0);
|
2016-11-30 10:26:51 +01:00
|
|
|
|
|
|
|
/* For a nested function, we can have: ASAN_MARK (2, &FRAME.2.fp_input, 4) */
|
|
|
|
if (TREE_CODE (decl) == COMPONENT_REF
|
|
|
|
&& DECL_NONLOCAL_FRAME (TREE_OPERAND (decl, 0)))
|
|
|
|
decl = TREE_OPERAND (decl, 0);
|
|
|
|
|
2016-11-07 11:23:38 +01:00
|
|
|
gcc_checking_assert (TREE_CODE (decl) == VAR_DECL);
|
|
|
|
if (asan_handled_variables == NULL)
|
|
|
|
asan_handled_variables = new hash_set<tree> (16);
|
|
|
|
asan_handled_variables->add (decl);
|
|
|
|
tree len = gimple_call_arg (g, 2);
|
|
|
|
|
|
|
|
gcc_assert (tree_fits_shwi_p (len));
|
|
|
|
unsigned HOST_WIDE_INT size_in_bytes = tree_to_shwi (len);
|
|
|
|
gcc_assert (size_in_bytes);
|
|
|
|
|
|
|
|
g = gimple_build_assign (make_ssa_name (pointer_sized_int_node),
|
|
|
|
NOP_EXPR, base);
|
|
|
|
gimple_set_location (g, loc);
|
|
|
|
gsi_replace (iter, g, false);
|
|
|
|
tree base_addr = gimple_assign_lhs (g);
|
|
|
|
|
|
|
|
/* Generate direct emission if size_in_bytes is small. */
|
|
|
|
if (size_in_bytes <= ASAN_PARAM_USE_AFTER_SCOPE_DIRECT_EMISSION_THRESHOLD)
|
|
|
|
{
|
|
|
|
unsigned HOST_WIDE_INT shadow_size = shadow_mem_size (size_in_bytes);
|
|
|
|
|
|
|
|
tree shadow = build_shadow_mem_access (iter, loc, base_addr,
|
|
|
|
shadow_ptr_types[0], true);
|
|
|
|
|
|
|
|
for (unsigned HOST_WIDE_INT offset = 0; offset < shadow_size;)
|
|
|
|
{
|
|
|
|
unsigned size = 1;
|
|
|
|
if (shadow_size - offset >= 4)
|
|
|
|
size = 4;
|
|
|
|
else if (shadow_size - offset >= 2)
|
|
|
|
size = 2;
|
|
|
|
|
|
|
|
unsigned HOST_WIDE_INT last_chunk_size = 0;
|
|
|
|
unsigned HOST_WIDE_INT s = (offset + size) * ASAN_SHADOW_GRANULARITY;
|
|
|
|
if (s > size_in_bytes)
|
|
|
|
last_chunk_size = ASAN_SHADOW_GRANULARITY - (s - size_in_bytes);
|
|
|
|
|
2016-12-13 10:14:47 +01:00
|
|
|
asan_store_shadow_bytes (iter, loc, shadow, offset, is_poison,
|
2016-11-07 11:23:38 +01:00
|
|
|
size, last_chunk_size);
|
|
|
|
offset += size;
|
|
|
|
}
|
|
|
|
}
|
|
|
|
else
|
|
|
|
{
|
|
|
|
g = gimple_build_assign (make_ssa_name (pointer_sized_int_node),
|
|
|
|
NOP_EXPR, len);
|
|
|
|
gimple_set_location (g, loc);
|
|
|
|
gsi_insert_before (iter, g, GSI_SAME_STMT);
|
|
|
|
tree sz_arg = gimple_assign_lhs (g);
|
|
|
|
|
2016-12-13 14:33:35 +01:00
|
|
|
tree fun
|
|
|
|
= builtin_decl_implicit (is_poison ? BUILT_IN_ASAN_POISON_STACK_MEMORY
|
|
|
|
: BUILT_IN_ASAN_UNPOISON_STACK_MEMORY);
|
2016-11-07 11:23:38 +01:00
|
|
|
g = gimple_build_call (fun, 2, base_addr, sz_arg);
|
|
|
|
gimple_set_location (g, loc);
|
|
|
|
gsi_insert_after (iter, g, GSI_NEW_STMT);
|
|
|
|
}
|
|
|
|
|
|
|
|
return false;
|
|
|
|
}
|
|
|
|
|
2014-08-11 08:12:12 +02:00
|
|
|
/* Expand the ASAN_{LOAD,STORE} builtins. */
|
|
|
|
|
2014-11-04 20:43:01 +01:00
|
|
|
bool
|
2014-08-11 08:12:12 +02:00
|
|
|
asan_expand_check_ifn (gimple_stmt_iterator *iter, bool use_calls)
|
|
|
|
{
|
2015-09-20 02:52:59 +02:00
|
|
|
gimple *g = gsi_stmt (*iter);
|
2014-08-11 08:12:12 +02:00
|
|
|
location_t loc = gimple_location (g);
|
2015-11-23 10:15:13 +01:00
|
|
|
bool recover_p;
|
|
|
|
if (flag_sanitize & SANITIZE_USER_ADDRESS)
|
|
|
|
recover_p = (flag_sanitize_recover & SANITIZE_USER_ADDRESS) != 0;
|
|
|
|
else
|
|
|
|
recover_p = (flag_sanitize_recover & SANITIZE_KERNEL_ADDRESS) != 0;
|
2014-10-28 11:33:04 +01:00
|
|
|
|
2014-08-11 08:12:12 +02:00
|
|
|
HOST_WIDE_INT flags = tree_to_shwi (gimple_call_arg (g, 0));
|
|
|
|
gcc_assert (flags < ASAN_CHECK_LAST);
|
|
|
|
bool is_scalar_access = (flags & ASAN_CHECK_SCALAR_ACCESS) != 0;
|
|
|
|
bool is_store = (flags & ASAN_CHECK_STORE) != 0;
|
|
|
|
bool is_non_zero_len = (flags & ASAN_CHECK_NON_ZERO_LEN) != 0;
|
|
|
|
|
|
|
|
tree base = gimple_call_arg (g, 1);
|
|
|
|
tree len = gimple_call_arg (g, 2);
|
2014-09-19 10:29:04 +02:00
|
|
|
HOST_WIDE_INT align = tree_to_shwi (gimple_call_arg (g, 3));
|
2014-08-11 08:12:12 +02:00
|
|
|
|
|
|
|
HOST_WIDE_INT size_in_bytes
|
|
|
|
= is_scalar_access && tree_fits_shwi_p (len) ? tree_to_shwi (len) : -1;
|
|
|
|
|
|
|
|
if (use_calls)
|
|
|
|
{
|
|
|
|
/* Instrument using callbacks. */
|
2015-09-20 02:52:59 +02:00
|
|
|
gimple *g = gimple_build_assign (make_ssa_name (pointer_sized_int_node),
|
gimple.h (gimple_build_assign_stat): Remove prototype.
* gimple.h (gimple_build_assign_stat): Remove prototype.
(gimple_build_assign): Remove define. Add overload prototypes
with tree lhs and either a tree rhs, or enum tree_code and
1, 2 or 3 tree operands.
* gimple.c (gimple_build_assign_stat): Renamed to...
(gimple_build_assign): ... this. Add overloads with
enum tree_code and 1, 2 or 3 tree operands.
(gimple_build_assign_with_ops): Remove 1 and 2 operand overloads.
Rename the 3 operand overload to ...
(gimple_build_assign_1): ... this. Make it static inline.
* tree-ssa-strlen.c (get_string_length): Use gimple_build_assign
instead of gimple_build_assign_with_ops, swap the order of first
two arguments and adjust formatting where necessary.
* tree-vect-slp.c (vect_get_constant_vectors,
vect_create_mask_and_perm): Likewise.
* tree-ssa-forwprop.c (simplify_rotate): Likewise.
* asan.c (build_shadow_mem_access, maybe_create_ssa_name,
maybe_cast_to_ptrmode, asan_expand_check_ifn): Likewise.
* tsan.c (instrument_builtin_call): Likewise.
* tree-chkp.c (chkp_compute_bounds_for_assignment,
chkp_generate_extern_var_bounds): Likewise.
* tree-loop-distribution.c (generate_memset_builtin): Likewise.
* tree-ssa-loop-im.c (rewrite_reciprocal): Likewise.
* gimple-builder.c (build_assign, build_type_cast): Likewise.
* tree-vect-loop-manip.c (vect_create_cond_for_align_checks): Likewise.
* value-prof.c (gimple_divmod_fixed_value, gimple_mod_pow2,
gimple_mod_subtract): Likewise.
* gimple-match-head.c (maybe_push_res_to_seq): Likewise.
* tree-vect-patterns.c (vect_recog_dot_prod_pattern,
vect_recog_sad_pattern, vect_handle_widen_op_by_const,
vect_recog_widen_mult_pattern, vect_recog_pow_pattern,
vect_recog_widen_sum_pattern, vect_operation_fits_smaller_type,
vect_recog_over_widening_pattern, vect_recog_widen_shift_pattern,
vect_recog_rotate_pattern, vect_recog_vector_vector_shift_pattern,
vect_recog_divmod_pattern, vect_recog_mixed_size_cond_pattern,
adjust_bool_pattern_cast, adjust_bool_pattern,
vect_recog_bool_pattern): Likewise.
* gimple-ssa-strength-reduction.c (create_add_on_incoming_edge,
insert_initializers, introduce_cast_before_cand,
replace_one_candidate): Likewise.
* tree-ssa-math-opts.c (insert_reciprocals, powi_as_mults_1,
powi_as_mults, build_and_insert_binop, build_and_insert_cast,
pass_cse_sincos::execute, bswap_replace, convert_mult_to_fma):
Likewise.
* tree-tailcall.c (adjust_return_value_with_ops,
update_accumulator_with_ops): Likewise.
* tree-predcom.c (reassociate_to_the_same_stmt): Likewise.
* tree-ssa-reassoc.c (build_and_add_sum,
optimize_range_tests_to_bit_test, update_ops,
maybe_optimize_range_tests, rewrite_expr_tree, linearize_expr,
negate_value, repropagate_negates, attempt_builtin_powi,
reassociate_bb): Likewise.
* tree-vect-loop.c (vect_is_simple_reduction_1,
get_initial_def_for_induction, vect_create_epilog_for_reduction):
Likewise.
* ipa-split.c (split_function): Likewise.
* tree-ssa-phiopt.c (conditional_replacement, minmax_replacement,
abs_replacement, neg_replacement): Likewise.
* tree-profile.c (gimple_gen_edge_profiler): Likewise.
* tree-vrp.c (simplify_truth_ops_using_ranges,
simplify_float_conversion_using_ranges,
simplify_internal_call_using_ranges): Likewise.
* gimple-fold.c (rewrite_to_defined_overflow, gimple_build): Likewise.
* tree-vect-generic.c (expand_vector_divmod,
optimize_vector_constructor): Likewise.
* ubsan.c (ubsan_expand_null_ifn, ubsan_expand_objsize_ifn,
instrument_bool_enum_load): Likewise.
* tree-ssa-loop-manip.c (create_iv): Likewise.
* omp-low.c (lower_rec_input_clauses, expand_omp_for_generic,
expand_omp_for_static_nochunk, expand_omp_for_static_chunk,
expand_cilk_for, simd_clone_adjust): Likewise.
* trans-mem.c (expand_transaction): Likewise.
* tree-vect-data-refs.c (bump_vector_ptr, vect_permute_store_chain,
vect_setup_realignment, vect_permute_load_chain,
vect_shift_permute_load_chain): Likewise.
* tree-vect-stmts.c (vect_init_vector, vectorizable_mask_load_store,
vectorizable_simd_clone_call, vect_gen_widened_results_half,
vect_create_vectorized_demotion_stmts, vectorizable_conversion,
vectorizable_shift, vectorizable_operation, vectorizable_store,
permute_vec_elements, vectorizable_load): Likewise.
From-SVN: r218216
2014-12-01 14:58:10 +01:00
|
|
|
NOP_EXPR, base);
|
2014-08-11 08:12:12 +02:00
|
|
|
gimple_set_location (g, loc);
|
|
|
|
gsi_insert_before (iter, g, GSI_SAME_STMT);
|
|
|
|
tree base_addr = gimple_assign_lhs (g);
|
|
|
|
|
|
|
|
int nargs;
|
2014-10-28 11:33:04 +01:00
|
|
|
tree fun = check_func (is_store, recover_p, size_in_bytes, &nargs);
|
2014-08-11 08:12:12 +02:00
|
|
|
if (nargs == 1)
|
|
|
|
g = gimple_build_call (fun, 1, base_addr);
|
|
|
|
else
|
|
|
|
{
|
|
|
|
gcc_assert (nargs == 2);
|
gimple.h (gimple_build_assign_stat): Remove prototype.
* gimple.h (gimple_build_assign_stat): Remove prototype.
(gimple_build_assign): Remove define. Add overload prototypes
with tree lhs and either a tree rhs, or enum tree_code and
1, 2 or 3 tree operands.
* gimple.c (gimple_build_assign_stat): Renamed to...
(gimple_build_assign): ... this. Add overloads with
enum tree_code and 1, 2 or 3 tree operands.
(gimple_build_assign_with_ops): Remove 1 and 2 operand overloads.
Rename the 3 operand overload to ...
(gimple_build_assign_1): ... this. Make it static inline.
* tree-ssa-strlen.c (get_string_length): Use gimple_build_assign
instead of gimple_build_assign_with_ops, swap the order of first
two arguments and adjust formatting where necessary.
* tree-vect-slp.c (vect_get_constant_vectors,
vect_create_mask_and_perm): Likewise.
* tree-ssa-forwprop.c (simplify_rotate): Likewise.
* asan.c (build_shadow_mem_access, maybe_create_ssa_name,
maybe_cast_to_ptrmode, asan_expand_check_ifn): Likewise.
* tsan.c (instrument_builtin_call): Likewise.
* tree-chkp.c (chkp_compute_bounds_for_assignment,
chkp_generate_extern_var_bounds): Likewise.
* tree-loop-distribution.c (generate_memset_builtin): Likewise.
* tree-ssa-loop-im.c (rewrite_reciprocal): Likewise.
* gimple-builder.c (build_assign, build_type_cast): Likewise.
* tree-vect-loop-manip.c (vect_create_cond_for_align_checks): Likewise.
* value-prof.c (gimple_divmod_fixed_value, gimple_mod_pow2,
gimple_mod_subtract): Likewise.
* gimple-match-head.c (maybe_push_res_to_seq): Likewise.
* tree-vect-patterns.c (vect_recog_dot_prod_pattern,
vect_recog_sad_pattern, vect_handle_widen_op_by_const,
vect_recog_widen_mult_pattern, vect_recog_pow_pattern,
vect_recog_widen_sum_pattern, vect_operation_fits_smaller_type,
vect_recog_over_widening_pattern, vect_recog_widen_shift_pattern,
vect_recog_rotate_pattern, vect_recog_vector_vector_shift_pattern,
vect_recog_divmod_pattern, vect_recog_mixed_size_cond_pattern,
adjust_bool_pattern_cast, adjust_bool_pattern,
vect_recog_bool_pattern): Likewise.
* gimple-ssa-strength-reduction.c (create_add_on_incoming_edge,
insert_initializers, introduce_cast_before_cand,
replace_one_candidate): Likewise.
* tree-ssa-math-opts.c (insert_reciprocals, powi_as_mults_1,
powi_as_mults, build_and_insert_binop, build_and_insert_cast,
pass_cse_sincos::execute, bswap_replace, convert_mult_to_fma):
Likewise.
* tree-tailcall.c (adjust_return_value_with_ops,
update_accumulator_with_ops): Likewise.
* tree-predcom.c (reassociate_to_the_same_stmt): Likewise.
* tree-ssa-reassoc.c (build_and_add_sum,
optimize_range_tests_to_bit_test, update_ops,
maybe_optimize_range_tests, rewrite_expr_tree, linearize_expr,
negate_value, repropagate_negates, attempt_builtin_powi,
reassociate_bb): Likewise.
* tree-vect-loop.c (vect_is_simple_reduction_1,
get_initial_def_for_induction, vect_create_epilog_for_reduction):
Likewise.
* ipa-split.c (split_function): Likewise.
* tree-ssa-phiopt.c (conditional_replacement, minmax_replacement,
abs_replacement, neg_replacement): Likewise.
* tree-profile.c (gimple_gen_edge_profiler): Likewise.
* tree-vrp.c (simplify_truth_ops_using_ranges,
simplify_float_conversion_using_ranges,
simplify_internal_call_using_ranges): Likewise.
* gimple-fold.c (rewrite_to_defined_overflow, gimple_build): Likewise.
* tree-vect-generic.c (expand_vector_divmod,
optimize_vector_constructor): Likewise.
* ubsan.c (ubsan_expand_null_ifn, ubsan_expand_objsize_ifn,
instrument_bool_enum_load): Likewise.
* tree-ssa-loop-manip.c (create_iv): Likewise.
* omp-low.c (lower_rec_input_clauses, expand_omp_for_generic,
expand_omp_for_static_nochunk, expand_omp_for_static_chunk,
expand_cilk_for, simd_clone_adjust): Likewise.
* trans-mem.c (expand_transaction): Likewise.
* tree-vect-data-refs.c (bump_vector_ptr, vect_permute_store_chain,
vect_setup_realignment, vect_permute_load_chain,
vect_shift_permute_load_chain): Likewise.
* tree-vect-stmts.c (vect_init_vector, vectorizable_mask_load_store,
vectorizable_simd_clone_call, vect_gen_widened_results_half,
vect_create_vectorized_demotion_stmts, vectorizable_conversion,
vectorizable_shift, vectorizable_operation, vectorizable_store,
permute_vec_elements, vectorizable_load): Likewise.
From-SVN: r218216
2014-12-01 14:58:10 +01:00
|
|
|
g = gimple_build_assign (make_ssa_name (pointer_sized_int_node),
|
|
|
|
NOP_EXPR, len);
|
2014-08-11 08:12:12 +02:00
|
|
|
gimple_set_location (g, loc);
|
|
|
|
gsi_insert_before (iter, g, GSI_SAME_STMT);
|
|
|
|
tree sz_arg = gimple_assign_lhs (g);
|
|
|
|
g = gimple_build_call (fun, nargs, base_addr, sz_arg);
|
|
|
|
}
|
|
|
|
gimple_set_location (g, loc);
|
|
|
|
gsi_replace (iter, g, false);
|
|
|
|
return false;
|
|
|
|
}
|
|
|
|
|
|
|
|
HOST_WIDE_INT real_size_in_bytes = size_in_bytes == -1 ? 1 : size_in_bytes;
|
|
|
|
|
|
|
|
tree shadow_ptr_type = shadow_ptr_types[real_size_in_bytes == 16 ? 1 : 0];
|
|
|
|
tree shadow_type = TREE_TYPE (shadow_ptr_type);
|
|
|
|
|
|
|
|
gimple_stmt_iterator gsi = *iter;
|
|
|
|
|
|
|
|
if (!is_non_zero_len)
|
|
|
|
{
|
|
|
|
/* So, the length of the memory area to asan-protect is
|
|
|
|
non-constant. Let's guard the generated instrumentation code
|
|
|
|
like:
|
|
|
|
|
|
|
|
if (len != 0)
|
|
|
|
{
|
|
|
|
//asan instrumentation code goes here.
|
|
|
|
}
|
|
|
|
// falltrough instructions, starting with *ITER. */
|
|
|
|
|
|
|
|
g = gimple_build_cond (NE_EXPR,
|
|
|
|
len,
|
|
|
|
build_int_cst (TREE_TYPE (len), 0),
|
|
|
|
NULL_TREE, NULL_TREE);
|
|
|
|
gimple_set_location (g, loc);
|
|
|
|
|
|
|
|
basic_block then_bb, fallthrough_bb;
|
2014-11-19 18:00:54 +01:00
|
|
|
insert_if_then_before_iter (as_a <gcond *> (g), iter,
|
|
|
|
/*then_more_likely_p=*/true,
|
|
|
|
&then_bb, &fallthrough_bb);
|
2014-08-11 08:12:12 +02:00
|
|
|
/* Note that fallthrough_bb starts with the statement that was
|
|
|
|
pointed to by ITER. */
|
|
|
|
|
|
|
|
/* The 'then block' of the 'if (len != 0) condition is where
|
|
|
|
we'll generate the asan instrumentation code now. */
|
|
|
|
gsi = gsi_last_bb (then_bb);
|
|
|
|
}
|
|
|
|
|
|
|
|
/* Get an iterator on the point where we can add the condition
|
|
|
|
statement for the instrumentation. */
|
|
|
|
basic_block then_bb, else_bb;
|
|
|
|
gsi = create_cond_insert_point (&gsi, /*before_p*/false,
|
|
|
|
/*then_more_likely_p=*/false,
|
2014-10-28 11:33:04 +01:00
|
|
|
/*create_then_fallthru_edge*/recover_p,
|
2014-08-11 08:12:12 +02:00
|
|
|
&then_bb,
|
|
|
|
&else_bb);
|
|
|
|
|
gimple.h (gimple_build_assign_stat): Remove prototype.
* gimple.h (gimple_build_assign_stat): Remove prototype.
(gimple_build_assign): Remove define. Add overload prototypes
with tree lhs and either a tree rhs, or enum tree_code and
1, 2 or 3 tree operands.
* gimple.c (gimple_build_assign_stat): Renamed to...
(gimple_build_assign): ... this. Add overloads with
enum tree_code and 1, 2 or 3 tree operands.
(gimple_build_assign_with_ops): Remove 1 and 2 operand overloads.
Rename the 3 operand overload to ...
(gimple_build_assign_1): ... this. Make it static inline.
* tree-ssa-strlen.c (get_string_length): Use gimple_build_assign
instead of gimple_build_assign_with_ops, swap the order of first
two arguments and adjust formatting where necessary.
* tree-vect-slp.c (vect_get_constant_vectors,
vect_create_mask_and_perm): Likewise.
* tree-ssa-forwprop.c (simplify_rotate): Likewise.
* asan.c (build_shadow_mem_access, maybe_create_ssa_name,
maybe_cast_to_ptrmode, asan_expand_check_ifn): Likewise.
* tsan.c (instrument_builtin_call): Likewise.
* tree-chkp.c (chkp_compute_bounds_for_assignment,
chkp_generate_extern_var_bounds): Likewise.
* tree-loop-distribution.c (generate_memset_builtin): Likewise.
* tree-ssa-loop-im.c (rewrite_reciprocal): Likewise.
* gimple-builder.c (build_assign, build_type_cast): Likewise.
* tree-vect-loop-manip.c (vect_create_cond_for_align_checks): Likewise.
* value-prof.c (gimple_divmod_fixed_value, gimple_mod_pow2,
gimple_mod_subtract): Likewise.
* gimple-match-head.c (maybe_push_res_to_seq): Likewise.
* tree-vect-patterns.c (vect_recog_dot_prod_pattern,
vect_recog_sad_pattern, vect_handle_widen_op_by_const,
vect_recog_widen_mult_pattern, vect_recog_pow_pattern,
vect_recog_widen_sum_pattern, vect_operation_fits_smaller_type,
vect_recog_over_widening_pattern, vect_recog_widen_shift_pattern,
vect_recog_rotate_pattern, vect_recog_vector_vector_shift_pattern,
vect_recog_divmod_pattern, vect_recog_mixed_size_cond_pattern,
adjust_bool_pattern_cast, adjust_bool_pattern,
vect_recog_bool_pattern): Likewise.
* gimple-ssa-strength-reduction.c (create_add_on_incoming_edge,
insert_initializers, introduce_cast_before_cand,
replace_one_candidate): Likewise.
* tree-ssa-math-opts.c (insert_reciprocals, powi_as_mults_1,
powi_as_mults, build_and_insert_binop, build_and_insert_cast,
pass_cse_sincos::execute, bswap_replace, convert_mult_to_fma):
Likewise.
* tree-tailcall.c (adjust_return_value_with_ops,
update_accumulator_with_ops): Likewise.
* tree-predcom.c (reassociate_to_the_same_stmt): Likewise.
* tree-ssa-reassoc.c (build_and_add_sum,
optimize_range_tests_to_bit_test, update_ops,
maybe_optimize_range_tests, rewrite_expr_tree, linearize_expr,
negate_value, repropagate_negates, attempt_builtin_powi,
reassociate_bb): Likewise.
* tree-vect-loop.c (vect_is_simple_reduction_1,
get_initial_def_for_induction, vect_create_epilog_for_reduction):
Likewise.
* ipa-split.c (split_function): Likewise.
* tree-ssa-phiopt.c (conditional_replacement, minmax_replacement,
abs_replacement, neg_replacement): Likewise.
* tree-profile.c (gimple_gen_edge_profiler): Likewise.
* tree-vrp.c (simplify_truth_ops_using_ranges,
simplify_float_conversion_using_ranges,
simplify_internal_call_using_ranges): Likewise.
* gimple-fold.c (rewrite_to_defined_overflow, gimple_build): Likewise.
* tree-vect-generic.c (expand_vector_divmod,
optimize_vector_constructor): Likewise.
* ubsan.c (ubsan_expand_null_ifn, ubsan_expand_objsize_ifn,
instrument_bool_enum_load): Likewise.
* tree-ssa-loop-manip.c (create_iv): Likewise.
* omp-low.c (lower_rec_input_clauses, expand_omp_for_generic,
expand_omp_for_static_nochunk, expand_omp_for_static_chunk,
expand_cilk_for, simd_clone_adjust): Likewise.
* trans-mem.c (expand_transaction): Likewise.
* tree-vect-data-refs.c (bump_vector_ptr, vect_permute_store_chain,
vect_setup_realignment, vect_permute_load_chain,
vect_shift_permute_load_chain): Likewise.
* tree-vect-stmts.c (vect_init_vector, vectorizable_mask_load_store,
vectorizable_simd_clone_call, vect_gen_widened_results_half,
vect_create_vectorized_demotion_stmts, vectorizable_conversion,
vectorizable_shift, vectorizable_operation, vectorizable_store,
permute_vec_elements, vectorizable_load): Likewise.
From-SVN: r218216
2014-12-01 14:58:10 +01:00
|
|
|
g = gimple_build_assign (make_ssa_name (pointer_sized_int_node),
|
|
|
|
NOP_EXPR, base);
|
2014-08-11 08:12:12 +02:00
|
|
|
gimple_set_location (g, loc);
|
|
|
|
gsi_insert_before (&gsi, g, GSI_NEW_STMT);
|
|
|
|
tree base_addr = gimple_assign_lhs (g);
|
|
|
|
|
|
|
|
tree t = NULL_TREE;
|
|
|
|
if (real_size_in_bytes >= 8)
|
|
|
|
{
|
|
|
|
tree shadow = build_shadow_mem_access (&gsi, loc, base_addr,
|
|
|
|
shadow_ptr_type);
|
|
|
|
t = shadow;
|
|
|
|
}
|
|
|
|
else
|
|
|
|
{
|
|
|
|
/* Slow path for 1, 2 and 4 byte accesses. */
|
2014-10-28 13:36:54 +01:00
|
|
|
/* Test (shadow != 0)
|
|
|
|
& ((base_addr & 7) + (real_size_in_bytes - 1)) >= shadow). */
|
|
|
|
tree shadow = build_shadow_mem_access (&gsi, loc, base_addr,
|
|
|
|
shadow_ptr_type);
|
2015-09-20 02:52:59 +02:00
|
|
|
gimple *shadow_test = build_assign (NE_EXPR, shadow, 0);
|
2014-10-28 13:36:54 +01:00
|
|
|
gimple_seq seq = NULL;
|
|
|
|
gimple_seq_add_stmt (&seq, shadow_test);
|
|
|
|
/* Aligned (>= 8 bytes) can test just
|
|
|
|
(real_size_in_bytes - 1 >= shadow), as base_addr & 7 is known
|
|
|
|
to be 0. */
|
|
|
|
if (align < 8)
|
2014-08-11 08:12:12 +02:00
|
|
|
{
|
2014-10-28 13:36:54 +01:00
|
|
|
gimple_seq_add_stmt (&seq, build_assign (BIT_AND_EXPR,
|
|
|
|
base_addr, 7));
|
|
|
|
gimple_seq_add_stmt (&seq,
|
|
|
|
build_type_cast (shadow_type,
|
|
|
|
gimple_seq_last (seq)));
|
|
|
|
if (real_size_in_bytes > 1)
|
|
|
|
gimple_seq_add_stmt (&seq,
|
|
|
|
build_assign (PLUS_EXPR,
|
|
|
|
gimple_seq_last (seq),
|
|
|
|
real_size_in_bytes - 1));
|
|
|
|
t = gimple_assign_lhs (gimple_seq_last_stmt (seq));
|
2014-08-11 08:12:12 +02:00
|
|
|
}
|
2014-10-28 13:36:54 +01:00
|
|
|
else
|
|
|
|
t = build_int_cst (shadow_type, real_size_in_bytes - 1);
|
|
|
|
gimple_seq_add_stmt (&seq, build_assign (GE_EXPR, t, shadow));
|
|
|
|
gimple_seq_add_stmt (&seq, build_assign (BIT_AND_EXPR, shadow_test,
|
|
|
|
gimple_seq_last (seq)));
|
|
|
|
t = gimple_assign_lhs (gimple_seq_last (seq));
|
|
|
|
gimple_seq_set_location (seq, loc);
|
|
|
|
gsi_insert_seq_after (&gsi, seq, GSI_CONTINUE_LINKING);
|
2014-08-11 08:12:12 +02:00
|
|
|
|
|
|
|
/* For non-constant, misaligned or otherwise weird access sizes,
|
2014-10-28 13:36:54 +01:00
|
|
|
check first and last byte. */
|
|
|
|
if (size_in_bytes == -1)
|
2014-08-11 08:12:12 +02:00
|
|
|
{
|
gimple.h (gimple_build_assign_stat): Remove prototype.
* gimple.h (gimple_build_assign_stat): Remove prototype.
(gimple_build_assign): Remove define. Add overload prototypes
with tree lhs and either a tree rhs, or enum tree_code and
1, 2 or 3 tree operands.
* gimple.c (gimple_build_assign_stat): Renamed to...
(gimple_build_assign): ... this. Add overloads with
enum tree_code and 1, 2 or 3 tree operands.
(gimple_build_assign_with_ops): Remove 1 and 2 operand overloads.
Rename the 3 operand overload to ...
(gimple_build_assign_1): ... this. Make it static inline.
* tree-ssa-strlen.c (get_string_length): Use gimple_build_assign
instead of gimple_build_assign_with_ops, swap the order of first
two arguments and adjust formatting where necessary.
* tree-vect-slp.c (vect_get_constant_vectors,
vect_create_mask_and_perm): Likewise.
* tree-ssa-forwprop.c (simplify_rotate): Likewise.
* asan.c (build_shadow_mem_access, maybe_create_ssa_name,
maybe_cast_to_ptrmode, asan_expand_check_ifn): Likewise.
* tsan.c (instrument_builtin_call): Likewise.
* tree-chkp.c (chkp_compute_bounds_for_assignment,
chkp_generate_extern_var_bounds): Likewise.
* tree-loop-distribution.c (generate_memset_builtin): Likewise.
* tree-ssa-loop-im.c (rewrite_reciprocal): Likewise.
* gimple-builder.c (build_assign, build_type_cast): Likewise.
* tree-vect-loop-manip.c (vect_create_cond_for_align_checks): Likewise.
* value-prof.c (gimple_divmod_fixed_value, gimple_mod_pow2,
gimple_mod_subtract): Likewise.
* gimple-match-head.c (maybe_push_res_to_seq): Likewise.
* tree-vect-patterns.c (vect_recog_dot_prod_pattern,
vect_recog_sad_pattern, vect_handle_widen_op_by_const,
vect_recog_widen_mult_pattern, vect_recog_pow_pattern,
vect_recog_widen_sum_pattern, vect_operation_fits_smaller_type,
vect_recog_over_widening_pattern, vect_recog_widen_shift_pattern,
vect_recog_rotate_pattern, vect_recog_vector_vector_shift_pattern,
vect_recog_divmod_pattern, vect_recog_mixed_size_cond_pattern,
adjust_bool_pattern_cast, adjust_bool_pattern,
vect_recog_bool_pattern): Likewise.
* gimple-ssa-strength-reduction.c (create_add_on_incoming_edge,
insert_initializers, introduce_cast_before_cand,
replace_one_candidate): Likewise.
* tree-ssa-math-opts.c (insert_reciprocals, powi_as_mults_1,
powi_as_mults, build_and_insert_binop, build_and_insert_cast,
pass_cse_sincos::execute, bswap_replace, convert_mult_to_fma):
Likewise.
* tree-tailcall.c (adjust_return_value_with_ops,
update_accumulator_with_ops): Likewise.
* tree-predcom.c (reassociate_to_the_same_stmt): Likewise.
* tree-ssa-reassoc.c (build_and_add_sum,
optimize_range_tests_to_bit_test, update_ops,
maybe_optimize_range_tests, rewrite_expr_tree, linearize_expr,
negate_value, repropagate_negates, attempt_builtin_powi,
reassociate_bb): Likewise.
* tree-vect-loop.c (vect_is_simple_reduction_1,
get_initial_def_for_induction, vect_create_epilog_for_reduction):
Likewise.
* ipa-split.c (split_function): Likewise.
* tree-ssa-phiopt.c (conditional_replacement, minmax_replacement,
abs_replacement, neg_replacement): Likewise.
* tree-profile.c (gimple_gen_edge_profiler): Likewise.
* tree-vrp.c (simplify_truth_ops_using_ranges,
simplify_float_conversion_using_ranges,
simplify_internal_call_using_ranges): Likewise.
* gimple-fold.c (rewrite_to_defined_overflow, gimple_build): Likewise.
* tree-vect-generic.c (expand_vector_divmod,
optimize_vector_constructor): Likewise.
* ubsan.c (ubsan_expand_null_ifn, ubsan_expand_objsize_ifn,
instrument_bool_enum_load): Likewise.
* tree-ssa-loop-manip.c (create_iv): Likewise.
* omp-low.c (lower_rec_input_clauses, expand_omp_for_generic,
expand_omp_for_static_nochunk, expand_omp_for_static_chunk,
expand_cilk_for, simd_clone_adjust): Likewise.
* trans-mem.c (expand_transaction): Likewise.
* tree-vect-data-refs.c (bump_vector_ptr, vect_permute_store_chain,
vect_setup_realignment, vect_permute_load_chain,
vect_shift_permute_load_chain): Likewise.
* tree-vect-stmts.c (vect_init_vector, vectorizable_mask_load_store,
vectorizable_simd_clone_call, vect_gen_widened_results_half,
vect_create_vectorized_demotion_stmts, vectorizable_conversion,
vectorizable_shift, vectorizable_operation, vectorizable_store,
permute_vec_elements, vectorizable_load): Likewise.
From-SVN: r218216
2014-12-01 14:58:10 +01:00
|
|
|
g = gimple_build_assign (make_ssa_name (pointer_sized_int_node),
|
|
|
|
MINUS_EXPR, len,
|
|
|
|
build_int_cst (pointer_sized_int_node, 1));
|
2014-08-11 08:12:12 +02:00
|
|
|
gimple_set_location (g, loc);
|
|
|
|
gsi_insert_after (&gsi, g, GSI_NEW_STMT);
|
|
|
|
tree last = gimple_assign_lhs (g);
|
gimple.h (gimple_build_assign_stat): Remove prototype.
* gimple.h (gimple_build_assign_stat): Remove prototype.
(gimple_build_assign): Remove define. Add overload prototypes
with tree lhs and either a tree rhs, or enum tree_code and
1, 2 or 3 tree operands.
* gimple.c (gimple_build_assign_stat): Renamed to...
(gimple_build_assign): ... this. Add overloads with
enum tree_code and 1, 2 or 3 tree operands.
(gimple_build_assign_with_ops): Remove 1 and 2 operand overloads.
Rename the 3 operand overload to ...
(gimple_build_assign_1): ... this. Make it static inline.
* tree-ssa-strlen.c (get_string_length): Use gimple_build_assign
instead of gimple_build_assign_with_ops, swap the order of first
two arguments and adjust formatting where necessary.
* tree-vect-slp.c (vect_get_constant_vectors,
vect_create_mask_and_perm): Likewise.
* tree-ssa-forwprop.c (simplify_rotate): Likewise.
* asan.c (build_shadow_mem_access, maybe_create_ssa_name,
maybe_cast_to_ptrmode, asan_expand_check_ifn): Likewise.
* tsan.c (instrument_builtin_call): Likewise.
* tree-chkp.c (chkp_compute_bounds_for_assignment,
chkp_generate_extern_var_bounds): Likewise.
* tree-loop-distribution.c (generate_memset_builtin): Likewise.
* tree-ssa-loop-im.c (rewrite_reciprocal): Likewise.
* gimple-builder.c (build_assign, build_type_cast): Likewise.
* tree-vect-loop-manip.c (vect_create_cond_for_align_checks): Likewise.
* value-prof.c (gimple_divmod_fixed_value, gimple_mod_pow2,
gimple_mod_subtract): Likewise.
* gimple-match-head.c (maybe_push_res_to_seq): Likewise.
* tree-vect-patterns.c (vect_recog_dot_prod_pattern,
vect_recog_sad_pattern, vect_handle_widen_op_by_const,
vect_recog_widen_mult_pattern, vect_recog_pow_pattern,
vect_recog_widen_sum_pattern, vect_operation_fits_smaller_type,
vect_recog_over_widening_pattern, vect_recog_widen_shift_pattern,
vect_recog_rotate_pattern, vect_recog_vector_vector_shift_pattern,
vect_recog_divmod_pattern, vect_recog_mixed_size_cond_pattern,
adjust_bool_pattern_cast, adjust_bool_pattern,
vect_recog_bool_pattern): Likewise.
* gimple-ssa-strength-reduction.c (create_add_on_incoming_edge,
insert_initializers, introduce_cast_before_cand,
replace_one_candidate): Likewise.
* tree-ssa-math-opts.c (insert_reciprocals, powi_as_mults_1,
powi_as_mults, build_and_insert_binop, build_and_insert_cast,
pass_cse_sincos::execute, bswap_replace, convert_mult_to_fma):
Likewise.
* tree-tailcall.c (adjust_return_value_with_ops,
update_accumulator_with_ops): Likewise.
* tree-predcom.c (reassociate_to_the_same_stmt): Likewise.
* tree-ssa-reassoc.c (build_and_add_sum,
optimize_range_tests_to_bit_test, update_ops,
maybe_optimize_range_tests, rewrite_expr_tree, linearize_expr,
negate_value, repropagate_negates, attempt_builtin_powi,
reassociate_bb): Likewise.
* tree-vect-loop.c (vect_is_simple_reduction_1,
get_initial_def_for_induction, vect_create_epilog_for_reduction):
Likewise.
* ipa-split.c (split_function): Likewise.
* tree-ssa-phiopt.c (conditional_replacement, minmax_replacement,
abs_replacement, neg_replacement): Likewise.
* tree-profile.c (gimple_gen_edge_profiler): Likewise.
* tree-vrp.c (simplify_truth_ops_using_ranges,
simplify_float_conversion_using_ranges,
simplify_internal_call_using_ranges): Likewise.
* gimple-fold.c (rewrite_to_defined_overflow, gimple_build): Likewise.
* tree-vect-generic.c (expand_vector_divmod,
optimize_vector_constructor): Likewise.
* ubsan.c (ubsan_expand_null_ifn, ubsan_expand_objsize_ifn,
instrument_bool_enum_load): Likewise.
* tree-ssa-loop-manip.c (create_iv): Likewise.
* omp-low.c (lower_rec_input_clauses, expand_omp_for_generic,
expand_omp_for_static_nochunk, expand_omp_for_static_chunk,
expand_cilk_for, simd_clone_adjust): Likewise.
* trans-mem.c (expand_transaction): Likewise.
* tree-vect-data-refs.c (bump_vector_ptr, vect_permute_store_chain,
vect_setup_realignment, vect_permute_load_chain,
vect_shift_permute_load_chain): Likewise.
* tree-vect-stmts.c (vect_init_vector, vectorizable_mask_load_store,
vectorizable_simd_clone_call, vect_gen_widened_results_half,
vect_create_vectorized_demotion_stmts, vectorizable_conversion,
vectorizable_shift, vectorizable_operation, vectorizable_store,
permute_vec_elements, vectorizable_load): Likewise.
From-SVN: r218216
2014-12-01 14:58:10 +01:00
|
|
|
g = gimple_build_assign (make_ssa_name (pointer_sized_int_node),
|
|
|
|
PLUS_EXPR, base_addr, last);
|
2014-08-11 08:12:12 +02:00
|
|
|
gimple_set_location (g, loc);
|
|
|
|
gsi_insert_after (&gsi, g, GSI_NEW_STMT);
|
|
|
|
tree base_end_addr = gimple_assign_lhs (g);
|
|
|
|
|
|
|
|
tree shadow = build_shadow_mem_access (&gsi, loc, base_end_addr,
|
|
|
|
shadow_ptr_type);
|
2015-09-20 02:52:59 +02:00
|
|
|
gimple *shadow_test = build_assign (NE_EXPR, shadow, 0);
|
2014-08-11 08:12:12 +02:00
|
|
|
gimple_seq seq = NULL;
|
|
|
|
gimple_seq_add_stmt (&seq, shadow_test);
|
|
|
|
gimple_seq_add_stmt (&seq, build_assign (BIT_AND_EXPR,
|
|
|
|
base_end_addr, 7));
|
|
|
|
gimple_seq_add_stmt (&seq, build_type_cast (shadow_type,
|
|
|
|
gimple_seq_last (seq)));
|
|
|
|
gimple_seq_add_stmt (&seq, build_assign (GE_EXPR,
|
|
|
|
gimple_seq_last (seq),
|
|
|
|
shadow));
|
|
|
|
gimple_seq_add_stmt (&seq, build_assign (BIT_AND_EXPR, shadow_test,
|
|
|
|
gimple_seq_last (seq)));
|
2014-10-28 13:36:54 +01:00
|
|
|
gimple_seq_add_stmt (&seq, build_assign (BIT_IOR_EXPR, t,
|
|
|
|
gimple_seq_last (seq)));
|
2014-08-11 08:12:12 +02:00
|
|
|
t = gimple_assign_lhs (gimple_seq_last (seq));
|
|
|
|
gimple_seq_set_location (seq, loc);
|
|
|
|
gsi_insert_seq_after (&gsi, seq, GSI_CONTINUE_LINKING);
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
|
|
|
g = gimple_build_cond (NE_EXPR, t, build_int_cst (TREE_TYPE (t), 0),
|
|
|
|
NULL_TREE, NULL_TREE);
|
|
|
|
gimple_set_location (g, loc);
|
|
|
|
gsi_insert_after (&gsi, g, GSI_NEW_STMT);
|
|
|
|
|
|
|
|
/* Generate call to the run-time library (e.g. __asan_report_load8). */
|
|
|
|
gsi = gsi_start_bb (then_bb);
|
|
|
|
int nargs;
|
2014-10-28 11:33:04 +01:00
|
|
|
tree fun = report_error_func (is_store, recover_p, size_in_bytes, &nargs);
|
2014-08-11 08:12:12 +02:00
|
|
|
g = gimple_build_call (fun, nargs, base_addr, len);
|
|
|
|
gimple_set_location (g, loc);
|
|
|
|
gsi_insert_after (&gsi, g, GSI_NEW_STMT);
|
|
|
|
|
|
|
|
gsi_remove (iter, true);
|
|
|
|
*iter = gsi_start_bb (else_bb);
|
|
|
|
|
|
|
|
return true;
|
|
|
|
}
|
|
|
|
|
2017-01-23 13:02:13 +01:00
|
|
|
/* Create ASAN shadow variable for a VAR_DECL which has been rewritten
|
|
|
|
into SSA. Already seen VAR_DECLs are stored in SHADOW_VARS_MAPPING. */
|
|
|
|
|
|
|
|
static tree
|
|
|
|
create_asan_shadow_var (tree var_decl,
|
|
|
|
hash_map<tree, tree> &shadow_vars_mapping)
|
|
|
|
{
|
|
|
|
tree *slot = shadow_vars_mapping.get (var_decl);
|
|
|
|
if (slot == NULL)
|
|
|
|
{
|
|
|
|
tree shadow_var = copy_node (var_decl);
|
|
|
|
|
|
|
|
copy_body_data id;
|
|
|
|
memset (&id, 0, sizeof (copy_body_data));
|
|
|
|
id.src_fn = id.dst_fn = current_function_decl;
|
|
|
|
copy_decl_for_dup_finish (&id, var_decl, shadow_var);
|
|
|
|
|
|
|
|
DECL_ARTIFICIAL (shadow_var) = 1;
|
|
|
|
DECL_IGNORED_P (shadow_var) = 1;
|
|
|
|
DECL_SEEN_IN_BIND_EXPR_P (shadow_var) = 0;
|
|
|
|
gimple_add_tmp_var (shadow_var);
|
|
|
|
|
|
|
|
shadow_vars_mapping.put (var_decl, shadow_var);
|
|
|
|
return shadow_var;
|
|
|
|
}
|
|
|
|
else
|
|
|
|
return *slot;
|
|
|
|
}
|
|
|
|
|
|
|
|
bool
|
|
|
|
asan_expand_poison_ifn (gimple_stmt_iterator *iter,
|
|
|
|
bool *need_commit_edge_insert,
|
|
|
|
hash_map<tree, tree> &shadow_vars_mapping)
|
|
|
|
{
|
|
|
|
gimple *g = gsi_stmt (*iter);
|
|
|
|
tree poisoned_var = gimple_call_lhs (g);
|
|
|
|
if (!poisoned_var)
|
|
|
|
{
|
|
|
|
gsi_remove (iter, true);
|
|
|
|
return true;
|
|
|
|
}
|
|
|
|
|
|
|
|
tree shadow_var = create_asan_shadow_var (SSA_NAME_VAR (poisoned_var),
|
|
|
|
shadow_vars_mapping);
|
|
|
|
|
|
|
|
bool recover_p;
|
|
|
|
if (flag_sanitize & SANITIZE_USER_ADDRESS)
|
|
|
|
recover_p = (flag_sanitize_recover & SANITIZE_USER_ADDRESS) != 0;
|
|
|
|
else
|
|
|
|
recover_p = (flag_sanitize_recover & SANITIZE_KERNEL_ADDRESS) != 0;
|
|
|
|
tree size = DECL_SIZE_UNIT (shadow_var);
|
|
|
|
gimple *poison_call
|
|
|
|
= gimple_build_call_internal (IFN_ASAN_MARK, 3,
|
|
|
|
build_int_cst (integer_type_node,
|
|
|
|
ASAN_MARK_POISON),
|
|
|
|
build_fold_addr_expr (shadow_var), size);
|
|
|
|
|
|
|
|
use_operand_p use_p;
|
|
|
|
imm_use_iterator imm_iter;
|
|
|
|
FOR_EACH_IMM_USE_FAST (use_p, imm_iter, poisoned_var)
|
|
|
|
{
|
|
|
|
gimple *use = USE_STMT (use_p);
|
|
|
|
if (is_gimple_debug (use))
|
|
|
|
continue;
|
|
|
|
|
|
|
|
int nargs;
|
|
|
|
tree fun = report_error_func (false, recover_p, tree_to_uhwi (size),
|
|
|
|
&nargs);
|
|
|
|
|
|
|
|
gcall *call = gimple_build_call (fun, 1,
|
|
|
|
build_fold_addr_expr (shadow_var));
|
|
|
|
gimple_set_location (call, gimple_location (use));
|
|
|
|
gimple *call_to_insert = call;
|
|
|
|
|
|
|
|
/* The USE can be a gimple PHI node. If so, insert the call on
|
|
|
|
all edges leading to the PHI node. */
|
|
|
|
if (is_a <gphi *> (use))
|
|
|
|
{
|
|
|
|
gphi *phi = dyn_cast<gphi *> (use);
|
|
|
|
for (unsigned i = 0; i < gimple_phi_num_args (phi); ++i)
|
|
|
|
if (gimple_phi_arg_def (phi, i) == poisoned_var)
|
|
|
|
{
|
|
|
|
edge e = gimple_phi_arg_edge (phi, i);
|
|
|
|
|
|
|
|
if (call_to_insert == NULL)
|
|
|
|
call_to_insert = gimple_copy (call);
|
|
|
|
|
|
|
|
gsi_insert_seq_on_edge (e, call_to_insert);
|
|
|
|
*need_commit_edge_insert = true;
|
|
|
|
call_to_insert = NULL;
|
|
|
|
}
|
|
|
|
}
|
|
|
|
else
|
|
|
|
{
|
|
|
|
gimple_stmt_iterator gsi = gsi_for_stmt (use);
|
|
|
|
gsi_insert_before (&gsi, call, GSI_NEW_STMT);
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
|
|
|
SSA_NAME_IS_DEFAULT_DEF (poisoned_var) = true;
|
|
|
|
SSA_NAME_DEF_STMT (poisoned_var) = gimple_build_nop ();
|
|
|
|
gsi_replace (iter, poison_call, false);
|
|
|
|
|
|
|
|
return true;
|
|
|
|
}
|
|
|
|
|
2012-11-12 16:51:13 +01:00
|
|
|
/* Instrument the current function. */
|
|
|
|
|
|
|
|
static unsigned int
|
|
|
|
asan_instrument (void)
|
|
|
|
{
|
Emit GIMPLE directly instead of gimplifying GENERIC.
This patch cleanups the instrumentation code generation by emitting
GIMPLE directly, as opposed to emitting GENERIC tree and then
gimplifying them. It also does some cleanups here and there
* Makefile.in (GTFILES): Add $(srcdir)/asan.c.
(asan.o): Update the dependencies of asan.o.
* asan.c (tm.h, tree.h, tm_p.h, basic-block.h, flags.h
function.h, tree-inline.h, tree-dump.h, diagnostic.h, demangle.h,
langhooks.h, ggc.h, cgraph.h, gimple.h): Remove these unused but
included headers.
(shadow_ptr_types): New variable.
(report_error_func): Change is_store argument to bool, don't append
newline to function name.
(PROB_VERY_UNLIKELY, PROB_ALWAYS): Define.
(build_check_stmt): Change is_store argument to bool. Emit GIMPLE
directly instead of creating trees and gimplifying them. Mark
the error reporting function as very unlikely.
(instrument_derefs): Change is_store argument to bool. Use
int_size_in_bytes to compute size_in_bytes, simplify size check.
Use build_fold_addr_expr instead of build_addr.
(transform_statements): Adjust instrument_derefs caller.
Use gimple_assign_single_p as stmt test. Don't look at MEM refs
in rhs2.
(asan_init_shadow_ptr_types): New function.
(asan_instrument): Don't push/pop gimplify context.
Call asan_init_shadow_ptr_types if not yet initialized.
* asan.h (ASAN_SHADOW_SHIFT): Adjust comment.
Co-Authored-By: Dodji Seketeli <dodji@redhat.com>
Co-Authored-By: Xinliang David Li <davidxl@google.com>
From-SVN: r193434
2012-11-12 16:51:53 +01:00
|
|
|
if (shadow_ptr_types[0] == NULL_TREE)
|
2012-12-10 13:14:36 +01:00
|
|
|
asan_init_shadow_ptr_types ();
|
2012-11-12 16:51:13 +01:00
|
|
|
transform_statements ();
|
|
|
|
return 0;
|
|
|
|
}
|
|
|
|
|
|
|
|
static bool
|
|
|
|
gate_asan (void)
|
|
|
|
{
|
2013-08-30 18:12:58 +02:00
|
|
|
return (flag_sanitize & SANITIZE_ADDRESS) != 0
|
2013-02-28 22:23:23 +01:00
|
|
|
&& !lookup_attribute ("no_sanitize_address",
|
2012-11-23 10:02:28 +01:00
|
|
|
DECL_ATTRIBUTES (current_function_decl));
|
2012-11-12 16:51:13 +01:00
|
|
|
}
|
|
|
|
|
2013-08-05 22:16:05 +02:00
|
|
|
namespace {
|
|
|
|
|
|
|
|
const pass_data pass_data_asan =
|
2012-11-12 16:51:13 +01:00
|
|
|
{
|
2013-08-05 22:16:05 +02:00
|
|
|
GIMPLE_PASS, /* type */
|
|
|
|
"asan", /* name */
|
|
|
|
OPTGROUP_NONE, /* optinfo_flags */
|
|
|
|
TV_NONE, /* tv_id */
|
|
|
|
( PROP_ssa | PROP_cfg | PROP_gimple_leh ), /* properties_required */
|
|
|
|
0, /* properties_provided */
|
|
|
|
0, /* properties_destroyed */
|
|
|
|
0, /* todo_flags_start */
|
2014-05-06 15:35:40 +02:00
|
|
|
TODO_update_ssa, /* todo_flags_finish */
|
2012-11-12 16:51:13 +01:00
|
|
|
};
|
Emit GIMPLE directly instead of gimplifying GENERIC.
This patch cleanups the instrumentation code generation by emitting
GIMPLE directly, as opposed to emitting GENERIC tree and then
gimplifying them. It also does some cleanups here and there
* Makefile.in (GTFILES): Add $(srcdir)/asan.c.
(asan.o): Update the dependencies of asan.o.
* asan.c (tm.h, tree.h, tm_p.h, basic-block.h, flags.h
function.h, tree-inline.h, tree-dump.h, diagnostic.h, demangle.h,
langhooks.h, ggc.h, cgraph.h, gimple.h): Remove these unused but
included headers.
(shadow_ptr_types): New variable.
(report_error_func): Change is_store argument to bool, don't append
newline to function name.
(PROB_VERY_UNLIKELY, PROB_ALWAYS): Define.
(build_check_stmt): Change is_store argument to bool. Emit GIMPLE
directly instead of creating trees and gimplifying them. Mark
the error reporting function as very unlikely.
(instrument_derefs): Change is_store argument to bool. Use
int_size_in_bytes to compute size_in_bytes, simplify size check.
Use build_fold_addr_expr instead of build_addr.
(transform_statements): Adjust instrument_derefs caller.
Use gimple_assign_single_p as stmt test. Don't look at MEM refs
in rhs2.
(asan_init_shadow_ptr_types): New function.
(asan_instrument): Don't push/pop gimplify context.
Call asan_init_shadow_ptr_types if not yet initialized.
* asan.h (ASAN_SHADOW_SHIFT): Adjust comment.
Co-Authored-By: Dodji Seketeli <dodji@redhat.com>
Co-Authored-By: Xinliang David Li <davidxl@google.com>
From-SVN: r193434
2012-11-12 16:51:53 +01:00
|
|
|
|
2013-08-05 22:16:05 +02:00
|
|
|
class pass_asan : public gimple_opt_pass
|
|
|
|
{
|
|
|
|
public:
|
alloc-pool.c, [...]: Add missing whitespace before "(".
gcc/
* alloc-pool.c, asan.c, auto-inc-dec.c, basic-block.h, bb-reorder.c,
bitmap.c, bitmap.h, bt-load.c, builtins.c, calls.c, cfgcleanup.c,
cfgexpand.c, cfghooks.c, cfgloop.c, cfgloopmanip.c, cfgrtl.c, cgraph.c,
cgraph.h, cgraphbuild.c, cgraphclones.c, cgraphunit.c, collect2.c,
combine-stack-adj.c, combine.c, compare-elim.c, context.c, context.h,
cprop.c, cse.c, cselib.c, dbxout.c, dce.c, defaults.h, df-core.c,
df-problems.c, df-scan.c, df.h, diagnostic.c, double-int.c, dse.c,
dumpfile.c, dwarf2asm.c, dwarf2cfi.c, dwarf2out.c, emit-rtl.c,
errors.c, except.c, expmed.c, expr.c, file-find.c, final.c,
fixed-value.c, fold-const.c, function.c, fwprop.c, gcc-ar.c, gcc.c,
gcov-io.c, gcov-io.h, gcov.c, gcse.c, genattr-common.c, genattr.c,
genattrtab.c, genautomata.c, genconfig.c, genemit.c, genextract.c,
genflags.c, gengenrtl.c, gengtype-state.c, gengtype.c, genmodes.c,
genopinit.c, genoutput.c, genpeep.c, genpreds.c, genrecog.c,
gensupport.c, ggc-common.c, ggc-page.c, gimple-fold.c, gimple-low.c,
gimple-pretty-print.c, gimple-ssa-strength-reduction.c, gimple.c,
gimple.h, godump.c, graphite-clast-to-gimple.c,
graphite-optimize-isl.c, graphite-poly.h, graphite-sese-to-poly.c,
graphite.c, haifa-sched.c, hash-table.c, hash-table.h, hwint.c,
hwint.h, ifcvt.c, incpath.c, init-regs.c, input.h, intl.c, intl.h,
ipa-cp.c, ipa-devirt.c, ipa-inline-analysis.c, ipa-inline.c,
ipa-profile.c, ipa-pure-const.c, ipa-reference.c, ipa-split.c,
ipa-utils.c, ipa.c, ira-build.c, ira.c, jump.c, loop-doloop.c,
loop-init.c, loop-invariant.c, loop-iv.c, lower-subreg.c, lto-cgraph.c,
lto-streamer-in.c, lto-streamer-out.c, lto-wrapper.c, mcf.c,
mode-switching.c, modulo-sched.c, omp-low.c, optabs.c, opts.c,
pass_manager.h, passes.c, plugin.c, postreload-gcse.c, postreload.c,
predict.c, prefix.c, pretty-print.c, print-rtl.c, print-tree.c,
profile.c, read-md.c, real.c, real.h, recog.c, ree.c, reg-stack.c,
regcprop.c, reginfo.c, regmove.c, regrename.c, regs.h, regstat.c,
reload1.c, reorg.c, rtl.c, rtl.h, rtlanal.c, sbitmap.c, sched-rgn.c,
sdbout.c, sel-sched-ir.c, sel-sched.c, sparseset.c, stack-ptr-mod.c,
statistics.c, stmt.c, stor-layout.c, store-motion.c, streamer-hooks.h,
system.h, target-hooks-macros.h, targhooks.c, targhooks.h, toplev.c,
tracer.c, trans-mem.c, tree-browser.c, tree-call-cdce.c, tree-cfg.c,
tree-cfgcleanup.c, tree-complex.c, tree-data-ref.c, tree-data-ref.h,
tree-eh.c, tree-emutls.c, tree-flow.h, tree-if-conv.c, tree-into-ssa.c,
tree-iterator.c, tree-loop-distribution.c, tree-mudflap.c,
tree-nested.c, tree-nomudflap.c, tree-nrv.c, tree-object-size.c,
tree-optimize.c, tree-pass.h, tree-pretty-print.c, tree-profile.c,
tree-scalar-evolution.c, tree-sra.c, tree-ssa-ccp.c,
tree-ssa-coalesce.c, tree-ssa-copy.c, tree-ssa-copyrename.c,
tree-ssa-dce.c, tree-ssa-dom.c, tree-ssa-dse.c, tree-ssa-forwprop.c,
tree-ssa-ifcombine.c, tree-ssa-live.c, tree-ssa-loop-ch.c,
tree-ssa-loop-im.c, tree-ssa-loop-ivopts.c, tree-ssa-loop-prefetch.c,
tree-ssa-loop.c, tree-ssa-math-opts.c, tree-ssa-operands.c,
tree-ssa-phiopt.c, tree-ssa-phiprop.c, tree-ssa-pre.c,
tree-ssa-reassoc.c, tree-ssa-sink.c, tree-ssa-strlen.c,
tree-ssa-structalias.c, tree-ssa-threadedge.c, tree-ssa-threadupdate.c,
tree-ssa-uncprop.c, tree-ssa-uninit.c, tree-ssa.c, tree-ssanames.c,
tree-stdarg.c, tree-switch-conversion.c, tree-tailcall.c,
tree-vect-data-refs.c, tree-vect-generic.c, tree-vect-loop-manip.c,
tree-vect-stmts.c, tree-vectorizer.c, tree-vectorizer.h, tree-vrp.c,
tree.c, tree.h, tsan.c, tsystem.h, value-prof.c, var-tracking.c,
varasm.c, vec.h, vmsdbgout.c, vtable-verify.c, web.c: Add missing
whitespace before "(".
From-SVN: r203004
2013-09-28 10:42:34 +02:00
|
|
|
pass_asan (gcc::context *ctxt)
|
|
|
|
: gimple_opt_pass (pass_data_asan, ctxt)
|
2013-08-05 22:16:05 +02:00
|
|
|
{}
|
|
|
|
|
|
|
|
/* opt_pass methods: */
|
2013-09-30 22:35:40 +02:00
|
|
|
opt_pass * clone () { return new pass_asan (m_ctxt); }
|
pass current function to opt_pass::gate ()
gcc/
* passes.c (opt_pass::gate): Take function * argument.
(gate_all_early_local_passes): Merge into
(early_local_passes::gate): this.
(gate_all_early_optimizations): Merge into
(all_early_optimizations::gate): this.
(gate_all_optimizations): Mege into
(all_optimizations::gate): this.
(gate_all_optimizations_g): Merge into
(all_optimizations_g::gate): this.
(gate_rest_of_compilation): Mege into
(rest_of_compilation::gate): this.
(gate_postreload): Merge into
(postreload::gate): this.
(dump_one_pass): Pass cfun to the pass's gate method.
(execute_ipa_summary_passes): Likewise.
(execute_one_pass): Likewise.
(ipa_write_summaries_2): Likewise.
(ipa_write_optimization_summaries_1): Likewise.
(ipa_read_summaries_1): Likewise.
(ipa_read_optimization_summaries_1): Likewise.
(execute_ipa_stmt_fixups): Likewise.
* tree-pass.h (opt_pass::gate): Add function * argument.
* asan.c, auto-inc-dec.c, bb-reorder.c, bt-load.c,
combine-stack-adj.c, combine.c, compare-elim.c,
config/epiphany/resolve-sw-modes.c, config/i386/i386.c,
config/rl78/rl78.c, config/sh/sh_optimize_sett_clrt.cc,
config/sh/sh_treg_combine.cc, config/sparc/sparc.c, cprop.c, cse.c,
dce.c, df-core.c, dse.c, dwarf2cfi.c, except.c, fwprop.c, gcse.c,
gimple-ssa-isolate-paths.c, gimple-ssa-strength-reduction.c,
graphite.c, ifcvt.c, init-regs.c, ipa-cp.c, ipa-devirt.c,
ipa-profile.c, ipa-pure-const.c, ipa-reference.c, ipa-split.c, ipa.c,
loop-init.c, lower-subreg.c, mode-switching.c, modulo-sched.c,
omp-low.c, postreload-gcse.c, postreload.c, predict.c, recog.c, ree.c,
reg-stack.c, regcprop.c, regrename.c, reorg.c, sched-rgn.c,
store-motion.c, tracer.c, trans-mem.c, tree-call-cdce.c, tree-cfg.c,
tree-cfgcleanup.c, tree-complex.c, tree-eh.c, tree-emutls.c,
tree-if-conv.c, tree-into-ssa.c, tree-loop-distribution.c,
tree-nrv.c, tree-parloops.c, tree-predcom.c, tree-profile.c,
tree-sra.c, tree-ssa-ccp.c, tree-ssa-copy.c, tree-ssa-copyrename.c,
tree-ssa-dce.c, tree-ssa-dom.c, tree-ssa-dse.c, tree-ssa-forwprop.c,
tree-ssa-ifcombine.c, tree-ssa-loop-ch.c, tree-ssa-loop-im.c,
tree-ssa-loop-ivcanon.c, tree-ssa-loop-prefetch.c,
tree-ssa-loop-unswitch.c, tree-ssa-loop.c, tree-ssa-math-opts.c,
tree-ssa-phiopt.c, tree-ssa-phiprop.c, tree-ssa-pre.c,
tree-ssa-reassoc.c, tree-ssa-sink.c, tree-ssa-strlen.c,
tree-ssa-structalias.c, tree-ssa-uncprop.c, tree-ssa-uninit.c,
tree-ssa.c, tree-stdarg.c, tree-switch-conversion.c, tree-tailcall.c,
tree-vect-generic.c, tree-vectorizer.c, tree-vrp.c, tsan.c, ubsan.c,
var-tracking.c, vtable-verify.c, web.c: Adjust.
gcc/testsuite/
* g++.dg/plugin/dumb_plugin.c, g++.dg/plugin/selfasign.c,
gcc.dg/plugin/one_time_plugin.c, gcc.dg/plugin/selfasign.c: Adjust.
From-SVN: r209481
2014-04-17 14:37:16 +02:00
|
|
|
virtual bool gate (function *) { return gate_asan (); }
|
pass cfun to pass::execute
gcc/
* passes.c (opt_pass::execute): Adjust.
(pass_manager::execute_pass_mode_switching): Likewise.
(early_local_passes::execute): Likewise.
(execute_one_pass): Pass cfun to the pass's execute method.
* tree-pass.h (opt_pass::execute): Add function * argument.
* asan.c, auto-inc-dec.c, bb-reorder.c, bt-load.c, cfgcleanup.c,
cfgexpand.c, cfgrtl.c, cgraphbuild.c, combine-stack-adj.c, combine.c,
compare-elim.c, config/arc/arc.c, config/epiphany/mode-switch-use.c,
config/epiphany/resolve-sw-modes.c, config/i386/i386.c,
config/mips/mips.c, config/rl78/rl78.c, config/s390/s390.c,
config/sparc/sparc.c, cprop.c, dce.c, df-core.c, dse.c, dwarf2cfi.c,
except.c, final.c, function.c, fwprop.c, gcse.c, gimple-low.c,
gimple-ssa-isolate-paths.c, gimple-ssa-strength-reduction.c,
graphite.c, ifcvt.c, init-regs.c, ipa-cp.c, ipa-devirt.c,
ipa-inline-analysis.c, ipa-inline.c, ipa-profile.c, ipa-pure-const.c,
ipa-reference.c, ipa-split.c, ipa.c, ira.c, jump.c, loop-init.c,
lower-subreg.c, mode-switching.c, omp-low.c, postreload-gcse.c,
postreload.c, predict.c, recog.c, ree.c, reg-stack.c, regcprop.c,
reginfo.c, regrename.c, reorg.c, sched-rgn.c, stack-ptr-mod.c,
store-motion.c, tracer.c, trans-mem.c, tree-call-cdce.c, tree-cfg.c,
tree-cfgcleanup.c, tree-complex.c, tree-eh.c, tree-emutls.c,
tree-if-conv.c, tree-into-ssa.c, tree-loop-distribution.c, tree-nrv.c,
tree-object-size.c, tree-parloops.c, tree-predcom.c, tree-ssa-ccp.c,
tree-ssa-copy.c, tree-ssa-copyrename.c, tree-ssa-dce.c,
tree-ssa-dom.c, tree-ssa-dse.c, tree-ssa-forwprop.c,
tree-ssa-ifcombine.c, tree-ssa-loop-ch.c, tree-ssa-loop-im.c,
tree-ssa-loop-ivcanon.c, tree-ssa-loop-prefetch.c,
tree-ssa-loop-unswitch.c, tree-ssa-loop.c, tree-ssa-math-opts.c,
tree-ssa-phiopt.c, tree-ssa-phiprop.c, tree-ssa-pre.c,
tree-ssa-reassoc.c, tree-ssa-sink.c, tree-ssa-strlen.c,
tree-ssa-structalias.c, tree-ssa-uncprop.c, tree-ssa-uninit.c,
tree-ssa.c, tree-ssanames.c, tree-stdarg.c, tree-switch-conversion.c,
tree-tailcall.c, tree-vect-generic.c, tree-vectorizer.c, tree-vrp.c,
tree.c, tsan.c, ubsan.c, var-tracking.c, vtable-verify.c, web.c:
Adjust.
From-SVN: r209482
2014-04-17 14:37:34 +02:00
|
|
|
virtual unsigned int execute (function *) { return asan_instrument (); }
|
2013-08-05 22:16:05 +02:00
|
|
|
|
|
|
|
}; // class pass_asan
|
|
|
|
|
|
|
|
} // anon namespace
|
|
|
|
|
|
|
|
gimple_opt_pass *
|
|
|
|
make_pass_asan (gcc::context *ctxt)
|
|
|
|
{
|
|
|
|
return new pass_asan (ctxt);
|
|
|
|
}
|
|
|
|
|
|
|
|
namespace {
|
|
|
|
|
|
|
|
/* Pass descriptor for the -O0 AddressSanitizer pass (pass_asan_O0
   below).  It requires the same IL properties as the regular asan
   pass: SSA form, a CFG and lowered EH.  */

const pass_data pass_data_asan_O0 =
{
  GIMPLE_PASS, /* type */
  "asan0", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_NONE, /* tv_id */
  ( PROP_ssa | PROP_cfg | PROP_gimple_leh ), /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  TODO_update_ssa, /* todo_flags_finish */
};
|
|
|
|
|
2013-08-05 22:16:05 +02:00
|
|
|
class pass_asan_O0 : public gimple_opt_pass
|
|
|
|
{
|
|
|
|
public:
|
alloc-pool.c, [...]: Add missing whitespace before "(".
gcc/
* alloc-pool.c, asan.c, auto-inc-dec.c, basic-block.h, bb-reorder.c,
bitmap.c, bitmap.h, bt-load.c, builtins.c, calls.c, cfgcleanup.c,
cfgexpand.c, cfghooks.c, cfgloop.c, cfgloopmanip.c, cfgrtl.c, cgraph.c,
cgraph.h, cgraphbuild.c, cgraphclones.c, cgraphunit.c, collect2.c,
combine-stack-adj.c, combine.c, compare-elim.c, context.c, context.h,
cprop.c, cse.c, cselib.c, dbxout.c, dce.c, defaults.h, df-core.c,
df-problems.c, df-scan.c, df.h, diagnostic.c, double-int.c, dse.c,
dumpfile.c, dwarf2asm.c, dwarf2cfi.c, dwarf2out.c, emit-rtl.c,
errors.c, except.c, expmed.c, expr.c, file-find.c, final.c,
fixed-value.c, fold-const.c, function.c, fwprop.c, gcc-ar.c, gcc.c,
gcov-io.c, gcov-io.h, gcov.c, gcse.c, genattr-common.c, genattr.c,
genattrtab.c, genautomata.c, genconfig.c, genemit.c, genextract.c,
genflags.c, gengenrtl.c, gengtype-state.c, gengtype.c, genmodes.c,
genopinit.c, genoutput.c, genpeep.c, genpreds.c, genrecog.c,
gensupport.c, ggc-common.c, ggc-page.c, gimple-fold.c, gimple-low.c,
gimple-pretty-print.c, gimple-ssa-strength-reduction.c, gimple.c,
gimple.h, godump.c, graphite-clast-to-gimple.c,
graphite-optimize-isl.c, graphite-poly.h, graphite-sese-to-poly.c,
graphite.c, haifa-sched.c, hash-table.c, hash-table.h, hwint.c,
hwint.h, ifcvt.c, incpath.c, init-regs.c, input.h, intl.c, intl.h,
ipa-cp.c, ipa-devirt.c, ipa-inline-analysis.c, ipa-inline.c,
ipa-profile.c, ipa-pure-const.c, ipa-reference.c, ipa-split.c,
ipa-utils.c, ipa.c, ira-build.c, ira.c, jump.c, loop-doloop.c,
loop-init.c, loop-invariant.c, loop-iv.c, lower-subreg.c, lto-cgraph.c,
lto-streamer-in.c, lto-streamer-out.c, lto-wrapper.c, mcf.c,
mode-switching.c, modulo-sched.c, omp-low.c, optabs.c, opts.c,
pass_manager.h, passes.c, plugin.c, postreload-gcse.c, postreload.c,
predict.c, prefix.c, pretty-print.c, print-rtl.c, print-tree.c,
profile.c, read-md.c, real.c, real.h, recog.c, ree.c, reg-stack.c,
regcprop.c, reginfo.c, regmove.c, regrename.c, regs.h, regstat.c,
reload1.c, reorg.c, rtl.c, rtl.h, rtlanal.c, sbitmap.c, sched-rgn.c,
sdbout.c, sel-sched-ir.c, sel-sched.c, sparseset.c, stack-ptr-mod.c,
statistics.c, stmt.c, stor-layout.c, store-motion.c, streamer-hooks.h,
system.h, target-hooks-macros.h, targhooks.c, targhooks.h, toplev.c,
tracer.c, trans-mem.c, tree-browser.c, tree-call-cdce.c, tree-cfg.c,
tree-cfgcleanup.c, tree-complex.c, tree-data-ref.c, tree-data-ref.h,
tree-eh.c, tree-emutls.c, tree-flow.h, tree-if-conv.c, tree-into-ssa.c,
tree-iterator.c, tree-loop-distribution.c, tree-mudflap.c,
tree-nested.c, tree-nomudflap.c, tree-nrv.c, tree-object-size.c,
tree-optimize.c, tree-pass.h, tree-pretty-print.c, tree-profile.c,
tree-scalar-evolution.c, tree-sra.c, tree-ssa-ccp.c,
tree-ssa-coalesce.c, tree-ssa-copy.c, tree-ssa-copyrename.c,
tree-ssa-dce.c, tree-ssa-dom.c, tree-ssa-dse.c, tree-ssa-forwprop.c,
tree-ssa-ifcombine.c, tree-ssa-live.c, tree-ssa-loop-ch.c,
tree-ssa-loop-im.c, tree-ssa-loop-ivopts.c, tree-ssa-loop-prefetch.c,
tree-ssa-loop.c, tree-ssa-math-opts.c, tree-ssa-operands.c,
tree-ssa-phiopt.c, tree-ssa-phiprop.c, tree-ssa-pre.c,
tree-ssa-reassoc.c, tree-ssa-sink.c, tree-ssa-strlen.c,
tree-ssa-structalias.c, tree-ssa-threadedge.c, tree-ssa-threadupdate.c,
tree-ssa-uncprop.c, tree-ssa-uninit.c, tree-ssa.c, tree-ssanames.c,
tree-stdarg.c, tree-switch-conversion.c, tree-tailcall.c,
tree-vect-data-refs.c, tree-vect-generic.c, tree-vect-loop-manip.c,
tree-vect-stmts.c, tree-vectorizer.c, tree-vectorizer.h, tree-vrp.c,
tree.c, tree.h, tsan.c, tsystem.h, value-prof.c, var-tracking.c,
varasm.c, vec.h, vmsdbgout.c, vtable-verify.c, web.c: Add missing
whitespace before "(".
From-SVN: r203004
2013-09-28 10:42:34 +02:00
|
|
|
pass_asan_O0 (gcc::context *ctxt)
|
|
|
|
: gimple_opt_pass (pass_data_asan_O0, ctxt)
|
2013-08-05 22:16:05 +02:00
|
|
|
{}
|
|
|
|
|
|
|
|
/* opt_pass methods: */
|
pass current function to opt_pass::gate ()
gcc/
* passes.c (opt_pass::gate): Take function * argument.
(gate_all_early_local_passes): Merge into
(early_local_passes::gate): this.
(gate_all_early_optimizations): Merge into
(all_early_optimizations::gate): this.
(gate_all_optimizations): Mege into
(all_optimizations::gate): this.
(gate_all_optimizations_g): Merge into
(all_optimizations_g::gate): this.
(gate_rest_of_compilation): Mege into
(rest_of_compilation::gate): this.
(gate_postreload): Merge into
(postreload::gate): this.
(dump_one_pass): Pass cfun to the pass's gate method.
(execute_ipa_summary_passes): Likewise.
(execute_one_pass): Likewise.
(ipa_write_summaries_2): Likewise.
(ipa_write_optimization_summaries_1): Likewise.
(ipa_read_summaries_1): Likewise.
(ipa_read_optimization_summaries_1): Likewise.
(execute_ipa_stmt_fixups): Likewise.
* tree-pass.h (opt_pass::gate): Add function * argument.
* asan.c, auto-inc-dec.c, bb-reorder.c, bt-load.c,
combine-stack-adj.c, combine.c, compare-elim.c,
config/epiphany/resolve-sw-modes.c, config/i386/i386.c,
config/rl78/rl78.c, config/sh/sh_optimize_sett_clrt.cc,
config/sh/sh_treg_combine.cc, config/sparc/sparc.c, cprop.c, cse.c,
dce.c, df-core.c, dse.c, dwarf2cfi.c, except.c, fwprop.c, gcse.c,
gimple-ssa-isolate-paths.c, gimple-ssa-strength-reduction.c,
graphite.c, ifcvt.c, init-regs.c, ipa-cp.c, ipa-devirt.c,
ipa-profile.c, ipa-pure-const.c, ipa-reference.c, ipa-split.c, ipa.c,
loop-init.c, lower-subreg.c, mode-switching.c, modulo-sched.c,
omp-low.c, postreload-gcse.c, postreload.c, predict.c, recog.c, ree.c,
reg-stack.c, regcprop.c, regrename.c, reorg.c, sched-rgn.c,
store-motion.c, tracer.c, trans-mem.c, tree-call-cdce.c, tree-cfg.c,
tree-cfgcleanup.c, tree-complex.c, tree-eh.c, tree-emutls.c,
tree-if-conv.c, tree-into-ssa.c, tree-loop-distribution.c,
tree-nrv.c, tree-parloops.c, tree-predcom.c, tree-profile.c,
tree-sra.c, tree-ssa-ccp.c, tree-ssa-copy.c, tree-ssa-copyrename.c,
tree-ssa-dce.c, tree-ssa-dom.c, tree-ssa-dse.c, tree-ssa-forwprop.c,
tree-ssa-ifcombine.c, tree-ssa-loop-ch.c, tree-ssa-loop-im.c,
tree-ssa-loop-ivcanon.c, tree-ssa-loop-prefetch.c,
tree-ssa-loop-unswitch.c, tree-ssa-loop.c, tree-ssa-math-opts.c,
tree-ssa-phiopt.c, tree-ssa-phiprop.c, tree-ssa-pre.c,
tree-ssa-reassoc.c, tree-ssa-sink.c, tree-ssa-strlen.c,
tree-ssa-structalias.c, tree-ssa-uncprop.c, tree-ssa-uninit.c,
tree-ssa.c, tree-stdarg.c, tree-switch-conversion.c, tree-tailcall.c,
tree-vect-generic.c, tree-vectorizer.c, tree-vrp.c, tsan.c, ubsan.c,
var-tracking.c, vtable-verify.c, web.c: Adjust.
gcc/testsuite/
* g++.dg/plugin/dumb_plugin.c, g++.dg/plugin/selfasign.c,
gcc.dg/plugin/one_time_plugin.c, gcc.dg/plugin/selfasign.c: Adjust.
From-SVN: r209481
2014-04-17 14:37:16 +02:00
|
|
|
virtual bool gate (function *) { return !optimize && gate_asan (); }
|
pass cfun to pass::execute
gcc/
* passes.c (opt_pass::execute): Adjust.
(pass_manager::execute_pass_mode_switching): Likewise.
(early_local_passes::execute): Likewise.
(execute_one_pass): Pass cfun to the pass's execute method.
* tree-pass.h (opt_pass::execute): Add function * argument.
* asan.c, auto-inc-dec.c, bb-reorder.c, bt-load.c, cfgcleanup.c,
cfgexpand.c, cfgrtl.c, cgraphbuild.c, combine-stack-adj.c, combine.c,
compare-elim.c, config/arc/arc.c, config/epiphany/mode-switch-use.c,
config/epiphany/resolve-sw-modes.c, config/i386/i386.c,
config/mips/mips.c, config/rl78/rl78.c, config/s390/s390.c,
config/sparc/sparc.c, cprop.c, dce.c, df-core.c, dse.c, dwarf2cfi.c,
except.c, final.c, function.c, fwprop.c, gcse.c, gimple-low.c,
gimple-ssa-isolate-paths.c, gimple-ssa-strength-reduction.c,
graphite.c, ifcvt.c, init-regs.c, ipa-cp.c, ipa-devirt.c,
ipa-inline-analysis.c, ipa-inline.c, ipa-profile.c, ipa-pure-const.c,
ipa-reference.c, ipa-split.c, ipa.c, ira.c, jump.c, loop-init.c,
lower-subreg.c, mode-switching.c, omp-low.c, postreload-gcse.c,
postreload.c, predict.c, recog.c, ree.c, reg-stack.c, regcprop.c,
reginfo.c, regrename.c, reorg.c, sched-rgn.c, stack-ptr-mod.c,
store-motion.c, tracer.c, trans-mem.c, tree-call-cdce.c, tree-cfg.c,
tree-cfgcleanup.c, tree-complex.c, tree-eh.c, tree-emutls.c,
tree-if-conv.c, tree-into-ssa.c, tree-loop-distribution.c, tree-nrv.c,
tree-object-size.c, tree-parloops.c, tree-predcom.c, tree-ssa-ccp.c,
tree-ssa-copy.c, tree-ssa-copyrename.c, tree-ssa-dce.c,
tree-ssa-dom.c, tree-ssa-dse.c, tree-ssa-forwprop.c,
tree-ssa-ifcombine.c, tree-ssa-loop-ch.c, tree-ssa-loop-im.c,
tree-ssa-loop-ivcanon.c, tree-ssa-loop-prefetch.c,
tree-ssa-loop-unswitch.c, tree-ssa-loop.c, tree-ssa-math-opts.c,
tree-ssa-phiopt.c, tree-ssa-phiprop.c, tree-ssa-pre.c,
tree-ssa-reassoc.c, tree-ssa-sink.c, tree-ssa-strlen.c,
tree-ssa-structalias.c, tree-ssa-uncprop.c, tree-ssa-uninit.c,
tree-ssa.c, tree-ssanames.c, tree-stdarg.c, tree-switch-conversion.c,
tree-tailcall.c, tree-vect-generic.c, tree-vectorizer.c, tree-vrp.c,
tree.c, tsan.c, ubsan.c, var-tracking.c, vtable-verify.c, web.c:
Adjust.
From-SVN: r209482
2014-04-17 14:37:34 +02:00
|
|
|
virtual unsigned int execute (function *) { return asan_instrument (); }
|
2013-08-05 22:16:05 +02:00
|
|
|
|
|
|
|
}; // class pass_asan_O0
|
|
|
|
|
|
|
|
} // anon namespace
|
|
|
|
|
|
|
|
/* Factory function used by the pass manager: create a new instance of
   the -O0 AddressSanitizer pass in context CTXT.  The caller takes
   ownership of the returned pass object.  */

gimple_opt_pass *
make_pass_asan_O0 (gcc::context *ctxt)
{
  return new pass_asan_O0 (ctxt);
}
|
|
|
|
|
Emit GIMPLE directly instead of gimplifying GENERIC.
This patch cleans up the instrumentation code generation by emitting
GIMPLE directly, as opposed to emitting GENERIC trees and then
gimplifying them.  It also does some cleanups here and there
* Makefile.in (GTFILES): Add $(srcdir)/asan.c.
(asan.o): Update the dependencies of asan.o.
* asan.c (tm.h, tree.h, tm_p.h, basic-block.h, flags.h
function.h, tree-inline.h, tree-dump.h, diagnostic.h, demangle.h,
langhooks.h, ggc.h, cgraph.h, gimple.h): Remove these unused but
included headers.
(shadow_ptr_types): New variable.
(report_error_func): Change is_store argument to bool, don't append
newline to function name.
(PROB_VERY_UNLIKELY, PROB_ALWAYS): Define.
(build_check_stmt): Change is_store argument to bool. Emit GIMPLE
directly instead of creating trees and gimplifying them. Mark
the error reporting function as very unlikely.
(instrument_derefs): Change is_store argument to bool. Use
int_size_in_bytes to compute size_in_bytes, simplify size check.
Use build_fold_addr_expr instead of build_addr.
(transform_statements): Adjust instrument_derefs caller.
Use gimple_assign_single_p as stmt test. Don't look at MEM refs
in rhs2.
(asan_init_shadow_ptr_types): New function.
(asan_instrument): Don't push/pop gimplify context.
Call asan_init_shadow_ptr_types if not yet initialized.
* asan.h (ASAN_SHADOW_SHIFT): Adjust comment.
Co-Authored-By: Dodji Seketeli <dodji@redhat.com>
Co-Authored-By: Xinliang David Li <davidxl@google.com>
From-SVN: r193434
2012-11-12 16:51:53 +01:00
|
|
|
#include "gt-asan.h"
|