analyzer: fix bitfield endianness issues [PR99212,PR101082]
Looks like my patch for PR analyzer/99212 implicitly assumed little-endian,
which the following patch fixes.

Fixes bitfields-1.c on:
- armeb-none-linux-gnueabihf
- cris-elf
- powerpc64-darwin
- s390-linux-gnu

gcc/analyzer/ChangeLog:
	PR analyzer/99212
	PR analyzer/101082
	* engine.cc: Include "target.h".
	(impl_run_checkers): Log BITS_BIG_ENDIAN, BYTES_BIG_ENDIAN,
	and WORDS_BIG_ENDIAN.
	* region-model-manager.cc
	(region_model_manager::maybe_fold_binop): Move support for
	masking via ARG0 & CST into...
	(region_model_manager::maybe_undo_optimize_bit_field_compare):
	...this new function.  Flatten by converting from nested
	conditionals to a series of early return statements to reject
	failures.  Reject if type is not unsigned_char_type_node.
	Handle BYTES_BIG_ENDIAN when determining which bits are bound
	in the binding_map.
	* region-model.h
	(region_model_manager::maybe_undo_optimize_bit_field_compare):
	New decl.
	* store.cc (bit_range::dump): New function.
	* store.h (bit_range::dump): New decl.

Signed-off-by: David Malcolm <dmalcolm@redhat.com>
This commit is contained in:
parent
f018044145
commit
21b470a9c9
@ -64,6 +64,7 @@ along with GCC; see the file COPYING3. If not see
|
||||
#include "analyzer/bar-chart.h"
|
||||
#include <zlib.h>
|
||||
#include "plugin.h"
|
||||
#include "target.h"
|
||||
|
||||
/* For an overview, see gcc/doc/analyzer.texi. */
|
||||
|
||||
@ -4821,6 +4822,13 @@ impl_run_checkers (logger *logger)
|
||||
{
|
||||
LOG_SCOPE (logger);
|
||||
|
||||
if (logger)
|
||||
{
|
||||
logger->log ("BITS_BIG_ENDIAN: %i", BITS_BIG_ENDIAN ? 1 : 0);
|
||||
logger->log ("BYTES_BIG_ENDIAN: %i", BYTES_BIG_ENDIAN ? 1 : 0);
|
||||
logger->log ("WORDS_BIG_ENDIAN: %i", WORDS_BIG_ENDIAN ? 1 : 0);
|
||||
}
|
||||
|
||||
/* If using LTO, ensure that the cgraph nodes have function bodies. */
|
||||
cgraph_node *node;
|
||||
FOR_EACH_FUNCTION_WITH_GIMPLE_BODY (node)
|
||||
|
@ -431,6 +431,60 @@ region_model_manager::get_or_create_cast (tree type, const svalue *arg)
|
||||
return get_or_create_unaryop (type, op, arg);
|
||||
}
|
||||
|
||||
/* Subroutine of region_model_manager::maybe_fold_binop for handling
|
||||
(TYPE)(COMPOUND_SVAL BIT_AND_EXPR CST) that may have been generated by
|
||||
optimize_bit_field_compare, where CST is from ARG1.
|
||||
|
||||
Support masking out bits from a compound_svalue for comparing a bitfield
|
||||
against a value, as generated by optimize_bit_field_compare for
|
||||
BITFIELD == VALUE.
|
||||
|
||||
If COMPOUND_SVAL has a value for the appropriate bits, return it,
|
||||
shifted accordingly.
|
||||
Otherwise return NULL. */
|
||||
|
||||
const svalue *
|
||||
region_model_manager::
|
||||
maybe_undo_optimize_bit_field_compare (tree type,
|
||||
const compound_svalue *compound_sval,
|
||||
tree cst,
|
||||
const svalue *arg1)
|
||||
{
|
||||
if (type != unsigned_char_type_node)
|
||||
return NULL;
|
||||
|
||||
const binding_map &map = compound_sval->get_map ();
|
||||
unsigned HOST_WIDE_INT mask = TREE_INT_CST_LOW (cst);
|
||||
/* If "mask" is a contiguous range of set bits, see if the
|
||||
compound_sval has a value for those bits. */
|
||||
bit_range bits (0, 0);
|
||||
if (!bit_range::from_mask (mask, &bits))
|
||||
return NULL;
|
||||
|
||||
bit_range bound_bits (bits);
|
||||
if (BYTES_BIG_ENDIAN)
|
||||
bound_bits = bit_range (BITS_PER_UNIT - bits.get_next_bit_offset (),
|
||||
bits.m_size_in_bits);
|
||||
const concrete_binding *conc
|
||||
= get_store_manager ()->get_concrete_binding (bound_bits, BK_direct);
|
||||
const svalue *sval = map.get (conc);
|
||||
if (!sval)
|
||||
return NULL;
|
||||
|
||||
/* We have a value;
|
||||
shift it by the correct number of bits. */
|
||||
const svalue *lhs = get_or_create_cast (type, sval);
|
||||
HOST_WIDE_INT bit_offset = bits.get_start_bit_offset ().to_shwi ();
|
||||
tree shift_amt = build_int_cst (type, bit_offset);
|
||||
const svalue *shift_sval = get_or_create_constant_svalue (shift_amt);
|
||||
const svalue *shifted_sval = get_or_create_binop (type, LSHIFT_EXPR,
|
||||
lhs, shift_sval);
|
||||
/* Reapply the mask (needed for negative
|
||||
signed bitfields). */
|
||||
return get_or_create_binop (type, BIT_AND_EXPR,
|
||||
shifted_sval, arg1);
|
||||
}
|
||||
|
||||
/* Subroutine of region_model_manager::get_or_create_binop.
|
||||
Attempt to fold the inputs and return a simpler svalue *.
|
||||
Otherwise, return NULL. */
|
||||
@ -485,43 +539,13 @@ region_model_manager::maybe_fold_binop (tree type, enum tree_code op,
|
||||
/* "(ARG0 & 0)" -> "0". */
|
||||
return get_or_create_constant_svalue (build_int_cst (type, 0));
|
||||
|
||||
/* Support masking out bits from a compound_svalue, as this
|
||||
is generated when accessing bitfields. */
|
||||
if (const compound_svalue *compound_sval
|
||||
= arg0->dyn_cast_compound_svalue ())
|
||||
{
|
||||
const binding_map &map = compound_sval->get_map ();
|
||||
unsigned HOST_WIDE_INT mask = TREE_INT_CST_LOW (cst1);
|
||||
/* If "mask" is a contiguous range of set bits, see if the
|
||||
compound_sval has a value for those bits. */
|
||||
bit_range bits (0, 0);
|
||||
if (bit_range::from_mask (mask, &bits))
|
||||
{
|
||||
const concrete_binding *conc
|
||||
= get_store_manager ()->get_concrete_binding (bits,
|
||||
BK_direct);
|
||||
if (const svalue *sval = map.get (conc))
|
||||
{
|
||||
/* We have a value;
|
||||
shift it by the correct number of bits. */
|
||||
const svalue *lhs = get_or_create_cast (type, sval);
|
||||
HOST_WIDE_INT bit_offset
|
||||
= bits.get_start_bit_offset ().to_shwi ();
|
||||
tree shift_amt = build_int_cst (type, bit_offset);
|
||||
const svalue *shift_sval
|
||||
= get_or_create_constant_svalue (shift_amt);
|
||||
const svalue *shifted_sval
|
||||
= get_or_create_binop (type,
|
||||
LSHIFT_EXPR,
|
||||
lhs, shift_sval);
|
||||
/* Reapply the mask (needed for negative
|
||||
signed bitfields). */
|
||||
return get_or_create_binop (type,
|
||||
BIT_AND_EXPR,
|
||||
shifted_sval, arg1);
|
||||
}
|
||||
}
|
||||
}
|
||||
if (const svalue *sval
|
||||
= maybe_undo_optimize_bit_field_compare (type,
|
||||
compound_sval,
|
||||
cst1, arg1))
|
||||
return sval;
|
||||
}
|
||||
break;
|
||||
case TRUTH_ANDIF_EXPR:
|
||||
|
@ -273,6 +273,9 @@ private:
|
||||
const svalue *maybe_fold_sub_svalue (tree type,
|
||||
const svalue *parent_svalue,
|
||||
const region *subregion);
|
||||
const svalue *maybe_undo_optimize_bit_field_compare (tree type,
|
||||
const compound_svalue *compound_sval,
|
||||
tree cst, const svalue *arg1);
|
||||
|
||||
unsigned m_next_region_id;
|
||||
root_region m_root_region;
|
||||
|
@ -249,6 +249,18 @@ bit_range::dump_to_pp (pretty_printer *pp) const
|
||||
pp_wide_int (pp, get_next_bit_offset (), SIGNED);
|
||||
}
|
||||
|
||||
/* Dump this object to stderr, followed by a newline.
   A debugging aid, intended for use from within a debugger.  */

DEBUG_FUNCTION void
bit_range::dump () const
{
  pretty_printer pp;
  /* Route the pretty_printer's output buffer directly to stderr.  */
  pp.buffer->stream = stderr;
  dump_to_pp (&pp);
  pp_newline (&pp);
  /* Ensure the buffered text actually reaches stderr.  */
  pp_flush (&pp);
}
|
||||
|
||||
int
|
||||
bit_range::cmp (const bit_range &br1, const bit_range &br2)
|
||||
{
|
||||
|
@ -275,6 +275,7 @@ struct bit_range
|
||||
{}
|
||||
|
||||
void dump_to_pp (pretty_printer *pp) const;
|
||||
void dump () const;
|
||||
|
||||
bit_offset_t get_start_bit_offset () const
|
||||
{
|
||||
|
Loading…
Reference in New Issue
Block a user