Support << and >> for offset_int and widest_int
Following on from the comparison patch, I think it makes sense to
support << and >> for offset_int (int128_t) and widest_int (intNNN_t),
with >> being an arithmetic shift.  It doesn't make sense to use a
logical right shift on a potentially negative offset_int, since the
precision of 128 bits has no meaning on the target.

Tested on x86_64-linux-gnu and aarch64-linux-gnu.

gcc/
	* wide-int.h: Update offset_int and widest_int documentation.
	(WI_SIGNED_SHIFT_RESULT): New macro.
	(wi::binary_shift): Define signed_shift_result_type for
	shifts on offset_int- and widest_int-like types.
	(generic_wide_int): Support <<= and >>= if << and >> are supported.
	* tree.h (int_bit_position): Use shift operators instead of wi::
	shifts.
	* alias.c (adjust_offset_for_component_ref): Likewise.
	* expr.c (get_inner_reference): Likewise.
	* fold-const.c (fold_comparison): Likewise.
	* gimple-fold.c (fold_nonarray_ctor_reference): Likewise.
	* gimple-ssa-strength-reduction.c (restructure_reference): Likewise.
	* tree-dfa.c (get_ref_base_and_extent): Likewise.
	* tree-ssa-alias.c (indirect_ref_may_alias_decl_p): Likewise.
	(stmt_kills_ref_p): Likewise.
	* tree-ssa-ccp.c (bit_value_binop_1): Likewise.
	* tree-ssa-math-opts.c (find_bswap_or_nop_load): Likewise.
	* tree-ssa-sccvn.c (copy_reference_ops_from_ref): Likewise.
	(ao_ref_init_from_vn_reference): Likewise.

gcc/cp/
	* init.c (build_new_1): Use shift operators instead of wi:: shifts.

From-SVN: r235720
commit 8de73453a4
parent 032c80e999
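As a rough sketch of what the patch enables (hypothetical caller code;
`bytes` is a stand-in for whatever offset_int a pass has on hand), a
call site that previously had to spell out the wi:: routines:

    offset_int bits = wi::lshift (bytes, LOG2_BITS_PER_UNIT);
    offset_int back = wi::arshift (bits, LOG2_BITS_PER_UNIT);

can now use operator syntax, with >> mapping to the arithmetic shift
wi::arshift:

    offset_int bits = bytes << LOG2_BITS_PER_UNIT;
    offset_int back = bits >> LOG2_BITS_PER_UNIT;
    bits <<= 1;  /* the compound forms <<= and >>= work too */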
gcc/ChangeLog:
@@ -1,3 +1,25 @@
+2016-05-02  Richard Sandiford  <richard.sandiford@arm.com>
+
+	* wide-int.h: Update offset_int and widest_int documentation.
+	(WI_SIGNED_SHIFT_RESULT): New macro.
+	(wi::binary_shift): Define signed_shift_result_type for
+	shifts on offset_int- and widest_int-like types.
+	(generic_wide_int): Support <<= and >>= if << and >> are supported.
+	* tree.h (int_bit_position): Use shift operators instead of wi::
+	shifts.
+	* alias.c (adjust_offset_for_component_ref): Likewise.
+	* expr.c (get_inner_reference): Likewise.
+	* fold-const.c (fold_comparison): Likewise.
+	* gimple-fold.c (fold_nonarray_ctor_reference): Likewise.
+	* gimple-ssa-strength-reduction.c (restructure_reference): Likewise.
+	* tree-dfa.c (get_ref_base_and_extent): Likewise.
+	* tree-ssa-alias.c (indirect_ref_may_alias_decl_p): Likewise.
+	(stmt_kills_ref_p): Likewise.
+	* tree-ssa-ccp.c (bit_value_binop_1): Likewise.
+	* tree-ssa-math-opts.c (find_bswap_or_nop_load): Likewise.
+	* tree-ssa-sccvn.c (copy_reference_ops_from_ref): Likewise.
+	(ao_ref_init_from_vn_reference): Likewise.
+
 2016-05-02  Richard Sandiford  <richard.sandiford@arm.com>
 
 	* wide-int.h: Update offset_int and widest_int documentation.
gcc/alias.c:
@@ -2651,8 +2651,8 @@ adjust_offset_for_component_ref (tree x, bool *known_p,
 
 	  offset_int woffset
 	    = (wi::to_offset (xoffset)
-	       + wi::lrshift (wi::to_offset (DECL_FIELD_BIT_OFFSET (field)),
-			      LOG2_BITS_PER_UNIT));
+	       + (wi::to_offset (DECL_FIELD_BIT_OFFSET (field))
+		  >> LOG2_BITS_PER_UNIT));
 	  if (!wi::fits_uhwi_p (woffset))
 	    {
 	      *known_p = false;
gcc/cp/ChangeLog:
@@ -1,3 +1,7 @@
+2016-05-02  Richard Sandiford  <richard.sandiford@arm.com>
+
+	* init.c (build_new_1): Use shift operators instead of wi:: shifts.
+
 2016-05-02  Richard Biener  <rguenther@suse.de>
 
 	* decl.c (grokdeclarator): Properly insert a DECL_EXPR for
gcc/cp/init.c:
@@ -2812,8 +2812,7 @@ build_new_1 (vec<tree, va_gc> **placement, tree type, tree nelts,
 
       unsigned shift = (max_outer_nelts.get_precision ()) - 7
 	- wi::clz (max_outer_nelts);
-      max_outer_nelts = wi::lshift (wi::lrshift (max_outer_nelts, shift),
-				    shift);
+      max_outer_nelts = (max_outer_nelts >> shift) << shift;
 
       outer_nelts_check = fold_build2 (LE_EXPR, boolean_type_node,
 				       outer_nelts,
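The rewritten line keeps the semantics of the removed wi:: calls:
shifting right and then left by the same amount clears the low `shift`
bits, rounding max_outer_nelts down to a multiple of 2^shift.  A toy
illustration with made-up numbers:

    /* shift == 3: 45 == 0b101101.
       45 >> 3 == 0b101 == 5, and 5 << 3 == 0b101000 == 40,
       the largest multiple of 8 not exceeding 45.  */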
gcc/expr.c:
@@ -6989,7 +6989,7 @@ get_inner_reference (tree exp, HOST_WIDE_INT *pbitsize,
 	  if (!integer_zerop (off))
 	    {
 	      offset_int boff, coff = mem_ref_offset (exp);
-	      boff = wi::lshift (coff, LOG2_BITS_PER_UNIT);
+	      boff = coff << LOG2_BITS_PER_UNIT;
 	      bit_offset += boff;
 	    }
 	  exp = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
@@ -7015,7 +7015,7 @@ get_inner_reference (tree exp, HOST_WIDE_INT *pbitsize,
 	{
 	  offset_int tem = wi::sext (wi::to_offset (offset),
 				     TYPE_PRECISION (sizetype));
-	  tem = wi::lshift (tem, LOG2_BITS_PER_UNIT);
+	  tem <<= LOG2_BITS_PER_UNIT;
 	  tem += bit_offset;
 	  if (wi::fits_shwi_p (tem))
 	    {
@@ -7035,7 +7035,7 @@ get_inner_reference (tree exp, HOST_WIDE_INT *pbitsize,
 	  /* TEM is the bitpos rounded to BITS_PER_UNIT towards -Inf.
 	     Subtract it to BIT_OFFSET and add it (scaled) to OFFSET.  */
 	  bit_offset -= tem;
-	  tem = wi::arshift (tem, LOG2_BITS_PER_UNIT);
+	  tem >>= LOG2_BITS_PER_UNIT;
 	  offset = size_binop (PLUS_EXPR, offset,
 			       wide_int_to_tree (sizetype, tem));
 	}
gcc/fold-const.c:
@@ -8521,7 +8521,7 @@ fold_comparison (location_t loc, enum tree_code code, tree type,
 	    {
 	      offset_int tem = wi::sext (wi::to_offset (offset0),
 					 TYPE_PRECISION (sizetype));
-	      tem = wi::lshift (tem, LOG2_BITS_PER_UNIT);
+	      tem <<= LOG2_BITS_PER_UNIT;
 	      tem += bitpos0;
 	      if (wi::fits_shwi_p (tem))
 		{
@@ -8568,7 +8568,7 @@ fold_comparison (location_t loc, enum tree_code code, tree type,
 	    {
 	      offset_int tem = wi::sext (wi::to_offset (offset1),
 					 TYPE_PRECISION (sizetype));
-	      tem = wi::lshift (tem, LOG2_BITS_PER_UNIT);
+	      tem <<= LOG2_BITS_PER_UNIT;
 	      tem += bitpos1;
 	      if (wi::fits_shwi_p (tem))
 		{
gcc/gimple-fold.c:
@@ -5435,8 +5435,7 @@ fold_nonarray_ctor_reference (tree type, tree ctor,
 
       /* Compute bit offset of the field.  */
       bitoffset = (wi::to_offset (field_offset)
-		   + wi::lshift (wi::to_offset (byte_offset),
-				 LOG2_BITS_PER_UNIT));
+		   + (wi::to_offset (byte_offset) << LOG2_BITS_PER_UNIT));
       /* Compute bit offset where the field ends.  */
       if (field_size != NULL_TREE)
 	bitoffset_end = bitoffset + wi::to_offset (field_size);
gcc/gimple-ssa-strength-reduction.c:
@@ -951,7 +951,7 @@ restructure_reference (tree *pbase, tree *poffset, widest_int *pindex,
       c2 = 0;
     }
 
-  c4 = wi::lrshift (index, LOG2_BITS_PER_UNIT);
+  c4 = index >> LOG2_BITS_PER_UNIT;
   c5 = backtrace_base_for_ref (&t2);
 
   *pbase = t1;
gcc/tree-dfa.c:
@@ -424,8 +424,8 @@ get_ref_base_and_extent (tree exp, HOST_WIDE_INT *poffset,
 
 	  if (this_offset && TREE_CODE (this_offset) == INTEGER_CST)
 	    {
-	      offset_int woffset = wi::lshift (wi::to_offset (this_offset),
-					       LOG2_BITS_PER_UNIT);
+	      offset_int woffset = (wi::to_offset (this_offset)
+				    << LOG2_BITS_PER_UNIT);
 	      woffset += wi::to_offset (DECL_FIELD_BIT_OFFSET (field));
 	      bit_offset += woffset;
 
@@ -453,7 +453,7 @@ get_ref_base_and_extent (tree exp, HOST_WIDE_INT *poffset,
 		{
 		  offset_int tem = (wi::to_offset (ssize)
 				    - wi::to_offset (fsize));
-		  tem = wi::lshift (tem, LOG2_BITS_PER_UNIT);
+		  tem <<= LOG2_BITS_PER_UNIT;
 		  tem -= woffset;
 		  maxsize += tem;
 		}
@@ -493,7 +493,7 @@ get_ref_base_and_extent (tree exp, HOST_WIDE_INT *poffset,
 		= wi::sext (wi::to_offset (index) - wi::to_offset (low_bound),
 			    TYPE_PRECISION (TREE_TYPE (index)));
 	      woffset *= wi::to_offset (unit_size);
-	      woffset = wi::lshift (woffset, LOG2_BITS_PER_UNIT);
+	      woffset <<= LOG2_BITS_PER_UNIT;
 	      bit_offset += woffset;
 
 	      /* An array ref with a constant index up in the structure
@@ -570,7 +570,7 @@ get_ref_base_and_extent (tree exp, HOST_WIDE_INT *poffset,
 	  else
 	    {
 	      offset_int off = mem_ref_offset (exp);
-	      off = wi::lshift (off, LOG2_BITS_PER_UNIT);
+	      off <<= LOG2_BITS_PER_UNIT;
 	      off += bit_offset;
 	      if (wi::fits_shwi_p (off))
 		{
gcc/tree-ssa-alias.c:
@@ -1101,7 +1101,7 @@ indirect_ref_may_alias_decl_p (tree ref1 ATTRIBUTE_UNUSED, tree base1,
       /* The offset embedded in MEM_REFs can be negative.  Bias them
 	 so that the resulting offset adjustment is positive.  */
       offset_int moff = mem_ref_offset (base1);
-      moff = wi::lshift (moff, LOG2_BITS_PER_UNIT);
+      moff <<= LOG2_BITS_PER_UNIT;
       if (wi::neg_p (moff))
 	offset2p += (-moff).to_short_addr ();
       else
@@ -1173,7 +1173,7 @@ indirect_ref_may_alias_decl_p (tree ref1 ATTRIBUTE_UNUSED, tree base1,
 	  || TREE_CODE (dbase2) == TARGET_MEM_REF)
 	{
 	  offset_int moff = mem_ref_offset (dbase2);
-	  moff = wi::lshift (moff, LOG2_BITS_PER_UNIT);
+	  moff <<= LOG2_BITS_PER_UNIT;
 	  if (wi::neg_p (moff))
 	    doffset1 -= (-moff).to_short_addr ();
 	  else
@@ -1271,13 +1271,13 @@ indirect_refs_may_alias_p (tree ref1 ATTRIBUTE_UNUSED, tree base1,
       /* The offset embedded in MEM_REFs can be negative.  Bias them
 	 so that the resulting offset adjustment is positive.  */
       moff = mem_ref_offset (base1);
-      moff = wi::lshift (moff, LOG2_BITS_PER_UNIT);
+      moff <<= LOG2_BITS_PER_UNIT;
       if (wi::neg_p (moff))
 	offset2 += (-moff).to_short_addr ();
       else
 	offset1 += moff.to_shwi ();
       moff = mem_ref_offset (base2);
-      moff = wi::lshift (moff, LOG2_BITS_PER_UNIT);
+      moff <<= LOG2_BITS_PER_UNIT;
       if (wi::neg_p (moff))
 	offset1 += (-moff).to_short_addr ();
       else
@@ -2358,10 +2358,10 @@ stmt_kills_ref_p (gimple *stmt, ao_ref *ref)
 				       TREE_OPERAND (ref->base, 1)))
 	    {
 	      offset_int off1 = mem_ref_offset (base);
-	      off1 = wi::lshift (off1, LOG2_BITS_PER_UNIT);
+	      off1 <<= LOG2_BITS_PER_UNIT;
 	      off1 += offset;
 	      offset_int off2 = mem_ref_offset (ref->base);
-	      off2 = wi::lshift (off2, LOG2_BITS_PER_UNIT);
+	      off2 <<= LOG2_BITS_PER_UNIT;
 	      off2 += ref_offset;
 	      if (wi::fits_shwi_p (off1) && wi::fits_shwi_p (off2))
 		{
@@ -2432,18 +2432,15 @@ stmt_kills_ref_p (gimple *stmt, ao_ref *ref)
 		  if (TREE_CODE (rbase) != MEM_REF)
 		    return false;
 		  // Compare pointers.
-		  offset += wi::lshift (mem_ref_offset (base),
-					LOG2_BITS_PER_UNIT);
-		  roffset += wi::lshift (mem_ref_offset (rbase),
-					 LOG2_BITS_PER_UNIT);
+		  offset += mem_ref_offset (base) << LOG2_BITS_PER_UNIT;
+		  roffset += mem_ref_offset (rbase) << LOG2_BITS_PER_UNIT;
 		  base = TREE_OPERAND (base, 0);
 		  rbase = TREE_OPERAND (rbase, 0);
 		}
 	      if (base == rbase
 		  && offset <= roffset
 		  && (roffset + ref->max_size
-		      <= offset + wi::lshift (wi::to_offset (len),
-					      LOG2_BITS_PER_UNIT)))
+		      <= offset + (wi::to_offset (len) << LOG2_BITS_PER_UNIT)))
 		return true;
 	      break;
 	    }
|
@ -1372,8 +1372,8 @@ bit_value_binop_1 (enum tree_code code, tree type,
|
||||
}
|
||||
else
|
||||
{
|
||||
*mask = wi::ext (wi::lshift (r1mask, shift), width, sgn);
|
||||
*val = wi::ext (wi::lshift (r1val, shift), width, sgn);
|
||||
*mask = wi::ext (r1mask << shift, width, sgn);
|
||||
*val = wi::ext (r1val << shift, width, sgn);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
gcc/tree-ssa-math-opts.c:
@@ -2104,7 +2104,7 @@ find_bswap_or_nop_load (gimple *stmt, tree ref, struct symbolic_number *n)
       if (!integer_zerop (off))
 	{
 	  offset_int boff, coff = mem_ref_offset (base_addr);
-	  boff = wi::lshift (coff, LOG2_BITS_PER_UNIT);
+	  boff = coff << LOG2_BITS_PER_UNIT;
 	  bit_offset += boff;
 	}
 
@@ -2118,7 +2118,7 @@ find_bswap_or_nop_load (gimple *stmt, tree ref, struct symbolic_number *n)
       /* TEM is the bitpos rounded to BITS_PER_UNIT towards -Inf.
 	 Subtract it to BIT_OFFSET and add it (scaled) to OFFSET.  */
       bit_offset -= tem;
-      tem = wi::arshift (tem, LOG2_BITS_PER_UNIT);
+      tem >>= LOG2_BITS_PER_UNIT;
       if (offset)
 	offset = size_binop (PLUS_EXPR, offset,
 			     wide_int_to_tree (sizetype, tem));
gcc/tree-ssa-sccvn.c:
@@ -788,8 +788,7 @@ copy_reference_ops_from_ref (tree ref, vec<vn_reference_op_s> *result)
 	    {
 	      offset_int off
 		= (wi::to_offset (this_offset)
-		   + wi::lrshift (wi::to_offset (bit_offset),
-				  LOG2_BITS_PER_UNIT));
+		   + (wi::to_offset (bit_offset) >> LOG2_BITS_PER_UNIT));
 	      if (wi::fits_shwi_p (off)
 		  /* Probibit value-numbering zero offset components
 		     of addresses the same before the pass folding
@@ -999,8 +998,8 @@ ao_ref_init_from_vn_reference (ao_ref *ref,
 	  max_size = -1;
 	else
 	  {
-	    offset_int woffset = wi::lshift (wi::to_offset (this_offset),
-					     LOG2_BITS_PER_UNIT);
+	    offset_int woffset = (wi::to_offset (this_offset)
+				  << LOG2_BITS_PER_UNIT);
 	    woffset += wi::to_offset (DECL_FIELD_BIT_OFFSET (field));
 	    offset += woffset;
 	  }
@@ -1020,7 +1019,7 @@ ao_ref_init_from_vn_reference (ao_ref *ref,
 	      = wi::sext (wi::to_offset (op->op0) - wi::to_offset (op->op1),
 			  TYPE_PRECISION (TREE_TYPE (op->op0)));
 	    woffset *= wi::to_offset (op->op2);
-	    woffset = wi::lshift (woffset, LOG2_BITS_PER_UNIT);
+	    woffset <<= LOG2_BITS_PER_UNIT;
 	    offset += woffset;
 	  }
 	break;
gcc/tree.h:
@@ -5375,7 +5375,7 @@ extern GTY(()) struct int_n_trees_t int_n_trees[NUM_INT_N_ENTS];
 inline HOST_WIDE_INT
 int_bit_position (const_tree field)
 {
-  return (wi::lshift (wi::to_offset (DECL_FIELD_OFFSET (field)), BITS_PER_UNIT_LOG)
+  return ((wi::to_offset (DECL_FIELD_OFFSET (field)) << BITS_PER_UNIT_LOG)
 	  + wi::to_offset (DECL_FIELD_BIT_OFFSET (field))).to_shwi ();
 }
 
gcc/wide-int.h:
@@ -68,6 +68,8 @@ along with GCC; see the file COPYING3.  If not see
      Since the values are logically signed, there is no need to
      distinguish between signed and unsigned operations.  Sign-sensitive
      comparison operators <, <=, > and >= are therefore supported.
+     Shift operators << and >> are also supported, with >> being
+     an _arithmetic_ right shift.
 
      [ Note that, even though offset_int is effectively int128_t,
        it can still be useful to use unsigned comparisons like
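A small illustration of why >> must be arithmetic here (hypothetical
values, using offset_int as documented above):

    offset_int x = -16;     /* sign bit replicated through all 128 bits */
    offset_int y = x >> 2;  /* wi::arshift: y == -4 */

A logical shift would instead fill from bit 127 with zeros and turn -16
into a huge positive number; those high bits are an artefact of the
128-bit representation rather than anything the target defines.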
@@ -82,7 +84,8 @@ along with GCC; see the file COPYING3.  If not see
 
    Like offset_int, widest_int is wider than all the values that
    it needs to represent, so the integers are logically signed.
-   Sign-sensitive comparison operators <, <=, > and >= are supported.
+   Sign-sensitive comparison operators <, <=, > and >= are supported,
+   as are << and >>.
 
    There are several places in the GCC where this should/must be used:
 
@@ -259,6 +262,11 @@ along with GCC; see the file COPYING3.  If not see
 #define WI_BINARY_RESULT(T1, T2) \
   typename wi::binary_traits <T1, T2>::result_type
 
+/* The type of result produced by T1 << T2.  Leads to substitution failure
+   if the operation isn't supported.  Defined purely for brevity.  */
+#define WI_SIGNED_SHIFT_RESULT(T1, T2) \
+  typename wi::binary_traits <T1, T2>::signed_shift_result_type
+
 /* The type of result produced by a signed binary predicate on types T1 and T2.
    This is bool if signed comparisons make sense for T1 and T2 and leads to
    substitution failure otherwise.  */
@@ -405,6 +413,7 @@ namespace wi
        so as not to confuse gengtype.  */
     typedef generic_wide_int < fixed_wide_int_storage
 			       <int_traits <T1>::precision> > result_type;
+    typedef result_type signed_shift_result_type;
     typedef bool signed_predicate_result;
   };
 
@@ -416,6 +425,7 @@ namespace wi
     STATIC_ASSERT (int_traits <T1>::precision == int_traits <T2>::precision);
     typedef generic_wide_int < fixed_wide_int_storage
 			       <int_traits <T1>::precision> > result_type;
+    typedef result_type signed_shift_result_type;
     typedef bool signed_predicate_result;
   };
 
@@ -681,6 +691,11 @@ public:
   template <typename T> \
     generic_wide_int &OP (const T &c) { return (*this = wi::F (*this, c)); }
 
+/* Restrict these to cases where the shift operator is defined.  */
+#define SHIFT_ASSIGNMENT_OPERATOR(OP, OP2) \
+  template <typename T> \
+    generic_wide_int &OP (const T &c) { return (*this = *this OP2 c); }
+
 #define INCDEC_OPERATOR(OP, DELTA) \
   generic_wide_int &OP () { *this += DELTA; return *this; }
 
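For reference, a hand expansion of SHIFT_ASSIGNMENT_OPERATOR
(operator <<=, <<) — my expansion, not part of the patch:

    template <typename T>
    generic_wide_int &operator <<= (const T &c)
    {
      return (*this = *this << c);
    }

Because the body routes through the plain << operator, <<= and >>= are
only well-formed where << and >> themselves are defined; for other
operand combinations substitution failure removes them, as the comment
above the macro says.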
@@ -702,12 +717,15 @@ public:
   ASSIGNMENT_OPERATOR (operator +=, add)
   ASSIGNMENT_OPERATOR (operator -=, sub)
   ASSIGNMENT_OPERATOR (operator *=, mul)
+  SHIFT_ASSIGNMENT_OPERATOR (operator <<=, <<)
+  SHIFT_ASSIGNMENT_OPERATOR (operator >>=, >>)
   INCDEC_OPERATOR (operator ++, 1)
   INCDEC_OPERATOR (operator --, -1)
 
 #undef BINARY_PREDICATE
 #undef UNARY_OPERATOR
 #undef BINARY_OPERATOR
+#undef SHIFT_ASSIGNMENT_OPERATOR
 #undef ASSIGNMENT_OPERATOR
 #undef INCDEC_OPERATOR
 
@@ -857,7 +875,7 @@ generic_wide_int <storage>::elt (unsigned int i) const
 
 template <typename storage>
 template <typename T>
-generic_wide_int <storage> &
+inline generic_wide_int <storage> &
 generic_wide_int <storage>::operator = (const T &x)
 {
   storage::operator = (x);
|
||||
|
||||
#undef SIGNED_BINARY_PREDICATE
|
||||
|
||||
template <typename T1, typename T2>
|
||||
inline WI_SIGNED_SHIFT_RESULT (T1, T2)
|
||||
operator << (const T1 &x, const T2 &y)
|
||||
{
|
||||
return wi::lshift (x, y);
|
||||
}
|
||||
|
||||
template <typename T1, typename T2>
|
||||
inline WI_SIGNED_SHIFT_RESULT (T1, T2)
|
||||
operator >> (const T1 &x, const T2 &y)
|
||||
{
|
||||
return wi::arshift (x, y);
|
||||
}
|
||||
|
||||
template<typename T>
|
||||
void
|
||||
gt_ggc_mx (generic_wide_int <T> *)
|
||||
|
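These two templates only participate in overload resolution when
WI_SIGNED_SHIFT_RESULT (T1, T2) names a type, i.e. when binary_traits
defines signed_shift_result_type, which the earlier hunks add only for
offset_int- and widest_int-like operands.  A sketch of the resulting
behaviour (hypothetical snippet; `cst` is assumed to be an INTEGER_CST
tree):

    offset_int a = wi::to_offset (cst);
    offset_int b = a << 3;  /* OK: expands to wi::lshift (a, 3) */
    offset_int c = a >> 3;  /* OK: expands to wi::arshift (a, 3) */

For plain wide_int operands no signed_shift_result_type exists, so
these operators drop out and callers still choose between wi::lshift,
wi::lrshift and wi::arshift explicitly.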