decl.c, [...]: Replace tree_low_cst (..., 1) with tree_to_uhwi throughout.

gcc/ada/
	* gcc-interface/decl.c, gcc-interface/utils.c, gcc-interface/utils2.c:
	Replace tree_low_cst (..., 1) with tree_to_uhwi throughout.

gcc/c-family/
	* c-common.c, c-cppbuiltin.c: Replace tree_low_cst (..., 1) with
	tree_to_uhwi throughout.

gcc/c/
	* c-decl.c, c-typeck.c: Replace tree_low_cst (..., 1) with
	tree_to_uhwi throughout.

gcc/cp/
	* call.c, class.c, decl.c, error.c: Replace tree_low_cst (..., 1) with
	tree_to_uhwi throughout.

gcc/objc/
	* objc-encoding.c: Replace tree_low_cst (..., 1) with tree_to_uhwi
	throughout.

gcc/
	* alias.c, asan.c, builtins.c, cfgexpand.c, cgraph.c,
	config/aarch64/aarch64.c, config/alpha/predicates.md,
	config/arm/arm.c, config/darwin.c, config/epiphany/epiphany.c,
	config/i386/i386.c, config/iq2000/iq2000.c, config/m32c/m32c-pragma.c,
	config/mep/mep-pragma.c, config/mips/mips.c,
	config/picochip/picochip.c, config/rs6000/rs6000.c, cppbuiltin.c,
	dbxout.c, dwarf2out.c, emit-rtl.c, except.c, expr.c, fold-const.c,
	function.c, gimple-fold.c, godump.c, ipa-cp.c, ipa-prop.c, omp-low.c,
	predict.c, sdbout.c, stor-layout.c, trans-mem.c, tree-object-size.c,
	tree-sra.c, tree-ssa-ccp.c, tree-ssa-forwprop.c,
	tree-ssa-loop-ivcanon.c, tree-ssa-loop-ivopts.c, tree-ssa-loop-niter.c,
	tree-ssa-loop-prefetch.c, tree-ssa-strlen.c, tree-stdarg.c,
	tree-switch-conversion.c, tree-vect-generic.c, tree-vect-loop.c,
	tree-vect-patterns.c, tree-vrp.c, tree.c, tsan.c, ubsan.c, varasm.c:
	Replace tree_low_cst (..., 1) with tree_to_uhwi throughout.

From-SVN: r204961
Richard Sandiford, 2013-11-18 14:52:19 +00:00 (committed by Richard Sandiford)
parent 9439e9a1a4
commit ae7e9dddb8
71 changed files with 352 additions and 309 deletions
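The substitution is mechanical: tree_low_cst (x, 1) read an INTEGER_CST as an unsigned HOST_WIDE_INT, with the second argument selecting the unsigned interpretation, and tree_to_uhwi (x) is the dedicated unsigned accessor used in its place throughout the hunks below. The sketch that follows is illustrative only and is not taken from any single file in this diff; the helper name is invented, it assumes the usual GCC tree.h environment, and the tree_fits_uhwi_p guard mirrors the checks that accompany the conversions in the actual code.

/* Hypothetical helper, for illustration only: shows the old and new
   spellings of reading a size constant as an unsigned HOST_WIDE_INT.  */

static unsigned HOST_WIDE_INT
size_in_bytes_if_constant (tree type)
{
  tree size_tree = TYPE_SIZE_UNIT (type);

  /* Only proceed when the size is a compile-time constant that fits
     in an unsigned HOST_WIDE_INT.  */
  if (size_tree == NULL_TREE || !tree_fits_uhwi_p (size_tree))
    return 0;

  /* Old spelling, removed by this commit:
       return tree_low_cst (size_tree, 1);  */

  /* New spelling, the dedicated unsigned accessor.  */
  return tree_to_uhwi (size_tree);
}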


@ -1,3 +1,21 @@
2013-11-18 Richard Sandiford <rdsandiford@googlemail.com>
* alias.c, asan.c, builtins.c, cfgexpand.c, cgraph.c,
config/aarch64/aarch64.c, config/alpha/predicates.md,
config/arm/arm.c, config/darwin.c, config/epiphany/epiphany.c,
config/i386/i386.c, config/iq2000/iq2000.c, config/m32c/m32c-pragma.c,
config/mep/mep-pragma.c, config/mips/mips.c,
config/picochip/picochip.c, config/rs6000/rs6000.c, cppbuiltin.c,
dbxout.c, dwarf2out.c, emit-rtl.c, except.c, expr.c, fold-const.c,
function.c, gimple-fold.c, godump.c, ipa-cp.c, ipa-prop.c, omp-low.c,
predict.c, sdbout.c, stor-layout.c, trans-mem.c, tree-object-size.c,
tree-sra.c, tree-ssa-ccp.c, tree-ssa-forwprop.c,
tree-ssa-loop-ivcanon.c, tree-ssa-loop-ivopts.c, tree-ssa-loop-niter.c,
tree-ssa-loop-prefetch.c, tree-ssa-strlen.c, tree-stdarg.c,
tree-switch-conversion.c, tree-vect-generic.c, tree-vect-loop.c,
tree-vect-patterns.c, tree-vrp.c, tree.c, tsan.c, ubsan.c, varasm.c:
Replace tree_low_cst (..., 1) with tree_to_uhwi throughout.
2013-11-18 Richard Sandiford <rdsandiford@googlemail.com>
* builtins.c, cilk-common.c, config/aarch64/aarch64.c,


@ -1,3 +1,8 @@
2013-11-18 Richard Sandiford <rdsandiford@googlemail.com>
* gcc-interface/decl.c, gcc-interface/utils.c, gcc-interface/utils2.c:
Replace tree_low_cst (..., 1) with tree_to_uhwi throughout.
2013-11-18 Richard Sandiford <rdsandiford@googlemail.com>
* gcc-interface/cuintp.c: Update comments to refer to


@ -843,7 +843,7 @@ gnat_to_gnu_entity (Entity_Id gnat_entity, tree gnu_expr, int definition)
else if (compare_tree_int (TYPE_SIZE (gnu_type), align_cap) > 0)
align = align_cap;
else
align = ceil_pow2 (tree_low_cst (TYPE_SIZE (gnu_type), 1));
align = ceil_pow2 (tree_to_uhwi (TYPE_SIZE (gnu_type)));
/* But make sure not to under-align the object. */
if (align <= TYPE_ALIGN (gnu_type))
@ -4933,11 +4933,11 @@ gnat_to_gnu_entity (Entity_Id gnat_entity, tree gnu_expr, int definition)
&& tree_fits_uhwi_p (TYPE_SIZE (gnu_type))
&& integer_pow2p (TYPE_SIZE (gnu_type)))
align = MIN (BIGGEST_ALIGNMENT,
tree_low_cst (TYPE_SIZE (gnu_type), 1));
tree_to_uhwi (TYPE_SIZE (gnu_type)));
else if (Is_Atomic (gnat_entity) && gnu_size
&& tree_fits_uhwi_p (gnu_size)
&& integer_pow2p (gnu_size))
align = MIN (BIGGEST_ALIGNMENT, tree_low_cst (gnu_size, 1));
align = MIN (BIGGEST_ALIGNMENT, tree_to_uhwi (gnu_size));
/* See if we need to pad the type. If we did, and made a record,
the name of the new type may be changed. So get it back for
@ -8362,7 +8362,7 @@ create_field_decl_from (tree old_field, tree field_type, tree record_type,
{
tree t = TREE_VALUE (purpose_member (old_field, pos_list));
tree pos = TREE_VEC_ELT (t, 0), bitpos = TREE_VEC_ELT (t, 2);
unsigned int offset_align = tree_low_cst (TREE_VEC_ELT (t, 1), 1);
unsigned int offset_align = tree_to_uhwi (TREE_VEC_ELT (t, 1));
tree new_pos, new_field;
unsigned int i;
subst_pair *s;


@ -771,7 +771,7 @@ make_aligning_type (tree type, unsigned int align, tree size,
tree
make_packable_type (tree type, bool in_record)
{
unsigned HOST_WIDE_INT size = tree_low_cst (TYPE_SIZE (type), 1);
unsigned HOST_WIDE_INT size = tree_to_uhwi (TYPE_SIZE (type));
unsigned HOST_WIDE_INT new_size;
tree new_type, old_field, field_list = NULL_TREE;
unsigned int align;
@ -918,7 +918,7 @@ make_type_from_size (tree type, tree size_tree, bool for_biased)
if (!size_tree || !tree_fits_uhwi_p (size_tree))
return type;
size = tree_low_cst (size_tree, 1);
size = tree_to_uhwi (size_tree);
switch (TREE_CODE (type))
{
@ -1744,7 +1744,7 @@ rest_of_record_type_compilation (tree record_type)
&& tree_fits_uhwi_p (TREE_OPERAND (curpos, 1)))
{
tree offset = TREE_OPERAND (curpos, 0);
align = tree_low_cst (TREE_OPERAND (curpos, 1), 1);
align = tree_to_uhwi (TREE_OPERAND (curpos, 1));
align = scale_by_factor_of (offset, align);
last_pos = round_up (last_pos, align);
pos = compute_related_constant (curpos, last_pos);
@ -1758,9 +1758,9 @@ rest_of_record_type_compilation (tree record_type)
{
tree offset = TREE_OPERAND (TREE_OPERAND (curpos, 0), 0);
unsigned HOST_WIDE_INT addend
= tree_low_cst (TREE_OPERAND (curpos, 1), 1);
= tree_to_uhwi (TREE_OPERAND (curpos, 1));
align
= tree_low_cst (TREE_OPERAND (TREE_OPERAND (curpos, 0), 1), 1);
= tree_to_uhwi (TREE_OPERAND (TREE_OPERAND (curpos, 0), 1));
align = scale_by_factor_of (offset, align);
align = MIN (align, addend & -addend);
last_pos = round_up (last_pos, align);
@ -2378,7 +2378,7 @@ create_field_decl (tree field_name, tree field_type, tree record_type,
unsigned int known_align;
if (tree_fits_uhwi_p (pos))
known_align = tree_low_cst (pos, 1) & - tree_low_cst (pos, 1);
known_align = tree_to_uhwi (pos) & - tree_to_uhwi (pos);
else
known_align = BITS_PER_UNIT;
@ -2549,7 +2549,7 @@ bool
value_factor_p (tree value, HOST_WIDE_INT factor)
{
if (tree_fits_uhwi_p (value))
return tree_low_cst (value, 1) % factor == 0;
return tree_to_uhwi (value) % factor == 0;
if (TREE_CODE (value) == MULT_EXPR)
return (value_factor_p (TREE_OPERAND (value, 0), factor)
@ -2616,8 +2616,8 @@ potential_alignment_gap (tree prev_field, tree curr_field, tree offset)
iff it is not multiple of the current field alignment. */
if (tree_fits_uhwi_p (DECL_SIZE (prev_field))
&& tree_fits_uhwi_p (bit_position (prev_field)))
return ((tree_low_cst (bit_position (prev_field), 1)
+ tree_low_cst (DECL_SIZE (prev_field), 1))
return ((tree_to_uhwi (bit_position (prev_field))
+ tree_to_uhwi (DECL_SIZE (prev_field)))
% DECL_ALIGN (curr_field) != 0);
/* If both the position and size of the previous field are multiples
@ -3274,7 +3274,7 @@ build_vms_descriptor32 (tree type, Mechanism_Type mech, Entity_Id gnat_entity)
case ENUMERAL_TYPE:
case BOOLEAN_TYPE:
if (TYPE_VAX_FLOATING_POINT_P (type))
switch (tree_low_cst (TYPE_DIGITS_VALUE (type), 1))
switch (tree_to_uhwi (TYPE_DIGITS_VALUE (type)))
{
case 6:
dtype = 10;
@ -3314,7 +3314,7 @@ build_vms_descriptor32 (tree type, Mechanism_Type mech, Entity_Id gnat_entity)
case COMPLEX_TYPE:
if (TREE_CODE (TREE_TYPE (type)) == INTEGER_TYPE
&& TYPE_VAX_FLOATING_POINT_P (type))
switch (tree_low_cst (TYPE_DIGITS_VALUE (type), 1))
switch (tree_to_uhwi (TYPE_DIGITS_VALUE (type)))
{
case 6:
dtype = 12;
@ -3575,7 +3575,7 @@ build_vms_descriptor (tree type, Mechanism_Type mech, Entity_Id gnat_entity)
case ENUMERAL_TYPE:
case BOOLEAN_TYPE:
if (TYPE_VAX_FLOATING_POINT_P (type))
switch (tree_low_cst (TYPE_DIGITS_VALUE (type), 1))
switch (tree_to_uhwi (TYPE_DIGITS_VALUE (type)))
{
case 6:
dtype = 10;
@ -3615,7 +3615,7 @@ build_vms_descriptor (tree type, Mechanism_Type mech, Entity_Id gnat_entity)
case COMPLEX_TYPE:
if (TREE_CODE (TREE_TYPE (type)) == INTEGER_TYPE
&& TYPE_VAX_FLOATING_POINT_P (type))
switch (tree_low_cst (TYPE_DIGITS_VALUE (type), 1))
switch (tree_to_uhwi (TYPE_DIGITS_VALUE (type)))
{
case 6:
dtype = 12;
@ -6310,7 +6310,7 @@ handle_vector_size_attribute (tree *node, tree name, tree args,
}
/* Get the vector size (in bytes). */
vecsize = tree_low_cst (size, 1);
vecsize = tree_to_uhwi (size);
/* We need to provide for vector pointers, vector arrays, and
functions returning vectors. For example:
@ -6342,7 +6342,7 @@ handle_vector_size_attribute (tree *node, tree name, tree args,
return NULL_TREE;
}
if (vecsize % tree_low_cst (TYPE_SIZE_UNIT (type), 1))
if (vecsize % tree_to_uhwi (TYPE_SIZE_UNIT (type)))
{
error ("vector size not an integral multiple of component size");
return NULL;
@ -6355,7 +6355,7 @@ handle_vector_size_attribute (tree *node, tree name, tree args,
}
/* Calculate how many units fit in the vector. */
nunits = vecsize / tree_low_cst (TYPE_SIZE_UNIT (type), 1);
nunits = vecsize / tree_to_uhwi (TYPE_SIZE_UNIT (type));
if (nunits & (nunits - 1))
{
error ("number of components of the vector not a power of two");
@ -6427,9 +6427,9 @@ handle_vector_type_attribute (tree *node, tree name, tree ARG_UNUSED (args),
/* Sanity check the vector size and element type consistency. */
vec_bytes = tree_low_cst (rep_size, 1);
vec_bytes = tree_to_uhwi (rep_size);
if (vec_bytes % tree_low_cst (TYPE_SIZE_UNIT (elem_type), 1))
if (vec_bytes % tree_to_uhwi (TYPE_SIZE_UNIT (elem_type)))
{
error ("vector size not an integral multiple of component size");
return NULL;
@ -6441,7 +6441,7 @@ handle_vector_type_attribute (tree *node, tree name, tree ARG_UNUSED (args),
return NULL;
}
vec_units = vec_bytes / tree_low_cst (TYPE_SIZE_UNIT (elem_type), 1);
vec_units = vec_bytes / tree_to_uhwi (TYPE_SIZE_UNIT (elem_type));
if (vec_units & (vec_units - 1))
{
error ("number of components of the vector not a power of two");


@ -626,7 +626,7 @@ nonbinary_modular_operation (enum tree_code op_code, tree type, tree lhs,
static unsigned int
resolve_atomic_size (tree type)
{
unsigned HOST_WIDE_INT size = tree_low_cst (TYPE_SIZE_UNIT (type), 1);
unsigned HOST_WIDE_INT size = tree_to_uhwi (TYPE_SIZE_UNIT (type));
if (size == 1 || size == 2 || size == 4 || size == 8 || size == 16)
return size;


@ -2352,8 +2352,8 @@ adjust_offset_for_component_ref (tree x, bool *known_p,
*known_p = false;
return;
}
*offset += (tree_low_cst (xoffset, 1)
+ (tree_low_cst (DECL_FIELD_BIT_OFFSET (field), 1)
*offset += (tree_to_uhwi (xoffset)
+ (tree_to_uhwi (DECL_FIELD_BIT_OFFSET (field))
/ BITS_PER_UNIT));
x = TREE_OPERAND (x, 0);


@ -2029,7 +2029,7 @@ asan_add_global (tree decl, tree type, vec<constructor_elt, va_gc> *v)
CONSTRUCTOR_APPEND_ELT (vinner, NULL_TREE,
fold_convert (const_ptr_type_node,
build_fold_addr_expr (refdecl)));
size = tree_low_cst (DECL_SIZE_UNIT (decl), 1);
size = tree_to_uhwi (DECL_SIZE_UNIT (decl));
CONSTRUCTOR_APPEND_ELT (vinner, NULL_TREE, build_int_cst (uptr, size));
size += asan_red_zone_size (size);
CONSTRUCTOR_APPEND_ELT (vinner, NULL_TREE, build_int_cst (uptr, size));


@ -662,7 +662,7 @@ c_getstr (tree src)
|| compare_tree_int (offset_node, TREE_STRING_LENGTH (src) - 1) > 0)
return 0;
return TREE_STRING_POINTER (src) + tree_low_cst (offset_node, 1);
return TREE_STRING_POINTER (src) + tree_to_uhwi (offset_node);
}
/* Return a CONST_INT or CONST_DOUBLE corresponding to target reading
@ -3126,11 +3126,11 @@ determine_block_size (tree len, rtx len_rtx,
else
{
if (tree_fits_uhwi_p (TYPE_MIN_VALUE (TREE_TYPE (len))))
*min_size = tree_low_cst (TYPE_MIN_VALUE (TREE_TYPE (len)), 1);
*min_size = tree_to_uhwi (TYPE_MIN_VALUE (TREE_TYPE (len)));
else
*min_size = 0;
if (tree_fits_uhwi_p (TYPE_MAX_VALUE (TREE_TYPE (len))))
*max_size = tree_low_cst (TYPE_MAX_VALUE (TREE_TYPE (len)), 1);
*max_size = tree_to_uhwi (TYPE_MAX_VALUE (TREE_TYPE (len)));
else
*max_size = GET_MODE_MASK (GET_MODE (len_rtx));
}
@ -3529,14 +3529,14 @@ expand_builtin_strncpy (tree exp, rtx target)
rtx dest_mem;
if (!p || dest_align == 0 || !tree_fits_uhwi_p (len)
|| !can_store_by_pieces (tree_low_cst (len, 1),
|| !can_store_by_pieces (tree_to_uhwi (len),
builtin_strncpy_read_str,
CONST_CAST (char *, p),
dest_align, false))
return NULL_RTX;
dest_mem = get_memory_rtx (dest, len);
store_by_pieces (dest_mem, tree_low_cst (len, 1),
store_by_pieces (dest_mem, tree_to_uhwi (len),
builtin_strncpy_read_str,
CONST_CAST (char *, p), dest_align, false, 0);
dest_mem = force_operand (XEXP (dest_mem, 0), target);
@ -3673,12 +3673,12 @@ expand_builtin_memset_args (tree dest, tree val, tree len,
* We can't pass builtin_memset_gen_str as that emits RTL. */
c = 1;
if (tree_fits_uhwi_p (len)
&& can_store_by_pieces (tree_low_cst (len, 1),
&& can_store_by_pieces (tree_to_uhwi (len),
builtin_memset_read_str, &c, dest_align,
true))
{
val_rtx = force_reg (val_mode, val_rtx);
store_by_pieces (dest_mem, tree_low_cst (len, 1),
store_by_pieces (dest_mem, tree_to_uhwi (len),
builtin_memset_gen_str, val_rtx, dest_align,
true, 0);
}
@ -3698,10 +3698,10 @@ expand_builtin_memset_args (tree dest, tree val, tree len,
if (c)
{
if (tree_fits_uhwi_p (len)
&& can_store_by_pieces (tree_low_cst (len, 1),
&& can_store_by_pieces (tree_to_uhwi (len),
builtin_memset_read_str, &c, dest_align,
true))
store_by_pieces (dest_mem, tree_low_cst (len, 1),
store_by_pieces (dest_mem, tree_to_uhwi (len),
builtin_memset_read_str, &c, dest_align, true, 0);
else if (!set_storage_via_setmem (dest_mem, len_rtx,
gen_int_mode (c, val_mode),
@ -4406,7 +4406,7 @@ expand_builtin_frame_address (tree fndecl, tree exp)
{
rtx tem
= expand_builtin_return_addr (DECL_FUNCTION_CODE (fndecl),
tree_low_cst (CALL_EXPR_ARG (exp, 0), 1));
tree_to_uhwi (CALL_EXPR_ARG (exp, 0)));
/* Some ports cannot access arbitrary stack frames. */
if (tem == NULL)
@ -8676,7 +8676,7 @@ fold_builtin_memset (location_t loc, tree dest, tree c, tree len,
if (! var_decl_component_p (var))
return NULL_TREE;
length = tree_low_cst (len, 1);
length = tree_to_uhwi (len);
if (GET_MODE_SIZE (TYPE_MODE (etype)) != length
|| get_pointer_alignment (dest) / BITS_PER_UNIT < length)
return NULL_TREE;
@ -8781,7 +8781,7 @@ fold_builtin_memory_op (location_t loc, tree dest, tree src,
if (readonly_data_expr (src)
|| (tree_fits_uhwi_p (len)
&& (MIN (src_align, dest_align) / BITS_PER_UNIT
>= (unsigned HOST_WIDE_INT) tree_low_cst (len, 1))))
>= (unsigned HOST_WIDE_INT) tree_to_uhwi (len))))
{
tree fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
if (!fn)
@ -8805,7 +8805,7 @@ fold_builtin_memory_op (location_t loc, tree dest, tree src,
dest_base = get_ref_base_and_extent (destvar, &dest_offset,
&size, &maxsize);
if (tree_fits_uhwi_p (len))
maxsize = tree_low_cst (len, 1);
maxsize = tree_to_uhwi (len);
else
maxsize = -1;
src_offset /= BITS_PER_UNIT;
@ -9172,7 +9172,7 @@ fold_builtin_memchr (location_t loc, tree arg1, tree arg2, tree len, tree type)
if (target_char_cast (arg2, &c))
return NULL_TREE;
r = (const char *) memchr (p1, c, tree_low_cst (len, 1));
r = (const char *) memchr (p1, c, tree_to_uhwi (len));
if (r == NULL)
return build_int_cst (TREE_TYPE (arg1), 0);
@ -9215,7 +9215,7 @@ fold_builtin_memcmp (location_t loc, tree arg1, tree arg2, tree len)
&& compare_tree_int (len, strlen (p1) + 1) <= 0
&& compare_tree_int (len, strlen (p2) + 1) <= 0)
{
const int r = memcmp (p1, p2, tree_low_cst (len, 1));
const int r = memcmp (p1, p2, tree_to_uhwi (len));
if (r > 0)
return integer_one_node;
@ -9227,7 +9227,7 @@ fold_builtin_memcmp (location_t loc, tree arg1, tree arg2, tree len)
/* If len parameter is one, return an expression corresponding to
(*(const unsigned char*)arg1 - (const unsigned char*)arg2). */
if (tree_fits_uhwi_p (len) && tree_low_cst (len, 1) == 1)
if (tree_fits_uhwi_p (len) && tree_to_uhwi (len) == 1)
{
tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
tree cst_uchar_ptr_node
@ -9341,7 +9341,7 @@ fold_builtin_strncmp (location_t loc, tree arg1, tree arg2, tree len)
if (tree_fits_uhwi_p (len) && p1 && p2)
{
const int i = strncmp (p1, p2, tree_low_cst (len, 1));
const int i = strncmp (p1, p2, tree_to_uhwi (len));
if (i > 0)
return integer_one_node;
else if (i < 0)
@ -9387,7 +9387,7 @@ fold_builtin_strncmp (location_t loc, tree arg1, tree arg2, tree len)
/* If len parameter is one, return an expression corresponding to
(*(const unsigned char*)arg1 - (const unsigned char*)arg2). */
if (tree_fits_uhwi_p (len) && tree_low_cst (len, 1) == 1)
if (tree_fits_uhwi_p (len) && tree_to_uhwi (len) == 1)
{
tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
tree cst_uchar_ptr_node
@ -12256,7 +12256,7 @@ fold_builtin_snprintf (location_t loc, tree dest, tree destsize, tree fmt,
if (!init_target_chars ())
return NULL_TREE;
destlen = tree_low_cst (destsize, 1);
destlen = tree_to_uhwi (destsize);
/* If the format doesn't contain % args or %%, use strcpy. */
if (strchr (fmt_str, target_percent) == NULL)
@ -12304,7 +12304,7 @@ fold_builtin_snprintf (location_t loc, tree dest, tree destsize, tree fmt,
if (!retval || !tree_fits_uhwi_p (retval))
return NULL_TREE;
origlen = tree_low_cst (retval, 1);
origlen = tree_to_uhwi (retval);
/* We could expand this as
memcpy (str1, str2, cst - 1); str1[cst - 1] = '\0';
or to


@ -1,3 +1,8 @@
2013-11-18 Richard Sandiford <rdsandiford@googlemail.com>
* c-common.c, c-cppbuiltin.c: Replace tree_low_cst (..., 1) with
tree_to_uhwi throughout.
2013-11-18 Richard Sandiford <rdsandiford@googlemail.com>
* c-common.c, c-format.c, c-omp.c, c-pretty-print.c: Replace


@ -8484,7 +8484,7 @@ handle_vector_size_attribute (tree *node, tree name, tree args,
}
/* Get the vector size (in bytes). */
vecsize = tree_low_cst (size, 1);
vecsize = tree_to_uhwi (size);
/* We need to provide for vector pointers, vector arrays, and
functions returning vectors. For example:
@ -8517,7 +8517,7 @@ handle_vector_size_attribute (tree *node, tree name, tree args,
return NULL_TREE;
}
if (vecsize % tree_low_cst (TYPE_SIZE_UNIT (type), 1))
if (vecsize % tree_to_uhwi (TYPE_SIZE_UNIT (type)))
{
error ("vector size not an integral multiple of component size");
return NULL;
@ -8530,7 +8530,7 @@ handle_vector_size_attribute (tree *node, tree name, tree args,
}
/* Calculate how many units fit in the vector. */
nunits = vecsize / tree_low_cst (TYPE_SIZE_UNIT (type), 1);
nunits = vecsize / tree_to_uhwi (TYPE_SIZE_UNIT (type));
if (nunits & (nunits - 1))
{
error ("number of components of the vector not a power of two");
@ -10149,7 +10149,7 @@ sync_resolve_size (tree function, vec<tree, va_gc> *params)
if (!INTEGRAL_TYPE_P (type) && !POINTER_TYPE_P (type))
goto incompatible;
size = tree_low_cst (TYPE_SIZE_UNIT (type), 1);
size = tree_to_uhwi (TYPE_SIZE_UNIT (type));
if (size == 1 || size == 2 || size == 4 || size == 8 || size == 16)
return size;
@ -10309,7 +10309,7 @@ get_atomic_generic_size (location_t loc, tree function,
return 0;
}
size_0 = tree_low_cst (TYPE_SIZE_UNIT (TREE_TYPE (type_0)), 1);
size_0 = tree_to_uhwi (TYPE_SIZE_UNIT (TREE_TYPE (type_0)));
/* Zero size objects are not allowed. */
if (size_0 == 0)
@ -10334,7 +10334,7 @@ get_atomic_generic_size (location_t loc, tree function,
function);
return 0;
}
size = tree_low_cst (TYPE_SIZE_UNIT (TREE_TYPE (type)), 1);
size = tree_to_uhwi (TYPE_SIZE_UNIT (TREE_TYPE (type)));
if (size != size_0)
{
error_at (loc, "size mismatch in argument %d of %qE", x + 1,
@ -10349,7 +10349,7 @@ get_atomic_generic_size (location_t loc, tree function,
tree p = (*params)[x];
if (TREE_CODE (p) == INTEGER_CST)
{
int i = tree_low_cst (p, 1);
int i = tree_to_uhwi (p);
if (i < 0 || (i & MEMMODEL_MASK) >= MEMMODEL_LAST)
{
warning_at (loc, OPT_Winvalid_memory_model,
@ -11703,7 +11703,7 @@ convert_vector_to_pointer_for_subscript (location_t loc,
if (TREE_CODE (index) == INTEGER_CST)
if (!tree_fits_uhwi_p (index)
|| ((unsigned HOST_WIDE_INT) tree_low_cst (index, 1)
|| ((unsigned HOST_WIDE_INT) tree_to_uhwi (index)
>= TYPE_VECTOR_SUBPARTS (type)))
warning_at (loc, OPT_Warray_bounds, "index value is out of bound");


@ -106,7 +106,7 @@ static void
builtin_define_type_sizeof (const char *name, tree type)
{
builtin_define_with_int_value (name,
tree_low_cst (TYPE_SIZE_UNIT (type), 1));
tree_to_uhwi (TYPE_SIZE_UNIT (type)));
}
/* Define the float.h constants for TYPE using NAME_PREFIX, FP_SUFFIX,
@ -648,7 +648,7 @@ cpp_atomic_builtins (cpp_reader *pfile)
/* Tell the source code about various types. These map to the C++11 and C11
macros where 2 indicates lock-free always, and 1 indicates sometimes
lock free. */
#define SIZEOF_NODE(T) (tree_low_cst (TYPE_SIZE_UNIT (T), 1))
#define SIZEOF_NODE(T) (tree_to_uhwi (TYPE_SIZE_UNIT (T)))
#define SWAP_INDEX(T) ((SIZEOF_NODE (T) < SWAP_LIMIT) ? SIZEOF_NODE (T) : 0)
builtin_define_with_int_value ("__GCC_ATOMIC_BOOL_LOCK_FREE",
(have_swap[SWAP_INDEX (boolean_type_node)]? 2 : 1));


@ -1,3 +1,8 @@
2013-11-18 Richard Sandiford <rdsandiford@googlemail.com>
* c-decl.c, c-typeck.c: Replace tree_low_cst (..., 1) with
tree_to_uhwi throughout.
2013-11-18 Richard Sandiford <rdsandiford@googlemail.com>
* c-parser.c: Replace tree_low_cst (..., 0) with tree_to_shwi


@ -4832,7 +4832,7 @@ check_bitfield_type_and_width (tree *type, tree *width, tree orig_name)
*width = build_int_cst (integer_type_node, w);
}
else
w = tree_low_cst (*width, 1);
w = tree_to_uhwi (*width);
if (TREE_CODE (*type) == ENUMERAL_TYPE)
{
@ -7263,7 +7263,7 @@ finish_struct (location_t loc, tree t, tree fieldlist, tree attributes,
if (DECL_INITIAL (x))
{
unsigned HOST_WIDE_INT width = tree_low_cst (DECL_INITIAL (x), 1);
unsigned HOST_WIDE_INT width = tree_to_uhwi (DECL_INITIAL (x));
DECL_SIZE (x) = bitsize_int (width);
DECL_BIT_FIELD (x) = 1;
SET_DECL_C_BIT_FIELD (x);
@ -7334,7 +7334,7 @@ finish_struct (location_t loc, tree t, tree fieldlist, tree attributes,
&& TREE_TYPE (*fieldlistp) != error_mark_node)
{
unsigned HOST_WIDE_INT width
= tree_low_cst (DECL_INITIAL (*fieldlistp), 1);
= tree_to_uhwi (DECL_INITIAL (*fieldlistp));
tree type = TREE_TYPE (*fieldlistp);
if (width != TYPE_PRECISION (type))
{


@ -7210,7 +7210,7 @@ push_init_level (int implicit, struct obstack * braced_init_obstack)
else if (TREE_CODE (constructor_type) == ARRAY_TYPE)
{
constructor_type = TREE_TYPE (constructor_type);
push_array_bounds (tree_low_cst (constructor_index, 1));
push_array_bounds (tree_to_uhwi (constructor_index));
constructor_depth++;
}
@ -8760,7 +8760,7 @@ process_init_element (struct c_expr value, bool implicit,
/* Now output the actual element. */
if (value.value)
{
push_array_bounds (tree_low_cst (constructor_index, 1));
push_array_bounds (tree_to_uhwi (constructor_index));
output_init_element (value.value, value.original_type,
strict_string, elttype,
constructor_index, 1, implicit,


@ -293,7 +293,7 @@ add_stack_var (tree decl)
* (size_t *)pointer_map_insert (decl_to_stack_part, decl) = stack_vars_num;
v->decl = decl;
v->size = tree_low_cst (DECL_SIZE_UNIT (SSAVAR (decl)), 1);
v->size = tree_to_uhwi (DECL_SIZE_UNIT (SSAVAR (decl)));
/* Ensure that all variables have size, so that &a != &b for any two
variables that are simultaneously live. */
if (v->size == 0)
@ -1057,7 +1057,7 @@ expand_one_stack_var (tree var)
HOST_WIDE_INT size, offset;
unsigned byte_align;
size = tree_low_cst (DECL_SIZE_UNIT (SSAVAR (var)), 1);
size = tree_to_uhwi (DECL_SIZE_UNIT (SSAVAR (var)));
byte_align = align_local_variable (SSAVAR (var));
/* We handle highly aligned variables in expand_stack_vars. */
@ -1133,7 +1133,7 @@ defer_stack_allocation (tree var, bool toplevel)
/* Whether the variable is small enough for immediate allocation not to be
a problem with regard to the frame size. */
bool smallish
= (tree_low_cst (DECL_SIZE_UNIT (var), 1)
= (tree_to_uhwi (DECL_SIZE_UNIT (var))
< PARAM_VALUE (PARAM_MIN_SIZE_FOR_STACK_SHARING));
/* If stack protection is enabled, *all* stack variables must be deferred,
@ -1281,7 +1281,7 @@ expand_one_var (tree var, bool toplevel, bool really_expand)
{
if (really_expand)
expand_one_stack_var (origvar);
return tree_low_cst (DECL_SIZE_UNIT (var), 1);
return tree_to_uhwi (DECL_SIZE_UNIT (var));
}
return 0;
}
@ -1361,7 +1361,7 @@ stack_protect_classify_type (tree type)
|| !tree_fits_uhwi_p (TYPE_SIZE_UNIT (type)))
len = max;
else
len = tree_low_cst (TYPE_SIZE_UNIT (type), 1);
len = tree_to_uhwi (TYPE_SIZE_UNIT (type));
if (len < max)
ret = SPCT_HAS_SMALL_CHAR_ARRAY | SPCT_HAS_ARRAY;


@ -966,7 +966,7 @@ cgraph_create_indirect_edge (struct cgraph_node *caller, gimple call_stmt,
gcc_assert (TREE_CODE (type) == RECORD_TYPE);
edge->indirect_info->param_index = -1;
edge->indirect_info->otr_token
= tree_low_cst (OBJ_TYPE_REF_TOKEN (target), 1);
= tree_to_uhwi (OBJ_TYPE_REF_TOKEN (target));
edge->indirect_info->otr_type = type;
edge->indirect_info->polymorphic = 1;
}


@ -6036,12 +6036,12 @@ aapcs_vfp_sub_candidate (const_tree type, enum machine_mode *modep)
|| count < 0)
return -1;
count *= (1 + tree_low_cst (TYPE_MAX_VALUE (index), 1)
- tree_low_cst (TYPE_MIN_VALUE (index), 1));
count *= (1 + tree_to_uhwi (TYPE_MAX_VALUE (index))
- tree_to_uhwi (TYPE_MIN_VALUE (index)));
/* There must be no padding. */
if (!tree_fits_uhwi_p (TYPE_SIZE (type))
|| (tree_low_cst (TYPE_SIZE (type), 1)
|| (tree_to_uhwi (TYPE_SIZE (type))
!= count * GET_MODE_BITSIZE (*modep)))
return -1;
@ -6071,7 +6071,7 @@ aapcs_vfp_sub_candidate (const_tree type, enum machine_mode *modep)
/* There must be no padding. */
if (!tree_fits_uhwi_p (TYPE_SIZE (type))
|| (tree_low_cst (TYPE_SIZE (type), 1)
|| (tree_to_uhwi (TYPE_SIZE (type))
!= count * GET_MODE_BITSIZE (*modep)))
return -1;
@ -6103,7 +6103,7 @@ aapcs_vfp_sub_candidate (const_tree type, enum machine_mode *modep)
/* There must be no padding. */
if (!tree_fits_uhwi_p (TYPE_SIZE (type))
|| (tree_low_cst (TYPE_SIZE (type), 1)
|| (tree_to_uhwi (TYPE_SIZE (type))
!= count * GET_MODE_BITSIZE (*modep)))
return -1;


@ -357,7 +357,7 @@
&& !SYMBOL_REF_TLS_MODEL (op))
{
if (SYMBOL_REF_DECL (op))
max_ofs = tree_low_cst (DECL_SIZE_UNIT (SYMBOL_REF_DECL (op)), 1);
max_ofs = tree_to_uhwi (DECL_SIZE_UNIT (SYMBOL_REF_DECL (op)));
}
else
return false;


@ -4734,12 +4734,12 @@ aapcs_vfp_sub_candidate (const_tree type, enum machine_mode *modep)
|| count < 0)
return -1;
count *= (1 + tree_low_cst (TYPE_MAX_VALUE (index), 1)
- tree_low_cst (TYPE_MIN_VALUE (index), 1));
count *= (1 + tree_to_uhwi (TYPE_MAX_VALUE (index))
- tree_to_uhwi (TYPE_MIN_VALUE (index)));
/* There must be no padding. */
if (!tree_fits_uhwi_p (TYPE_SIZE (type))
|| (tree_low_cst (TYPE_SIZE (type), 1)
|| (tree_to_uhwi (TYPE_SIZE (type))
!= count * GET_MODE_BITSIZE (*modep)))
return -1;
@ -4769,7 +4769,7 @@ aapcs_vfp_sub_candidate (const_tree type, enum machine_mode *modep)
/* There must be no padding. */
if (!tree_fits_uhwi_p (TYPE_SIZE (type))
|| (tree_low_cst (TYPE_SIZE (type), 1)
|| (tree_to_uhwi (TYPE_SIZE (type))
!= count * GET_MODE_BITSIZE (*modep)))
return -1;
@ -4801,7 +4801,7 @@ aapcs_vfp_sub_candidate (const_tree type, enum machine_mode *modep)
/* There must be no padding. */
if (!tree_fits_uhwi_p (TYPE_SIZE (type))
|| (tree_low_cst (TYPE_SIZE (type), 1)
|| (tree_to_uhwi (TYPE_SIZE (type))
!= count * GET_MODE_BITSIZE (*modep)))
return -1;


@ -1509,7 +1509,7 @@ machopic_select_section (tree decl,
zsize = (DECL_P (decl)
&& (TREE_CODE (decl) == VAR_DECL || TREE_CODE (decl) == CONST_DECL)
&& tree_low_cst (DECL_SIZE_UNIT (decl), 1) == 0);
&& tree_to_uhwi (DECL_SIZE_UNIT (decl)) == 0);
one = DECL_P (decl)
&& TREE_CODE (decl) == VAR_DECL
@ -1650,7 +1650,7 @@ machopic_select_section (tree decl,
static bool warned_objc_46 = false;
/* We shall assert that zero-sized objects are an error in ObjC
meta-data. */
gcc_assert (tree_low_cst (DECL_SIZE_UNIT (decl), 1) != 0);
gcc_assert (tree_to_uhwi (DECL_SIZE_UNIT (decl)) != 0);
/* ??? This mechanism for determining the metadata section is
broken when LTO is in use, since the frontend that generated
@ -2187,7 +2187,7 @@ darwin_asm_declare_object_name (FILE *file,
machopic_define_symbol (DECL_RTL (decl));
}
size = tree_low_cst (DECL_SIZE_UNIT (decl), 1);
size = tree_to_uhwi (DECL_SIZE_UNIT (decl));
#ifdef DEBUG_DARWIN_MEM_ALLOCATORS
fprintf (file, "# dadon: %s %s (%llu, %u) local %d weak %d"


@ -2785,7 +2785,7 @@ epiphany_adjust_field_align (tree field, unsigned computed)
{
tree elmsz = TYPE_SIZE (TREE_TYPE (TREE_TYPE (field)));
if (!tree_fits_uhwi_p (elmsz) || tree_low_cst (elmsz, 1) >= 32)
if (!tree_fits_uhwi_p (elmsz) || tree_to_uhwi (elmsz) >= 32)
return 64;
}
return computed;


@ -29317,7 +29317,7 @@ ix86_builtin_tm_load (tree type)
{
if (TREE_CODE (type) == VECTOR_TYPE)
{
switch (tree_low_cst (TYPE_SIZE (type), 1))
switch (tree_to_uhwi (TYPE_SIZE (type)))
{
case 64:
return builtin_decl_explicit (BUILT_IN_TM_LOAD_M64);
@ -29337,7 +29337,7 @@ ix86_builtin_tm_store (tree type)
{
if (TREE_CODE (type) == VECTOR_TYPE)
{
switch (tree_low_cst (TYPE_SIZE (type), 1))
switch (tree_to_uhwi (TYPE_SIZE (type)))
{
case 64:
return builtin_decl_explicit (BUILT_IN_TM_STORE_M64);
@ -32658,7 +32658,7 @@ get_element_number (tree vec_type, tree arg)
unsigned HOST_WIDE_INT elt, max = TYPE_VECTOR_SUBPARTS (vec_type) - 1;
if (!tree_fits_uhwi_p (arg)
|| (elt = tree_low_cst (arg, 1), elt > max))
|| (elt = tree_to_uhwi (arg), elt > max))
{
error ("selector must be an integer constant in the range 0..%wi", max);
return 0;


@ -1307,7 +1307,7 @@ iq2000_function_arg (cumulative_args_t cum_v, enum machine_mode mode,
/* ??? If this is a packed structure, then the last hunk won't
be 64 bits. */
chunks
= tree_low_cst (TYPE_SIZE_UNIT (type), 1) / UNITS_PER_WORD;
= tree_to_uhwi (TYPE_SIZE_UNIT (type)) / UNITS_PER_WORD;
if (chunks + *arg_words + bias > (unsigned) MAX_ARGS_IN_REGISTERS)
chunks = MAX_ARGS_IN_REGISTERS - *arg_words - bias;


@ -48,7 +48,7 @@ m32c_pragma_memregs (cpp_reader * reader ATTRIBUTE_UNUSED)
{
if (tree_fits_uhwi_p (val))
{
i = tree_low_cst (val, 1);
i = tree_to_uhwi (val);
type = pragma_lex (&val);
if (type != CPP_EOF)
@ -95,7 +95,7 @@ m32c_pragma_address (cpp_reader * reader ATTRIBUTE_UNUSED)
{
if (var != error_mark_node)
{
unsigned uaddr = tree_low_cst (addr, 1);
unsigned uaddr = tree_to_uhwi (addr);
m32c_note_pragma_address (IDENTIFIER_POINTER (var), uaddr);
}


@ -234,7 +234,7 @@ mep_pragma_coprocessor_width (void)
case CPP_NUMBER:
if (! tree_fits_uhwi_p (val))
break;
i = tree_low_cst (val, 1);
i = tree_to_uhwi (val);
/* This pragma no longer has any effect. */
#if 0
if (i == 32)
@ -273,7 +273,7 @@ mep_pragma_coprocessor_subclass (void)
type = mep_pragma_lex (&val);
if (type != CPP_CHAR)
goto syntax_error;
class_letter = tree_low_cst (val, 1);
class_letter = tree_to_uhwi (val);
if (class_letter >= 'A' && class_letter <= 'D')
switch (class_letter)
{


@ -14940,7 +14940,7 @@ r10k_safe_mem_expr_p (tree expr, HOST_WIDE_INT offset)
return false;
offset += bitoffset / BITS_PER_UNIT;
return offset >= 0 && offset < tree_low_cst (DECL_SIZE_UNIT (inner), 1);
return offset >= 0 && offset < tree_to_uhwi (DECL_SIZE_UNIT (inner));
}
/* A for_each_rtx callback for which DATA points to the instruction


@ -810,7 +810,7 @@ picochip_compute_arg_size (const_tree type, enum machine_mode mode)
int type_size_in_units = 0;
if (type)
type_size_in_units = tree_low_cst (TYPE_SIZE_UNIT (type), 1);
type_size_in_units = tree_to_uhwi (TYPE_SIZE_UNIT (type));
else
type_size_in_units = GET_MODE_SIZE (mode);


@ -6128,7 +6128,7 @@ offsettable_ok_by_alignment (rtx op, HOST_WIDE_INT offset,
if (!tree_fits_uhwi_p (DECL_SIZE_UNIT (decl)))
return false;
dsize = tree_low_cst (DECL_SIZE_UNIT (decl), 1);
dsize = tree_to_uhwi (DECL_SIZE_UNIT (decl));
if (dsize > 32768)
return false;
@ -6152,7 +6152,7 @@ offsettable_ok_by_alignment (rtx op, HOST_WIDE_INT offset,
dsize = TREE_STRING_LENGTH (decl);
else if (TYPE_SIZE_UNIT (type)
&& tree_fits_uhwi_p (TYPE_SIZE_UNIT (type)))
dsize = tree_low_cst (TYPE_SIZE_UNIT (type), 1);
dsize = tree_to_uhwi (TYPE_SIZE_UNIT (type));
else
return false;
if (dsize > 32768)
@ -8559,12 +8559,12 @@ rs6000_aggregate_candidate (const_tree type, enum machine_mode *modep)
|| count < 0)
return -1;
count *= (1 + tree_low_cst (TYPE_MAX_VALUE (index), 1)
- tree_low_cst (TYPE_MIN_VALUE (index), 1));
count *= (1 + tree_to_uhwi (TYPE_MAX_VALUE (index))
- tree_to_uhwi (TYPE_MIN_VALUE (index)));
/* There must be no padding. */
if (!tree_fits_uhwi_p (TYPE_SIZE (type))
|| (tree_low_cst (TYPE_SIZE (type), 1)
|| (tree_to_uhwi (TYPE_SIZE (type))
!= count * GET_MODE_BITSIZE (*modep)))
return -1;
@ -8594,7 +8594,7 @@ rs6000_aggregate_candidate (const_tree type, enum machine_mode *modep)
/* There must be no padding. */
if (!tree_fits_uhwi_p (TYPE_SIZE (type))
|| (tree_low_cst (TYPE_SIZE (type), 1)
|| (tree_to_uhwi (TYPE_SIZE (type))
!= count * GET_MODE_BITSIZE (*modep)))
return -1;
@ -8626,7 +8626,7 @@ rs6000_aggregate_candidate (const_tree type, enum machine_mode *modep)
/* There must be no padding. */
if (!tree_fits_uhwi_p (TYPE_SIZE (type))
|| (tree_low_cst (TYPE_SIZE (type), 1)
|| (tree_to_uhwi (TYPE_SIZE (type))
!= count * GET_MODE_BITSIZE (*modep)))
return -1;
@ -12387,7 +12387,7 @@ get_element_number (tree vec_type, tree arg)
unsigned HOST_WIDE_INT elt, max = TYPE_VECTOR_SUBPARTS (vec_type) - 1;
if (!tree_fits_uhwi_p (arg)
|| (elt = tree_low_cst (arg, 1), elt > max))
|| (elt = tree_to_uhwi (arg), elt > max))
{
error ("selector must be an integer constant in the range 0..%wi", max);
return 0;


@ -1,3 +1,8 @@
2013-11-18 Richard Sandiford <rdsandiford@googlemail.com>
* call.c, class.c, decl.c, error.c: Replace tree_low_cst (..., 1) with
tree_to_uhwi throughout.
2013-11-18 Richard Sandiford <rdsandiford@googlemail.com>
* class.c, dump.c, error.c, init.c, method.c, parser.c, semantics.c:


@ -941,7 +941,7 @@ build_array_conv (tree type, tree ctor, int flags, tsubst_flags_t complain)
if (TYPE_DOMAIN (type))
{
unsigned HOST_WIDE_INT alen = tree_low_cst (array_type_nelts_top (type), 1);
unsigned HOST_WIDE_INT alen = tree_to_uhwi (array_type_nelts_top (type));
if (alen < len)
return NULL;
}


@ -6169,7 +6169,7 @@ layout_class_type (tree t, tree *virtuals_p)
{
unsigned HOST_WIDE_INT width;
tree ftype = TREE_TYPE (field);
width = tree_low_cst (DECL_SIZE (field), /*unsignedp=*/1);
width = tree_to_uhwi (DECL_SIZE (field));
if (width != TYPE_PRECISION (ftype))
{
TREE_TYPE (field)


@ -5094,7 +5094,7 @@ reshape_init_array_1 (tree elt_type, tree max_index, reshape_iter *d,
return new_init;
if (tree_fits_uhwi_p (max_index))
max_index_cst = tree_low_cst (max_index, 1);
max_index_cst = tree_to_uhwi (max_index);
/* sizetype is sign extended, not zero extended. */
else
max_index_cst = tree_low_cst (fold_convert (size_type_node, max_index),


@ -1853,7 +1853,7 @@ static tree
resolve_virtual_fun_from_obj_type_ref (tree ref)
{
tree obj_type = TREE_TYPE (OBJ_TYPE_REF_OBJECT (ref));
HOST_WIDE_INT index = tree_low_cst (OBJ_TYPE_REF_TOKEN (ref), 1);
HOST_WIDE_INT index = tree_to_uhwi (OBJ_TYPE_REF_TOKEN (ref));
tree fun = BINFO_VIRTUALS (TYPE_BINFO (TREE_TYPE (obj_type)));
while (index)
{


@ -130,7 +130,7 @@ define_builtin_macros_for_type_sizes (cpp_reader *pfile)
{
#define define_type_sizeof(NAME, TYPE) \
cpp_define_formatted (pfile, NAME"="HOST_WIDE_INT_PRINT_DEC, \
tree_low_cst (TYPE_SIZE_UNIT (TYPE), 1))
tree_to_uhwi (TYPE_SIZE_UNIT (TYPE)))
define_type_sizeof ("__SIZEOF_INT__", integer_type_node);
define_type_sizeof ("__SIZEOF_LONG__", long_integer_type_node);


@ -1566,7 +1566,7 @@ dbxout_type_fields (tree type)
stabstr_C (',');
stabstr_D (int_bit_position (tem));
stabstr_C (',');
stabstr_D (tree_low_cst (DECL_SIZE (tem), 1));
stabstr_D (tree_to_uhwi (DECL_SIZE (tem)));
stabstr_C (';');
}
}


@ -10262,7 +10262,7 @@ simple_type_size_in_bits (const_tree type)
else if (TYPE_SIZE (type) == NULL_TREE)
return 0;
else if (tree_fits_uhwi_p (TYPE_SIZE (type)))
return tree_low_cst (TYPE_SIZE (type), 1);
return tree_to_uhwi (TYPE_SIZE (type));
else
return TYPE_ALIGN (type);
}
@ -13543,7 +13543,7 @@ dw_sra_loc_expr (tree decl, rtx loc)
|| !tree_fits_uhwi_p (DECL_SIZE (decl)))
return NULL;
decl_size = tree_low_cst (DECL_SIZE (decl), 1);
decl_size = tree_to_uhwi (DECL_SIZE (decl));
descr = NULL;
descr_tail = &descr;
@ -16436,7 +16436,7 @@ add_bit_size_attribute (dw_die_ref die, tree decl)
&& DECL_BIT_FIELD_TYPE (decl));
if (tree_fits_uhwi_p (DECL_SIZE (decl)))
add_AT_unsigned (die, DW_AT_bit_size, tree_low_cst (DECL_SIZE (decl), 1));
add_AT_unsigned (die, DW_AT_bit_size, tree_to_uhwi (DECL_SIZE (decl)));
}
/* If the compiled language is ANSI C, then add a 'prototyped'
@ -17073,7 +17073,7 @@ descr_info_loc (tree val, tree base_decl)
case POINTER_PLUS_EXPR:
case PLUS_EXPR:
if (tree_fits_uhwi_p (TREE_OPERAND (val, 1))
&& (unsigned HOST_WIDE_INT) tree_low_cst (TREE_OPERAND (val, 1), 1)
&& (unsigned HOST_WIDE_INT) tree_to_uhwi (TREE_OPERAND (val, 1))
< 16384)
{
loc = descr_info_loc (TREE_OPERAND (val, 0), base_decl);


@ -1545,8 +1545,8 @@ get_mem_align_offset (rtx mem, unsigned int align)
|| !tree_fits_uhwi_p (bit_offset))
return -1;
offset += tree_low_cst (byte_offset, 1);
offset += tree_low_cst (bit_offset, 1) / BITS_PER_UNIT;
offset += tree_to_uhwi (byte_offset);
offset += tree_to_uhwi (bit_offset) / BITS_PER_UNIT;
if (inner == NULL_TREE)
{
@ -1773,7 +1773,7 @@ set_mem_attributes_minus_bitpos (rtx ref, tree t, int objectp,
if (tree_fits_uhwi_p (off_tree))
{
attrs.offset_known_p = true;
attrs.offset = tree_low_cst (off_tree, 1);
attrs.offset = tree_to_uhwi (off_tree);
apply_bitpos = bitpos;
}
}
@ -1803,7 +1803,7 @@ set_mem_attributes_minus_bitpos (rtx ref, tree t, int objectp,
if (tree_fits_uhwi_p (new_size))
{
attrs.size_known_p = true;
attrs.size = tree_low_cst (new_size, 1);
attrs.size = tree_to_uhwi (new_size);
}
/* If we modified OFFSET based on T, then subtract the outstanding
@ -2280,8 +2280,8 @@ widen_memory_access (rtx memref, enum machine_mode mode, HOST_WIDE_INT offset)
}
attrs.expr = TREE_OPERAND (attrs.expr, 0);
attrs.offset += tree_low_cst (offset, 1);
attrs.offset += (tree_low_cst (DECL_FIELD_BIT_OFFSET (field), 1)
attrs.offset += tree_to_uhwi (offset);
attrs.offset += (tree_to_uhwi (DECL_FIELD_BIT_OFFSET (field))
/ BITS_PER_UNIT);
}
/* Similarly for the decl. */


@ -313,20 +313,20 @@ init_eh (void)
/* Cache the interesting field offsets so that we have
easy access from rtl. */
sjlj_fc_call_site_ofs
= (tree_low_cst (DECL_FIELD_OFFSET (f_cs), 1)
+ tree_low_cst (DECL_FIELD_BIT_OFFSET (f_cs), 1) / BITS_PER_UNIT);
= (tree_to_uhwi (DECL_FIELD_OFFSET (f_cs))
+ tree_to_uhwi (DECL_FIELD_BIT_OFFSET (f_cs)) / BITS_PER_UNIT);
sjlj_fc_data_ofs
= (tree_low_cst (DECL_FIELD_OFFSET (f_data), 1)
+ tree_low_cst (DECL_FIELD_BIT_OFFSET (f_data), 1) / BITS_PER_UNIT);
= (tree_to_uhwi (DECL_FIELD_OFFSET (f_data))
+ tree_to_uhwi (DECL_FIELD_BIT_OFFSET (f_data)) / BITS_PER_UNIT);
sjlj_fc_personality_ofs
= (tree_low_cst (DECL_FIELD_OFFSET (f_per), 1)
+ tree_low_cst (DECL_FIELD_BIT_OFFSET (f_per), 1) / BITS_PER_UNIT);
= (tree_to_uhwi (DECL_FIELD_OFFSET (f_per))
+ tree_to_uhwi (DECL_FIELD_BIT_OFFSET (f_per)) / BITS_PER_UNIT);
sjlj_fc_lsda_ofs
= (tree_low_cst (DECL_FIELD_OFFSET (f_lsda), 1)
+ tree_low_cst (DECL_FIELD_BIT_OFFSET (f_lsda), 1) / BITS_PER_UNIT);
= (tree_to_uhwi (DECL_FIELD_OFFSET (f_lsda))
+ tree_to_uhwi (DECL_FIELD_BIT_OFFSET (f_lsda)) / BITS_PER_UNIT);
sjlj_fc_jbuf_ofs
= (tree_low_cst (DECL_FIELD_OFFSET (f_jbuf), 1)
+ tree_low_cst (DECL_FIELD_BIT_OFFSET (f_jbuf), 1) / BITS_PER_UNIT);
= (tree_to_uhwi (DECL_FIELD_OFFSET (f_jbuf))
+ tree_to_uhwi (DECL_FIELD_BIT_OFFSET (f_jbuf)) / BITS_PER_UNIT);
}
}
@ -2145,7 +2145,7 @@ expand_builtin_eh_return_data_regno (tree exp)
return constm1_rtx;
}
iwhich = tree_low_cst (which, 1);
iwhich = tree_to_uhwi (which);
iwhich = EH_RETURN_DATA_REGNO (iwhich);
if (iwhich == INVALID_REGNUM)
return constm1_rtx;


@ -4630,12 +4630,12 @@ get_bit_range (unsigned HOST_WIDE_INT *bitstart,
see finish_bitfield_layout. */
if (tree_fits_uhwi_p (DECL_FIELD_OFFSET (field))
&& tree_fits_uhwi_p (DECL_FIELD_OFFSET (repr)))
bitoffset = (tree_low_cst (DECL_FIELD_OFFSET (field), 1)
- tree_low_cst (DECL_FIELD_OFFSET (repr), 1)) * BITS_PER_UNIT;
bitoffset = (tree_to_uhwi (DECL_FIELD_OFFSET (field))
- tree_to_uhwi (DECL_FIELD_OFFSET (repr))) * BITS_PER_UNIT;
else
bitoffset = 0;
bitoffset += (tree_low_cst (DECL_FIELD_BIT_OFFSET (field), 1)
- tree_low_cst (DECL_FIELD_BIT_OFFSET (repr), 1));
bitoffset += (tree_to_uhwi (DECL_FIELD_BIT_OFFSET (field))
- tree_to_uhwi (DECL_FIELD_BIT_OFFSET (repr)));
/* If the adjustment is larger than bitpos, we would have a negative bit
position for the lower bound and this may wreak havoc later. Adjust
@ -4656,7 +4656,7 @@ get_bit_range (unsigned HOST_WIDE_INT *bitstart,
else
*bitstart = *bitpos - bitoffset;
*bitend = *bitstart + tree_low_cst (DECL_SIZE (repr), 1) - 1;
*bitend = *bitstart + tree_to_uhwi (DECL_SIZE (repr)) - 1;
}
/* Returns true if ADDR is an ADDR_EXPR of a DECL that does not reside
@ -5474,7 +5474,7 @@ count_type_elements (const_tree type, bool for_ctor_p)
{
unsigned HOST_WIDE_INT n;
n = tree_low_cst (nelts, 1) + 1;
n = tree_to_uhwi (nelts) + 1;
if (n == 0 || for_ctor_p)
return n;
else
@ -5590,8 +5590,8 @@ categorize_ctor_elements_1 (const_tree ctor, HOST_WIDE_INT *p_nz_elts,
tree hi_index = TREE_OPERAND (purpose, 1);
if (tree_fits_uhwi_p (lo_index) && tree_fits_uhwi_p (hi_index))
mult = (tree_low_cst (hi_index, 1)
- tree_low_cst (lo_index, 1) + 1);
mult = (tree_to_uhwi (hi_index)
- tree_to_uhwi (lo_index) + 1);
}
num_fields += mult;
elt_type = TREE_TYPE (value);
@ -5909,7 +5909,7 @@ store_constructor (tree exp, rtx target, int cleared, HOST_WIDE_INT size)
continue;
if (tree_fits_uhwi_p (DECL_SIZE (field)))
bitsize = tree_low_cst (DECL_SIZE (field), 1);
bitsize = tree_to_uhwi (DECL_SIZE (field));
else
bitsize = -1;
@ -6054,8 +6054,8 @@ store_constructor (tree exp, rtx target, int cleared, HOST_WIDE_INT size)
break;
}
this_node_count = (tree_low_cst (hi_index, 1)
- tree_low_cst (lo_index, 1) + 1);
this_node_count = (tree_to_uhwi (hi_index)
- tree_to_uhwi (lo_index) + 1);
}
else
this_node_count = 1;
@ -6103,7 +6103,7 @@ store_constructor (tree exp, rtx target, int cleared, HOST_WIDE_INT size)
mode = TYPE_MODE (elttype);
if (mode == BLKmode)
bitsize = (tree_fits_uhwi_p (TYPE_SIZE (elttype))
? tree_low_cst (TYPE_SIZE (elttype), 1)
? tree_to_uhwi (TYPE_SIZE (elttype))
: -1);
else
bitsize = GET_MODE_BITSIZE (mode);
@ -6126,7 +6126,7 @@ store_constructor (tree exp, rtx target, int cleared, HOST_WIDE_INT size)
(!MEM_P (target)
|| count <= 2
|| (tree_fits_uhwi_p (TYPE_SIZE (elttype))
&& (tree_low_cst (TYPE_SIZE (elttype), 1) * count
&& (tree_to_uhwi (TYPE_SIZE (elttype)) * count
<= 40 * 8)))))
{
lo -= minelt; hi -= minelt;
@ -6236,9 +6236,9 @@ store_constructor (tree exp, rtx target, int cleared, HOST_WIDE_INT size)
{
if (index != 0)
bitpos = ((tree_to_shwi (index) - minelt)
* tree_low_cst (TYPE_SIZE (elttype), 1));
* tree_to_uhwi (TYPE_SIZE (elttype)));
else
bitpos = (i * tree_low_cst (TYPE_SIZE (elttype), 1));
bitpos = (i * tree_to_uhwi (TYPE_SIZE (elttype)));
if (MEM_P (target) && !MEM_KEEP_ALIAS_SET_P (target)
&& TREE_CODE (type) == ARRAY_TYPE
@ -6262,7 +6262,7 @@ store_constructor (tree exp, rtx target, int cleared, HOST_WIDE_INT size)
int need_to_clear;
int icode = CODE_FOR_nothing;
tree elttype = TREE_TYPE (type);
int elt_size = tree_low_cst (TYPE_SIZE (elttype), 1);
int elt_size = tree_to_uhwi (TYPE_SIZE (elttype));
enum machine_mode eltmode = TYPE_MODE (elttype);
HOST_WIDE_INT bitsize;
HOST_WIDE_INT bitpos;
@ -6344,12 +6344,12 @@ store_constructor (tree exp, rtx target, int cleared, HOST_WIDE_INT size)
HOST_WIDE_INT eltpos;
tree value = ce->value;
bitsize = tree_low_cst (TYPE_SIZE (TREE_TYPE (value)), 1);
bitsize = tree_to_uhwi (TYPE_SIZE (TREE_TYPE (value)));
if (cleared && initializer_zerop (value))
continue;
if (ce->index)
eltpos = tree_low_cst (ce->index, 1);
eltpos = tree_to_uhwi (ce->index);
else
eltpos = i;
@ -6679,7 +6679,7 @@ get_inner_reference (tree exp, HOST_WIDE_INT *pbitsize,
if (! tree_fits_uhwi_p (size_tree))
mode = BLKmode, *pbitsize = -1;
else
*pbitsize = tree_low_cst (size_tree, 1);
*pbitsize = tree_to_uhwi (size_tree);
}
/* Compute cumulative bit-offset for nested component-refs and array-refs,
@ -7758,7 +7758,7 @@ expand_constructor (tree exp, rtx target, enum expand_modifier modifier,
|| TREE_ADDRESSABLE (exp)
|| (tree_fits_uhwi_p (TYPE_SIZE_UNIT (type))
&& (! MOVE_BY_PIECES_P
(tree_low_cst (TYPE_SIZE_UNIT (type), 1),
(tree_to_uhwi (TYPE_SIZE_UNIT (type)),
TYPE_ALIGN (type)))
&& ! mostly_zeros_p (exp))))
|| ((modifier == EXPAND_INITIALIZER || modifier == EXPAND_CONST_ADDRESS)
@ -10561,7 +10561,7 @@ is_aligning_offset (const_tree offset, const_tree exp)
|| !tree_fits_uhwi_p (TREE_OPERAND (offset, 1))
|| compare_tree_int (TREE_OPERAND (offset, 1),
BIGGEST_ALIGNMENT / BITS_PER_UNIT) <= 0
|| !exact_log2 (tree_low_cst (TREE_OPERAND (offset, 1), 1) + 1) < 0)
|| !exact_log2 (tree_to_uhwi (TREE_OPERAND (offset, 1)) + 1) < 0)
return 0;
/* Look at the first operand of BIT_AND_EXPR and strip any conversion.


@ -1433,10 +1433,10 @@ const_binop (enum tree_code code, tree arg1, tree arg2)
if (!tree_fits_uhwi_p (arg2))
return NULL_TREE;
unsigned HOST_WIDE_INT shiftc = tree_low_cst (arg2, 1);
unsigned HOST_WIDE_INT outerc = tree_low_cst (TYPE_SIZE (type), 1);
unsigned HOST_WIDE_INT shiftc = tree_to_uhwi (arg2);
unsigned HOST_WIDE_INT outerc = tree_to_uhwi (TYPE_SIZE (type));
unsigned HOST_WIDE_INT innerc
= tree_low_cst (TYPE_SIZE (TREE_TYPE (type)), 1);
= tree_to_uhwi (TYPE_SIZE (TREE_TYPE (type)));
if (shiftc >= outerc || (shiftc % innerc) != 0)
return NULL_TREE;
int offset = shiftc / innerc;
@ -8102,7 +8102,7 @@ fold_unary_loc (location_t loc, enum tree_code code, tree type, tree op0)
{
unsigned HOST_WIDE_INT cst;
cst = tree_low_cst (and1, 1);
cst = tree_to_uhwi (and1);
cst &= HOST_WIDE_INT_M1U
<< (TYPE_PRECISION (TREE_TYPE (and1)) - 1);
change = (cst == 0);
@ -11866,7 +11866,7 @@ fold_binary_loc (location_t loc,
If B is constant and (B & M) == 0, fold into A & M. */
if (tree_fits_uhwi_p (arg1))
{
unsigned HOST_WIDE_INT cst1 = tree_low_cst (arg1, 1);
unsigned HOST_WIDE_INT cst1 = tree_to_uhwi (arg1);
if (~cst1 && (cst1 & (cst1 + 1)) == 0
&& INTEGRAL_TYPE_P (TREE_TYPE (arg0))
&& (TREE_CODE (arg0) == PLUS_EXPR
@ -11891,7 +11891,7 @@ fold_binary_loc (location_t loc,
}
if (!tree_fits_uhwi_p (TYPE_MAX_VALUE (TREE_TYPE (arg0)))
|| (tree_low_cst (TYPE_MAX_VALUE (TREE_TYPE (arg0)), 1)
|| (tree_to_uhwi (TYPE_MAX_VALUE (TREE_TYPE (arg0)))
& cst1) != cst1)
which = -1;
@ -12036,11 +12036,11 @@ fold_binary_loc (location_t loc,
&& TYPE_PRECISION (TREE_TYPE (arg0)) <= HOST_BITS_PER_WIDE_INT
&& TREE_CODE (arg1) == INTEGER_CST
&& tree_fits_uhwi_p (TREE_OPERAND (arg0, 1))
&& tree_low_cst (TREE_OPERAND (arg0, 1), 1) > 0
&& (tree_low_cst (TREE_OPERAND (arg0, 1), 1)
&& tree_to_uhwi (TREE_OPERAND (arg0, 1)) > 0
&& (tree_to_uhwi (TREE_OPERAND (arg0, 1))
< TYPE_PRECISION (TREE_TYPE (arg0))))
{
unsigned int shiftc = tree_low_cst (TREE_OPERAND (arg0, 1), 1);
unsigned int shiftc = tree_to_uhwi (TREE_OPERAND (arg0, 1));
unsigned HOST_WIDE_INT mask = TREE_INT_CST_LOW (arg1);
unsigned HOST_WIDE_INT newmask, zerobits = 0;
tree shift_type = TREE_TYPE (arg0);
@ -14503,9 +14503,9 @@ fold_ternary_loc (location_t loc, enum tree_code code, tree type,
&& TREE_TYPE (type) == TREE_TYPE (TREE_TYPE (arg0)))))
{
tree eltype = TREE_TYPE (TREE_TYPE (arg0));
unsigned HOST_WIDE_INT width = tree_low_cst (TYPE_SIZE (eltype), 1);
unsigned HOST_WIDE_INT n = tree_low_cst (arg1, 1);
unsigned HOST_WIDE_INT idx = tree_low_cst (op2, 1);
unsigned HOST_WIDE_INT width = tree_to_uhwi (TYPE_SIZE (eltype));
unsigned HOST_WIDE_INT n = tree_to_uhwi (arg1);
unsigned HOST_WIDE_INT idx = tree_to_uhwi (op2);
if (n != 0
&& (idx % width) == 0
@ -14576,7 +14576,7 @@ fold_ternary_loc (location_t loc, enum tree_code code, tree type,
/* A bit-field-ref that referenced the full argument can be stripped. */
if (INTEGRAL_TYPE_P (TREE_TYPE (arg0))
&& TYPE_PRECISION (TREE_TYPE (arg0)) == tree_low_cst (arg1, 1)
&& TYPE_PRECISION (TREE_TYPE (arg0)) == tree_to_uhwi (arg1)
&& integer_zerop (op2))
return fold_convert_loc (loc, type, arg0);
@ -14587,14 +14587,14 @@ fold_ternary_loc (location_t loc, enum tree_code code, tree type,
&& tree_fits_uhwi_p (TYPE_SIZE_UNIT (TREE_TYPE (arg0)))
/* This limitation should not be necessary, we just need to
round this up to mode size. */
&& tree_low_cst (op1, 1) % BITS_PER_UNIT == 0
&& tree_to_uhwi (op1) % BITS_PER_UNIT == 0
/* Need bit-shifting of the buffer to relax the following. */
&& tree_low_cst (op2, 1) % BITS_PER_UNIT == 0)
&& tree_to_uhwi (op2) % BITS_PER_UNIT == 0)
{
unsigned HOST_WIDE_INT bitpos = tree_low_cst (op2, 1);
unsigned HOST_WIDE_INT bitsize = tree_low_cst (op1, 1);
unsigned HOST_WIDE_INT bitpos = tree_to_uhwi (op2);
unsigned HOST_WIDE_INT bitsize = tree_to_uhwi (op1);
unsigned HOST_WIDE_INT clen;
clen = tree_low_cst (TYPE_SIZE_UNIT (TREE_TYPE (arg0)), 1);
clen = tree_to_uhwi (TYPE_SIZE_UNIT (TREE_TYPE (arg0)));
/* ??? We cannot tell native_encode_expr to start at
some random byte only. So limit us to a reasonable amount
of work. */


@ -3814,7 +3814,7 @@ locate_and_pad_parm (enum machine_mode passed_mode, tree type, int in_regs,
tree s2 = sizetree;
if (where_pad != none
&& (!tree_fits_uhwi_p (sizetree)
|| (tree_low_cst (sizetree, 1) * BITS_PER_UNIT) % round_boundary))
|| (tree_to_uhwi (sizetree) * BITS_PER_UNIT) % round_boundary))
s2 = round_up (s2, round_boundary / BITS_PER_UNIT);
SUB_PARM_SIZE (locate->slot_offset, s2);
}
@ -3859,7 +3859,7 @@ locate_and_pad_parm (enum machine_mode passed_mode, tree type, int in_regs,
if (where_pad != none
&& (!tree_fits_uhwi_p (sizetree)
|| (tree_low_cst (sizetree, 1) * BITS_PER_UNIT) % round_boundary))
|| (tree_to_uhwi (sizetree) * BITS_PER_UNIT) % round_boundary))
sizetree = round_up (sizetree, round_boundary / BITS_PER_UNIT);
ADD_PARM_SIZE (locate->size, sizetree);


@ -1076,7 +1076,7 @@ gimple_extract_devirt_binfo_from_cst (tree cst, tree expected_type)
continue;
pos = int_bit_position (fld);
size = tree_low_cst (DECL_SIZE (fld), 1);
size = tree_to_uhwi (DECL_SIZE (fld));
if (pos <= offset && (pos + size) > offset)
break;
}
@ -3151,7 +3151,7 @@ gimple_get_virt_method_for_binfo (HOST_WIDE_INT token, tree known_binfo)
if (TREE_CODE (v) == POINTER_PLUS_EXPR)
{
offset = tree_low_cst (TREE_OPERAND (v, 1), 1) * BITS_PER_UNIT;
offset = tree_to_uhwi (TREE_OPERAND (v, 1)) * BITS_PER_UNIT;
v = TREE_OPERAND (v, 0);
}
else
@ -3177,7 +3177,7 @@ gimple_get_virt_method_for_binfo (HOST_WIDE_INT token, tree known_binfo)
return NULL_TREE;
}
gcc_checking_assert (TREE_CODE (TREE_TYPE (v)) == ARRAY_TYPE);
size = tree_low_cst (TYPE_SIZE (TREE_TYPE (TREE_TYPE (v))), 1);
size = tree_to_uhwi (TYPE_SIZE (TREE_TYPE (TREE_TYPE (v))));
offset += token * size;
fn = fold_ctor_reference (TREE_TYPE (TREE_TYPE (v)), init,
offset, size, v);
@ -3403,7 +3403,7 @@ gimple_fold_indirect_ref (tree t)
&& useless_type_conversion_p (type, TREE_TYPE (TREE_TYPE (addrtype)))
&& tree_fits_uhwi_p (off))
{
unsigned HOST_WIDE_INT offset = tree_low_cst (off, 1);
unsigned HOST_WIDE_INT offset = tree_to_uhwi (off);
tree part_width = TYPE_SIZE (type);
unsigned HOST_WIDE_INT part_widthi
= tree_to_shwi (part_width) / BITS_PER_UNIT;


@ -987,7 +987,7 @@ go_output_typedef (struct godump_container *container, tree decl)
else if (tree_fits_uhwi_p (TREE_VALUE (element)))
snprintf (buf, sizeof buf, HOST_WIDE_INT_PRINT_UNSIGNED,
((unsigned HOST_WIDE_INT)
tree_low_cst (TREE_VALUE (element), 1)));
tree_to_uhwi (TREE_VALUE (element))));
else
snprintf (buf, sizeof buf, HOST_WIDE_INT_PRINT_DOUBLE_HEX,
((unsigned HOST_WIDE_INT)


@ -1371,7 +1371,7 @@ propagate_aggs_accross_jump_function (struct cgraph_edge *cs,
if (item->offset < 0)
continue;
gcc_checking_assert (is_gimple_ip_invariant (item->value));
val_size = tree_low_cst (TYPE_SIZE (TREE_TYPE (item->value)), 1);
val_size = tree_to_uhwi (TYPE_SIZE (TREE_TYPE (item->value)));
if (merge_agg_lats_step (dest_plats, item->offset, val_size,
&aglat, pre_existing, &ret))


@ -297,7 +297,7 @@ ipa_print_node_jump_functions_for_edge (FILE *f, struct cgraph_edge *cs)
item->offset);
if (TYPE_P (item->value))
fprintf (f, "clobber of " HOST_WIDE_INT_PRINT_DEC " bits",
tree_low_cst (TYPE_SIZE (item->value), 1));
tree_to_uhwi (TYPE_SIZE (item->value)));
else
{
fprintf (f, "cst: ");
@ -1348,7 +1348,7 @@ determine_known_aggregate_parts (gimple call, tree arg,
arg_base = arg;
arg_offset = 0;
type_size = TYPE_SIZE (TREE_TYPE (TREE_TYPE (arg)));
arg_size = tree_low_cst (type_size, 1);
arg_size = tree_to_uhwi (type_size);
ao_ref_init_from_ptr_and_size (&r, arg_base, NULL_TREE);
}
else if (TREE_CODE (arg) == ADDR_EXPR)
@ -1995,7 +1995,7 @@ ipa_analyze_virtual_call_uses (struct cgraph_node *node,
cs = ipa_note_param_call (node, index, call);
ii = cs->indirect_info;
ii->offset = anc_offset;
ii->otr_token = tree_low_cst (OBJ_TYPE_REF_TOKEN (target), 1);
ii->otr_token = tree_to_uhwi (OBJ_TYPE_REF_TOKEN (target));
ii->otr_type = obj_type_ref_class (target);
ii->polymorphic = 1;
}
@ -2207,7 +2207,7 @@ ipa_intraprocedural_devirtualization (gimple call)
if (!binfo)
return NULL_TREE;
token = OBJ_TYPE_REF_TOKEN (otr);
fndecl = gimple_get_virt_method_for_binfo (tree_low_cst (token, 1),
fndecl = gimple_get_virt_method_for_binfo (tree_to_uhwi (token),
binfo);
#ifdef ENABLE_CHECKING
if (fndecl)


@ -1,3 +1,8 @@
2013-11-18 Richard Sandiford <rdsandiford@googlemail.com>
* objc-encoding.c: Replace tree_low_cst (..., 1) with tree_to_uhwi
throughout.
2013-11-18 Richard Sandiford <rdsandiford@googlemail.com>
* objc-next-runtime-abi-02.c: Replace tree_low_cst (..., 0) with


@ -820,7 +820,7 @@ encode_field (tree field_decl, int curtype, int format)
between GNU and NeXT runtimes. */
if (DECL_BIT_FIELD_TYPE (field_decl))
{
int size = tree_low_cst (DECL_SIZE (field_decl), 1);
int size = tree_to_uhwi (DECL_SIZE (field_decl));
if (flag_next_runtime)
encode_next_bitfield (size);


@ -6770,11 +6770,11 @@ expand_omp_simd (struct omp_region *region, struct omp_for_data *fd)
{
safelen = OMP_CLAUSE_SAFELEN_EXPR (safelen);
if (!tree_fits_uhwi_p (safelen)
|| (unsigned HOST_WIDE_INT) tree_low_cst (safelen, 1)
|| (unsigned HOST_WIDE_INT) tree_to_uhwi (safelen)
> INT_MAX)
loop->safelen = INT_MAX;
else
loop->safelen = tree_low_cst (safelen, 1);
loop->safelen = tree_to_uhwi (safelen);
if (loop->safelen == 1)
loop->safelen = 0;
}
@ -7678,7 +7678,7 @@ expand_omp_atomic (struct omp_region *region)
HOST_WIDE_INT index;
/* Make sure the type is one of the supported sizes. */
index = tree_low_cst (TYPE_SIZE_UNIT (type), 1);
index = tree_to_uhwi (TYPE_SIZE_UNIT (type));
index = exact_log2 (index);
if (index >= 0 && index <= 4)
{


@ -1560,7 +1560,7 @@ predict_loops (void)
if (tree_fits_uhwi_p (niter)
&& max
&& compare_tree_int (niter, max - 1) == -1)
nitercst = tree_low_cst (niter, 1) + 1;
nitercst = tree_to_uhwi (niter) + 1;
else
nitercst = max;
predictor = PRED_LOOP_ITERATIONS;


@ -1185,7 +1185,7 @@ sdbout_one_type (tree type)
PUT_SDB_INT_VAL (int_bit_position (tem));
PUT_SDB_SCL (C_FIELD);
sdbout_type (DECL_BIT_FIELD_TYPE (tem));
PUT_SDB_SIZE (tree_low_cst (DECL_SIZE (tem), 1));
PUT_SDB_SIZE (tree_to_uhwi (DECL_SIZE (tem)));
}
else
{


@ -334,7 +334,7 @@ mode_for_size_tree (const_tree size, enum mode_class mclass, int limit)
if (!tree_fits_uhwi_p (size))
return BLKmode;
uhwi = tree_low_cst (size, 1);
uhwi = tree_to_uhwi (size);
ui = uhwi;
if (uhwi != ui)
return BLKmode;
@ -485,8 +485,8 @@ mode_for_array (tree elem_type, tree size)
limit_p = true;
if (tree_fits_uhwi_p (size) && tree_fits_uhwi_p (elem_size))
{
int_size = tree_low_cst (size, 1);
int_elem_size = tree_low_cst (elem_size, 1);
int_size = tree_to_uhwi (size);
int_elem_size = tree_to_uhwi (elem_size);
if (int_elem_size > 0
&& int_size % int_elem_size == 0
&& targetm.array_mode_supported_p (TYPE_MODE (elem_type),
@ -1053,7 +1053,7 @@ excess_unit_span (HOST_WIDE_INT byte_offset, HOST_WIDE_INT bit_offset,
offset = offset % align;
return ((offset + size + align - 1) / align
> ((unsigned HOST_WIDE_INT) tree_low_cst (TYPE_SIZE (type), 1)
> ((unsigned HOST_WIDE_INT) tree_to_uhwi (TYPE_SIZE (type))
/ align));
}
#endif
@ -1113,14 +1113,14 @@ place_field (record_layout_info rli, tree field)
/* Work out the known alignment so far. Note that A & (-A) is the
value of the least-significant bit in A that is one. */
if (! integer_zerop (rli->bitpos))
known_align = (tree_low_cst (rli->bitpos, 1)
& - tree_low_cst (rli->bitpos, 1));
known_align = (tree_to_uhwi (rli->bitpos)
& - tree_to_uhwi (rli->bitpos));
else if (integer_zerop (rli->offset))
known_align = 0;
else if (tree_fits_uhwi_p (rli->offset))
known_align = (BITS_PER_UNIT
* (tree_low_cst (rli->offset, 1)
& - tree_low_cst (rli->offset, 1)));
* (tree_to_uhwi (rli->offset)
& - tree_to_uhwi (rli->offset)));
else
known_align = rli->offset_align;
@ -1200,7 +1200,7 @@ place_field (record_layout_info rli, tree field)
{
unsigned int type_align = TYPE_ALIGN (type);
tree dsize = DECL_SIZE (field);
HOST_WIDE_INT field_size = tree_low_cst (dsize, 1);
HOST_WIDE_INT field_size = tree_to_uhwi (dsize);
HOST_WIDE_INT offset = tree_to_shwi (rli->offset);
HOST_WIDE_INT bit_offset = tree_to_shwi (rli->bitpos);
@ -1244,7 +1244,7 @@ place_field (record_layout_info rli, tree field)
{
unsigned int type_align = TYPE_ALIGN (type);
tree dsize = DECL_SIZE (field);
HOST_WIDE_INT field_size = tree_low_cst (dsize, 1);
HOST_WIDE_INT field_size = tree_to_uhwi (dsize);
HOST_WIDE_INT offset = tree_to_shwi (rli->offset);
HOST_WIDE_INT bit_offset = tree_to_shwi (rli->bitpos);
@ -1307,11 +1307,11 @@ place_field (record_layout_info rli, tree field)
/* We're in the middle of a run of equal type size fields; make
sure we realign if we run out of bits. (Not decl size,
type size!) */
HOST_WIDE_INT bitsize = tree_low_cst (DECL_SIZE (field), 1);
HOST_WIDE_INT bitsize = tree_to_uhwi (DECL_SIZE (field));
if (rli->remaining_in_alignment < bitsize)
{
HOST_WIDE_INT typesize = tree_low_cst (TYPE_SIZE (type), 1);
HOST_WIDE_INT typesize = tree_to_uhwi (TYPE_SIZE (type));
/* out of bits; bump up to next 'word'. */
rli->bitpos
@ -1387,9 +1387,9 @@ place_field (record_layout_info rli, tree field)
&& tree_fits_uhwi_p (DECL_SIZE (field)))
{
unsigned HOST_WIDE_INT bitsize
= tree_low_cst (DECL_SIZE (field), 1);
= tree_to_uhwi (DECL_SIZE (field));
unsigned HOST_WIDE_INT typesize
= tree_low_cst (TYPE_SIZE (TREE_TYPE (field)), 1);
= tree_to_uhwi (TYPE_SIZE (TREE_TYPE (field)));
if (typesize < bitsize)
rli->remaining_in_alignment = 0;
@ -1421,14 +1421,14 @@ place_field (record_layout_info rli, tree field)
approximate this by seeing if its position changed), lay out the field
again; perhaps we can use an integral mode for it now. */
if (! integer_zerop (DECL_FIELD_BIT_OFFSET (field)))
actual_align = (tree_low_cst (DECL_FIELD_BIT_OFFSET (field), 1)
& - tree_low_cst (DECL_FIELD_BIT_OFFSET (field), 1));
actual_align = (tree_to_uhwi (DECL_FIELD_BIT_OFFSET (field))
& - tree_to_uhwi (DECL_FIELD_BIT_OFFSET (field)));
else if (integer_zerop (DECL_FIELD_OFFSET (field)))
actual_align = MAX (BIGGEST_ALIGNMENT, rli->record_align);
else if (tree_fits_uhwi_p (DECL_FIELD_OFFSET (field)))
actual_align = (BITS_PER_UNIT
* (tree_low_cst (DECL_FIELD_OFFSET (field), 1)
& - tree_low_cst (DECL_FIELD_OFFSET (field), 1)));
* (tree_to_uhwi (DECL_FIELD_OFFSET (field))
& - tree_to_uhwi (DECL_FIELD_OFFSET (field))));
else
actual_align = DECL_OFFSET_ALIGN (field);
/* ACTUAL_ALIGN is still the actual alignment *within the record* .
@ -1764,10 +1764,10 @@ finish_bitfield_representative (tree repr, tree field)
size = size_diffop (DECL_FIELD_OFFSET (field),
DECL_FIELD_OFFSET (repr));
gcc_assert (tree_fits_uhwi_p (size));
bitsize = (tree_low_cst (size, 1) * BITS_PER_UNIT
+ tree_low_cst (DECL_FIELD_BIT_OFFSET (field), 1)
- tree_low_cst (DECL_FIELD_BIT_OFFSET (repr), 1)
+ tree_low_cst (DECL_SIZE (field), 1));
bitsize = (tree_to_uhwi (size) * BITS_PER_UNIT
+ tree_to_uhwi (DECL_FIELD_BIT_OFFSET (field))
- tree_to_uhwi (DECL_FIELD_BIT_OFFSET (repr))
+ tree_to_uhwi (DECL_SIZE (field)));
/* Round up bitsize to multiples of BITS_PER_UNIT. */
bitsize = (bitsize + BITS_PER_UNIT - 1) & ~(BITS_PER_UNIT - 1);
@ -1787,9 +1787,9 @@ finish_bitfield_representative (tree repr, tree field)
DECL_FIELD_OFFSET (repr));
if (tree_fits_uhwi_p (maxsize))
{
maxbitsize = (tree_low_cst (maxsize, 1) * BITS_PER_UNIT
+ tree_low_cst (DECL_FIELD_BIT_OFFSET (nextf), 1)
- tree_low_cst (DECL_FIELD_BIT_OFFSET (repr), 1));
maxbitsize = (tree_to_uhwi (maxsize) * BITS_PER_UNIT
+ tree_to_uhwi (DECL_FIELD_BIT_OFFSET (nextf))
- tree_to_uhwi (DECL_FIELD_BIT_OFFSET (repr)));
/* If the group ends within a bitfield nextf does not need to be
aligned to BITS_PER_UNIT. Thus round up. */
maxbitsize = (maxbitsize + BITS_PER_UNIT - 1) & ~(BITS_PER_UNIT - 1);
@ -1807,8 +1807,8 @@ finish_bitfield_representative (tree repr, tree field)
tree maxsize = size_diffop (TYPE_SIZE_UNIT (DECL_CONTEXT (field)),
DECL_FIELD_OFFSET (repr));
if (tree_fits_uhwi_p (maxsize))
maxbitsize = (tree_low_cst (maxsize, 1) * BITS_PER_UNIT
- tree_low_cst (DECL_FIELD_BIT_OFFSET (repr), 1));
maxbitsize = (tree_to_uhwi (maxsize) * BITS_PER_UNIT
- tree_to_uhwi (DECL_FIELD_BIT_OFFSET (repr)));
else
maxbitsize = bitsize;
}

@ -1104,7 +1104,7 @@ tm_log_add (basic_block entry_block, tree addr, gimple stmt)
&& transaction_invariant_address_p (lp->addr, entry_block)
&& TYPE_SIZE_UNIT (type) != NULL
&& tree_fits_uhwi_p (TYPE_SIZE_UNIT (type))
&& (tree_low_cst (TYPE_SIZE_UNIT (type), 1)
&& (tree_to_uhwi (TYPE_SIZE_UNIT (type))
< PARAM_VALUE (PARAM_TM_MAX_AGGREGATE_SIZE))
/* We must be able to copy this type normally. I.e., no
special constructors and the like. */
@ -1189,7 +1189,7 @@ tm_log_emit_stmt (tree addr, gimple stmt)
code = BUILT_IN_TM_LOG_LDOUBLE;
else if (tree_fits_uhwi_p (size))
{
unsigned int n = tree_low_cst (size, 1);
unsigned int n = tree_to_uhwi (size);
switch (n)
{
case 1:
@ -2107,7 +2107,7 @@ build_tm_load (location_t loc, tree lhs, tree rhs, gimple_stmt_iterator *gsi)
else if (TYPE_SIZE_UNIT (type) != NULL
&& tree_fits_uhwi_p (TYPE_SIZE_UNIT (type)))
{
switch (tree_low_cst (TYPE_SIZE_UNIT (type), 1))
switch (tree_to_uhwi (TYPE_SIZE_UNIT (type)))
{
case 1:
code = BUILT_IN_TM_LOAD_1;
@ -2179,7 +2179,7 @@ build_tm_store (location_t loc, tree lhs, tree rhs, gimple_stmt_iterator *gsi)
else if (TYPE_SIZE_UNIT (type) != NULL
&& tree_fits_uhwi_p (TYPE_SIZE_UNIT (type)))
{
switch (tree_low_cst (TYPE_SIZE_UNIT (type), 1))
switch (tree_to_uhwi (TYPE_SIZE_UNIT (type)))
{
case 1:
code = BUILT_IN_TM_STORE_1;

@ -82,7 +82,7 @@ static void
init_offset_limit (void)
{
if (tree_fits_uhwi_p (TYPE_MAX_VALUE (sizetype)))
offset_limit = tree_low_cst (TYPE_MAX_VALUE (sizetype), 1);
offset_limit = tree_to_uhwi (TYPE_MAX_VALUE (sizetype));
else
offset_limit = -1;
offset_limit /= 2;
@ -110,7 +110,7 @@ compute_object_offset (const_tree expr, const_tree var)
t = TREE_OPERAND (expr, 1);
off = size_binop (PLUS_EXPR, DECL_FIELD_OFFSET (t),
size_int (tree_low_cst (DECL_FIELD_BIT_OFFSET (t), 1)
size_int (tree_to_uhwi (DECL_FIELD_BIT_OFFSET (t))
/ BITS_PER_UNIT));
break;
@ -211,14 +211,14 @@ addr_object_size (struct object_size_info *osi, const_tree ptr,
&& DECL_P (pt_var)
&& tree_fits_uhwi_p (DECL_SIZE_UNIT (pt_var))
&& (unsigned HOST_WIDE_INT)
tree_low_cst (DECL_SIZE_UNIT (pt_var), 1) < offset_limit)
tree_to_uhwi (DECL_SIZE_UNIT (pt_var)) < offset_limit)
pt_var_size = DECL_SIZE_UNIT (pt_var);
else if (pt_var
&& TREE_CODE (pt_var) == STRING_CST
&& TYPE_SIZE_UNIT (TREE_TYPE (pt_var))
&& tree_fits_uhwi_p (TYPE_SIZE_UNIT (TREE_TYPE (pt_var)))
&& (unsigned HOST_WIDE_INT)
tree_low_cst (TYPE_SIZE_UNIT (TREE_TYPE (pt_var)), 1)
tree_to_uhwi (TYPE_SIZE_UNIT (TREE_TYPE (pt_var)))
< offset_limit)
pt_var_size = TYPE_SIZE_UNIT (TREE_TYPE (pt_var));
else
@ -372,7 +372,7 @@ addr_object_size (struct object_size_info *osi, const_tree ptr,
bytes = pt_var_size;
if (tree_fits_uhwi_p (bytes))
return tree_low_cst (bytes, 1);
return tree_to_uhwi (bytes);
return unknown[object_size_type];
}
@ -436,7 +436,7 @@ alloc_object_size (const_gimple call, int object_size_type)
bytes = fold_convert (sizetype, gimple_call_arg (call, arg1));
if (bytes && tree_fits_uhwi_p (bytes))
return tree_low_cst (bytes, 1);
return tree_to_uhwi (bytes);
return unknown[object_size_type];
}
@ -799,10 +799,10 @@ plus_stmt_object_size (struct object_size_info *osi, tree var, gimple stmt)
if (! tree_fits_uhwi_p (op1))
bytes = unknown[object_size_type];
else if (TREE_CODE (op0) == SSA_NAME)
return merge_object_sizes (osi, var, op0, tree_low_cst (op1, 1));
return merge_object_sizes (osi, var, op0, tree_to_uhwi (op1));
else
{
unsigned HOST_WIDE_INT off = tree_low_cst (op1, 1);
unsigned HOST_WIDE_INT off = tree_to_uhwi (op1);
/* op0 will be ADDR_EXPR here. */
bytes = addr_object_size (osi, op0, object_size_type);
@ -1231,7 +1231,7 @@ compute_object_sizes (void)
if (tree_fits_uhwi_p (ost))
{
unsigned HOST_WIDE_INT object_size_type
= tree_low_cst (ost, 1);
= tree_to_uhwi (ost);
if (object_size_type < 2)
result = fold_convert (size_type_node,

@ -989,7 +989,7 @@ completely_scalarize_record (tree base, tree decl, HOST_WIDE_INT offset,
struct access *access;
HOST_WIDE_INT size;
size = tree_low_cst (DECL_SIZE (fld), 1);
size = tree_to_uhwi (DECL_SIZE (fld));
access = create_access_1 (base, pos, size);
access->expr = nref;
access->type = ft;
@ -1008,7 +1008,7 @@ completely_scalarize_record (tree base, tree decl, HOST_WIDE_INT offset,
static void
completely_scalarize_var (tree var)
{
HOST_WIDE_INT size = tree_low_cst (DECL_SIZE (var), 1);
HOST_WIDE_INT size = tree_to_uhwi (DECL_SIZE (var));
struct access *access;
access = create_access_1 (var, 0, size);
@ -1683,7 +1683,7 @@ build_user_friendly_ref_for_offset (tree *res, tree type, HOST_WIDE_INT offset,
tr_size = TYPE_SIZE (TREE_TYPE (type));
if (!tr_size || !tree_fits_uhwi_p (tr_size))
return false;
el_size = tree_low_cst (tr_size, 1);
el_size = tree_to_uhwi (tr_size);
minidx = TYPE_MIN_VALUE (TYPE_DOMAIN (type));
if (TREE_CODE (minidx) != INTEGER_CST || el_size == 0)
@ -1764,7 +1764,7 @@ maybe_add_sra_candidate (tree var)
reject (var, "type size not fixed");
return false;
}
if (tree_low_cst (TYPE_SIZE (type), 1) == 0)
if (tree_to_uhwi (TYPE_SIZE (type)) == 0)
{
reject (var, "type size is zero");
return false;
@ -2488,7 +2488,7 @@ analyze_all_variable_accesses (void)
if (TREE_CODE (var) == VAR_DECL
&& type_consists_of_records_p (TREE_TYPE (var)))
{
if ((unsigned) tree_low_cst (TYPE_SIZE (TREE_TYPE (var)), 1)
if ((unsigned) tree_to_uhwi (TYPE_SIZE (TREE_TYPE (var)))
<= max_total_scalarization_size)
{
completely_scalarize_var (var);
@ -2802,9 +2802,9 @@ sra_modify_expr (tree *expr, gimple_stmt_iterator *gsi, bool write)
&& tree_fits_uhwi_p (TREE_OPERAND (bfr, 1))
&& tree_fits_uhwi_p (TREE_OPERAND (bfr, 2)))
{
chunk_size = tree_low_cst (TREE_OPERAND (bfr, 1), 1);
chunk_size = tree_to_uhwi (TREE_OPERAND (bfr, 1));
start_offset = access->offset
+ tree_low_cst (TREE_OPERAND (bfr, 2), 1);
+ tree_to_uhwi (TREE_OPERAND (bfr, 2));
}
else
start_offset = chunk_size = 0;
@ -3694,7 +3694,7 @@ find_param_candidates (void)
if (!COMPLETE_TYPE_P (type)
|| !tree_fits_uhwi_p (TYPE_SIZE (type))
|| tree_low_cst (TYPE_SIZE (type), 1) == 0
|| tree_to_uhwi (TYPE_SIZE (type)) == 0
|| (AGGREGATE_TYPE_P (type)
&& type_internals_preclude_sra_p (type, &msg)))
continue;
@ -4067,9 +4067,9 @@ splice_param_accesses (tree parm, bool *ro_grp)
}
if (POINTER_TYPE_P (TREE_TYPE (parm)))
agg_size = tree_low_cst (TYPE_SIZE (TREE_TYPE (TREE_TYPE (parm))), 1);
agg_size = tree_to_uhwi (TYPE_SIZE (TREE_TYPE (TREE_TYPE (parm))));
else
agg_size = tree_low_cst (TYPE_SIZE (TREE_TYPE (parm)), 1);
agg_size = tree_to_uhwi (TYPE_SIZE (TREE_TYPE (parm)));
if (total_size >= agg_size)
return NULL;
@ -4088,13 +4088,13 @@ decide_one_param_reduction (struct access *repr)
tree parm;
parm = repr->base;
cur_parm_size = tree_low_cst (TYPE_SIZE (TREE_TYPE (parm)), 1);
cur_parm_size = tree_to_uhwi (TYPE_SIZE (TREE_TYPE (parm)));
gcc_assert (cur_parm_size > 0);
if (POINTER_TYPE_P (TREE_TYPE (parm)))
{
by_ref = true;
agg_size = tree_low_cst (TYPE_SIZE (TREE_TYPE (TREE_TYPE (parm))), 1);
agg_size = tree_to_uhwi (TYPE_SIZE (TREE_TYPE (TREE_TYPE (parm))));
}
else
{

@ -1489,7 +1489,7 @@ bit_value_assume_aligned (gimple stmt)
align = gimple_call_arg (stmt, 1);
if (!tree_fits_uhwi_p (align))
return ptrval;
aligni = tree_low_cst (align, 1);
aligni = tree_to_uhwi (align);
if (aligni <= 1
|| (aligni & (aligni - 1)) != 0)
return ptrval;
@ -1498,7 +1498,7 @@ bit_value_assume_aligned (gimple stmt)
misalign = gimple_call_arg (stmt, 2);
if (!tree_fits_uhwi_p (misalign))
return ptrval;
misaligni = tree_low_cst (misalign, 1);
misaligni = tree_to_uhwi (misalign);
if (misaligni >= aligni)
return ptrval;
}

@ -1555,7 +1555,7 @@ simplify_builtin_call (gimple_stmt_iterator *gsi_p, tree callee2)
if (!tree_fits_uhwi_p (off1)
|| compare_tree_int (off1, TREE_STRING_LENGTH (str1) - 1) > 0
|| compare_tree_int (len1, TREE_STRING_LENGTH (str1)
- tree_low_cst (off1, 1)) > 0
- tree_to_uhwi (off1)) > 0
|| TREE_CODE (TREE_TYPE (str1)) != ARRAY_TYPE
|| TYPE_MODE (TREE_TYPE (TREE_TYPE (str1)))
!= TYPE_MODE (char_type_node))
@ -1599,10 +1599,10 @@ simplify_builtin_call (gimple_stmt_iterator *gsi_p, tree callee2)
/* Use maximum of difference plus memset length and memcpy length
as the new memcpy length, if it is too big, bail out. */
src_len = tree_low_cst (diff, 1);
src_len += tree_low_cst (len2, 1);
if (src_len < (unsigned HOST_WIDE_INT) tree_low_cst (len1, 1))
src_len = tree_low_cst (len1, 1);
src_len = tree_to_uhwi (diff);
src_len += tree_to_uhwi (len2);
if (src_len < (unsigned HOST_WIDE_INT) tree_to_uhwi (len1))
src_len = tree_to_uhwi (len1);
if (src_len > 1024)
break;
@ -1628,12 +1628,12 @@ simplify_builtin_call (gimple_stmt_iterator *gsi_p, tree callee2)
src_buf = XALLOCAVEC (char, src_len + 1);
if (callee1)
memcpy (src_buf,
TREE_STRING_POINTER (str1) + tree_low_cst (off1, 1),
tree_low_cst (len1, 1));
TREE_STRING_POINTER (str1) + tree_to_uhwi (off1),
tree_to_uhwi (len1));
else
src_buf[0] = tree_to_shwi (src1);
memset (src_buf + tree_low_cst (diff, 1),
tree_to_shwi (val2), tree_low_cst (len2, 1));
memset (src_buf + tree_to_uhwi (diff),
tree_to_shwi (val2), tree_to_uhwi (len2));
src_buf[src_len] = '\0';
/* Neither builtin_strncpy_read_str nor builtin_memcpy_read_str
handle embedded '\0's. */
@ -2319,8 +2319,8 @@ simplify_rotate (gimple_stmt_iterator *gsi)
/* CNT1 + CNT2 == B case above. */
if (tree_fits_uhwi_p (def_arg2[0])
&& tree_fits_uhwi_p (def_arg2[1])
&& (unsigned HOST_WIDE_INT) tree_low_cst (def_arg2[0], 1)
+ tree_low_cst (def_arg2[1], 1) == TYPE_PRECISION (rtype))
&& (unsigned HOST_WIDE_INT) tree_to_uhwi (def_arg2[0])
+ tree_to_uhwi (def_arg2[1]) == TYPE_PRECISION (rtype))
rotcnt = def_arg2[0];
else if (TREE_CODE (def_arg2[0]) != SSA_NAME
|| TREE_CODE (def_arg2[1]) != SSA_NAME)

@ -669,7 +669,7 @@ try_unroll_loop_completely (struct loop *loop,
from the iv test. */
if (tree_fits_uhwi_p (niter))
{
n_unroll = tree_low_cst (niter, 1);
n_unroll = tree_to_uhwi (niter);
n_unroll_found = true;
edge_to_cancel = EDGE_SUCC (exit->src, 0);
if (edge_to_cancel == exit)

@ -4356,7 +4356,7 @@ iv_period (struct iv *iv)
period = build_low_bits_mask (type,
(TYPE_PRECISION (type)
- tree_low_cst (pow2div, 1)));
- tree_to_uhwi (pow2div)));
return period;
}

@ -659,7 +659,7 @@ number_of_iterations_ne_max (mpz_t bnd, bool no_overflow, tree c, tree s,
if (!no_overflow)
{
max = double_int::mask (TYPE_PRECISION (type)
- tree_low_cst (num_ending_zeros (s), 1));
- tree_to_uhwi (num_ending_zeros (s)));
mpz_set_double_int (bnd, max, true);
return;
}
@ -748,7 +748,7 @@ number_of_iterations_ne (tree type, affine_iv *iv, tree final,
bits = num_ending_zeros (s);
bound = build_low_bits_mask (niter_type,
(TYPE_PRECISION (niter_type)
- tree_low_cst (bits, 1)));
- tree_to_uhwi (bits)));
d = fold_binary_to_constant (LSHIFT_EXPR, niter_type,
build_int_cst (niter_type, 1), bits);

@ -1509,7 +1509,7 @@ self_reuse_distance (data_reference_p dr, unsigned *loop_sizes, unsigned n,
{
stride = TYPE_SIZE_UNIT (TREE_TYPE (ref));
if (tree_fits_uhwi_p (stride))
astride = tree_low_cst (stride, 1);
astride = tree_to_uhwi (stride);
else
astride = L1_CACHE_LINE_SIZE;

@ -850,12 +850,12 @@ adjust_last_stmt (strinfo si, gimple stmt, bool is_strcat)
{
if (!tree_fits_uhwi_p (last.len)
|| integer_zerop (len)
|| (unsigned HOST_WIDE_INT) tree_low_cst (len, 1)
!= (unsigned HOST_WIDE_INT) tree_low_cst (last.len, 1) + 1)
|| (unsigned HOST_WIDE_INT) tree_to_uhwi (len)
!= (unsigned HOST_WIDE_INT) tree_to_uhwi (last.len) + 1)
return;
/* Don't adjust the length if it is divisible by 4, it is more efficient
to store the extra '\0' in that case. */
if ((((unsigned HOST_WIDE_INT) tree_low_cst (len, 1)) & 3) == 0)
if ((((unsigned HOST_WIDE_INT) tree_to_uhwi (len)) & 3) == 0)
return;
}
else if (TREE_CODE (len) == SSA_NAME)
@ -1337,7 +1337,7 @@ handle_builtin_memcpy (enum built_in_function bcode, gimple_stmt_iterator *gsi)
/* Handle memcpy (x, "abcd", 5) or
memcpy (x, "abc\0uvw", 7). */
if (!tree_fits_uhwi_p (len)
|| (unsigned HOST_WIDE_INT) tree_low_cst (len, 1)
|| (unsigned HOST_WIDE_INT) tree_to_uhwi (len)
<= (unsigned HOST_WIDE_INT) ~idx)
return;
}
@ -1627,10 +1627,10 @@ handle_pointer_plus (gimple_stmt_iterator *gsi)
{
tree off = gimple_assign_rhs2 (stmt);
if (tree_fits_uhwi_p (off)
&& (unsigned HOST_WIDE_INT) tree_low_cst (off, 1)
&& (unsigned HOST_WIDE_INT) tree_to_uhwi (off)
<= (unsigned HOST_WIDE_INT) ~idx)
ssa_ver_to_stridx[SSA_NAME_VERSION (lhs)]
= ~(~idx - (int) tree_low_cst (off, 1));
= ~(~idx - (int) tree_to_uhwi (off));
return;
}

@ -174,7 +174,7 @@ va_list_counter_bump (struct stdarg_info *si, tree counter, tree rhs,
&& TREE_CODE (rhs1) == SSA_NAME
&& tree_fits_uhwi_p (gimple_assign_rhs2 (stmt)))
{
ret += tree_low_cst (gimple_assign_rhs2 (stmt), 1);
ret += tree_to_uhwi (gimple_assign_rhs2 (stmt));
lhs = rhs1;
continue;
}
@ -184,7 +184,7 @@ va_list_counter_bump (struct stdarg_info *si, tree counter, tree rhs,
&& TREE_CODE (TREE_OPERAND (TREE_OPERAND (rhs1, 0), 0)) == SSA_NAME
&& tree_fits_uhwi_p (TREE_OPERAND (TREE_OPERAND (rhs1, 0), 1)))
{
ret += tree_low_cst (TREE_OPERAND (TREE_OPERAND (rhs1, 0), 1), 1);
ret += tree_to_uhwi (TREE_OPERAND (TREE_OPERAND (rhs1, 0), 1));
lhs = TREE_OPERAND (TREE_OPERAND (rhs1, 0), 0);
continue;
}
@ -241,7 +241,7 @@ va_list_counter_bump (struct stdarg_info *si, tree counter, tree rhs,
&& TREE_CODE (rhs1) == SSA_NAME
&& tree_fits_uhwi_p (gimple_assign_rhs2 (stmt)))
{
val -= tree_low_cst (gimple_assign_rhs2 (stmt), 1);
val -= tree_to_uhwi (gimple_assign_rhs2 (stmt));
lhs = rhs1;
continue;
}
@ -251,7 +251,7 @@ va_list_counter_bump (struct stdarg_info *si, tree counter, tree rhs,
&& TREE_CODE (TREE_OPERAND (TREE_OPERAND (rhs1, 0), 0)) == SSA_NAME
&& tree_fits_uhwi_p (TREE_OPERAND (TREE_OPERAND (rhs1, 0), 1)))
{
val -= tree_low_cst (TREE_OPERAND (TREE_OPERAND (rhs1, 0), 1), 1);
val -= tree_to_uhwi (TREE_OPERAND (TREE_OPERAND (rhs1, 0), 1));
lhs = TREE_OPERAND (TREE_OPERAND (rhs1, 0), 0);
continue;
}
@ -597,7 +597,7 @@ check_all_va_list_escapes (struct stdarg_info *si)
gpr_size = si->offsets[SSA_NAME_VERSION (use)]
+ tree_to_shwi (TREE_OPERAND (rhs, 1))
+ tree_low_cst (access_size, 1);
+ tree_to_uhwi (access_size);
if (gpr_size >= VA_LIST_MAX_GPR_SIZE)
cfun->va_list_gpr_size = VA_LIST_MAX_GPR_SIZE;
else if (gpr_size > cfun->va_list_gpr_size)

@ -703,7 +703,7 @@ check_range (struct switch_conv_info *info)
return false;
}
if ((unsigned HOST_WIDE_INT) tree_low_cst (info->range_size, 1)
if ((unsigned HOST_WIDE_INT) tree_to_uhwi (info->range_size)
> ((unsigned) info->count * SWITCH_CONVERSION_BRANCH_RATIO))
{
info->reason = "the maximum range-branch ratio exceeded";
@ -805,7 +805,7 @@ create_temp_arrays (struct switch_conv_info *info)
info->target_inbound_names = info->default_values + info->phi_count;
info->target_outbound_names = info->target_inbound_names + info->phi_count;
for (i = 0; i < info->phi_count; i++)
vec_alloc (info->constructors[i], tree_low_cst (info->range_size, 1) + 1);
vec_alloc (info->constructors[i], tree_to_uhwi (info->range_size) + 1);
}
/* Free the arrays created by create_temp_arrays(). The vectors that are

@ -49,7 +49,7 @@ static void expand_vector_operations_1 (gimple_stmt_iterator *);
static tree
build_replicated_const (tree type, tree inner_type, HOST_WIDE_INT value)
{
int width = tree_low_cst (TYPE_SIZE (inner_type), 1);
int width = tree_to_uhwi (TYPE_SIZE (inner_type));
int n = HOST_BITS_PER_WIDE_INT / width;
unsigned HOST_WIDE_INT low, high, mask;
tree ret;
@ -238,8 +238,8 @@ expand_vector_piecewise (gimple_stmt_iterator *gsi, elem_op_func f,
tree part_width = TYPE_SIZE (inner_type);
tree index = bitsize_int (0);
int nunits = TYPE_VECTOR_SUBPARTS (type);
int delta = tree_low_cst (part_width, 1)
/ tree_low_cst (TYPE_SIZE (TREE_TYPE (type)), 1);
int delta = tree_to_uhwi (part_width)
/ tree_to_uhwi (TYPE_SIZE (TREE_TYPE (type)));
int i;
location_t loc = gimple_location (gsi_stmt (*gsi));
@ -272,7 +272,7 @@ expand_vector_parallel (gimple_stmt_iterator *gsi, elem_op_func f, tree type,
{
tree result, compute_type;
enum machine_mode mode;
int n_words = tree_low_cst (TYPE_SIZE_UNIT (type), 1) / UNITS_PER_WORD;
int n_words = tree_to_uhwi (TYPE_SIZE_UNIT (type)) / UNITS_PER_WORD;
location_t loc = gimple_location (gsi_stmt (*gsi));
/* We have three strategies. If the type is already correct, just do
@ -295,7 +295,7 @@ expand_vector_parallel (gimple_stmt_iterator *gsi, elem_op_func f, tree type,
else
{
/* Use a single scalar operation with a mode no wider than word_mode. */
mode = mode_for_size (tree_low_cst (TYPE_SIZE (type), 1), MODE_INT, 0);
mode = mode_for_size (tree_to_uhwi (TYPE_SIZE (type)), MODE_INT, 0);
compute_type = lang_hooks.types.type_for_mode (mode, 1);
result = f (gsi, compute_type, a, b, NULL_TREE, NULL_TREE, code);
warning_at (loc, OPT_Wvector_operation_performance,
@ -317,7 +317,7 @@ expand_vector_addition (gimple_stmt_iterator *gsi,
tree type, tree a, tree b, enum tree_code code)
{
int parts_per_word = UNITS_PER_WORD
/ tree_low_cst (TYPE_SIZE_UNIT (TREE_TYPE (type)), 1);
/ tree_to_uhwi (TYPE_SIZE_UNIT (TREE_TYPE (type)));
if (INTEGRAL_TYPE_P (TREE_TYPE (type))
&& parts_per_word >= 4
@ -487,7 +487,7 @@ expand_vector_divmod (gimple_stmt_iterator *gsi, tree type, tree op0,
if (!tree_fits_uhwi_p (cst2))
return NULL_TREE;
d2 = tree_low_cst (cst2, 1) & mask;
d2 = tree_to_uhwi (cst2) & mask;
if (d2 == 0)
return NULL_TREE;
this_pre_shift = floor_log2 (d2 & -d2);

@ -3099,10 +3099,10 @@ vect_model_reduction_cost (stmt_vec_info stmt_info, enum tree_code reduc_code,
}
else
{
int vec_size_in_bits = tree_low_cst (TYPE_SIZE (vectype), 1);
int vec_size_in_bits = tree_to_uhwi (TYPE_SIZE (vectype));
tree bitsize =
TYPE_SIZE (TREE_TYPE (gimple_assign_lhs (orig_stmt)));
int element_bitsize = tree_low_cst (bitsize, 1);
int element_bitsize = tree_to_uhwi (bitsize);
int nelements = vec_size_in_bits / element_bitsize;
optab = optab_for_tree_code (code, vectype, optab_default);
@ -4115,8 +4115,8 @@ vect_create_epilog_for_reduction (vec<tree> vect_defs, gimple stmt,
enum tree_code shift_code = ERROR_MARK;
bool have_whole_vector_shift = true;
int bit_offset;
int element_bitsize = tree_low_cst (bitsize, 1);
int vec_size_in_bits = tree_low_cst (TYPE_SIZE (vectype), 1);
int element_bitsize = tree_to_uhwi (bitsize);
int vec_size_in_bits = tree_to_uhwi (TYPE_SIZE (vectype));
tree vec_temp;
if (optab_handler (vec_shr_optab, mode) != CODE_FOR_nothing)
@ -4193,7 +4193,7 @@ vect_create_epilog_for_reduction (vec<tree> vect_defs, gimple stmt,
dump_printf_loc (MSG_NOTE, vect_location,
"Reduce using scalar code.\n");
vec_size_in_bits = tree_low_cst (TYPE_SIZE (vectype), 1);
vec_size_in_bits = tree_to_uhwi (TYPE_SIZE (vectype));
FOR_EACH_VEC_ELT (new_phis, i, new_phi)
{
if (gimple_code (new_phi) == GIMPLE_PHI)

@ -1636,13 +1636,13 @@ vect_recog_rotate_pattern (vec<gimple> *stmts, tree *type_in, tree *type_out)
if (TREE_CODE (def) == INTEGER_CST)
{
if (!tree_fits_uhwi_p (def)
|| (unsigned HOST_WIDE_INT) tree_low_cst (def, 1)
|| (unsigned HOST_WIDE_INT) tree_to_uhwi (def)
>= GET_MODE_PRECISION (TYPE_MODE (type))
|| integer_zerop (def))
return NULL;
def2 = build_int_cst (stype,
GET_MODE_PRECISION (TYPE_MODE (type))
- tree_low_cst (def, 1));
- tree_to_uhwi (def));
}
else
{

@ -5017,13 +5017,13 @@ register_edge_assert_for_2 (tree name, edge e, gimple_stmt_iterator bsi,
if (TREE_CODE (name2) == SSA_NAME
&& tree_fits_uhwi_p (cst2)
&& INTEGRAL_TYPE_P (TREE_TYPE (name2))
&& IN_RANGE (tree_low_cst (cst2, 1), 1, prec - 1)
&& IN_RANGE (tree_to_uhwi (cst2), 1, prec - 1)
&& prec <= HOST_BITS_PER_DOUBLE_INT
&& prec == GET_MODE_PRECISION (TYPE_MODE (TREE_TYPE (val)))
&& live_on_edge (e, name2)
&& !has_single_use (name2))
{
mask = double_int::mask (tree_low_cst (cst2, 1));
mask = double_int::mask (tree_to_uhwi (cst2));
val2 = fold_binary (LSHIFT_EXPR, TREE_TYPE (val), val, cst2);
}
}

@ -2211,20 +2211,20 @@ tree_ctz (const_tree expr)
case LSHIFT_EXPR:
ret1 = tree_ctz (TREE_OPERAND (expr, 0));
if (tree_fits_uhwi_p (TREE_OPERAND (expr, 1))
&& ((unsigned HOST_WIDE_INT) tree_low_cst (TREE_OPERAND (expr, 1), 1)
&& ((unsigned HOST_WIDE_INT) tree_to_uhwi (TREE_OPERAND (expr, 1))
< (unsigned HOST_WIDE_INT) prec))
{
ret2 = tree_low_cst (TREE_OPERAND (expr, 1), 1);
ret2 = tree_to_uhwi (TREE_OPERAND (expr, 1));
return MIN (ret1 + ret2, prec);
}
return ret1;
case RSHIFT_EXPR:
if (tree_fits_uhwi_p (TREE_OPERAND (expr, 1))
&& ((unsigned HOST_WIDE_INT) tree_low_cst (TREE_OPERAND (expr, 1), 1)
&& ((unsigned HOST_WIDE_INT) tree_to_uhwi (TREE_OPERAND (expr, 1))
< (unsigned HOST_WIDE_INT) prec))
{
ret1 = tree_ctz (TREE_OPERAND (expr, 0));
ret2 = tree_low_cst (TREE_OPERAND (expr, 1), 1);
ret2 = tree_to_uhwi (TREE_OPERAND (expr, 1));
if (ret1 > ret2)
return ret1 - ret2;
}
@ -2675,7 +2675,7 @@ max_int_size_in_bytes (const_tree type)
size_tree = TYPE_ARRAY_MAX_SIZE (type);
if (size_tree && tree_fits_uhwi_p (size_tree))
size = tree_low_cst (size_tree, 1);
size = tree_to_uhwi (size_tree);
}
/* If we still haven't been able to get a size, see if the language
@ -2686,7 +2686,7 @@ max_int_size_in_bytes (const_tree type)
size_tree = lang_hooks.types.max_size (type);
if (size_tree && tree_fits_uhwi_p (size_tree))
size = tree_low_cst (size_tree, 1);
size = tree_to_uhwi (size_tree);
}
return size;
@ -6120,7 +6120,7 @@ find_atomic_core_type (tree type)
if (TYPE_SIZE (type) == NULL_TREE)
return NULL_TREE;
HOST_WIDE_INT type_size = tree_low_cst (TYPE_SIZE (type), 1);
HOST_WIDE_INT type_size = tree_to_uhwi (TYPE_SIZE (type));
switch (type_size)
{
case 8:
@ -7709,7 +7709,7 @@ build_nonstandard_integer_type (unsigned HOST_WIDE_INT precision,
ret = itype;
if (tree_fits_uhwi_p (TYPE_MAX_VALUE (itype)))
ret = type_hash_canon (tree_low_cst (TYPE_MAX_VALUE (itype), 1), itype);
ret = type_hash_canon (tree_to_uhwi (TYPE_MAX_VALUE (itype)), itype);
if (precision <= MAX_INT_CACHED_PREC)
nonstandard_integer_type_cache[precision + unsignedp] = ret;
@ -8548,7 +8548,7 @@ get_narrower (tree op, int *unsignedp_ptr)
&& tree_fits_uhwi_p (DECL_SIZE (TREE_OPERAND (op, 1))))
{
unsigned HOST_WIDE_INT innerprec
= tree_low_cst (DECL_SIZE (TREE_OPERAND (op, 1)), 1);
= tree_to_uhwi (DECL_SIZE (TREE_OPERAND (op, 1)));
int unsignedp = (DECL_UNSIGNED (TREE_OPERAND (op, 1))
|| TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (op, 1))));
tree type = lang_hooks.types.type_for_size (innerprec, unsignedp);
@ -11971,7 +11971,7 @@ get_binfo_at_offset (tree binfo, HOST_WIDE_INT offset, tree expected_type)
continue;
pos = int_bit_position (fld);
size = tree_low_cst (DECL_SIZE (fld), 1);
size = tree_to_uhwi (DECL_SIZE (fld));
if (pos <= offset && (pos + size) > offset)
break;
}

@ -448,7 +448,7 @@ instrument_builtin_call (gimple_stmt_iterator *gsi)
case fetch_op:
last_arg = gimple_call_arg (stmt, num - 1);
if (!tree_fits_uhwi_p (last_arg)
|| (unsigned HOST_WIDE_INT) tree_low_cst (last_arg, 1)
|| (unsigned HOST_WIDE_INT) tree_to_uhwi (last_arg)
> MEMMODEL_SEQ_CST)
return;
gimple_call_set_fndecl (stmt, decl);
@ -520,11 +520,11 @@ instrument_builtin_call (gimple_stmt_iterator *gsi)
for (j = 0; j < 6; j++)
args[j] = gimple_call_arg (stmt, j);
if (!tree_fits_uhwi_p (args[4])
|| (unsigned HOST_WIDE_INT) tree_low_cst (args[4], 1)
|| (unsigned HOST_WIDE_INT) tree_to_uhwi (args[4])
> MEMMODEL_SEQ_CST)
return;
if (!tree_fits_uhwi_p (args[5])
|| (unsigned HOST_WIDE_INT) tree_low_cst (args[5], 1)
|| (unsigned HOST_WIDE_INT) tree_to_uhwi (args[5])
> MEMMODEL_SEQ_CST)
return;
update_gimple_call (gsi, decl, 5, args[0], args[1], args[2],

@ -234,7 +234,7 @@ static unsigned short
get_ubsan_type_info_for_type (tree type)
{
gcc_assert (TYPE_SIZE (type) && tree_fits_uhwi_p (TYPE_SIZE (type)));
int prec = exact_log2 (tree_low_cst (TYPE_SIZE (type), 1));
int prec = exact_log2 (tree_to_uhwi (TYPE_SIZE (type)));
gcc_assert (prec != -1);
return (prec << 1) | !TYPE_UNSIGNED (type);
}

@ -1889,7 +1889,7 @@ assemble_noswitch_variable (tree decl, const char *name, section *sect,
{
unsigned HOST_WIDE_INT size, rounded;
size = tree_low_cst (DECL_SIZE_UNIT (decl), 1);
size = tree_to_uhwi (DECL_SIZE_UNIT (decl));
rounded = size;
if ((flag_sanitize & SANITIZE_ADDRESS) && asan_protect_global (decl))
@ -1936,11 +1936,11 @@ assemble_variable_contents (tree decl, const char *name,
&& !initializer_zerop (DECL_INITIAL (decl)))
/* Output the actual data. */
output_constant (DECL_INITIAL (decl),
tree_low_cst (DECL_SIZE_UNIT (decl), 1),
tree_to_uhwi (DECL_SIZE_UNIT (decl)),
get_variable_align (decl));
else
/* Leave space for it. */
assemble_zeros (tree_low_cst (DECL_SIZE_UNIT (decl), 1));
assemble_zeros (tree_to_uhwi (DECL_SIZE_UNIT (decl)));
}
}
@ -2126,7 +2126,7 @@ assemble_variable (tree decl, int top_level ATTRIBUTE_UNUSED,
if (asan_protected)
{
unsigned HOST_WIDE_INT int size
= tree_low_cst (DECL_SIZE_UNIT (decl), 1);
= tree_to_uhwi (DECL_SIZE_UNIT (decl));
assemble_zeros (asan_red_zone_size (size));
}
}
@ -2717,7 +2717,7 @@ decode_addr_const (tree exp, struct addr_const *value)
else if (TREE_CODE (target) == ARRAY_REF
|| TREE_CODE (target) == ARRAY_RANGE_REF)
{
offset += (tree_low_cst (TYPE_SIZE_UNIT (TREE_TYPE (target)), 1)
offset += (tree_to_uhwi (TYPE_SIZE_UNIT (TREE_TYPE (target)))
* tree_to_shwi (TREE_OPERAND (target, 1)));
target = TREE_OPERAND (target, 0);
}
@ -4876,7 +4876,7 @@ output_constructor_regular_field (oc_local_state *local)
double_int idx = tree_to_double_int (local->index)
- tree_to_double_int (local->min_index);
idx = idx.sext (prec);
fieldpos = (tree_low_cst (TYPE_SIZE_UNIT (TREE_TYPE (local->val)), 1)
fieldpos = (tree_to_uhwi (TYPE_SIZE_UNIT (TREE_TYPE (local->val)))
* idx.low);
}
else if (local->field != NULL_TREE)
@ -4926,7 +4926,7 @@ output_constructor_regular_field (oc_local_state *local)
gcc_assert (!fieldsize || !DECL_CHAIN (local->field));
}
else
fieldsize = tree_low_cst (DECL_SIZE_UNIT (local->field), 1);
fieldsize = tree_to_uhwi (DECL_SIZE_UNIT (local->field));
}
else
fieldsize = int_size_in_bytes (TREE_TYPE (local->type));
@ -4951,8 +4951,8 @@ output_constructor_bitfield (oc_local_state *local, unsigned int bit_offset)
/* Bit size of this element. */
HOST_WIDE_INT ebitsize
= (local->field
? tree_low_cst (DECL_SIZE (local->field), 1)
: tree_low_cst (TYPE_SIZE (TREE_TYPE (local->type)), 1));
? tree_to_uhwi (DECL_SIZE (local->field))
: tree_to_uhwi (TYPE_SIZE (TREE_TYPE (local->type))));
/* Relative index of this element if this is an array component. */
HOST_WIDE_INT relative_index
@ -7070,7 +7070,7 @@ place_block_symbol (rtx symbol)
{
decl = SYMBOL_REF_DECL (symbol);
alignment = get_variable_align (decl);
size = tree_low_cst (DECL_SIZE_UNIT (decl), 1);
size = tree_to_uhwi (DECL_SIZE_UNIT (decl));
if ((flag_sanitize & SANITIZE_ADDRESS)
&& asan_protect_global (decl))
{
@ -7236,7 +7236,7 @@ output_object_block (struct object_block *block)
HOST_WIDE_INT size;
decl = SYMBOL_REF_DECL (symbol);
assemble_variable_contents (decl, XSTR (symbol, 0), false);
size = tree_low_cst (DECL_SIZE_UNIT (decl), 1);
size = tree_to_uhwi (DECL_SIZE_UNIT (decl));
offset += size;
if ((flag_sanitize & SANITIZE_ADDRESS)
&& asan_protect_global (decl))