From 15b19a7dbf433b1936eecd042ce7dedbb23220f9 Mon Sep 17 00:00:00 2001
From: Olivier Hainque
Date: Wed, 16 Apr 2003 23:30:35 +0200
Subject: [PATCH] expr.c (store_field): Force usage of bitfield instructions
 when the field position requires it...

	* expr.c (store_field): Force usage of bitfield instructions when
	the field position requires it, whatever SLOW_UNALIGNED_ACCESS.
	(expand_expr, case BIT_FIELD_REF): likewise.

From-SVN: r65701
---
 gcc/ChangeLog |  6 ++++++
 gcc/expr.c    | 10 +++++-----
 2 files changed, 11 insertions(+), 5 deletions(-)

diff --git a/gcc/ChangeLog b/gcc/ChangeLog
index f95d7026759..bb692cf621d 100644
--- a/gcc/ChangeLog
+++ b/gcc/ChangeLog
@@ -1,3 +1,9 @@
+2003-04-16  Olivier Hainque
+
+	* expr.c (store_field): Force usage of bitfield instructions when
+	the field position requires it, whatever SLOW_UNALIGNED_ACCESS.
+	(expand_expr, case BIT_FIELD_REF): likewise.
+
 2003-04-16  Mark Mitchell
 
 	PR middle-end/8866
diff --git a/gcc/expr.c b/gcc/expr.c
index d2034241333..abf0875c849 100644
--- a/gcc/expr.c
+++ b/gcc/expr.c
@@ -5564,8 +5564,9 @@ store_field (target, bitsize, bitpos, mode, exp, value_mode, unsignedp, type,
       || GET_CODE (target) == SUBREG
       /* If the field isn't aligned enough to store as an ordinary memref,
 	 store it as a bit field.  */
-      || (mode != BLKmode && SLOW_UNALIGNED_ACCESS (mode, MEM_ALIGN (target))
-	  && (MEM_ALIGN (target) < GET_MODE_ALIGNMENT (mode)
+      || (mode != BLKmode
+	  && ((SLOW_UNALIGNED_ACCESS (mode, MEM_ALIGN (target))
+	       && (MEM_ALIGN (target) < GET_MODE_ALIGNMENT (mode)))
 	      || bitpos % GET_MODE_ALIGNMENT (mode)))
       /* If the RHS and field are a constant size and the size of the
 	 RHS isn't the same size as the bitfield, we must use bitfield
@@ -7504,9 +7505,8 @@ expand_expr (exp, target, tmode, modifier)
 	    /* If the field isn't aligned enough to fetch as a memref,
 	       fetch it as a bit field.  */
 	    || (mode1 != BLKmode
-		&& SLOW_UNALIGNED_ACCESS (mode1, MEM_ALIGN (op0))
-		&& ((TYPE_ALIGN (TREE_TYPE (tem))
-		     < GET_MODE_ALIGNMENT (mode))
+		&& ((TYPE_ALIGN (TREE_TYPE (tem)) < GET_MODE_ALIGNMENT (mode)
+		     && SLOW_UNALIGNED_ACCESS (mode1, MEM_ALIGN (op0)))
 		    || (bitpos % GET_MODE_ALIGNMENT (mode) != 0)))
 	    /* If the type and the field are a constant size and the
 	       size of the type isn't the same size as the bitfield,
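
Illustration (an editor's sketch, not part of the commit): before this
change, both paths tested SLOW_UNALIGNED_ACCESS first, so on targets where
unaligned accesses are cheap a field whose bit position is not a multiple
of the access mode's alignment could be accessed as an ordinary memref.
The reassociated conditions make the `bitpos % GET_MODE_ALIGNMENT (mode)'
test apply unconditionally, forcing the bit-field routines in that case.
A minimal C case of the kind affected, assuming a 32-bit int; the struct
and the function names get_f/set_f are hypothetical, chosen only for this
sketch:

    /* With the packed attribute, `f' starts at bit offset 8 within the
       struct, so bitpos % GET_MODE_ALIGNMENT (SImode) is nonzero and the
       access must be performed with bit-field instructions rather than an
       ordinary SImode memref, whatever SLOW_UNALIGNED_ACCESS says.  */
    struct s
    {
      char c;			/* occupies bits 0..7 */
      int f;			/* starts at bit 8: misaligned for SImode */
    } __attribute__ ((packed));

    int
    get_f (struct s *p)		/* read side: the expand_expr path */
    {
      return p->f;
    }

    void
    set_f (struct s *p, int v)	/* write side: the store_field path */
    {
      p->f = v;
    }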