re PR fortran/15080 (Forall bounds not calculated correctly (forall_3.f90))

PR fortran/15080
	* trans-stmt.c (generate_loop_for_temp_to_lhs): Remove SIZE and COUNT2
	arguments.  If LSS is gfc_ss_terminator, increment COUNT1 by 1, instead
	of incrementing COUNT2 and using COUNT1+COUNT2 increment COUNT1 and use
	just that as index.
	(generate_loop_for_rhs_to_temp): Likewise.
	(compute_overall_iter_number): Add INNER_SIZE_BODY argument.
If non-NULL, add it to body.
	(allocate_temp_for_forall_nest_1): New function, split from
	allocate_temp_for_forall_nest.
	(allocate_temp_for_forall_nest): Add INNER_SIZE_BODY argument,
	propagate it down to compute_overall_iter_number.  Use
	allocate_temp_for_forall_nest_1.
	(gfc_trans_assign_need_temp): Remove COUNT2.  Call
	compute_inner_temp_size into a new stmtblock_t.  Adjust calls to
	allocate_temp_for_forall_nest, generate_loop_for_rhs_to_temp
	and generate_loop_for_temp_to_lhs.
	(gfc_trans_pointer_assign_need_temp): Adjust calls to
	allocate_temp_for_forall_nest.
	(gfc_evaluate_where_mask): Call compute_inner_temp_size into a new
	stmtblock_t.  Call compute_overall_iter_number just once, then
	allocate_temp_for_forall_nest_1 twice with the same size.
	Initialize mask indexes if nested_forall_info != NULL.
	(gfc_trans_where_2): Initialize mask indexes before calling
	gfc_trans_nested_forall_loop.

	* gfortran.fortran-torture/execute/forall_3.f90: Remove comment
	about the test failing.
	* gfortran.fortran-torture/execute/where_7.f90: New test.
	* gfortran.fortran-torture/execute/where_8.f90: New test.

From-SVN: r99812
This commit is contained in:
Jakub Jelinek 2005-05-17 08:31:51 +02:00 committed by Jakub Jelinek
parent 2b5bf0e27e
commit 8de1f441bb
6 changed files with 278 additions and 92 deletions

View File

@ -1,3 +1,31 @@
2005-05-17 Jakub Jelinek <jakub@redhat.com>
PR fortran/15080
* trans-stmt.c (generate_loop_for_temp_to_lhs): Remove SIZE and COUNT2
arguments. If LSS is gfc_ss_terminator, increment COUNT1 by 1, instead
of incrementing COUNT2 and using COUNT1+COUNT2 increment COUNT1 and use
just that as index.
(generate_loop_for_rhs_to_temp): Likewise.
(compute_overall_iter_number): Add INNER_SIZE_BODY argument.
If non-NULL, add it to body.
(allocate_temp_for_forall_nest_1): New function, split from
allocate_temp_for_forall_nest.
(allocate_temp_for_forall_nest): Add INNER_SIZE_BODY argument,
propagate it down to compute_overall_iter_number. Use
allocate_temp_for_forall_nest_1.
(gfc_trans_assign_need_temp): Remove COUNT2. Call
compute_inner_temp_size into a new stmtblock_t. Adjust calls to
allocate_temp_for_forall_nest, generate_loop_for_rhs_to_temp
and generate_loop_for_temp_to_lhs.
(gfc_trans_pointer_assign_need_temp): Adjust calls to
allocate_temp_for_forall_nest.
(gfc_evaluate_where_mask): Call compute_inner_temp_size into a new
stmtblock_t. Call compute_overall_iter_number just once, then
allocate_temp_for_forall_nest_1 twice with the same size.
Initialize mask indexes if nested_forall_info != NULL.
(gfc_trans_where_2): Initialize mask indexes before calling
gfc_trans_nested_forall_loop.
2005-05-15 Feng Wang <fengwang@nudt.edu.cn>
Jerry DeLisle <jvdelisle@verizon.net>

View File

@ -1516,15 +1516,14 @@ gfc_do_allocate (tree bytesize, tree size, tree * pdata, stmtblock_t * pblock,
/* Generate codes to copy the temporary to the actual lhs. */
static tree
generate_loop_for_temp_to_lhs (gfc_expr *expr, tree tmp1, tree size,
tree count3, tree count1, tree count2, tree wheremask)
generate_loop_for_temp_to_lhs (gfc_expr *expr, tree tmp1, tree count3,
tree count1, tree wheremask)
{
gfc_ss *lss;
gfc_se lse, rse;
stmtblock_t block, body;
gfc_loopinfo loop1;
tree tmp, tmp2;
tree index;
tree wheremaskexpr;
/* Walk the lhs. */
@ -1548,8 +1547,10 @@ generate_loop_for_temp_to_lhs (gfc_expr *expr, tree tmp1, tree size,
gfc_add_block_to_block (&block, &lse.post);
/* Increment the count1. */
tmp = fold_build2 (PLUS_EXPR, TREE_TYPE (count1), count1, size);
tmp = fold_build2 (PLUS_EXPR, TREE_TYPE (count1), count1,
gfc_index_one_node);
gfc_add_modify_expr (&block, count1, tmp);
tmp = gfc_finish_block (&block);
}
else
@ -1569,8 +1570,6 @@ generate_loop_for_temp_to_lhs (gfc_expr *expr, tree tmp1, tree size,
gfc_conv_loop_setup (&loop1);
gfc_mark_ss_chain_used (lss, 1);
/* Initialize count2. */
gfc_add_modify_expr (&block, count2, gfc_index_zero_node);
/* Start the scalarized loop body. */
gfc_start_scalarized_body (&loop1, &body);
@ -1581,11 +1580,7 @@ generate_loop_for_temp_to_lhs (gfc_expr *expr, tree tmp1, tree size,
/* Form the expression of the temporary. */
if (lss != gfc_ss_terminator)
{
index = fold_build2 (PLUS_EXPR, gfc_array_index_type,
count1, count2);
rse.expr = gfc_build_array_ref (tmp1, index);
}
rse.expr = gfc_build_array_ref (tmp1, count1);
/* Translate expr. */
gfc_conv_expr (&lse, expr);
@ -1596,31 +1591,31 @@ generate_loop_for_temp_to_lhs (gfc_expr *expr, tree tmp1, tree size,
if (wheremask)
{
wheremaskexpr = gfc_build_array_ref (wheremask, count3);
tmp2 = TREE_CHAIN (wheremask);
while (tmp2)
{
tmp1 = gfc_build_array_ref (tmp2, count3);
wheremaskexpr = build2 (TRUTH_AND_EXPR, TREE_TYPE (tmp1),
tmp2 = TREE_CHAIN (wheremask);
while (tmp2)
{
tmp1 = gfc_build_array_ref (tmp2, count3);
wheremaskexpr = build2 (TRUTH_AND_EXPR, TREE_TYPE (tmp1),
wheremaskexpr, tmp1);
tmp2 = TREE_CHAIN (tmp2);
}
tmp = build3_v (COND_EXPR, wheremaskexpr, tmp, build_empty_stmt ());
tmp2 = TREE_CHAIN (tmp2);
}
tmp = build3_v (COND_EXPR, wheremaskexpr, tmp, build_empty_stmt ());
}
gfc_add_expr_to_block (&body, tmp);
/* Increment count2. */
/* Increment count1. */
tmp = fold_build2 (PLUS_EXPR, gfc_array_index_type,
count2, gfc_index_one_node);
gfc_add_modify_expr (&body, count2, tmp);
count1, gfc_index_one_node);
gfc_add_modify_expr (&body, count1, tmp);
/* Increment count3. */
if (count3)
{
tmp = fold_build2 (PLUS_EXPR, gfc_array_index_type,
{
tmp = fold_build2 (PLUS_EXPR, gfc_array_index_type,
count3, gfc_index_one_node);
gfc_add_modify_expr (&body, count3, tmp);
}
gfc_add_modify_expr (&body, count3, tmp);
}
/* Generate the copying loops. */
gfc_trans_scalarizing_loops (&loop1, &body);
@ -1628,9 +1623,6 @@ generate_loop_for_temp_to_lhs (gfc_expr *expr, tree tmp1, tree size,
gfc_add_block_to_block (&block, &loop1.post);
gfc_cleanup_loop (&loop1);
/* Increment count1. */
tmp = fold_build2 (PLUS_EXPR, TREE_TYPE (count1), count1, size);
gfc_add_modify_expr (&block, count1, tmp);
tmp = gfc_finish_block (&block);
}
return tmp;
@ -1642,15 +1634,15 @@ generate_loop_for_temp_to_lhs (gfc_expr *expr, tree tmp1, tree size,
not be freed. */
static tree
generate_loop_for_rhs_to_temp (gfc_expr *expr2, tree tmp1, tree size,
tree count3, tree count1, tree count2,
gfc_ss *lss, gfc_ss *rss, tree wheremask)
generate_loop_for_rhs_to_temp (gfc_expr *expr2, tree tmp1, tree count3,
tree count1, gfc_ss *lss, gfc_ss *rss,
tree wheremask)
{
stmtblock_t block, body1;
gfc_loopinfo loop;
gfc_se lse;
gfc_se rse;
tree tmp, tmp2, index;
tree tmp, tmp2;
tree wheremaskexpr;
gfc_start_block (&block);
@ -1666,9 +1658,6 @@ generate_loop_for_rhs_to_temp (gfc_expr *expr2, tree tmp1, tree size,
}
else
{
/* Initialize count2. */
gfc_add_modify_expr (&block, count2, gfc_index_zero_node);
/* Initialize the loop. */
gfc_init_loopinfo (&loop);
@ -1689,8 +1678,7 @@ generate_loop_for_rhs_to_temp (gfc_expr *expr2, tree tmp1, tree size,
gfc_conv_expr (&rse, expr2);
/* Form the expression of the temporary. */
index = fold_build2 (PLUS_EXPR, gfc_array_index_type, count1, count2);
lse.expr = gfc_build_array_ref (tmp1, index);
lse.expr = gfc_build_array_ref (tmp1, count1);
}
/* Use the scalar assignment. */
@ -1702,12 +1690,12 @@ generate_loop_for_rhs_to_temp (gfc_expr *expr2, tree tmp1, tree size,
wheremaskexpr = gfc_build_array_ref (wheremask, count3);
tmp2 = TREE_CHAIN (wheremask);
while (tmp2)
{
tmp1 = gfc_build_array_ref (tmp2, count3);
wheremaskexpr = build2 (TRUTH_AND_EXPR, TREE_TYPE (tmp1),
{
tmp1 = gfc_build_array_ref (tmp2, count3);
wheremaskexpr = build2 (TRUTH_AND_EXPR, TREE_TYPE (tmp1),
wheremaskexpr, tmp1);
tmp2 = TREE_CHAIN (tmp2);
}
tmp2 = TREE_CHAIN (tmp2);
}
tmp = build3_v (COND_EXPR, wheremaskexpr, tmp, build_empty_stmt ());
}
@ -1716,21 +1704,26 @@ generate_loop_for_rhs_to_temp (gfc_expr *expr2, tree tmp1, tree size,
if (lss == gfc_ss_terminator)
{
gfc_add_block_to_block (&block, &body1);
/* Increment count1. */
tmp = fold_build2 (PLUS_EXPR, TREE_TYPE (count1), count1,
gfc_index_one_node);
gfc_add_modify_expr (&block, count1, tmp);
}
else
{
/* Increment count2. */
/* Increment count1. */
tmp = fold_build2 (PLUS_EXPR, gfc_array_index_type,
count2, gfc_index_one_node);
gfc_add_modify_expr (&body1, count2, tmp);
count1, gfc_index_one_node);
gfc_add_modify_expr (&body1, count1, tmp);
/* Increment count3. */
if (count3)
{
tmp = fold_build2 (PLUS_EXPR, gfc_array_index_type,
{
tmp = fold_build2 (PLUS_EXPR, gfc_array_index_type,
count3, gfc_index_one_node);
gfc_add_modify_expr (&body1, count3, tmp);
}
gfc_add_modify_expr (&body1, count3, tmp);
}
/* Generate the copying loops. */
gfc_trans_scalarizing_loops (&loop, &body1);
@ -1740,11 +1733,8 @@ generate_loop_for_rhs_to_temp (gfc_expr *expr2, tree tmp1, tree size,
gfc_cleanup_loop (&loop);
/* TODO: Reuse lss and rss when copying temp->lhs. Need to be careful
as tree nodes in SS may not be valid in different scope. */
as tree nodes in SS may not be valid in different scope. */
}
/* Increment count1. */
tmp = fold_build2 (PLUS_EXPR, TREE_TYPE (count1), count1, size);
gfc_add_modify_expr (&block, count1, tmp);
tmp = gfc_finish_block (&block);
return tmp;
@ -1822,7 +1812,7 @@ compute_inner_temp_size (gfc_expr *expr1, gfc_expr *expr2,
static tree
compute_overall_iter_number (forall_info *nested_forall_info, tree inner_size,
stmtblock_t *block)
stmtblock_t *inner_size_body, stmtblock_t *block)
{
tree tmp, number;
stmtblock_t body;
@ -1832,6 +1822,8 @@ compute_overall_iter_number (forall_info *nested_forall_info, tree inner_size,
gfc_add_modify_expr (block, number, gfc_index_zero_node);
gfc_start_block (&body);
if (inner_size_body)
gfc_add_block_to_block (&body, inner_size_body);
if (nested_forall_info)
tmp = build2 (PLUS_EXPR, gfc_array_index_type, number,
inner_size);
@ -1850,22 +1842,17 @@ compute_overall_iter_number (forall_info *nested_forall_info, tree inner_size,
}
/* Allocate temporary for forall construct according to the information in
nested_forall_info. INNER_SIZE is the size of temporary needed in the
assignment inside forall. PTEMP1 is returned for space free. */
/* Allocate temporary for forall construct. SIZE is the size of temporary
needed. PTEMP1 is returned for space free. */
static tree
allocate_temp_for_forall_nest (forall_info * nested_forall_info, tree type,
tree inner_size, stmtblock_t * block,
tree * ptemp1)
allocate_temp_for_forall_nest_1 (tree type, tree size, stmtblock_t * block,
tree * ptemp1)
{
tree unit;
tree temp1;
tree tmp;
tree bytesize, size;
/* Calculate the total size of temporary needed in forall construct. */
size = compute_overall_iter_number (nested_forall_info, inner_size, block);
tree bytesize;
unit = TYPE_SIZE_UNIT (type);
bytesize = fold_build2 (MULT_EXPR, gfc_array_index_type, size, unit);
@ -1882,7 +1869,56 @@ allocate_temp_for_forall_nest (forall_info * nested_forall_info, tree type,
}
/* Handle assignments inside forall which need temporary. */
/* Allocate temporary for forall construct according to the information in
nested_forall_info. INNER_SIZE is the size of temporary needed in the
assignment inside forall. PTEMP1 is returned for space free. */
static tree
allocate_temp_for_forall_nest (forall_info * nested_forall_info, tree type,
tree inner_size, stmtblock_t * inner_size_body,
stmtblock_t * block, tree * ptemp1)
{
tree size;
/* Calculate the total size of temporary needed in forall construct. */
size = compute_overall_iter_number (nested_forall_info, inner_size,
inner_size_body, block);
return allocate_temp_for_forall_nest_1 (type, size, block, ptemp1);
}
/* Handle assignments inside forall which need temporary.
forall (i=start:end:stride; maskexpr)
e<i> = f<i>
end forall
(where e,f<i> are arbitrary expressions possibly involving i
and there is a dependency between e<i> and f<i>)
Translates to:
masktmp(:) = maskexpr(:)
maskindex = 0;
count1 = 0;
num = 0;
for (i = start; i <= end; i += stride)
num += SIZE (f<i>)
count1 = 0;
ALLOCATE (tmp(num))
for (i = start; i <= end; i += stride)
{
if (masktmp[maskindex++])
tmp[count1++] = f<i>
}
maskindex = 0;
count1 = 0;
for (i = start; i <= end; i += stride)
{
if (masktmp[maskindex++])
e<i> = tmp[count1++]
}
DEALLOCATE (tmp)
*/
static void
gfc_trans_assign_need_temp (gfc_expr * expr1, gfc_expr * expr2, tree wheremask,
forall_info * nested_forall_info,
@ -1891,17 +1927,16 @@ gfc_trans_assign_need_temp (gfc_expr * expr1, gfc_expr * expr2, tree wheremask,
tree type;
tree inner_size;
gfc_ss *lss, *rss;
tree count, count1, count2;
tree count, count1;
tree tmp, tmp1;
tree ptemp1;
tree mask, maskindex;
forall_info *forall_tmp;
stmtblock_t inner_size_body;
/* Create vars. count1 is the current iterator number of the nested forall.
count2 is the current iterator number of the inner loops needed in the
assignment. */
/* Create vars. count1 is the current iterator number of the nested
forall. */
count1 = gfc_create_var (gfc_array_index_type, "count1");
count2 = gfc_create_var (gfc_array_index_type, "count2");
/* Count is the wheremask index. */
if (wheremask)
@ -1917,15 +1952,17 @@ gfc_trans_assign_need_temp (gfc_expr * expr1, gfc_expr * expr2, tree wheremask,
/* Calculate the size of temporary needed in the assignment. Return loop, lss
and rss which are used in function generate_loop_for_rhs_to_temp(). */
inner_size = compute_inner_temp_size (expr1, expr2, block, &lss, &rss);
gfc_init_block (&inner_size_body);
inner_size = compute_inner_temp_size (expr1, expr2, &inner_size_body,
&lss, &rss);
/* The type of LHS. Used in function allocate_temp_for_forall_nest */
type = gfc_typenode_for_spec (&expr1->ts);
/* Allocate temporary for nested forall construct according to the
information in nested_forall_info and inner_size. */
tmp1 = allocate_temp_for_forall_nest (nested_forall_info, type,
inner_size, block, &ptemp1);
tmp1 = allocate_temp_for_forall_nest (nested_forall_info, type, inner_size,
&inner_size_body, block, &ptemp1);
/* Initialize the maskindexes. */
forall_tmp = nested_forall_info;
@ -1939,8 +1976,8 @@ gfc_trans_assign_need_temp (gfc_expr * expr1, gfc_expr * expr2, tree wheremask,
}
/* Generate codes to copy rhs to the temporary . */
tmp = generate_loop_for_rhs_to_temp (expr2, tmp1, inner_size, count,
count1, count2, lss, rss, wheremask);
tmp = generate_loop_for_rhs_to_temp (expr2, tmp1, count, count1, lss, rss,
wheremask);
/* Generate body and loops according to the information in
nested_forall_info. */
@ -1966,8 +2003,7 @@ gfc_trans_assign_need_temp (gfc_expr * expr1, gfc_expr * expr2, tree wheremask,
gfc_add_modify_expr (block, count, gfc_index_zero_node);
/* Generate codes to copy the temporary to lhs. */
tmp = generate_loop_for_temp_to_lhs (expr1, tmp1, inner_size, count,
count1, count2, wheremask);
tmp = generate_loop_for_temp_to_lhs (expr1, tmp1, count, count1, wheremask);
/* Generate body and loops according to the information in
nested_forall_info. */
@ -2020,8 +2056,8 @@ gfc_trans_pointer_assign_need_temp (gfc_expr * expr1, gfc_expr * expr2,
/* Allocate temporary for nested forall construct according to the
information in nested_forall_info and inner_size. */
tmp1 = allocate_temp_for_forall_nest (nested_forall_info,
type, inner_size, block, &ptemp1);
tmp1 = allocate_temp_for_forall_nest (nested_forall_info, type,
inner_size, NULL, block, &ptemp1);
gfc_start_block (&body);
gfc_init_se (&lse, NULL);
lse.expr = gfc_build_array_ref (tmp1, count);
@ -2110,7 +2146,7 @@ gfc_trans_pointer_assign_need_temp (gfc_expr * expr1, gfc_expr * expr2,
/* Allocate temporary for nested forall construct. */
tmp1 = allocate_temp_for_forall_nest (nested_forall_info, parmtype,
inner_size, block, &ptemp1);
inner_size, NULL, block, &ptemp1);
gfc_start_block (&body);
gfc_init_se (&lse, NULL);
lse.expr = gfc_build_array_ref (tmp1, count);
@ -2201,7 +2237,7 @@ gfc_trans_pointer_assign_need_temp (gfc_expr * expr1, gfc_expr * expr2,
end forall
(where e,f,g,h<i> are arbitrary expressions possibly involving i)
Translates to:
count = ((end + 1 - start) / staride)
count = ((end + 1 - start) / stride)
masktmp(:) = maskexpr(:)
maskindex = 0;
@ -2567,8 +2603,8 @@ gfc_evaluate_where_mask (gfc_expr * me, forall_info * nested_forall_info,
gfc_ss *lss, *rss;
gfc_loopinfo loop;
tree ptemp1, ntmp, ptemp2;
tree inner_size;
stmtblock_t body, body1;
tree inner_size, size;
stmtblock_t body, body1, inner_size_body;
gfc_se lse, rse;
tree count;
tree tmpexpr;
@ -2576,11 +2612,16 @@ gfc_evaluate_where_mask (gfc_expr * me, forall_info * nested_forall_info,
gfc_init_loopinfo (&loop);
/* Calculate the size of temporary needed by the mask-expr. */
inner_size = compute_inner_temp_size (me, me, block, &lss, &rss);
gfc_init_block (&inner_size_body);
inner_size = compute_inner_temp_size (me, me, &inner_size_body, &lss, &rss);
/* Calculate the total size of temporary needed. */
size = compute_overall_iter_number (nested_forall_info, inner_size,
&inner_size_body, block);
/* Allocate temporary for where mask. */
tmp = allocate_temp_for_forall_nest (nested_forall_info, boolean_type_node,
inner_size, block, &ptemp1);
tmp = allocate_temp_for_forall_nest_1 (boolean_type_node, size, block,
&ptemp1);
/* Record the temporary address in order to free it later. */
if (ptemp1)
{
@ -2592,8 +2633,8 @@ gfc_evaluate_where_mask (gfc_expr * me, forall_info * nested_forall_info,
}
/* Allocate temporary for !mask. */
ntmp = allocate_temp_for_forall_nest (nested_forall_info, boolean_type_node,
inner_size, block, &ptemp2);
ntmp = allocate_temp_for_forall_nest_1 (boolean_type_node, size, block,
&ptemp2);
/* Record the temporary in order to free it later. */
if (ptemp2)
{
@ -2676,8 +2717,22 @@ gfc_evaluate_where_mask (gfc_expr * me, forall_info * nested_forall_info,
tmp1 = gfc_finish_block (&body);
/* If the WHERE construct is inside FORALL, fill the full temporary. */
if (nested_forall_info != NULL)
tmp1 = gfc_trans_nested_forall_loop (nested_forall_info, tmp1, 1, 1);
{
forall_info *forall_tmp;
tree maskindex;
/* Initialize the maskindexes. */
forall_tmp = nested_forall_info;
while (forall_tmp != NULL)
{
maskindex = forall_tmp->maskindex;
if (forall_tmp->mask)
gfc_add_modify_expr (block, maskindex, gfc_index_zero_node);
forall_tmp = forall_tmp->next_nest;
}
tmp1 = gfc_trans_nested_forall_loop (nested_forall_info, tmp1, 1, 1);
}
gfc_add_expr_to_block (block, tmp1);
@ -2998,6 +3053,9 @@ gfc_trans_where_2 (gfc_code * code, tree mask, tree pmask,
nested_forall_info, block);
else
{
forall_info *forall_tmp;
tree maskindex;
/* Variables to control maskexpr. */
count1 = gfc_create_var (gfc_array_index_type, "count1");
count2 = gfc_create_var (gfc_array_index_type, "count2");
@ -3006,6 +3064,18 @@ gfc_trans_where_2 (gfc_code * code, tree mask, tree pmask,
tmp = gfc_trans_where_assign (expr1, expr2, mask, count1,
count2);
/* Initialize the maskindexes. */
forall_tmp = nested_forall_info;
while (forall_tmp != NULL)
{
maskindex = forall_tmp->maskindex;
if (forall_tmp->mask)
gfc_add_modify_expr (block, maskindex,
gfc_index_zero_node);
forall_tmp = forall_tmp->next_nest;
}
tmp = gfc_trans_nested_forall_loop (nested_forall_info,
tmp, 1, 1);
gfc_add_expr_to_block (block, tmp);

View File

@ -1,3 +1,11 @@
2005-05-17 Jakub Jelinek <jakub@redhat.com>
PR fortran/15080
* gfortran.fortran-torture/execute/forall_3.f90: Remove comment
about the test failing.
* gfortran.fortran-torture/execute/where_7.f90: New test.
* gfortran.fortran-torture/execute/where_8.f90: New test.
2005-05-16 Richard Henderson <rth@redhat.com>
* lib/target-supports.exp (check_effective_target_vect_int_mul): Add

View File

@ -1,6 +1,5 @@
! PR fortran/15080
! Really test forall with temporary
! This test fails (2004-06-28). See PR15080. I'd XFAIL it,
! but there doesn't seem to be an easy way to do this for torture tests.
program evil_forall
implicit none
type t

View File

@ -0,0 +1,53 @@
! Exercise WHERE nested inside FORALL when a temporary is required:
! the masked section length v(k)%s depends on the FORALL index, and the
! rhs references a different element (v(6-k)), forcing a temporary.
program evil_where
  implicit none

  type t
     logical :: valid
     integer :: s
     integer, dimension (:), pointer :: p
  end type t

  type (t), dimension (5) :: v
  integer :: k

  ! Rows 1/2/4/5 get real storage; row 3 is invalid and stays null.
  allocate (v(1)%p(2))
  allocate (v(2)%p(8))
  v(3)%p => NULL()
  allocate (v(4)%p(8))
  allocate (v(5)%p(2))

  v(:)%valid = (/ .true., .true., .false., .true., .true. /)
  v(:)%s = (/ 1, 8, 999, 6, 2 /)

  v(1)%p(:) = (/ 9, 10 /)
  v(2)%p(:) = (/ 1, 2, 3, 4, 5, 6, 7, 8 /)
  v(4)%p(:) = (/ 13, 14, 15, 16, 17, 18, 19, 20 /)
  v(5)%p(:) = (/ 11, 12 /)

  ! First pass: replace elements greater than 4 with the mirror row.
  forall (k = 1:5, v(k)%valid)
     where (v(k)%p(1:v(k)%s) > 4)
        v(k)%p(1:v(k)%s) = v(6-k)%p(1:v(k)%s)
     end where
  end forall

  if (any (v(1)%p(:) /= (/ 11, 10 /))) call abort
  if (any (v(2)%p(:) /= (/ 1, 2, 3, 4, 17, 18, 19, 20 /))) call abort
  if (any (v(4)%p(:) /= (/ 1, 2, 3, 4, 5, 6, 19, 20 /))) call abort
  if (any (v(5)%p(:) /= (/ 9, 10 /))) call abort

  ! Reset and run the complementary mask (elements <= 4).
  v(1)%p(:) = (/ 9, 10 /)
  v(2)%p(:) = (/ 1, 2, 3, 4, 5, 6, 7, 8 /)
  v(4)%p(:) = (/ 13, 14, 15, 16, 17, 18, 19, 20 /)
  v(5)%p(:) = (/ 11, 12 /)

  forall (k = 1:5, v(k)%valid)
     where (v(k)%p(1:v(k)%s) <= 4)
        v(k)%p(1:v(k)%s) = v(6-k)%p(1:v(k)%s)
     end where
  end forall

  if (any (v(1)%p(:) /= (/ 9, 10 /))) call abort
  if (any (v(2)%p(:) /= (/ 13, 14, 15, 16, 5, 6, 7, 8 /))) call abort
  if (any (v(4)%p(:) /= (/ 13, 14, 15, 16, 17, 18, 19, 20 /))) call abort
  if (any (v(5)%p(:) /= (/ 11, 12 /))) call abort

  ! NOTE: the allocated pointers are intentionally never deallocated;
  ! the original test leaks them, and freeing would change behavior.
end program evil_where

View File

@ -0,0 +1,28 @@
! WHERE nested inside FORALL assigning a scalar under a mask whose
! extent v(k)%s varies with the FORALL index; fixed-size component
! arrays this time, so untouched tail elements must stay intact.
program where_8
  implicit none

  type t
     logical :: valid
     integer :: s
     integer, dimension (8) :: p
  end type t

  type (t), dimension (5) :: v
  integer :: k

  v(:)%valid = (/ .true., .true., .false., .true., .true. /)
  v(:)%s = (/ 1, 8, 999, 6, 2 /)

  v(1)%p(:) = (/ 9, 10, 0, 0, 0, 0, 0, 0 /)
  v(2)%p(:) = (/ 1, 2, 3, 4, 5, 6, 7, 8 /)
  v(4)%p(:) = (/ 13, 14, 15, 16, 17, 18, 19, 20 /)
  v(5)%p(:) = (/ 11, 12, 0, 0, 0, 0, 0, 0 /)

  ! Overwrite every element greater than 4 within the first v(k)%s
  ! entries, but only for rows flagged valid (row 3 is skipped, so its
  ! out-of-range s = 999 is never used).
  forall (k = 1:5, v(k)%valid)
     where (v(k)%p(1:v(k)%s) > 4)
        v(k)%p(1:v(k)%s) = 21
     end where
  end forall

  if (any (v(1)%p(:) /= (/ 21, 10, 0, 0, 0, 0, 0, 0 /))) call abort
  if (any (v(2)%p(:) /= (/ 1, 2, 3, 4, 21, 21, 21, 21 /))) call abort
  if (any (v(4)%p(:) /= (/ 21, 21, 21, 21, 21, 21, 19, 20 /))) call abort
  if (any (v(5)%p(:) /= (/ 21, 21, 0, 0, 0, 0, 0, 0 /))) call abort
end program where_8