https://gcc.gnu.org/g:b8fb22c3271fc672935476a3885b9a0a613b2ddb
commit b8fb22c3271fc672935476a3885b9a0a613b2ddb
Author: Mikael Morin <mik...@gcc.gnu.org>
Date:   Wed Jul 23 17:07:24 2025 +0200

    Extraction gfc_conv_remap_descriptor

Diff:
---
 gcc/fortran/trans-descriptor.cc | 104 +++++++++++++++++++++++++++++++++++++
 gcc/fortran/trans-descriptor.h  |   3 +-
 gcc/fortran/trans-expr.cc       | 111 +---------------------------------------
 3 files changed, 108 insertions(+), 110 deletions(-)

diff --git a/gcc/fortran/trans-descriptor.cc b/gcc/fortran/trans-descriptor.cc
index 61752f087b59..e72720967e6d 100644
--- a/gcc/fortran/trans-descriptor.cc
+++ b/gcc/fortran/trans-descriptor.cc
@@ -1047,3 +1047,107 @@ gfc_copy_sequence_descriptor (stmtblock_t *block, tree dest, tree src, int rank)
 
   gfc_conv_descriptor_offset_set (block, dest, gfc_index_zero_node);
 }
+
+void
+gfc_conv_remap_descriptor (stmtblock_t *block, tree dest, int dest_rank,
+                           tree src, int src_rank, gfc_array_ref *ar)
+{
+  /* Set dtype.  */
+  gfc_conv_descriptor_dtype_set (block, dest,
+                                 gfc_get_dtype (TREE_TYPE (dest)));
+
+  /* Copy data pointer.  */
+  gfc_conv_descriptor_data_set (block, dest,
+                                gfc_conv_descriptor_data_get (src));
+
+  /* Copy the span.  */
+  tree span;
+  if (VAR_P (src)
+      && GFC_DECL_PTR_ARRAY_P (src))
+    span = gfc_conv_descriptor_span_get (src);
+  else
+    {
+      tree tmp = TREE_TYPE (src);
+      tmp = TYPE_SIZE_UNIT (gfc_get_element_type (tmp));
+      span = fold_convert (gfc_array_index_type, tmp);
+    }
+  gfc_conv_descriptor_span_set (block, dest, span);
+
+  /* Copy offset but adjust it such that it would correspond
+     to a lbound of zero.  */
+  if (src_rank == -1)
+    gfc_conv_descriptor_offset_set (block, dest,
+                                    gfc_index_zero_node);
+  else
+    {
+      tree offs = gfc_conv_descriptor_offset_get (src);
+      for (int dim = 0; dim < src_rank; ++dim)
+        {
+          tree stride = gfc_conv_descriptor_stride_get (src, gfc_rank_cst[dim]);
+          tree lbound = gfc_conv_descriptor_lbound_get (src, gfc_rank_cst[dim]);
+          tree tmp = fold_build2_loc (input_location, MULT_EXPR,
+                                      gfc_array_index_type, stride, lbound);
+          offs = fold_build2_loc (input_location, PLUS_EXPR,
+                                  gfc_array_index_type, offs, tmp);
+        }
+      gfc_conv_descriptor_offset_set (block, dest, offs);
+    }
+
+  /* Set the bounds as declared for the LHS and calculate strides as
+     well as another offset update accordingly.  */
+  tree stride = gfc_conv_descriptor_stride_get (src, gfc_rank_cst[0]);
+  for (int dim = 0; dim < dest_rank; ++dim)
+    {
+      gfc_se lower_se;
+      gfc_se upper_se;
+
+      gcc_assert (ar->start[dim] && ar->end[dim]);
+
+      if (ar->start[dim]->expr_type != EXPR_CONSTANT
+          || ar->start[dim]->expr_type != EXPR_VARIABLE)
+        gfc_resolve_expr (ar->start[dim]);
+      if (ar->end[dim]->expr_type != EXPR_CONSTANT
+          || ar->end[dim]->expr_type != EXPR_VARIABLE)
+        gfc_resolve_expr (ar->end[dim]);
+
+      /* Convert declared bounds.  */
+      gfc_init_se (&lower_se, NULL);
+      gfc_init_se (&upper_se, NULL);
+      gfc_conv_expr (&lower_se, ar->start[dim]);
+      gfc_conv_expr (&upper_se, ar->end[dim]);
+
+      gfc_add_block_to_block (block, &lower_se.pre);
+      gfc_add_block_to_block (block, &upper_se.pre);
+
+      tree lbound = fold_convert (gfc_array_index_type, lower_se.expr);
+      tree ubound = fold_convert (gfc_array_index_type, upper_se.expr);
+
+      lbound = gfc_evaluate_now (lbound, block);
+      ubound = gfc_evaluate_now (ubound, block);
+
+      gfc_add_block_to_block (block, &lower_se.post);
+      gfc_add_block_to_block (block, &upper_se.post);
+
+      /* Set bounds in descriptor.  */
+      gfc_conv_descriptor_lbound_set (block, dest, gfc_rank_cst[dim], lbound);
+      gfc_conv_descriptor_ubound_set (block, dest, gfc_rank_cst[dim], ubound);
+
+      /* Set stride.  */
+      stride = gfc_evaluate_now (stride, block);
+      gfc_conv_descriptor_stride_set (block, dest, gfc_rank_cst[dim], stride);
+
+      /* Update offset.  */
+      tree offs = gfc_conv_descriptor_offset_get (dest);
+      tree tmp = fold_build2_loc (input_location, MULT_EXPR,
+                                  gfc_array_index_type, lbound, stride);
+      offs = fold_build2_loc (input_location, MINUS_EXPR,
+                              gfc_array_index_type, offs, tmp);
+      offs = gfc_evaluate_now (offs, block);
+      gfc_conv_descriptor_offset_set (block, dest, offs);
+
+      /* Update stride.  */
+      tmp = gfc_conv_array_extent_dim (lbound, ubound, NULL);
+      stride = fold_build2_loc (input_location, MULT_EXPR,
+                                gfc_array_index_type, stride, tmp);
+    }
+}
diff --git a/gcc/fortran/trans-descriptor.h b/gcc/fortran/trans-descriptor.h
index ac7960589abb..955778a3f412 100644
--- a/gcc/fortran/trans-descriptor.h
+++ b/gcc/fortran/trans-descriptor.h
@@ -107,6 +107,7 @@ void gfc_conv_shift_descriptor (stmtblock_t *, tree, const gfc_array_ref &);
 /* Build a null array descriptor constructor.  */
 void gfc_nullify_descriptor (stmtblock_t *block, tree);
 void gfc_copy_sequence_descriptor (stmtblock_t *, tree, tree, int);
-
+void gfc_conv_remap_descriptor (stmtblock_t *, tree, int, tree, int,
+                                gfc_array_ref *);
 
 #endif /* GFC_TRANS_DESCRIPTOR_H */
diff --git a/gcc/fortran/trans-expr.cc b/gcc/fortran/trans-expr.cc
index 26a1ded0d268..38b67b5e6a7f 100644
--- a/gcc/fortran/trans-expr.cc
+++ b/gcc/fortran/trans-expr.cc
@@ -11210,7 +11210,6 @@ gfc_trans_pointer_assignment (gfc_expr * expr1, gfc_expr * expr2)
       /* If we do bounds remapping, update LHS descriptor accordingly.  */
       if (remap)
         {
-          int dim;
           gcc_assert (remap->u.ar.dimen == expr1->rank);
 
           if (rank_remap)
@@ -11218,114 +11217,8 @@ gfc_trans_pointer_assignment (gfc_expr * expr1, gfc_expr * expr2)
               /* Do rank remapping.  We already have the RHS's descriptor
                  converted in rse and now have to build the correct LHS
                  descriptor for it.  */
-
-              tree data, span;
-              tree offs, stride;
-              tree lbound, ubound;
-
-              /* Set dtype.  */
-              gfc_conv_descriptor_dtype_set (&block, desc,
-                                             gfc_get_dtype (TREE_TYPE (desc)));
-
-              /* Copy data pointer.  */
-              data = gfc_conv_descriptor_data_get (rse.expr);
-              gfc_conv_descriptor_data_set (&block, desc, data);
-
-              /* Copy the span.  */
-              if (VAR_P (rse.expr)
-                  && GFC_DECL_PTR_ARRAY_P (rse.expr))
-                span = gfc_conv_descriptor_span_get (rse.expr);
-              else
-                {
-                  tmp = TREE_TYPE (rse.expr);
-                  tmp = TYPE_SIZE_UNIT (gfc_get_element_type (tmp));
-                  span = fold_convert (gfc_array_index_type, tmp);
-                }
-              gfc_conv_descriptor_span_set (&block, desc, span);
-
-              /* Copy offset but adjust it such that it would correspond
-                 to a lbound of zero.  */
-              if (expr2->rank == -1)
-                gfc_conv_descriptor_offset_set (&block, desc,
-                                                gfc_index_zero_node);
-              else
-                {
-                  offs = gfc_conv_descriptor_offset_get (rse.expr);
-                  for (dim = 0; dim < expr2->rank; ++dim)
-                    {
-                      stride = gfc_conv_descriptor_stride_get (rse.expr,
-                                                               gfc_rank_cst[dim]);
-                      lbound = gfc_conv_descriptor_lbound_get (rse.expr,
-                                                               gfc_rank_cst[dim]);
-                      tmp = fold_build2_loc (input_location, MULT_EXPR,
-                                             gfc_array_index_type, stride,
-                                             lbound);
-                      offs = fold_build2_loc (input_location, PLUS_EXPR,
-                                              gfc_array_index_type, offs, tmp);
-                    }
-                  gfc_conv_descriptor_offset_set (&block, desc, offs);
-                }
-              /* Set the bounds as declared for the LHS and calculate strides as
-                 well as another offset update accordingly.  */
-              stride = gfc_conv_descriptor_stride_get (rse.expr,
-                                                       gfc_rank_cst[0]);
-              for (dim = 0; dim < expr1->rank; ++dim)
-                {
-                  gfc_se lower_se;
-                  gfc_se upper_se;
-
-                  gcc_assert (remap->u.ar.start[dim] && remap->u.ar.end[dim]);
-
-                  if (remap->u.ar.start[dim]->expr_type != EXPR_CONSTANT
-                      || remap->u.ar.start[dim]->expr_type != EXPR_VARIABLE)
-                    gfc_resolve_expr (remap->u.ar.start[dim]);
-                  if (remap->u.ar.end[dim]->expr_type != EXPR_CONSTANT
-                      || remap->u.ar.end[dim]->expr_type != EXPR_VARIABLE)
-                    gfc_resolve_expr (remap->u.ar.end[dim]);
-
-                  /* Convert declared bounds.  */
-                  gfc_init_se (&lower_se, NULL);
-                  gfc_init_se (&upper_se, NULL);
-                  gfc_conv_expr (&lower_se, remap->u.ar.start[dim]);
-                  gfc_conv_expr (&upper_se, remap->u.ar.end[dim]);
-
-                  gfc_add_block_to_block (&block, &lower_se.pre);
-                  gfc_add_block_to_block (&block, &upper_se.pre);
-
-                  lbound = fold_convert (gfc_array_index_type, lower_se.expr);
-                  ubound = fold_convert (gfc_array_index_type, upper_se.expr);
-
-                  lbound = gfc_evaluate_now (lbound, &block);
-                  ubound = gfc_evaluate_now (ubound, &block);
-
-                  gfc_add_block_to_block (&block, &lower_se.post);
-                  gfc_add_block_to_block (&block, &upper_se.post);
-
-                  /* Set bounds in descriptor.  */
-                  gfc_conv_descriptor_lbound_set (&block, desc,
-                                                  gfc_rank_cst[dim], lbound);
-                  gfc_conv_descriptor_ubound_set (&block, desc,
-                                                  gfc_rank_cst[dim], ubound);
-
-                  /* Set stride.  */
-                  stride = gfc_evaluate_now (stride, &block);
-                  gfc_conv_descriptor_stride_set (&block, desc,
-                                                  gfc_rank_cst[dim], stride);
-
-                  /* Update offset.  */
-                  offs = gfc_conv_descriptor_offset_get (desc);
-                  tmp = fold_build2_loc (input_location, MULT_EXPR,
-                                         gfc_array_index_type, lbound, stride);
-                  offs = fold_build2_loc (input_location, MINUS_EXPR,
-                                          gfc_array_index_type, offs, tmp);
-                  offs = gfc_evaluate_now (offs, &block);
-                  gfc_conv_descriptor_offset_set (&block, desc, offs);
-
-                  /* Update stride.  */
-                  tmp = gfc_conv_array_extent_dim (lbound, ubound, NULL);
-                  stride = fold_build2_loc (input_location, MULT_EXPR,
-                                            gfc_array_index_type, stride, tmp);
-                }
+              gfc_conv_remap_descriptor (&block, desc, expr1->rank,
+                                         rse.expr, expr2->rank, &remap->u.ar);
             }
           else
             /* Bounds remapping.  Just shift the lower bounds.  */
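The descriptor bookkeeping that gfc_conv_remap_descriptor performs is easier to
follow in plain scalar arithmetic.  Below is a minimal standalone C sketch of
the same logic; it is not gfortran code: the desc struct, the remap() helper
and the bounds in main() are invented for illustration, and gfortran's span
field and runtime checks are left out.  The offset is first normalized as if
every source lbound were zero; then, for each remapped dimension, the declared
bounds are stored, the current stride is recorded, lbound * stride is
subtracted from the offset, and the stride for the next dimension is obtained
by multiplying with the extent (ubound - lbound + 1).

/* Standalone illustration (not gfortran code): models the offset/stride
   bookkeeping done by gfc_conv_remap_descriptor.  Element (i1,...,in) of
   an array is assumed to live at data[offset + i1*stride1 + ... + in*striden].  */
#include <stdio.h>

#define MAXRANK 7

typedef struct
{
  long offset;                  /* offset of the all-zero index */
  long lbound[MAXRANK];
  long ubound[MAXRANK];
  long stride[MAXRANK];
} desc;

static void
remap (desc *dest, int dest_rank, const desc *src, int src_rank,
       const long *lb, const long *ub)
{
  /* Copy offset but adjust it so that it corresponds to a lbound of zero,
     as in the first loop of the patch.  */
  long offs = src->offset;
  for (int dim = 0; dim < src_rank; dim++)
    offs += src->stride[dim] * src->lbound[dim];
  dest->offset = offs;

  /* Set the declared bounds, reuse the source's innermost stride and
     accumulate extents, as in the second loop of the patch.  */
  long stride = src->stride[0];
  for (int dim = 0; dim < dest_rank; dim++)
    {
      dest->lbound[dim] = lb[dim];
      dest->ubound[dim] = ub[dim];
      dest->stride[dim] = stride;
      dest->offset -= lb[dim] * stride;          /* update offset */
      stride *= ub[dim] - lb[dim] + 1;           /* extent -> next stride */
    }
}

int
main (void)
{
  /* Rank-1 pointer with bounds 1:12 remapped to a 3x4 view with bounds
     0:2 and 1:4, roughly what "p(0:2,1:4) => t" would need.  */
  desc src = { .offset = -1, .lbound = { 1 }, .ubound = { 12 },
               .stride = { 1 } };
  desc dst = { 0 };
  long lb[] = { 0, 1 }, ub[] = { 2, 4 };

  remap (&dst, 2, &src, 1, lb, ub);
  /* Prints offset=-3 strides=1,3: element (0,1) maps to data[0],
     element (2,4) maps to data[11].  */
  printf ("offset=%ld strides=%ld,%ld\n",
          dst.offset, dst.stride[0], dst.stride[1]);
  return 0;
}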