https://gcc.gnu.org/g:85811069bfb1d451fa5956a37868c4b8aeeae259
commit r16-5760-g85811069bfb1d451fa5956a37868c4b8aeeae259 Author: Jose E. Marchesi <[email protected]> Date: Sat Oct 11 19:54:10 2025 +0200 a68: low: units and coercions Signed-off-by: Jose E. Marchesi <[email protected]> gcc/ChangeLog * algol68/a68-low-coercions.cc: New file. * algol68/a68-low-generator.cc: Likewise. * algol68/a68-low-units.cc: Likewise. Diff: --- gcc/algol68/a68-low-coercions.cc | 471 ++++++++++++++ gcc/algol68/a68-low-generator.cc | 533 ++++++++++++++++ gcc/algol68/a68-low-units.cc | 1253 ++++++++++++++++++++++++++++++++++++++ 3 files changed, 2257 insertions(+) diff --git a/gcc/algol68/a68-low-coercions.cc b/gcc/algol68/a68-low-coercions.cc new file mode 100644 index 000000000000..b9e1acee9ce8 --- /dev/null +++ b/gcc/algol68/a68-low-coercions.cc @@ -0,0 +1,471 @@ +/* Lower Algol 68 coercions to GENERIC. + Copyright (C) 2025 Jose E. Marchesi. + + Written by Jose E. Marchesi. + + GCC is free software; you can redistribute it and/or modify it + under the terms of the GNU General Public License as published by + the Free Software Foundation; either version 3, or (at your option) + any later version. + + GCC is distributed in the hope that it will be useful, but WITHOUT + ANY WARRANTY; without even the implied warranty of MERCHANTABILITY + or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public + License for more details. + + You should have received a copy of the GNU General Public License + along with GCC; see the file COPYING3. If not see + <http://www.gnu.org/licenses/>. 
*/ + +#define INCLUDE_MEMORY +#include "config.h" +#include "system.h" +#include "coretypes.h" + +#include "tree.h" +#include "fold-const.h" +#include "diagnostic.h" +#include "langhooks.h" +#include "tm.h" +#include "function.h" +#include "cgraph.h" +#include "toplev.h" +#include "varasm.h" +#include "predict.h" +#include "stor-layout.h" +#include "tree-iterator.h" +#include "stringpool.h" +#include "print-tree.h" +#include "gimplify.h" +#include "dumpfile.h" +#include "convert.h" + +#include "a68.h" + +/* Lower a dereferencing coercion. */ +tree +a68_lower_dereferencing (NODE_T *p, LOW_CTX_T ctx) +{ + return a68_low_deref (a68_lower_tree (SUB (p), ctx), SUB (p)); +} + +/* Lower an uniting coercion. */ + +tree +a68_lower_uniting (NODE_T *p, LOW_CTX_T ctx) +{ + tree coercend_tree = a68_lower_tree (SUB (p), ctx); + + if (MOID (p) == M_ROWS) + { + /* ROWS is a mode to which any ROW mode can be strongly coerced. It is + used as the mode of the second operand of the ELEMS, LWB and UPB + operators. The coercion is expressed in the parse tree via uniting. + This results in replacing the multiple with a "rows" value that + contains dimension and bounds information. */ + if (A68_ROW_TYPE_P (TREE_TYPE (coercend_tree))) + return a68_rows_value (coercend_tree); + else if (A68_UNION_TYPE_P (TREE_TYPE (coercend_tree))) + { + /* coercend_tree is expanded more than once below. */ + coercend_tree = save_expr (coercend_tree); + + /* Union of row modes. We should create a rows value for the currently + selected value. 
*/ + a68_push_range (M_ROWS); + tree done_label = build_decl (UNKNOWN_LOCATION, + LABEL_DECL, + get_identifier ("done_label%"), + void_type_node); + DECL_CONTEXT (done_label) = a68_range_context (); + a68_add_decl (done_label); + a68_add_decl_expr (fold_build1 (DECL_EXPR, TREE_TYPE (done_label), done_label)); + tree rows = a68_lower_tmpvar ("rows%", CTYPE (M_ROWS), + a68_get_skip_tree (M_ROWS)); + tree coercend_overhead = a68_union_overhead (coercend_tree); + tree overhead = a68_lower_tmpvar ("overhead%", TREE_TYPE (coercend_overhead), + coercend_overhead); + int field_index = 0; + for (tree field = TYPE_FIELDS (TREE_TYPE (a68_union_cunion (coercend_tree))); + field; + field = DECL_CHAIN (field)) + { + a68_push_range (M_VOID); + { + /* Set rows% to the rows value computed from coercend_tree.FIELD, + which is of some multiple type. */ + a68_add_stmt (fold_build2 (MODIFY_EXPR, CTYPE (M_ROWS), + rows, + a68_rows_value (a68_union_alternative (coercend_tree, + field_index)))); + a68_add_stmt (fold_build1 (GOTO_EXPR, void_type_node, done_label)); + a68_add_stmt (a68_get_skip_tree (M_VOID)); + } + tree process_entry = a68_pop_range (); + + /* IF overhead = field_index THEN rows% = rows_from_multiple FI */ + a68_add_stmt (fold_build3 (COND_EXPR, + a68_void_type, + fold_build2 (EQ_EXPR, + TREE_TYPE (overhead), + overhead, + build_int_cst (TREE_TYPE (overhead), field_index)), + process_entry, + a68_get_skip_tree (M_VOID))); + field_index += 1; + } + + /* This should not be reached. Emit run-time error. 
*/ + { + unsigned int lineno = NUMBER (LINE (INFO (p))); + const char *filename_str = FILENAME (LINE (INFO (p))); + tree filename = build_string_literal (strlen (filename_str) + 1, + filename_str); + tree call = a68_build_libcall (A68_LIBCALL_UNREACHABLE, + void_type_node, 2, + filename, + build_int_cst (unsigned_type_node, lineno)); + a68_add_stmt (call); + } + + a68_add_stmt (build1 (LABEL_EXPR, void_type_node, done_label)); + a68_add_stmt (rows); + return a68_pop_range (); + } + else + { + debug_tree (TREE_TYPE (coercend_tree)); + gcc_assert (A68_ROWS_TYPE_P (TREE_TYPE (coercend_tree))); + return coercend_tree; + } + } + else if (IS_UNION (MOID (SUB (p)))) + { + /* We have to extract the value of the coercend union. */ + a68_push_range (MOID (p)); + { + MOID_T *coercend_mode = MOID (SUB (p)); + MOID_T *coercee_mode = MOID (p); + + /* Temporaries for the coercend's components. */ + tree coercend = a68_lower_tmpvar ("coercend%", TREE_TYPE (coercend_tree), coercend_tree); + tree cval = a68_union_cunion (coercend); + tree coverhead = a68_union_overhead (coercend); + tree coercend_value = a68_lower_tmpvar ("coercend_value%", TREE_TYPE (cval), cval); + tree coercend_overhead = a68_lower_tmpvar ("coercend_overhead%", sizetype, coverhead); + + /* Create the coercee. */ + tree coercee = a68_lower_tmpvar ("coercee%", + CTYPE (MOID (p)), + a68_get_skip_tree (MOID (p))); + tree coercee_value = a68_union_cunion (coercee); + + /* First translate overhead. This is crude, but it works. 
*/ + int idx = 0; + tree coercee_overhead = size_zero_node; + while (EQUIVALENT (coercend_mode) != NO_MOID) + coercend_mode = EQUIVALENT (coercend_mode); + for (PACK_T *pack = PACK (coercend_mode); pack != NO_PACK; FORWARD (pack)) + { + coercee_overhead = fold_build3 (COND_EXPR, + sizetype, + fold_build2 (EQ_EXPR, + sizetype, + coercend_overhead, + size_int (idx)), + size_int (a68_united_mode_index (coercee_mode, MOID (pack))), + coercee_overhead); + idx++; + } + a68_add_stmt (a68_union_set_overhead (coercee, coercee_overhead)); + + /* Now copy over the value. This of course relies on the fact the + value of the coercend is smaller or of the same size than the value + of the built union. */ + a68_add_stmt (a68_lower_memcpy (fold_build1 (ADDR_EXPR, + build_pointer_type (TREE_TYPE (coercee_value)), + coercee_value), + fold_build1 (ADDR_EXPR, + build_pointer_type (TREE_TYPE (coercend_value)), + coercend_value), + size_in_bytes (TREE_TYPE (coercend_value)))); + a68_add_stmt (coercee); + } + return a68_pop_range (); + } + else + { + /* Produce a united mode one of whose component modes is the mode of the + coercend. */ + return a68_union_value (MOID (p), coercend_tree, MOID (SUB (p))); + } +} + +/* Lower a rowing coercion. */ + +tree +a68_lower_rowing (NODE_T *p, LOW_CTX_T ctx) +{ + MOID_T *mode = MOID (p); + bool did_deref = false; + + /* If the primary is a REF, we need to dereference it to get the referred + value. */ + tree primary = NULL_TREE; + tree orig_primary = NULL_TREE; + MOID_T *target_mode = NO_MOID; + if (IS_REF (mode)) + { + gcc_assert (IS_REF (MOID (SUB (p)))); + did_deref = true; + target_mode = SUB (mode); + + a68_push_range (mode); + /* Note that we have to consolidate because we need a pointer to compare + to NIL below. 
*/ + orig_primary = a68_lower_tmpvar ("orig_primary%", + CTYPE (MOID (SUB (p))), + a68_consolidate_ref (MOID (SUB (p)), + a68_lower_tree (SUB (p), ctx))); + primary = a68_low_deref (orig_primary, SUB (p)); + } + else + { + target_mode = mode; + primary = a68_lower_tree (SUB (p), ctx); + /* The primary gets expanded more than once below. */ + primary = save_expr (primary); + } + + /* Perform the rowing in the primary. */ + tree ssize_one_node = fold_convert (ssizetype, size_one_node); + tree rowed_primary = NULL_TREE; + if (DIM (DEFLEX (target_mode)) >= 2) + { + /* []A -> [,]A */ + + /* First determine the number of dimensions of the resulting + multiple. */ + tree primary_dimensions = a68_multiple_dimensions (primary); + gcc_assert (TREE_CODE (primary_dimensions) == INTEGER_CST); + int dim = tree_to_shwi (primary_dimensions) + 1; + + /* Compute bounds. */ + tree *lower_bounds = (tree *) xmalloc (sizeof (tree) * dim); + tree *upper_bounds = (tree *) xmalloc (sizeof (tree) * dim); + + lower_bounds[0] = ssize_one_node; + upper_bounds[0] = ssize_one_node; + for (int d = 1; d < dim; ++d) + { + lower_bounds[d] = a68_multiple_lower_bound (primary, ssize_int (d - 1)); + upper_bounds[d] = a68_multiple_upper_bound (primary, ssize_int (d - 1)); + } + + rowed_primary = a68_row_value (CTYPE (target_mode), dim, + a68_multiple_elements (primary), + a68_multiple_elements_size (primary), + lower_bounds, upper_bounds); + free (lower_bounds); + free (upper_bounds); + } + else + { + /* A -> []A */ + tree row_type = CTYPE (target_mode); + tree lower_bound = ssize_one_node; + tree upper_bound = ssize_one_node; + tree elements = (did_deref + ? 
orig_primary + : fold_build1 (ADDR_EXPR, + build_pointer_type (TREE_TYPE (primary)), + build_constructor_va (build_array_type (TREE_TYPE (primary), + build_index_type (size_zero_node)), + 1, size_zero_node, primary))); + tree elements_type = a68_row_elements_type (row_type); + tree elements_size = size_in_bytes (elements_type); + rowed_primary = a68_row_value (row_type, 1, + elements, elements_size, + &lower_bound, &upper_bound); + } + + /* Build a ref if we rowed a ref. */ + if (did_deref) + { + tree pointer_type = build_pointer_type (TREE_TYPE (rowed_primary)); + rowed_primary = fold_build1 (ADDR_EXPR, pointer_type, rowed_primary); + /* Rowing NIL yields NIL. */ + rowed_primary = fold_build3_loc (a68_get_node_location (p), + COND_EXPR, + pointer_type, + fold_build2 (EQ_EXPR, + pointer_type, + fold_convert (pointer_type, orig_primary), + build_int_cst (pointer_type, 0)), + build_int_cst (pointer_type, 0), + rowed_primary); + a68_add_stmt (rowed_primary); + rowed_primary = a68_pop_range (); + } + + return rowed_primary; +} + +/* Lower a widening coercion. + + Widening allows the following conversions of mode: + + LONGSETY INT to LONGSETY REAL + LONGSETY REAL to LONGSETY COMPL + LONGSETY BITS to []BOOL + LONGSETY BYTES to []CHAR */ + +tree +a68_lower_widening (NODE_T *p, LOW_CTX_T ctx) +{ + if (MOID (p) == M_REAL + || MOID (p) == M_LONG_REAL + || MOID (p) == M_LONG_LONG_REAL) + { + return convert_to_real (CTYPE (MOID (p)), a68_lower_tree (SUB (p), ctx)); + } + if (MOID (p) == M_COMPLEX + || MOID (p) == M_LONG_COMPLEX + || MOID (p) == M_LONG_LONG_COMPLEX) + { + return a68_complex_widen_from_real (MOID (p), + a68_lower_tree (SUB (p), ctx)); + } + else if (MOID (p) == M_ROW_BOOL) + { + /* Widen a LONGSETY BITS to a row of BOOLs. 
*/ + tree coercend = a68_lower_tree (SUB (p), ctx); + tree coercend_type = TREE_TYPE (coercend); + HOST_WIDE_INT bits_size = int_size_in_bytes (coercend_type); + gcc_assert (bits_size != -1); + bits_size = bits_size * 8; + + tree pointer_to_bool_type = build_pointer_type (a68_bool_type); + a68_push_range (M_ROW_BOOL); + /* First allocate space for the elements. */ + tree elements = a68_lower_tmpvar ("elements%", + pointer_to_bool_type, + a68_lower_alloca (a68_bool_type, + fold_build2 (MULT_EXPR, + sizetype, + size_int (bits_size), + size_in_bytes (a68_bool_type)))); + + /* Set the elements, each element is a BOOL which is TRUE if the + corresponding bit in the coercend is set, FALSE otherwise. */ + tree coercend_one_node = build_int_cst (coercend_type, 1); + coercend = save_expr (coercend); + for (HOST_WIDE_INT bit = 0; bit < bits_size; ++bit) + { + tree offset = fold_build2 (MULT_EXPR, sizetype, + size_int (bit), size_in_bytes (a68_bool_type)); + tree bit_set = fold_convert (a68_bool_type, + fold_build2 (BIT_AND_EXPR, coercend_type, + fold_build2 (RSHIFT_EXPR, coercend_type, + coercend, + build_int_cst (coercend_type, + bits_size - 1 - bit)), + coercend_one_node)); + + a68_add_stmt (fold_build2 (MODIFY_EXPR, + a68_bool_type, + fold_build2 (MEM_REF, + a68_bool_type, + fold_build2 (POINTER_PLUS_EXPR, + pointer_to_bool_type, + elements, + offset), + fold_convert (pointer_to_bool_type, + integer_zero_node)), + bit_set)); + } + + /* Create multiple. 
*/ + tree lower_bound = ssize_int (1); + tree upper_bound = ssize_int (bits_size); + tree elements_size = fold_build2 (MULT_EXPR, sizetype, + size_int (bits_size), + size_in_bytes (a68_bool_type)); + tree multiple = a68_row_value (CTYPE (M_ROW_BOOL), 1 /* dim */, + elements, elements_size, + &lower_bound, &upper_bound); + a68_add_stmt (multiple); + return a68_pop_range (); + } + else + { + fatal_error (a68_get_node_location (p), + "cannot do widening from %s to %s", + a68_moid_to_string (MOID (SUB (p)), MOID_ERROR_WIDTH, SUB (p)), + a68_moid_to_string (MOID (p), MOID_ERROR_WIDTH, p)); + gcc_unreachable (); + } +} + +/* Lower a voiding coercion. + + The voiding lowers into a compound expression with the voided expression + (for side-effects) and returns EMPTY. */ + +tree +a68_lower_voiding (NODE_T *p, LOW_CTX_T ctx) +{ + return fold_build2_loc (a68_get_node_location (p), + COMPOUND_EXPR, + a68_void_type, + a68_lower_tree (SUB (p), ctx), + a68_get_empty ()); +} + +/* Lower a proceduring coercion. + + proceduring : jump. + + In the Revised language only jump statements can be procedured. The + coercion results in a new function whose body is the jump instruction. */ + +tree +a68_lower_proceduring (NODE_T *p, LOW_CTX_T ctx) +{ + tree jump = a68_lower_tree (SUB (p), ctx); + + tree procedured_goto = a68_make_anonymous_routine_decl (MOID (p)); + a68_add_decl (procedured_goto); + a68_add_decl_expr (fold_build1_loc (a68_get_node_location (p), + DECL_EXPR, + TREE_TYPE (procedured_goto), + procedured_goto)); + announce_function (procedured_goto); + + a68_push_function_range (procedured_goto, CTYPE (SUB (MOID (p)))); + a68_pop_function_range (jump); + return fold_build1 (ADDR_EXPR, + build_pointer_type (TREE_TYPE (procedured_goto)), + procedured_goto); +} + +/* Lower a deproceduring coercion. + The deproceduring lowers into a call expression. 
*/ + +tree +a68_lower_deproceduring (NODE_T *p, LOW_CTX_T ctx) +{ + tree func = a68_lower_tree (SUB (p), ctx); + + if (POINTER_TYPE_P (TREE_TYPE (func))) + { + if (TREE_CODE (func) == ADDR_EXPR) + func = TREE_OPERAND (func, 0); + else + func = fold_build1 (INDIRECT_REF, + TREE_TYPE (TREE_TYPE (func)), + func); + } + + return build_call_expr_loc (a68_get_node_location (p), func, 0); +} diff --git a/gcc/algol68/a68-low-generator.cc b/gcc/algol68/a68-low-generator.cc new file mode 100644 index 000000000000..5c4d65569b37 --- /dev/null +++ b/gcc/algol68/a68-low-generator.cc @@ -0,0 +1,533 @@ +/* Lower generators. + Copyright (C) 2025 Jose E. Marchesi. + + Written by Jose E. Marchesi. + + GCC is free software; you can redistribute it and/or modify it + under the terms of the GNU General Public License as published by + the Free Software Foundation; either version 3, or (at your option) + any later version. + + GCC is distributed in the hope that it will be useful, but WITHOUT + ANY WARRANTY; without even the implied warranty of MERCHANTABILITY + or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public + License for more details. + + You should have received a copy of the GNU General Public License + along with GCC; see the file COPYING3. If not see + <http://www.gnu.org/licenses/>. 
*/ + +#define INCLUDE_MEMORY +#include "config.h" +#include "system.h" +#include "coretypes.h" + +#include "tree.h" +#include "fold-const.h" +#include "diagnostic.h" +#include "langhooks.h" +#include "tm.h" +#include "function.h" +#include "cgraph.h" +#include "toplev.h" +#include "varasm.h" +#include "predict.h" +#include "stor-layout.h" +#include "tree-iterator.h" +#include "stringpool.h" +#include "print-tree.h" +#include "gimplify.h" +#include "dumpfile.h" +#include "convert.h" + +#include "a68.h" + + +typedef tree (*allocator_t) (tree, tree); + +/* Lower to code that fill in BOUNDS and elements pointers in the given buffer + pointed by BUFFER at offset OFFSET according to the mode MODE, and evals to + BUFFER. */ + +static tree +fill_in_buffer (tree buffer, tree offset, tree_stmt_iterator *bounds, MOID_T *m, + allocator_t allocator) +{ + tree filler = NULL_TREE; + tree type = CTYPE (m); + tree pointer_type = build_pointer_type (type); + + a68_push_stmt_list (M_VOID); + + if (m == M_INT || m == M_BOOL || m == M_CHAR || m == M_REAL || IS_REF (m)) + { + tree val_address = fold_build2 (POINTER_PLUS_EXPR, pointer_type, buffer, offset); + tree init_val = a68_get_skip_tree (m); + tree modify = fold_build2 (MODIFY_EXPR, + type, + fold_build1 (INDIRECT_REF, type, val_address), + init_val); + a68_add_stmt (modify); + } + else if (!HAS_ROWS (m)) + { + /* This mode has no rows. We can just fill in with zeroes, which + translates into SKIP values for all possibly contained types. */ + tree call = builtin_decl_explicit (BUILT_IN_MEMSET); + call = build_call_expr_loc (UNKNOWN_LOCATION, call, 3, + buffer, + integer_zero_node, + fold_convert (sizetype, size_in_bytes (CTYPE (m)))); + a68_add_stmt (call); + } + else if (m == M_STRING) + { + /* Strings are rows but handled especially as they are created empty and + don't feature bounds in the formal declarer. */ + + /* First the descriptor. 
*/ + tree pointer_byte_size = size_int (POINTER_SIZE / BITS_PER_UNIT); + tree lb_address = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (buffer), buffer, offset); + a68_add_stmt (fold_build2 (MODIFY_EXPR, void_type_node, + fold_build1 (INDIRECT_REF, ssizetype, lb_address), + ssize_int (1))); + offset = fold_build2 (PLUS_EXPR, sizetype, offset, pointer_byte_size); + tree ub_address = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (buffer), buffer, offset); + a68_add_stmt (fold_build2 (MODIFY_EXPR, void_type_node, + fold_build1 (INDIRECT_REF, ssizetype, ub_address), + ssize_int (0))); + offset = fold_build2 (PLUS_EXPR, sizetype, offset, pointer_byte_size); + tree stride_address = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (buffer), buffer, offset); + a68_add_stmt (fold_build2 (MODIFY_EXPR, void_type_node, + fold_build1 (INDIRECT_REF, sizetype, stride_address), + size_in_bytes (a68_char_type))); + + /* The data is an empty string, i.e NULL. */ + offset = fold_build2 (PLUS_EXPR, sizetype, offset, pointer_byte_size); + tree elems_address = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (buffer), buffer, offset); + a68_add_stmt (fold_build2 (MODIFY_EXPR, void_type_node, + fold_build1 (INDIRECT_REF, build_pointer_type (a68_char_type), + elems_address), + build_int_cst (build_pointer_type (a68_char_type), 0))); + + /* The size of the elements is zero. */ + offset = fold_build2 (PLUS_EXPR, sizetype, offset, pointer_byte_size); + tree elems_size_address = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (buffer), buffer, offset); + a68_add_stmt (fold_build2 (MODIFY_EXPR, void_type_node, + fold_build1 (INDIRECT_REF, build_pointer_type (a68_char_type), + elems_size_address), + size_zero_node)); + } + else if (A68_ROW_TYPE_P (type)) + { + /* If the row mode is flexible we can deflex it now: these also must have + bounds specified for them, with the only exception of strings/flexible + rows of chars, which are handled above. 
Note we cannot use DEFLEXED + here because that contains the fully deflexed mode. For example, + DEFLEXED returns [][]INT for FLEX[]FLEX[]INT, and we want []FLEX[]INT + instead. */ + if (IS_FLEX (m)) + m = SUB (m); + + /* Consume two bounds from BOUNDS for each dimension and patch them at + their right offsets. Note that we have to process from upper + dimension to lower dimension so we can calculate the stride as we + go. */ + size_t dim = DIM (m); + + /* Collect lower and upper bounds and calculate the number of elements of + the multiple. */ + tree *lower_bounds = (tree *) xmalloc (sizeof (tree) * dim); + tree *upper_bounds = (tree *) xmalloc (sizeof (tree) * dim); + tree num_elems = NULL_TREE; + for (size_t i = 0; i < dim; ++i) + { + /* Note we have to convert the bounds from CTYPE(M_INT) to + ssizetype. */ + lower_bounds[i] = fold_convert (ssizetype, save_expr (tsi_stmt (*bounds))); + tsi_next (bounds); + upper_bounds[i] = fold_convert (ssizetype, save_expr (tsi_stmt (*bounds))); + tsi_next (bounds); + + tree dim_num_elems + = fold_build2 (PLUS_EXPR, sizetype, + fold_convert (sizetype, + fold_build2 (MINUS_EXPR, ssizetype, + upper_bounds[i], lower_bounds[i])), + size_one_node); + dim_num_elems = fold_build3 (COND_EXPR, + sizetype, + fold_build2 (LT_EXPR, ssizetype, + upper_bounds[i], lower_bounds[i]), + size_zero_node, + dim_num_elems); + if (num_elems == NULL_TREE) + num_elems = dim_num_elems; + else + num_elems = fold_build2 (MULT_EXPR, sizetype, num_elems, dim_num_elems); + } + + /* Calculate strides. */ + tree *strides = (tree *) xmalloc (sizeof (tree) * dim); + a68_multiple_compute_strides (type, dim, lower_bounds, upper_bounds, strides); + + /* Now emit instructions to patch the bounds and strides. */ + tree pointer_byte_size = size_int (POINTER_SIZE / BITS_PER_UNIT); + for (size_t i = 0; i < dim; ++i) + { + /* Lower bound. 
*/ + tree lb_address = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (buffer), buffer, offset); + a68_add_stmt (fold_build2 (MODIFY_EXPR, + void_type_node, + fold_build1 (INDIRECT_REF, ssizetype, lb_address), + lower_bounds[i])); + /* Upper bound. */ + offset = fold_build2 (PLUS_EXPR, sizetype, offset, pointer_byte_size); + tree ub_address = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (buffer), buffer, offset); + a68_add_stmt (fold_build2 (MODIFY_EXPR, + void_type_node, + fold_build1 (INDIRECT_REF, ssizetype, ub_address), + upper_bounds[i])); + /* Stride. */ + offset = fold_build2 (PLUS_EXPR, sizetype, offset, pointer_byte_size); + tree stride_address = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (buffer), buffer, offset); + a68_add_stmt (fold_build2 (MODIFY_EXPR, + void_type_node, + fold_build1 (INDIRECT_REF, sizetype, stride_address), + strides[i])); + offset = fold_build2 (PLUS_EXPR, sizetype, offset, pointer_byte_size); + } + free (lower_bounds); + free (upper_bounds); + free (strides); + + /* Now allocate space for the elements. */ + MOID_T *elem_mode = SUB (m); + tree elem_size = fold_convert (sizetype, size_in_bytes (CTYPE (elem_mode))); + tree elems_size = save_expr (fold_build2 (MULT_EXPR, sizetype, elem_size, num_elems)); + tree elemsptr = (*allocator) (CTYPE (elem_mode), elems_size); + elemsptr = save_expr (elemsptr); + + /* And initialize them. */ + if (elem_mode == M_INT || elem_mode == M_BOOL || elem_mode == M_CHAR + || elem_mode == M_REAL || IS_REF (elem_mode)) + { + /* Memsetting the buffer with either zeroes or ones satisfies the + SKIP value for these modes. */ + tree call = builtin_decl_explicit (BUILT_IN_MEMSET); + call = build_call_expr_loc (UNKNOWN_LOCATION, call, 3, + elemsptr, + integer_zero_node, + elems_size); + a68_add_stmt (call); + } + else + { + /* Recurse in a loop to fill in elements. 
*/ + a68_push_range (NULL); + tree num_elems_var = a68_lower_tmpvar ("numelems%", size_type_node, + num_elems); + tree index = a68_lower_tmpvar ("index%", size_type_node, size_zero_node); + tree elems_var = a68_lower_tmpvar ("elems%", TREE_TYPE (elemsptr), + elemsptr); + tree elem_offset = a68_lower_tmpvar ("elem_offset%", size_type_node, + size_zero_node); + + /* Begin of loop body. */ + a68_push_range (NULL); + a68_add_stmt (fold_build1 (EXIT_EXPR, + void_type_node, + fold_build2 (EQ_EXPR, + size_type_node, + index, num_elems_var))); + a68_add_stmt (fill_in_buffer (elems_var, elem_offset, bounds, elem_mode, + allocator)); + /* Increase elem_offset */ + a68_add_stmt (fold_build2 (MODIFY_EXPR, sizetype, + elem_offset, + fold_build2 (PLUS_EXPR, sizetype, + elem_offset, elem_size))); + /* index++ */ + a68_add_stmt (fold_build2 (POSTINCREMENT_EXPR, + size_type_node, + index, size_one_node)); + tree loop_body = a68_pop_range (); + /* End of loop body. */ + a68_add_stmt (fold_build1 (LOOP_EXPR, + void_type_node, + loop_body)); + a68_add_stmt (a68_pop_range ()); + } + + /* Patch the elements% field. */ + tree elems_address = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (buffer), buffer, offset); + a68_add_stmt (fold_build2 (MODIFY_EXPR, + void_type_node, + fold_build1 (INDIRECT_REF, + build_pointer_type (CTYPE (elem_mode)), elems_address), + elemsptr)); + /* Patch the elements_size% field. */ + offset = fold_build2 (PLUS_EXPR, sizetype, offset, pointer_byte_size); + tree elems_size_address = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (buffer), buffer, offset); + a68_add_stmt (fold_build2 (MODIFY_EXPR, + void_type_node, + fold_build1 (INDIRECT_REF, + sizetype, + elems_size_address), + elems_size)); + } + else if (A68_STRUCT_TYPE_P (type)) + { + /* Initialize the struct's fields in the allocated buffer. 
*/ + tree base = a68_lower_tmpvar ("base%", TREE_TYPE (buffer), + fold_build2 (POINTER_PLUS_EXPR, + TREE_TYPE (buffer), + buffer, offset)); + PACK_T *field_pack = PACK (m); + for (tree field = TYPE_FIELDS (type); field; field = TREE_CHAIN (field)) + { + gcc_assert (COMPLETE_TYPE_P (TREE_TYPE (field))); + // printf ("BYTE_POSITION\n"); + // debug_tree (byte_position (field)); + a68_add_stmt (fill_in_buffer (base, byte_position (field), + bounds, MOID (field_pack), allocator)); + FORWARD (field_pack); + } + } + else if (A68_UNION_TYPE_P (type)) + { + /* Union values are initialized with an overhead of (sizetype) -1, which + means it is not initialized. Note that row declarers in united modes + are formal declarers, so they never contribute bounds. */ + tree overhead_address + = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (buffer), buffer, offset); + tree uninitialized = fold_convert (sizetype, build_minus_one_cst (ssizetype)); + a68_add_stmt (fold_build2 (MODIFY_EXPR, void_type_node, + fold_build1 (INDIRECT_REF, sizetype, overhead_address), + uninitialized)); +#if 0 + /* Set the rest of the union with zeroes. */ + tree value_address + = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (buffer), + buffer, + fold_build2 (PLUS_EXPR, sizetype, offset, size_in_bytes (sizetype))); + + tree value_field = TREE_CHAIN (TYPE_FIELDS (type)); + tree call = builtin_decl_explicit (BUILT_IN_MEMSET); + call = build_call_expr_loc (UNKNOWN_LOCATION, call, 3, + value_address, + integer_zero_node, + size_in_bytes (TREE_TYPE (value_field))); + a68_add_stmt (call); +#endif + } + else + gcc_unreachable (); + + a68_add_stmt (buffer); + filler = a68_pop_stmt_list (); + TREE_TYPE (filler) = pointer_type; + return filler; +} + +/* Lower to code that generates storage for a value of mode M, using bounds + from BOUNDS. */ + +static tree +gen_mode (MOID_T *m, tree_stmt_iterator *bounds, allocator_t allocator) +{ + /* Allocate space for the value and fill it. 
*/ + tree buffer = (*allocator) (CTYPE (m), size_in_bytes (CTYPE (m))); + buffer = save_expr (buffer); + return fill_in_buffer (buffer, size_zero_node, bounds, m, allocator); +} + +/* Collect row bounds from BOUNDS. + Lower bounds are optional, and if not found they default to 1. */ + +static void +collect_bounds (NODE_T *p, LOW_CTX_T ctx) +{ + for (; p != NO_NODE; FORWARD (p)) + { + if (IS (p, BOUNDS_LIST)) + collect_bounds (SUB (p), ctx); + else if (IS (p, BOUND)) + collect_bounds (SUB (p), ctx); + else if (IS (p, UNIT)) + { + /* First the lower bound. */ + tree lower_bound; + if (NEXT (p) != NO_NODE && IS (NEXT (p), COLON_SYMBOL)) + { + lower_bound = a68_lower_tree (p, ctx); + p = NEXT_NEXT (p); + } + else + /* Default lower bound. */ + lower_bound = integer_one_node; + + /* Now the upper bound. */ + tree upper_bound = a68_lower_tree (p, ctx); + + /* See the comment for collect_declarer_bounds for an explanation for + the usage of save_expr here. */ + a68_add_stmt (save_expr (lower_bound)); + a68_add_stmt (save_expr (upper_bound)); + } + } +} + +/* Append all the bounds found in the given declarer in the current statements + list. */ + +static void +collect_declarer_bounds_1 (NODE_T *p, LOW_CTX_T ctx) +{ + for (; p != NO_NODE; FORWARD (p)) + { + if (IS (p, BOUNDS)) + collect_bounds (SUB (p), ctx); + else if (IS (p, INDICANT) && IS_LITERALLY (p, "STRING")) + return; + else if (IS (p, INDICANT)) + { + if (TAX (p) != NO_TAG && HAS_ROWS (MOID (TAX (p)))) + /* Continue from definition at MODE A = .... */ + collect_declarer_bounds_1 (NEXT_NEXT (NODE (TAX (p))), ctx); + } + else if (IS (p, DECLARER) + && (IS_UNION (MOID (p)) || !HAS_ROWS (MOID (p)))) + return; + else + collect_declarer_bounds_1 (SUB (p), ctx); + } +} + +/* Given a declarer node, return a statements list with all the expressions of + the bounds within it. 
+ + Note that the language rules mandates that the bounds expression shall be + evaluated just once even when they are used by several generators, such as + in + + [n +:= 1]real a, b; + + Therefore the expressions are saved in save_exprs and the statements list + is cached in the CDECL field of the parse tree node. */ + +static tree +collect_declarer_bounds (NODE_T *p, LOW_CTX_T ctx) +{ + if (CDECL (p) == NULL_TREE) + { + a68_push_stmt_list (M_VOID); + collect_declarer_bounds_1 (SUB (p), ctx); + CDECL (p) = a68_pop_stmt_list (); + } + + return CDECL (p); +} + +/* Low the elaboration of a generator. + + The lowered code evaluates to a pointer. + + DECLARER is the actual declarer passed to the generator. + + MODE is the mode of the value to generate. + + HEAP is true if we are lowering a heap generator, false if we are lowering a + LOC generator. */ + +tree +a68_low_generator (NODE_T *declarer, + MOID_T *mode, + bool heap, LOW_CTX_T ctx) +{ + /* If the declarer is a mode indicant which has a recursive definition then + we need to lower to a function which gets immediately called rather than + an expression, to handle the recursivity. In that case, though, we need + to always heap allocated memory for obvious reasons, which sucks, but such + is life. */ + + if (IS (SUB (declarer), INDICANT) && TAX (SUB (declarer)) != NO_TAG + && IS_RECURSIVE (TAX (SUB (declarer)))) + { + if (TAX_TREE_DECL (TAX (SUB (declarer))) != NULL_TREE) + { + /* This is a recursive mode indicant. Just call the function. 
*/ + return save_expr (build_call_expr_loc (a68_get_node_location (SUB (declarer)), + TAX_TREE_DECL (TAX (SUB (declarer))), + 0)); + } + + tree ret_type = build_pointer_type (CTYPE (mode)); + tree func_decl = build_decl (a68_get_node_location (declarer), + FUNCTION_DECL, + NULL_TREE /* name, set below */, + build_function_type (ret_type, void_list_node)); + char *name = xasprintf ("genroutine%d", DECL_UID (func_decl)); + DECL_NAME (func_decl) = a68_get_mangled_identifier (name); + free (name); + DECL_EXTERNAL (func_decl) = 0; + DECL_STATIC_CHAIN (func_decl) = !a68_in_global_range (); + TREE_ADDRESSABLE (func_decl) = 1; + TREE_PUBLIC (func_decl) = a68_in_global_range (); + TREE_STATIC (func_decl) = 1; + TAX_TREE_DECL (TAX (SUB (declarer))) = func_decl; + + a68_add_decl (func_decl); + a68_add_decl_expr (fold_build1_loc (a68_get_node_location (declarer), + DECL_EXPR, + TREE_TYPE (func_decl), + func_decl)); + announce_function (func_decl); + a68_push_function_range (func_decl, ret_type); + + /* Collect bounds from declarer. */ + tree bounds = collect_declarer_bounds (declarer, ctx); + + /* Allocate and initialize a memory buffer for a value of mode MODE with + bounds in BOUNDS. */ + tree_stmt_iterator bounds_iter = tsi_start (bounds); + tree gen = gen_mode (mode, &bounds_iter, a68_lower_malloc); + a68_pop_function_range (gen); + /* Avoid this generator function, which uses the global lexical + environment, to be reused in other contexts. */ + TAX_TREE_DECL (TAX (SUB (declarer))) = NULL_TREE; + return save_expr (build_call_expr_loc (a68_get_node_location (declarer), + func_decl, 0)); + } + else + { + /* Collect bounds from declarer. */ + tree bounds = collect_declarer_bounds (declarer, ctx); + + /* Allocate and initialize a memory buffer for a value of mode MODE with + bounds in BOUNDS. */ + tree_stmt_iterator bounds_iter = tsi_start (bounds); + tree gen = gen_mode (mode, &bounds_iter, + heap ? 
a68_lower_malloc : a68_lower_alloca); + return gen; + } +} + +/* Allocate storage for a value of mode M. + NBOUNDS is the number of bounds in BOUNDS. */ + +tree +a68_low_gen (MOID_T *m, size_t nbounds, tree *bounds, bool use_heap) +{ + /* First collect bounds from BOUNDS into a statements list, which is what + gen_mode expects. */ + tree bounds_list = alloc_stmt_list (); + for (size_t i = 0; i < nbounds; ++i) + append_to_statement_list_force (bounds[i], &bounds_list); + allocator_t allocator = use_heap ? a68_lower_malloc : a68_lower_alloca; + + tree_stmt_iterator q = tsi_start (bounds_list); + tree ret = gen_mode (m, &q, allocator); + free_stmt_list (bounds_list); + return ret; +} diff --git a/gcc/algol68/a68-low-units.cc b/gcc/algol68/a68-low-units.cc new file mode 100644 index 000000000000..9802468873b2 --- /dev/null +++ b/gcc/algol68/a68-low-units.cc @@ -0,0 +1,1253 @@ +/* Lower units to GENERIC. + Copyright (C) 2025 Jose E. Marchesi. + + Written by Jose E. Marchesi. + + GCC is free software; you can redistribute it and/or modify it + under the terms of the GNU General Public License as published by + the Free Software Foundation; either version 3, or (at your option) + any later version. + + GCC is distributed in the hope that it will be useful, but WITHOUT + ANY WARRANTY; without even the implied warranty of MERCHANTABILITY + or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public + License for more details. + + You should have received a copy of the GNU General Public License + along with GCC; see the file COPYING3. If not see + <http://www.gnu.org/licenses/>. 
*/ + +#define INCLUDE_MEMORY +#include "config.h" +#include "system.h" +#include "coretypes.h" + +#include "tree.h" +#include "fold-const.h" +#include "diagnostic.h" +#include "langhooks.h" +#include "tm.h" +#include "function.h" +#include "cgraph.h" +#include "toplev.h" +#include "varasm.h" +#include "predict.h" +#include "stor-layout.h" +#include "tree-iterator.h" +#include "stringpool.h" +#include "print-tree.h" +#include "gimplify.h" +#include "dumpfile.h" +#include "convert.h" + +#include "a68.h" + +/* Note that enclosed clauses, which are units, are handled in + a68-low-clauses. */ + +/* Lower an applied identifier. + + This lowers into the declaration of the referred identifier. The + declaration of the identifier should now be available in the symbol table + entry for the identifier. */ + +tree +a68_lower_identifier (NODE_T *p, LOW_CTX_T ctx) +{ + if (TAG_TABLE (TAX (p)) == A68_STANDENV) + { + /* This identifier is defined in the standard prelude. Use its lowering + handler. */ + LOWERER_T lowerer = LOWERER (TAX (p)); + return (*lowerer) (p, ctx); + } + else + { + tree id_decl = TAX_TREE_DECL (TAX (p)); + + if (id_decl == NULL_TREE) + { + /* This is an applied identifier used before the corresponding defining + identifier gets defined in either an identity declaration or a + variable declaration. Create the declaration and install it in the + symbol table. The declaration itself, declaration expr and + initialization assignment for the declaration will be emitted by the + corresponding declaration lowering handler. Note that the defining + identifier (and therefore the declaration associated with this applied + identifier) may be in an outer lexical block. 
*/ + + if (IS (MOID (p), PROC_SYMBOL)) + { + bool external = (MOIF (TAX (p)) != NO_MOIF); + const char *extern_symbol = EXTERN_SYMBOL (TAX (p)); + if (VARIABLE (TAX (p))) + { + if (external) + id_decl + = a68_make_variable_declaration_decl (p, NAME (MOIF (TAX (p))), external, + extern_symbol); + else + id_decl + = a68_make_variable_declaration_decl (p, ctx.module_definition_name); + } + else if (IN_PROC (TAX (p))) + { + if (external) + id_decl + = a68_make_proc_identity_declaration_decl (p, NAME (MOIF (TAX (p))), + false /* indicant */, + external, + extern_symbol); + else + id_decl + = a68_make_proc_identity_declaration_decl (p, ctx.module_definition_name); + } + else + { + if (external) + id_decl + = a68_make_identity_declaration_decl (p, NAME (MOIF (TAX (p))), + false /* indicant */, + external, extern_symbol); + else + id_decl + = a68_make_identity_declaration_decl (p, ctx.module_definition_name); + } + } + else + { + bool external = (MOIF (TAX (p)) != NO_MOIF); + const char *extern_symbol = EXTERN_SYMBOL (TAX (p)); + if (VARIABLE (TAX (p))) + { + if (external) + id_decl + = a68_make_variable_declaration_decl (p, NAME (MOIF (TAX (p))), external, + extern_symbol); + else + id_decl + = a68_make_variable_declaration_decl (p, ctx.module_definition_name); + } + else + { + if (external) + id_decl + = a68_make_identity_declaration_decl (p, NAME (MOIF (TAX (p))), + false /* indicant */, external, + extern_symbol); + else + id_decl + = a68_make_identity_declaration_decl (p, ctx.module_definition_name); + } + } + + TAX_TREE_DECL (TAX (p)) = id_decl; + } + + /* If the identifier refers to a FUNCTION_DECL, this means the declaration + was made by a procecure-identity-dclaration. The applied identifier in + that case refers to the address of the corresponding function. */ + if (TREE_CODE (id_decl) == FUNCTION_DECL) + return fold_build1 (ADDR_EXPR, + build_pointer_type (TREE_TYPE (id_decl)), + id_decl); + else + return id_decl; + } +} + +/* Lower a string denotation. 
+ + String denotations are of mode []CHAR, and lower into a multiple with a + single dimension, and with the following characteristics: + + - The lower bound of dimension 0 is 1. + - The upper bound of dimension 0 is strlen (NSYMBOL (p)). + - The stride of dimension 0 is 0. + - The pointed elements are a buffer of CHARs allocated in the stack. */ + +tree +a68_lower_string_denotation (NODE_T *p, LOW_CTX_T ctx ATTRIBUTE_UNUSED) +{ + /* First process string breaks. */ + char *str = a68_string_process_breaks (p, NSYMBOL (p)); + + /* Build a multiple of UCS-4 CHARs from the resulting UTF-8 string. */ + size_t ucslen; + uint32_t *ucsbuf = a68_u8_to_u32 ((const uint8_t *) str, strlen (str), + NULL, &ucslen); + free (str); + tree string_literal = build_string_literal (ucslen * sizeof (uint32_t), + (char *) ucsbuf, a68_char_type); + tree elements = string_literal; + tree lower_bound = fold_convert (ssizetype, size_one_node); + tree upper_bound = ssize_int (ucslen); + tree elements_size = fold_build2 (MULT_EXPR, sizetype, + size_int (ucslen), + size_in_bytes (a68_char_type)); + tree multiple = a68_row_value (CTYPE (M_ROW_CHAR), 1, + elements, elements_size, + &lower_bound, &upper_bound); + TREE_CONSTANT (multiple) = true; + free (ucsbuf); + return multiple; +} + +/* Lower denotation. + + denotation : int denotation; real denotation; bits denotation; + row char denotation; + true symbol; false symbol; + empty symbol; + longety, int denotation; + longety, real denotation; + longety, bits denotation; + shortety, int denotation; + shortety, real denotation; + shortety, bits denotation. + + Denotations lower into GENERIC cst expressions. */ + +tree +a68_lower_denotation (NODE_T *p, LOW_CTX_T ctx) +{ + MOID_T *moid = MOID (p); + + if (moid == M_VOID) + /* EMPTY */ + return a68_lower_empty (p, ctx); + else if (moid == M_BOOL) + /* TRUE or FALSE. */ + return (NSYMBOL (p)[0] == 'T') ? 
boolean_true_node : boolean_false_node;
+  else if (moid == M_CHAR)
+    {
+      char *s = a68_string_process_breaks (p, NSYMBOL (p));
+      uint32_t ucs;
+      int length = a68_u8_mbtouc (&ucs, (const uint8_t *) s, 1);
+      gcc_assert (length == 1);
+      free (s);
+      return build_int_cst (a68_char_type, ucs);
+    }
+  else if (moid == M_ROW_CHAR)
+    return a68_lower_string_denotation (p, ctx);
+  else if (moid == M_INT
+           || moid == M_LONG_INT
+           || moid == M_LONG_LONG_INT
+           || moid == M_SHORT_INT
+           || moid == M_SHORT_SHORT_INT)
+    {
+      /* SIZETY INT */
+      tree type;
+      char *end;
+      NODE_T *s = NO_NODE;
+      if (IS (SUB (p), LONGETY) || IS (SUB (p), SHORTETY))
+        s = NEXT (SUB (p));
+      else
+        s = SUB (p);
+
+      type = CTYPE (moid);
+      /* NOTE(review): strtol returns long, which is 32 bits on ILP32 and
+         LLP64 hosts; a LONG LONG INT denotation wider than that would be
+         silently truncated before reaching the int64_t.  Consider strtoll
+         here and below -- TODO confirm against supported host ABIs.  */
+      int64_t val = strtol (NSYMBOL (s), &end, 10);
+      gcc_assert (end[0] == '\0');
+      return build_int_cst (type, val);
+    }
+  /* NOTE(review): plain `if' where every sibling branch is `else if'.
+     Harmless because the previous branch always returns, but inconsistent
+     with the surrounding chain.  */
+  if (moid == M_BITS
+      || moid == M_LONG_BITS
+      || moid == M_LONG_LONG_BITS
+      || moid == M_SHORT_BITS
+      || moid == M_SHORT_SHORT_BITS)
+    {
+      /* SIZETY BITS */
+
+      tree type;
+      char *end;
+      NODE_T *s = NO_NODE;
+      if (IS (SUB (p), LONGETY) || IS (SUB (p), SHORTETY))
+        s = NEXT (SUB (p));
+      else
+        s = SUB (p);
+
+      type = CTYPE (moid);
+      /* A bits denotation has the form RADIX r DIGITS, e.g. 16rff.  Parse
+         the radix first, then the digits in that radix.  */
+      int64_t radix = strtol (NSYMBOL (s), &end, 10);
+      gcc_assert (end != NSYMBOL (s) && *end == 'r');
+      end++;
+      int64_t val = strtol (end, &end, radix);
+      gcc_assert (end[0] == '\0');
+      return build_int_cst (type, val);
+    }
+  else if (moid == M_REAL
+           || moid == M_LONG_REAL
+           || moid == M_LONG_LONG_REAL)
+    {
+      /* SIZETY REAL */
+      tree type;
+      NODE_T *s = NO_NODE;
+      if (IS (SUB (p), LONGETY) || IS (SUB (p), SHORTETY))
+        s = NEXT (SUB (p));
+      else
+        s = SUB (p);
+
+      if (moid == M_REAL)
+        type = float_type_node;
+      else if (moid == M_LONG_REAL)
+        type = double_type_node;
+      else if (moid == M_LONG_LONG_REAL)
+        type = long_double_type_node;
+      else
+        gcc_unreachable ();
+
+      REAL_VALUE_TYPE val;
+      real_from_string (&val, NSYMBOL (s));
+      return build_real (type, val);
+    }
+
+  gcc_unreachable ();
+  return NULL_TREE;
+}
+
+/* Lower SKIP.
+ + skip +*/ + +tree +a68_lower_skip (NODE_T *p, LOW_CTX_T ctx ATTRIBUTE_UNUSED) +{ + return a68_get_skip_tree (MOID (p)); +} + +/* Lower NIHIL. + + nihil : nil. + + NIL stands for a name referring to no value and which must be + distinguishable from any other name. It is of mode REF AMODE. NIL is never + subject to coercion and it may only occur where the context is strong, + i.e. where AMODE is known at compile-time. + + It lowers to a pointer to AMODE with value 0. */ + +tree +a68_lower_nihil (NODE_T *p, LOW_CTX_T ctx ATTRIBUTE_UNUSED) +{ + tree type = CTYPE (MOID (p)); + + gcc_assert (type == a68_void_type || POINTER_TYPE_P (type)); + if (type == a68_void_type) + return a68_lower_empty (p, ctx); + else + return build_int_cst (type, 0); +} + +/* Lower EMPTY. */ + +tree +a68_lower_empty (NODE_T *p ATTRIBUTE_UNUSED, LOW_CTX_T ctx ATTRIBUTE_UNUSED) +{ + return a68_get_empty (); +} + +/* Lower an identity relation. + + identity relation : tertiary, is symbol, tertiary; + tertiary, isnt symbol, tertiary. + + An identity relation determines whether two name values are the same. It + lowers into EQ_EXPR in case of IS and into NE_EXPR in case of ISNT. */ + +tree +a68_lower_identity_relation (NODE_T *p, LOW_CTX_T ctx) +{ + NODE_T *lhs = SUB (p); + NODE_T *oper = NEXT (lhs); + NODE_T *rhs = NEXT (oper); + + /* Consolidate arguments to make sure we are comparing pointers in the + r-value context of the EQ_EXPR or NE_EXPR operation below. */ + tree op1 = a68_consolidate_ref (MOID (lhs), a68_lower_tree (lhs, ctx)); + tree op2 = a68_consolidate_ref (MOID (rhs), a68_lower_tree (rhs, ctx)); + + tree_code code; + if (IS (oper, IS_SYMBOL)) + code = EQ_EXPR; + else if (IS (oper, ISNT_SYMBOL)) + code = NE_EXPR; + else + gcc_unreachable (); + + return fold_build2_loc (a68_get_node_location (p), + code, boolean_type_node, op1, op2); +} + +/* Lower AND_FUNCTION and OR_FUNCTION. + + and function : tertiary, andf symbol, tertiary. + or function : tertiary, orf_symbol, tertiary. 
+ + These are pseudo-operators that are used to implement short-circuits + evaluation of logical expressions. + + These pseudo-operators lower into TRUTH_ANDIF_EXPR or TRUTH_ORIF_EXPR, + respectively. */ + +tree +a68_lower_logic_function (NODE_T *p, LOW_CTX_T ctx) +{ + NODE_T *lhs = SUB (p); + NODE_T *oper = NEXT (lhs); + NODE_T *rhs = NEXT (oper); + + tree op1 = a68_lower_tree (lhs, ctx); + tree op2 = a68_lower_tree (rhs, ctx); + + tree_code code; + if (IS (oper, ANDF_SYMBOL)) + code = TRUTH_ANDIF_EXPR; + else if (IS (oper, ORF_SYMBOL)) + code = TRUTH_ORIF_EXPR; + else + gcc_unreachable (); + + return fold_build2_loc (a68_get_node_location (p), + code, boolean_type_node, op1, op2); +} + +/* Lower a primary. + + primary : identifier; denotation; cast; enclosed clause; format text. + + The primary lowers into some GENERIC expression. */ + +tree +a68_lower_primary (NODE_T *p, LOW_CTX_T ctx) +{ + return a68_lower_tree (SUB (p), ctx); +} + +/* Lower a cast. + + cast : declarer, enclosed clause; + void symbol, enclosed clause. + + A cast establishes a strong context with some required mode. This context + allows coercions to be applied, and these coercions have been inserted in + the parse tree by the parser. */ + +tree +a68_lower_cast (NODE_T *p, LOW_CTX_T ctx) +{ + return a68_lower_tree (NEXT (SUB (p)), ctx); +} + +/* Lower a slice. + + slice : MULTIPLE INDEXER + + Slicing a multiple may result in either an element of the multiple, if the + operation is indexing, or another multiple, if the operation is a + trimming. 
*/ + +static void +lower_subscript_for_indexes (NODE_T *p, LOW_CTX_T ctx) +{ + for (; p != NO_NODE; FORWARD (p)) + { + switch (ATTRIBUTE (p)) + { + case TRIMMER: + /* Because of ANNOTATION (indexer) == SLICE */ + gcc_unreachable (); + break; + case UNIT: + a68_add_stmt (a68_lower_tree (p, ctx)); + break; + case GENERIC_ARGUMENT: + case GENERIC_ARGUMENT_LIST: + lower_subscript_for_indexes (SUB (p), ctx); + break; + default: + break; + } + } +} + +static void +lower_subscript_for_trimmers (NODE_T *p, LOW_CTX_T ctx, + tree multiple, tree new_multiple, + int *dim, int *new_dim, + tree elements_pointer_type) +{ + /* new.elements := multiple.elements; + FOR dim TO num dimensions + DO CO t[dim] is either a subscript i or a trimmer i : j @ k CO + new.elements +:= i * multiple.strides[dim]; + IF t[dim] is a trimmer + THEN INT d := ( k is absent | 1 | multiple.lb[dim] - k ); + new.lb[dim] := multiple.lb[dim] - d; + new.ub[dim] := multiple.ub[dim] - d; + new.strides[dim] := multiple.strides[dim] + FI + OD + */ + + for (; p != NO_NODE; FORWARD (p)) + { + switch (ATTRIBUTE (p)) + { + case UNIT: + { + tree unit = save_expr (fold_convert (ssizetype, a68_lower_tree (p, ctx))); + tree new_elements = a68_multiple_elements (new_multiple); + tree size_dim = size_int (*dim); + tree dim_lower_bound = save_expr (a68_multiple_lower_bound (multiple, size_dim)); + tree stride = save_expr (a68_multiple_stride (multiple, size_dim)); + + /* Validate bounds. 
*/ + if (OPTION_BOUNDS_CHECKING (&A68_JOB)) + a68_add_stmt (a68_multiple_bounds_check (p, size_dim, multiple, unit)); + + /* new_elements += i * strides[dim] */ + tree offset = fold_build2 (MULT_EXPR, sizetype, + fold_convert (sizetype, fold_build2 (MINUS_EXPR, ssizetype, + unit, dim_lower_bound)), + stride); + + offset = save_expr (offset); + new_elements = fold_build2 (POINTER_PLUS_EXPR, + elements_pointer_type, + new_elements, + offset); + a68_add_stmt (a68_multiple_set_elements (new_multiple, new_elements)); + + /* elements_size -= i * strides[dim] */ + tree elements_size = a68_multiple_elements_size (new_multiple); + elements_size = fold_build2 (MINUS_EXPR, sizetype, + elements_size, offset); + a68_add_stmt (a68_multiple_set_elements_size (new_multiple, elements_size)); + + *dim += 1; + break; + } + case TRIMMER: + { + /* First collect components from the trimmer. */ + tree size_dim = size_int (*dim); + tree dim_lower_bound = save_expr (a68_multiple_lower_bound (multiple, size_dim)); + tree lower_bound = dim_lower_bound; + tree upper_bound = save_expr (a68_multiple_upper_bound (multiple, size_dim)); + tree at = ssize_int (1); + + NODE_T *q = SUB (p); + if (q != NO_NODE) + { + if (IS (q, AT_SYMBOL)) + { + /* Both bounds are implicit. */ + at = save_expr (fold_convert (ssizetype, a68_lower_tree (NEXT (q), ctx))); + } + else if (IS (q, COLON_SYMBOL)) + { + /* Lower bound is implicit. */ + FORWARD (q); + if (IS (q, AT_SYMBOL)) + { + /* Upper bound is implicit, AT specified. */ + gcc_assert (IS (q, AT_SYMBOL)); + at = save_expr (fold_convert (ssizetype, a68_lower_tree (NEXT (q), ctx))); + } + else + { + upper_bound + = save_expr (fold_convert (ssizetype, a68_lower_tree (q, ctx))); + FORWARD (q); + if (q != NO_NODE) + { + gcc_assert (IS (q, AT_SYMBOL)); + at = save_expr (fold_convert (ssizetype, a68_lower_tree (NEXT (q), ctx))); + } + } + } + else + { + /* Lower bound is explicit. 
*/ + lower_bound = fold_convert (ssizetype, a68_lower_tree (q, ctx)); + FORWARD (q); + gcc_assert (IS (q, COLON_SYMBOL)); + FORWARD (q); + if (q != NO_NODE) + { + if (IS (q, AT_SYMBOL)) + at = save_expr (fold_convert (ssizetype, a68_lower_tree (NEXT (q), ctx))); + else + { + upper_bound + = save_expr (fold_convert (ssizetype, a68_lower_tree (q, ctx))); + FORWARD (q); + if (q != NO_NODE && IS (q, AT_SYMBOL)) + at = + save_expr (fold_convert (ssizetype, a68_lower_tree (NEXT (q), ctx))); + } + } + } + } + + /* Time for some bounds checking. + + Note that in trimmers, given the current dimension's bounds + (L,U), we cannot simply do the check: + + L <= lower_bound <= U + L <= upper_bound <= U + + This is because the multiple may be flat, and the dimension may + have bounds such like U < L. In that case, the expressions + above would always eval to false for any lower_bound and + upper_bound. + + So we check for this instead: + + L <= lower_bound AND upper_bound <= U + + This allows to trim a "flat dimension" using a trimmer where + upper_bound < lower_bound. The result is, of course, another + "flat dimension" in the multiple result of the trimming. 
*/ + + if (OPTION_BOUNDS_CHECKING (&A68_JOB)) + { + a68_add_stmt (a68_multiple_single_bound_check (p, size_dim, multiple, + lower_bound, + false /* upper_bound */)); + a68_add_stmt (a68_multiple_single_bound_check (p, size_dim, multiple, + upper_bound, + true /* upper_bound */)); + } + + /* new_elements += i * strides[dim] */ + tree stride = save_expr (a68_multiple_stride (multiple, size_dim)); + tree new_elements = a68_multiple_elements (new_multiple); + tree offset = fold_build2 (MULT_EXPR, sizetype, + fold_convert (sizetype, fold_build2 (MINUS_EXPR, ssizetype, + lower_bound, dim_lower_bound)), + stride); + + offset = save_expr (offset); + new_elements = fold_build2 (POINTER_PLUS_EXPR, + elements_pointer_type, + new_elements, + offset); + a68_add_stmt (a68_multiple_set_elements (new_multiple, new_elements)); + + /* elements_size -= i * strides[dim] */ + tree elements_size = a68_multiple_elements_size (new_multiple); + elements_size = fold_build2 (MINUS_EXPR, sizetype, + elements_size, offset); + a68_add_stmt (a68_multiple_set_elements_size (new_multiple, + elements_size)); + + /* Fill the triplet for this dimension in new_multiple. 
*/ + tree size_new_dim = size_int (*new_dim); + tree d = fold_build2 (MINUS_EXPR, ssizetype, lower_bound, at); + + a68_add_stmt (a68_multiple_set_lower_bound (new_multiple, size_new_dim, + fold_build2 (MINUS_EXPR, ssizetype, + lower_bound, d))); + a68_add_stmt (a68_multiple_set_upper_bound (new_multiple, size_new_dim, + fold_build2 (MINUS_EXPR, ssizetype, + upper_bound, d))); + a68_add_stmt (a68_multiple_set_stride (new_multiple, size_new_dim, stride)); + + *new_dim += 1; + *dim += 1; + break; + } + default: + lower_subscript_for_trimmers (SUB (p), ctx, + multiple, new_multiple, + dim, new_dim, + elements_pointer_type); + } + } +} + +tree +a68_lower_slice (NODE_T *p, LOW_CTX_T ctx) +{ + NODE_T *indexer = NEXT_SUB (p); + MOID_T *orig_multiple_mode = MOID (SUB (p)); + MOID_T *multiple_mode = orig_multiple_mode; + bool slicing_name = false; + + /* First of all, lower the multiple being sliced. If it is a name to a + multiple, set a flag and dereference. */ + tree multiple = a68_lower_tree (SUB (p), ctx); + MOID_T *orig_sliced_multiple_mode = MOID (p); + MOID_T *sliced_multiple_mode = MOID (p); + size_t slice_num_dimensions = 0; + if (IS_REF (MOID (SUB (p)))) + { + slicing_name = true; + multiple = a68_low_deref (multiple, SUB (p)); + multiple_mode = SUB (multiple_mode); + slice_num_dimensions = DIM (SUB (MOID (p))); + sliced_multiple_mode = SUB (sliced_multiple_mode); + } + else + slice_num_dimensions = DIM (MOID (p)); + + tree slice = NULL_TREE; + if (ANNOTATION (indexer) == SLICE) + { + /* The slice has only indexers and no trimmers. Collect units and slice + an element of the multiple using a68_multiple_slice. This operation + results in an element of the multiple. */ + + /* Collect units */ + a68_push_range (NULL); + lower_subscript_for_indexes (SUB (indexer), ctx); + tree units = a68_pop_range (); + + /* We need to allocate space for as many indexes as dimensions of the + multiple. 
*/ + tree num_dimensions_tree = a68_multiple_dimensions (multiple); + gcc_assert (TREE_CODE (num_dimensions_tree) == INTEGER_CST); + int num_dimensions = tree_to_shwi (num_dimensions_tree); + + int num_indexes = 0; + tree *indexes = (tree *) xmalloc (sizeof (tree) * num_dimensions); + for (tree_stmt_iterator si = tsi_start (units); + !tsi_end_p (si); + tsi_next (&si)) + { + /* Add the unit to the list of indexes. */ + indexes[num_indexes] = tsi_stmt (si); + num_indexes++; + } + gcc_assert (num_indexes == num_dimensions); + + /* Slice. */ + slice = a68_multiple_slice (p, multiple, slicing_name, + num_indexes, indexes); + free (indexes); + } + else if (ANNOTATION (indexer) == TRIMMER) + { + /* The slice has both indexers and trimmers. Traverse the indexer + subtree to obtain the descriptor of the trimmed multiple (which is + another multiple) and the pointer to the elements, which points to + some position within the elements of the trimmed multiple. This + operation results in a new multiple of the same mode than the trimmed + multiple with shared elements. */ + + a68_push_range (sliced_multiple_mode); + + tree sliced_multiple = a68_lower_tmpvar ("multiple%", TREE_TYPE (multiple), + multiple); + tree *lower_bounds = (tree *) xmalloc (sizeof (tree) * slice_num_dimensions); + tree *upper_bounds = (tree *) xmalloc (sizeof (tree) * slice_num_dimensions); + tree ssize_one_node = fold_convert (ssizetype, size_one_node); + tree ssize_zero_node = fold_convert (ssizetype, size_zero_node); + for (size_t d = 0; d < slice_num_dimensions; ++d) + { + /* Note that these dummy bounds and the implied strides will be + overwritten by lower_subscript_for_trimmers below. 
*/ + lower_bounds[d] = ssize_one_node; + upper_bounds[d] = ssize_zero_node; + } + tree new_multiple = a68_row_value (CTYPE (sliced_multiple_mode), + slice_num_dimensions, + a68_multiple_elements (sliced_multiple), + a68_multiple_elements_size (sliced_multiple), + lower_bounds, upper_bounds); + new_multiple = save_expr (new_multiple); + new_multiple = a68_lower_tmpvar ("new_multiple%", TREE_TYPE (new_multiple), + new_multiple); + + int dim = 0; + int new_dim = 0; + lower_subscript_for_trimmers (SUB (indexer), ctx, + sliced_multiple, new_multiple, + &dim, &new_dim, + a68_row_elements_pointer_type (TREE_TYPE (multiple))); + a68_add_stmt (new_multiple); + slice = a68_pop_range (); + + /* In case we are slicing a ref to a multiple, return the address of the + resulting multiple and not the multiple itself. But in this case we + need an address in the heap, because the trimmed multiple may be in + the heap and the result shall have the same scope. */ + if (slicing_name) + { + tree ptrtype = CTYPE (orig_sliced_multiple_mode); + tree slice_addr = fold_build1 (ADDR_EXPR, ptrtype, slice); + tree alloc = a68_lower_malloc (ptrtype, size_in_bytes (TREE_TYPE (slice))); + alloc = save_expr (alloc); + tree copy = a68_lower_memcpy (alloc, slice_addr, size_in_bytes (TREE_TYPE (slice))); + + slice = fold_build2 (COMPOUND_EXPR, ptrtype, copy, alloc); + } + } + else + gcc_unreachable (); + + return slice; +} + +/* Lower a selection. + + selection : selector, secondary. + selector : field identifier, of symbol. + + The selection lowers into a COMPONENT_REF of the field corresponding to the + field identifier. */ + +tree +a68_lower_selection (NODE_T *p, LOW_CTX_T ctx) +{ + NODE_T *secondary = NEXT (SUB (p)); + NODE_T *field_identifier = SUB (SUB (p)); + + MOID_T *secondary_mode = MOID (secondary); + tree secondary_expr = a68_lower_tree (secondary, ctx); + + tree res = NULL_TREE; + + /* If the secondary is an address, we need to indirect. 
*/ + if (IS_REF (secondary_mode)) + { + secondary_expr = a68_low_deref (secondary_expr, secondary); + secondary_mode = SUB (secondary_mode); + } + + if (IS_FLEX (secondary_mode) || IS_ROW (secondary_mode)) + { + /* This is the selection of a multiple of structs. + + The result is a multiple with same dimensions, dimension bounds and + strides than the indexed multiple. The elements pointer is made to + point to the selected field of the first struct. */ + + MOID_T *result_mode = MOID (p); + if (IS_REF (result_mode)) + result_mode = SUB (result_mode); + MOID_T *struct_mode = SUB (secondary_mode); + tree field_id = a68_get_mangled_identifier (SYMBOL (INFO (field_identifier))); + tree struct_type = CTYPE (struct_mode); + a68_push_range (result_mode); + tree selection = a68_lower_tmpvar ("selection%", CTYPE (result_mode), + a68_get_skip_tree (result_mode)); + tree multiple = a68_lower_tmpvar ("multiple%", TREE_TYPE (secondary_expr), + secondary_expr); + + /* First set the bounds of the selection, which are exactly the same + bounds than the selected multiple. */ + for (int dim = 0; dim < DIM (DEFLEX (secondary_mode)); ++dim) + { + tree size_dim = size_int (dim); + tree lower_bound = a68_multiple_lower_bound (multiple, size_dim); + tree upper_bound = a68_multiple_upper_bound (multiple, size_dim); + tree stride = a68_multiple_stride (multiple, size_dim); + a68_add_stmt (a68_multiple_set_lower_bound (selection, size_dim, + lower_bound)); + a68_add_stmt (a68_multiple_set_upper_bound (selection, size_dim, + upper_bound)); + a68_add_stmt (a68_multiple_set_stride (selection, size_dim, + stride)); + } + + /* Now set the elements pointer, which is the elements pointer of the + selected multiple offset the offset of the selected field in its + struct type. 
*/ + tree elements = a68_multiple_elements (selection); + tree multiple_elements = a68_multiple_elements (multiple); + tree multiple_elements_size = a68_multiple_elements_size (multiple); + tree element_pointer_type = TREE_TYPE (elements); + tree field_offset = NULL_TREE; + for (tree f = TYPE_FIELDS (struct_type); f; f = DECL_CHAIN (f)) + { + if (field_id == DECL_NAME (f)) + { + field_offset = byte_position (f); + break; + } + } + gcc_assert (field_offset != NULL_TREE); + a68_add_stmt (a68_multiple_set_elements (selection, + fold_build2 (POINTER_PLUS_EXPR, + element_pointer_type, + multiple_elements, + field_offset))); + + /* The size of the buffer pointed by the elements pointer has to be + adjusted accordingly. */ + a68_add_stmt (a68_multiple_set_elements_size (selection, + fold_build2 (MINUS_EXPR, sizetype, + multiple_elements_size, + field_offset))); + + a68_add_stmt (selection); + res = a68_pop_range (); + } + else + { + /* This is the selection of a struct field. */ + gcc_assert (A68_STRUCT_TYPE_P (TREE_TYPE (secondary_expr))); + + /* Search for the selected field in the struct type. */ + tree struct_type = TREE_TYPE (secondary_expr); + tree field_id = a68_get_mangled_identifier (SYMBOL (INFO (field_identifier))); + tree field = NULL_TREE; + for (tree f = TYPE_FIELDS (struct_type); f; f = DECL_CHAIN (f)) + { + if (field_id == DECL_NAME (f)) + { + field = f; + break; + } + } + gcc_assert (field != NULL_TREE); + + /* Emit the COMPONENT_REF. */ + res = fold_build3_loc (a68_get_node_location (p), + COMPONENT_REF, + TREE_TYPE (field), + secondary_expr, + field, + NULL_TREE); + } + + /* The selection of a name yields a name. */ + if (IS_REF (MOID (secondary))) + /* XXX This may require copying. */ + return fold_build1 (ADDR_EXPR, CTYPE (MOID (p)), res); + else + return res; +} + +/* Lower a secondary. + + secondary : primary; generator; selection. + + The secondary lowers into some GENERIC expression. 
*/ + +tree +a68_lower_secondary (NODE_T *p, LOW_CTX_T ctx) +{ + return a68_lower_tree (SUB (p), ctx); +} + +/* Lower a formula. + + formula : secondary, operator, secondary; + secondary, operator, monadic formula; + secondary, operator, formula; + monadic formula; + monadic formula, operator, secondary; + monadic formula, operator, monadic formula; + monadic formula, operator, formula; + formula, operator, secondary; + formula, operator, monadic formula; + formula, operator, formula. + + The formula lowers into some GENERIC expression. */ + +tree +a68_lower_formula (NODE_T *p, LOW_CTX_T ctx) +{ + if (IS (SUB (p), MONADIC_FORMULA) && NEXT (SUB (p)) == NO_NODE) + return a68_lower_tree (SUB (p), ctx); + else + { + /* If the operator is defined in the standard prelude, then use its lowering + code. */ + if (TAG_TABLE (TAX (NEXT (SUB (p)))) == A68_STANDENV) + { + LOWERER_T lowerer = LOWERER (TAX (NEXT (SUB (p)))); + return (*lowerer) (p, ctx); + } + else + { + tree arg1 = a68_lower_tree (SUB (p), ctx); + tree op = a68_lower_tree (NEXT (SUB (p)), ctx); + tree arg2 = a68_lower_tree (NEXT (NEXT (SUB (p))), ctx); + + if (POINTER_TYPE_P (TREE_TYPE (op))) + op = fold_build1 (INDIRECT_REF, + TREE_TYPE (TREE_TYPE (op)), + op); + return build_call_expr_loc (a68_get_node_location (p), op, 2, arg1, arg2); + } + } +} + +/* Lower a monadic formula. + + monadic formula : operator, secondary; + operator, monadic formula. + + The monadic formula lowers into some GENERIC expression. */ + +tree +a68_lower_monadic_formula (NODE_T *p, LOW_CTX_T ctx) +{ + /* If the operator is defined in the standard prelude, then use its lowering + code. 
*/ + if (TAG_TABLE (TAX (SUB (p))) == A68_STANDENV) + { + LOWERER_T lowerer = LOWERER (TAX (SUB (p))); + return (*lowerer) (p, ctx); + } + else + { + tree op = a68_lower_tree (SUB (p), ctx); + tree secondary = a68_lower_tree (NEXT (SUB (p)), ctx); + + if (POINTER_TYPE_P (TREE_TYPE (op))) + op = fold_build1 (INDIRECT_REF, TREE_TYPE (TREE_TYPE (op)), op); + return build_call_expr_loc (a68_get_node_location (p), op, 1, secondary); + } +} + +/* Lower a tertiary. + + tertiary : nihil; monadic formula; formula; secondary. + + The tertiary lowers to some GENERIC expression. */ + +tree +a68_lower_tertiary (NODE_T *p, LOW_CTX_T ctx) +{ + return a68_lower_tree (SUB (p), ctx); +} + +/* Lower an assignation. + + assignation : tertiary, assign symbol, tertiary; + tertiary, assign symbol, identity relation; + tertiary, assign symbol, and function; + tertiary, assign symbol, or function; + tertiary, assign symbol, routine text; + tertiary, assign symbol, jump; + tertiary, assign symbol, skip; + tertiary, assign symbol, assignation; + tertiary, assign symbol, code clause. + + An assignation lowers into appending a MODIFY_EXPR to the statements list, + and the result of the expression is the left hand side. A compound + expression fits perfectly */ + +tree +a68_lower_assignation (NODE_T *p, LOW_CTX_T ctx) +{ + NODE_T *lhs_node = SUB (p); + NODE_T *rhs_node = NEXT (NEXT (SUB (p))); + tree lhs = a68_lower_tree (lhs_node, ctx); + tree rhs = a68_lower_tree (rhs_node, ctx); + + return a68_low_assignation (p, + lhs, MOID (lhs_node), + rhs, MOID (rhs_node)); +} + +/* Lower a generator. + + generator : loc symbol, declarer; + heap symbol, declarer; + new symbol, declarer. + + LOC generators lower into calls to BUILT_IN_ALLOCA. + HEAP generators lower into calls to malloc. 
*/ + +tree +a68_lower_generator (NODE_T *p, LOW_CTX_T ctx) +{ + NODE_T *declarer = NEXT (SUB (p)); + return a68_low_generator (declarer, + MOID (declarer), + !IS (SUB (p), LOC_SYMBOL), + ctx); +} + +/* Lower a procedure call. + + */ + +static void +collect_call_arguments (NODE_T *p, vec<tree, va_gc> *args, LOW_CTX_T ctx) +{ + for (; p != NO_NODE; FORWARD (p)) + { + if (IS (p, UNIT)) + { + /* In Algol 68 parameters are passed via an identity declaration, so + this must implement same semantics. */ + tree arg = a68_lower_tree (p, ctx); + if (HAS_ROWS (MOID (p))) + arg = a68_low_dup (arg); + arg = a68_consolidate_ref (MOID (p), arg); + args->quick_push (arg); + } + else + collect_call_arguments (SUB (p), args, ctx); + } +} + +tree +a68_lower_call (NODE_T *p, LOW_CTX_T ctx) +{ + MOID_T *proc_mode = MOID (SUB (p)); + MOID_T *ret_mode = SUB (proc_mode); + unsigned int nargs = DIM (proc_mode); + + /* Collect arguments. */ + vec<tree, va_gc> *args; + vec_alloc (args, nargs); + collect_call_arguments (NEXT (SUB (p)), args, ctx); + + /* Lower the primary to call. */ + tree primary = a68_lower_tree (SUB (p), ctx); + + /* We need a pointer to a function type. */ + if (!POINTER_TYPE_P (TREE_TYPE (primary))) + primary = fold_build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (primary)), + primary); + + /* Build a function call. */ + tree call = build_call_vec (CTYPE (ret_mode), primary, args); + SET_EXPR_LOCATION (call, a68_get_node_location (p)); + return call; +} + +/* Lower a routine text. 

   routine text : parameter pack, (declarer ; void symbol), colon symbol, assignation;
		  parameter pack, (declarer ; void symbol), colon symbol, identity relation;
		  parameter pack, (declarer ; void symbol), colon symbol, and function;
		  parameter pack, (declarer ; void symbol), colon symbol, or function;
		  parameter pack, (declarer ; void symbol), colon symbol, jump;
		  parameter pack, (declarer ; void symbol), colon symbol, skip;
		  parameter pack, (declarer ; void symbol), colon symbol, tertiary;
		  parameter pack, (declarer ; void symbol), colon symbol, routine text;
		  parameter pack, (declarer ; void symbol), colon symbol, code clause;
		  (declarer ; void symbol), colon symbol, assignation;
		  (declarer ; void symbol), colon symbol, identity relation;
		  (declarer ; void symbol), colon symbol, and function;
		  (declarer ; void symbol), colon symbol, or function;
		  (declarer ; void symbol), colon symbol, jump;
		  (declarer ; void symbol), colon symbol, skip;
		  (declarer ; void symbol), colon symbol, tertiary;
		  (declarer ; void symbol), colon symbol, routine text;
		  (declarer ; void symbol), colon symbol, code clause.

   Routine texts are used to create routines.  They can stand as the actual
   parameter of an identity declaration, as the actual parameter of a call,
   or as the right-hand side of an assignation.

   This lowering function is called in two different contexts:

   1) As part of a routine-identity-declaration, in which case the routine
      resulting from this routine-text is being ascribed to an identifier
      given in ctx.proc_decl_identifier.  In that case, we lower to a
      FUNCTION_DECL initialized with the body of the routine-text.

   2) As a free standing routine-text.  In that case ctx.proc_decl_identifier
      is NO_NODE.  We lower to the address of a FUNCTION_DECL that features
      some unique name.  This pointer will then likely be assigned or
      ascribed to some variable or identifier in a non-contracted identity
      declaration, but we cannot assume that, so we have to opt for the
      indirection.  */

tree
a68_lower_routine_text (NODE_T *p, LOW_CTX_T ctx)
{
  NODE_T *s = SUB (p);

  tree func_decl = NULL_TREE;
  NODE_T *defining_identifier = ctx.proc_decl_identifier;
  bool defining_operator = ctx.proc_decl_operator;
  if (defining_identifier != NO_NODE)
    {
      /* The routine-text is part of a routine-identity-declaration.
	 Reuse the FUNCTION_DECL already ascribed to the identifier, or
	 create (and record) one if this is the first time we see it.  */
      func_decl = TAX_TREE_DECL (TAX (defining_identifier));
      if (func_decl == NULL_TREE)
	{
	  func_decl
	    = a68_make_proc_identity_declaration_decl (defining_identifier,
						       ctx.module_definition_name,
						       defining_operator /* indicant */);
	  TAX_TREE_DECL (TAX (defining_identifier)) = func_decl;
	}

      /* If the routine-identity-declaration is in a public range then add the
	 declaration to the publicized declarations list.  Otherwise chain the
	 declaration in the proper block and bind it.  */
      if (PUBLIC_RANGE (TABLE (TAX (defining_identifier))))
	vec_safe_push (A68_MODULE_DEFINITION_DECLS, func_decl);
      else
	a68_add_decl (func_decl);
    }
  else
    {
      /* The routine-text is free standing: make an anonymous routine
	 declaration for it.  */
      func_decl = a68_make_anonymous_routine_decl (MOID (p));
      a68_add_decl (func_decl);
    }

  a68_add_decl_expr (fold_build1_loc (a68_get_node_location (p),
				      DECL_EXPR,
				      TREE_TYPE (func_decl),
				      func_decl));
  announce_function (func_decl);

  /* PARAMETER_PACK.  Remember the node now; it is lowered further below,
     once the function range has been pushed.  */
  NODE_T *parameter_pack_node = NO_NODE;
  tree parameter_pack = NULL_TREE; /* This is computed below.  */
  if (IS (s, PARAMETER_PACK))
    {
      parameter_pack_node = s;
      FORWARD (s);
    }

  /* DECLARER or VOID_SYMBOL.  */
  if (IS (s, DECLARER) || IS (s, VOID_SYMBOL))
    /* This is not used, as this formal declarer is also available in the
       procedure mode.  So just skip it.  */
    FORWARD (s);

  /* Skip the COLON_SYMBOL.  */
  gcc_assert (IS (s, COLON_SYMBOL));
  FORWARD (s);

  /* Lower the function body.

     This should be done in a new range in which the formal parameters of the
     routine-text have been declared.  */
  a68_push_function_range (func_decl, CTYPE (SUB (MOID (p))) /* result_type */);
  if (parameter_pack_node != NO_NODE)
    parameter_pack = a68_lower_tree (parameter_pack_node, ctx);
  DECL_ARGUMENTS (func_decl) = parameter_pack;
  /* Clear the ascription context before lowering the body: any
     routine-text nested in the body is not being ascribed to this
     declaration's identifier.  */
  ctx.proc_decl_identifier = NO_NODE;
  tree func_body = a68_lower_tree (s, ctx);
  a68_pop_function_range (func_body);

  if (defining_identifier != NO_NODE)
    /* Routine-text immediately ascribed to some identifier in a
       proc-identity-declaration.  Return the FUNCTION_DECL.  */
    return func_decl;
  else
    /* Free standing routine-text.  Return its address.  */
    return fold_build1 (ADDR_EXPR,
			build_pointer_type (TREE_TYPE (func_decl)),
			func_decl);
}

/* Lower a unit.

   unit : assignation; identity relation;
	  and function; or function; routine text;
	  jump; skip; tertiary; assertion; code clause.

   The unit lowers to an expression.  */

tree
a68_lower_unit (NODE_T *p, LOW_CTX_T ctx)
{
  /* A unit is a transparent wrapper around its single constituent:
     just lower it.  */
  return a68_lower_tree (SUB (p), ctx);
}
