Bootstrapped/regtested on x86_64-linux and i686-linux, ok for trunk?
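
The new builtin is intended to be used by libstdc++ to implement C++20
std::bit_cast.  As a rough sketch of the intended usage (the actual
libstdc++ wrapper will additionally be constrained; this is just an
illustration modeled on the new testcases below):

  template <typename To, typename From>
  constexpr To
  bit_cast (const From &from)
  {
    return __builtin_bit_cast (To, from);
  }

  // Reinterpret the bits of an IEEE single precision 1.0f in a constant
  // expression; source and destination have the same size and both are
  // trivially copyable.
  static_assert (bit_cast <unsigned int> (1.0f) == 0x3f800000U, "");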
2020-07-18  Jakub Jelinek  <jakub@redhat.com>
PR libstdc++/93121
* c-common.h (enum rid): Add RID_BUILTIN_BIT_CAST.
* c-common.c (c_common_reswords): Add __builtin_bit_cast.
* cp-tree.h (cp_build_bit_cast): Declare.
* cp-tree.def (BIT_CAST_EXPR): New tree code.
* cp-objcp-common.c (names_builtin_p): Handle RID_BUILTIN_BIT_CAST.
(cp_common_init_ts): Handle BIT_CAST_EXPR.
* cxx-pretty-print.c (cxx_pretty_printer::postfix_expression):
Likewise.
* parser.c (cp_parser_postfix_expression): Handle
RID_BUILTIN_BIT_CAST.
* semantics.c (cp_build_bit_cast): New function.
* tree.c (cp_tree_equal): Handle BIT_CAST_EXPR.
(cp_walk_subtrees): Likewise.
* pt.c (tsubst_copy): Likewise.
* constexpr.c (check_bit_cast_type, cxx_find_bitfield_repr_type,
cxx_native_interpret_aggregate, cxx_native_encode_aggregate,
cxx_eval_bit_cast): New functions.
(cxx_eval_constant_expression): Handle BIT_CAST_EXPR.
(potential_constant_expression_1): Likewise.
* cp-gimplify.c (cp_genericize_r): Likewise.
* g++.dg/cpp2a/bit-cast1.C: New test.
* g++.dg/cpp2a/bit-cast2.C: New test.
* g++.dg/cpp2a/bit-cast3.C: New test.
* g++.dg/cpp2a/bit-cast4.C: New test.
* g++.dg/cpp2a/bit-cast5.C: New test.
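
During constant evaluation the builtin is handled by encoding the source
operand into a byte buffer plus a parallel mask that tracks which bytes
have indeterminate (uninitialized or padding) values, and then
reinterpreting the destination type from those bytes.  Unions, pointers,
pointers to members and volatile subobjects are rejected, as are reads
of indeterminate bytes.  E.g. (an illustrative example in the spirit of
the bit-cast5.C testcase, not part of the patch):

  struct A { char a; int b; };  // bytes 1 to 3 are padding on typical ABIs
  struct B { int a, b; };

  constexpr B
  f ()
  {
    A x { 1, 2 };
    // Rejected during constant evaluation: B::a would read the
    // indeterminate padding bytes of A.
    return __builtin_bit_cast (B, x);
  }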
--- gcc/c-family/c-common.h.jj 2020-07-18 10:48:51.442493640 +0200
+++ gcc/c-family/c-common.h 2020-07-18 10:52:46.175021398 +0200
@@ -164,7 +164,7 @@ enum rid
RID_HAS_NOTHROW_COPY, RID_HAS_TRIVIAL_ASSIGN,
RID_HAS_TRIVIAL_CONSTRUCTOR, RID_HAS_TRIVIAL_COPY,
RID_HAS_TRIVIAL_DESTRUCTOR, RID_HAS_UNIQUE_OBJ_REPRESENTATIONS,
- RID_HAS_VIRTUAL_DESTRUCTOR,
+ RID_HAS_VIRTUAL_DESTRUCTOR, RID_BUILTIN_BIT_CAST,
RID_IS_ABSTRACT, RID_IS_AGGREGATE,
RID_IS_BASE_OF, RID_IS_CLASS,
RID_IS_EMPTY, RID_IS_ENUM,
--- gcc/c-family/c-common.c.jj 2020-07-18 10:48:51.381494543 +0200
+++ gcc/c-family/c-common.c 2020-07-18 10:52:46.178021354 +0200
@@ -373,6 +373,7 @@ const struct c_common_resword c_common_r
{ "__auto_type", RID_AUTO_TYPE, D_CONLY },
{ "__bases", RID_BASES, D_CXXONLY },
{ "__builtin_addressof", RID_ADDRESSOF, D_CXXONLY },
+ { "__builtin_bit_cast", RID_BUILTIN_BIT_CAST, D_CXXONLY },
{ "__builtin_call_with_static_chain",
RID_BUILTIN_CALL_WITH_STATIC_CHAIN, D_CONLY },
{ "__builtin_choose_expr", RID_CHOOSE_EXPR, D_CONLY },
--- gcc/cp/cp-tree.h.jj 2020-07-18 10:48:51.573491701 +0200
+++ gcc/cp/cp-tree.h 2020-07-18 10:52:46.180021324 +0200
@@ -7312,6 +7312,8 @@ extern tree finish_builtin_launder (loc
tsubst_flags_t);
extern tree cp_build_vec_convert (tree, location_t, tree,
tsubst_flags_t);
+extern tree cp_build_bit_cast (location_t, tree, tree,
+ tsubst_flags_t);
extern void start_lambda_scope (tree);
extern void record_lambda_scope (tree);
extern void record_null_lambda_scope (tree);
--- gcc/cp/cp-tree.def.jj 2020-07-18 10:48:51.569491760 +0200
+++ gcc/cp/cp-tree.def 2020-07-18 10:52:46.180021324 +0200
@@ -460,6 +460,9 @@ DEFTREECODE (UNARY_RIGHT_FOLD_EXPR, "una
DEFTREECODE (BINARY_LEFT_FOLD_EXPR, "binary_left_fold_expr", tcc_expression,
3)
DEFTREECODE (BINARY_RIGHT_FOLD_EXPR, "binary_right_fold_expr",
tcc_expression, 3)
+/* Represents the __builtin_bit_cast (type, expr) expression.
+ The type is in TREE_TYPE, expression in TREE_OPERAND (bitcast, 0). */
+DEFTREECODE (BIT_CAST_EXPR, "bit_cast_expr", tcc_expression, 1)
/** C++ extensions. */
--- gcc/cp/cp-objcp-common.c.jj 2020-07-18 10:48:51.515492559 +0200
+++ gcc/cp/cp-objcp-common.c 2020-07-18 10:52:46.181021310 +0200
@@ -394,6 +394,7 @@ names_builtin_p (const char *name)
case RID_BUILTIN_HAS_ATTRIBUTE:
case RID_BUILTIN_SHUFFLE:
case RID_BUILTIN_LAUNDER:
+ case RID_BUILTIN_BIT_CAST:
case RID_OFFSETOF:
case RID_HAS_NOTHROW_ASSIGN:
case RID_HAS_NOTHROW_CONSTRUCTOR:
@@ -498,6 +499,7 @@ cp_common_init_ts (void)
MARK_TS_EXP (ALIGNOF_EXPR);
MARK_TS_EXP (ARROW_EXPR);
MARK_TS_EXP (AT_ENCODE_EXPR);
+ MARK_TS_EXP (BIT_CAST_EXPR);
MARK_TS_EXP (CAST_EXPR);
MARK_TS_EXP (CONST_CAST_EXPR);
MARK_TS_EXP (CTOR_INITIALIZER);
--- gcc/cp/cxx-pretty-print.c.jj 2020-07-18 10:48:51.601491287 +0200
+++ gcc/cp/cxx-pretty-print.c 2020-07-18 10:52:46.181021310 +0200
@@ -655,6 +655,15 @@ cxx_pretty_printer::postfix_expression (
pp_right_paren (this);
break;
+ case BIT_CAST_EXPR:
+ pp_cxx_ws_string (this, "__builtin_bit_cast");
+ pp_left_paren (this);
+ type_id (TREE_TYPE (t));
+ pp_comma (this);
+ expression (TREE_OPERAND (t, 0));
+ pp_right_paren (this);
+ break;
+
case EMPTY_CLASS_EXPR:
type_id (TREE_TYPE (t));
pp_left_paren (this);
--- gcc/cp/parser.c.jj 2020-07-18 10:49:16.446123759 +0200
+++ gcc/cp/parser.c 2020-07-18 10:52:46.186021236 +0200
@@ -7217,6 +7217,32 @@ cp_parser_postfix_expression (cp_parser
tf_warning_or_error);
}
+ case RID_BUILTIN_BIT_CAST:
+ {
+ tree expression;
+ tree type;
+ /* Consume the `__builtin_bit_cast' token. */
+ cp_lexer_consume_token (parser->lexer);
+ /* Look for the opening `('. */
+ matching_parens parens;
+ parens.require_open (parser);
+ location_t type_location
+ = cp_lexer_peek_token (parser->lexer)->location;
+ /* Parse the type-id. */
+ {
+ type_id_in_expr_sentinel s (parser);
+ type = cp_parser_type_id (parser);
+ }
+ /* Look for the `,'. */
+ cp_parser_require (parser, CPP_COMMA, RT_COMMA);
+ /* Now, parse the assignment-expression. */
+ expression = cp_parser_assignment_expression (parser);
+ /* Look for the closing `)'. */
+ parens.require_close (parser);
+ return cp_build_bit_cast (type_location, type, expression,
+ tf_warning_or_error);
+ }
+
default:
{
tree type;
--- gcc/cp/semantics.c.jj 2020-07-18 10:48:51.737489274 +0200
+++ gcc/cp/semantics.c 2020-07-18 10:52:46.187021221 +0200
@@ -10469,4 +10469,82 @@ cp_build_vec_convert (tree arg, location
return build_call_expr_internal_loc (loc, IFN_VEC_CONVERT, type, 1, arg);
}
+/* Finish __builtin_bit_cast (type, arg). */
+
+tree
+cp_build_bit_cast (location_t loc, tree type, tree arg,
+ tsubst_flags_t complain)
+{
+ if (error_operand_p (type))
+ return error_mark_node;
+ if (!dependent_type_p (type))
+ {
+ if (!complete_type_or_maybe_complain (type, NULL_TREE, complain))
+ return error_mark_node;
+ if (TREE_CODE (type) == ARRAY_TYPE)
+ {
+ /* std::bit_cast for destination ARRAY_TYPE is not possible,
+ as functions may not return an array, so don't bother trying
+ to support this (and then deal with VLAs etc.). */
+ error_at (loc, "%<__builtin_bit_cast%> destination type %qT "
+ "is an array type", type);
+ return error_mark_node;
+ }
+ if (!trivially_copyable_p (type))
+ {
+ error_at (loc, "%<__builtin_bit_cast%> destination type %qT "
+ "is not trivially copyable", type);
+ return error_mark_node;
+ }
+ }
+
+ if (error_operand_p (arg))
+ return error_mark_node;
+
+ if (REFERENCE_REF_P (arg))
+ arg = TREE_OPERAND (arg, 0);
+ else
+ arg = mark_rvalue_use (arg, loc, true);
+
+ if (!type_dependent_expression_p (arg))
+ {
+ if (!trivially_copyable_p (TREE_TYPE (arg)))
+ {
+ error_at (cp_expr_loc_or_loc (arg, loc),
+ "%<__builtin_bit_cast%> source type %qT "
+ "is not trivially copyable", TREE_TYPE (arg));
+ return error_mark_node;
+ }
+ if (!dependent_type_p (type)
+ && !cp_tree_equal (TYPE_SIZE_UNIT (type),
+ TYPE_SIZE_UNIT (TREE_TYPE (arg))))
+ {
+ error_at (loc, "%<__builtin_bit_cast%> source size %qE "
+ "not equal to destination type size %qE",
+ TYPE_SIZE_UNIT (TREE_TYPE (arg)), TYPE_SIZE_UNIT (type));
+ return error_mark_node;
+ }
+ }
+
+ tree ret = build_min (BIT_CAST_EXPR, type, arg);
+ SET_EXPR_LOCATION (ret, loc);
+ return ret;
+}
--- gcc/cp/constexpr.c.jj 2020-07-18 10:48:51.650490561 +0200
+++ gcc/cp/constexpr.c 2020-07-18 10:52:46.183021280 +0200
@@ -4384,6 +4384,808 @@
+/* Helper function of cxx_eval_bit_cast. Check that the TYPE of a bit_cast
+ source or destination is allowed in a constant expression bit_cast -
+ reject unions, pointers, pointers to members and volatile subobjects.
+ ORIG_TYPE is the outermost type, for diagnostics. Return true on error. */
+
+static bool
+check_bit_cast_type (const constexpr_ctx *ctx, location_t loc, tree type,
+ tree orig_type)
+{
+ if (TREE_CODE (type) == UNION_TYPE)
+ {
+ if (!ctx->quiet)
+ {
+ if (type == orig_type)
+ error_at (loc, "%qs is not a constant expression because %qT is "
+ "a union type", "__builtin_bit_cast", type);
+ else
+ error_at (loc, "%qs is not a constant expression because %qT "
+ "contains a union type", "__builtin_bit_cast",
+ orig_type);
+ }
+ return true;
+ }
+ if (TREE_CODE (type) == POINTER_TYPE)
+ {
+ if (!ctx->quiet)
+ {
+ if (type == orig_type)
+ error_at (loc, "%qs is not a constant expression because %qT is "
+ "a pointer type", "__builtin_bit_cast", type);
+ else
+ error_at (loc, "%qs is not a constant expression because %qT "
+ "contains a pointer type", "__builtin_bit_cast",
+ orig_type);
+ }
+ return true;
+ }
+ if (TREE_CODE (type) == REFERENCE_TYPE)
+ {
+ if (!ctx->quiet)
+ {
+ if (type == orig_type)
+ error_at (loc, "%qs is not a constant expression because %qT is "
+ "a reference type", "__builtin_bit_cast", type);
+ else
+ error_at (loc, "%qs is not a constant expression because %qT "
+ "contains a reference type", "__builtin_bit_cast",
+ orig_type);
+ }
+ return true;
+ }
+ if (TYPE_PTRMEM_P (type))
+ {
+ if (!ctx->quiet)
+ {
+ if (type == orig_type)
+ error_at (loc, "%qs is not a constant expression because %qT is "
+ "a pointer to member type", "__builtin_bit_cast",
+ type);
+ else
+ error_at (loc, "%qs is not a constant expression because %qT "
+ "contains a pointer to member type",
+ "__builtin_bit_cast", orig_type);
+ }
+ return true;
+ }
+ if (TYPE_VOLATILE (type))
+ {
+ if (!ctx->quiet)
+ {
+ if (type == orig_type)
+ error_at (loc, "%qs is not a constant expression because %qT is "
+ "volatile", "__builtin_bit_cast", type);
+ else
+ error_at (loc, "%qs is not a constant expression because %qT "
+ "contains a volatile subobject",
+ "__builtin_bit_cast", orig_type);
+ }
+ return true;
+ }
+ if (TREE_CODE (type) == RECORD_TYPE)
+ for (tree field = TYPE_FIELDS (type); field; field = DECL_CHAIN (field))
+ if (TREE_CODE (field) == FIELD_DECL
+ && check_bit_cast_type (ctx, loc, TREE_TYPE (field), orig_type))
+ return true;
+ return false;
+}
+
+/* Try to find a type whose byte size is smaller than or equal to LEN bytes
+ and larger than or equal to FIELDSIZE bytes, with underlying mode
+ precision/size a multiple of BITS_PER_UNIT. As native_{interpret,encode}_int
+ work in terms of machine modes, we can't just use
+ build_nonstandard_integer_type. */
+
+static tree
+cxx_find_bitfield_repr_type (int fieldsize, int len)
+{
+ machine_mode mode;
+ for (int pass = 0; pass < 2; pass++)
+ {
+ enum mode_class mclass = pass ? MODE_PARTIAL_INT : MODE_INT;
+ FOR_EACH_MODE_IN_CLASS (mode, mclass)
+ if (known_ge (GET_MODE_SIZE (mode), fieldsize)
+ && known_eq (GET_MODE_PRECISION (mode),
+ GET_MODE_BITSIZE (mode))
+ && known_le (GET_MODE_SIZE (mode), len))
+ {
+ tree ret = c_common_type_for_mode (mode, 1);
+ if (ret && TYPE_MODE (ret) == mode)
+ return ret;
+ }
+ }
+
+ for (int i = 0; i < NUM_INT_N_ENTS; i ++)
+ if (int_n_enabled_p[i]
+ && int_n_data[i].bitsize >= (unsigned) (BITS_PER_UNIT * fieldsize)
+ && int_n_trees[i].unsigned_type)
+ {
+ tree ret = int_n_trees[i].unsigned_type;
+ mode = TYPE_MODE (ret);
+ if (known_ge (GET_MODE_SIZE (mode), fieldsize)
+ && known_eq (GET_MODE_PRECISION (mode),
+ GET_MODE_BITSIZE (mode))
+ && known_le (GET_MODE_SIZE (mode), len))
+ return ret;
+ }
+
+ return NULL_TREE;
+}
+
+/* Attempt to interpret an aggregate of TYPE from LEN bytes encoded in
+ target byte order at PTR + OFF. MASK contains set bits for bytes whose
+ value is indeterminate. */
+
+static tree
+cxx_native_interpret_aggregate (tree type, const unsigned char *ptr, int off,
+ int len, unsigned char *mask,
+ const constexpr_ctx *ctx, bool *non_constant_p,
+ location_t loc)
+{
+ vec<constructor_elt, va_gc> *elts = NULL;
+ if (TREE_CODE (type) == ARRAY_TYPE)
+ {
+ HOST_WIDE_INT eltsz = int_size_in_bytes (TREE_TYPE (type));
+ if (eltsz < 0 || eltsz > len || TYPE_DOMAIN (type) == NULL_TREE)
+ return error_mark_node;
+
+ HOST_WIDE_INT cnt = 0;
+ if (TYPE_MAX_VALUE (TYPE_DOMAIN (type)))
+ {
+ if (!tree_fits_shwi_p (TYPE_MAX_VALUE (TYPE_DOMAIN (type))))
+ return error_mark_node;
+ cnt = tree_to_shwi (TYPE_MAX_VALUE (TYPE_DOMAIN (type))) + 1;
+ }
+ if (eltsz == 0)
+ cnt = 0;
+ HOST_WIDE_INT pos = 0;
+ for (HOST_WIDE_INT i = 0; i < cnt; i++, pos += eltsz)
+ {
+ tree v = error_mark_node;
+ if (pos >= len || pos + eltsz > len)
+ return error_mark_node;
+ if (can_native_interpret_type_p (TREE_TYPE (type)))
+ {
+ v = native_interpret_expr (TREE_TYPE (type),
+ ptr + off + pos, eltsz);
+ if (v == NULL_TREE)
+ return error_mark_node;
+ for (int i = 0; i < eltsz; i++)
+ if (mask[off + pos + i])
+ {
+ if (!ctx->quiet)
+ error_at (loc, "%qs accessing uninitialized byte at "
+ "offset %d",
+ "__builtin_bit_cast", (int) (off + pos + i));
+ *non_constant_p = true;
+ return error_mark_node;
+ }
+ }
+ else if (TREE_CODE (TREE_TYPE (type)) == RECORD_TYPE
+ || TREE_CODE (TREE_TYPE (type)) == ARRAY_TYPE)
+ v = cxx_native_interpret_aggregate (TREE_TYPE (type),
+ ptr, off + pos, eltsz,
+ mask, ctx,
+ non_constant_p, loc);
+ if (v == error_mark_node)
+ return error_mark_node;
+ CONSTRUCTOR_APPEND_ELT (elts, size_int (i), v);
+ }
+ return build_constructor (type, elts);
+ }
+ gcc_assert (TREE_CODE (type) == RECORD_TYPE);
+ for (tree field = next_initializable_field (TYPE_FIELDS (type));
+ field; field = next_initializable_field (DECL_CHAIN (field)))
+ {
+ tree fld = field;
+ HOST_WIDE_INT bitoff = 0, pos = 0, sz = 0;
+ int diff = 0;
+ tree v = error_mark_node;
+ if (DECL_BIT_FIELD (field))
+ {
+ fld = DECL_BIT_FIELD_REPRESENTATIVE (field);
+ if (fld && INTEGRAL_TYPE_P (TREE_TYPE (fld)))
+ {
+ poly_int64 bitoffset;
+ poly_uint64 field_offset, fld_offset;
+ if (poly_int_tree_p (DECL_FIELD_OFFSET (field), &field_offset)
+ && poly_int_tree_p (DECL_FIELD_OFFSET (fld), &fld_offset))
+ bitoffset = (field_offset - fld_offset) * BITS_PER_UNIT;
+ else
+ bitoffset = 0;
+ bitoffset += (tree_to_uhwi (DECL_FIELD_BIT_OFFSET (field))
+ - tree_to_uhwi (DECL_FIELD_BIT_OFFSET (fld)));
+ diff = (TYPE_PRECISION (TREE_TYPE (fld))
+ - TYPE_PRECISION (TREE_TYPE (field)));
+ if (!bitoffset.is_constant (&bitoff)
+ || bitoff < 0
+ || bitoff > diff)
+ return error_mark_node;
+ }
+ else
+ {
+ if (!tree_fits_uhwi_p (DECL_FIELD_BIT_OFFSET (field)))
+ return error_mark_node;
+ int fieldsize = TYPE_PRECISION (TREE_TYPE (field));
+ int bpos = tree_to_uhwi (DECL_FIELD_BIT_OFFSET (field));
+ bpos %= BITS_PER_UNIT;
+ fieldsize += bpos;
+ fieldsize += BITS_PER_UNIT - 1;
+ fieldsize /= BITS_PER_UNIT;
+ tree repr_type = cxx_find_bitfield_repr_type (fieldsize, len);
+ if (repr_type == NULL_TREE)
+ return error_mark_node;
+ sz = int_size_in_bytes (repr_type);
+ if (sz < 0 || sz > len)
+ return error_mark_node;
+ pos = int_byte_position (field);
+ if (pos < 0 || pos > len || pos + fieldsize > len)
+ return error_mark_node;
+ HOST_WIDE_INT rpos;
+ if (pos + sz <= len)
+ rpos = pos;
+ else
+ {
+ rpos = len - sz;
+ gcc_assert (rpos <= pos);
+ }
+ bitoff = (HOST_WIDE_INT) (pos - rpos) * BITS_PER_UNIT + bpos;
+ pos = rpos;
+ diff = (TYPE_PRECISION (repr_type)
+ - TYPE_PRECISION (TREE_TYPE (field)));
+ v = native_interpret_expr (repr_type, ptr + off + pos, sz);
+ if (v == NULL_TREE)
+ return error_mark_node;
+ fld = NULL_TREE;
+ }
+ }
+
+ if (fld)
+ {
+ sz = int_size_in_bytes (TREE_TYPE (fld));
+ if (sz < 0 || sz > len)
+ return error_mark_node;
+ tree byte_pos = byte_position (fld);
+ if (!tree_fits_shwi_p (byte_pos))
+ return error_mark_node;
+ pos = tree_to_shwi (byte_pos);
+ if (pos < 0 || pos > len || pos + sz > len)
+ return error_mark_node;
+ }
+ if (fld == NULL_TREE)
+ /* Already handled above. */;
+ else if (can_native_interpret_type_p (TREE_TYPE (fld)))
+ {
+ v = native_interpret_expr (TREE_TYPE (fld),
+ ptr + off + pos, sz);
+ if (v == NULL_TREE)
+ return error_mark_node;
+ if (fld == field)
+ for (int i = 0; i < sz; i++)
+ if (mask[off + pos + i])
+ {
+ if (!ctx->quiet)
+ error_at (loc,
+ "%qs accessing uninitialized byte at offset %d",
+ "__builtin_bit_cast", (int) (off + pos + i));
+ *non_constant_p = true;
+ return error_mark_node;
+ }
+ }
+ else if (TREE_CODE (TREE_TYPE (fld)) == RECORD_TYPE
+ || TREE_CODE (TREE_TYPE (fld)) == ARRAY_TYPE)
+ v = cxx_native_interpret_aggregate (TREE_TYPE (fld),
+ ptr, off + pos, sz, mask,
+ ctx, non_constant_p, loc);
+ if (v == error_mark_node)
+ return error_mark_node;
+ if (fld != field)
+ {
+ if (TREE_CODE (v) != INTEGER_CST)
+ return error_mark_node;
+
+ /* FIXME: Figure out how to handle PDP endian bitfields. */
+ if (BYTES_BIG_ENDIAN != WORDS_BIG_ENDIAN)
+ return error_mark_node;
+ if (!BYTES_BIG_ENDIAN)
+ v = wide_int_to_tree (TREE_TYPE (field),
+ wi::lrshift (wi::to_wide (v), bitoff));
+ else
+ v = wide_int_to_tree (TREE_TYPE (field),
+ wi::lrshift (wi::to_wide (v),
+ diff - bitoff));
+ int bpos = bitoff % BITS_PER_UNIT;
+ int bpos_byte = bitoff / BITS_PER_UNIT;
+ int fieldsize = TYPE_PRECISION (TREE_TYPE (field));
+ fieldsize += bpos;
+ int epos = fieldsize % BITS_PER_UNIT;
+ fieldsize += BITS_PER_UNIT - 1;
+ fieldsize /= BITS_PER_UNIT;
+ int bad = -1;
+ if (bpos)
+ {
+ int msk;
+ if (!BYTES_BIG_ENDIAN)
+ {
+ msk = (1 << bpos) - 1;
+ if (fieldsize == 1 && epos != 0)
+ msk |= ~((1 << epos) - 1);
+ }
+ else
+ {
+ msk = ~((1 << (BITS_PER_UNIT - bpos)) - 1);
+ if (fieldsize == 1 && epos != 0)
+ msk |= (1 << (BITS_PER_UNIT - epos)) - 1;
+ }
+ if (mask[off + pos + bpos_byte] & ~msk)
+ bad = off + pos + bpos_byte;
+ }
+ if (epos && (fieldsize > 1 || bpos == 0))
+ {
+ int msk;
+ if (!BYTES_BIG_ENDIAN)
+ msk = (1 << epos) - 1;
+ else
+ msk = ~((1 << (BITS_PER_UNIT - epos)) - 1);
+ if (mask[off + pos + bpos_byte + fieldsize - 1] & msk)
+ bad = off + pos + bpos_byte + fieldsize - 1;
+ }
+ for (int i = 0; bad < 0 && i < fieldsize - (bpos != 0) - (epos != 0);
+ i++)
+ if (mask[off + pos + bpos_byte + (bpos != 0) + i])
+ bad = off + pos + bpos_byte + (bpos != 0) + i;
+ if (bad >= 0)
+ {
+ if (!ctx->quiet)
+ error_at (loc, "%qs accessing uninitialized byte at offset %d",
+ "__builtin_bit_cast", bad);
+ *non_constant_p = true;
+ return error_mark_node;
+ }
+ }
+ CONSTRUCTOR_APPEND_ELT (elts, field, v);
+ }
+ return build_constructor (type, elts);
+}
+
+/* Similar to native_encode_initializer, but handles value-initialized members
+ without initializers in !CONSTRUCTOR_NO_CLEARING CONSTRUCTORs, handles VCEs,
+ NON_LVALUE_EXPRs and nops, and in addition to filling up PTR also fills
+ MASK with 1 in bits that have indeterminate value. On the other hand, it is
+ simplified by OFF not being present (always assumed to be 0) and by never
+ extracting anything partial later, always the whole object. */
+
+int
+cxx_native_encode_aggregate (tree init, unsigned char *ptr, int len,
+ unsigned char *mask, const constexpr_ctx *ctx,
+ bool *non_constant_p, bool *overflow_p)
+{
+ if (init == NULL_TREE)
+ return 0;
+
+ STRIP_NOPS (init);
+ switch (TREE_CODE (init))
+ {
+ case VIEW_CONVERT_EXPR:
+ case NON_LVALUE_EXPR:
+ return cxx_native_encode_aggregate (TREE_OPERAND (init, 0), ptr, len,
+ mask, ctx, non_constant_p,
+ overflow_p);
+ default:
+ int r;
+ r = native_encode_expr (init, ptr, len, 0);
+ memset (mask, 0, r);
+ return r;
+ case CONSTRUCTOR:
+ tree type = TREE_TYPE (init);
+ HOST_WIDE_INT total_bytes = int_size_in_bytes (type);
+ if (total_bytes < 0)
+ return 0;
+ if (TREE_CODE (type) == ARRAY_TYPE)
+ {
+ HOST_WIDE_INT min_index;
+ unsigned HOST_WIDE_INT cnt;
+ HOST_WIDE_INT curpos = 0, fieldsize, valueinit = -1;
+ constructor_elt *ce;
+
+ if (TYPE_DOMAIN (type) == NULL_TREE
+ || !tree_fits_shwi_p (TYPE_MIN_VALUE (TYPE_DOMAIN (type))))
+ return 0;
+
+ fieldsize = int_size_in_bytes (TREE_TYPE (type));
+ if (fieldsize <= 0)
+ return 0;
+
+ min_index = tree_to_shwi (TYPE_MIN_VALUE (TYPE_DOMAIN (type)));
+ memset (ptr, '\0', MIN (total_bytes, len));
+
+ FOR_EACH_VEC_SAFE_ELT (CONSTRUCTOR_ELTS (init), cnt, ce)
+ {
+ tree val = ce->value;
+ tree index = ce->index;
+ HOST_WIDE_INT pos = curpos, count = 0;
+ bool full = false;
+ if (index && TREE_CODE (index) == RANGE_EXPR)
+ {
+ if (!tree_fits_shwi_p (TREE_OPERAND (index, 0))
+ || !tree_fits_shwi_p (TREE_OPERAND (index, 1)))
+ return 0;
+ pos = (tree_to_shwi (TREE_OPERAND (index, 0)) - min_index)
+ * fieldsize;
+ count = (tree_to_shwi (TREE_OPERAND (index, 1))
+ - tree_to_shwi (TREE_OPERAND (index, 0)));
+ }
+ else if (index)
+ {
+ if (!tree_fits_shwi_p (index))
+ return 0;
+ pos = (tree_to_shwi (index) - min_index) * fieldsize;
+ }
+
+ if (!CONSTRUCTOR_NO_CLEARING (init) && curpos != pos)
+ {
+ if (valueinit == -1)
+ {
+ tree t = build_value_init (TREE_TYPE (type),
+ tf_warning_or_error);
+ t = cxx_eval_constant_expression (ctx, t, false,
+ non_constant_p,
+ overflow_p);
+ if (!cxx_native_encode_aggregate (t, ptr + curpos,
+ fieldsize,
+ mask + curpos,
+ ctx, non_constant_p,
+ overflow_p))
+ return 0;
+ valueinit = curpos;
+ curpos += fieldsize;
+ }
+ while (curpos != pos)
+ {
+ memcpy (ptr + curpos, ptr + valueinit, fieldsize);
+ memcpy (mask + curpos, mask + valueinit, fieldsize);
+ curpos += fieldsize;
+ }
+ }
+
+ curpos = pos;
+ if (val)
+ do
+ {
+ gcc_assert (curpos >= 0
+ && (curpos + fieldsize
+ <= (HOST_WIDE_INT) len));
+ if (full)
+ {
+ memcpy (ptr + curpos, ptr + pos, fieldsize);
+ memcpy (mask + curpos, mask + pos, fieldsize);
+ }
+ else if (!cxx_native_encode_aggregate (val,
+ ptr + curpos,
+ fieldsize,
+ mask + curpos,
+ ctx, non_constant_p,
+ overflow_p))
+ return 0;
+ else
+ {
+ full = true;
+ pos = curpos;
+ }
+ curpos += fieldsize;
+ }
+ while (count-- != 0);
+ }
+ return MIN (total_bytes, len);
+ }
+ else if (TREE_CODE (type) == RECORD_TYPE)
+ {
+ unsigned HOST_WIDE_INT cnt;
+ constructor_elt *ce;
+ tree fld_base = TYPE_FIELDS (type);
+
+ memset (ptr, '\0', MIN (total_bytes, len));
+ FOR_EACH_VEC_SAFE_ELT (CONSTRUCTOR_ELTS (init), cnt, ce)
+ {
+ tree field = ce->index;
+ tree val = ce->value;
+ HOST_WIDE_INT pos, fieldsize;
+ unsigned HOST_WIDE_INT bpos = 0, epos = 0;
+
+ if (field == NULL_TREE)
+ return 0;
+
+ if (!CONSTRUCTOR_NO_CLEARING (init))
+ {
+ tree fld = next_initializable_field (fld_base);
+ fld_base = DECL_CHAIN (fld);
+ if (fld == NULL_TREE)
+ return 0;
+ if (fld != field)
+ {
+ cnt--;
+ field = fld;
+ val = build_value_init (TREE_TYPE (fld),
+ tf_warning_or_error);
+ val = cxx_eval_constant_expression (ctx, val, false,
+ non_constant_p,
+ overflow_p);
+ }
+ }
+
+ pos = int_byte_position (field);
+ gcc_assert ((HOST_WIDE_INT) len >= pos);
+
+ if (TREE_CODE (TREE_TYPE (field)) == ARRAY_TYPE
+ && TYPE_DOMAIN (TREE_TYPE (field))
+ && ! TYPE_MAX_VALUE (TYPE_DOMAIN (TREE_TYPE (field))))
+ return 0;
+ if (DECL_SIZE_UNIT (field) == NULL_TREE
+ || !tree_fits_shwi_p (DECL_SIZE_UNIT (field)))
+ return 0;
+ fieldsize = tree_to_shwi (DECL_SIZE_UNIT (field));
+ if (fieldsize == 0)
+ continue;
+
+ if (DECL_BIT_FIELD (field))
+ {
+ if (!tree_fits_uhwi_p (DECL_FIELD_BIT_OFFSET (field)))
+ return 0;
+ fieldsize = TYPE_PRECISION (TREE_TYPE (field));
+ bpos = tree_to_uhwi (DECL_FIELD_BIT_OFFSET (field));
+ bpos %= BITS_PER_UNIT;
+ fieldsize += bpos;
+ epos = fieldsize % BITS_PER_UNIT;
+ fieldsize += BITS_PER_UNIT - 1;
+ fieldsize /= BITS_PER_UNIT;
+ }
+
+ gcc_assert (pos + fieldsize > 0);
+
+ if (val == NULL_TREE)
+ continue;
+
+ if (DECL_BIT_FIELD (field))
+ {
+ /* FIXME: Handle PDP endian. */
+ if (BYTES_BIG_ENDIAN != WORDS_BIG_ENDIAN)
+ return 0;
+
+ if (TREE_CODE (val) != INTEGER_CST)
+ return 0;
+
+ tree repr = DECL_BIT_FIELD_REPRESENTATIVE (field);
+ tree repr_type = NULL_TREE;
+ HOST_WIDE_INT rpos = 0;
+ if (repr && INTEGRAL_TYPE_P (TREE_TYPE (repr)))
+ {
+ rpos = int_byte_position (repr);
+ repr_type = TREE_TYPE (repr);
+ }
+ else
+ {
+ repr_type = cxx_find_bitfield_repr_type (fieldsize, len);
+ if (repr_type == NULL_TREE)
+ return 0;
+ HOST_WIDE_INT repr_size = int_size_in_bytes (repr_type);
+ gcc_assert (repr_size > 0 && repr_size <= len);
+ if (pos + repr_size <= len)
+ rpos = pos;
+ else
+ {
+ rpos = len - repr_size;
+ gcc_assert (rpos <= pos);
+ }
+ }
+
+ if (rpos > pos)
+ return 0;
+ wide_int w = wi::to_wide (val, TYPE_PRECISION (repr_type));
+ int diff = (TYPE_PRECISION (repr_type)
+ - TYPE_PRECISION (TREE_TYPE (field)));
+ HOST_WIDE_INT bitoff = (pos - rpos) * BITS_PER_UNIT + bpos;
+ if (!BYTES_BIG_ENDIAN)
+ w = wi::lshift (w, bitoff);
+ else
+ w = wi::lshift (w, diff - bitoff);
+ val = wide_int_to_tree (repr_type, w);
+
+ unsigned char buf[MAX_BITSIZE_MODE_ANY_INT
+ / BITS_PER_UNIT + 1];
+ int l = native_encode_expr (val, buf, sizeof buf, 0);
+ if (l * BITS_PER_UNIT != TYPE_PRECISION (repr_type))
+ return 0;
+
+ /* If the bitfield does not start at byte boundary, handle
+ the partial byte at the start. */
+ if (bpos)
+ {
+ gcc_assert (pos >= 0 && len >= 1);
+ if (!BYTES_BIG_ENDIAN)
+ {
+ int msk = (1 << bpos) - 1;
+ buf[pos - rpos] &= ~msk;
+ buf[pos - rpos] |= ptr[pos] & msk;
+ if (fieldsize > 1 || epos == 0)
+ mask[pos] &= msk;
+ else
+ mask[pos] &= (msk | ~((1 << epos) - 1));
+ }
+ else
+ {
+ int msk = (1 << (BITS_PER_UNIT - bpos)) - 1;
+ buf[pos - rpos] &= msk;
+ buf[pos - rpos] |= ptr[pos] & ~msk;
+ if (fieldsize > 1 || epos == 0)
+ mask[pos] &= ~msk;
+ else
+ mask[pos] &= (~msk
+ | ((1 << (BITS_PER_UNIT - epos))
+ - 1));
+ }
+ }
+ /* If the bitfield does not end at byte boundary, handle
+ the partial byte at the end. */
+ if (epos)
+ {
+ gcc_assert (pos + fieldsize <= (HOST_WIDE_INT) len);
+ if (!BYTES_BIG_ENDIAN)
+ {
+ int msk = (1 << epos) - 1;
+ buf[pos - rpos + fieldsize - 1] &= msk;
+ buf[pos - rpos + fieldsize - 1]
+ |= ptr[pos + fieldsize - 1] & ~msk;
+ if (fieldsize > 1 || bpos == 0)
+ mask[pos + fieldsize - 1] &= ~msk;
+ }
+ else
+ {
+ int msk = (1 << (BITS_PER_UNIT - epos)) - 1;
+ buf[pos - rpos + fieldsize - 1] &= ~msk;
+ buf[pos - rpos + fieldsize - 1]
+ |= ptr[pos + fieldsize - 1] & msk;
+ if (fieldsize > 1 || bpos == 0)
+ mask[pos + fieldsize - 1] &= msk;
+ }
+ }
+ gcc_assert (pos >= 0
+ && (pos + fieldsize
+ <= (HOST_WIDE_INT) len));
+ memcpy (ptr + pos, buf + (pos - rpos), fieldsize);
+ if (fieldsize > (bpos != 0) + (epos != 0))
+ memset (mask + pos + (bpos != 0), 0,
+ fieldsize - (bpos != 0) - (epos != 0));
+ continue;
+ }
+
+ gcc_assert (pos >= 0
+ && (pos + fieldsize <= (HOST_WIDE_INT) len));
+ if (!cxx_native_encode_aggregate (val, ptr + pos,
+ fieldsize, mask + pos,
+ ctx, non_constant_p,
+ overflow_p))
+ return 0;
+ }
+ return MIN (total_bytes, len);
+ }
+ return 0;
+ }
+}
+
+/* Subroutine of cxx_eval_constant_expression.
+ Attempt to evaluate a BIT_CAST_EXPR. */
+
+static tree
+cxx_eval_bit_cast (const constexpr_ctx *ctx, tree t, bool *non_constant_p,
+ bool *overflow_p)
+{
+ if (check_bit_cast_type (ctx, EXPR_LOCATION (t), TREE_TYPE (t),
+ TREE_TYPE (t))
+ || check_bit_cast_type (ctx, cp_expr_loc_or_loc (TREE_OPERAND (t, 0),
+ EXPR_LOCATION (t)),
+ TREE_TYPE (TREE_OPERAND (t, 0)),
+ TREE_TYPE (TREE_OPERAND (t, 0))))
+ {
+ *non_constant_p = true;
+ return t;
+ }
+
+ tree op = cxx_eval_constant_expression (ctx, TREE_OPERAND (t, 0), false,
+ non_constant_p, overflow_p);
+ if (*non_constant_p)
+ return t;
+
+ location_t loc = EXPR_LOCATION (t);
+ if (BITS_PER_UNIT != 8 || CHAR_BIT != 8)
+ {
+ if (!ctx->quiet)
+ sorry_at (loc, "%qs cannot be constant evaluated on the target",
+ "__builtin_bit_cast");
+ *non_constant_p = true;
+ return t;
+ }
+
+ if (!tree_fits_shwi_p (TYPE_SIZE_UNIT (TREE_TYPE (t))))
+ {
+ if (!ctx->quiet)
+ sorry_at (loc, "%qs cannot be constant evaluated because the "
+ "type is too large", "__builtin_bit_cast");
+ *non_constant_p = true;
+ return t;
+ }
+
+ HOST_WIDE_INT len = tree_to_shwi (TYPE_SIZE_UNIT (TREE_TYPE (t)));
+ if (len < 0 || (int) len != len)
+ {
+ if (!ctx->quiet)
+ sorry_at (loc, "%qs cannot be constant evaluated because the "
+ "type is too large", "__builtin_bit_cast");
+ *non_constant_p = true;
+ return t;
+ }
+
+ unsigned char buf[64];
+ unsigned char *ptr, *mask;
+ size_t alen = (size_t) len * 2;
+ if (alen <= sizeof (buf))
+ ptr = buf;
+ else
+ ptr = XNEWVEC (unsigned char, alen);
+ mask = ptr + (size_t) len;
+ /* At the beginning consider everything indeterminate. */
+ memset (mask, ~0, (size_t) len);
+
+ if (cxx_native_encode_aggregate (op, ptr, len, mask, ctx, non_constant_p,
+ overflow_p) != len)
+ {
+ if (!ctx->quiet)
+ sorry_at (loc, "%qs cannot be constant evaluated because the "
+ "argument cannot be encoded", "__builtin_bit_cast");
+ *non_constant_p = true;
+ return t;
+ }
+
+ if (can_native_interpret_type_p (TREE_TYPE (t)))
+ if (tree r = native_interpret_expr (TREE_TYPE (t), ptr, len))
+ {
+ for (int i = 0; i < len; i++)
+ if (mask[i])
+ {
+ if (!ctx->quiet)
+ error_at (loc, "%qs accessing uninitialized byte at offset %d",
+ "__builtin_bit_cast", i);
+ *non_constant_p = true;
+ r = t;
+ break;
+ }
+ if (ptr != buf)
+ XDELETE (ptr);
+ return r;
+ }
+
+ if (TREE_CODE (TREE_TYPE (t)) == RECORD_TYPE)
+ {
+ tree r = cxx_native_interpret_aggregate (TREE_TYPE (t), ptr, 0, len,
+ mask, ctx, non_constant_p, loc);
+ if (r != error_mark_node)
+ {
+ if (ptr != buf)
+ XDELETE (ptr);
+ return r;
+ }
+ if (*non_constant_p)
+ return t;
+ }
+
+ if (!ctx->quiet)
+ sorry_at (loc, "%qs cannot be constant evaluated because the "
+ "argument cannot be interpreted", "__builtin_bit_cast");
+ *non_constant_p = true;
+ return t;
+}
+
/* Subroutine of cxx_eval_constant_expression.
Evaluate a short-circuited logical expression T in the context
of a given constexpr CALL. BAILOUT_VALUE is the value for
@@ -6537,6 +7339,10 @@ cxx_eval_constant_expression (const cons
*non_constant_p = true;
return t;
+ case BIT_CAST_EXPR:
+ r = cxx_eval_bit_cast (ctx, t, non_constant_p, overflow_p);
+ break;
+
default:
if (STATEMENT_CODE_P (TREE_CODE (t)))
{
@@ -8315,6 +9121,9 @@ potential_constant_expression_1 (tree t,
case ANNOTATE_EXPR:
return RECUR (TREE_OPERAND (t, 0), rval);
+ case BIT_CAST_EXPR:
+ return RECUR (TREE_OPERAND (t, 0), rval);
+
/* Coroutine await, yield and return expressions are not. */
case CO_AWAIT_EXPR:
case CO_YIELD_EXPR:
--- gcc/cp/cp-gimplify.c.jj 2020-07-18 10:48:51.514492573 +0200
+++ gcc/cp/cp-gimplify.c 2020-07-18 10:52:46.191021162 +0200
@@ -1867,6 +1867,11 @@ cp_genericize_r (tree *stmt_p, int *walk
}
break;
+ case BIT_CAST_EXPR:
+ *stmt_p = build1_loc (EXPR_LOCATION (stmt), VIEW_CONVERT_EXPR,
+ TREE_TYPE (stmt), TREE_OPERAND (stmt, 0));
+ break;
+
default:
if (IS_TYPE_OR_DECL_P (stmt))
*walk_subtrees = 0;
--- gcc/testsuite/g++.dg/cpp2a/bit-cast1.C.jj 2020-07-18 10:52:46.191021162 +0200
+++ gcc/testsuite/g++.dg/cpp2a/bit-cast1.C 2020-07-18 10:52:46.191021162 +0200
@@ -0,0 +1,47 @@
+// { dg-do compile }
+
+struct S { short a, b; };
+struct T { float a[16]; };
+struct U { int b[16]; };
+
+#if __SIZEOF_FLOAT__ == __SIZEOF_INT__
+int
+f1 (float x)
+{
+ return __builtin_bit_cast (int, x);
+}
+#endif
+
+#if 2 * __SIZEOF_SHORT__ == __SIZEOF_INT__
+S
+f2 (int x)
+{
+ return __builtin_bit_cast (S, x);
+}
+
+int
+f3 (S x)
+{
+ return __builtin_bit_cast (int, x);
+}
+#endif
+
+#if __SIZEOF_FLOAT__ == __SIZEOF_INT__
+U
+f4 (T &x)
+{
+ return __builtin_bit_cast (U, x);
+}
+
+T
+f5 (int (&x)[16])
+{
+ return __builtin_bit_cast (T, x);
+}
+#endif
+
+int
+f6 ()
+{
+ return __builtin_bit_cast (unsigned char, (signed char) 0);
+}
--- gcc/testsuite/g++.dg/cpp2a/bit-cast2.C.jj 2020-07-18 10:52:46.191021162 +0200
+++ gcc/testsuite/g++.dg/cpp2a/bit-cast2.C 2020-07-18 10:52:46.191021162 +0200
@@ -0,0 +1,57 @@
+// { dg-do compile }
+
+struct S { ~S (); int s; };
+S s;
+struct V; // { dg-message "forward declaration of 'struct V'" }
+extern V v; // { dg-error "'v' has incomplete type" }
+extern V *p;
+struct U { int a, b; };
+U u;
+
+void
+foo (int *q)
+{
+ __builtin_bit_cast (int, s); // { dg-error "'__builtin_bit_cast' source type 'S' is not trivially copyable" }
+ __builtin_bit_cast (S, 0); // { dg-error "'__builtin_bit_cast' destination type 'S' is not trivially copyable" }
+ __builtin_bit_cast (int &, q); // { dg-error "'__builtin_bit_cast' destination type 'int&' is not trivially copyable" }
+ __builtin_bit_cast (int [1], 0); // { dg-error "'__builtin_bit_cast' destination type \[^\n\r]* is an array type" }
+ __builtin_bit_cast (V, 0); // { dg-error "invalid use of incomplete type 'struct V'" }
+ __builtin_bit_cast (int, v);
+ __builtin_bit_cast (int, *p); // { dg-error "invalid use of incomplete type 'struct V'" }
+ __builtin_bit_cast (U, 0); // { dg-error "'__builtin_bit_cast' source size '\[0-9]*' not equal to destination type size '\[0-9]*'" }
+ __builtin_bit_cast (int, u); // { dg-error "'__builtin_bit_cast' source size '\[0-9]*' not equal to destination type size '\[0-9]*'" }
+}
+
+template <int N>
+void
+bar (int *q)
+{
+ __builtin_bit_cast (int, s); // { dg-error "'__builtin_bit_cast' source type 'S' is not trivially copyable" }
+ __builtin_bit_cast (S, 0); // { dg-error "'__builtin_bit_cast' destination type 'S' is not trivially copyable" }
+ __builtin_bit_cast (int &, q); // { dg-error "'__builtin_bit_cast' destination type 'int&' is not trivially copyable" }
+ __builtin_bit_cast (int [1], 0); // { dg-error "'__builtin_bit_cast' destination type \[^\n\r]* is an array type" }
+ __builtin_bit_cast (V, 0); // { dg-error "invalid use of incomplete type 'struct V'" }
+ __builtin_bit_cast (int, *p); // { dg-error "invalid use of incomplete type 'struct V'" }
+ __builtin_bit_cast (U, 0); // { dg-error "'__builtin_bit_cast' source size '\[0-9]*' not equal to destination type size '\[0-9]*'" }
+ __builtin_bit_cast (int, u); // { dg-error "'__builtin_bit_cast' source size '\[0-9]*' not equal to destination type size '\[0-9]*'" }
+}
+
+template <typename T1, typename T2, typename T3, typename T4>
+void
+baz (T3 s, T4 *p, T1 *q)
+{
+ __builtin_bit_cast (int, s); // { dg-error "'__builtin_bit_cast' source type 'S' is not trivially copyable" }
+ __builtin_bit_cast (T3, 0); // { dg-error "'__builtin_bit_cast' destination type 'S' is not trivially copyable" }
+ __builtin_bit_cast (T1 &, q); // { dg-error "'__builtin_bit_cast' destination type 'int&' is not trivially copyable" }
+ __builtin_bit_cast (T2, 0); // { dg-error "'__builtin_bit_cast' destination type \[^\n\r]* is an array type" }
+ __builtin_bit_cast (T4, 0); // { dg-error "invalid use of incomplete type 'struct V'" }
+ __builtin_bit_cast (int, *p); // { dg-error "invalid use of incomplete type 'struct V'" }
+ __builtin_bit_cast (U, (T1) 0); // { dg-error "'__builtin_bit_cast' source size '\[0-9]*' not equal to destination type size '\[0-9]*'" }
+ __builtin_bit_cast (T1, u); // { dg-error "'__builtin_bit_cast' source size '\[0-9]*' not equal to destination type size '\[0-9]*'" }
+}
+
+void
+qux (int *q)
+{
+ baz <int, int [1], S, V> (s, p, q);
+}
--- gcc/testsuite/g++.dg/cpp2a/bit-cast3.C.jj 2020-07-18 10:52:46.191021162 +0200
+++ gcc/testsuite/g++.dg/cpp2a/bit-cast3.C 2020-07-18 11:16:31.003976285 +0200
@@ -0,0 +1,229 @@
+// { dg-do compile { target c++11 } }
+
+template <typename To, typename From>
+constexpr To
+bit_cast (const From &from)
+{
+ return __builtin_bit_cast (To, from);
+}
+
+template <typename To, typename From>
+constexpr bool
+check (const From &from)
+{
+ return bit_cast <From> (bit_cast <To> (from)) == from;
+}
+
+struct A
+{
+ int a, b, c;
+ constexpr bool operator == (const A &x) const
+ {
+ return x.a == a && x.b == b && x.c == c;
+ }
+};
+
+struct B
+{
+ unsigned a[3];
+ constexpr bool operator == (const B &x) const
+ {
+ return x.a[0] == a[0] && x.a[1] == a[1] && x.a[2] == a[2];
+ }
+};
+
+struct C
+{
+ char a[2][3][2];
+ constexpr bool operator == (const C &x) const
+ {
+ return x.a[0][0][0] == a[0][0][0]
+ && x.a[0][0][1] == a[0][0][1]
+ && x.a[0][1][0] == a[0][1][0]
+ && x.a[0][1][1] == a[0][1][1]
+ && x.a[0][2][0] == a[0][2][0]
+ && x.a[0][2][1] == a[0][2][1]
+ && x.a[1][0][0] == a[1][0][0]
+ && x.a[1][0][1] == a[1][0][1]
+ && x.a[1][1][0] == a[1][1][0]
+ && x.a[1][1][1] == a[1][1][1]
+ && x.a[1][2][0] == a[1][2][0]
+ && x.a[1][2][1] == a[1][2][1];
+ }
+};
+
+struct D
+{
+ int a, b;
+ constexpr bool operator == (const D &x) const
+ {
+ return x.a == a && x.b == b;
+ }
+};
+
+struct E {};
+struct F { char c, d, e, f; };
+struct G : public D, E, F
+{
+ int g;
+ constexpr bool operator == (const G &x) const
+ {
+ return x.a == a && x.b == b && x.c == c && x.d == d
+ && x.e == e && x.f == f && x.g == g;
+ }
+};
+
+struct H
+{
+ int a, b[2], c;
+ constexpr bool operator == (const H &x) const
+ {
+ return x.a == a && x.b[0] == b[0] && x.b[1] == b[1] && x.c == c;
+ }
+};
+
+#if __SIZEOF_INT__ == 4
+struct I
+{
+ int a;
+ int b : 3;
+ int c : 24;
+ int d : 5;
+ int e;
+ constexpr bool operator == (const I &x) const
+ {
+ return x.a == a && x.b == b && x.c == c && x.d == d && x.e == e;
+ }
+};
+#endif
+
+#if __SIZEOF_INT__ == 4 && __SIZEOF_LONG_LONG__ == 8
+struct J
+{
+ long long int a, b : 11, c : 3, d : 37, e : 1, f : 10, g : 2, h;
+ constexpr bool operator == (const J &x) const
+ {
+ return x.a == a && x.b == b && x.c == c && x.d == d && x.e == e
+ && x.f == f && x.g == g && x.h == h;
+ }
+};
+
+struct K
+{
+ long long int a, b, c;
+ constexpr bool operator == (const K &x) const
+ {
+ return x.a == a && x.b == b && x.c == c;
+ }
+};
+
+struct M
+{
+ signed a : 6, b : 7, c : 6, d : 5;
+ unsigned char e;
+ unsigned int f;
+ long long int g;
+ constexpr bool operator == (const M &x) const
+ {
+ return x.a == a && x.b == b && x.c == c && x.d == d && x.e == e
+ && x.f == f && x.g == g;
+ }
+};
+
+struct N
+{
+ unsigned long long int a, b;
+ constexpr bool operator == (const N &x) const
+ {
+ return x.a == a && x.b == b;
+ }
+};
+#endif
+
+static_assert (check <unsigned int> (0), "");
+static_assert (check <long long int> (0xdeadbeeffeedbac1ULL), "");
+static_assert (check <signed char> ((unsigned char) 42), "");
+static_assert (check <char> ((unsigned char) 42), "");
+static_assert (check <unsigned char> ((unsigned char) 42), "");
+static_assert (check <signed char> ((signed char) 42), "");
+static_assert (check <char> ((signed char) 42), "");
+static_assert (check <unsigned char> ((signed char) 42), "");
+static_assert (check <signed char> ((char) 42), "");
+static_assert (check <char> ((char) 42), "");
+static_assert (check <unsigned char> ((char) 42), "");
+#if __SIZEOF_INT__ == __SIZEOF_FLOAT__
+static_assert (check <int> (2.5f), "");
+static_assert (check <unsigned int> (136.5f), "");
+#endif
+#if __SIZEOF_LONG_LONG__ == __SIZEOF_DOUBLE__
+static_assert (check <long long> (2.5), "");
+static_assert (check <long long unsigned> (123456.75), "");
+#endif
+
+static_assert (check <B> (A{ 1, 2, 3 }), "");
+static_assert (check <A> (B{ 4, 5, 6 }), "");
+
+#if __SIZEOF_INT__ == 4
+static_assert (check <C> (A{ 7, 8, 9 }), "");
+static_assert (check <C> (B{ 10, 11, 12 }), "");
+static_assert (check <A> (C{ { { { 13, 14 }, { 15, 16 }, { 17, 18 } },
+ { { 19, 20 }, { 21, 22 }, { 23, 24 } } } }), "");
+constexpr unsigned char c[] = { 1, 2, 3, 4 };
+#if __BYTE_ORDER__ == __ORDER_LITTLE_ENDIAN__
+static_assert (bit_cast <unsigned int> (c) == 0x04030201U, "");
+#elif __BYTE_ORDER__ == __ORDER_BIG_ENDIAN__
+static_assert (bit_cast <unsigned int> (c) == 0x01020304U, "");
+#endif
+
+#if __cplusplus >= 201703L
+static_assert (check <G> (H { 0x12345678, { 0x23456789, 0x5a876543 }, 0x3ba78654 }), "");
+#endif
+constexpr int d[] = { 0x12345678, 0x23456789, 0x5a876543, 0x3ba78654 };
+static_assert (bit_cast <G> (d) == bit_cast <G> (H { 0x12345678, { 0x23456789, 0x5a876543 }, 0x3ba78654 }), "");
+
+#if __BYTE_ORDER__ == __ORDER_LITTLE_ENDIAN__
+static_assert (bit_cast <I> (A { 0x7efa3412, 0x5a876543, 0x1eeffeed })
+ == I { 0x7efa3412, 3, 0x50eca8, 0xb, 0x1eeffeed }, "");
+static_assert (bit_cast <A> (I { 0x7efa3412, 3, 0x50eca8, 0xb, 0x1eeffeed })
+ == A { 0x7efa3412, 0x5a876543, 0x1eeffeed }, "");
+#elif __BYTE_ORDER__ == __ORDER_BIG_ENDIAN__
+static_assert (bit_cast <I> (A { 0x7efa3412, 0x5a876543, 0x1eeffeed })
+ == I { 0x7efa3412, 2, -0x2bc4d6, 0x3, 0x1eeffeed }, "");
+static_assert (bit_cast <A> (I { 0x7efa3412, 2, -0x2bc4d6, 0x3, 0x1eeffeed })
+ == A { 0x7efa3412, 0x5a876543, 0x1eeffeed }, "");
+#endif
+#endif
+
+#if 2 * __SIZEOF_INT__ == __SIZEOF_LONG_LONG__ && __SIZEOF_INT__ >= 4
+constexpr unsigned long long a = 0xdeadbeeffee1deadULL;
+constexpr unsigned b[] = { 0xfeedbacU, 0xbeeffeedU };
+#if __BYTE_ORDER__ == __ORDER_LITTLE_ENDIAN__
+static_assert (bit_cast <D> (a) == D { int (0xfee1deadU), int (0xdeadbeefU) }, "");
+static_assert (bit_cast <unsigned long long> (b) == 0xbeeffeed0feedbacULL, "");
+#elif __BYTE_ORDER__ == __ORDER_BIG_ENDIAN__
+static_assert (bit_cast <D> (a) == D { int (0xdeadbeefU), int (0xfee1deadU) }, "");
+static_assert (bit_cast <unsigned long long> (b) == 0x0feedbacbeeffeedULL, "");
+#endif
+#endif
+
+#if __SIZEOF_INT__ == 4 && __SIZEOF_LONG_LONG__ == 8
+#if __BYTE_ORDER__ == __ORDER_LITTLE_ENDIAN__
+static_assert (bit_cast <J> (K { 0x0feedbacdeadbeefLL, 7862463375103529997LL, 0x0feedbacdeadbeefLL })
+ == J { 0x0feedbacdeadbeefLL, -1011, 2, -0xbacdeadbeLL, -1, -303, 1, 0x0feedbacdeadbeefLL }, "");
+static_assert (bit_cast <K> (J { 0x0feedbacdeadbeefLL, -1011, 2, -0xbacdeadbeLL, -1, -303, 1, 0x0feedbacdeadbeefLL })
+ == K { 0x0feedbacdeadbeefLL, 7862463375103529997LL, 0x0feedbacdeadbeefLL }, "");
+static_assert (bit_cast <M> (N { 0xfeedbacdeadbeef8ULL, 0x123456789abcde42ULL })
+ == M { -8, 59, 31, -5, 234, 0xfeedbacdU, 0x123456789abcde42ULL }, "");
+static_assert (bit_cast <N> (M { -8, 59, 31, -5, 234, 0xfeedbacdU, 0x123456789abcde42ULL })
+ == N { 0xfeedbacdeadbeef8ULL, 0x123456789abcde42ULL }, "");
+#elif __BYTE_ORDER__ == __ORDER_BIG_ENDIAN__
+static_assert (bit_cast <J> (K { 0x0feedbacdeadbeefLL, -9103311533965288635LL, 0x0feedbacdeadbeefLL })
+ == J { 0x0feedbacdeadbeefLL, -1011, 2, -0xbacdeadbeLL, -1, -303, 1, 0x0feedbacdeadbeefLL }, "");
+static_assert (bit_cast <K> (J { 0x0feedbacdeadbeefLL, -1011, 2, -0xbacdeadbeLL, -1, -303, 1, 0x0feedbacdeadbeefLL })
+ == K { 0x0feedbacdeadbeefLL, -9103311533965288635LL, 0x0feedbacdeadbeefLL }, "");
+static_assert (bit_cast <M> (N { 0xfeedbacdeadbeef8ULL, 0x123456789abcde42ULL })
+ == M { -1, -35, -19, -6, 205, 0xeadbeef8U, 0x123456789abcde42ULL }, "");
+static_assert (bit_cast <N> (M { -1, -35, -19, -6, 205, 0xeadbeef8U, 0x123456789abcde42ULL })
+ == N { 0xfeedbacdeadbeef8ULL, 0x123456789abcde42ULL }, "");
+#endif
+#endif
--- gcc/testsuite/g++.dg/cpp2a/bit-cast4.C.jj 2020-07-18 10:52:46.191021162 +0200
+++ gcc/testsuite/g++.dg/cpp2a/bit-cast4.C 2020-07-18 10:52:46.191021162 +0200
@@ -0,0 +1,45 @@
+// { dg-do compile { target c++11 } }
+
+template <typename To, typename From>
+constexpr To
+bit_cast (const From &from)
+{
+ return __builtin_bit_cast (To, from);
+}
+// { dg-error "'__builtin_bit_cast' is not a constant expression because 'U' is a union type" "" { target *-*-* } 7 }
+// { dg-error "'__builtin_bit_cast' is not a constant expression because 'const U' is a union type" "" { target *-*-* } 7 }
+// { dg-error "'__builtin_bit_cast' is not a constant expression because 'B' contains a union type" "" { target *-*-* } 7 }
+// { dg-error "'__builtin_bit_cast' is not a constant expression because 'char\\\*' is a pointer type" "" { target *-*-* } 7 }
+// { dg-error "'__builtin_bit_cast' is not a constant expression because 'const int\\\* const' is a pointer type" "" { target *-*-* } 7 }
+// { dg-error "'__builtin_bit_cast' is not a constant expression because 'C' contains a pointer type" "" { target *-*-* } 7 }
+// { dg-error "'__builtin_bit_cast' is not a constant expression because 'const C' contains a pointer type" "" { target *-*-* } 7 }
+// { dg-error "'__builtin_bit_cast' is not a constant expression because 'int D::\\\* const' is a pointer to member type" "" { target *-*-* } 7 }
+// { dg-error "'__builtin_bit_cast' is not a constant expression because 'int \\\(D::\\\* const\\\)\\\(\\\) const' is a pointer to member type" "" { target *-*-* } 7 }
+// { dg-error "'__builtin_bit_cast' is not a constant expression because 'int D::\\\*' is a pointer to member type" "" { target *-*-* } 7 }
+// { dg-error "'__builtin_bit_cast' is not a constant expression because 'int \\\(D::\\\*\\\)\\\(\\\)' is a pointer to member type" "" { target *-*-* } 7 }
+
+union U { int u; };
+struct A { int a; U b; };
+struct B : public A { int c; };
+struct C { const int *p; };
+constexpr int a[] = { 1, 2, 3 };
+constexpr const int *b = &a[0];
+constexpr C c = { b };
+struct D { int d; constexpr int foo () const { return 1; } };
+constexpr int D::*d = &D::d;
+constexpr int (D::*e) () const = &D::foo;
+struct E { __INTPTR_TYPE__ e, f; };
+constexpr E f = { 1, 2 };
+constexpr U g { 0 };
+
+constexpr auto z = bit_cast <U> (0);
+constexpr auto y = bit_cast <int> (g);
+constexpr auto x = bit_cast <B> (a);
+constexpr auto w = bit_cast <char *> ((__INTPTR_TYPE__) 0);
+constexpr auto v = bit_cast <__UINTPTR_TYPE__> (b);
+constexpr auto u = bit_cast <C> ((__INTPTR_TYPE__) 0);
+constexpr auto t = bit_cast <__INTPTR_TYPE__> (c);
+constexpr auto s = bit_cast <__INTPTR_TYPE__> (d);
+constexpr auto r = bit_cast <E> (e);
+constexpr auto q = bit_cast <int D::*> ((__INTPTR_TYPE__) 0);
+constexpr auto p = bit_cast <int (D::*) ()> (f);
--- gcc/testsuite/g++.dg/cpp2a/bit-cast5.C.jj 2020-07-18 10:52:46.192021147 +0200
+++ gcc/testsuite/g++.dg/cpp2a/bit-cast5.C 2020-07-18 10:52:46.191021162 +0200
@@ -0,0 +1,69 @@
+// { dg-do compile { target { c++20 && { ilp32 || lp64 } } } }
+
+struct A { signed char a, b, c, d, e, f; };
+struct B {};
+struct C { B a, b; short c; B d; };
+struct D { int a : 4, b : 24, c : 4; };
+struct E { B a, b; short c; };
+struct F { B a; signed char b, c; B d; };
+
+constexpr bool
+f1 ()
+{
+ A a;
+ a.c = 23; a.d = 42;
+ C b = __builtin_bit_cast (C, a); // OK
+ return false;
+}
+
+constexpr bool
+f2 ()
+{
+ A a;
+ a.a = 1; a.b = 2; a.c = 3; a.e = 4; a.f = 5;
+ C b = __builtin_bit_cast (C, a); // { dg-error "'__builtin_bit_cast' accessing uninitialized byte at offset 3" }
+ return false;
+}
+
+constexpr bool
+f3 ()
+{
+ D a;
+ a.b = 1;
+ F b = __builtin_bit_cast (F, a); // OK
+ return false;
+}
+
+constexpr bool
+f4 ()
+{
+ D a;
+ a.b = 1; a.c = 2;
+ E b = __builtin_bit_cast (E, a); // OK
+ return false;
+}
+
+constexpr bool
+f5 ()
+{
+ D a;
+ a.b = 1;
+ E b = __builtin_bit_cast (E, a); // { dg-error "'__builtin_bit_cast' accessing uninitialized byte at offset 3" }
+ return false;
+}
+
+constexpr bool
+f6 ()
+{
+ D a;
+ a.c = 1;
+ E b = __builtin_bit_cast (E, a); // { dg-error "'__builtin_bit_cast' accessing uninitialized byte at offset 2" }
+ return false;
+}
+
+constexpr bool a = f1 ();
+constexpr bool b = f2 ();
+constexpr bool c = f3 ();
+constexpr bool d = f4 ();
+constexpr bool e = f5 ();
+constexpr bool f = f6 ();
Jakub