On 14 March 2018 at 23:27, Jonathan Wakely wrote:
> Here's one way to generalize this idea. We could potentially replace
> most of the lightweight __glibcxx_assert checks with this, to get
> zero-overhead static checking at compile-time whenever possible (even
> in constexpr functions) and have optional run-time assertions for the
> remaining cases.

Thinking about this some more, we probably don't want to do this for
most __glibcxx_assert uses, because errors in something like
std::vector::operator[] are rarely statically detectable. I doubt we
would catch many bugs that way, as most involve non-constant indices
and vectors whose size has changed dynamically at run time.
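
For instance, in a (made-up) caller like

void f(std::vector<int>& v, std::size_t i)
{ v[i] = 0; }

neither i nor v.size() is a compile-time constant, so a static check
could never fire and we'd still be relying on the run-time assertion.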

It *might* be useful in vector::front, vector::back, string::front,
deque::front etc. to catch bugs where users do:

std::string s;
// ...
someFunction(&s.front(), s.size()); // undefined if s is still empty here

It seems most valuable in constexpr functions (where we definitely
expect constant arguments in many cases) and in functions whose
arguments are typically compile-time constants anyway, as in the
attached patch for atomic objects.
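
The patch relies on a new __glibcxx_assert2(cond, errfn) macro whose
definition isn't part of the diff below. Here's a minimal sketch of
how it might be defined, assuming the usual __builtin_constant_p plus
attribute((error)) pattern (the real definition would presumably live
in <bits/c++config>):

#ifdef __OPTIMIZE__
// If, after inlining and constant propagation, the condition folds
// to a constant false, the call to the error-attributed function
// survives dead-code elimination and GCC rejects the translation
// unit at compile time. Otherwise fall back to the existing
// run-time assertion.
# define __glibcxx_assert2(cond, errfn)            \
  do {                                             \
    if (__builtin_constant_p((cond)) && !(cond))   \
      errfn;                                       \
    else                                           \
      __glibcxx_assert(cond);                      \
  } while (false)
#else
// Without optimization __builtin_constant_p rarely folds, so just
// keep the run-time check.
# define __glibcxx_assert2(cond, errfn) __glibcxx_assert(cond)
#endif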
diff --git a/libstdc++-v3/include/bits/atomic_base.h b/libstdc++-v3/include/bits/atomic_base.h
index a1fadcd8056..0f6783d257c 100644
--- a/libstdc++-v3/include/bits/atomic_base.h
+++ b/libstdc++-v3/include/bits/atomic_base.h
@@ -186,9 +186,9 @@ _GLIBCXX_BEGIN_NAMESPACE_VERSION
     clear(memory_order __m = memory_order_seq_cst) noexcept
     {
       memory_order __b = __m & __memory_order_mask;
-      __glibcxx_assert(__b != memory_order_consume);
-      __glibcxx_assert(__b != memory_order_acquire);
-      __glibcxx_assert(__b != memory_order_acq_rel);
+      __glibcxx_assert2(__b != memory_order_consume, __invalid_memory_order());
+      __glibcxx_assert2(__b != memory_order_acquire, __invalid_memory_order());
+      __glibcxx_assert2(__b != memory_order_acq_rel, __invalid_memory_order());
 
       __atomic_clear (&_M_i, __m);
     }
@@ -197,9 +197,9 @@ _GLIBCXX_BEGIN_NAMESPACE_VERSION
     clear(memory_order __m = memory_order_seq_cst) volatile noexcept
     {
       memory_order __b = __m & __memory_order_mask;
-      __glibcxx_assert(__b != memory_order_consume);
-      __glibcxx_assert(__b != memory_order_acquire);
-      __glibcxx_assert(__b != memory_order_acq_rel);
+      __glibcxx_assert2(__b != memory_order_consume, __invalid_memory_order());
+      __glibcxx_assert2(__b != memory_order_acquire, __invalid_memory_order());
+      __glibcxx_assert2(__b != memory_order_acq_rel, __invalid_memory_order());
 
       __atomic_clear (&_M_i, __m);
     }
@@ -208,6 +208,10 @@ _GLIBCXX_BEGIN_NAMESPACE_VERSION
     static constexpr __atomic_flag_data_type
     _S_init(bool __i)
     { return __i ? __GCC_ATOMIC_TEST_AND_SET_TRUEVAL : 0; }
+
+    static void
+    __invalid_memory_order()
+    __attribute__((__error__("invalid memory order for atomic_flag::clear")));
   };
 
 
@@ -367,9 +371,9 @@ _GLIBCXX_BEGIN_NAMESPACE_VERSION
       store(__int_type __i, memory_order __m = memory_order_seq_cst) noexcept
       {
        memory_order __b = __m & __memory_order_mask;
-       __glibcxx_assert(__b != memory_order_acquire);
-       __glibcxx_assert(__b != memory_order_acq_rel);
-       __glibcxx_assert(__b != memory_order_consume);
+       __glibcxx_assert2(__b != memory_order_acquire, __invalid());
+       __glibcxx_assert2(__b != memory_order_acq_rel, __invalid());
+       __glibcxx_assert2(__b != memory_order_consume, __invalid());
 
        __atomic_store_n(&_M_i, __i, __m);
       }
@@ -379,9 +383,9 @@ _GLIBCXX_BEGIN_NAMESPACE_VERSION
            memory_order __m = memory_order_seq_cst) volatile noexcept
       {
        memory_order __b = __m & __memory_order_mask;
-       __glibcxx_assert(__b != memory_order_acquire);
-       __glibcxx_assert(__b != memory_order_acq_rel);
-       __glibcxx_assert(__b != memory_order_consume);
+       __glibcxx_assert2(__b != memory_order_acquire, __invalid());
+       __glibcxx_assert2(__b != memory_order_acq_rel, __invalid());
+       __glibcxx_assert2(__b != memory_order_consume, __invalid());
 
        __atomic_store_n(&_M_i, __i, __m);
       }
@@ -390,8 +394,8 @@ _GLIBCXX_BEGIN_NAMESPACE_VERSION
       load(memory_order __m = memory_order_seq_cst) const noexcept
       {
        memory_order __b = __m & __memory_order_mask;
-       __glibcxx_assert(__b != memory_order_release);
-       __glibcxx_assert(__b != memory_order_acq_rel);
+       __glibcxx_assert2(__b != memory_order_release, __invalid());
+       __glibcxx_assert2(__b != memory_order_acq_rel, __invalid());
 
        return __atomic_load_n(&_M_i, __m);
       }
@@ -400,8 +404,8 @@ _GLIBCXX_BEGIN_NAMESPACE_VERSION
       load(memory_order __m = memory_order_seq_cst) const volatile noexcept
       {
        memory_order __b = __m & __memory_order_mask;
-       __glibcxx_assert(__b != memory_order_release);
-       __glibcxx_assert(__b != memory_order_acq_rel);
+       __glibcxx_assert2(__b != memory_order_release, __invalid());
+       __glibcxx_assert2(__b != memory_order_acq_rel, __invalid());
 
        return __atomic_load_n(&_M_i, __m);
       }
@@ -427,9 +431,9 @@ _GLIBCXX_BEGIN_NAMESPACE_VERSION
       {
        memory_order __b2 = __m2 & __memory_order_mask;
        memory_order __b1 = __m1 & __memory_order_mask;
-       __glibcxx_assert(__b2 != memory_order_release);
-       __glibcxx_assert(__b2 != memory_order_acq_rel);
-       __glibcxx_assert(__b2 <= __b1);
+       __glibcxx_assert2(__b2 != memory_order_release, __invalid());
+       __glibcxx_assert2(__b2 != memory_order_acq_rel, __invalid());
+       __glibcxx_assert2(__b2 <= __b1, __invalid());
 
        return __atomic_compare_exchange_n(&_M_i, &__i1, __i2, 1, __m1, __m2);
       }
@@ -441,9 +445,9 @@ _GLIBCXX_BEGIN_NAMESPACE_VERSION
       {
        memory_order __b2 = __m2 & __memory_order_mask;
        memory_order __b1 = __m1 & __memory_order_mask;
-       __glibcxx_assert(__b2 != memory_order_release);
-       __glibcxx_assert(__b2 != memory_order_acq_rel);
-       __glibcxx_assert(__b2 <= __b1);
+       __glibcxx_assert2(__b2 != memory_order_release, __invalid());
+       __glibcxx_assert2(__b2 != memory_order_acq_rel, __invalid());
+       __glibcxx_assert2(__b2 <= __b1, __invalid());
 
        return __atomic_compare_exchange_n(&_M_i, &__i1, __i2, 1, __m1, __m2);
       }
@@ -470,9 +474,9 @@ _GLIBCXX_BEGIN_NAMESPACE_VERSION
       {
        memory_order __b2 = __m2 & __memory_order_mask;
        memory_order __b1 = __m1 & __memory_order_mask;
-       __glibcxx_assert(__b2 != memory_order_release);
-       __glibcxx_assert(__b2 != memory_order_acq_rel);
-       __glibcxx_assert(__b2 <= __b1);
+       __glibcxx_assert2(__b2 != memory_order_release, __invalid());
+       __glibcxx_assert2(__b2 != memory_order_acq_rel, __invalid());
+       __glibcxx_assert2(__b2 <= __b1, __invalid());
 
        return __atomic_compare_exchange_n(&_M_i, &__i1, __i2, 0, __m1, __m2);
       }
@@ -485,9 +489,9 @@ _GLIBCXX_BEGIN_NAMESPACE_VERSION
        memory_order __b2 = __m2 & __memory_order_mask;
        memory_order __b1 = __m1 & __memory_order_mask;
 
-       __glibcxx_assert(__b2 != memory_order_release);
-       __glibcxx_assert(__b2 != memory_order_acq_rel);
-       __glibcxx_assert(__b2 <= __b1);
+       __glibcxx_assert2(__b2 != memory_order_release, __invalid());
+       __glibcxx_assert2(__b2 != memory_order_acq_rel, __invalid());
+       __glibcxx_assert2(__b2 <= __b1, __invalid());
 
        return __atomic_compare_exchange_n(&_M_i, &__i1, __i2, 0, __m1, __m2);
       }
@@ -557,6 +561,11 @@ _GLIBCXX_BEGIN_NAMESPACE_VERSION
       fetch_xor(__int_type __i,
                memory_order __m = memory_order_seq_cst) volatile noexcept
       { return __atomic_fetch_xor(&_M_i, __i, __m); }
+
+    private:
+      static void
+      __invalid()
+      __attribute__((__error__("invalid memory order for atomic operation")));
     };
 
 
@@ -684,9 +693,9 @@ _GLIBCXX_BEGIN_NAMESPACE_VERSION
       {
         memory_order __b = __m & __memory_order_mask;
 
-       __glibcxx_assert(__b != memory_order_acquire);
-       __glibcxx_assert(__b != memory_order_acq_rel);
-       __glibcxx_assert(__b != memory_order_consume);
+       __glibcxx_assert2(__b != memory_order_acquire, __invalid());
+       __glibcxx_assert2(__b != memory_order_acq_rel, __invalid());
+       __glibcxx_assert2(__b != memory_order_consume, __invalid());
 
        __atomic_store_n(&_M_p, __p, __m);
       }
@@ -696,9 +705,9 @@ _GLIBCXX_BEGIN_NAMESPACE_VERSION
            memory_order __m = memory_order_seq_cst) volatile noexcept
       {
        memory_order __b = __m & __memory_order_mask;
-       __glibcxx_assert(__b != memory_order_acquire);
-       __glibcxx_assert(__b != memory_order_acq_rel);
-       __glibcxx_assert(__b != memory_order_consume);
+       __glibcxx_assert2(__b != memory_order_acquire, __invalid());
+       __glibcxx_assert2(__b != memory_order_acq_rel, __invalid());
+       __glibcxx_assert2(__b != memory_order_consume, __invalid());
 
        __atomic_store_n(&_M_p, __p, __m);
       }
@@ -707,8 +716,8 @@ _GLIBCXX_BEGIN_NAMESPACE_VERSION
       load(memory_order __m = memory_order_seq_cst) const noexcept
       {
        memory_order __b = __m & __memory_order_mask;
-       __glibcxx_assert(__b != memory_order_release);
-       __glibcxx_assert(__b != memory_order_acq_rel);
+       __glibcxx_assert2(__b != memory_order_release, __invalid());
+       __glibcxx_assert2(__b != memory_order_acq_rel, __invalid());
 
        return __atomic_load_n(&_M_p, __m);
       }
@@ -717,8 +726,8 @@ _GLIBCXX_BEGIN_NAMESPACE_VERSION
       load(memory_order __m = memory_order_seq_cst) const volatile noexcept
       {
        memory_order __b = __m & __memory_order_mask;
-       __glibcxx_assert(__b != memory_order_release);
-       __glibcxx_assert(__b != memory_order_acq_rel);
+       __glibcxx_assert2(__b != memory_order_release, __invalid());
+       __glibcxx_assert2(__b != memory_order_acq_rel, __invalid());
 
        return __atomic_load_n(&_M_p, __m);
       }
@@ -730,7 +739,6 @@ _GLIBCXX_BEGIN_NAMESPACE_VERSION
        return __atomic_exchange_n(&_M_p, __p, __m);
       }
 
-
       _GLIBCXX_ALWAYS_INLINE __pointer_type
       exchange(__pointer_type __p,
               memory_order __m = memory_order_seq_cst) volatile noexcept
@@ -745,9 +753,9 @@ _GLIBCXX_BEGIN_NAMESPACE_VERSION
       {
        memory_order __b2 = __m2 & __memory_order_mask;
        memory_order __b1 = __m1 & __memory_order_mask;
-       __glibcxx_assert(__b2 != memory_order_release);
-       __glibcxx_assert(__b2 != memory_order_acq_rel);
-       __glibcxx_assert(__b2 <= __b1);
+       __glibcxx_assert2(__b2 != memory_order_release, __invalid());
+       __glibcxx_assert2(__b2 != memory_order_acq_rel, __invalid());
+       __glibcxx_assert2(__b2 <= __b1, __invalid());
 
        return __atomic_compare_exchange_n(&_M_p, &__p1, __p2, 0, __m1, __m2);
       }
@@ -760,9 +768,9 @@ _GLIBCXX_BEGIN_NAMESPACE_VERSION
        memory_order __b2 = __m2 & __memory_order_mask;
        memory_order __b1 = __m1 & __memory_order_mask;
 
-       __glibcxx_assert(__b2 != memory_order_release);
-       __glibcxx_assert(__b2 != memory_order_acq_rel);
-       __glibcxx_assert(__b2 <= __b1);
+       __glibcxx_assert2(__b2 != memory_order_release, __invalid());
+       __glibcxx_assert2(__b2 != memory_order_acq_rel, __invalid());
+       __glibcxx_assert2(__b2 <= __b1, __invalid());
 
        return __atomic_compare_exchange_n(&_M_p, &__p1, __p2, 0, __m1, __m2);
       }
@@ -786,6 +794,11 @@ _GLIBCXX_BEGIN_NAMESPACE_VERSION
       fetch_sub(ptrdiff_t __d,
                memory_order __m = memory_order_seq_cst) volatile noexcept
       { return __atomic_fetch_sub(&_M_p, _M_type_size(__d), __m); }
+
+    private:
+      static void
+      __invalid()
+      __attribute__((__error__("invalid memory order for atomic operation")));
     };
 
   // @} group atomics
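
For illustration, with a macro along those lines a caller that passes
a provably invalid order would fail to compile instead of asserting
at run time (hypothetical example; needs -O1 or higher so the call
gets inlined and the condition constant-folded):

#include <atomic>

int f(std::atomic<int>& a)
{
  // release is not a valid order for a load; once load() is inlined
  // the condition is constant-false, the call to __invalid()
  // survives, and its error attribute aborts the compilation.
  return a.load(std::memory_order_release);
}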
