This is an automated email from the ASF dual-hosted git repository.

gkoszyk pushed a commit to branch partition_redesign
in repository https://gitbox.apache.org/repos/asf/iggy.git

commit 245f76c7d584598077990ac7aaeb23604e0a435b
Author: numinex <[email protected]>
AuthorDate: Thu Mar 19 22:38:33 2026 +0100

    use alignedvec
---
 Cargo.lock          |   3 +
 core/buf/Cargo.toml |   1 +
 core/buf/src/lib.rs | 480 +++++++++++++++++++---------------------------------
 3 files changed, 182 insertions(+), 302 deletions(-)

diff --git a/Cargo.lock b/Cargo.lock
index 430c4ac31..e6f53bb5f 100644
--- a/Cargo.lock
+++ b/Cargo.lock
@@ -1570,6 +1570,9 @@ dependencies = [
 [[package]]
 name = "buf"
 version = "0.1.0"
+dependencies = [
+ "aligned-vec",
+]
 
 [[package]]
 name = "built"
diff --git a/core/buf/Cargo.toml b/core/buf/Cargo.toml
index 35387e669..8a5d193a9 100644
--- a/core/buf/Cargo.toml
+++ b/core/buf/Cargo.toml
@@ -4,3 +4,4 @@ version = "0.1.0"
 edition = "2024"
 
 [dependencies]
+aligned-vec = "0.6"
diff --git a/core/buf/src/lib.rs b/core/buf/src/lib.rs
index 654aa02e1..d24d6e470 100644
--- a/core/buf/src/lib.rs
+++ b/core/buf/src/lib.rs
@@ -1,117 +1,73 @@
-use std::alloc::{Layout, alloc, dealloc};
 use std::mem::ManuallyDrop;
-use std::ops::{Deref, RangeBounds, Bound};
-use std::ptr::{self, NonNull, slice_from_raw_parts_mut};
+use std::ptr::NonNull;
 use std::slice;
 use std::sync::atomic::{AtomicUsize, Ordering, fence};
 
-#[derive(Debug, Clone, PartialEq, Eq)]
-pub struct Owned {
-    inner: Vec<u8>,
-}
-
-#[derive(Clone, Copy)]
-struct Half {
-    ptr: NonNull<u8>,
-    len: usize,
-    ctrlb: NonNull<ControlBlock>,
-}
+use aligned_vec::{AVec, ConstAlign};
 
+#[repr(C, align(64))]
 struct ControlBlock {
     ref_count: AtomicUsize,
     base: NonNull<u8>,
     len: usize,
-    cap: usize,
-}
-
-fn create_control_block(base: NonNull<u8>, len: usize, cap: usize) -> 
NonNull<ControlBlock> {
-    let ctrlb = Box::new(ControlBlock {
-        ref_count: AtomicUsize::new(1),
-        base,
-        len,
-        cap,
-    });
-    // SAFETY: `ctrlb` is a valid control block for the lifetime of the 
returned halves.
-    unsafe { NonNull::new_unchecked(Box::into_raw(ctrlb)) }
-}
-
-pub struct TwoHalves {
-    buf: (Half, Half),
-    split_at: usize,
+    capacity: usize,
+    _pad: [u8; 32],
 }
 
-impl From<Vec<u8>> for Owned {
-    fn from(vec: Vec<u8>) -> Self {
-        Self { inner: vec }
+impl ControlBlock {
+    fn new(base: NonNull<u8>, len: usize, capacity: usize) -> NonNull<Self> {
+        let ctrl = Box::new(ControlBlock {
+            ref_count: AtomicUsize::new(1),
+            base,
+            len,
+            capacity,
+            _pad: [0; 32],
+        });
+        // SAFETY: Box::into_raw returns a valid pointer
+        unsafe { NonNull::new_unchecked(Box::into_raw(ctrl)) }
     }
 }
 
-impl From<Owned> for Vec<u8> {
-    fn from(value: Owned) -> Self {
-        value.inner
-    }
+#[derive(Copy)]
+struct Extent {
+    ptr: NonNull<u8>,
+    len: usize,
+    ctrlb: NonNull<ControlBlock>,
+    _pad: usize,
 }
 
-impl Owned {
-    pub fn from_vec(vec: Vec<u8>) -> Self {
-        Self { inner: vec }
+impl Extent {
+    fn as_slice(&self) -> &[u8] {
+        // SAFETY: ptr and len describe a valid allocation
+        unsafe { slice::from_raw_parts(self.ptr.as_ptr(), self.len) }
     }
 
-    pub fn as_slice(&self) -> &[u8] {
-        &self.inner
+    unsafe fn as_mut_slice(&mut self) -> &mut [u8] {
+        // SAFETY: caller guarantees exclusive access
+        unsafe { slice::from_raw_parts_mut(self.ptr.as_ptr(), self.len) }
     }
 
-    pub fn as_mut_slice(&mut self) -> &mut [u8] {
-        &mut self.inner
-    }
+    fn copy_from_slice<const ALIGN: usize>(src: &[u8]) -> Self {
+        let mut v: AVec<u8, ConstAlign<ALIGN>> = AVec::new(ALIGN);
+        v.extend_from_slice(src);
 
-    pub fn split_at(self, split_at: usize) -> TwoHalves {
-        assert!(split_at <= self.inner.len());
-        // Transfering ownership of the `Inner` to `TwoHalves`, which is from 
now on responsible for dropping it.
-        let mut inner = ManuallyDrop::new(self.inner);
-        let len = inner.len();
-        let cap = inner.capacity();
+        let (ptr, _, len, capacity) = v.into_raw_parts();
+        let data = unsafe { NonNull::new_unchecked(ptr) };
 
-        // SAFETY: both pointers are constructed from the same `Inner` 
allocation, the split_at bounds are validated.
-        // The control block captures original `Inner` metadata to allow 
reconstructing the original frame for merging/dropping.
-        // The ptr provenence rules are maintained by the use of `NonNull` 
apis.
-        let ptr = inner.as_mut_ptr();
-        let base = unsafe { NonNull::new_unchecked(ptr) };
-        let other = unsafe { NonNull::new_unchecked(ptr.add(split_at)) };
+        let ctrlb = ControlBlock::new(data, len, capacity);
 
-        let ctrlb = create_control_block(base, len, cap);
-        TwoHalves {
-            buf: (
-                Half {
-                    ptr: base,
-                    len: split_at,
-                    ctrlb,
-                },
-                Half {
-                    ptr: other,
-                    len: len - split_at,
-                    ctrlb,
-                },
-            ),
-            split_at,
+        Extent {
+            ptr: data,
+            len,
+            ctrlb,
+            _pad: 0,
         }
     }
 }
 
-impl Half {
-    fn as_slice(&self) -> &[u8] {
-        self.ptr.as_ptr();
-        // SAFETY: `ptr,len` always describe a live allocation owned by 
`ctrlb`.
-        unsafe { slice::from_raw_parts(self.ptr.as_ptr(), self.len) }
-    }
-
-    unsafe fn as_mut_slice(&mut self) -> &mut [u8] {
-        // SAFETY: caller must provide the safety guarantees for mutable 
access.
-        unsafe { slice::from_raw_parts_mut(self.ptr.as_ptr(), self.len) }
-    }
-
-    fn share(&self) -> Self {
-        // SAFETY: `ctrlb` points to a valid control block for the lifetime of 
this half.
+impl Clone for Extent {
+    fn clone(&self) -> Self {
+        // SAFETY: `self.ctrlb` points to a live control block while `self` is 
alive.
         unsafe {
             self.ctrlb
                 .as_ref()
@@ -120,35 +76,15 @@ impl Half {
         }
         *self
     }
-
-    fn copy_from_slice(src: &[u8]) -> Self {
-        let mut vec = Vec::with_capacity(src.len());
-        vec.extend_from_slice(src);
-
-        // Transfering ownership of the `Inner` to `Half`, which is from now 
on responsible for dropping it.
-        let mut inner = ManuallyDrop::new(vec);
-        let ptr = inner.as_mut_ptr();
-        let base = unsafe { NonNull::new_unchecked(ptr) };
-        let len = inner.len();
-        let cap = inner.capacity();
-
-        let ctrlb = create_control_block(base, len, cap);
-        Self {
-            ptr: base,
-            len,
-            ctrlb,
-        }
-    }
 }
 
-/// Drops the control block, together with associated allocation if this is 
the last reference.
-/// This is used for both halves, so it must be careful to only drop the 
shared allocation once.
-unsafe fn release_control_block_w_allocation(ctrlb: NonNull<ControlBlock>) {
-    // SAFETY: caller guarantees `ctrlb` points to a live control block.
+unsafe fn release_control_block_w_allocation<const ALIGN: usize>(ctrlb: 
NonNull<ControlBlock>) {
+    // SAFETY: ctrlb is valid per function preconditions
     let old = unsafe { ctrlb.as_ref() }
         .ref_count
         .fetch_sub(1, Ordering::Release);
     debug_assert!(old > 0, "control block refcount underflow");
+
     if old != 1 {
         return;
     }
@@ -173,10 +109,19 @@ unsafe fn release_control_block_w_allocation(ctrlb: 
NonNull<ControlBlock>) {
     //
     fence(Ordering::Acquire);
 
-    // SAFETY: refcount reached zero, so this control block is uniquely owned 
here.
+    // SAFETY: refcount is zero, we have exclusive ownership
     let ctrlb = unsafe { Box::from_raw(ctrlb.as_ptr()) };
-    // SAFETY: `base,len,cap` were captured from a `Vec<u8>` allocation and 
are still valid.
-    let _vec = unsafe { Vec::from_raw_parts(ctrlb.base.as_ptr(), ctrlb.len, 
ctrlb.cap) };
+
+    // SAFETY: `ctrlb.base`, `ctrlb.len` and `ctrlb.capacity` were captured 
from an `AVec`
+    // allocation. We reconstruct the AVec and let it deallocate properly.
+    let _ = unsafe {
+        AVec::<u8, ConstAlign<ALIGN>>::from_raw_parts(
+            ctrlb.base.as_ptr(),
+            ALIGN,
+            ctrlb.len,
+            ctrlb.capacity,
+        )
+    };
 }
 
 unsafe fn reclaim_unique_control_block(ctrlb: NonNull<ControlBlock>) -> 
ControlBlock {
@@ -190,7 +135,74 @@ unsafe fn reclaim_unique_control_block(ctrlb: 
NonNull<ControlBlock>) -> ControlB
     unsafe { *Box::from_raw(ctrlb.as_ptr()) }
 }
 
-impl TwoHalves {
+#[derive(Debug)]
+pub struct Owned<const ALIGN: usize = 4096> {
+    inner: AVec<u8, ConstAlign<ALIGN>>,
+}
+
+impl<const ALIGN: usize> From<AVec<u8, ConstAlign<ALIGN>>> for Owned<ALIGN> {
+    fn from(vec: AVec<u8, ConstAlign<ALIGN>>) -> Self {
+        Self { inner: vec }
+    }
+}
+
+impl<const ALIGN: usize> From<Owned<ALIGN>> for AVec<u8, ConstAlign<ALIGN>> {
+    fn from(value: Owned<ALIGN>) -> Self {
+        value.inner
+    }
+}
+
+impl<const ALIGN: usize> Owned<ALIGN> {
+    pub fn as_slice(&self) -> &[u8] {
+        &self.inner
+    }
+
+    pub fn as_mut_slice(&mut self) -> &mut [u8] {
+        &mut self.inner
+    }
+
+    /// Split `Owned` buffer into two halves sharing the original allocation.
+    ///
+    /// # Panics
+    /// Panics if `split_at > self.len()`. (No alignment requirement is enforced on `split_at`.)
+    pub fn split_at(self, split_at: usize) -> TwoHalves<ALIGN> {
+        assert!(split_at <= self.inner.len());
+
+        // Take ownership of the AVec's allocation. After this, we are 
responsible
+        // for deallocating via `AVec::from_raw_parts` or equivalent.
+        let (ptr, _, len, capacity) = self.inner.into_raw_parts();
+
+        // SAFETY: both pointers are constructed from the same `AVec` allocation; the `split_at` bound is validated above.
+        // The control block captures the original `AVec` metadata (base, len, capacity) to allow reconstructing the original frame for merging/dropping.
+        // Pointer provenance rules are maintained by the use of `NonNull` APIs.
+        let base: NonNull<u8> = unsafe { NonNull::new_unchecked(ptr) };
+        let tail = unsafe { NonNull::new_unchecked(ptr.add(split_at)) };
+        let ctrlb = ControlBlock::new(base, len, capacity);
+
+        TwoHalves {
+            buf: (
+                Extent {
+                    ptr: base,
+                    len: split_at,
+                    ctrlb,
+                    _pad: 0,
+                },
+                Extent {
+                    ptr: tail,
+                    len: len - split_at,
+                    ctrlb,
+                    _pad: 0,
+                },
+            ),
+        }
+    }
+}
+
+pub struct TwoHalves<const ALIGN: usize> {
+    buf: (Extent, Extent),
+}
+
+impl<const ALIGN: usize> TwoHalves<ALIGN> {
     pub fn head(&self) -> &[u8] {
         self.buf.0.as_slice()
     }
@@ -206,7 +218,7 @@ impl TwoHalves {
     }
 
     pub fn split_at(&self) -> usize {
-        self.split_at
+        self.buf.0.len
     }
 
     pub fn total_len(&self) -> usize {
@@ -219,68 +231,72 @@ impl TwoHalves {
         unsafe { self.buf.1.ctrlb.as_ref().ref_count.load(Ordering::Acquire) 
== 1 }
     }
 
-    pub fn try_merge(self) -> Result<Owned, Self> {
+    pub fn try_merge(self) -> Result<Owned<ALIGN>, Self> {
         if !self.is_unique() {
             return Err(self);
         }
 
-        // We transfer the ownership to `Owned`, in order to prevent 
double-free, we must not drop `Self`.
+        // Transfer ownership to prevent double-free
         let this = ManuallyDrop::new(self);
         let head = this.buf.0;
         let tail = this.buf.1;
-        let split_at = this.split_at;
+        let split_at = head.len;
 
         // SAFETY: `tail.ctrlb` is unique at this point,
         // If `head.ctrlb != tail.ctrlb`, the head owns a standalone allocation
         // that must be released after copying.
         unsafe {
             let ctrlb_eq = std::ptr::addr_eq(head.ctrlb.as_ptr(), 
tail.ctrlb.as_ptr());
+
             if !ctrlb_eq {
                 let tail_ctrlb = tail.ctrlb.as_ref();
 
                 // We are patching up the original allocation, with the 
current head data, so that the resulting `Owned` has correct content.
                 let dst = slice::from_raw_parts_mut(tail_ctrlb.base.as_ptr(), 
split_at);
                 dst.copy_from_slice(head.as_slice());
-                release_control_block_w_allocation(head.ctrlb);
+                release_control_block_w_allocation::<ALIGN>(head.ctrlb);
             }
 
             let ctrlb = reclaim_unique_control_block(tail.ctrlb);
-            // SAFETY: `ctrlb.base,len,cap` were captured from a `Vec<u8>` 
allocation and
+            // SAFETY: `ctrlb.base`, `ctrlb.len` and `ctrlb.capacity` were captured from an `AVec<u8>` allocation and
             // are now exclusively owned by this path.
-            let inner = Vec::from_raw_parts(ctrlb.base.as_ptr(), ctrlb.len, 
ctrlb.cap);
+            let inner = AVec::from_raw_parts(ctrlb.base.as_ptr(), ALIGN, 
ctrlb.len, ctrlb.capacity);
             Ok(Owned { inner })
         }
     }
 }
 
-impl Clone for TwoHalves {
+impl<const ALIGN: usize> Clone for TwoHalves<ALIGN> {
     fn clone(&self) -> Self {
         Self {
-            buf: (Half::copy_from_slice(self.head()), self.buf.1.share()),
-            split_at: self.split_at,
+            buf: (
+                Extent::copy_from_slice::<ALIGN>(self.head()),
+                self.buf.1.clone(),
+            ),
         }
     }
 }
 
-impl Drop for TwoHalves {
+impl<const ALIGN: usize> Drop for TwoHalves<ALIGN> {
     fn drop(&mut self) {
         // SAFETY: `buf.0.ctrlb` / `buf.1.ctrlb` point to live control blocks 
while `self` is alive.
+        let ctrlb_eq = std::ptr::addr_eq(self.buf.0.ctrlb.as_ptr(), 
self.buf.1.ctrlb.as_ptr());
         unsafe {
-            if std::ptr::addr_eq(self.buf.0.ctrlb.as_ptr(), 
self.buf.1.ctrlb.as_ptr()) {
-                release_control_block_w_allocation(self.buf.1.ctrlb);
+            if ctrlb_eq {
+                release_control_block_w_allocation::<ALIGN>(self.buf.1.ctrlb);
             } else {
-                // Two separate control blocks, so we must release both 
allocations.
-                release_control_block_w_allocation(self.buf.0.ctrlb);
-                release_control_block_w_allocation(self.buf.1.ctrlb);
+                // Different control blocks, release both
+                release_control_block_w_allocation::<ALIGN>(self.buf.0.ctrlb);
+                release_control_block_w_allocation::<ALIGN>(self.buf.1.ctrlb);
             }
         }
     }
 }
 
-impl std::fmt::Debug for TwoHalves {
+impl<const ALIGN: usize> std::fmt::Debug for TwoHalves<ALIGN> {
     fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
         f.debug_struct("TwoHalves")
-            .field("split_at", &self.split_at)
+            .field("split_at", &self.split_at())
             .field("head_len", &self.buf.0.len)
             .field("tail_len", &self.buf.1.len)
             .field("halves_alias", &(self.buf.0.ctrlb == self.buf.1.ctrlb))
@@ -291,10 +307,19 @@ impl std::fmt::Debug for TwoHalves {
 #[cfg(test)]
 mod tests {
     use super::Owned;
+    use aligned_vec::AVec;
+    use aligned_vec::ConstAlign;
+
+    fn make_owned(data: &[u8]) -> Owned {
+        let mut v: AVec<u8, ConstAlign<4096>> = AVec::new(4096);
+        v.extend_from_slice(data);
+        v.into()
+    }
 
     #[test]
     fn split_exposes_head_and_tail() {
-        let mut buffer = Owned::from_vec(vec![1, 2, 3, 4, 5]).split_at(2);
+        let owned = make_owned(&[1, 2, 3, 4, 5]);
+        let mut buffer = owned.split_at(2);
 
         assert_eq!(buffer.head(), &[1, 2]);
         assert_eq!(buffer.tail(), &[3, 4, 5]);
@@ -308,7 +333,8 @@ mod tests {
 
     #[test]
     fn clone_copies_head_and_shares_tail() {
-        let mut original = Owned::from_vec(vec![1, 2, 3, 4, 5]).split_at(2);
+        let owned = make_owned(&[1, 2, 3, 4, 5]);
+        let mut original = owned.split_at(2);
         let mut cloned = original.clone();
 
         assert!(!original.is_unique());
@@ -325,16 +351,18 @@ mod tests {
 
     #[test]
     fn try_merge_reuses_original_frame_when_unique() {
-        let mut buffer = Owned::from_vec(vec![1, 2, 3, 4, 5]).split_at(2);
+        let owned = make_owned(&[1, 2, 3, 4, 5]);
+        let mut buffer = owned.split_at(2);
         buffer.head_mut().copy_from_slice(&[8, 9]);
 
-        let merged: Vec<u8> = buffer.try_merge().unwrap().into();
-        assert_eq!(merged, vec![8, 9, 3, 4, 5]);
+        let merged: AVec<u8, ConstAlign<4096>> = 
buffer.try_merge().unwrap().into();
+        assert_eq!(merged.as_slice(), &[8, 9, 3, 4, 5]);
     }
 
     #[test]
     fn try_merge_fails_while_tail_is_shared() {
-        let buffer = Owned::from_vec(vec![1, 2, 3, 4, 5]).split_at(2);
+        let owned = make_owned(&[1, 2, 3, 4, 5]);
+        let buffer = owned.split_at(2);
         let clone = buffer.clone();
 
         let buffer = buffer.try_merge().unwrap_err();
@@ -342,13 +370,14 @@ mod tests {
 
         drop(clone);
 
-        let merged: Vec<u8> = buffer.try_merge().unwrap().into();
-        assert_eq!(merged, vec![1, 2, 3, 4, 5]);
+        let merged: AVec<u8, ConstAlign<4096>> = 
buffer.try_merge().unwrap().into();
+        assert_eq!(merged.as_slice(), &[1, 2, 3, 4, 5]);
     }
 
     #[test]
     fn merge_after_cloned_head_mutation_writes_back_to_original_frame() {
-        let buffer = Owned::from_vec(vec![1, 2, 3, 4, 5]).split_at(2);
+        let owned = make_owned(&[1, 2, 3, 4, 5]);
+        let buffer = owned.split_at(2);
         let mut clone = buffer.clone();
 
         drop(buffer);
@@ -356,24 +385,27 @@ mod tests {
         clone.head_mut().copy_from_slice(&[4, 2]);
         assert!(clone.is_unique());
 
-        let merged: Vec<u8> = clone.try_merge().unwrap().into();
-        assert_eq!(merged, vec![4, 2, 3, 4, 5]);
+        let merged: AVec<u8, ConstAlign<4096>> = 
clone.try_merge().unwrap().into();
+        assert_eq!(merged.as_slice(), &[4, 2, 3, 4, 5]);
     }
 
     #[test]
     fn zero_length_splits_work() {
-        let left_empty = Owned::from_vec(vec![1, 2, 3]).split_at(0);
+        let owned = make_owned(&[1, 2, 3]);
+        let left_empty = owned.split_at(0);
         assert_eq!(left_empty.head(), &[]);
         assert_eq!(left_empty.tail(), &[1, 2, 3]);
 
-        let right_empty = Owned::from_vec(vec![1, 2, 3]).split_at(3);
+        let owned = make_owned(&[1, 2, 3]);
+        let right_empty = owned.split_at(3);
         assert_eq!(right_empty.head(), &[1, 2, 3]);
         assert_eq!(right_empty.tail(), &[]);
     }
 
     #[test]
     fn clone_of_clone_keeps_tail_sharing_semantics() {
-        let original = Owned::from_vec(vec![1, 2, 3, 4, 5]).split_at(2);
+        let owned = make_owned(&[1, 2, 3, 4, 5]);
+        let original = owned.split_at(2);
         let clone1 = original.clone();
         let clone2 = clone1.clone();
 
@@ -382,159 +414,3 @@ mod tests {
         assert!(!clone2.is_unique());
     }
 }
-
-// 
=============================================================================
-// DESIGN: Packed Wide Pointer with Inline ControlBlock
-// 
=============================================================================
-//
-// GOAL: 16-byte sliceable refcounted buffer view, reusing ControlBlock for
-//       both Frozen and TwoHalves.
-//
-// 
-----------------------------------------------------------------------------
-// MEMORY LAYOUT
-// 
-----------------------------------------------------------------------------
-//
-//     ┌─────────────────────────┬────────────────────────────────────────────┐
-//     │  ControlBlock (16B)     │  data bytes                                │
-//     │  ref_count: AtomicUsize │  [0] [1] [2] [3] [4] [5] [6] [7] ...       │
-//     │  capacity:  usize       │                                            │
-//     └─────────────────────────┴────────────────────────────────────────────┘
-//     ^                         ^
-//     │                         │
-//     alloc_ptr            data_start (offset = 0)
-//
-// 
-----------------------------------------------------------------------------
-// PACKED VIEW STRUCT (16 bytes)
-// 
-----------------------------------------------------------------------------
-//
-//     struct PackedView {
-//         data: NonNull<[u8]>,   // fat pointer: (ptr, packed_len)
-//     }
-//
-//     The "length" portion of the fat pointer is PACKED:
-//
-//         packed_len = (alloc_offset << 32) | actual_len
-//
-//         ┌────────────────────────────────────────────────────────────────┐
-//         │  63 .............. 32 │ 31 ............................ 0     │
-//         │     alloc_offset      │         actual_len                    │
-//         └────────────────────────────────────────────────────────────────┘
-//
-//     - actual_len:   length of current view (max 4GB)
-//     - alloc_offset: distance from data_start to current ptr (max 4GB)
-//
-// 
-----------------------------------------------------------------------------
-// EXAMPLE: SLICING
-// 
-----------------------------------------------------------------------------
-//
-//     Initial (full buffer, len=8):
-//
-//         data.ptr ────────────────────────┐
-//                                          ▼
-//         ┌──────────────┬─────────────────────────────────────┐
-//         │ ControlBlock │ [0] [1] [2] [3] [4] [5] [6] [7]     │
-//         └──────────────┴─────────────────────────────────────┘
-//
-//         packed_len = (0 << 32) | 8   // offset=0, len=8
-//
-//
-//     After slice(3..6):
-//
-//                       data.ptr ──────────┐
-//                                          ▼
-//         ┌──────────────┬─────────────────────────────────────┐
-//         │ ControlBlock │ [0] [1] [2] [3] [4] [5] [6] [7]     │
-//         └──────────────┴─────────────────────────────────────┘
-//
-//         packed_len = (3 << 32) | 3   // offset=3, len=3
-//
-// 
-----------------------------------------------------------------------------
-// RECONSTRUCTING CONTROLBLOCK
-// 
-----------------------------------------------------------------------------
-//
-//     fn ctrl_block(&self) -> &ControlBlock {
-//         let data_ptr = self.data.as_ptr() as *const u8;
-//         let offset = self.data.len() >> 32;
-//         let data_start = data_ptr.sub(offset);
-//         let ctrl_ptr = data_start.sub(size_of::<ControlBlock>());
-//         &*ctrl_ptr.cast::<ControlBlock>()
-//     }
-//
-// 
-----------------------------------------------------------------------------
-// CONTROLBLOCK (shared by Frozen, TwoHalves, etc.)
-// 
-----------------------------------------------------------------------------
-//
-//     #[repr(C)]
-//     struct ControlBlock {
-//         ref_count: AtomicUsize,  // 8 bytes - atomic refcount
-//         capacity:  usize,        // 8 bytes - original alloc size for 
dealloc
-//     }
-//
-//     - ref_count: shared by all views into this allocation
-//     - capacity:  needed to reconstruct Layout for dealloc
-//
-//     NOTE: No need for `base` or `len` - both reconstructable from 
PackedView!
-//
-// 
-----------------------------------------------------------------------------
-// USAGE IN FROZEN
-// 
-----------------------------------------------------------------------------
-//
-//     pub struct Frozen(PackedView);  // 16 bytes
-//
-//     impl Frozen {
-//         fn slice(self, range: Range<usize>) -> Frozen {
-//             // Adjust ptr forward, update packed offset+len
-//             // No allocation, no refcount change (consumes self)
-//         }
-//
-//         fn clone(&self) -> Frozen {
-//             self.ctrl_block().ref_count.fetch_add(1, Relaxed);
-//             Frozen(PackedView { data: self.0.data })
-//         }
-//
-//         fn drop(&mut self) {
-//             if self.ctrl_block().ref_count.fetch_sub(1, Release) == 1 {
-//                 fence(Acquire);
-//                 let layout = Layout::from_size_align(
-//                     size_of::<ControlBlock>() + self.ctrl_block().capacity,
-//                     align_of::<ControlBlock>(),
-//                 );
-//                 dealloc(self.alloc_ptr(), layout);
-//             }
-//         }
-//     }
-//
-// 
-----------------------------------------------------------------------------
-// USAGE IN TWOHALVES
-// 
-----------------------------------------------------------------------------
-//
-//     pub struct TwoHalves {
-//         head: PackedView,   // 16 bytes - view into [0..split_at]
-//         tail: PackedView,   // 16 bytes - view into [split_at..len]
-//         split_at: usize,    // 8 bytes
-//     }
-//
-//     Memory:
-//
-//         head.ptr ───────────────┐
-//                                 ▼
-//         ┌──────────────┬───────────────┬───────────────────┐
-//         │ ControlBlock │ [head bytes]  │ [tail bytes]      │
-//         └──────────────┴───────────────┴───────────────────┘
-//                                         ^
-//                                         │
-//         tail.ptr ───────────────────────┘
-//
-//         head.packed_len = (0 << 32) | split_at
-//         tail.packed_len = (split_at << 32) | (total_len - split_at)
-//
-//     Both head and tail reconstruct the SAME ControlBlock.
-//
-
-// The downside of this solution is that we allow up to 4GB buffers 
-// (if somebody would send batch taht is bigger that, we'd have to reject it)
-// The upside is that we can fit more of those in a single cache-line and the 
size is an power of 2 number
-// smaller than 64, so there is no wasted space, I think it's a worthwhile 
tradeoff,
-// given that the access pattern (especially for Frozen), could invole, 
iterating through 
-// collection of those two times, once when searching for the right batch
-// and once when submitting the buffer to the kernel, so the cache locality is 
important.
\ No newline at end of file

Reply via email to