diff --git a/.clippy.toml b/.clippy.toml index 8fa8b11..05def10 100644 --- a/.clippy.toml +++ b/.clippy.toml @@ -1 +1 @@ -msrv = "1.56.0" \ No newline at end of file +msrv = "1.77.0" \ No newline at end of file diff --git a/.codecov.yml b/.codecov.yml index a0411e3..d36ab60 100644 --- a/.codecov.yml +++ b/.codecov.yml @@ -4,6 +4,11 @@ codecov: ignore: - integration/ - src/map/error.rs + # - src/unsync/tests.rs + # - src/unsync/tests/ + # - src/sync/tests.rs + # - src/sync/tests/ + coverage: status: diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index edd8eeb..c3ad72c 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -106,8 +106,8 @@ jobs: # cargo +nightly build --no-default-features --features alloc --target ${{ matrix.target }} -Z build-std=core,alloc # if: matrix.target == 'mips64-unknown-linux-gnuabi64' - test: - name: test + build: + name: build strategy: matrix: os: @@ -138,7 +138,39 @@ jobs: path: ~/.cargo key: ${{ runner.os }}-coverage-dotcargo - name: Run test - run: cargo hack test --feature-powerset --exclude-no-default-features --exclude-features tracing + run: cargo hack build --feature-powerset --exclude-no-default-features --exclude-features tracing + + test: + name: test + strategy: + matrix: + os: + - ubuntu-latest + - macos-latest + - windows-latest + runs-on: ${{ matrix.os }} + steps: + - uses: actions/checkout@v3 + - name: Cache cargo build and registry + uses: actions/cache@v3 + with: + path: | + ~/.cargo/registry + ~/.cargo/git + target + key: ${{ runner.os }}-test-${{ hashFiles('**/Cargo.lock') }} + restore-keys: | + ${{ runner.os }}-test- + - name: Install Rust + # --no-self-update is necessary because the windows environment cannot self-update rustup.exe. 
+ run: rustup update stable --no-self-update && rustup default stable + - name: Cache ~/.cargo + uses: actions/cache@v3 + with: + path: ~/.cargo + key: ${{ runner.os }}-coverage-dotcargo + - name: Run test + run: cargo test --all-features sanitizer: name: sanitizer @@ -209,8 +241,38 @@ working-directory: integration - miri: - name: miri + miri-tb: + name: miri-tb + strategy: + matrix: + os: + - ubuntu-latest + - macos-latest + # - windows-latest + runs-on: ${{ matrix.os }} + steps: + - uses: actions/checkout@v3 + - name: Cache cargo build and registry + uses: actions/cache@v3 + with: + path: | + ~/.cargo/registry + ~/.cargo/git + target + key: ${{ runner.os }}-miri-${{ hashFiles('**/Cargo.lock') }} + restore-keys: | + ${{ runner.os }}-miri- + - name: Install cargo-hack + run: cargo install cargo-hack + - name: Miri (Linux) + run: ci/miri_tb.sh + if: matrix.os == 'ubuntu-latest' + - name: Miri + run: ci/miri_tb_generic.sh + if: matrix.os != 'ubuntu-latest' + + miri-sb: + name: miri-sb strategy: matrix: os: @@ -233,10 +295,10 @@ - name: Install cargo-hack run: cargo install cargo-hack - name: Miri (Linux) - run: ci/miri.sh + run: ci/miri_sb.sh if: matrix.os == 'ubuntu-latest' - name: Miri - run: ci/miri_generic.sh + run: ci/miri_sb_generic.sh if: matrix.os != 'ubuntu-latest' # loom: @@ -271,6 +333,7 @@ needs: - rustfmt - clippy + - build - test - sanitizer - valgrind diff --git a/CHANGELOG.md b/CHANGELOG.md index 871f626..6b6a570 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -1,5 +1,12 @@ # CHANGELOG +## 0.14.0 + +- Supports unsync version `SkipMap`s +- Fix: dealloc potentially unused memory chunk +- Add `CompressionPolicy` as a configuration +- Increase the discarded tracker when finding a new version of a key + ## 0.13.0 - Remove `Comparator` generic on `Entry*` @@ -87,5 +94,3 @@ - Add `flush` API ## UNRELEASED - - diff --git a/Cargo.toml b/Cargo.toml index 63376f8..a4d2a91 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -1,15 +1,15 @@ [package] name =
"skl" -version = "0.13.1" +version = "0.14.0" edition = "2021" -rust-version = "1.56.0" +rust-version = "1.77.0" repository = "https://github.com/al8n/skl" description = "A lock-free thread-safe concurrent ARENA based (heap backend or memory map backend) skiplist implementation which helps develop MVCC memtable for LSM-Tree." documentation = "https://docs.rs/skl" homepage = "https://github.com/al8n/skl" keywords = ["skiplist", "lock-free", "memtable", "concurrency", "arena"] categories = ["database", "data-structures", "concurrency", "no-std"] -license = "MIT/Apache-2.0" +license = "MIT OR Apache-2.0" [[bench]] path = "benches/bench.rs" @@ -34,7 +34,7 @@ required-features = ["memmap"] default = ["std"] alloc = ["rarena-allocator/alloc"] memmap = ["rarena-allocator/memmap", "std"] -std = ["rand/default", "either/default", "rarena-allocator/std"] +std = ["rand/default", "either/default", "rarena-allocator/std", "memchr/default", "among/default"] tracing = ["dep:tracing", "rarena-allocator/tracing"] # loom = ["dep:loom", "rarena-allocator/loom"] @@ -43,11 +43,18 @@ tracing = ["dep:tracing", "rarena-allocator/tracing"] getrandom = { version = "0.2", features = ["js"] } [dependencies] +among = { version = "0.1", default-features = false } +derive_more = "0.99" +dbutils = { version = "0.3", default-features = false } either = { version = "1", default-features = false } +memchr = { version = "2", default-features = false } rand = { version = "0.8", default-features = false, features = ["getrandom"] } -rarena-allocator = { version = "0.1", default-features = false } -ux2 = { version = "0.8", default-features = false, features = ["32"] } +# rarena-allocator = { version = "0.2", default-features = false, path = "../arena/rarena-allocator" } +rarena-allocator = { version = "0.2", default-features = false } +arbitrary-int = { version = "1.2", default-features = false } +paste = "1" +time = { version = "0.3", optional = true, features = ["std"] } tracing = { version = "0.1", 
optional = true } [dev-dependencies] diff --git a/README.md b/README.md index 8a4ba92..8adacbd 100644 --- a/README.md +++ b/README.md @@ -41,25 +41,33 @@ ## Features - **MVCC and 3D access**: Builtin MVCC (multiple versioning concurrency control) and key-value-version access support. -- **Lock-free and Concurrent-Safe:** SkipMap provide lock-free operations, ensuring efficient concurrent access without the need for explicit locking mechanisms. -- **Extensible for Key-Value Database Developers:** Designed as a low-level crate, SkipMap offer a flexible foundation for key-value database developers. You can easily build your own memtable or write-ahead-log (WAL) using these structures. +- **Lock-free and Concurrent-Safe:** `SkipMap` provides lock-free operations, ensuring efficient concurrent access without the need for explicit locking mechanisms. +- **Extensible for Key-Value Database Developers:** Designed as a low-level crate, `SkipMap` offers a flexible foundation for key-value database developers. You can easily build your own memtable or durable storage using these structures. - **Memory Efficiency:** These data structures are optimized for minimal memory overhead. They operate around references, avoiding unnecessary allocations and deep copies, which can be crucial for efficient memory usage. - **Segment tracker:** Builtin segment recollection support, a lock-free freelist helps reuse free segments. - **Prefix compression:** - Same key will only be stored once. - Keys with common prefix will be stored once (longest one must be inserted first). + - Keys that are sub-slices of neighbours will be stored once (requires `CompressionPolicy::High`). - **Discard tracker:** Builtin discard tracker, which will track discarded bytes to help end-users decide when to compact or rewrite. -- **Efficient Iteration:** Enjoy fast forward and backward iteration through the elements in your SkipMap.
Additionally, bounded iterators are supported, allowing you to traverse only a specified range of elements efficiently. -- **Snapshot Support:** Create snapshots of your SkipMap, offering a read-only view of the contents at a specific moment in time. Snapshots provide a consistent view of the data, enabling implementations of transactional semantics and other use cases where data consistency is crucial. +- **Efficient Iteration:** Enjoy fast forward and backward iteration through the elements in your `SkipMap`. Additionally, bounded iterators are supported, allowing you to traverse only a specified range of elements efficiently. +- **Snapshot Support:** Create snapshots of your `SkipMap`, offering a read-only view of the contents at a specific moment in time. Snapshots provide a consistent view of the data, enabling implementations of transactional semantics and other use cases where data consistency is crucial. - **Memory Management Options:** - **Heap Allocation:** Memory allocation is handled by Rust's allocator, ensuring all data resides in RAM. - - **Mmap:** Data can be mapped to a disk file by the operating system, making it suitable for write-ahead-logs (WAL) and durable storage. + - **Mmap:** Data can be mapped to a disk file by the operating system, making it suitable for durable storage. - **Mmap Anonymous:** Mapped to anonymous memory (virtual memory) by the OS, ideal for large in-memory memtables, optimizing memory utilization. ## Examples Please see [examples](https://github.com/al8n/skl/tree/main/examples) folder for more details. +## Q & A + +- Does the on-disk version `SkipMap` ensure crash safety or power failure resilience? + + No, on-disk version `SkipMap` does not ensure crash safety or power failure resilience. Hence, it is not recommended to directly + use the `SkipMap` as a durable database. It is recommended to use the on-disk version `SkipMap` as a final frozen file for quick lookup. 
+ ## Tests - `test`: diff --git a/benches/bench.rs b/benches/bench.rs index 7b62fc8..572c4e9 100644 --- a/benches/bench.rs +++ b/benches/bench.rs @@ -1,7 +1,7 @@ use criterion::*; use parking_lot::Mutex; use rand::prelude::*; -use skl::*; +use skl::{sync::map::SkipMap, *}; use std::{ collections::*, sync::{atomic::*, *}, @@ -25,11 +25,11 @@ fn fixed_map_round( fn fixed_skiplist_round(l: &SkipMap, case: &(Vec, bool), exp: &Vec) { if case.1 { - if let Some(v) = l.get(0, &case.0) { + if let Some(v) = l.get(&case.0) { assert_eq!(v.value(), exp); } } else { - l.insert(0, &case.0, exp).unwrap(); + l.insert(&case.0, exp).unwrap(); } } @@ -42,7 +42,7 @@ fn random_key(rng: &mut ThreadRng) -> Vec { fn bench_read_write_fixed_skiplist_frac(b: &mut Bencher<'_>, frac: &usize) { let frac = *frac; let value = b"00123".to_vec(); - let list = Arc::new(SkipMap::with_options(Options::new().with_capacity(512 << 20)).unwrap()); + let list = Arc::new(SkipMap::new(Options::new().with_capacity(512 << 20)).unwrap()); let l = list.clone(); let stop = Arc::new(AtomicBool::new(false)); let s = stop.clone(); @@ -164,7 +164,7 @@ fn bench_write_fixed_map(c: &mut Criterion) { } fn bench_write_fixed_skiplist(c: &mut Criterion) { - let list = Arc::new(SkipMap::with_options(Options::new().with_capacity(512 << 21)).unwrap()); + let list = Arc::new(SkipMap::new(Options::new().with_capacity(512 << 21)).unwrap()); let value = b"00123".to_vec(); let l = Arc::clone(&list); let stop = Arc::new(AtomicBool::new(false)); @@ -182,7 +182,7 @@ fn bench_write_fixed_skiplist(c: &mut Criterion) { b.iter_batched( || random_key(&mut rng), |key| { - list.insert(0, &key, &value).unwrap(); + list.insert(&key, &value).unwrap(); }, BatchSize::SmallInput, ) diff --git a/ci/miri.sh b/ci/miri_sb.sh similarity index 100% rename from ci/miri.sh rename to ci/miri_sb.sh diff --git a/ci/miri_generic.sh b/ci/miri_sb_generic.sh similarity index 100% rename from ci/miri_generic.sh rename to ci/miri_sb_generic.sh diff --git 
a/ci/miri_tb.sh b/ci/miri_tb.sh new file mode 100755 index 0000000..f9a182d --- /dev/null +++ b/ci/miri_tb.sh @@ -0,0 +1,13 @@ +#!/bin/bash +set -e + +rustup toolchain install nightly --component miri +rustup override set nightly +cargo miri setup + +export MIRIFLAGS="-Zmiri-strict-provenance -Zmiri-disable-isolation -Zmiri-symbolic-alignment-check -Zmiri-tree-borrows" + +cargo miri test --all-features --target x86_64-unknown-linux-gnu +# cargo miri test --tests --target aarch64-unknown-linux-gnu #crossbeam_utils has problem on this platform +cargo miri test --all-features --target i686-unknown-linux-gnu +cargo miri test --all-features --target powerpc64-unknown-linux-gnu diff --git a/ci/miri_tb_generic.sh b/ci/miri_tb_generic.sh new file mode 100755 index 0000000..4ff995e --- /dev/null +++ b/ci/miri_tb_generic.sh @@ -0,0 +1,10 @@ +#!/bin/bash +set -e + +rustup toolchain install nightly --component miri +rustup override set nightly +cargo miri setup + +export MIRIFLAGS="-Zmiri-strict-provenance -Zmiri-disable-isolation -Zmiri-symbolic-alignment-check -Zmiri-tree-borrows" + +cargo miri test --tests --features memmap diff --git a/examples/heap.rs b/examples/heap.rs index 24b2025..723f40e 100644 --- a/examples/heap.rs +++ b/examples/heap.rs @@ -1,5 +1,4 @@ -use skl::*; -use std::sync::Arc; +use skl::{sync::map::*, *}; pub fn key(i: usize) -> Vec { format!("{:05}", i).into_bytes() @@ -11,24 +10,23 @@ pub fn new_value(i: usize) -> Vec { fn main() { const N: usize = 1000; - let l = SkipMap::with_options(Options::new().with_capacity(1 << 20)).unwrap(); - let mut wg = Arc::new(()); + + let l = SkipMap::new(Options::new().with_capacity(1 << 20)).unwrap(); + for i in 0..N { - let w = wg.clone(); let l = l.clone(); std::thread::spawn(move || { - l.insert(0, &key(i), &new_value(i)).unwrap(); - drop(w); + l.insert(&key(i), &new_value(i)).unwrap(); }); } - while Arc::get_mut(&mut wg).is_none() {} + + while l.refs() > 1 {} + for i in 0..N { - let w = wg.clone(); let l = 
l.clone(); std::thread::spawn(move || { let k = key(i); - assert_eq!(l.get(0, &k).unwrap().value(), new_value(i), "broken: {i}"); - drop(w); + assert_eq!(l.get(&k).unwrap().value(), new_value(i), "broken: {i}"); }); } } diff --git a/examples/mmap.rs b/examples/mmap.rs index 8e8dcff..0ff0159 100644 --- a/examples/mmap.rs +++ b/examples/mmap.rs @@ -1,5 +1,4 @@ -use skl::SkipMap; -use std::sync::Arc; +use skl::{sync::map::SkipMap, Options}; pub fn key(i: usize) -> Vec { format!("{:05}", i).into_bytes() @@ -17,24 +16,23 @@ fn main() { .read(true) .write(true); - let l = SkipMap::map_mut("test.wal", open_options, mmap_options).unwrap(); - let mut wg = Arc::new(()); + let l = + unsafe { SkipMap::map_mut("test.wal", Options::new(), open_options, mmap_options).unwrap() }; + for i in 0..N { - let w = wg.clone(); let l = l.clone(); std::thread::spawn(move || { - l.insert(0, &key(i), &new_value(i)).unwrap(); - drop(w); + l.insert(&key(i), &new_value(i)).unwrap(); }); } - while Arc::get_mut(&mut wg).is_none() {} + + while l.refs() > 1 {} + for i in 0..N { - let w = wg.clone(); let l = l.clone(); std::thread::spawn(move || { let k = key(i); - assert_eq!(l.get(0, &k).unwrap().value(), new_value(i), "broken: {i}"); - drop(w); + assert_eq!(l.get(&k).unwrap().value(), new_value(i), "broken: {i}"); }); } } diff --git a/examples/mmap_anon.rs b/examples/mmap_anon.rs index c1c4391..8a27c7c 100644 --- a/examples/mmap_anon.rs +++ b/examples/mmap_anon.rs @@ -1,5 +1,4 @@ -use skl::SkipMap; -use std::sync::Arc; +use skl::{sync::map::SkipMap, Options}; pub fn key(i: usize) -> Vec { format!("{:05}", i).into_bytes() @@ -13,24 +12,23 @@ fn main() { const N: usize = 1000; let mmap_options = skl::MmapOptions::default().len(1 << 20); - let l = SkipMap::map_anon(mmap_options).unwrap(); - let mut wg = Arc::new(()); + + let l = SkipMap::map_anon(Options::new(), mmap_options).unwrap(); + for i in 0..N { - let w = wg.clone(); let l = l.clone(); std::thread::spawn(move || { - l.insert(0, &key(i), 
&new_value(i)).unwrap(); - drop(w); + l.insert(&key(i), &new_value(i)).unwrap(); }); } - while Arc::get_mut(&mut wg).is_none() {} + + while l.refs() > 1 {} + for i in 0..N { - let w = wg.clone(); let l = l.clone(); std::thread::spawn(move || { let k = key(i); - assert_eq!(l.get(0, &k).unwrap().value(), new_value(i), "broken: {i}"); - drop(w); + assert_eq!(l.get(&k).unwrap().value(), new_value(i), "broken: {i}"); }); } } diff --git a/integration/src/bin/test-mmap-anon.rs b/integration/src/bin/test-mmap-anon.rs index 86993ae..8669258 100644 --- a/integration/src/bin/test-mmap-anon.rs +++ b/integration/src/bin/test-mmap-anon.rs @@ -1,16 +1,16 @@ use integration::{big_value, key, new_value}; -use skl::*; +use skl::{sync::map::*, *}; fn main() { { const N: usize = 10; let mmap_options = MmapOptions::default().len(1 << 20); - let l = SkipMap::map_anon(mmap_options).unwrap(); + let l = SkipMap::map_anon(Options::new(), mmap_options).unwrap(); for i in 0..N { let l = l.clone(); std::thread::spawn(move || { - l.insert(0, &key(i), &new_value(i)).unwrap(); + l.insert(&key(i), &new_value(i)).unwrap(); drop(l); }); } @@ -21,7 +21,7 @@ fn main() { let l = l.clone(); std::thread::spawn(move || { let k = key(i); - assert_eq!(l.get(0, &k).unwrap().value(), new_value(i), "broken: {i}"); + assert_eq!(l.get(&k).unwrap().value(), new_value(i), "broken: {i}"); drop(l); }); } @@ -34,11 +34,11 @@ fn main() { const N2: usize = 100; let mmap_options = MmapOptions::default().len(120 << 20); - let l = SkipMap::map_anon(mmap_options).unwrap(); + let l = SkipMap::map_anon(Options::new(), mmap_options).unwrap(); for i in 0..N2 { let l = l.clone(); std::thread::spawn(move || { - l.insert(0, &key(i), &big_value(i)).unwrap(); + l.insert(&key(i), &big_value(i)).unwrap(); }); } while l.refs() > 1 { @@ -49,7 +49,7 @@ fn main() { let l = l.clone(); std::thread::spawn(move || { let k = key(i); - assert_eq!(l.get(0, &k).unwrap().value(), big_value(i), "broken: {i}"); + 
assert_eq!(l.get(&k).unwrap().value(), big_value(i), "broken: {i}"); }); } while l.refs() > 1 { diff --git a/integration/src/bin/test-mmap.rs b/integration/src/bin/test-mmap.rs index 96fc532..afd20c0 100644 --- a/integration/src/bin/test-mmap.rs +++ b/integration/src/bin/test-mmap.rs @@ -1,5 +1,5 @@ use integration::{key, new_value}; -use skl::*; +use skl::{sync::map::SkipMap, *}; fn main() { let dir = tempfile::tempdir().unwrap(); @@ -12,11 +12,11 @@ fn main() { .read(true) .write(true); let mmap_options = MmapOptions::default(); - let l = SkipMap::map_mut(&p, open_options, mmap_options).unwrap(); + let l = unsafe { SkipMap::map_mut(&p, Options::new(), open_options, mmap_options).unwrap() }; for i in 0..N { let l = l.clone(); std::thread::spawn(move || { - l.insert(0, &key(i), &new_value(i)).unwrap(); + l.insert(&key(i), &new_value(i)).unwrap(); drop(l); }); } @@ -27,7 +27,7 @@ fn main() { let l = l.clone(); std::thread::spawn(move || { let k = key(i); - assert_eq!(l.get(0, &k).unwrap().value(), new_value(i), "broken: {i}"); + assert_eq!(l.get(&k).unwrap().value(), new_value(i), "broken: {i}"); drop(l); }); } @@ -41,13 +41,13 @@ fn main() { let open_options = OpenOptions::default().read(true); let mmap_options = MmapOptions::default(); - let l = SkipMap::::map(&p, open_options, mmap_options, 0).unwrap(); + let l = unsafe { SkipMap::map(&p, Options::new(), open_options, mmap_options).unwrap() }; assert_eq!(N2, l.len()); for i in 0..N2 { let l = l.clone(); std::thread::spawn(move || { let k = key(i); - assert_eq!(l.get(0, &k).unwrap().value(), new_value(i), "broken: {i}"); + assert_eq!(l.get(&k).unwrap().value(), new_value(i), "broken: {i}"); }); } while l.refs() > 1 { diff --git a/integration/src/bin/test-vec.rs b/integration/src/bin/test-vec.rs index b9cc3fe..0317094 100644 --- a/integration/src/bin/test-vec.rs +++ b/integration/src/bin/test-vec.rs @@ -1,14 +1,14 @@ use integration::{big_value, key, new_value}; -use skl::*; +use skl::{sync::map::SkipMap, *}; fn 
main() { { const N: usize = 10; - let l = SkipMap::with_options(Options::new().with_capacity(1 << 20)).unwrap(); + let l = SkipMap::new(Options::new().with_capacity(1 << 20)).unwrap(); for i in 0..N { let l = l.clone(); std::thread::spawn(move || { - l.insert(0, &key(i), &new_value(i)).unwrap(); + l.insert(&key(i), &new_value(i)).unwrap(); drop(l); }); } @@ -19,7 +19,7 @@ fn main() { let l = l.clone(); std::thread::spawn(move || { let k = key(i); - assert_eq!(l.get(0, &k).unwrap().value(), new_value(i), "broken: {i}"); + assert_eq!(l.get(&k).unwrap().value(), new_value(i), "broken: {i}"); drop(l); }); } @@ -30,11 +30,11 @@ fn main() { { const N2: usize = 10; - let l = SkipMap::with_options(Options::new().with_capacity(120 << 20)).unwrap(); + let l = SkipMap::new(Options::new().with_capacity(120 << 20)).unwrap(); for i in 0..N2 { let l = l.clone(); std::thread::spawn(move || { - l.insert(0, &key(i), &big_value(i)).unwrap(); + l.insert(&key(i), &big_value(i)).unwrap(); }); } while l.refs() > 1 { @@ -45,7 +45,7 @@ fn main() { let l = l.clone(); std::thread::spawn(move || { let k = key(i); - assert_eq!(l.get(0, &k).unwrap().value(), big_value(i), "broken: {i}"); + assert_eq!(l.get(&k).unwrap().value(), big_value(i), "broken: {i}"); }); } while l.refs() > 1 { diff --git a/src/allocator.rs b/src/allocator.rs new file mode 100644 index 0000000..248eb49 --- /dev/null +++ b/src/allocator.rs @@ -0,0 +1,1155 @@ +use either::Either; +use rarena_allocator::{Allocator as ArenaAllocator, ArenaOptions, BytesRefMut, Memory}; + +use super::*; + +use core::{marker::PhantomData, mem, ptr::NonNull, sync::atomic::Ordering}; + +pub trait Allocator: Sealed {} + +impl Allocator for T where T: Sealed {} + +pub(crate) use sealed::*; + +mod sealed { + use core::ptr; + + use super::*; + + pub struct Pointer { + pub(crate) offset: u32, + pub(crate) size: u32, + pub(crate) height: Option, + } + + impl Pointer { + #[inline] + pub(crate) const fn new(offset: u32, size: u32) -> Self { + Self { + 
offset, + size, + height: None, + } + } + } + + pub struct Deallocator { + pub(crate) node: Option, + pub(crate) key: Option, + pub(crate) value: Option, + } + + impl Deallocator { + #[inline] + pub fn dealloc(self, arena: &A) { + unsafe { + if let Some(ptr) = self.node { + arena.dealloc(ptr.offset, ptr.size); + } + + if let Some(ptr) = self.key { + arena.dealloc(ptr.offset, ptr.size); + } + + if let Some(ptr) = self.value { + arena.dealloc(ptr.offset, ptr.size); + } + } + } + + #[inline] + pub fn dealloc_node_and_key(self, arena: &A) { + unsafe { + if let Some(ptr) = self.node { + arena.dealloc(ptr.offset, ptr.size); + } + + if let Some(ptr) = self.key { + arena.dealloc(ptr.offset, ptr.size); + } + } + } + + #[inline] + pub fn dealloc_key_by_ref(&mut self, arena: &A) { + if let Some(ptr) = self.key.take() { + unsafe { + arena.dealloc(ptr.offset, ptr.size); + } + } + } + } + + #[derive(Debug)] + pub struct ValuePartPointer { + pub(crate) trailer_offset: u32, + pub(crate) value_offset: u32, + pub(crate) value_len: u32, + _m: core::marker::PhantomData, + } + + impl Clone for ValuePartPointer { + fn clone(&self) -> Self { + *self + } + } + + impl Copy for ValuePartPointer {} + + impl ValuePartPointer { + #[inline] + pub(crate) const fn new(trailer_offset: u32, value_offset: u32, value_len: u32) -> Self { + Self { + trailer_offset, + value_offset, + value_len, + _m: core::marker::PhantomData, + } + } + } + + pub trait ValuePointer { + /// The tombstone value. 
+ const REMOVE: u32; + + fn swap(&self, offset: u32, size: u32) -> (u32, u32); + + fn load(&self) -> (u32, u32); + } + + pub trait Link { + fn new(next_offset: u32, prev_offset: u32) -> Self; + + fn store_next_offset(&self, offset: u32, ordering: Ordering); + + fn store_prev_offset(&self, offset: u32, ordering: Ordering); + } + + #[doc(hidden)] + pub trait WithTrailer: Node {} + + #[doc(hidden)] + pub trait WithVersion: Node {} + + pub trait Node: Sized + core::fmt::Debug { + type Link: Link; + type Trailer: Trailer; + type ValuePointer: ValuePointer; + type Pointer: NodePointer; + + fn full(value_offset: u32, max_height: u8) -> Self; + + fn size(max_height: u8) -> usize { + mem::size_of::() + (max_height as usize) * mem::size_of::() + } + + fn value_pointer(&self) -> &Self::ValuePointer; + + fn set_value_pointer(&mut self, offset: u32, size: u32); + + #[inline] + fn update_value(&self, arena: &A, offset: u32, value_size: u32) { + let (_, old_len) = self.value_pointer().swap(offset, value_size); + if old_len != Self::ValuePointer::REMOVE { + arena.increase_discarded(old_len); + } + } + + fn clear_value( + &self, + arena: &A, + success: Ordering, + failure: Ordering, + ) -> Result<(), (u32, u32)>; + + fn set_key_size_and_height(&mut self, key_size_and_height: u32); + + fn set_key_offset(&mut self, key_offset: u32); + + fn version(&self) -> Version; + + fn set_version(&mut self, version: Version); + + fn key_size_and_height(&self) -> u32; + + fn key_offset(&self) -> u32; + + #[inline] + fn key_size(&self) -> u32 { + decode_key_size_and_height(self.key_size_and_height()).0 + } + + #[inline] + fn height(&self) -> u8 { + decode_key_size_and_height(self.key_size_and_height()).1 + } + + /// ## Safety + /// + /// - The caller must ensure that the node is allocated by the arena. 
+ unsafe fn get_key<'a, 'b: 'a, A: Allocator>(&'a self, arena: &'b A) -> &'b [u8] { + arena.get_bytes(self.key_offset() as usize, self.key_size() as usize) + } + + /// ## Safety + /// + /// - The caller must ensure that the node is allocated by the arena. + #[inline] + unsafe fn get_value<'a, 'b: 'a, A: Allocator>(&'a self, arena: &'b A) -> Option<&'b [u8]> { + let (offset, len) = self.value_pointer().load(); + + if len == ::REMOVE { + return None; + } + let align_offset = Self::align_offset(offset); + Some(arena.get_bytes( + align_offset as usize + mem::size_of::(), + len as usize, + )) + } + + /// ## Safety + /// + /// - The caller must ensure that the node is allocated by the arena. + #[inline] + unsafe fn get_value_by_value_offset<'a, 'b: 'a, A: Allocator>( + &'a self, + arena: &'b A, + offset: u32, + len: u32, + ) -> Option<&'b [u8]> { + if len == ::REMOVE { + return None; + } + + Some(arena.get_bytes(offset as usize, len as usize)) + } + + #[inline] + fn trailer_offset_and_value_size(&self) -> ValuePartPointer { + let (offset, len) = self.value_pointer().load(); + let align_offset = Self::align_offset(offset); + ValuePartPointer::new( + align_offset, + align_offset + mem::size_of::() as u32, + len, + ) + } + + #[inline] + unsafe fn get_trailer<'a, 'b: 'a, A: Allocator>(&'a self, arena: &'b A) -> &'b Self::Trailer { + if mem::size_of::() == 0 { + return dangling_zst_ref(); + } + + let (offset, _) = self.value_pointer().load(); + &*arena.get_aligned_pointer(offset as usize) + } + + /// ## Safety + /// + /// - The caller must ensure that the node is allocated by the arena. + #[inline] + unsafe fn get_trailer_by_offset<'a, 'b: 'a, A: Allocator>( + &'a self, + arena: &'b A, + offset: u32, + ) -> &'b Self::Trailer { + if mem::size_of::() == 0 { + return dangling_zst_ref(); + } + + &*arena.get_aligned_pointer::(offset as usize) + } + + /// ## Safety + /// + /// - The caller must ensure that the node is allocated by the arena. 
+ #[inline] + unsafe fn get_value_and_trailer_with_pointer<'a, 'b: 'a, A: Allocator>( + &'a self, + arena: &'b A, + ) -> (Option<&'b [u8]>, ValuePartPointer) { + let (offset, len) = self.value_pointer().load(); + + let align_offset = A::align_offset::(offset); + + if len == ::REMOVE { + return ( + None, + ValuePartPointer::new( + offset, + align_offset + mem::size_of::() as u32, + len, + ), + ); + } + + let value_offset = align_offset + mem::size_of::() as u32; + ( + Some(arena.get_bytes(value_offset as usize, len as usize)), + ValuePartPointer::new(offset, value_offset, len), + ) + } + + #[inline] + fn align_offset(current_offset: u32) -> u32 { + let alignment = mem::align_of::() as u32; + (current_offset + alignment - 1) & !(alignment - 1) + } + } + + pub trait NodePointer: Copy + core::fmt::Debug { + type Node: Node; + + const NULL: Self; + + // fn new(ptr: *mut u8, offset: u32) -> Self; + fn new(offset: u32) -> Self; + + #[inline] + fn is_null(&self) -> bool { + self.offset() == 0 + } + + fn offset(&self) -> u32; + + // fn ptr(&self) -> *mut Self::Node; + + #[inline] + unsafe fn tower(&self, arena: &A, idx: usize) -> &::Link { + let tower_ptr_offset = self.offset() as usize + + mem::size_of::() + + idx * mem::size_of::<::Link>(); + let tower_ptr = arena.get_pointer(tower_ptr_offset); + &*tower_ptr.cast() + } + + #[inline] + unsafe fn write_tower( + &self, + arena: &A, + idx: usize, + prev_offset: u32, + next_offset: u32, + ) { + let tower_ptr_offset = self.offset() as usize + + mem::size_of::() + + idx * mem::size_of::<::Link>(); + let tower_ptr: *mut ::Link = + arena.get_pointer_mut(tower_ptr_offset).cast(); + *tower_ptr = Link::new(next_offset, prev_offset); + } + + /// ## Safety + /// + /// - The caller must ensure that the node is allocated by the arena. + /// - The caller must ensure that the offset is less than the capacity of the arena and larger than 0. 
+ unsafe fn next_offset(&self, arena: &A, idx: usize) -> u32; + + /// ## Safety + /// + /// - The caller must ensure that the node is allocated by the arena. + /// - The caller must ensure that the offset is less than the capacity of the arena and larger than 0. + unsafe fn prev_offset(&self, arena: &A, idx: usize) -> u32; + + /// ## Safety + /// + /// - The caller must ensure that the node is allocated by the arena. + /// - The caller must ensure that the offset is less than the capacity of the arena and larger than 0. + unsafe fn cas_prev_offset( + &self, + arena: &A, + idx: usize, + current: u32, + new: u32, + success: Ordering, + failure: Ordering, + ) -> Result; + + /// ## Safety + /// + /// - The caller must ensure that the node is allocated by the arena. + /// - The caller must ensure that the offset is less than the capacity of the arena and larger than 0. + unsafe fn cas_next_offset( + &self, + arena: &A, + idx: usize, + current: u32, + new: u32, + success: Ordering, + failure: Ordering, + ) -> Result; + + /// ## Safety + /// - the pointer must be valid + unsafe fn as_ref(&self, arena: &A) -> &Self::Node; + + /// ## Safety + /// - the pointer must be valid + unsafe fn as_mut(&self, arena: &A) -> &mut Self::Node; + } + + pub trait Header: core::fmt::Debug { + fn new(magic_version: u16) -> Self; + + fn magic_version(&self) -> u16; + + fn max_version(&self) -> u64; + + fn min_version(&self) -> u64; + + fn height(&self) -> u8; + + fn len(&self) -> u32; + + fn increase_len(&self); + + fn update_max_version(&self, version: u64); + + fn update_min_version(&self, version: u64); + + fn compare_exchange_height_weak( + &self, + current: u8, + new: u8, + success: Ordering, + failure: Ordering, + ) -> Result; + } + + pub trait Sealed: + Sized + Clone + core::fmt::Debug + core::ops::Deref + { + type Header: Header; + + type Node: Node; + + type Trailer: Trailer; + + type Allocator: ArenaAllocator; + + fn reserved_slice(&self) -> &[u8] { + 
ArenaAllocator::reserved_slice(core::ops::Deref::deref(self)) + } + + unsafe fn reserved_slice_mut(&self) -> &mut [u8] { + ArenaAllocator::reserved_slice_mut(core::ops::Deref::deref(self)) + } + + fn new(arena_opts: ArenaOptions, opts: Options) -> Result; + + /// Creates a new ARENA backed by an anonymous mmap with the given capacity. + #[cfg(all(feature = "memmap", not(target_family = "wasm")))] + #[cfg_attr(docsrs, doc(cfg(all(feature = "memmap", not(target_family = "wasm")))))] + fn map_anon( + arena_opts: ArenaOptions, + mmap_options: MmapOptions, + opts: Options, + ) -> std::io::Result; + + /// Creates a new ARENA backed by a mmap with the given options. + #[cfg(all(feature = "memmap", not(target_family = "wasm")))] + #[cfg_attr(docsrs, doc(cfg(all(feature = "memmap", not(target_family = "wasm")))))] + unsafe fn map_mut>( + path: P, + arena_opts: ArenaOptions, + open_options: OpenOptions, + mmap_options: MmapOptions, + opts: Options, + ) -> std::io::Result; + + /// Opens a read only ARENA backed by a mmap with the given capacity. + #[cfg(all(feature = "memmap", not(target_family = "wasm")))] + #[cfg_attr(docsrs, doc(cfg(all(feature = "memmap", not(target_family = "wasm")))))] + unsafe fn map>( + path: P, + arena_opts: ArenaOptions, + open_options: OpenOptions, + mmap_options: MmapOptions, + opts: Options, + ) -> std::io::Result; + + /// Creates a new ARENA backed by a mmap with the given options. + #[cfg(all(feature = "memmap", not(target_family = "wasm")))] + #[cfg_attr(docsrs, doc(cfg(all(feature = "memmap", not(target_family = "wasm")))))] + unsafe fn map_mut_with_path_builder( + path_builder: PB, + arena_opts: ArenaOptions, + open_options: OpenOptions, + mmap_options: MmapOptions, + opts: Options, + ) -> Result> + where + PB: FnOnce() -> Result; + + /// Opens a read only ARENA backed by a mmap with the given capacity. 
+ #[cfg(all(feature = "memmap", not(target_family = "wasm")))] + #[cfg_attr(docsrs, doc(cfg(all(feature = "memmap", not(target_family = "wasm")))))] + unsafe fn map_with_path_builder( + path_builder: PB, + arena_opts: ArenaOptions, + open_options: OpenOptions, + mmap_options: MmapOptions, + opts: Options, + ) -> Result> + where + PB: FnOnce() -> Result; + + fn align_offset(offset: u32) -> u32 { + rarena_allocator::align_offset::(offset) + } + + fn fetch_vacant_key<'a, 'b: 'a, E>( + &'a self, + key_size: u32, + key: impl FnOnce(&mut VacantBuffer<'a>) -> Result<(), E>, + ) -> Result<(u32, VacantBuffer<'a>), Either> { + let (key_offset, key_size) = self + .alloc_bytes(key_size) + .map(|mut b| { + unsafe { + b.detach(); + } + (b.offset(), b.capacity()) + }) + .map_err(|e| Either::Right(e.into()))?; + + let mut vk = unsafe { + VacantBuffer::new( + key_size, + NonNull::new_unchecked(self.get_pointer_mut(key_offset)), + ) + }; + key(&mut vk) + .map_err(|e| { + unsafe { + self.dealloc(key_offset as u32, key_size as u32); + } + Either::Left(e) + }) + .map(|_| (key_offset as u32, vk)) + } + + #[inline] + unsafe fn fill_vacant_key<'a, E>( + &'a self, + size: u32, + offset: u32, + f: impl FnOnce(&mut VacantBuffer<'a>) -> Result<(), E>, + ) -> Result<(u32, Pointer), E> { + let buf = self.get_pointer_mut(offset as usize); + let mut oval = VacantBuffer::new(size as usize, NonNull::new_unchecked(buf)); + if let Err(e) = f(&mut oval) { + self.dealloc(offset, size); + return Err(e); + } + + let len = oval.len(); + let remaining = oval.remaining(); + if remaining != 0 { + #[cfg(feature = "tracing")] + tracing::warn!("vacant value is not fully filled, remaining {remaining} bytes"); + let deallocated = self.dealloc(offset + len as u32, remaining as u32); + if deallocated { + return Ok(( + oval.len() as u32, + Pointer::new(offset, size - remaining as u32), + )); + } + } + Ok((oval.len() as u32, Pointer::new(offset, size))) + } + + #[inline] + unsafe fn fill_vacant_value<'a, E>( + &'a 
self, + offset: u32, + size: u32, + value_size: u32, + value_offset: u32, + f: impl FnOnce(&mut VacantBuffer<'a>) -> Result<(), E>, + ) -> Result<(u32, Pointer), E> { + let buf = self.get_pointer_mut(value_offset as usize); + let mut oval = VacantBuffer::new(value_size as usize, NonNull::new_unchecked(buf)); + if let Err(e) = f(&mut oval) { + self.dealloc(offset, size); + return Err(e); + } + + let len = oval.len(); + let remaining = oval.remaining(); + if remaining != 0 { + #[cfg(feature = "tracing")] + tracing::warn!("vacant value is not fully filled, remaining {remaining} bytes"); + let deallocated = self.dealloc(value_offset + len as u32, remaining as u32); + + if deallocated { + return Ok(( + oval.len() as u32, + Pointer::new(offset, size - remaining as u32), + )); + } + } + + Ok((oval.len() as u32, Pointer::new(offset, size))) + } + + #[inline] + fn allocate_header(&self, magic_version: u16) -> Result, ArenaError> { + // Safety: meta does not need to be dropped, and it is recoverable. 
+ unsafe { + let mut meta = self.alloc::()?; + meta.detach(); + + meta.write(Self::Header::new(magic_version)); + Ok(meta.as_mut_ptr()) + } + } + + #[inline] + fn allocate_pure_node( + &self, + height: u32, + ) -> Result, ArenaError> { + self.alloc_aligned_bytes::( + height * mem::size_of::<::Link>() as u32, + ) + } + + /// Allocates a `Node`, key, trailer and value + fn allocate_entry_node<'a, 'b: 'a, E>( + &'a self, + version: Version, + height: u32, + trailer: ::Trailer, + key_builder: KeyBuilder) -> Result<(), E>>, + value_builder: ValueBuilder) -> Result<(), E>>, + ) -> Result<(::Pointer, Deallocator), Either> { + let (key_size, kf) = key_builder.into_components(); + let (value_size, vf) = value_builder.into_components(); + + self + .check_node_size(height, key_size.to_u32(), value_size) + .map_err(Either::Right)?; + + unsafe { + let mut node = self + .allocate_pure_node(height) + .map_err(|e| Either::Right(e.into()))?; + let node_ptr = node.as_mut_ptr().cast::(); + let node_offset = node.offset(); + + let mut key = self + .alloc_bytes(key_size.to_u32()) + .map_err(|e| Either::Right(e.into()))?; + let key_offset = key.offset(); + let key_cap = key.capacity(); + let mut trailer_and_value = self + .alloc_aligned_bytes::<::Trailer>(value_size) + .map_err(|e| Either::Right(e.into()))?; + let trailer_offset = trailer_and_value.offset(); + let trailer_ptr = trailer_and_value + .as_mut_ptr() + .cast::<::Trailer>(); + trailer_ptr.write(trailer); + + let value_offset = + (trailer_offset + mem::size_of::<::Trailer>()) as u32; + + // Safety: the node is well aligned + let node_ref = &mut *node_ptr; + node_ref.set_value_pointer(trailer_offset as u32, value_size); + node_ref.set_key_offset(key_offset as u32); + node_ref.set_key_size_and_height(encode_key_size_and_height(key_cap as u32, height as u8)); + node_ref.set_version(version); + + key.detach(); + let (_, key_deallocate_info) = self + .fill_vacant_key(key_cap as u32, key_offset as u32, kf) + .map_err(Either::Left)?; 
+ trailer_and_value.detach(); + let (_, value_deallocate_info) = self + .fill_vacant_value( + trailer_offset as u32, + trailer_and_value.capacity() as u32, + value_size, + value_offset, + vf, + ) + .map_err(Either::Left)?; + node.detach(); + + let deallocator = Deallocator { + node: Some(Pointer::new(node_offset as u32, node.capacity() as u32)), + key: Some(key_deallocate_info), + value: Some(value_deallocate_info), + }; + + Ok((NodePointer::new(node_offset as u32), deallocator)) + } + } + + /// Allocates a `Node` and trailer + fn allocate_node_in<'a, 'b: 'a, E>( + &'a self, + version: Version, + height: u32, + trailer: ::Trailer, + key_offset: u32, + key_size: u32, + value_size: u32, + ) -> Result<(::Pointer, Deallocator), Either> { + self + .check_node_size(height, key_size, value_size) + .map_err(Either::Right)?; + + unsafe { + let mut node = self + .alloc_aligned_bytes::( + height * mem::size_of::<::Link>() as u32, + ) + .map_err(|e| Either::Right(e.into()))?; + let node_ptr = node.as_mut_ptr().cast::(); + let node_offset = node.offset(); + + let mut trailer_ref = self + .alloc::<::Trailer>() + .map_err(|e| Either::Right(e.into()))?; + let trailer_offset = trailer_ref.offset(); + trailer_ref.write(trailer); + + // Safety: the node is well aligned + let node_ref = &mut *node_ptr; + node_ref.set_value_pointer(trailer_offset as u32, value_size); + node_ref.set_key_offset(key_offset); + node_ref.set_key_size_and_height(encode_key_size_and_height(key_size, height as u8)); + node_ref.set_version(version); + + trailer_ref.detach(); + node.detach(); + + let deallocator = Deallocator { + node: Some(Pointer::new(node_offset as u32, node.capacity() as u32)), + key: None, + value: Some(Pointer::new( + trailer_offset as u32, + mem::size_of::<::Trailer>() as u32, + )), + }; + + Ok((NodePointer::new(node_offset as u32), deallocator)) + } + } + + /// Allocates a `Node`, key and trailer + fn allocate_key_node<'a, 'b: 'a, E>( + &'a self, + version: Version, + height: u32, + 
trailer: ::Trailer, + key_size: u32, + kf: impl FnOnce(&mut VacantBuffer<'a>) -> Result<(), E>, + value_size: u32, + ) -> Result<(::Pointer, Deallocator), Either> { + self + .check_node_size(height, key_size, value_size) + .map_err(Either::Right)?; + + unsafe { + let mut node = self + .alloc_aligned_bytes::( + height * mem::size_of::<::Link>() as u32, + ) + .map_err(|e| Either::Right(e.into()))?; + let node_ptr = node.as_mut_ptr().cast::(); + let node_offset = node.offset(); + + let mut key = self + .alloc_bytes(key_size) + .map_err(|e| Either::Right(e.into()))?; + let key_offset = key.offset(); + let key_cap = key.capacity(); + + let mut trailer_ref = self + .alloc::<::Trailer>() + .map_err(|e| Either::Right(e.into()))?; + let trailer_offset = trailer_ref.offset(); + trailer_ref.write(trailer); + + // Safety: the node is well aligned + let node_ref = &mut *node_ptr; + node_ref.set_value_pointer(trailer_offset as u32, value_size); + node_ref.set_key_offset(key_offset as u32); + node_ref.set_key_size_and_height(encode_key_size_and_height(key_cap as u32, height as u8)); + node_ref.set_version(version); + + key.detach(); + let (_, key_deallocate_info) = self + .fill_vacant_key(key_cap as u32, key_offset as u32, kf) + .map_err(Either::Left)?; + + trailer_ref.detach(); + node.detach(); + + let deallocator = Deallocator { + node: Some(Pointer::new(node_offset as u32, node.capacity() as u32)), + key: Some(key_deallocate_info), + value: Some(Pointer::new( + trailer_offset as u32, + mem::size_of::<::Trailer>() as u32, + )), + }; + + Ok((NodePointer::new(node_offset as u32), deallocator)) + } + } + + /// Allocates a `Node`, trailer and value + fn allocate_value_node<'a, 'b: 'a, E>( + &'a self, + version: Version, + height: u32, + trailer: ::Trailer, + key_size: u32, + key_offset: u32, + value_builder: ValueBuilder) -> Result<(), E>>, + ) -> Result<(::Pointer, Deallocator), Either> { + let (value_size, vf) = value_builder.into_components(); + self + .check_node_size(height, 
key_size, value_size) + .map_err(Either::Right)?; + + unsafe { + let mut node = self + .alloc_aligned_bytes::( + height * mem::size_of::<::Link>() as u32, + ) + .map_err(|e| Either::Right(e.into()))?; + let node_ptr = node.as_mut_ptr().cast::(); + let node_offset = node.offset(); + + let mut trailer_and_value = self + .alloc_aligned_bytes::<::Trailer>(value_size) + .map_err(|e| Either::Right(e.into()))?; + let trailer_offset = trailer_and_value.offset(); + let trailer_ptr = trailer_and_value + .as_mut_ptr() + .cast::<::Trailer>(); + trailer_ptr.write(trailer); + let value_offset = + (trailer_offset + mem::size_of::<::Trailer>()) as u32; + + // Safety: the node is well aligned + let node_ref = &mut *node_ptr; + node_ref.set_value_pointer(trailer_offset as u32, value_size); + node_ref.set_key_offset(key_offset); + node_ref.set_key_size_and_height(encode_key_size_and_height(key_size, height as u8)); + node_ref.set_version(version); + + trailer_and_value.detach(); + let (_, value_deallocate_info) = self + .fill_vacant_value( + trailer_offset as u32, + trailer_and_value.capacity() as u32, + value_size, + value_offset, + vf, + ) + .map_err(Either::Left)?; + + node.detach(); + + let deallocator = Deallocator { + node: Some(Pointer::new(node_offset as u32, node.capacity() as u32)), + key: None, + value: Some(value_deallocate_info), + }; + + Ok((NodePointer::new(node_offset as u32), deallocator)) + } + } + + fn allocate_full_node( + &self, + max_height: u8, + ) -> Result<::Pointer, ArenaError> { + // Safety: node, links and trailer do not need to be dropped, and they are recoverable. + unsafe { + let mut node = self.alloc_aligned_bytes::( + ((max_height as usize) * mem::size_of::<::Link>()) as u32, + )?; + + // Safety: node and trailer do not need to be dropped. 
+ node.detach(); + let node_offset = node.offset(); + + let trailer_offset = if mem::size_of::<::Trailer>() != 0 { + let mut trailer = self.alloc::<::Trailer>()?; + trailer.detach(); + trailer.offset() + } else { + self.allocated() + }; + + let node_ptr = node.as_mut_ptr().cast::(); + let full_node = ::full(trailer_offset as u32, max_height); + ptr::write(node_ptr, full_node); + + Ok(NodePointer::new(node_offset as u32)) + } + } + + #[inline] + fn allocate_and_update_value<'a, E>( + &'a self, + node: &Self::Node, + trailer: ::Trailer, + value_builder: ValueBuilder) -> Result<(), E>>, + ) -> Result<(), Either> { + let (value_size, f) = value_builder.into_components(); + let mut bytes = self + .alloc_aligned_bytes::<::Trailer>(value_size) + .map_err(|e| Either::Right(e.into()))?; + let trailer_ptr = bytes.as_mut_ptr().cast::<::Trailer>(); + let trailer_offset = bytes.offset(); + let value_offset = trailer_offset + mem::size_of::<::Trailer>(); + + let mut oval = unsafe { + VacantBuffer::new( + value_size as usize, + NonNull::new_unchecked(self.get_pointer_mut(value_offset)), + ) + }; + f(&mut oval).map_err(Either::Left)?; + + let remaining = oval.remaining(); + let mut discard = 0; + if remaining != 0 + && unsafe { !self.dealloc((value_offset + oval.len()) as u32, remaining as u32) } + { + discard += remaining; + } + + unsafe { + bytes.detach(); + trailer_ptr.write(trailer); + } + + if discard != 0 { + self.increase_discarded(discard as u32); + } + + let (_, old_len) = node.value_pointer().swap(trailer_offset as u32, value_size); + if old_len != <::ValuePointer as ValuePointer>::REMOVE { + self.increase_discarded(old_len); + } + + Ok(()) + } + + fn max_key_size(&self) -> u32; + + fn max_value_size(&self) -> u32; + + fn max_height(&self) -> u32; + + #[inline] + fn check_node_size( + &self, + height: u32, + key_size: u32, + mut value_size: u32, + ) -> Result<(), Error> { + if height < 1 || height > self.max_height() { + panic!("height cannot be less than one or greater 
than the max height"); + } + + if key_size > self.max_key_size() { + return Err(Error::KeyTooLarge(key_size as u64)); + } + + value_size = if value_size == <::ValuePointer as ValuePointer>::REMOVE { + mem::size_of::() as u32 + } else { + value_size + }; + + if value_size > self.max_value_size() { + return Err(Error::ValueTooLarge(value_size as u64)); + } + + let entry_size = + (value_size as u64 + key_size as u64) + ::size(height as u8) as u64; + if entry_size > u32::MAX as u64 { + return Err(Error::EntryTooLarge(entry_size)); + } + + Ok(()) + } + } +} + +/// Generic ARENA allocator +#[derive(Debug)] +pub struct GenericAllocator { + arena: A, + max_key_size: u32, + max_value_size: u32, + max_height: u32, + _m: PhantomData<(H, N)>, +} + +impl Clone for GenericAllocator { + fn clone(&self) -> Self { + Self { + arena: self.arena.clone(), + max_key_size: self.max_key_size, + max_value_size: self.max_value_size, + max_height: self.max_height, + _m: PhantomData, + } + } +} + +impl core::ops::Deref for GenericAllocator { + type Target = A; + + fn deref(&self) -> &A { + &self.arena + } +} + +impl Sealed + for GenericAllocator +{ + type Header = H; + + type Node = N; + + type Trailer = ::Trailer; + + type Allocator = A; + + fn new(arena_opts: ArenaOptions, opts: Options) -> Result { + A::new(arena_opts) + .map(|arena| Self { + arena, + max_key_size: opts.max_key_size().into(), + max_value_size: opts.max_value_size(), + max_height: opts.max_height().into(), + _m: PhantomData, + }) + .map_err(Into::into) + } + + #[cfg(all(feature = "memmap", not(target_family = "wasm")))] + #[cfg_attr(docsrs, doc(cfg(all(feature = "memmap", not(target_family = "wasm")))))] + fn map_anon( + arena_opts: rarena_allocator::ArenaOptions, + mmap_options: MmapOptions, + opts: Options, + ) -> std::io::Result { + A::map_anon(arena_opts, mmap_options).map(|arena| Self { + arena, + max_key_size: opts.max_key_size().into(), + max_value_size: opts.max_value_size(), + max_height: opts.max_height().into(), 
+ _m: PhantomData, + }) + } + + #[cfg(all(feature = "memmap", not(target_family = "wasm")))] + #[cfg_attr(docsrs, doc(cfg(all(feature = "memmap", not(target_family = "wasm")))))] + unsafe fn map_mut>( + path: P, + arena_opts: rarena_allocator::ArenaOptions, + open_options: OpenOptions, + mmap_options: MmapOptions, + opts: Options, + ) -> std::io::Result { + A::map_mut(path, arena_opts, open_options, mmap_options).map(|arena| Self { + arena, + max_key_size: opts.max_key_size().into(), + max_value_size: opts.max_value_size(), + max_height: opts.max_height().into(), + _m: PhantomData, + }) + } + + #[cfg(all(feature = "memmap", not(target_family = "wasm")))] + #[cfg_attr(docsrs, doc(cfg(all(feature = "memmap", not(target_family = "wasm")))))] + unsafe fn map>( + path: P, + arena_options: rarena_allocator::ArenaOptions, + open_options: OpenOptions, + mmap_options: MmapOptions, + opts: Options, + ) -> std::io::Result { + A::map(path, arena_options, open_options, mmap_options).map(|arena| Self { + arena, + max_key_size: opts.max_key_size().into(), + max_value_size: opts.max_value_size(), + max_height: opts.max_height().into(), + _m: PhantomData, + }) + } + + #[cfg(all(feature = "memmap", not(target_family = "wasm")))] + #[cfg_attr(docsrs, doc(cfg(all(feature = "memmap", not(target_family = "wasm")))))] + unsafe fn map_mut_with_path_builder( + path_builder: PB, + arena_opts: rarena_allocator::ArenaOptions, + open_options: OpenOptions, + mmap_options: MmapOptions, + opts: Options, + ) -> Result> + where + PB: FnOnce() -> Result, + { + A::map_mut_with_path_builder(path_builder, arena_opts, open_options, mmap_options).map( + |arena| Self { + arena, + max_key_size: opts.max_key_size().into(), + max_value_size: opts.max_value_size(), + max_height: opts.max_height().into(), + _m: PhantomData, + }, + ) + } + + #[cfg(all(feature = "memmap", not(target_family = "wasm")))] + #[cfg_attr(docsrs, doc(cfg(all(feature = "memmap", not(target_family = "wasm")))))] + unsafe fn 
map_with_path_builder( + path_builder: PB, + arena_options: rarena_allocator::ArenaOptions, + open_options: OpenOptions, + mmap_options: MmapOptions, + opts: Options, + ) -> Result> + where + PB: FnOnce() -> Result, + { + A::map_with_path_builder(path_builder, arena_options, open_options, mmap_options).map(|arena| { + Self { + arena, + max_key_size: opts.max_key_size().into(), + max_value_size: opts.max_value_size(), + max_height: opts.max_height().into(), + _m: PhantomData, + } + }) + } + + fn max_key_size(&self) -> u32 { + self.max_key_size + } + + fn max_value_size(&self) -> u32 { + self.max_value_size + } + + fn max_height(&self) -> u32 { + self.max_height + } +} diff --git a/src/base.rs b/src/base.rs new file mode 100644 index 0000000..406ef9f --- /dev/null +++ b/src/base.rs @@ -0,0 +1,1482 @@ +use core::{mem, ptr::NonNull}; +use std::boxed::Box; + +use either::Either; +use options::CompressionPolicy; +use rarena_allocator::Allocator as _; + +use super::{allocator::*, common::*, *}; + +#[cfg(all(feature = "memmap", not(target_family = "wasm")))] +use error::{bad_magic_version, bad_version, invalid_data}; + +mod api; + +mod entry; +pub use entry::*; + +mod iterator; +pub use iterator::*; + +type UpdateOk<'a, 'b, A> = Either< + Option>, + Result, VersionedEntryRef<'a, A>>, +>; + +/// A fast, cocnurrent map implementation based on skiplist that supports forward +/// and backward iteration. +#[derive(Debug)] +pub struct SkipList { + pub(crate) arena: A, + meta: NonNull, + head: ::Pointer, + tail: ::Pointer, + data_offset: u32, + #[cfg(all(feature = "memmap", not(target_family = "wasm")))] + on_disk: bool, + opts: Options, + /// If set to true by tests, then extra delays are added to make it easier to + /// detect unusual race conditions. 
+ #[cfg(all(test, feature = "std"))] + yield_now: bool, + + cmp: C, +} + +unsafe impl Send for SkipList +where + A: Allocator + Send, + C: Comparator + Send, +{ +} +unsafe impl Sync for SkipList +where + A: Allocator + Sync, + C: Comparator + Sync, +{ +} + +impl Clone for SkipList +where + A: Allocator, + C: Clone, +{ + fn clone(&self) -> Self { + Self { + arena: self.arena.clone(), + meta: self.meta, + #[cfg(all(feature = "memmap", not(target_family = "wasm")))] + on_disk: self.on_disk, + head: self.head, + tail: self.tail, + data_offset: self.data_offset, + opts: self.opts, + #[cfg(all(test, feature = "std"))] + yield_now: self.yield_now, + cmp: self.cmp.clone(), + } + } +} + +impl Drop for SkipList +where + A: Allocator, +{ + #[allow(clippy::collapsible_if)] + fn drop(&mut self) { + if self.arena.refs() == 1 { + if !self.opts.unify() { + unsafe { + let _ = Box::from_raw(self.meta.as_ptr()); + } + } + + #[cfg(all(feature = "memmap", not(target_family = "wasm"), not(windows)))] + if self.arena.is_mmap() { + let _ = unsafe { self.arena.munlock(0, self.arena.page_size()) }; + } + } + } +} + +impl SkipList +where + A: Allocator, +{ + fn new_in(arena: A, cmp: C, opts: Options) -> Result { + let data_offset = Self::check_capacity(&arena, opts.max_height().into())?; + if arena.read_only() { + let (meta, head, tail) = Self::get_pointers(&arena); + + return Ok(Self::construct( + arena, + meta, + head, + tail, + data_offset, + opts, + cmp, + )); + } + + let meta = if opts.unify() { + arena.allocate_header(opts.magic_version())? + } else { + unsafe { + NonNull::new_unchecked(Box::into_raw(Box::new(::new( + opts.magic_version(), + )))) + } + }; + + let max_height: u8 = opts.max_height().into(); + let head = arena.allocate_full_node(max_height)?; + let tail = arena.allocate_full_node(max_height)?; + + // Safety: + // We will always allocate enough space for the head node and the tail node. + unsafe { + // Link all head/tail levels together. 
+ for i in 0..(max_height as usize) { + let head_link = head.tower::(&arena, i); + let tail_link = tail.tower::(&arena, i); + head_link.store_next_offset(tail.offset(), Ordering::Relaxed); + tail_link.store_prev_offset(head.offset(), Ordering::Relaxed); + } + } + + Ok(Self::construct( + arena, + meta, + head, + tail, + data_offset, + opts, + cmp, + )) + } + + /// Checks if the arena has enough capacity to store the skiplist, + /// and returns the data offset. + #[inline] + fn check_capacity(arena: &A, max_height: u8) -> Result { + let offset = arena.data_offset(); + + let alignment = mem::align_of::(); + let meta_offset = (offset + alignment - 1) & !(alignment - 1); + let meta_end = meta_offset + mem::size_of::(); + + let alignment = mem::align_of::(); + let head_offset = (meta_end + alignment - 1) & !(alignment - 1); + let head_end = head_offset + + mem::size_of::() + + mem::size_of::<::Link>() * max_height as usize; + + let trailer_alignment = mem::align_of::(); + let trailer_size = mem::size_of::(); + let trailer_end = if trailer_size != 0 { + let trailer_offset = (head_end + trailer_alignment - 1) & !(trailer_alignment - 1); + trailer_offset + trailer_size + } else { + head_end + }; + + let tail_offset = (trailer_end + alignment - 1) & !(alignment - 1); + let tail_end = tail_offset + + mem::size_of::() + + mem::size_of::<::Link>() * max_height as usize; + let trailer_end = if trailer_size != 0 { + let trailer_offset = (tail_end + trailer_alignment - 1) & !(trailer_alignment - 1); + trailer_offset + trailer_size + } else { + tail_end + }; + if trailer_end > arena.capacity() { + return Err(Error::ArenaTooSmall); + } + + Ok(trailer_end as u32) + } + + #[inline] + fn get_pointers( + arena: &A, + ) -> ( + NonNull, + ::Pointer, + ::Pointer, + ) { + unsafe { + let offset = arena.data_offset(); + let meta = arena.get_aligned_pointer::(offset); + + let offset = arena.offset(meta as _) + mem::size_of::(); + let head_ptr = arena.get_aligned_pointer::(offset); + let 
head_offset = arena.offset(head_ptr as _); + let head = <::Pointer as NodePointer>::new(head_offset as u32); + + let (trailer_offset, _) = head.as_ref(arena).value_pointer().load(); + let offset = trailer_offset as usize + mem::size_of::(); + let tail_ptr = arena.get_aligned_pointer::(offset); + let tail_offset = arena.offset(tail_ptr as _); + let tail = <::Pointer as NodePointer>::new(tail_offset as u32); + (NonNull::new_unchecked(meta as _), head, tail) + } + } + + #[inline] + fn construct( + arena: A, + meta: NonNull, + head: ::Pointer, + tail: ::Pointer, + data_offset: u32, + opts: Options, + cmp: C, + ) -> Self { + Self { + #[cfg(all(feature = "memmap", not(target_family = "wasm")))] + on_disk: arena.is_ondisk(), + arena, + meta, + head, + tail, + data_offset, + opts, + #[cfg(all(test, feature = "std"))] + yield_now: false, + cmp, + } + } + + #[inline] + const fn meta(&self) -> &A::Header { + // Safety: the pointer is well aligned and initialized. + unsafe { self.meta.as_ref() } + } +} + +impl SkipList +where + A: Allocator, +{ + fn new_node<'a, 'b: 'a, E>( + &'a self, + version: Version, + height: u32, + key: &Key<'a, 'b, A>, + value_builder: Option) -> Result<(), E>>>, + trailer: A::Trailer, + ) -> Result<(::Pointer, Deallocator), Either> { + let (nd, deallocator) = match key { + Key::Occupied(key) => { + let kb = KeyBuilder::new(KeySize::from_u32_unchecked(key.len() as u32), |buf| { + buf + .put_slice(key) + .expect("buffer must be large enough for key"); + Ok(()) + }); + let vb = value_builder.unwrap(); + self + .arena + .allocate_entry_node::(version, height, trailer, kb, vb)? + } + Key::Vacant { buf: key, offset } => self.arena.allocate_value_node::( + version, + height, + trailer, + key.len() as u32, + *offset, + value_builder.unwrap(), + )?, + Key::Pointer { offset, len, .. 
} => self.arena.allocate_value_node::( + version, + height, + trailer, + *len, + *offset, + value_builder.unwrap(), + )?, + Key::Remove(key) => self.arena.allocate_key_node::( + version, + height, + trailer, + key.len() as u32, + |buf| { + buf + .put_slice(key) + .expect("buffer must be large enough for key"); + Ok(()) + }, + ::ValuePointer::REMOVE, + )?, + Key::RemoveVacant { buf: key, offset } => self.arena.allocate_node_in::( + version, + height, + trailer, + *offset, + key.len() as u32, + ::ValuePointer::REMOVE, + )?, + Key::RemovePointer { offset, len, .. } => self.arena.allocate_node_in::( + version, + height, + trailer, + *offset, + *len, + ::ValuePointer::REMOVE, + )?, + }; + + // Try to increase self.height via CAS. + let meta = self.meta(); + let mut list_height = meta.height(); + while height as u8 > list_height { + match meta.compare_exchange_height_weak( + list_height, + height as u8, + Ordering::SeqCst, + Ordering::Acquire, + ) { + // Successfully increased skiplist.height. + Ok(_) => break, + Err(h) => list_height = h, + } + } + Ok((nd, deallocator)) + } +} + +impl SkipList +where + A: Allocator, + C: Comparator, +{ + /// ## Safety + /// + /// - The caller must ensure that the node is allocated by the arena. + #[inline] + unsafe fn get_prev( + &self, + mut nd: ::Pointer, + height: usize, + ignore_invalid_trailer: bool, + ) -> ::Pointer { + loop { + if nd.is_null() { + return ::Pointer::NULL; + } + + if nd.offset() == self.head.offset() { + return self.head; + } + + let offset = nd.prev_offset(&self.arena, height); + let prev = ::Pointer::new(offset); + let prev_node = prev.as_ref(&self.arena); + + if ignore_invalid_trailer && !prev_node.get_trailer(&self.arena).is_valid() { + nd = prev; + continue; + } + + return prev; + } + } + + /// ## Safety + /// + /// - The caller must ensure that the node is allocated by the arena. 
+ #[inline] + unsafe fn get_next( + &self, + mut nptr: ::Pointer, + height: usize, + ignore_invalid_trailer: bool, + ) -> ::Pointer { + loop { + if nptr.is_null() { + return ::Pointer::NULL; + } + + if nptr.offset() == self.tail.offset() { + return self.tail; + } + + let offset = nptr.next_offset(&self.arena, height); + let next = ::Pointer::new(offset); + let next_node = next.as_ref(&self.arena); + + if ignore_invalid_trailer && !next_node.get_trailer(&self.arena).is_valid() { + nptr = next; + continue; + } + + return next; + } + } + + /// ## Safety + /// + /// - The caller must ensure that the node is allocated by the arena. + #[inline] + unsafe fn get_next_allow_invalid( + &self, + nptr: ::Pointer, + height: usize, + ) -> ::Pointer { + if nptr.is_null() { + return ::Pointer::NULL; + } + + let offset = nptr.next_offset(&self.arena, height); + ::Pointer::new(offset) + } + + /// Returns the first entry in the map. + fn first_in( + &self, + version: Version, + ignore_invalid_trailer: bool, + ) -> Option<::Pointer> { + // Safety: head node was definitely allocated by self.arena + let nd = unsafe { self.get_next(self.head, 0, ignore_invalid_trailer) }; + + if nd.is_null() || nd.offset() == self.tail.offset() { + return None; + } + + unsafe { + let node = nd.as_ref(&self.arena); + let curr_key = node.get_key(&self.arena); + self.ge(version, curr_key, ignore_invalid_trailer) + } + } + + /// Returns the last entry in the map. + fn last_in( + &self, + version: Version, + ignore_invalid_trailer: bool, + ) -> Option<::Pointer> { + // Safety: tail node was definitely allocated by self.arena + let nd = unsafe { self.get_prev(self.tail, 0, ignore_invalid_trailer) }; + + if nd.is_null() || nd.offset() == self.head.offset() { + return None; + } + + unsafe { + let node = nd.as_ref(&self.arena); + let curr_key = node.get_key(&self.arena); + self.le(version, curr_key, ignore_invalid_trailer) + } + } + + /// Returns the entry greater or equal to the given key, if it exists. 
+ /// + /// e.g. + /// + /// - If k1 < k2 < k3, key is equal to k1, then the entry contains k2 will be returned. + /// - If k1 < k2 < k3, and k1 < key < k2, then the entry contains k2 will be returned. + pub(crate) fn gt<'a, 'b: 'a>( + &'a self, + version: Version, + key: &'b [u8], + ignore_invalid_trailer: bool, + ) -> Option<::Pointer> { + unsafe { + let (n, _) = self.find_near(Version::MIN, key, false, false, ignore_invalid_trailer); // find the key with the max version. + + let n = n?; + + if n.is_null() || n.offset() == self.tail.offset() { + return None; + } + + self.find_next_max_version(n, version, ignore_invalid_trailer) + } + } + + /// Returns the entry less than the given key, if it exists. + /// + /// e.g. + /// + /// - If k1 < k2 < k3, and key is equal to k3, then the entry contains k2 will be returned. + /// - If k1 < k2 < k3, and k2 < key < k3, then the entry contains k2 will be returned. + pub(crate) fn lt<'a, 'b: 'a>( + &'a self, + version: Version, + key: &'b [u8], + ignore_invalid_trailer: bool, + ) -> Option<::Pointer> { + unsafe { + let (n, _) = self.find_near(Version::MAX, key, true, false, ignore_invalid_trailer); // find less or equal. + + let n = n?; + if n.is_null() || n.offset() == self.head.offset() { + return None; + } + + self.find_prev_max_version(n, version, ignore_invalid_trailer) + } + } + + /// Returns the entry greater than or equal to the given key, if it exists. + /// + /// e.g. + /// + /// - If k1 < k2 < k3, key is equal to k1, then the entry contains k1 will be returned. + /// - If k1 < k2 < k3, and k1 < key < k2, then the entry contains k2 will be returned. + pub(crate) fn ge<'a, 'b: 'a>( + &'a self, + version: Version, + key: &'b [u8], + ignore_invalid_trailer: bool, + ) -> Option<::Pointer> { + unsafe { + let (n, _) = self.find_near(Version::MAX, key, false, true, ignore_invalid_trailer); // find the key with the max version. 
+ + let n = n?; + + if n.is_null() || n.offset() == self.tail.offset() { + return None; + } + + self.find_next_max_version(n, version, ignore_invalid_trailer) + } + } + + /// Returns the entry less than or equal to the given key, if it exists. + /// + /// e.g. + /// + /// - If k1 < k2 < k3, and key is equal to k3, then the entry contains k3 will be returned. + /// - If k1 < k2 < k3, and k2 < key < k3, then the entry contains k2 will be returned. + pub(crate) fn le<'a, 'b: 'a>( + &'a self, + version: Version, + key: &'b [u8], + ignore_invalid_trailer: bool, + ) -> Option<::Pointer> { + unsafe { + let (n, _) = self.find_near(Version::MIN, key, true, true, ignore_invalid_trailer); // find less or equal. + + let n = n?; + if n.is_null() || n.offset() == self.head.offset() { + return None; + } + + self.find_prev_max_version(n, version, ignore_invalid_trailer) + } + } + + unsafe fn find_prev_max_version( + &self, + mut curr: ::Pointer, + version: Version, + ignore_invalid_trailer: bool, + ) -> Option<::Pointer> { + let mut prev = self.get_prev(curr, 0, ignore_invalid_trailer); + + loop { + let curr_node = curr.as_ref(&self.arena); + let curr_key = curr_node.get_key(&self.arena); + // if the current version is greater than the given version, we should return. 
+ let version_cmp = curr_node.version().cmp(&version); + if version_cmp == cmp::Ordering::Greater { + return None; + } + + if prev.is_null() || prev.offset() == self.head.offset() { + if let cmp::Ordering::Less | cmp::Ordering::Equal = version_cmp { + return Some(curr); + } + + return None; + } + + let prev_node = prev.as_ref(&self.arena); + let prev_key = prev_node.get_key(&self.arena); + if self.cmp.compare(prev_key, curr_key) == cmp::Ordering::Less { + return Some(curr); + } + + let version_cmp = prev_node.version().cmp(&version); + + if version_cmp == cmp::Ordering::Equal { + return Some(prev); + } + + if version_cmp == cmp::Ordering::Greater { + return Some(curr); + } + + curr = prev; + prev = self.get_prev(curr, 0, ignore_invalid_trailer); + } + } + + unsafe fn find_next_max_version( + &self, + mut curr: ::Pointer, + version: Version, + ignore_invalid_trailer: bool, + ) -> Option<::Pointer> { + let mut next = self.get_next(curr, 0, ignore_invalid_trailer); + + loop { + let curr_node = curr.as_ref(&self.arena); + let curr_key = curr_node.get_key(&self.arena); + // if the current version is less or equal to the given version, we should return. 
+ let version_cmp = curr_node.version().cmp(&version); + if let cmp::Ordering::Less | cmp::Ordering::Equal = version_cmp { + return Some(curr); + } + + if next.is_null() || next.offset() == self.head.offset() { + if let cmp::Ordering::Less | cmp::Ordering::Equal = version_cmp { + return Some(curr); + } + + return None; + } + + let next_node = next.as_ref(&self.arena); + let next_key = next_node.get_key(&self.arena); + let version_cmp = next_node.version().cmp(&version); + if self.cmp.compare(next_key, curr_key) == cmp::Ordering::Greater { + if let cmp::Ordering::Less | cmp::Ordering::Equal = version_cmp { + return Some(curr); + } + + return None; + } + + if let cmp::Ordering::Less | cmp::Ordering::Equal = version_cmp { + if next.offset() == self.tail.offset() { + return None; + } + + return Some(next); + } + + curr = next; + next = self.get_next(curr, 0, ignore_invalid_trailer); + } + } + + /// finds the node near to key. + /// If less=true, it finds rightmost node such that node.key < key (if allow_equal=false) or + /// node.key <= key (if allow_equal=true). + /// If less=false, it finds leftmost node such that node.key > key (if allow_equal=false) or + /// node.key >= key (if allow_equal=true). + /// Returns the node found. The bool returned is true if the node has key equal to given key. + unsafe fn find_near( + &self, + version: Version, + key: &[u8], + less: bool, + allow_equal: bool, + ignore_invalid_trailer: bool, + ) -> (Option<::Pointer>, bool) { + let mut x = self.head; + let mut level = self.meta().height() as usize - 1; + + loop { + // Assume x.key < key. + let next = self.get_next(x, level, ignore_invalid_trailer); + let is_next_null = next.is_null(); + + if is_next_null || next.offset() == self.tail.offset() { + // x.key < key < END OF LIST + if level > 0 { + // Can descend further to iterate closer to the end. + level -= 1; + continue; + } + + // level == 0. Can't descend further. Let's return something that makes sense. 
+ if !less { + return (None, false); + } + + // Try to return x. Make sure it is not a head node. + if x.offset() == self.head.offset() { + return (None, false); + } + + return (Some(x), false); + } + + let next_node = next.as_ref(&self.arena); + let next_key = next_node.get_key(&self.arena); + let cmp = self + .cmp + .compare(key, next_key) + .then_with(|| next_node.version().cmp(&version)); + + match cmp { + cmp::Ordering::Greater => { + // x.key < next.key < key. We can continue to move right. + x = next; + continue; + } + cmp::Ordering::Equal => { + // x.key < key == next.key. + if allow_equal { + return (Some(next), true); + } + + if !less { + // We want >, so go to base level to grab the next bigger node. + return (Some(self.get_next(next, 0, ignore_invalid_trailer)), false); + } + + // We want <. If not base level, we should go closer in the next level. + if level > 0 { + level -= 1; + continue; + } + + // On base level, Return x. + return (Some(x), false); + } + // In other words, x.key < key < next. + cmp::Ordering::Less => { + if level > 0 { + level -= 1; + continue; + } + + // On base level. Need to return something. + if !less { + return (Some(next), false); + } + + // Try to return x. Make sure it is not a head node. + if x.offset() == self.head.offset() { + return (None, false); + } + + return (Some(x), false); + } + } + } + } + + /// Find the place to insert the key. + /// + /// ## Safety: + /// - All of splices in the inserter must be contains node ptrs are allocated by the current skip map. + unsafe fn find_splice<'a, 'b: 'a>( + &'a self, + version: Version, + key: &'b [u8], + ins: &mut Inserter<::Pointer>, + returned_when_found: bool, + ) -> (bool, Option, Option<::Pointer>) { + let list_height = self.meta().height() as u32; + let mut level = 0; + + let mut prev = self.head; + if ins.height < list_height { + // Our cached height is less than the list height, which means there were + // inserts that increased the height of the list. 
Recompute the splice from + // scratch. + ins.height = list_height; + level = ins.height as usize; + } else { + // Our cached height is equal to the list height. + while level < list_height as usize { + let spl = &ins.spl[level]; + if self.get_next_allow_invalid(spl.prev, level).offset() != spl.next.offset() { + level += 1; + // One or more nodes have been inserted between the splice at this + // level. + continue; + } + + if spl.prev.offset() != self.head.offset() + && !self.key_is_after_node(spl.prev, version, key) + { + // Key lies before splice. + level = list_height as usize; + break; + } + + if spl.next.offset() != self.tail.offset() + && !self.key_is_after_node(spl.next, version, key) + { + // Key lies after splice. + level = list_height as usize; + break; + } + + // The splice brackets the key! + prev = spl.prev; + break; + } + } + + let mut found = false; + let mut found_key = None; + for lvl in (0..level).rev() { + let mut fr = self.find_splice_for_level(version, key, lvl, prev); + if fr.splice.next.is_null() { + fr.splice.next = self.tail; + } + + found = fr.found; + if let Some(key) = fr.found_key { + found_key.get_or_insert(key); + } + if found && returned_when_found { + return (found, found_key, fr.curr); + } + ins.spl[lvl] = fr.splice; + } + + (found, found_key, None) + } + + /// Find the splice for the given level. + /// + /// # Safety + /// - `level` is less than `MAX_HEIGHT`. + /// - `start` must be allocated by self's arena. + unsafe fn find_splice_for_level( + &self, + version: Version, + key: &[u8], + level: usize, + start: ::Pointer, + ) -> FindResult<::Pointer> { + let mut prev = start; + + loop { + // Assume prev.key < key. + let next = self.get_next_allow_invalid(prev, level); + if next.offset() == self.tail.offset() { + // Tail node, so done. + return FindResult { + splice: Splice { prev, next }, + found: false, + found_key: None, + curr: None, + }; + } + + // offset is not zero, so we can safely dereference the next node ptr. 
+ let next_node = next.as_ref(&self.arena); + let next_key = next_node.get_key(&self.arena); + + let cmp = self.cmp.compare(key, next_key); + + let mut found_key = None; + + match cmp { + cmp::Ordering::Equal => { + found_key = Some(Pointer { + offset: next_node.key_offset(), + size: next_node.key_size(), + height: Some(next_node.height()), + }); + } + cmp::Ordering::Greater | cmp::Ordering::Less if found_key.is_none() => { + found_key = self.try_get_pointer(next_node, next_key, key); + } + _ => {} + } + + match cmp.then_with(|| next_node.version().cmp(&version)) { + // We are done for this level, since prev.key < key < next.key. + cmp::Ordering::Less => { + return FindResult { + splice: Splice { prev, next }, + found: false, + found_key, + curr: None, + }; + } + // Keep moving right on this level. + cmp::Ordering::Greater => prev = next, + cmp::Ordering::Equal => { + return FindResult { + splice: Splice { prev, next }, + found: true, + found_key, + curr: Some(next), + }; + } + } + } + } + + fn try_get_pointer(&self, next_node: &A::Node, next_key: &[u8], key: &[u8]) -> Option { + match self.opts.compression_policy() { + CompressionPolicy::Fast => { + if next_key.starts_with(key) { + return Some(Pointer { + offset: next_node.key_offset(), + size: key.len() as u32, + height: Some(next_node.height()), + }); + } + } + CompressionPolicy::High => { + if let Some(idx) = memchr::memmem::find(next_key, key) { + return Some(Pointer { + offset: next_node.key_offset() + idx as u32, + size: key.len() as u32, + height: Some(next_node.height()), + }); + } + } + } + None + } + + /// ## Safety + /// - The caller must ensure that the node is allocated by the arena. + /// - The caller must ensure that the node is not null. 
+ unsafe fn key_is_after_node( + &self, + nd: ::Pointer, + version: Version, + key: &[u8], + ) -> bool { + let nd = nd.as_ref(&self.arena); + let nd_key = self + .arena + .get_bytes(nd.key_offset() as usize, nd.key_size() as usize); + + match self.cmp.compare(nd_key, key) { + cmp::Ordering::Less => true, + cmp::Ordering::Greater => false, + cmp::Ordering::Equal => { + matches!(version.cmp(&nd.version()), cmp::Ordering::Less) + } + } + } + + #[inline] + fn check_height_and_ro(&self, height: Height) -> Result<(), Error> { + if self.arena.read_only() { + return Err(Error::read_only()); + } + + let max_height = self.opts.max_height(); + + if height > max_height { + return Err(Error::invalid_height(height, max_height)); + } + Ok(()) + } + + #[allow(clippy::too_many_arguments)] + fn update<'a, 'b: 'a, E>( + &'a self, + version: Version, + trailer: A::Trailer, + height: u32, + key: Key<'a, 'b, A>, + value_builder: Option) -> Result<(), E>>>, + success: Ordering, + failure: Ordering, + mut ins: Inserter<'a, ::Pointer>, + upsert: bool, + ) -> Result, Either> { + let is_remove = key.is_remove(); + + // Safety: a fresh new Inserter, so safe here + let found_key = unsafe { + let (found, found_key, ptr) = self.find_splice(version, key.as_ref(), &mut ins, true); + if found_key.is_some() { + key.on_fail(&self.arena); + } + + if found { + let node_ptr = ptr.expect("the NodePtr cannot be `None` when we found"); + let k = found_key.expect("the key cannot be `None` when we found"); + let old = VersionedEntryRef::from_node(node_ptr, &self.arena); + + if upsert { + return self.upsert( + old, + node_ptr, + &if is_remove { + Key::remove_pointer(&self.arena, k) + } else { + Key::pointer(&self.arena, k) + }, + trailer, + value_builder, + success, + failure, + ); + } + + return Ok(Either::Left(if old.is_removed() { + None + } else { + Some(old) + })); + } + + found_key + }; + + #[cfg(all(test, feature = "std"))] + if self.yield_now { + // Add delay to make it easier to test race between 
this thread + // and another thread that sees the intermediate state between + // finding the splice and using it. + std::thread::yield_now(); + } + + let k = match found_key { + None => key, + Some(k) => { + if is_remove { + Key::remove_pointer(&self.arena, k) + } else { + Key::pointer(&self.arena, k) + } + } + }; + + let (nd, mut deallocator) = self + .new_node(version, height, &k, value_builder, trailer) + .inspect_err(|_| { + k.on_fail(&self.arena); + })?; + + let is_removed = unsafe { nd.as_ref(&self.arena).get_value(&self.arena).is_none() }; + + // We always insert from the base level and up. After you add a node in base + // level, we cannot create a node in the level above because it would have + // discovered the node in the base level. + let mut invalid_data_splice = false; + + for i in 0..(height as usize) { + let mut prev = ins.spl[i].prev; + let mut next = ins.spl[i].next; + + if prev.is_null() { + // New node increased the height of the skiplist, so assume that the + // new level has not yet been populated. + if !next.is_null() { + panic!("next is expected to be nil, since prev is nil"); + } + + prev = self.head; + next = self.tail; + } + + // +----------------+ +------------+ +----------------+ + // | prev | | nd | | next | + // | prevNextOffset |---->| | | | + // | |<----| prevOffset | | | + // | | | nextOffset |---->| | + // | | | |<----| nextPrevOffset | + // +----------------+ +------------+ +----------------+ + // + // 1. Initialize prevOffset and nextOffset to point to prev and next. + // 2. CAS prevNextOffset to repoint from next to nd. + // 3. CAS nextPrevOffset to repoint from prev to nd. + unsafe { + loop { + let prev_offset = prev.offset(); + let next_offset = next.offset(); + nd.write_tower(&self.arena, i, prev_offset, next_offset); + + // Check whether next has an updated link to prev. If it does not, + // that can mean one of two things: + // 1. 
The thread that added the next node hasn't yet had a chance + // to add the prev link (but will shortly). + // 2. Another thread has added a new node between prev and next. + // + // Safety: we already check next is not null + let next_prev_offset = next.prev_offset(&self.arena, i); + if next_prev_offset != prev_offset { + // Determine whether #1 or #2 is true by checking whether prev + // is still pointing to next. As long as the atomic operations + // have at least acquire/release semantics (no need for + // sequential consistency), this works, as it is equivalent to + // the "publication safety" pattern. + let prev_next_offset = prev.next_offset(&self.arena, i); + if prev_next_offset == next_offset { + // Ok, case #1 is true, so help the other thread along by + // updating the next node's prev link. + + let _ = next.cas_prev_offset( + &self.arena, + i, + next_prev_offset, + prev_offset, + Ordering::SeqCst, + Ordering::Acquire, + ); + } + } + + match prev.cas_next_offset( + &self.arena, + i, + next.offset(), + nd.offset(), + Ordering::SeqCst, + Ordering::Acquire, + ) { + Ok(_) => { + // Managed to insert nd between prev and next, so update the next + // node's prev link and go to the next level. + #[cfg(all(test, feature = "std"))] + if self.yield_now { + // Add delay to make it easier to test race between this thread + // and another thread that sees the intermediate state between + // setting next and setting prev. + std::thread::yield_now(); + } + + let _ = next.cas_prev_offset( + &self.arena, + i, + prev_offset, + nd.offset(), + Ordering::SeqCst, + Ordering::Acquire, + ); + + break; + } + + Err(_) => { + let unlinked_node = nd.as_ref(&self.arena); + + // CAS failed. We need to recompute prev and next. It is unlikely to + // be helpful to try to use a different level as we redo the search, + // because it is unlikely that lots of nodes are inserted between prev + // and next. 
+ let fr = + self.find_splice_for_level(version, unlinked_node.get_key(&self.arena), i, prev); + if fr.found { + if i != 0 { + panic!("how can another thread have inserted a node at a non-base level?"); + } + + let node_ptr = fr + .curr + .expect("the current should not be `None` when we found"); + let old = VersionedEntryRef::from_node(node_ptr, &self.arena); + + if upsert { + let curr = nd.as_ref(&self.arena); + let (new_value_offset, new_value_size) = curr.value_pointer().load(); + deallocator.dealloc_node_and_key(&self.arena); + + return self + .upsert_value( + old, + node_ptr, + &if is_removed { + Key::remove_pointer(&self.arena, fr.found_key.unwrap()) + } else { + Key::pointer(&self.arena, fr.found_key.unwrap()) + }, + new_value_offset, + new_value_size, + success, + failure, + ) + .map_err(Either::Right); + } + + deallocator.dealloc(&self.arena); + return Ok(Either::Left(if old.is_removed() { + None + } else { + Some(old) + })); + } + + if let Some(p) = fr.found_key { + // if key is already in the underlying allocator, we should deallocate the key + // in deallocator, and let the underlying allocator reclaim it, so that we do not store the same key twice. + if deallocator.key.is_some() { + let node = nd.as_mut(&self.arena); + node.set_key_offset(p.offset); + node + .set_key_size_and_height(encode_key_size_and_height(p.size, p.height.unwrap())); + deallocator.dealloc_key_by_ref(&self.arena) + } + } + + invalid_data_splice = true; + prev = fr.splice.prev; + next = fr.splice.next; + } + } + } + } + } + + // If we had to recompute the splice for a level, invalidate the entire + // cached splice. + if invalid_data_splice { + ins.height = 0; + } else { + // The splice was valid. We inserted a node between spl[i].prev and + // spl[i].next. Optimistically update spl[i].prev for use in a subsequent + // call to add. 
+ for i in 0..(height as usize) { + ins.spl[i].prev = nd; + } + } + let meta = self.meta(); + meta.increase_len(); + meta.update_max_version(version); + meta.update_min_version(version); + + Ok(Either::Left(None)) + } + + #[allow(clippy::too_many_arguments)] + unsafe fn upsert_value<'a, 'b: 'a>( + &'a self, + old: VersionedEntryRef<'a, A>, + old_node_ptr: ::Pointer, + key: &Key<'a, 'b, A>, + value_offset: u32, + value_size: u32, + success: Ordering, + failure: Ordering, + ) -> Result, Error> { + match key { + Key::Occupied(_) | Key::Vacant { .. } | Key::Pointer { .. } => { + let old_node = old_node_ptr.as_ref(&self.arena); + old_node.update_value(&self.arena, value_offset, value_size); + + Ok(Either::Left(if old.is_removed() { + None + } else { + Some(old) + })) + } + Key::Remove(_) | Key::RemoveVacant { .. } | Key::RemovePointer { .. } => { + let node = old_node_ptr.as_ref(&self.arena); + match node.clear_value(&self.arena, success, failure) { + Ok(_) => Ok(Either::Left(None)), + Err((offset, len)) => Ok(Either::Right(Err( + VersionedEntryRef::from_node_with_pointer( + old_node_ptr, + &self.arena, + ValuePartPointer::new( + offset, + A::align_offset::(offset) + mem::size_of::() as u32, + len, + ), + ), + ))), + } + } + } + } + + #[allow(clippy::too_many_arguments)] + unsafe fn upsert<'a, 'b: 'a, E>( + &'a self, + old: VersionedEntryRef<'a, A>, + old_node_ptr: ::Pointer, + key: &Key<'a, 'b, A>, + trailer: A::Trailer, + value_builder: Option) -> Result<(), E>>>, + success: Ordering, + failure: Ordering, + ) -> Result, Either> { + match key { + Key::Occupied(_) | Key::Vacant { .. } | Key::Pointer { .. } => self + .arena + .allocate_and_update_value( + old_node_ptr.as_ref(&self.arena), + trailer, + value_builder.unwrap(), + ) + .map(|_| Either::Left(if old.is_removed() { None } else { Some(old) })), + Key::Remove(_) | Key::RemoveVacant { .. } | Key::RemovePointer { .. 
} => { + let node = old_node_ptr.as_ref(&self.arena); + match node.clear_value(&self.arena, success, failure) { + Ok(_) => Ok(Either::Left(None)), + Err((offset, len)) => Ok(Either::Right(Err( + VersionedEntryRef::from_node_with_pointer( + old_node_ptr, + &self.arena, + ValuePartPointer::new( + offset, + A::align_offset::(offset) + mem::size_of::() as u32, + len, + ), + ), + ))), + } + } + } + } +} + +/// A helper struct for caching splice information +pub struct Inserter<'a, P> { + spl: [Splice

; super::MAX_HEIGHT], + height: u32, + _m: core::marker::PhantomData<&'a ()>, +} + +impl<'a, P: NodePointer> Default for Inserter<'a, P> { + #[inline] + fn default() -> Self { + Self { + spl: [ + Splice::default(), + Splice::default(), + Splice::default(), + Splice::default(), + Splice::default(), + Splice::default(), + Splice::default(), + Splice::default(), + Splice::default(), + Splice::default(), + Splice::default(), + Splice::default(), + Splice::default(), + Splice::default(), + Splice::default(), + Splice::default(), + Splice::default(), + Splice::default(), + Splice::default(), + Splice::default(), + Splice::default(), + Splice::default(), + Splice::default(), + Splice::default(), + Splice::default(), + Splice::default(), + Splice::default(), + Splice::default(), + Splice::default(), + Splice::default(), + Splice::default(), + Splice::default(), + ], + height: 0, + _m: core::marker::PhantomData, + } + } +} + +#[derive(Debug, Clone, Copy)] +struct Splice

{ + prev: P, + next: P, +} + +impl Default for Splice

{ + #[inline] + fn default() -> Self { + Self { + prev: P::NULL, + next: P::NULL, + } + } +} + +pub(crate) enum Key<'a, 'b: 'a, A> { + Occupied(&'b [u8]), + Vacant { + buf: VacantBuffer<'a>, + offset: u32, + }, + Pointer { + arena: &'a A, + offset: u32, + len: u32, + }, + Remove(&'b [u8]), + #[allow(dead_code)] + RemoveVacant { + buf: VacantBuffer<'a>, + offset: u32, + }, + RemovePointer { + arena: &'a A, + offset: u32, + len: u32, + }, +} + +impl<'a, 'b: 'a, A: Allocator> Key<'a, 'b, A> { + #[inline] + pub(crate) fn on_fail(&self, arena: &A) { + match self { + Self::Occupied(_) | Self::Remove(_) | Self::Pointer { .. } | Self::RemovePointer { .. } => {} + Self::Vacant { buf, offset } | Self::RemoveVacant { buf, offset } => unsafe { + arena.dealloc(*offset, buf.capacity() as u32); + }, + } + } +} + +impl<'a, 'b: 'a, A> Key<'a, 'b, A> { + /// Returns `true` if the key is a remove operation. + #[inline] + pub(crate) fn is_remove(&self) -> bool { + matches!( + self, + Self::Remove(_) | Self::RemoveVacant { .. } | Self::RemovePointer { .. } + ) + } +} + +impl<'a, 'b: 'a, A: Allocator> AsRef<[u8]> for Key<'a, 'b, A> { + #[inline] + fn as_ref(&self) -> &[u8] { + match self { + Self::Occupied(key) | Self::Remove(key) => key, + Self::Vacant { buf, .. } | Self::RemoveVacant { buf, .. } => buf.as_ref(), + Self::Pointer { arena, offset, len } | Self::RemovePointer { arena, offset, len } => unsafe { + arena.get_bytes(*offset as usize, *len as usize) + }, + } + } +} + +impl<'a, 'b: 'a, A> Key<'a, 'b, A> { + #[inline] + const fn pointer(arena: &'a A, pointer: Pointer) -> Self { + Self::Pointer { + arena, + offset: pointer.offset, + len: pointer.size, + } + } + + #[inline] + const fn remove_pointer(arena: &'a A, pointer: Pointer) -> Self { + Self::RemovePointer { + arena, + offset: pointer.offset, + len: pointer.size, + } + } +} + +struct FindResult

{ + // both key and version are equal. + found: bool, + // only key is equal. + found_key: Option, + splice: Splice

, + curr: Option

, +} diff --git a/src/base/api.rs b/src/base/api.rs new file mode 100644 index 0000000..060d290 --- /dev/null +++ b/src/base/api.rs @@ -0,0 +1,683 @@ +use core::borrow::Borrow; + +use rarena_allocator::ArenaOptions; + +use super::*; + +mod update; + +type RemoveValueBuilder = + ValueBuilder Result<(), E>>>; + +impl SkipList { + /// Create a new skipmap with default options. + /// + /// **Note:** The capacity stands for how many memory allocated, + /// it does not mean the skiplist can store `cap` entries. + /// + /// + /// + /// **What the difference between this method and [`SkipList::mmap_anon`]?** + /// + /// 1. This method will use an `AlignedVec` ensures we are working within Rust's memory safety guarantees. + /// Even if we are working with raw pointers with `Box::into_raw`, + /// the backend ARENA will reclaim the ownership of this memory by converting it back to a `Box` + /// when dropping the backend ARENA. Since `AlignedVec` uses heap memory, the data might be more cache-friendly, + /// especially if you're frequently accessing or modifying it. + /// + /// 2. Where as [`SkipList::mmap_anon`] will use mmap anonymous to require memory from the OS. + /// If you require very large contiguous memory regions, `mmap` might be more suitable because + /// it's more direct in requesting large chunks of memory from the OS. + /// + /// [`SkipList::mmap_anon`]: #method.mmap_anon + #[allow(dead_code)] + pub fn new(opts: Options) -> Result { + Self::with_comparator(opts, Ascend) + } + + /// Create a new memory map file backed with default options. + /// + /// **Note:** The capacity stands for how many memory mmaped, + /// it does not mean the skipmap can store `cap` entries. 
+ /// + /// # Safety + /// - If trying to reopens a skiplist, then the trailer type must be the same as the previous one + #[cfg(all(feature = "memmap", not(target_family = "wasm")))] + #[cfg_attr(docsrs, doc(cfg(all(feature = "memmap", not(target_family = "wasm")))))] + #[allow(dead_code)] + #[inline] + pub unsafe fn map_mut>( + path: P, + opts: Options, + open_options: OpenOptions, + mmap_options: MmapOptions, + ) -> std::io::Result { + Self::map_mut_with_comparator(path, opts, open_options, mmap_options, Ascend) + } + + /// Open an exist file and mmap it to create skipmap. + /// + /// # Safety + /// - If trying to reopens a skiplist, then the trailer type must be the same as the previous one. + #[cfg(all(feature = "memmap", not(target_family = "wasm")))] + #[cfg_attr(docsrs, doc(cfg(all(feature = "memmap", not(target_family = "wasm")))))] + #[allow(dead_code)] + pub unsafe fn map>( + path: P, + opts: Options, + open_options: OpenOptions, + mmap_options: MmapOptions, + ) -> std::io::Result { + Self::map_with_comparator(path, opts, open_options, mmap_options, Ascend) + } + + /// Create a new memory map backed skipmap with default options. + /// + /// **What the difference between this method and [`SkipList::new`]?** + /// + /// 1. This method will use mmap anonymous to require memory from the OS directly. + /// If you require very large contiguous memory regions, this method might be more suitable because + /// it's more direct in requesting large chunks of memory from the OS. + /// + /// 2. Where as [`SkipList::new`] will use an `AlignedVec` ensures we are working within Rust's memory safety guarantees. + /// Even if we are working with raw pointers with `Box::into_raw`, + /// the backend ARENA will reclaim the ownership of this memory by converting it back to a `Box` + /// when dropping the backend ARENA. Since `AlignedVec` uses heap memory, the data might be more cache-friendly, + /// especially if you're frequently accessing or modifying it. 
+ /// + /// [`SkipList::new`]: #method.new + #[cfg(all(feature = "memmap", not(target_family = "wasm")))] + #[cfg_attr(docsrs, doc(cfg(all(feature = "memmap", not(target_family = "wasm")))))] + #[allow(dead_code)] + #[inline] + pub fn map_anon(opts: Options, mmap_options: MmapOptions) -> std::io::Result { + Self::map_anon_with_comparator(opts, mmap_options, Ascend) + } +} + +impl SkipList { + /// Sets remove on drop, only works on mmap with a file backend. + /// + /// Default is `false`. + /// + /// > **WARNING:** Once set to `true`, the backed file will be removed when the allocator is dropped, even though the file is opened in + /// > read-only mode. + #[cfg(all(feature = "memmap", not(target_family = "wasm")))] + #[cfg_attr(docsrs, doc(cfg(all(feature = "memmap", not(target_family = "wasm")))))] + #[inline] + pub fn remove_on_drop(&self, val: bool) { + self.arena.remove_on_drop(val); + } + + /// Returns the offset of the data section in the `SkipList`. + /// + /// By default, `SkipList` will allocate meta, head node, and tail node in the ARENA, + /// and the data section will be allocated after the tail node. + /// + /// This method will return the offset of the data section in the ARENA. + #[inline] + pub const fn data_offset(&self) -> usize { + self.data_offset as usize + } + + /// Returns the version number of the [`SkipList`]. + #[inline] + pub fn version(&self) -> u16 { + self.arena.magic_version() + } + + /// Returns the magic version number of the [`SkipList`]. + /// + /// This value can be used to check the compatibility for application using [`SkipList`]. + #[inline] + pub fn magic_version(&self) -> u16 { + self.meta().magic_version() + } + + /// Returns the height of the highest tower within any of the nodes that + /// have ever been allocated as part of this skiplist. + #[inline] + pub fn height(&self) -> u8 { + self.meta().height() + } + + /// Returns the number of remaining bytes can be allocated by the arena. 
+ #[inline] + pub fn remaining(&self) -> usize { + self.arena.remaining() + } + + /// Returns how many bytes are discarded by the ARENA. + #[inline] + pub fn discarded(&self) -> u32 { + self.arena.discarded() + } + + /// Returns the number of bytes that have allocated from the arena. + #[inline] + pub fn allocated(&self) -> usize { + self.arena.allocated() + } + + /// Returns the capacity of the arena. + #[inline] + pub fn capacity(&self) -> usize { + self.arena.capacity() + } + + /// Returns the number of entries in the skipmap. + #[inline] + pub fn len(&self) -> usize { + self.meta().len() as usize + } + + /// Returns true if the skipmap is empty. + #[inline] + pub fn is_empty(&self) -> bool { + self.len() == 0 + } + + /// Gets the number of pointers to this `SkipList` similar to [`Arc::strong_count`](std::sync::Arc::strong_count). + #[inline] + pub fn refs(&self) -> usize { + self.arena.refs() + } + + /// Returns the maximum version of all entries in the map. + #[inline] + pub fn max_version(&self) -> u64 { + self.meta().max_version() + } + + /// Returns the minimum version of all entries in the map. + #[inline] + pub fn min_version(&self) -> u64 { + self.meta().min_version() + } + + /// Returns the comparator used to compare keys. + #[inline] + pub const fn comparator(&self) -> &C { + &self.cmp + } + + /// Returns a random generated height. + /// + /// This method is useful when you want to check if the underlying allocator can allocate a node. + #[inline] + pub fn random_height(&self) -> Height { + random_height(self.opts.max_height()) + } + + /// Returns the estimated size of a node with the given height and key/value sizes. + /// + /// **Note**: The returned size is only an estimate and may not be accurate, which means that the actual size is less than or equal to the returned size. 
+ #[inline] + pub fn estimated_node_size(height: Height, key_size: usize, value_size: usize) -> usize { + let height: usize = height.into(); + 7 // max padding + + mem::size_of::() + + mem::size_of::<::Link>() * height + + key_size + + mem::align_of::() - 1 // max trailer padding + + mem::size_of::() + + value_size + } + + /// Like [`SkipList::new`], but with a custom [`Comparator`]. + #[inline] + pub fn with_comparator(opts: Options, cmp: C) -> Result { + let arena_opts = ArenaOptions::new() + .with_capacity(opts.capacity()) + .with_maximum_alignment(mem::align_of::()) + .with_unify(opts.unify()) + .with_magic_version(CURRENT_VERSION) + .with_freelist(opts.freelist()) + .with_reserved(opts.reserved()); + let arena = A::new(arena_opts, opts)?; + Self::new_in(arena, cmp, opts) + } + + /// Like [`SkipList::map_mut`], but with [`Options`] and a custom [`Comparator`]. + /// + /// # Safety + /// - If trying to reopens a skiplist, then the trailer type must be the same as the previous one. + #[cfg(all(feature = "memmap", not(target_family = "wasm")))] + #[cfg_attr(docsrs, doc(cfg(all(feature = "memmap", not(target_family = "wasm")))))] + #[inline] + pub unsafe fn map_mut_with_comparator>( + path: P, + opts: Options, + open_options: OpenOptions, + mmap_options: MmapOptions, + cmp: C, + ) -> std::io::Result { + let alignment = mem::align_of::(); + let arena_opts = ArenaOptions::new() + .with_maximum_alignment(alignment) + .with_magic_version(CURRENT_VERSION) + .with_freelist(opts.freelist()) + .with_reserved(opts.reserved()); + let arena = A::map_mut(path, arena_opts, open_options, mmap_options, opts)?; + Self::new_in(arena, cmp, opts.with_unify(true)) + .map_err(invalid_data) + .and_then(|map| { + if map.magic_version() != opts.magic_version() { + Err(bad_magic_version()) + } else if map.version() != CURRENT_VERSION { + Err(bad_version()) + } else { + // Lock the memory of first page to prevent it from being swapped out. 
+ #[cfg(not(windows))] + unsafe { + map + .arena + .mlock(0, map.arena.page_size().min(map.arena.capacity()))?; + } + + Ok(map) + } + }) + } + + /// Like [`SkipList::map_mut`], but with [`Options`], a custom [`Comparator`] and a [`PathBuf`](std::path::PathBuf) builder. + /// + /// # Safety + /// - If trying to reopens a skiplist, then the trailer type must be the same as the previous one. + #[cfg(all(feature = "memmap", not(target_family = "wasm")))] + #[cfg_attr(docsrs, doc(cfg(all(feature = "memmap", not(target_family = "wasm")))))] + #[inline] + pub unsafe fn map_mut_with_comparator_and_path_builder( + path_builder: PB, + opts: Options, + open_options: OpenOptions, + mmap_options: MmapOptions, + cmp: C, + ) -> Result> + where + PB: FnOnce() -> Result, + { + let alignment = mem::align_of::(); + let arena_opts = ArenaOptions::new() + .with_maximum_alignment(alignment) + .with_magic_version(CURRENT_VERSION) + .with_freelist(opts.freelist()) + .with_reserved(opts.reserved()); + let arena = + A::map_mut_with_path_builder(path_builder, arena_opts, open_options, mmap_options, opts)?; + Self::new_in(arena, cmp, opts.with_unify(true)) + .map_err(invalid_data) + .and_then(|map| { + if map.magic_version() != opts.magic_version() { + Err(bad_magic_version()) + } else if map.version() != CURRENT_VERSION { + Err(bad_version()) + } else { + // Lock the memory of first page to prevent it from being swapped out. + #[cfg(not(windows))] + unsafe { + map + .arena + .mlock(0, map.arena.page_size().min(map.arena.capacity()))?; + } + + Ok(map) + } + }) + .map_err(Either::Right) + } + + /// Like [`SkipList::map`], but with a custom [`Comparator`]. + /// + /// # Safety + /// - If trying to reopens a skiplist, then the trailer type must be the same as the previous one. 
+ #[cfg(all(feature = "memmap", not(target_family = "wasm")))] + #[cfg_attr(docsrs, doc(cfg(all(feature = "memmap", not(target_family = "wasm")))))] + #[inline] + pub unsafe fn map_with_comparator>( + path: P, + opts: Options, + open_options: OpenOptions, + mmap_options: MmapOptions, + cmp: C, + ) -> std::io::Result { + let opts = opts.with_unify(true); + let magic_version = opts.magic_version(); + let arena_opts = ArenaOptions::new() + .with_magic_version(CURRENT_VERSION) + .with_reserved(opts.reserved()); + let arena = A::map(path, arena_opts, open_options, mmap_options, opts)?; + Self::new_in(arena, cmp, opts) + .map_err(invalid_data) + .and_then(|map| { + if map.magic_version() != magic_version { + Err(bad_magic_version()) + } else if map.version() != CURRENT_VERSION { + Err(bad_version()) + } else { + // Lock the memory of first page to prevent it from being swapped out. + #[cfg(not(windows))] + unsafe { + map + .arena + .mlock(0, map.arena.page_size().min(map.arena.capacity()))?; + } + + Ok(map) + } + }) + } + + /// Like [`SkipList::map`], but with a custom [`Comparator`] and a [`PathBuf`](std::path::PathBuf) builder. + /// + /// # Safety + /// - If trying to reopens a skiplist, then the trailer type must be the same as the previous one. 
+ #[cfg(all(feature = "memmap", not(target_family = "wasm")))] + #[cfg_attr(docsrs, doc(cfg(all(feature = "memmap", not(target_family = "wasm")))))] + #[inline] + pub unsafe fn map_with_comparator_and_path_builder( + path_builder: PB, + opts: Options, + open_options: OpenOptions, + mmap_options: MmapOptions, + cmp: C, + ) -> Result> + where + PB: FnOnce() -> Result, + { + let opts = opts.with_unify(true); + + let magic_version = opts.magic_version(); + let arena_opts = ArenaOptions::new() + .with_magic_version(CURRENT_VERSION) + .with_reserved(opts.reserved()); + let arena = + A::map_with_path_builder(path_builder, arena_opts, open_options, mmap_options, opts)?; + Self::new_in(arena, cmp, opts) + .map_err(invalid_data) + .and_then(|map| { + if map.magic_version() != magic_version { + Err(bad_magic_version()) + } else if map.version() != CURRENT_VERSION { + Err(bad_version()) + } else { + // Lock the memory of first page to prevent it from being swapped out. + #[cfg(not(windows))] + unsafe { + map + .arena + .mlock(0, map.arena.page_size().min(map.arena.capacity()))?; + } + Ok(map) + } + }) + .map_err(Either::Right) + } + + /// Like [`SkipList::map_anon`], but with a custom [`Comparator`]. 
+ #[cfg(all(feature = "memmap", not(target_family = "wasm")))] + #[cfg_attr(docsrs, doc(cfg(all(feature = "memmap", not(target_family = "wasm")))))] + #[inline] + pub fn map_anon_with_comparator( + opts: Options, + mmap_options: MmapOptions, + cmp: C, + ) -> std::io::Result { + let alignment = mem::align_of::(); + let arena_opts = ArenaOptions::new() + .with_maximum_alignment(alignment) + .with_unify(opts.unify()) + .with_magic_version(CURRENT_VERSION) + .with_reserved(opts.reserved()); + let arena = A::map_anon(arena_opts, mmap_options, opts)?; + + // TODO: remove this when support mlock on windows + #[allow(clippy::bind_instead_of_map)] + Self::new_in(arena, cmp, opts) + .map_err(invalid_data) + .and_then(|map| { + // Lock the memory of first page to prevent it from being swapped out. + #[cfg(not(windows))] + unsafe { + map + .arena + .mlock(0, map.arena.page_size().min(map.arena.capacity()))?; + } + Ok(map) + }) + } + + /// Clear the skiplist to empty and re-initialize. + /// + /// # Safety + /// - The current pointers get from the ARENA cannot be used anymore after calling this method. + /// - This method is not thread-safe. + pub unsafe fn clear(&mut self) -> Result<(), Error> { + self.arena.clear()?; + + let meta = if self.opts.unify() { + self.arena.allocate_header(self.meta().magic_version())? + } else { + unsafe { + let magic_version = self.meta().magic_version(); + let _ = Box::from_raw(self.meta.as_ptr()); + NonNull::new_unchecked(Box::into_raw(Box::new(::new( + magic_version, + )))) + } + }; + + self.meta = meta; + + let max_height: u8 = self.opts.max_height().into(); + let head = self.arena.allocate_full_node(max_height)?; + let tail = self.arena.allocate_full_node(max_height)?; + + // Safety: + // We will always allocate enough space for the head node and the tail node. + unsafe { + // Link all head/tail levels together. 
+ for i in 0..(max_height as usize) { + let head_link = head.tower(&self.arena, i); + let tail_link = tail.tower(&self.arena, i); + head_link.store_next_offset(tail.offset(), Ordering::Relaxed); + tail_link.store_prev_offset(head.offset(), Ordering::Relaxed); + } + } + + self.head = head; + self.tail = tail; + Ok(()) + } + + /// Flushes outstanding memory map modifications to disk. + /// + /// When this method returns with a non-error result, + /// all outstanding changes to a file-backed memory map are guaranteed to be durably stored. + /// The file's metadata (including last modification timestamp) may not be updated. + #[cfg(all(feature = "memmap", not(target_family = "wasm")))] + #[cfg_attr(docsrs, doc(cfg(all(feature = "memmap", not(target_family = "wasm")))))] + pub fn flush(&self) -> std::io::Result<()> { + self.arena.flush() + } + + /// Asynchronously flushes outstanding memory map modifications to disk. + /// + /// This method initiates flushing modified pages to durable storage, but it will not wait for + /// the operation to complete before returning. The file's metadata (including last + /// modification timestamp) may not be updated. + #[cfg(all(feature = "memmap", not(target_family = "wasm")))] + #[cfg_attr(docsrs, doc(cfg(all(feature = "memmap", not(target_family = "wasm")))))] + pub fn flush_async(&self) -> std::io::Result<()> { + self.arena.flush_async() + } + + #[cfg(all(test, feature = "std"))] + #[inline] + pub(crate) fn with_yield_now(mut self) -> Self { + self.yield_now = true; + self + } +} + +impl SkipList { + /// Returns `true` if the key exists in the map. + /// + /// This method will return `false` if the entry is marked as removed. If you want to check if the key exists even if it is marked as removed, + /// you can use [`contains_key_versioned`](SkipList::contains_key_versioned). 
+ #[inline] + pub fn contains_key<'a, 'b: 'a>(&'a self, version: Version, key: &'b [u8]) -> bool { + self.get(version, key).is_some() + } + + /// Returns `true` if the key exists in the map, even if it is marked as removed. + #[inline] + pub fn contains_key_versioned<'a, 'b: 'a>(&'a self, version: Version, key: &'b [u8]) -> bool { + self.get_versioned(version, key).is_some() + } + + /// Returns the first entry in the map. + pub fn first(&self, version: Version) -> Option> { + self.iter(version).seek_lower_bound(Bound::Unbounded) + } + + /// Returns the last entry in the map. + pub fn last(&self, version: Version) -> Option> { + self.iter(version).seek_upper_bound(Bound::Unbounded) + } + + /// Returns the value associated with the given key, if it exists. + /// + /// This method will return `None` if the entry is marked as removed. If you want to get the entry even if it is marked as removed, + /// you can use [`get_versioned`](SkipList::get_versioned). + pub fn get<'a, 'b: 'a>(&'a self, version: Version, key: &'b [u8]) -> Option> { + unsafe { + let (n, eq) = self.find_near(version, key, false, true, true); // findLessOrEqual. + + let n = n?; + let node = n.as_ref(&self.arena); + let node_key = node.get_key(&self.arena); + let (value, pointer) = node.get_value_and_trailer_with_pointer(&self.arena); + if eq { + return value.map(|_| { + EntryRef(VersionedEntryRef::from_node_with_pointer( + n, + &self.arena, + pointer, + )) + }); + } + + if !matches!(self.cmp.compare(key, node_key), cmp::Ordering::Equal) { + return None; + } + + if node.version() > version { + return None; + } + + value.map(|_| { + EntryRef(VersionedEntryRef::from_node_with_pointer( + n, + &self.arena, + pointer, + )) + }) + } + } + + /// Returns the value associated with the given key, if it exists. + /// + /// The difference between `get` and `get_versioned` is that `get_versioned` will return the value even if the entry is removed. 
+ pub fn get_versioned<'a, 'b: 'a>( + &'a self, + version: Version, + key: &'b [u8], + ) -> Option> { + unsafe { + let (n, eq) = self.find_near(version, key, false, true, false); // findLessOrEqual. + + let n = n?; + let node = n.as_ref(&self.arena); + let node_key = node.get_key(&self.arena); + let (_, pointer) = node.get_value_and_trailer_with_pointer(&self.arena); + if eq { + return Some(VersionedEntryRef::from_node_with_pointer( + n, + &self.arena, + pointer, + )); + } + + if !matches!(self.cmp.compare(key, node_key), cmp::Ordering::Equal) { + return None; + } + + if node.version() > version { + return None; + } + + Some(VersionedEntryRef::from_node_with_pointer( + n, + &self.arena, + pointer, + )) + } + } + + /// Returns an `EntryRef` pointing to the highest element whose key is below the given bound. + /// If no such element is found then `None` is returned. + pub fn upper_bound<'a, 'b: 'a>( + &'a self, + version: Version, + upper: Bound<&'b [u8]>, + ) -> Option> { + self.iter(version).seek_upper_bound(upper) + } + + /// Returns an `EntryRef` pointing to the lowest element whose key is above the given bound. + /// If no such element is found then `None` is returned. + pub fn lower_bound<'a, 'b: 'a>( + &'a self, + version: Version, + lower: Bound<&'b [u8]>, + ) -> Option> { + self.iter(version).seek_lower_bound(lower) + } + + /// Returns a new iterator, this iterator will yield the latest version of all entries in the map less or equal to the given version. + #[inline] + pub fn iter(&self, version: Version) -> iterator::Iter { + iterator::Iter::new(version, self) + } + + /// Returns a new iterator, this iterator will yield all versions for all entries in the map less or equal to the given version. 
+ #[inline] + pub fn iter_all_versions(&self, version: Version) -> iterator::AllVersionsIter { + iterator::AllVersionsIter::new(version, self, true) + } + + /// Returns a iterator that within the range, this iterator will yield the latest version of all entries in the range less or equal to the given version. + #[inline] + pub fn range<'a, Q, R>(&'a self, version: Version, range: R) -> iterator::Iter<'a, A, C, Q, R> + where + Q: ?Sized + Borrow<[u8]>, + R: RangeBounds + 'a, + { + iterator::Iter::range(version, self, range) + } + + /// Returns a iterator that within the range, this iterator will yield all versions for all entries in the range less or equal to the given version. + #[inline] + pub fn range_all_versions<'a, Q, R>( + &'a self, + version: Version, + range: R, + ) -> iterator::AllVersionsIter<'a, A, C, Q, R> + where + Q: ?Sized + Borrow<[u8]>, + R: RangeBounds + 'a, + { + iterator::AllVersionsIter::range(version, self, range, true) + } +} diff --git a/src/base/api/update.rs b/src/base/api/update.rs new file mode 100644 index 0000000..f52135b --- /dev/null +++ b/src/base/api/update.rs @@ -0,0 +1,661 @@ +use core::convert::Infallible; + +use among::Among; + +use super::*; + +impl SkipList { + /// Upserts a new key-value pair if it does not yet exist, if the key with the given version already exists, it will update the value. + /// Unlike [`get_or_insert`](SkipList::get_or_insert), this method will update the value if the key with the given version already exists. + /// + /// - Returns `Ok(None)` if the key was successfully inserted. + /// - Returns `Ok(Some(old))` if the key with the given version already exists and the value is successfully updated. 
+ #[inline] + pub fn insert<'a, 'b: 'a>( + &'a self, + version: Version, + key: &'b [u8], + value: &'b [u8], + trailer: A::Trailer, + ) -> Result>, Error> { + self.insert_at_height(version, self.random_height(), key, value, trailer) + } + + /// Upserts a new key-value pair at the given height if it does not yet exist, if the key with the given version already exists, it will update the value. + /// Unlike [`get_or_insert_at_height`](SkipList::get_or_insert_at_height), this method will update the value if the key with the given version already exists. + /// + /// - Returns `Ok(None)` if the key was successfully inserted. + /// - Returns `Ok(Some(old))` if the key with the given version already exists and the value is successfully updated. + pub fn insert_at_height<'a, 'b: 'a>( + &'a self, + version: Version, + height: Height, + key: &'b [u8], + value: &'b [u8], + trailer: A::Trailer, + ) -> Result>, Error> { + self.check_height_and_ro(height)?; + + let copy = |buf: &mut VacantBuffer| { + buf.put_slice_unchecked(value); + Ok(()) + }; + let val_len = value.len() as u32; + + self + .update::( + version, + trailer, + height.into(), + Key::Occupied(key), + Some(ValueBuilder::new(val_len, copy)), + Ordering::Relaxed, + Ordering::Relaxed, + Inserter::default(), + true, + ) + .map(|old| { + old.expect_left("insert must get InsertOk").and_then(|old| { + if old.is_removed() { + None + } else { + Some(EntryRef(old)) + } + }) + }) + .map_err(|e| e.expect_right("must be map::Error")) + } + + /// Upserts a new key if it does not yet exist, if the key with the given version already exists, it will update the value. + /// Unlike [`get_or_insert_with_value_builder`](SkipList::get_or_insert_with_value_builder), this method will update the value if the key with the given version already exists. + /// + /// This method is useful when you want to insert a key and you know the value size but you do not have the value + /// at this moment. 
+ /// + /// A placeholder will be inserted first, then you will get an [`VacantBuffer`], + /// and you must fill the buffer with bytes later in the closure. + /// + /// - Returns `Ok(None)` if the key was successfully inserted. + /// - Returns `Ok(Some(old))` if the key with the given version already exists and the value is successfully updated. + #[allow(dead_code)] + #[inline] + pub fn insert_with_value_builder<'a, 'b: 'a, E>( + &'a self, + version: Version, + key: &'b [u8], + value_builder: ValueBuilder) -> Result<(), E>>, + trailer: A::Trailer, + ) -> Result>, Either> { + self.insert_at_height_with_value_builder( + version, + self.random_height(), + key, + value_builder, + trailer, + ) + } + + /// Upserts a new key if it does not yet exist, if the key with the given version already exists, it will update the value. + /// Unlike [`get_or_insert_at_height_with_value_builder`](SkipList::get_or_insert_at_height_with_value_builder), this method will update the value if the key with the given version already exists. + /// + /// This method is useful when you want to insert a key and you know the value size but you do not have the value + /// at this moment. + /// + /// A placeholder will be inserted first, then you will get an [`VacantBuffer`], + /// and you must fill the buffer with bytes later in the closure. + /// + /// - Returns `Ok(None)` if the key was successfully inserted. + /// - Returns `Ok(Some(old))` if the key with the given version already exists and the value is successfully updated. 
+ pub fn insert_at_height_with_value_builder<'a, 'b: 'a, E>( + &'a self, + version: Version, + height: Height, + key: &'b [u8], + value_builder: ValueBuilder) -> Result<(), E>>, + trailer: A::Trailer, + ) -> Result>, Either> { + self.check_height_and_ro(height).map_err(Either::Right)?; + + self + .update( + version, + trailer, + height.into(), + Key::Occupied(key), + Some(value_builder), + Ordering::Relaxed, + Ordering::Relaxed, + Inserter::default(), + true, + ) + .map(|old| { + old.expect_left("insert must get InsertOk").and_then(|old| { + if old.is_removed() { + None + } else { + Some(EntryRef(old)) + } + }) + }) + } + + /// Inserts a new key-value pair if it does not yet exist. + /// + /// Unlike [`insert`](SkipList::insert), this method will not update the value if the key with the given version already exists. + /// + /// - Returns `Ok(None)` if the key was successfully get_or_inserted. + /// - Returns `Ok(Some(_))` if the key with the given version already exists. + #[allow(dead_code)] + #[inline] + pub fn get_or_insert<'a, 'b: 'a>( + &'a self, + version: Version, + key: &'b [u8], + value: &'b [u8], + trailer: A::Trailer, + ) -> Result>, Error> { + self.get_or_insert_at_height(version, self.random_height(), key, value, trailer) + } + + /// Inserts a new key-value pair at height if it does not yet exist. + /// + /// Unlike [`insert_at_height`](SkipList::insert_at_height), this method will not update the value if the key with the given version already exists. + /// + /// - Returns `Ok(None)` if the key was successfully get_or_inserted. + /// - Returns `Ok(Some(_))` if the key with the given version already exists. 
+ pub fn get_or_insert_at_height<'a, 'b: 'a>( + &'a self, + version: Version, + height: Height, + key: &'b [u8], + value: &'b [u8], + trailer: A::Trailer, + ) -> Result>, Error> { + self.check_height_and_ro(height)?; + + let copy = |buf: &mut VacantBuffer| { + buf.put_slice_unchecked(value); + Ok(()) + }; + let val_len = value.len() as u32; + + self + .update::( + version, + trailer, + height.into(), + Key::Occupied(key), + Some(ValueBuilder::new(val_len, copy)), + Ordering::Relaxed, + Ordering::Relaxed, + Inserter::default(), + false, + ) + .map(|old| { + old.expect_left("insert must get InsertOk").and_then(|old| { + if old.is_removed() { + None + } else { + Some(EntryRef(old)) + } + }) + }) + .map_err(|e| e.expect_right("must be map::Error")) + } + + /// Inserts a new key if it does not yet exist. + /// + /// Unlike [`insert_with_value_builder`](SkipList::insert_with_value_builder), this method will not update the value if the key with the given version already exists. + /// + /// This method is useful when you want to get_or_insert a key and you know the value size but you do not have the value + /// at this moment. + /// + /// A placeholder will be inserted first, then you will get an [`VacantBuffer`], + /// and you must fill the buffer with bytes later in the closure. + /// + /// - Returns `Ok(None)` if the key was successfully get_or_inserted. + /// - Returns `Ok(Some(_))` if the key with the given version already exists. + #[allow(dead_code)] + #[inline] + pub fn get_or_insert_with_value_builder<'a, 'b: 'a, E>( + &'a self, + version: Version, + key: &'b [u8], + value_builder: ValueBuilder) -> Result<(), E>>, + trailer: A::Trailer, + ) -> Result>, Either> { + self.get_or_insert_at_height_with_value_builder( + version, + self.random_height(), + key, + value_builder, + trailer, + ) + } + + /// Inserts a new key if it does not yet exist. 
+ /// + /// Unlike [`insert_at_height_with_value_builder`](SkipList::insert_at_height_with_value_builder), this method will not update the value if the key with the given version already exists. + /// + /// This method is useful when you want to get_or_insert a key and you know the value size but you do not have the value + /// at this moment. + /// + /// A placeholder will be inserted first, then you will get an [`VacantBuffer`], + /// and you must fill the buffer with bytes later in the closure. + /// + /// - Returns `Ok(None)` if the key was successfully get_or_inserted. + /// - Returns `Ok(Some(_))` if the key with the given version already exists. + pub fn get_or_insert_at_height_with_value_builder<'a, 'b: 'a, E>( + &'a self, + version: Version, + height: Height, + key: &'b [u8], + value_builder: ValueBuilder) -> Result<(), E>>, + trailer: A::Trailer, + ) -> Result>, Either> { + self.check_height_and_ro(height).map_err(Either::Right)?; + + self + .update( + version, + trailer, + height.into(), + Key::Occupied(key), + Some(value_builder), + Ordering::Relaxed, + Ordering::Relaxed, + Inserter::default(), + false, + ) + .map(|old| { + old.expect_left("insert must get InsertOk").and_then(|old| { + if old.is_removed() { + None + } else { + Some(EntryRef(old)) + } + }) + }) + } + + /// Upserts a new key if it does not yet exist, if the key with the given version already exists, it will update the value. + /// Unlike [`get_or_insert_with_builders`](SkipList::get_or_insert_with_builders), this method will update the value if the key with the given version already exists. + /// + /// This method is useful when you want to insert a key and you know the key size and value size but you do not have the key and value + /// at this moment. + /// + /// A placeholder will be inserted first, then you will get an [`VacantBuffer`], + /// and you must fill the buffer with bytes later in the closure. + /// + /// - Returns `Ok(None)` if the key was successfully inserted. 
+ /// - Returns `Ok(Some(old))` if the key with the given version already exists and the value is successfully updated. + #[allow(dead_code)] + #[inline] + pub fn insert_with_builders<'a, KE, VE>( + &'a self, + version: Version, + key_builder: KeyBuilder) -> Result<(), KE>>, + value_builder: ValueBuilder) -> Result<(), VE>>, + trailer: A::Trailer, + ) -> Result>, Among> { + self.insert_at_height_with_builders( + version, + self.random_height(), + key_builder, + value_builder, + trailer, + ) + } + + /// Upserts a new key if it does not yet exist, if the key with the given version already exists, it will update the value. + /// Unlike [`get_or_insert_with_builders`](SkipList::get_or_insert_with_builders), this method will update the value if the key with the given version already exists. + /// + /// This method is useful when you want to insert a key and you know the key size and value size but you do not have the key and value + /// at this moment. + /// + /// A placeholder will be inserted first, then you will get an [`VacantBuffer`], + /// and you must fill the buffer with bytes later in the closure. + /// + /// - Returns `Ok(None)` if the key was successfully inserted. + /// - Returns `Ok(Some(old))` if the key with the given version already exists and the value is successfully updated. 
+ pub fn insert_at_height_with_builders<'a, KE, VE>( + &'a self, + version: Version, + height: Height, + key_builder: KeyBuilder) -> Result<(), KE>>, + value_builder: ValueBuilder) -> Result<(), VE>>, + trailer: A::Trailer, + ) -> Result>, Among> { + self.check_height_and_ro(height).map_err(Among::Right)?; + + let (key_size, key) = key_builder.into_components(); + let (offset, vk) = self + .arena + .fetch_vacant_key(u32::from(key_size), key) + .map_err(|e| match e { + Either::Left(e) => Among::Left(e), + Either::Right(e) => Among::Right(e), + })?; + + self + .update( + version, + trailer, + height.into(), + Key::Vacant { offset, buf: vk }, + Some(value_builder), + Ordering::Relaxed, + Ordering::Relaxed, + Inserter::default(), + true, + ) + .map(|old| { + old.expect_left("insert must get InsertOk").and_then(|old| { + if old.is_removed() { + None + } else { + Some(EntryRef(old)) + } + }) + }) + .map_err(|e| match e { + Either::Left(e) => Among::Middle(e), + Either::Right(e) => Among::Right(e), + }) + } + + /// Inserts a new key if it does not yet exist. + /// + /// Unlike [`insert_with_builders`](SkipList::insert_with_builders), this method will not update the value if the key with the given version already exists. + /// + /// This method is useful when you want to get_or_insert a key and you know the value size but you do not have the value + /// at this moment. + /// + /// A placeholder will be inserted first, then you will get an [`VacantBuffer`], + /// and you must fill the buffer with bytes later in the closure. + #[allow(dead_code)] + #[inline] + pub fn get_or_insert_with_builders<'a, KE, VE>( + &'a self, + version: Version, + key_builder: KeyBuilder) -> Result<(), KE>>, + value_builder: ValueBuilder) -> Result<(), VE>>, + trailer: A::Trailer, + ) -> Result>, Among> { + self.get_or_insert_at_height_with_builders( + version, + self.random_height(), + key_builder, + value_builder, + trailer, + ) + } + + /// Inserts a new key if it does not yet exist. 
+ /// + /// Unlike [`insert_at_height_with_builders`](SkipList::insert_at_height_with_builders), this method will not update the value if the key with the given version already exists. + /// + /// This method is useful when you want to get_or_insert a key and you know the value size but you do not have the value + /// at this moment. + /// + /// A placeholder will be inserted first, then you will get an [`VacantBuffer`], + /// and you must fill the buffer with bytes later in the closure. + pub fn get_or_insert_at_height_with_builders<'a, KE, VE>( + &'a self, + version: Version, + height: Height, + key_builder: KeyBuilder) -> Result<(), KE>>, + value_builder: ValueBuilder) -> Result<(), VE>>, + trailer: A::Trailer, + ) -> Result>, Among> { + if self.arena.read_only() { + return Err(Among::Right(Error::read_only())); + } + + let (key_size, key) = key_builder.into_components(); + let (offset, vk) = self + .arena + .fetch_vacant_key(u32::from(key_size), key) + .map_err(|e| match e { + Either::Left(e) => Among::Left(e), + Either::Right(e) => Among::Right(e), + })?; + + self + .update( + version, + trailer, + height.into(), + Key::Vacant { offset, buf: vk }, + Some(value_builder), + Ordering::Relaxed, + Ordering::Relaxed, + Inserter::default(), + false, + ) + .map(|old| { + old.expect_left("insert must get InsertOk").and_then(|old| { + if old.is_removed() { + None + } else { + Some(EntryRef(old)) + } + }) + }) + .map_err(|e| match e { + Either::Left(e) => Among::Middle(e), + Either::Right(e) => Among::Right(e), + }) + } + + /// Removes the key-value pair if it exists. A CAS operation will be used to ensure the operation is atomic. + /// + /// Unlike [`get_or_remove`](SkipList::get_or_remove), this method will remove the value if the key with the given version already exists. + /// + /// - Returns `Ok(None)`: + /// - if the remove operation is successful or the key is marked in remove status by other threads. 
+ /// - Returns `Ok(Either::Right(current))` if the key with the given version already exists + /// and the entry is not successfully removed because of an update on this entry happens in another thread. + #[allow(dead_code)] + #[inline] + pub fn compare_remove<'a, 'b: 'a>( + &'a self, + version: Version, + key: &'b [u8], + trailer: A::Trailer, + success: Ordering, + failure: Ordering, + ) -> Result>, Error> { + self.compare_remove_at_height( + version, + self.random_height(), + key, + trailer, + success, + failure, + ) + } + + /// Removes the key-value pair if it exists. A CAS operation will be used to ensure the operation is atomic. + /// + /// Unlike [`get_or_remove_at_height`](SkipList::get_or_remove_at_height), this method will remove the value if the key with the given version already exists. + /// + /// - Returns `Ok(None)`: + /// - if the remove operation is successful or the key is marked in remove status by other threads. + /// - Returns `Ok(Either::Right(current))` if the key with the given version already exists + /// and the entry is not successfully removed because of an update on this entry happens in another thread. + pub fn compare_remove_at_height<'a, 'b: 'a>( + &'a self, + version: Version, + height: Height, + key: &'b [u8], + trailer: A::Trailer, + success: Ordering, + failure: Ordering, + ) -> Result>, Error> { + self.check_height_and_ro(height)?; + + self + .update( + version, + trailer, + height.into(), + Key::Remove(key), + Option::>::None, + success, + failure, + Inserter::default(), + true, + ) + .map(|res| match res { + Either::Left(_) => None, + Either::Right(res) => match res { + Ok(old) => { + if old.is_removed() { + None + } else { + Some(EntryRef(old)) + } + } + Err(current) => { + if current.is_removed() { + None + } else { + Some(EntryRef(current)) + } + } + }, + }) + .map_err(|e| e.expect_right("must be map::Error")) + } + + /// Gets or removes the key-value pair if it exists. 
+ /// Unlike [`compare_remove`](SkipList::compare_remove), this method will not remove the value if the key with the given version already exists. + /// + /// - Returns `Ok(None)` if the key does not exist. + /// - Returns `Ok(Some(old))` if the key with the given version already exists. + #[allow(dead_code)] + #[inline] + pub fn get_or_remove<'a, 'b: 'a>( + &'a self, + version: Version, + key: &'b [u8], + trailer: A::Trailer, + ) -> Result>, Error> { + self.get_or_remove_at_height(version, self.random_height(), key, trailer) + } + + /// Gets or removes the key-value pair if it exists. + /// Unlike [`compare_remove_at_height`](SkipList::compare_remove_at_height), this method will not remove the value if the key with the given version already exists. + /// + /// - Returns `Ok(None)` if the key does not exist. + /// - Returns `Ok(Some(old))` if the key with the given version already exists. + pub fn get_or_remove_at_height<'a, 'b: 'a>( + &'a self, + version: Version, + height: Height, + key: &'b [u8], + trailer: A::Trailer, + ) -> Result>, Error> { + self.check_height_and_ro(height)?; + + self + .update( + version, + trailer, + height.into(), + Key::Remove(key), + Option::>::None, + Ordering::Relaxed, + Ordering::Relaxed, + Inserter::default(), + false, + ) + .map(|res| match res { + Either::Left(old) => match old { + Some(old) => { + if old.is_removed() { + None + } else { + Some(EntryRef(old)) + } + } + None => None, + }, + _ => unreachable!("get_or_remove does not use CAS, so it must return `Either::Left`"), + }) + .map_err(|e| e.expect_right("must be map::Error")) + } + + /// Gets or removes the key-value pair if it exists. + /// Unlike [`compare_remove`](SkipList::compare_remove), this method will not remove the value if the key with the given version already exists. + /// + /// - Returns `Ok(None)` if the key does not exist. + /// - Returns `Ok(Some(old))` if the key with the given version already exists. 
+ /// + /// This method is useful when you want to get_or_remove a key and you know the key size but you do not have the key + /// at this moment. + /// + /// A placeholder will be inserted first, then you will get an [`VacantBuffer`], + /// and you must fill the buffer with bytes later in the closure. + #[allow(dead_code)] + pub fn get_or_remove_with_builder<'a, 'b: 'a, E>( + &'a self, + version: Version, + key_builder: KeyBuilder) -> Result<(), E>>, + trailer: A::Trailer, + ) -> Result>, Either> { + self.get_or_remove_at_height_with_builder(version, self.random_height(), key_builder, trailer) + } + + /// Gets or removes the key-value pair if it exists. + /// Unlike [`compare_remove_at_height`](SkipList::compare_remove_at_height), this method will not remove the value if the key with the given version already exists. + /// + /// - Returns `Ok(None)` if the key does not exist. + /// - Returns `Ok(Some(old))` if the key with the given version already exists. + /// + /// This method is useful when you want to get_or_remove a key and you know the key size but you do not have the key + /// at this moment. + /// + /// A placeholder will be inserted first, then you will get an [`VacantBuffer`], + /// and you must fill the buffer with bytes later in the closure. 
+ pub fn get_or_remove_at_height_with_builder<'a, 'b: 'a, E>( + &'a self, + version: Version, + height: Height, + key_builder: KeyBuilder) -> Result<(), E>>, + trailer: A::Trailer, + ) -> Result>, Either> { + self.check_height_and_ro(height).map_err(Either::Right)?; + + let (key_size, key) = key_builder.into_components(); + let (offset, vk) = self.arena.fetch_vacant_key(u32::from(key_size), key)?; + let key = Key::RemoveVacant { offset, buf: vk }; + self + .update( + version, + trailer, + height.into(), + key, + Option::>::None, + Ordering::Relaxed, + Ordering::Relaxed, + Inserter::default(), + false, + ) + .map(|res| match res { + Either::Left(old) => match old { + Some(old) => { + if old.is_removed() { + None + } else { + Some(EntryRef(old)) + } + } + None => None, + }, + _ => unreachable!("get_or_remove does not use CAS, so it must return `Either::Left`"), + }) + .map_err(|e| Either::Right(e.expect_right("must be map::Error"))) + } +} diff --git a/src/base/entry.rs b/src/base/entry.rs new file mode 100644 index 0000000..7949335 --- /dev/null +++ b/src/base/entry.rs @@ -0,0 +1,346 @@ +use super::*; + +/// A versioned entry reference of the skipmap. +/// +/// Compared to the [`EntryRef`], this one's value can be `None` which means the entry is removed. 
+#[derive(Debug)] +pub struct VersionedEntryRef<'a, A: Allocator> { + pub(super) arena: &'a A, + pub(super) key: &'a [u8], + pub(super) value_part_pointer: ValuePartPointer, + pub(super) version: Version, + pub(super) ptr: ::Pointer, +} + +impl<'a, A: Allocator> Clone for VersionedEntryRef<'a, A> { + fn clone(&self) -> Self { + *self + } +} + +impl<'a, A: Allocator> Copy for VersionedEntryRef<'a, A> {} + +impl<'a, A> VersionedEntryRef<'a, A> +where + A: Allocator, + A::Node: WithTrailer, +{ + /// Returns the trailer of the entry + #[inline] + pub fn trailer(&self) -> &A::Trailer { + unsafe { + let node = self.ptr.as_ref(self.arena); + let trailer = node.get_trailer_by_offset(self.arena, self.value_part_pointer.trailer_offset); + trailer + } + } +} + +impl<'a, A: Allocator> VersionedEntryRef<'a, A> { + /// Returns the reference to the key + #[inline] + pub const fn key(&self) -> &[u8] { + self.key + } + + /// Returns the reference to the value, `None` means the entry is removed. + #[inline] + pub fn value(&self) -> Option<&[u8]> { + unsafe { + let node = self.ptr.as_ref(self.arena); + let value = node.get_value_by_value_offset( + self.arena, + self.value_part_pointer.value_offset, + self.value_part_pointer.value_len, + ); + value + } + } + + /// Returns if the entry is marked as removed + #[inline] + pub fn is_removed(&self) -> bool { + self.value().is_none() + } + + /// Returns the owned versioned entry, + /// feel free to clone the entry if needed, no allocation and no deep clone will be made. 
+ #[inline] + pub fn to_owned(self) -> VersionedEntry { + VersionedEntry { + arena: self.arena.clone(), + ptr: self.ptr, + value_part_pointer: self.value_part_pointer, + } + } + + /// Returns the version of the entry + #[inline] + pub fn version(&self) -> Version { + self.version + } +} + +impl<'a, A: Allocator> From> for VersionedEntry { + fn from(entry: VersionedEntryRef<'a, A>) -> Self { + entry.to_owned() + } +} + +impl<'a, A: Allocator> VersionedEntryRef<'a, A> { + #[inline] + pub(super) fn from_node( + node_ptr: ::Pointer, + arena: &'a A, + ) -> VersionedEntryRef<'a, A> { + unsafe { + let node = node_ptr.as_ref(arena); + let vp = node.trailer_offset_and_value_size(); + VersionedEntryRef { + key: node.get_key(arena), + value_part_pointer: vp, + arena, + ptr: node_ptr, + version: node.version(), + } + } + } + + #[inline] + pub(super) fn from_node_with_pointer( + node_ptr: ::Pointer, + arena: &'a A, + pointer: ValuePartPointer, + ) -> VersionedEntryRef<'a, A> { + unsafe { + let node = node_ptr.as_ref(arena); + VersionedEntryRef { + key: node.get_key(arena), + value_part_pointer: pointer, + arena, + ptr: node_ptr, + version: node.version(), + } + } + } +} + +/// An owned versioned entry of the skipmap. +/// +/// Compared to the [`Entry`], this one's value can be `None` which means the entry is removed. 
+#[derive(Debug)] +pub struct VersionedEntry { + pub(super) arena: A, + pub(super) ptr: ::Pointer, + pub(super) value_part_pointer: ValuePartPointer, +} + +impl Clone for VersionedEntry { + fn clone(&self) -> Self { + Self { + arena: self.arena.clone(), + ptr: self.ptr, + value_part_pointer: self.value_part_pointer, + } + } +} + +impl<'a, A: Allocator> From<&'a VersionedEntry> for VersionedEntryRef<'a, A> { + fn from(entry: &'a VersionedEntry) -> VersionedEntryRef<'a, A> { + entry.borrow() + } +} + +impl VersionedEntry +where + A: Allocator, + A::Node: WithTrailer, +{ + /// Returns the trailer of the entry + #[inline] + pub fn trailer(&self) -> &A::Trailer { + unsafe { + let node = self.ptr.as_ref(&self.arena); + let trailer = node.get_trailer_by_offset(&self.arena, self.value_part_pointer.trailer_offset); + trailer + } + } +} + +impl VersionedEntry { + /// Returns the reference to the key + #[inline] + pub fn key(&self) -> &[u8] { + unsafe { + let node = self.ptr.as_ref(&self.arena); + node.get_key(&self.arena) + } + } + + /// Returns the reference to the value, `None` means the entry is removed. + #[inline] + pub fn value(&self) -> Option<&[u8]> { + unsafe { + let node = self.ptr.as_ref(&self.arena); + let value = node.get_value_by_value_offset( + &self.arena, + self.value_part_pointer.value_offset, + self.value_part_pointer.value_len, + ); + value + } + } + + /// Returns the borrowed entry reference + #[inline] + pub fn borrow(&self) -> VersionedEntryRef<'_, A> { + VersionedEntryRef { + arena: &self.arena, + key: self.key(), + value_part_pointer: self.value_part_pointer, + ptr: self.ptr, + version: self.version(), + } + } + + /// Returns the version of the entry + #[inline] + pub fn version(&self) -> Version { + unsafe { + let node = self.ptr.as_ref(&self.arena); + node.version() + } + } +} + +/// An owned entry of the skipmap. +/// +/// Compared to the [`VersionedEntry`], this one's value cannot be `None`. 
+#[derive(Debug)] +pub struct Entry(VersionedEntry); + +impl Clone for Entry { + fn clone(&self) -> Self { + Self(self.0.clone()) + } +} + +impl<'a, A: Allocator> From<&'a Entry> for EntryRef<'a, A> { + fn from(entry: &'a Entry) -> Self { + entry.borrow() + } +} + +impl Entry +where + A: Allocator, + A::Node: WithTrailer, +{ + /// Returns the trailer of the entry + #[inline] + pub fn trailer(&self) -> &A::Trailer { + self.0.trailer() + } +} + +impl Entry +where + A: Allocator, + A::Node: WithVersion, +{ + /// Returns the version of the entry + #[inline] + pub fn version(&self) -> Version { + self.0.version() + } +} + +impl Entry { + /// Returns the reference to the key + #[inline] + pub fn key(&self) -> &[u8] { + self.0.key() + } + + /// Returns the reference to the value + #[inline] + pub fn value(&self) -> &[u8] { + match self.0.value() { + Some(value) => value, + None => panic!("Entry's value cannot be `None`"), + } + } + + /// Returns the borrowed entry reference + #[inline] + pub fn borrow(&self) -> EntryRef<'_, A> { + EntryRef(self.0.borrow()) + } +} + +/// An entry reference to the skipmap's entry. +/// +/// Compared to the [`VersionedEntryRef`], this one's value cannot be `None`. 
+#[derive(Debug)] +pub struct EntryRef<'a, A: Allocator>(pub(crate) VersionedEntryRef<'a, A>); + +impl<'a, A: Allocator> Clone for EntryRef<'a, A> { + fn clone(&self) -> Self { + *self + } +} + +impl<'a, A: Allocator> Copy for EntryRef<'a, A> {} + +impl<'a, A: Allocator> From> for Entry { + fn from(entry: EntryRef<'a, A>) -> Self { + entry.to_owned() + } +} + +impl<'a, A> EntryRef<'a, A> +where + A: Allocator, + A::Node: WithTrailer, +{ + /// Returns the trailer of the entry + #[inline] + pub fn trailer(&self) -> &A::Trailer { + self.0.trailer() + } +} + +impl<'a, A> EntryRef<'a, A> +where + A: Allocator, + A::Node: WithVersion, +{ + /// Returns the version of the entry + #[inline] + pub fn version(&self) -> Version { + self.0.version() + } +} + +impl<'a, A: Allocator> EntryRef<'a, A> { + /// Returns the reference to the key + #[inline] + pub const fn key(&self) -> &[u8] { + self.0.key() + } + + /// Returns the reference to the value, `None` means the entry is removed. + #[inline] + pub fn value(&self) -> &[u8] { + match self.0.value() { + Some(value) => value, + None => panic!("EntryRef's value cannot be `None`"), + } + } + + /// Returns the owned entry, feel free to clone the entry if needed, no allocation and no deep clone will be made. 
+ #[inline] + pub fn to_owned(self) -> Entry { + Entry(self.0.to_owned()) + } +} diff --git a/src/map/iterator.rs b/src/base/iterator.rs similarity index 66% rename from src/map/iterator.rs rename to src/base/iterator.rs index aae9355..f2bf050 100644 --- a/src/map/iterator.rs +++ b/src/base/iterator.rs @@ -1,4 +1,4 @@ -use core::ops::RangeFull; +use core::{borrow::Borrow, ops::RangeFull}; use super::*; diff --git a/src/map/iterator/all_versions.rs b/src/base/iterator/all_versions.rs similarity index 57% rename from src/map/iterator/all_versions.rs rename to src/base/iterator/all_versions.rs index 2f1e976..5375b24 100644 --- a/src/map/iterator/all_versions.rs +++ b/src/base/iterator/all_versions.rs @@ -2,38 +2,40 @@ use super::*; /// An iterator over the skipmap. The current state of the iterator can be cloned by /// simply value copying the struct. -pub struct AllVersionsIter<'a, T, C, Q: ?Sized = &'static [u8], R = core::ops::RangeFull> { - pub(super) map: &'a SkipMap, - pub(super) nd: NodePtr, - pub(super) version: u64, +pub struct AllVersionsIter<'a, A: Allocator, C, Q: ?Sized = &'static [u8], R = core::ops::RangeFull> +{ + pub(super) map: &'a SkipList, + pub(super) nd: ::Pointer, + pub(super) version: Version, pub(super) range: R, pub(super) all_versions: bool, - pub(super) last: Option>, + pub(super) last: Option>, pub(super) _phantom: core::marker::PhantomData, } -impl<'a, R: Clone, Q: Clone, T: Clone, C> Clone for AllVersionsIter<'a, T, C, Q, R> { +impl<'a, A: Allocator, C, Q: Clone, R: Clone> Clone for AllVersionsIter<'a, A, C, Q, R> { fn clone(&self) -> Self { Self { map: self.map, nd: self.nd, version: self.version, range: self.range.clone(), - last: self.last.clone(), + last: self.last, all_versions: self.all_versions, _phantom: core::marker::PhantomData, } } } -impl<'a, R: Copy, Q: Copy, T: Copy, C> Copy for AllVersionsIter<'a, T, C, Q, R> {} +impl<'a, A: Allocator, C, Q: Copy, R: Copy> Copy for AllVersionsIter<'a, A, C, Q, R> {} -impl<'a, T, C> 
AllVersionsIter<'a, T, C> +impl<'a, A, C> AllVersionsIter<'a, A, C> where + A: Allocator, C: Comparator, { #[inline] - pub(crate) const fn new(version: u64, map: &'a SkipMap, all_versions: bool) -> Self { + pub(crate) const fn new(version: Version, map: &'a SkipList, all_versions: bool) -> Self { Self { map, nd: map.head, @@ -46,13 +48,13 @@ where } } -impl<'a, Q, R, T, C> AllVersionsIter<'a, T, C, Q, R> +impl<'a, A, C, Q, R> AllVersionsIter<'a, A, C, Q, R> where - &'a [u8]: PartialOrd, - Q: ?Sized + PartialOrd<&'a [u8]>, + A: Allocator, + Q: ?Sized + Borrow<[u8]>, { #[inline] - pub(crate) fn range(version: u64, map: &'a SkipMap, r: R, all_versions: bool) -> Self { + pub(crate) fn range(version: Version, map: &'a SkipList, r: R, all_versions: bool) -> Self { Self { map, nd: map.head, @@ -65,7 +67,7 @@ where } } -impl<'a, Q: ?Sized, R, T, C> AllVersionsIter<'a, T, C, Q, R> { +impl<'a, A: Allocator, C, Q: ?Sized, R> AllVersionsIter<'a, A, C, Q, R> { /// Returns the bounds of the iterator. #[inline] pub const fn bounds(&self) -> &R { @@ -74,22 +76,21 @@ impl<'a, Q: ?Sized, R, T, C> AllVersionsIter<'a, T, C, Q, R> { /// Returns the entry at the current position of the iterator. #[inline] - pub const fn entry(&self) -> Option<&VersionedEntryRef<'a, T>> { + pub const fn entry(&self) -> Option<&VersionedEntryRef<'a, A>> { self.last.as_ref() } } -impl<'a, Q, R, T, C> AllVersionsIter<'a, T, C, Q, R> +impl<'a, A, C, Q, R> AllVersionsIter<'a, A, C, Q, R> where + A: Allocator, C: Comparator, - T: Trailer, - &'a [u8]: PartialOrd, - Q: ?Sized + PartialOrd<&'a [u8]>, + Q: ?Sized + Borrow<[u8]>, R: RangeBounds, { /// Moves the iterator to the highest element whose key is below the given bound. /// If no such element is found then `None` is returned. 
- pub fn seek_upper_bound(&mut self, upper: Bound<&[u8]>) -> Option> { + pub fn seek_upper_bound(&mut self, upper: Bound<&[u8]>) -> Option> { match upper { Bound::Included(key) => self.seek_le(key).map(|n| { let ent = VersionedEntryRef::from_node(n, &self.map.arena); @@ -107,7 +108,7 @@ where /// Moves the iterator to the lowest element whose key is above the given bound. /// If no such element is found then `None` is returned. - pub fn seek_lower_bound(&mut self, lower: Bound<&[u8]>) -> Option> { + pub fn seek_lower_bound(&mut self, lower: Bound<&[u8]>) -> Option> { match lower { Bound::Included(key) => self.seek_ge(key).map(|n| { let ent = VersionedEntryRef::from_node(n, &self.map.arena); @@ -125,18 +126,18 @@ where /// Advances to the next position. Returns the key and value if the /// iterator is pointing at a valid entry, and `None` otherwise. - fn next_in(&mut self) -> Option> { + fn next_in(&mut self) -> Option> { loop { unsafe { - self.nd = self.map.get_next(self.nd, 0); + self.nd = self.map.get_next(self.nd, 0, !self.all_versions); - if self.nd.is_null() || self.nd.ptr == self.map.tail.ptr { + if self.nd.is_null() || self.nd.offset() == self.map.tail.offset() { return None; } - let node = self.nd.as_ref(); - let (trailer, value) = node.get_value_and_trailer(&self.map.arena); - if trailer.version() > self.version { + let node = self.nd.as_ref(&self.map.arena); + let (value, pointer) = node.get_value_and_trailer_with_pointer(&self.map.arena); + if node.version() > self.version { continue; } @@ -154,14 +155,12 @@ where } } - if self.map.cmp.contains(&self.range, nk) { - let ent = VersionedEntryRef { - arena: &self.map.arena, - key: nk, - trailer, - value, - ptr: self.nd, - }; + if self.map.cmp.contains( + self.range.start_bound().map(|b| b.borrow()), + self.range.end_bound().map(|b| b.borrow()), + nk, + ) { + let ent = VersionedEntryRef::from_node_with_pointer(self.nd, &self.map.arena, pointer); self.last = Some(ent); return Some(ent); } @@ -171,18 +170,18 @@ 
where /// Advances to the prev position. Returns the key and value if the /// iterator is pointing at a valid entry, and `None` otherwise. - fn prev(&mut self) -> Option> { + fn prev(&mut self) -> Option> { loop { unsafe { - self.nd = self.map.get_prev(self.nd, 0); + self.nd = self.map.get_prev(self.nd, 0, !self.all_versions); - if self.nd.is_null() || self.nd.ptr == self.map.head.ptr { + if self.nd.is_null() || self.nd.offset() == self.map.head.offset() { return None; } - let node = self.nd.as_ref(); - let (trailer, value) = node.get_value_and_trailer(&self.map.arena); - if trailer.version() > self.version { + let node = self.nd.as_ref(&self.map.arena); + let (value, pointer) = node.get_value_and_trailer_with_pointer(&self.map.arena); + if node.version() > self.version { continue; } @@ -200,14 +199,12 @@ where } } - if self.map.cmp.contains(&self.range, nk) { - let ent = VersionedEntryRef { - arena: &self.map.arena, - key: nk, - trailer, - value, - ptr: self.nd, - }; + if self.map.cmp.contains( + self.range.start_bound().map(Borrow::borrow), + self.range.end_bound().map(Borrow::borrow), + nk, + ) { + let ent = VersionedEntryRef::from_node_with_pointer(self.nd, &self.map.arena, pointer); self.last = Some(ent); return Some(ent); } @@ -218,38 +215,42 @@ where /// Moves the iterator to the first entry whose key is greater than or /// equal to the given key. Returns the key and value if the iterator is /// pointing at a valid entry, and `None` otherwise. 
- fn seek_ge(&mut self, key: &[u8]) -> Option> { - self.nd = self.map.ge(self.version, key)?; - if self.nd.is_null() || self.nd.ptr == self.map.tail.ptr { + fn seek_ge(&mut self, key: &[u8]) -> Option<::Pointer> { + self.nd = self.map.ge(self.version, key, !self.all_versions)?; + if self.nd.is_null() || self.nd.offset() == self.map.tail.offset() { return None; } loop { unsafe { // Safety: the nd is valid, we already check this - let node = self.nd.as_ref(); + let node = self.nd.as_ref(&self.map.arena); // Safety: the node is allocated by the map's arena, so the key is valid let nk = node.get_key(&self.map.arena); - if self.map.cmp.contains(&self.range, nk) { + if self.map.cmp.contains( + self.range.start_bound().map(Borrow::borrow), + self.range.end_bound().map(Borrow::borrow), + nk, + ) { return Some(self.nd); } else { let upper = self.range.end_bound(); match upper { Bound::Included(upper) => { - if upper.lt(&nk) { + if self.map.cmp.compare(upper.borrow(), nk).is_lt() { return None; } } Bound::Excluded(upper) => { - if upper.le(&nk) { + if self.map.cmp.compare(upper.borrow(), nk).is_le() { return None; } } Bound::Unbounded => {} } - self.nd = self.map.get_next(self.nd, 0); + self.nd = self.map.get_next(self.nd, 0, !self.all_versions); } } } @@ -258,39 +259,43 @@ where /// Moves the iterator to the first entry whose key is greater than /// the given key. Returns the key and value if the iterator is /// pointing at a valid entry, and `None` otherwise. 
- fn seek_gt(&mut self, key: &[u8]) -> Option> { - self.nd = self.map.gt(self.version, key)?; + fn seek_gt(&mut self, key: &[u8]) -> Option<::Pointer> { + self.nd = self.map.gt(self.version, key, self.all_versions)?; - if self.nd.is_null() || self.nd.ptr == self.map.tail.ptr { + if self.nd.is_null() || self.nd.offset() == self.map.tail.offset() { return None; } loop { unsafe { // Safety: the nd is valid, we already check this - let node = self.nd.as_ref(); + let node = self.nd.as_ref(&self.map.arena); // Safety: the node is allocated by the map's arena, so the key is valid let nk = node.get_key(&self.map.arena); - if self.map.cmp.contains(&self.range, nk) { + if self.map.cmp.contains( + self.range.start_bound().map(Borrow::borrow), + self.range.end_bound().map(Borrow::borrow), + nk, + ) { return Some(self.nd); } else { let upper = self.range.end_bound(); match upper { Bound::Included(upper) => { - if upper.lt(&nk) { + if self.map.cmp.compare(upper.borrow(), nk).is_lt() { return None; } } Bound::Excluded(upper) => { - if upper.le(&nk) { + if self.map.cmp.compare(upper.borrow(), nk).is_le() { return None; } } Bound::Unbounded => {} } - self.nd = self.map.get_next(self.nd, 0); + self.nd = self.map.get_next(self.nd, 0, !self.all_versions); } } } @@ -299,36 +304,40 @@ where /// Moves the iterator to the first entry whose key is less than or /// equal to the given key. Returns the key and value if the iterator is /// pointing at a valid entry, and `None` otherwise. 
- fn seek_le(&mut self, key: &[u8]) -> Option> { - self.nd = self.map.le(self.version, key)?; + fn seek_le(&mut self, key: &[u8]) -> Option<::Pointer> { + self.nd = self.map.le(self.version, key, self.all_versions)?; loop { unsafe { // Safety: the nd is valid, we already check this on line 75 - let node = self.nd.as_ref(); + let node = self.nd.as_ref(&self.map.arena); // Safety: the node is allocated by the map's arena, so the key is valid let nk = node.get_key(&self.map.arena); - if self.map.cmp.contains(&self.range, nk) { + if self.map.cmp.contains( + self.range.start_bound().map(Borrow::borrow), + self.range.end_bound().map(Borrow::borrow), + nk, + ) { return Some(self.nd); } else { let lower = self.range.start_bound(); match lower { Bound::Included(lower) => { - if lower.gt(&nk) { + if self.map.cmp.compare(lower.borrow(), nk).is_gt() { return None; } } Bound::Excluded(lower) => { - if lower.ge(&nk) { + if self.map.cmp.compare(lower.borrow(), nk).is_ge() { return None; } } Bound::Unbounded => {} } - self.nd = self.map.get_prev(self.nd, 0); + self.nd = self.map.get_prev(self.nd, 0, !self.all_versions); } } } @@ -337,37 +346,41 @@ where /// Moves the iterator to the last entry whose key is less than the given /// key. Returns the key and value if the iterator is pointing at a valid entry, /// and `None` otherwise. - fn seek_lt(&mut self, key: &[u8]) -> Option> { + fn seek_lt(&mut self, key: &[u8]) -> Option<::Pointer> { // NB: the top-level AllVersionsIter has already adjusted key based on // the upper-bound. 
- self.nd = self.map.lt(self.version, key)?; + self.nd = self.map.lt(self.version, key, self.all_versions)?; loop { unsafe { // Safety: the nd is valid, we already check this on line 75 - let node = self.nd.as_ref(); + let node = self.nd.as_ref(&self.map.arena); // Safety: the node is allocated by the map's arena, so the key is valid let nk = node.get_key(&self.map.arena); - if self.map.cmp.contains(&self.range, nk) { + if self.map.cmp.contains( + self.range.start_bound().map(Borrow::borrow), + self.range.end_bound().map(Borrow::borrow), + nk, + ) { return Some(self.nd); } else { let lower = self.range.start_bound(); match lower { Bound::Included(lower) => { - if lower.gt(&nk) { + if self.map.cmp.compare(lower.borrow(), nk).is_gt() { return None; } } Bound::Excluded(lower) => { - if lower.ge(&nk) { + if self.map.cmp.compare(lower.borrow(), nk).is_ge() { return None; } } Bound::Unbounded => {} } - self.nd = self.map.get_prev(self.nd, 0); + self.nd = self.map.get_prev(self.nd, 0, !self.all_versions); } } } @@ -375,97 +388,92 @@ where /// Seeks position at the first entry in map. Returns the key and value /// if the iterator is pointing at a valid entry, and `None` otherwise. 
- fn first(&mut self) -> Option> { - self.nd = self.map.first_in(self.version)?; + fn first(&mut self) -> Option> { + self.nd = self.map.first_in(self.version, self.all_versions)?; loop { - if self.nd.is_null() || self.nd.ptr == self.map.tail.ptr { + if self.nd.is_null() || self.nd.offset() == self.map.tail.offset() { return None; } unsafe { - let node = self.nd.as_ref(); + let node = self.nd.as_ref(&self.map.arena); let nk = node.get_key(&self.map.arena); - let (trailer, value) = node.get_value_and_trailer(&self.map.arena); + let (value, pointer) = node.get_value_and_trailer_with_pointer(&self.map.arena); - if trailer.version() > self.version { - self.nd = self.map.get_next(self.nd, 0); + if node.version() > self.version { + self.nd = self.map.get_next(self.nd, 0, !self.all_versions); continue; } if !self.all_versions && value.is_none() { - self.nd = self.map.get_next(self.nd, 0); + self.nd = self.map.get_next(self.nd, 0, !self.all_versions); continue; } - if self.map.cmp.contains(&self.range, nk) { - let ent = VersionedEntryRef { - arena: &self.map.arena, - key: nk, - trailer, - value, - ptr: self.nd, - }; + if self.map.cmp.contains( + self.range.start_bound().map(Borrow::borrow), + self.range.end_bound().map(Borrow::borrow), + nk, + ) { + let ent = VersionedEntryRef::from_node_with_pointer(self.nd, &self.map.arena, pointer); self.last = Some(ent); return Some(ent); } - self.nd = self.map.get_next(self.nd, 0); + self.nd = self.map.get_next(self.nd, 0, !self.all_versions); } } } /// Seeks position at the last entry in the iterator. Returns the key and value if /// the iterator is pointing at a valid entry, and `None` otherwise. 
- fn last(&mut self) -> Option> { - self.nd = self.map.last_in(self.version)?; + fn last(&mut self) -> Option> { + self.nd = self.map.last_in(self.version, self.all_versions)?; loop { unsafe { - if self.nd.is_null() || self.nd.ptr == self.map.head.ptr { + if self.nd.is_null() || self.nd.offset() == self.map.head.offset() { return None; } - let node = self.nd.as_ref(); - let (trailer, value) = node.get_value_and_trailer(&self.map.arena); + let node = self.nd.as_ref(&self.map.arena); + let (value, pointer) = node.get_value_and_trailer_with_pointer(&self.map.arena); - if trailer.version() > self.version { - self.nd = self.map.get_prev(self.nd, 0); + if node.version() > self.version { + self.nd = self.map.get_prev(self.nd, 0, !self.all_versions); continue; } if !self.all_versions && value.is_none() { - self.nd = self.map.get_prev(self.nd, 0); + self.nd = self.map.get_prev(self.nd, 0, !self.all_versions); continue; } let nk = node.get_key(&self.map.arena); - if self.map.cmp.contains(&self.range, nk) { - let ent = VersionedEntryRef { - arena: &self.map.arena, - key: nk, - trailer, - value, - ptr: self.nd, - }; + if self.map.cmp.contains( + self.range.start_bound().map(Borrow::borrow), + self.range.end_bound().map(Borrow::borrow), + nk, + ) { + let ent = VersionedEntryRef::from_node_with_pointer(self.nd, &self.map.arena, pointer); return Some(ent); } - self.nd = self.map.get_prev(self.nd, 0); + self.nd = self.map.get_prev(self.nd, 0, !self.all_versions); } } } } -impl<'a, Q, R, T, C> Iterator for AllVersionsIter<'a, T, C, Q, R> +impl<'a, A, C, Q, R> Iterator for AllVersionsIter<'a, A, C, Q, R> where + A: Allocator, C: Comparator, - T: Trailer, - &'a [u8]: PartialOrd, - Q: ?Sized + PartialOrd<&'a [u8]>, + Q: ?Sized + Borrow<[u8]>, R: RangeBounds, { - type Item = VersionedEntryRef<'a, T>; + type Item = VersionedEntryRef<'a, A>; #[inline] fn next(&mut self) -> Option { @@ -508,12 +516,11 @@ where } } -impl<'a, Q, R, T, C> DoubleEndedIterator for AllVersionsIter<'a, T, C, Q, 
R> +impl<'a, A, C, Q, R> DoubleEndedIterator for AllVersionsIter<'a, A, C, Q, R> where + A: Allocator, C: Comparator, - T: Trailer, - &'a [u8]: PartialOrd, - Q: ?Sized + PartialOrd<&'a [u8]>, + Q: ?Sized + Borrow<[u8]>, R: RangeBounds, { fn next_back(&mut self) -> Option { diff --git a/src/map/iterator/iter.rs b/src/base/iterator/iter.rs similarity index 56% rename from src/map/iterator/iter.rs rename to src/base/iterator/iter.rs index 67fa4b8..a84edcf 100644 --- a/src/map/iterator/iter.rs +++ b/src/base/iterator/iter.rs @@ -2,40 +2,44 @@ use super::*; /// An iterator over the skipmap. The current state of the iterator can be cloned by /// simply value copying the struct. -pub struct Iter<'a, T, C, Q: ?Sized = &'static [u8], R = core::ops::RangeFull>( - AllVersionsIter<'a, T, C, Q, R>, +pub struct Iter<'a, A: Allocator, C, Q: ?Sized = &'static [u8], R = core::ops::RangeFull>( + AllVersionsIter<'a, A, C, Q, R>, ); -impl<'a, R: Clone, Q: Clone, T: Clone, C> Clone for Iter<'a, T, C, Q, R> { +impl<'a, A: Allocator, C, R: Clone, Q: Clone> Clone for Iter<'a, A, C, Q, R> { fn clone(&self) -> Self { Self(self.0.clone()) } } -impl<'a, R: Copy, Q: Copy, T: Copy, C> Copy for Iter<'a, T, C, Q, R> {} +impl<'a, A: Allocator, C, R: Copy, Q: Copy> Copy for Iter<'a, A, C, Q, R> {} -impl<'a, T, C> Iter<'a, T, C> +impl<'a, A, C> Iter<'a, A, C> where + A: Allocator, C: Comparator, { #[inline] - pub(crate) const fn new(version: u64, map: &'a SkipMap) -> Self { + pub(crate) const fn new(version: Version, map: &'a SkipList) -> Self { Self(AllVersionsIter::new(version, map, false)) } } -impl<'a, Q, R, T, C> Iter<'a, T, C, Q, R> +impl<'a, A, C, Q, R> Iter<'a, A, C, Q, R> where - &'a [u8]: PartialOrd, - Q: ?Sized + PartialOrd<&'a [u8]>, + A: Allocator, + Q: ?Sized + Borrow<[u8]>, { #[inline] - pub(crate) fn range(version: u64, map: &'a SkipMap, r: R) -> Self { + pub(crate) fn range(version: Version, map: &'a SkipList, r: R) -> Self { Self(AllVersionsIter::range(version, map, r, false)) } } 
-impl<'a, Q: ?Sized, R, T, C> Iter<'a, T, C, Q, R> { +impl<'a, A, C, Q: ?Sized, R> Iter<'a, A, C, Q, R> +where + A: Allocator, +{ /// Returns the bounds of the iterator. #[inline] pub const fn bounds(&self) -> &R { @@ -43,44 +47,45 @@ impl<'a, Q: ?Sized, R, T, C> Iter<'a, T, C, Q, R> { } } -impl<'a, Q: ?Sized, R, T: Clone, C> Iter<'a, T, C, Q, R> { +impl<'a, A, C, Q: ?Sized, R> Iter<'a, A, C, Q, R> +where + A: Allocator, +{ /// Returns the entry at the current position of the iterator. #[inline] - pub fn entry(&self) -> Option> { - self.0.last.clone().map(EntryRef) + pub fn entry(&self) -> Option> { + self.0.last.map(EntryRef) } } -impl<'a, Q, R, T, C> Iter<'a, T, C, Q, R> +impl<'a, A, C, Q, R> Iter<'a, A, C, Q, R> where + A: Allocator, C: Comparator, - T: Trailer, - &'a [u8]: PartialOrd, - Q: ?Sized + PartialOrd<&'a [u8]>, + Q: ?Sized + Borrow<[u8]>, R: RangeBounds, { /// Moves the iterator to the highest element whose key is below the given bound. /// If no such element is found then `None` is returned. - pub fn seek_upper_bound(&mut self, upper: Bound<&[u8]>) -> Option> { + pub fn seek_upper_bound(&mut self, upper: Bound<&[u8]>) -> Option> { self.0.seek_upper_bound(upper).map(EntryRef) } /// Moves the iterator to the lowest element whose key is above the given bound. /// If no such element is found then `None` is returned. 
- pub fn seek_lower_bound(&mut self, lower: Bound<&[u8]>) -> Option> { + pub fn seek_lower_bound(&mut self, lower: Bound<&[u8]>) -> Option> { self.0.seek_lower_bound(lower).map(EntryRef) } } -impl<'a, Q, R, T, C> Iterator for Iter<'a, T, C, Q, R> +impl<'a, A, C, Q, R> Iterator for Iter<'a, A, C, Q, R> where + A: Allocator, C: Comparator, - T: Trailer, - &'a [u8]: PartialOrd, - Q: ?Sized + PartialOrd<&'a [u8]>, + Q: ?Sized + Borrow<[u8]>, R: RangeBounds, { - type Item = EntryRef<'a, T>; + type Item = EntryRef<'a, A>; #[inline] fn next(&mut self) -> Option { @@ -96,12 +101,11 @@ where } } -impl<'a, Q, R, T, C> DoubleEndedIterator for Iter<'a, T, C, Q, R> +impl<'a, A, C, Q, R> DoubleEndedIterator for Iter<'a, A, C, Q, R> where + A: Allocator, C: Comparator, - T: Trailer, - &'a [u8]: PartialOrd, - Q: ?Sized + PartialOrd<&'a [u8]>, + Q: ?Sized + Borrow<[u8]>, R: RangeBounds, { fn next_back(&mut self) -> Option { diff --git a/src/map/error.rs b/src/error.rs similarity index 71% rename from src/map/error.rs rename to src/error.rs index 75e615c..47215f7 100644 --- a/src/map/error.rs +++ b/src/error.rs @@ -1,7 +1,9 @@ +use super::Height; + /// Error type for the [`SkipMap`](crate::SkipMap). /// /// [`SkipMap`]: crate::SkipMap -#[derive(Debug, Clone, PartialEq, Eq)] +#[derive(Debug)] pub enum Error { /// Indicates that the arena is full Arena(rarena_allocator::Error), @@ -15,8 +17,22 @@ pub enum Error { /// Indicates that the entry is too large to be stored in the [`SkipMap`](super::SkipMap). EntryTooLarge(u64), + /// Indicates that the height of the [`SkipMap`](super::SkipMap) is too large. + InvalidHeight { + /// The height of the [`SkipMap`](super::SkipMap). + height: Height, + + /// The max height of the [`SkipMap`](super::SkipMap). 
+ max_height: Height, + }, + /// Arena too small ArenaTooSmall, + + /// I/O error + #[cfg(all(feature = "memmap", not(target_family = "wasm")))] + #[cfg_attr(docsrs, doc(cfg(all(feature = "memmap", not(target_family = "wasm")))))] + IO(std::io::Error), } impl core::fmt::Display for Error { @@ -27,6 +43,12 @@ impl core::fmt::Display for Error { Self::KeyTooLarge(size) => write!(f, "key size {} is too large", size), Self::EntryTooLarge(size) => write!(f, "entry size {size} is too large",), Self::ArenaTooSmall => write!(f, "ARENA capacity is too small"), + Self::InvalidHeight { height, max_height } => write!( + f, + "given height {height} is larger than the max height {max_height} or less than 1" + ), + #[cfg(all(feature = "memmap", not(target_family = "wasm")))] + Self::IO(e) => write!(f, "{e}"), } } } @@ -46,6 +68,18 @@ impl Error { pub const fn read_only() -> Self { Self::Arena(rarena_allocator::Error::ReadOnly) } + + #[inline] + pub(crate) const fn invalid_height(height: Height, max_height: Height) -> Self { + Self::InvalidHeight { height, max_height } + } +} + +#[cfg(all(feature = "memmap", not(target_family = "wasm")))] +impl From for Error { + fn from(e: std::io::Error) -> Self { + Self::IO(e) + } } #[cfg(all(feature = "memmap", not(target_family = "wasm")))] diff --git a/src/lib.rs b/src/lib.rs index 2b2da54..3384918 100644 --- a/src/lib.rs +++ b/src/lib.rs @@ -16,12 +16,29 @@ extern crate alloc as std; #[cfg(feature = "std")] extern crate std; -use core::{cmp, ops::RangeBounds}; +use core::{ + cmp, + ops::{Bound, RangeBounds}, + ptr::NonNull, +}; + +/// Skiplist implementation. See [`SkipList`](base::SkipList) for more information. +mod base; +pub use base::{AllVersionsIter, Entry, EntryRef, Iter, VersionedEntry, VersionedEntryRef}; + +mod allocator; +pub use allocator::GenericAllocator; + +mod error; +pub use error::Error; + +/// Implementations for concurrent environments. 
+pub mod sync; -/// A map implementation based on skiplist -pub mod map; +/// Implementations for single-threaded environments. +pub mod unsync; -/// Options for the [`SkipMap`](crate::SkipMap). +/// Options for configuration. pub mod options; pub use options::Options; #[cfg(all(feature = "memmap", not(target_family = "wasm")))] @@ -30,39 +47,58 @@ pub use options::{MmapOptions, OpenOptions}; mod types; pub use types::*; +pub use among; +pub use dbutils::{Ascend, Comparator, Descend}; pub use either; -pub use map::{AllVersionsIter, SkipMap}; -pub use rarena_allocator::{Arena, Error as ArenaError}; -pub use ux2::{u27, u5}; +pub use rarena_allocator::{Allocator as ArenaAllocator, ArenaPosition, Error as ArenaError}; -const MAX_HEIGHT: usize = 32; +const MAX_HEIGHT: usize = 1 << 5; +const MIN_VERSION: Version = Version::MIN; +/// The memory format version. +const CURRENT_VERSION: u16 = 0; +/// The tombstone value size, if a node's value size is equal to this value, then it is a tombstone. +const REMOVE: u32 = u32::MAX; +const DANGLING_ZST: NonNull<()> = NonNull::dangling(); + +/// # Safety +/// - `T` must be a ZST. +#[inline] +const unsafe fn dangling_zst_ref<'a, T>() -> &'a T { + #[cfg(debug_assertions)] + if core::mem::size_of::() != 0 { + panic!("`T` must be a ZST"); + } + + // Safety: T is ZST, so it's safe to cast and deref. 
+ unsafe { &*(DANGLING_ZST.as_ptr() as *const T) } +} #[cfg(feature = "std")] -fn random_height(max_height: u8) -> u32 { +fn random_height(max_height: Height) -> Height { use rand::{thread_rng, Rng}; let mut rng = thread_rng(); let rnd: u32 = rng.gen(); let mut h = 1; - let max_height = max_height as usize; + let max_height = max_height.to_usize(); while h < max_height && rnd <= PROBABILITIES[h] { h += 1; } - h as u32 + Height::from_u8_unchecked(h as u8) } #[cfg(not(feature = "std"))] -fn random_height(max_height: u8) -> u32 { +fn random_height(max_height: Height) -> Height { use rand::{rngs::OsRng, Rng}; - let max_height = max_height as usize; + let max_height = max_height.to_usize(); let rnd: u32 = OsRng.gen(); let mut h = 1; while h < max_height && rnd <= PROBABILITIES[h] { h += 1; } - h as u32 + Height::from_u8_unchecked(h as u8) } /// Precompute the skiplist probabilities so that only a single random number @@ -84,134 +120,262 @@ const PROBABILITIES: [u32; MAX_HEIGHT] = { probabilities }; -/// Comparator is used for key-value database developers to define their own key comparison logic. -/// e.g. some key-value database developers may want to alpabetically comparation -pub trait Comparator: core::fmt::Debug { - /// Compares two byte slices. - fn compare(&self, a: &[u8], b: &[u8]) -> cmp::Ordering; - - /// Returns if a is contained in range. 
- fn contains<'a, Q>(&self, range: &impl RangeBounds, key: &'a [u8]) -> bool - where - &'a [u8]: PartialOrd, - Q: ?Sized + PartialOrd<&'a [u8]>; +#[inline] +const fn encode_value_pointer(offset: u32, val_size: u32) -> u64 { + (val_size as u64) << 32 | offset as u64 } -impl Comparator for std::sync::Arc { - #[inline] - fn compare(&self, a: &[u8], b: &[u8]) -> cmp::Ordering { - (**self).compare(a, b) - } - - #[inline] - fn contains<'a, Q>(&self, range: &impl RangeBounds, key: &'a [u8]) -> bool - where - &'a [u8]: PartialOrd, - Q: ?Sized + PartialOrd<&'a [u8]>, - { - (**self).contains(range, key) - } +#[inline] +const fn decode_value_pointer(value: u64) -> (u32, u32) { + let offset = value as u32; + let val_size = (value >> 32) as u32; + (offset, val_size) } -impl Comparator for std::rc::Rc { - #[inline] - fn compare(&self, a: &[u8], b: &[u8]) -> cmp::Ordering { - (**self).compare(a, b) - } - - #[inline] - fn contains<'a, Q>(&self, range: &impl RangeBounds, key: &'a [u8]) -> bool - where - &'a [u8]: PartialOrd, - Q: ?Sized + PartialOrd<&'a [u8]>, - { - (**self).contains(range, key) - } +#[inline] +const fn encode_key_size_and_height(key_size: u32, height: u8) -> u32 { + // first 27 bits for key_size, last 5 bits for height. + key_size << 5 | height as u32 } -impl Comparator for std::boxed::Box { - #[inline] - fn compare(&self, a: &[u8], b: &[u8]) -> cmp::Ordering { - (**self).compare(a, b) - } - - #[inline] - fn contains<'a, Q>(&self, range: &impl RangeBounds, key: &'a [u8]) -> bool - where - &'a [u8]: PartialOrd, - Q: ?Sized + PartialOrd<&'a [u8]>, - { - (**self).contains(range, key) - } +#[inline] +const fn decode_key_size_and_height(size: u32) -> (u32, u8) { + let key_size = size >> 5; + let height = (size & 0b11111) as u8; + (key_size, height) } -/// Ascend is a comparator that compares byte slices in ascending order. 
-#[derive(Clone, Copy, Debug, Default, PartialEq, Eq)] -pub struct Ascend; - -impl Comparator for Ascend { - #[inline] - fn compare(&self, a: &[u8], b: &[u8]) -> cmp::Ordering { - a.cmp(b) - } - - #[inline] - fn contains<'a, Q>(&self, range: &impl RangeBounds, key: &'a [u8]) -> bool - where - &'a [u8]: PartialOrd, - Q: ?Sized + PartialOrd<&'a [u8]>, - { - range.contains(&key) - } +macro_rules! builder { + ($($name:ident($size:ident)),+ $(,)?) => { + $( + paste::paste! { + #[doc = "A " [< $name: snake>] " builder for the [`SkipList`], which requires the " [< $name: snake>] " size for accurate allocation and a closure to build the " [< $name: snake>]] + #[derive(Copy, Clone, Debug)] + pub struct [< $name Builder >] { + size: $size, + f: F, + } + + impl [< $name Builder >] { + #[doc = "Creates a new `" [<$name Builder>] "` with the given size and builder closure."] + #[inline] + pub const fn new(size: $size, f: F) -> Self + where + F: for<'a> FnOnce(&mut VacantBuffer<'a>) -> Result<(), E>, + { + Self { size, f } + } + + #[doc = "Returns the required" [< $name: snake>] "size."] + #[inline] + pub const fn size(&self) -> $size { + self.size + } + + #[doc = "Returns the " [< $name: snake>] "builder closure."] + #[inline] + pub const fn builder(&self) -> &F { + &self.f + } + + /// Deconstructs the value builder into the size and the builder closure. + #[inline] + pub fn into_components(self) -> ($size, F) { + (self.size, self.f) + } + } + } + )* + }; } -/// Descend is a comparator that compares byte slices in descending order. 
-#[derive(Clone, Copy, Debug, Default, PartialEq, Eq)] -pub struct Descend; - -impl Comparator for Descend { - #[inline] - fn compare(&self, a: &[u8], b: &[u8]) -> cmp::Ordering { - b.cmp(a) - } - - #[inline] - fn contains<'a, Q>(&self, range: &impl RangeBounds, key: &'a [u8]) -> bool - where - &'a [u8]: PartialOrd, - Q: ?Sized + PartialOrd<&'a [u8]>, - { - range.contains(&key) - } -} +builder!(Value(u32), Key(KeySize)); /// A trait for extra information that can be stored with entry in the skiplist. /// /// # Safety -/// The implementors must ensure that they can be reconstructed from a byte slice directly. -/// e.g. struct includes `*const T` cannot be used as the trailer, because the pointer cannot be reconstructed from a byte slice directly. -pub unsafe trait Trailer: Copy + core::fmt::Debug { - /// Returns the version of the trailer. - fn version(&self) -> u64; +/// - The implementors must ensure that they can be reconstructed from a byte slice directly. +/// e.g. struct includes `*const T` cannot be used as the trailer, because the pointer is invalid +/// after restart the program. +/// - The implementors must ensure that they can be safely convert from `*const [u8]` to `*const T` +pub unsafe trait Trailer: core::fmt::Debug { + /// Returns `true` if the trailer is valid. If a trailer is not valid, it will be ignored when + /// read or iterated, but users can still access such entry through `get_versioned` or `iter_all_versions`. + fn is_valid(&self) -> bool; } -unsafe impl Trailer for u64 { - /// Returns the version of the trailer. - #[inline] - fn version(&self) -> u64 { - *self - } +macro_rules! dummy_trailer { + ($($t:ty),+ $(,)?) => { + $( + unsafe impl Trailer for $t { + #[inline] + fn is_valid(&self) -> bool { + true + } + } + + unsafe impl Trailer for [$t; N] { + #[inline] + fn is_valid(&self) -> bool { + true + } + } + )* + }; } -unsafe impl Trailer for () { - /// Returns the version of the trailer. 
- #[inline] - fn version(&self) -> u64 { - 0 +dummy_trailer!( + (), + u8, + u16, + u32, + u64, + u128, + usize, + i8, + i16, + i32, + i64, + i128, + isize, + core::sync::atomic::AtomicUsize, + core::sync::atomic::AtomicIsize, + core::sync::atomic::AtomicU8, + core::sync::atomic::AtomicI8, + core::sync::atomic::AtomicU16, + core::sync::atomic::AtomicI16, + core::sync::atomic::AtomicU32, + core::sync::atomic::AtomicI32, + core::sync::atomic::AtomicU64, + core::sync::atomic::AtomicI64, + core::sync::atomic::AtomicBool, +); + +/// Time related trailers. +#[cfg(feature = "time")] +pub mod time { + use super::Trailer; + use ::time::OffsetDateTime; + + macro_rules! methods { + ($ident:ident($inner:ident: $from:ident <-> $into:ident)) => { + impl core::fmt::Display for $ident { + fn fmt(&self, f: &mut core::fmt::Formatter<'_>) -> core::fmt::Result { + write!( + f, + "{}", + OffsetDateTime::$from(self.0).expect("valid timestamp") + ) + } + } + + impl core::fmt::Debug for $ident { + fn fmt(&self, f: &mut core::fmt::Formatter<'_>) -> core::fmt::Result { + write!( + f, + "{}", + OffsetDateTime::$from(self.0).expect("valid timestamp") + ) + } + } + + impl From<$ident> for $inner { + fn from(ts: $ident) -> Self { + ts.0 + } + } + + impl TryFrom<$inner> for $ident { + type Error = time::error::ComponentRange; + + fn try_from(value: $inner) -> Result { + OffsetDateTime::$from(value).map(|t| Self(t.$into())) + } + } + }; + } + + macro_rules! timestamp { + ($( + [$($meta:meta)*] + $ident:ident($inner:ident: $from:ident <-> $into:ident) + ),+ $(,)?) => { + $( + $( + #[$meta] + )* + #[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash)] + pub struct $ident($inner); + + methods!($ident($inner: $from <-> $into)); + + impl $ident { + /// Returns the current timestamp. 
+ #[inline] + pub fn now() -> Self { + Self(OffsetDateTime::now_utc().$into()) + } + } + )* + }; + } + + timestamp!( + [doc = "A utc timestamp [`Trailer`] implementation."] + Timestamp(i64: from_unix_timestamp <-> unix_timestamp), + [doc = "A utc timestamp with nanoseconds [`Trailer`] implementation."] + TimestampNanos(i128: from_unix_timestamp_nanos <-> unix_timestamp_nanos), + ); + + dummy_trailer!(Timestamp, TimestampNanos); + + macro_rules! ttl { + ($( + [$($meta:meta)*] + $ident:ident($inner:ident: $from:ident <-> $into:ident) + ),+ $(,)?) => { + $( + $( + #[$meta] + )* + #[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash)] + pub struct $ident($inner); + + methods!($ident($inner: $from <-> $into)); + + impl $ident { + /// Creates a new ttl. + #[inline] + pub fn new(ttl: std::time::Duration) -> Self { + Self((OffsetDateTime::now_utc() + ttl).$into()) + } + + /// Returns `true` if the ttl is expired. + #[inline] + pub fn is_expired(&self) -> bool { + OffsetDateTime::now_utc().$into() > self.0 + } + } + + unsafe impl Trailer for $ident { + #[inline] + fn is_valid(&self) -> bool { + !self.is_expired() + } + } + )* + }; } + + ttl!( + [doc = "A ttl [`Trailer`] implementation."] + Ttl(i64: from_unix_timestamp <-> unix_timestamp), + [doc = "A ttl with nanoseconds [`Trailer`] implementation."] + TtlNanos(i128: from_unix_timestamp_nanos <-> unix_timestamp_nanos), + ); } -mod sync { +mod common { #[cfg(not(feature = "loom"))] pub(crate) use core::sync::atomic::*; diff --git a/src/map.rs b/src/map.rs deleted file mode 100644 index d4f822c..0000000 --- a/src/map.rs +++ /dev/null @@ -1,2088 +0,0 @@ -use core::{ - cmp, - convert::Infallible, - marker::PhantomData, - mem, - ops::{Bound, RangeBounds}, - ptr::{self, NonNull}, -}; - -use std::boxed::Box; - -use crate::{Key, Trailer, VacantBuffer}; - -#[cfg(all(feature = "memmap", not(target_family = "wasm")))] -use error::{bad_magic_version, bad_version, invalid_data}; - -use super::{sync::*, Arena, Ascend, Comparator, 
*}; - -mod api; - -use either::Either; - -mod error; -pub use error::Error; -mod entry; -pub use entry::*; -mod iterator; -pub use iterator::*; - -use rarena_allocator::Error as ArenaError; - -#[cfg(test)] -mod tests; - -const CURRENT_VERSION: u16 = 0; - -/// The tombstone value size, if a node's value size is equal to this value, then it is a tombstone. -const REMOVE: u32 = u32::MAX; - -type UpdateOk<'a, 'b, T> = Either< - Option>, - Result, VersionedEntryRef<'a, T>>, ->; - -#[derive(Debug)] -#[repr(C)] -struct Meta { - /// The maximum MVCC version of the skiplist. CAS. - max_version: AtomicU64, - /// The minimum MVCC version of the skiplist. CAS. - min_version: AtomicU64, - len: AtomicU32, - magic_version: u16, - /// Current height. 1 <= height <= 31. CAS. - height: AtomicU8, - reserved_byte: u8, -} - -impl Meta { - #[inline] - fn new(version: u16) -> Self { - Self { - max_version: AtomicU64::new(0), - min_version: AtomicU64::new(0), - magic_version: version, - height: AtomicU8::new(1), - len: AtomicU32::new(0), - reserved_byte: 0, - } - } - - #[inline] - const fn magic_version(&self) -> u16 { - self.magic_version - } - - #[inline] - fn max_version(&self) -> u64 { - self.max_version.load(Ordering::Acquire) - } - - #[inline] - fn min_version(&self) -> u64 { - self.min_version.load(Ordering::Acquire) - } - - #[inline] - fn height(&self) -> u8 { - self.height.load(Ordering::Acquire) - } - - #[inline] - fn len(&self) -> u32 { - self.len.load(Ordering::Acquire) - } - - #[inline] - fn increase_len(&self) { - self.len.fetch_add(1, Ordering::Release); - } - - fn update_max_version(&self, version: u64) { - let mut current = self.max_version.load(Ordering::Acquire); - - loop { - if version <= current { - return; - } - - match self.max_version.compare_exchange_weak( - current, - version, - Ordering::SeqCst, - Ordering::Acquire, - ) { - Ok(_) => break, - Err(v) => current = v, - } - } - } - - fn update_min_version(&self, version: u64) { - let mut current = 
self.min_version.load(Ordering::Acquire); - - loop { - if version >= current { - return; - } - - match self.min_version.compare_exchange_weak( - current, - version, - Ordering::SeqCst, - Ordering::Acquire, - ) { - Ok(_) => break, - Err(v) => current = v, - } - } - } -} - -#[repr(C, align(8))] -pub(crate) struct AtomicValuePointer(AtomicU64); - -impl core::fmt::Debug for AtomicValuePointer { - fn fmt(&self, f: &mut core::fmt::Formatter<'_>) -> core::fmt::Result { - let (offset, len) = decode_value_pointer(self.0.load(Ordering::Relaxed)); - f.debug_struct("AtomicValuePointer") - .field("offset", &offset) - .field("len", &len) - .finish() - } -} - -impl AtomicValuePointer { - #[inline] - fn new(offset: u32, len: u32) -> Self { - Self(AtomicU64::new(encode_value_pointer(offset, len))) - } - - #[inline] - fn load(&self, ordering: Ordering) -> (u32, u32) { - decode_value_pointer(self.0.load(ordering)) - } - - #[inline] - fn swap(&self, offset: u32, len: u32) -> (u32, u32) { - decode_value_pointer( - self - .0 - .swap(encode_value_pointer(offset, len), Ordering::AcqRel), - ) - } - - #[inline] - fn compare_remove(&self, success: Ordering, failure: Ordering) -> Result<(u32, u32), (u32, u32)> { - let old = self.0.load(Ordering::Acquire); - let (offset, _) = decode_value_pointer(old); - let new = encode_value_pointer(offset, REMOVE); - self - .0 - .compare_exchange(old, new, success, failure) - .map(decode_value_pointer) - .map_err(decode_value_pointer) - } -} - -#[derive(Debug)] -struct NodePtr { - ptr: *mut Node, - offset: u32, -} - -impl Clone for NodePtr { - fn clone(&self) -> Self { - *self - } -} - -impl Copy for NodePtr {} - -impl NodePtr { - const NULL: Self = Self { - ptr: ptr::null_mut(), - offset: 0, - }; - - #[inline] - const fn new(ptr: *mut u8, offset: u32) -> Self { - Self { - ptr: ptr.cast(), - offset, - } - } - - #[inline] - const fn is_null(&self) -> bool { - self.offset == 0 - } - - /// ## Safety - /// - the pointer must be valid - #[inline] - unsafe fn 
as_ref(&self) -> &Node { - &*self.ptr.cast() - } - - /// ## Safety - /// - the pointer must be valid - #[inline] - unsafe fn as_mut(&self) -> &mut Node { - &mut *self.ptr.cast() - } - - #[inline] - unsafe fn tower(&self, arena: &Arena, idx: usize) -> &Link { - let tower_ptr_offset = self.offset as usize + Node::::SIZE + idx * Link::SIZE; - let tower_ptr = arena.get_pointer(tower_ptr_offset); - &*tower_ptr.cast() - } - - #[inline] - unsafe fn write_tower(&self, arena: &Arena, idx: usize, prev_offset: u32, next_offset: u32) { - let tower_ptr_offset = self.offset as usize + Node::::SIZE + idx * Link::SIZE; - let tower_ptr: *mut Link = arena.get_pointer_mut(tower_ptr_offset).cast(); - *tower_ptr = Link::new(next_offset, prev_offset); - } - - /// ## Safety - /// - /// - The caller must ensure that the node is allocated by the arena. - /// - The caller must ensure that the offset is less than the capacity of the arena and larger than 0. - unsafe fn next_offset(&self, arena: &Arena, idx: usize) -> u32 { - self.tower(arena, idx).next_offset.load(Ordering::Acquire) - } - - /// ## Safety - /// - /// - The caller must ensure that the node is allocated by the arena. - /// - The caller must ensure that the offset is less than the capacity of the arena and larger than 0. - unsafe fn prev_offset(&self, arena: &Arena, idx: usize) -> u32 { - self.tower(arena, idx).prev_offset.load(Ordering::Acquire) - } - - /// ## Safety - /// - /// - The caller must ensure that the node is allocated by the arena. - /// - The caller must ensure that the offset is less than the capacity of the arena and larger than 0. - unsafe fn cas_prev_offset( - &self, - arena: &Arena, - idx: usize, - current: u32, - new: u32, - success: Ordering, - failure: Ordering, - ) -> Result { - #[cfg(not(feature = "unaligned"))] - self - .tower(arena, idx) - .prev_offset - .compare_exchange(current, new, success, failure) - } - - /// ## Safety - /// - /// - The caller must ensure that the node is allocated by the arena. 
- /// - The caller must ensure that the offset is less than the capacity of the arena and larger than 0. - unsafe fn cas_next_offset( - &self, - arena: &Arena, - idx: usize, - current: u32, - new: u32, - success: Ordering, - failure: Ordering, - ) -> Result { - self - .tower(arena, idx) - .next_offset - .compare_exchange(current, new, success, failure) - } -} - -#[derive(Debug)] -#[repr(C)] -struct Link { - next_offset: AtomicU32, - prev_offset: AtomicU32, -} - -impl Link { - const SIZE: usize = mem::size_of::(); - - #[inline] - fn new(next_offset: u32, prev_offset: u32) -> Self { - Self { - next_offset: AtomicU32::new(next_offset), - prev_offset: AtomicU32::new(prev_offset), - } - } -} - -#[repr(C)] -struct Node { - // A byte slice is 24 bytes. We are trying to save space here. - /// Multiple parts of the value are encoded as a single uint64 so that it - /// can be atomically loaded and stored: - /// value offset: u32 (bits 0-31) - /// value size : u32 (bits 32-63) - value: AtomicValuePointer, - // Immutable. No need to lock to access key. - key_offset: u32, - // Immutable. No need to lock to access key. - key_size_and_height: u32, - trailer: PhantomData, - // ** DO NOT REMOVE BELOW COMMENT** - // The below field will be attached after the node, have to comment out - // this field, because each node will not use the full height, the code will - // not allocate the full size of the tower. - // - // Most nodes do not need to use the full height of the tower, since the - // probability of each successive level decreases exponentially. Because - // these elements are never accessed, they do not need to be allocated. - // Therefore, when a node is allocated in the arena, its memory footprint - // is deliberately truncated to not include unneeded tower elements. - // - // All accesses to elements should use CAS operations, with no need to lock. 
- // pub(super) tower: [Link; self.opts.max_height], -} - -impl core::fmt::Debug for Node { - fn fmt(&self, f: &mut core::fmt::Formatter<'_>) -> core::fmt::Result { - let (key_size, height) = decode_key_size_and_height(self.key_size_and_height); - let (value_offset, value_size) = decode_value_pointer(self.value.0.load(Ordering::Relaxed)); - f.debug_struct("Node") - .field("value_offset", &value_offset) - .field("value_size", &value_size) - .field("key_offset", &self.key_offset) - .field("key_size", &key_size) - .field("height", &height) - .finish() - } -} - -impl Node { - const SIZE: usize = mem::size_of::(); - const ALIGN: u32 = mem::align_of::() as u32; - - #[inline] - fn full(value_offset: u32, max_height: u8) -> Self { - Self { - value: AtomicValuePointer::new(value_offset, 0), - key_offset: 0, - key_size_and_height: encode_key_size_and_height(0, max_height), - trailer: PhantomData, - } - } - - #[inline] - const fn size(max_height: u8) -> usize { - Self::SIZE + (max_height as usize) * Link::SIZE - } - - #[inline] - fn set_value<'a, E>( - &self, - arena: &'a Arena, - trailer: T, - value_size: u32, - f: &impl Fn(&mut VacantBuffer<'a>) -> Result<(), E>, - ) -> Result<(), Either> { - let mut bytes = arena - .alloc_aligned_bytes::(value_size) - .map_err(|e| Either::Right(e.into()))?; - let trailer_ptr = bytes.as_mut_ptr().cast::(); - let trailer_offset = bytes.offset(); - let value_offset = trailer_offset + mem::size_of::(); - - let mut oval = VacantBuffer::new(value_size as usize, value_offset as u32, unsafe { - arena.get_bytes_mut(value_offset, value_size as usize) - }); - f(&mut oval).map_err(Either::Left)?; - - let remaining = oval.remaining(); - let mut discard = 0; - if remaining != 0 - && unsafe { !arena.dealloc((value_offset + oval.len()) as u32, remaining as u32) } - { - discard += remaining; - } - - bytes.detach(); - unsafe { - trailer_ptr.write(trailer); - } - - if discard != 0 { - arena.increase_discarded(discard as u32); - } - - 
self.value.swap(trailer_offset as u32, value_size); - - Ok(()) - } - - #[inline] - fn clear_value(&self, success: Ordering, failure: Ordering) -> Result<(), (u32, u32)> { - self.value.compare_remove(success, failure).map(|_| ()) - } -} - -impl Node { - #[inline] - const fn key_size(&self) -> u32 { - decode_key_size_and_height(self.key_size_and_height).0 - } - - #[inline] - const fn height(&self) -> u8 { - decode_key_size_and_height(self.key_size_and_height).1 - } - - /// ## Safety - /// - /// - The caller must ensure that the node is allocated by the arena. - const unsafe fn get_key<'a, 'b: 'a>(&'a self, arena: &'b Arena) -> &'b [u8] { - arena.get_bytes(self.key_offset as usize, self.key_size() as usize) - } - - /// ## Safety - /// - /// - The caller must ensure that the node is allocated by the arena. - #[inline] - unsafe fn get_value<'a, 'b: 'a>(&'a self, arena: &'b Arena) -> Option<&'b [u8]> { - let (offset, len) = self.value.load(Ordering::Acquire); - - if len == u32::MAX { - return None; - } - let align_offset = Self::align_offset(offset); - Some(arena.get_bytes(align_offset as usize + mem::size_of::(), len as usize)) - } - - /// ## Safety - /// - /// - The caller must ensure that the node is allocated by the arena. 
- #[inline] - unsafe fn get_value_by_offset<'a, 'b: 'a>( - &'a self, - arena: &'b Arena, - offset: u32, - len: u32, - ) -> Option<&'b [u8]> { - if len == u32::MAX { - return None; - } - let align_offset = Self::align_offset(offset); - Some(arena.get_bytes(align_offset as usize + mem::size_of::(), len as usize)) - } - - #[inline] - const fn align_offset(current_offset: u32) -> u32 { - let alignment = mem::align_of::() as u32; - (current_offset + alignment - 1) & !(alignment - 1) - } -} - -impl Node { - #[inline] - unsafe fn get_trailer<'a, 'b: 'a>(&'a self, arena: &'b Arena) -> T { - let (offset, _) = self.value.load(Ordering::Acquire); - *arena.get_aligned_pointer(offset as usize) - } - - /// ## Safety - /// - /// - The caller must ensure that the node is allocated by the arena. - #[inline] - unsafe fn get_trailer_by_offset<'a, 'b: 'a>(&'a self, arena: &'b Arena, offset: u32) -> T { - *arena.get_aligned_pointer::(offset as usize) - } - - /// ## Safety - /// - /// - The caller must ensure that the node is allocated by the arena. - #[inline] - unsafe fn get_value_and_trailer<'a, 'b: 'a>(&'a self, arena: &'b Arena) -> (T, Option<&'b [u8]>) { - let (offset, len) = self.value.load(Ordering::Acquire); - let ptr = arena.get_aligned_pointer(offset as usize); - #[cfg(not(feature = "unaligned"))] - let trailer = *ptr; - - if len == u32::MAX { - return (trailer, None); - } - - let value_offset = arena.offset(ptr as _) + mem::size_of::(); - (trailer, Some(arena.get_bytes(value_offset, len as usize))) - } -} - -/// A fast, cocnurrent map implementation based on skiplist that supports forward -/// and backward iteration. Keys and values are immutable once added to the skipmap and -/// deletion is not supported. Instead, higher-level code is expected to add new -/// entries that shadow existing entries and perform deletion via tombstones. It -/// is up to the user to process these shadow entries and tombstones -/// appropriately during retrieval. 
-#[derive(Debug)] -pub struct SkipMap { - arena: Arena, - meta: NonNull, - head: NodePtr, - tail: NodePtr, - data_offset: u32, - opts: Options, - /// If set to true by tests, then extra delays are added to make it easier to - /// detect unusual race conditions. - #[cfg(all(test, feature = "std"))] - yield_now: bool, - - cmp: C, -} - -// Safety: SkipMap is Sync and Send -unsafe impl Send for SkipMap {} -unsafe impl Sync for SkipMap {} - -impl Clone for SkipMap { - fn clone(&self) -> Self { - Self { - arena: self.arena.clone(), - meta: self.meta, - head: self.head, - tail: self.tail, - data_offset: self.data_offset, - opts: self.opts, - #[cfg(all(test, feature = "std"))] - yield_now: self.yield_now, - cmp: self.cmp.clone(), - } - } -} - -impl Drop for SkipMap { - fn drop(&mut self) { - if self.arena.refs() == 1 && !self.opts.unify() { - unsafe { - let _ = Box::from_raw(self.meta.as_ptr()); - } - } - } -} - -impl SkipMap { - fn new_in(arena: Arena, cmp: C, opts: Options) -> Result { - let data_offset = Self::check_capacity(&arena, opts.max_height().into())?; - - if arena.read_only() { - let (meta, head, tail) = Self::get_pointers(&arena); - return Ok(Self::construct( - arena, - meta, - head, - tail, - data_offset, - opts, - cmp, - )); - } - - let meta = if opts.unify() { - Self::allocate_meta(&arena, opts.magic_version())? - } else { - unsafe { - NonNull::new_unchecked(Box::into_raw(Box::new(Meta { - max_version: AtomicU64::new(0), - min_version: AtomicU64::new(0), - height: AtomicU8::new(1), - len: AtomicU32::new(0), - magic_version: opts.magic_version(), - reserved_byte: 0, - }))) - } - }; - - let max_height: u8 = opts.max_height().into(); - let head = Self::allocate_full_node(&arena, max_height)?; - let tail = Self::allocate_full_node(&arena, max_height)?; - - // Safety: - // We will always allocate enough space for the head node and the tail node. - unsafe { - // Link all head/tail levels together. 
- for i in 0..(max_height as usize) { - let head_link = head.tower(&arena, i); - let tail_link = tail.tower(&arena, i); - head_link.next_offset.store(tail.offset, Ordering::Relaxed); - tail_link.prev_offset.store(head.offset, Ordering::Relaxed); - } - } - - Ok(Self::construct( - arena, - meta, - head, - tail, - data_offset, - opts, - cmp, - )) - } - - /// Checks if the arena has enough capacity to store the skiplist, - /// and returns the data offset. - #[inline] - const fn check_capacity(arena: &Arena, max_height: u8) -> Result { - let offset = arena.data_offset(); - - let alignment = mem::align_of::(); - let meta_offset = (offset + alignment - 1) & !(alignment - 1); - let meta_end = meta_offset + mem::size_of::(); - - let alignment = mem::align_of::>(); - let head_offset = (meta_end + alignment - 1) & !(alignment - 1); - let head_end = - head_offset + mem::size_of::>() + mem::size_of::() * max_height as usize; - - let trailer_alignment = mem::align_of::(); - let trailer_size = mem::size_of::(); - let trailer_end = if trailer_size != 0 { - let trailer_offset = (head_end + trailer_alignment - 1) & !(trailer_alignment - 1); - trailer_offset + trailer_size - } else { - head_end - }; - - let tail_offset = (trailer_end + alignment - 1) & !(alignment - 1); - let tail_end = - tail_offset + mem::size_of::>() + mem::size_of::() * max_height as usize; - - let trailer_end = if trailer_size != 0 { - let trailer_offset = (tail_end + trailer_alignment - 1) & !(trailer_alignment - 1); - trailer_offset + trailer_size - } else { - tail_end - }; - if trailer_end > arena.capacity() { - return Err(Error::ArenaTooSmall); - } - - Ok(trailer_end as u32) - } - - /// Allocates a `Node`, key, trailer and value - fn allocate_entry_node<'a, 'b: 'a, E>( - &'a self, - height: u32, - trailer: T, - key_size: u32, - kf: impl FnOnce(&mut VacantBuffer<'a>) -> Result<(), E>, - value_size: u32, - vf: impl FnOnce(&mut VacantBuffer<'a>) -> Result<(), E>, - ) -> Result<(NodePtr, Deallocator), Either> { 
- self - .check_node_size(height, key_size, value_size) - .map_err(Either::Right)?; - - unsafe { - let mut node = self - .arena - .alloc_aligned_bytes::>(height * Link::SIZE as u32) - .map_err(|e| Either::Right(e.into()))?; - let node_ptr = node.as_mut_ptr().cast::>(); - let node_offset = node.offset(); - - let mut key = self - .arena - .alloc_bytes(key_size) - .map_err(|e| Either::Right(e.into()))?; - let key_offset = key.offset(); - let key_cap = key.capacity(); - let mut trailer_and_value = self - .arena - .alloc_aligned_bytes::(value_size) - .map_err(|e| Either::Right(e.into()))?; - let trailer_offset = trailer_and_value.offset(); - let trailer_ptr = trailer_and_value.as_mut_ptr().cast::(); - trailer_ptr.write(trailer); - - let value_offset = (trailer_offset + mem::size_of::()) as u32; - - // Safety: the node is well aligned - let node_ref = &mut *node_ptr; - node_ref.value = AtomicValuePointer::new(trailer_offset as u32, value_size); - node_ref.key_offset = key_offset as u32; - node_ref.key_size_and_height = encode_key_size_and_height(key_cap as u32, height as u8); - key.detach(); - let (_, key_deallocate_info) = self - .fill_vacant_key(key_cap as u32, key_offset as u32, kf) - .map_err(Either::Left)?; - trailer_and_value.detach(); - let (_, value_deallocate_info) = self - .fill_vacant_value( - trailer_offset as u32, - trailer_and_value.capacity() as u32, - value_size, - value_offset, - vf, - ) - .map_err(Either::Left)?; - node.detach(); - Ok(( - NodePtr::new(node_ptr as _, node_offset as u32), - Deallocator { - node: Some(Pointer::new(node_offset as u32, node.capacity() as u32)), - key: Some(key_deallocate_info), - value: Some(value_deallocate_info), - }, - )) - } - } - - /// Allocates a `Node` and trailer - fn allocate_node<'a, 'b: 'a, E>( - &'a self, - height: u32, - trailer: T, - key_offset: u32, - key_size: u32, - value_size: u32, - ) -> Result<(NodePtr, Deallocator), Either> { - self - .check_node_size(height, key_size, value_size) - 
.map_err(Either::Right)?; - - unsafe { - let mut node = self - .arena - .alloc_aligned_bytes::>(height * Link::SIZE as u32) - .map_err(|e| Either::Right(e.into()))?; - let node_ptr = node.as_mut_ptr().cast::>(); - let node_offset = node.offset(); - - let mut trailer_ref = self - .arena - .alloc::() - .map_err(|e| Either::Right(e.into()))?; - let trailer_offset = trailer_ref.offset(); - trailer_ref.write(trailer); - - // Safety: the node is well aligned - let node_ref = &mut *node_ptr; - node_ref.value = AtomicValuePointer::new(trailer_offset as u32, value_size); - node_ref.key_offset = key_offset; - node_ref.key_size_and_height = encode_key_size_and_height(key_size, height as u8); - - trailer_ref.detach(); - node.detach(); - Ok(( - NodePtr::new(node_ptr as _, node_offset as u32), - Deallocator { - node: Some(Pointer::new(node_offset as u32, node.capacity() as u32)), - key: None, - value: Some(Pointer::new( - trailer_offset as u32, - mem::size_of::() as u32, - )), - }, - )) - } - } - - /// Allocates a `Node`, key and trailer - fn allocate_key_node<'a, 'b: 'a, E>( - &'a self, - height: u32, - trailer: T, - key_size: u32, - kf: impl FnOnce(&mut VacantBuffer<'a>) -> Result<(), E>, - value_size: u32, - ) -> Result<(NodePtr, Deallocator), Either> { - self - .check_node_size(height, key_size, value_size) - .map_err(Either::Right)?; - - unsafe { - let mut node = self - .arena - .alloc_aligned_bytes::>(height * Link::SIZE as u32) - .map_err(|e| Either::Right(e.into()))?; - let node_ptr = node.as_mut_ptr().cast::>(); - let node_offset = node.offset(); - - let mut key = self - .arena - .alloc_bytes(key_size) - .map_err(|e| Either::Right(e.into()))?; - let key_offset = key.offset(); - let key_cap = key.capacity(); - - let mut trailer_ref = self - .arena - .alloc::() - .map_err(|e| Either::Right(e.into()))?; - let trailer_offset = trailer_ref.offset(); - trailer_ref.write(trailer); - - // Safety: the node is well aligned - let node_ref = &mut *node_ptr; - node_ref.value = 
AtomicValuePointer::new(trailer_offset as u32, value_size); - node_ref.key_offset = key_offset as u32; - node_ref.key_size_and_height = encode_key_size_and_height(key_cap as u32, height as u8); - - key.detach(); - let (_, key_deallocate_info) = self - .fill_vacant_key(key_cap as u32, key_offset as u32, kf) - .map_err(Either::Left)?; - - trailer_ref.detach(); - node.detach(); - - Ok(( - NodePtr::new(node_ptr as _, node_offset as u32), - Deallocator { - node: Some(Pointer::new(node_offset as u32, node.capacity() as u32)), - key: Some(key_deallocate_info), - value: Some(Pointer::new( - trailer_offset as u32, - mem::size_of::() as u32, - )), - }, - )) - } - } - - /// Allocates a `Node`, trailer and value - fn allocate_value_node<'a, 'b: 'a, E>( - &'a self, - height: u32, - trailer: T, - key_size: u32, - key_offset: u32, - value_size: u32, - vf: impl FnOnce(&mut VacantBuffer<'a>) -> Result<(), E>, - ) -> Result<(NodePtr, Deallocator), Either> { - self - .check_node_size(height, key_size, value_size) - .map_err(Either::Right)?; - - unsafe { - let mut node = self - .arena - .alloc_aligned_bytes::>(height * Link::SIZE as u32) - .map_err(|e| Either::Right(e.into()))?; - let node_ptr = node.as_mut_ptr().cast::>(); - let node_offset = node.offset(); - - let mut trailer_and_value = self - .arena - .alloc_aligned_bytes::(value_size) - .map_err(|e| Either::Right(e.into()))?; - let trailer_offset = trailer_and_value.offset(); - let trailer_ptr = trailer_and_value.as_mut_ptr().cast::(); - trailer_ptr.write(trailer); - let value_offset = (trailer_offset + mem::size_of::()) as u32; - - // Safety: the node is well aligned - let node_ref = &mut *node_ptr; - node_ref.value = AtomicValuePointer::new(trailer_offset as u32, value_size); - node_ref.key_offset = key_offset; - node_ref.key_size_and_height = encode_key_size_and_height(key_size, height as u8); - - trailer_and_value.detach(); - let (_, value_deallocate_info) = self - .fill_vacant_value( - trailer_offset as u32, - 
trailer_and_value.capacity() as u32, - value_size, - value_offset, - vf, - ) - .map_err(Either::Left)?; - - node.detach(); - - Ok(( - NodePtr::new(node_ptr as _, node_offset as u32), - Deallocator { - node: Some(Pointer::new(node_offset as u32, node.capacity() as u32)), - key: None, - value: Some(value_deallocate_info), - }, - )) - } - } - - fn allocate_full_node(arena: &Arena, max_height: u8) -> Result, ArenaError> { - // Safety: node, links and trailer do not need to be dropped, and they are recoverable. - unsafe { - let mut node = - arena.alloc_aligned_bytes::>(((max_height as usize) * Link::SIZE) as u32)?; - - // Safety: node and trailer do not need to be dropped. - node.detach(); - - let node_ptr = node.as_mut_ptr().cast::>(); - let node_offset = node.offset(); - - let trailer_offset = if mem::size_of::() != 0 { - let mut trailer = arena.alloc::()?; - trailer.detach(); - trailer.offset() - } else { - arena.allocated() - }; - - let node = &mut *node_ptr; - *node = Node::::full(trailer_offset as u32, max_height); - - Ok(NodePtr::new(node_ptr as _, node_offset as u32)) - } - } - - #[inline] - fn allocate_meta(arena: &Arena, magic_version: u16) -> Result, ArenaError> { - // Safety: meta does not need to be dropped, and it is recoverable. 
- unsafe { - let mut meta = arena.alloc::()?; - meta.detach(); - - meta.write(Meta { - max_version: AtomicU64::new(0), - min_version: AtomicU64::new(0), - height: AtomicU8::new(1), - len: AtomicU32::new(0), - magic_version, - reserved_byte: 0, - }); - Ok(meta.as_mut_ptr()) - } - } - - #[inline] - unsafe fn fill_vacant_key<'a, E>( - &'a self, - size: u32, - offset: u32, - f: impl FnOnce(&mut VacantBuffer<'a>) -> Result<(), E>, - ) -> Result<(u32, Pointer), E> { - let buf = self.arena.get_bytes_mut(offset as usize, size as usize); - let mut oval = VacantBuffer::new(size as usize, offset, buf); - if let Err(e) = f(&mut oval) { - self.arena.dealloc(offset, size); - return Err(e); - } - - let len = oval.len(); - let remaining = oval.remaining(); - if remaining != 0 { - #[cfg(feature = "tracing")] - tracing::warn!("vacant value is not fully filled, remaining {remaining} bytes"); - let deallocated = self.arena.dealloc(offset + len as u32, remaining as u32); - if deallocated { - return Ok(( - oval.len() as u32, - Pointer::new(offset, size - remaining as u32), - )); - } - } - Ok((oval.len() as u32, Pointer::new(offset, size))) - } - - #[inline] - unsafe fn fill_vacant_value<'a, E>( - &'a self, - offset: u32, - size: u32, - value_size: u32, - value_offset: u32, - f: impl FnOnce(&mut VacantBuffer<'a>) -> Result<(), E>, - ) -> Result<(u32, Pointer), E> { - let buf = self - .arena - .get_bytes_mut(value_offset as usize, value_size as usize); - let mut oval = VacantBuffer::new(value_size as usize, value_offset, buf); - if let Err(e) = f(&mut oval) { - self.arena.dealloc(offset, size); - return Err(e); - } - - let len = oval.len(); - let remaining = oval.remaining(); - if remaining != 0 { - #[cfg(feature = "tracing")] - tracing::warn!("vacant value is not fully filled, remaining {remaining} bytes"); - let deallocated = self - .arena - .dealloc(value_offset + len as u32, remaining as u32); - - if deallocated { - return Ok(( - oval.len() as u32, - Pointer::new(offset, size - 
remaining as u32), - )); - } - } - - Ok((oval.len() as u32, Pointer::new(offset, size))) - } - - #[inline] - fn get_pointers(arena: &Arena) -> (NonNull, NodePtr, NodePtr) { - unsafe { - let offset = arena.data_offset(); - let meta = arena.get_aligned_pointer::(offset); - - let offset = arena.offset(meta as _) + mem::size_of::(); - let head_ptr = arena.get_aligned_pointer::>(offset); - let head_offset = arena.offset(head_ptr as _); - let head = NodePtr::new(head_ptr as _, head_offset as u32); - - let (trailer_offset, _) = head.as_ref().value.load(Ordering::Relaxed); - let offset = trailer_offset as usize + mem::size_of::(); - let tail_ptr = arena.get_aligned_pointer::>(offset); - let tail_offset = arena.offset(tail_ptr as _); - let tail = NodePtr::new(tail_ptr as _, tail_offset as u32); - (NonNull::new_unchecked(meta as _), head, tail) - } - } - - #[inline] - fn check_node_size(&self, height: u32, key_size: u32, mut value_size: u32) -> Result<(), Error> { - let max_height: u32 = self.opts.max_height().into(); - if height < 1 || height > max_height { - panic!("height cannot be less than one or greater than the max height"); - } - - let max_key_size: u32 = self.opts.max_key_size().into(); - if key_size > max_key_size { - return Err(Error::KeyTooLarge(key_size as u64)); - } - - // if value_size is u32::MAX, it means that the value is removed. 
- value_size = if value_size == u32::MAX { - 0 - } else { - value_size - }; - - if value_size > self.opts.max_value_size() { - return Err(Error::ValueTooLarge(value_size as u64)); - } - - let entry_size = (value_size as u64 + key_size as u64) + Node::::size(height as u8) as u64; - if entry_size > u32::MAX as u64 { - return Err(Error::EntryTooLarge(entry_size)); - } - - Ok(()) - } - - #[inline] - fn construct( - arena: Arena, - meta: NonNull, - head: NodePtr, - tail: NodePtr, - data_offset: u32, - opts: Options, - cmp: C, - ) -> Self { - Self { - arena, - meta, - head, - tail, - data_offset, - opts, - #[cfg(all(test, feature = "std"))] - yield_now: false, - cmp, - } - } - - #[inline] - const fn meta(&self) -> &Meta { - // Safety: the pointer is well aligned and initialized. - unsafe { self.meta.as_ref() } - } -} - -impl SkipMap { - fn new_node<'a, 'b: 'a, E>( - &'a self, - key: &Key<'a, 'b>, - trailer: T, - value_size: u32, - f: &impl Fn(&mut VacantBuffer<'a>) -> Result<(), E>, - ) -> Result<(NodePtr, u32, Deallocator), Either> { - let height = super::random_height(self.opts.max_height().into()); - let (nd, deallocator) = match key { - Key::Occupied(key) => self.allocate_entry_node( - height, - trailer, - key.len() as u32, - |buf| { - buf.write(key).unwrap(); - Ok(()) - }, - value_size, - f, - )?, - Key::Vacant(key) => { - self.allocate_value_node(height, trailer, key.len() as u32, key.offset, value_size, f)? - } - Key::Pointer { offset, len, .. } => { - self.allocate_value_node(height, trailer, *len, *offset, value_size, f)? - } - Key::Remove(key) => self.allocate_key_node( - height, - trailer, - key.len() as u32, - |buf| { - buf.write(key).expect("buffer must be large enough for key"); - Ok(()) - }, - REMOVE, - )?, - Key::RemoveVacant(key) => { - self.allocate_node(height, trailer, key.offset, key.len() as u32, REMOVE)? - } - Key::RemovePointer { offset, len, .. } => { - self.allocate_node(height, trailer, *offset, *len, REMOVE)? 
- } - }; - - // Try to increase self.height via CAS. - let mut list_height = self.height(); - while height as u8 > list_height { - match self.meta().height.compare_exchange_weak( - list_height, - height as u8, - Ordering::SeqCst, - Ordering::Acquire, - ) { - // Successfully increased skiplist.height. - Ok(_) => break, - Err(h) => list_height = h, - } - } - Ok((nd, height, deallocator)) - } -} - -impl SkipMap { - /// ## Safety - /// - /// - The caller must ensure that the node is allocated by the arena. - #[inline] - unsafe fn get_prev(&self, nd: NodePtr, height: usize) -> NodePtr { - if nd.is_null() { - return NodePtr::NULL; - } - - let offset = nd.prev_offset(&self.arena, height); - let ptr = self.arena.get_pointer(offset as usize); - NodePtr::new(ptr as _, offset) - } - - /// ## Safety - /// - /// - The caller must ensure that the node is allocated by the arena. - #[inline] - unsafe fn get_next(&self, nptr: NodePtr, height: usize) -> NodePtr { - if nptr.is_null() { - return NodePtr::NULL; - } - let offset = nptr.next_offset(&self.arena, height); - let ptr = self.arena.get_pointer(offset as usize); - NodePtr::new(ptr as _, offset) - } - - /// Returns the first entry in the map. - fn first_in(&self, version: u64) -> Option> { - // Safety: head node was definitely allocated by self.arena - let nd = unsafe { self.get_next(self.head, 0) }; - - if nd.is_null() || nd.ptr == self.tail.ptr { - return None; - } - - unsafe { - let node = nd.as_ref(); - let curr_key = node.get_key(&self.arena); - self.ge(version, curr_key) - } - } - - /// Returns the last entry in the map. 
- fn last_in(&self, version: u64) -> Option> { - // Safety: tail node was definitely allocated by self.arena - let nd = unsafe { self.get_prev(self.tail, 0) }; - - if nd.is_null() || nd.ptr == self.head.ptr { - return None; - } - - unsafe { - let node = nd.as_ref(); - let curr_key = node.get_key(&self.arena); - self.le(version, curr_key) - } - } - - /// Returns the entry greater or equal to the given key, if it exists. - /// - /// e.g. - /// - /// - If k1 < k2 < k3, key is equal to k1, then the entry contains k2 will be returned. - /// - If k1 < k2 < k3, and k1 < key < k2, then the entry contains k2 will be returned. - fn gt<'a, 'b: 'a>(&'a self, version: u64, key: &'b [u8]) -> Option> { - unsafe { - let (n, _) = self.find_near(u64::MIN, key, false, false); // find the key with the max version. - - let n = n?; - - if n.is_null() || n.ptr == self.tail.ptr { - return None; - } - - self.find_next_max_version(n, version) - } - } - - /// Returns the entry less than the given key, if it exists. - /// - /// e.g. - /// - /// - If k1 < k2 < k3, and key is equal to k3, then the entry contains k2 will be returned. - /// - If k1 < k2 < k3, and k2 < key < k3, then the entry contains k2 will be returned. - fn lt<'a, 'b: 'a>(&'a self, version: u64, key: &'b [u8]) -> Option> { - unsafe { - let (n, _) = self.find_near(u64::MAX, key, true, false); // find less or equal. - - let n = n?; - if n.is_null() || n.ptr == self.head.ptr { - return None; - } - - self.find_prev_max_version(n, version) - } - } - - /// Returns the entry greater than or equal to the given key, if it exists. - /// - /// e.g. - /// - /// - If k1 < k2 < k3, key is equal to k1, then the entry contains k1 will be returned. - /// - If k1 < k2 < k3, and k1 < key < k2, then the entry contains k2 will be returned. 
- fn ge<'a, 'b: 'a>(&'a self, version: u64, key: &'b [u8]) -> Option> { - unsafe { - // TODO: optimize find_near implementation, so that we can directly use version instead of u64::MIN - let (n, _) = self.find_near(u64::MAX, key, false, true); // find the key with the max version. - - let n = n?; - - if n.is_null() || n.ptr == self.tail.ptr { - return None; - } - - self.find_next_max_version(n, version) - } - } - - /// Returns the entry less than or equal to the given key, if it exists. - /// - /// e.g. - /// - /// - If k1 < k2 < k3, and key is equal to k3, then the entry contains k3 will be returned. - /// - If k1 < k2 < k3, and k2 < key < k3, then the entry contains k2 will be returned. - fn le<'a, 'b: 'a>(&'a self, version: u64, key: &'b [u8]) -> Option> { - unsafe { - let (n, _) = self.find_near(u64::MIN, key, true, true); // find less or equal. - - let n = n?; - if n.is_null() || n.ptr == self.head.ptr { - return None; - } - - self.find_prev_max_version(n, version) - } - } - - unsafe fn find_prev_max_version(&self, mut curr: NodePtr, version: u64) -> Option> { - let mut prev = self.get_prev(curr, 0); - - loop { - let curr_node = curr.as_ref(); - let curr_key = curr_node.get_key(&self.arena); - // if the current version is greater than the given version, we should return. 
- let version_cmp = curr_node.get_trailer(&self.arena).version().cmp(&version); - if version_cmp == cmp::Ordering::Greater { - return None; - } - - if prev.is_null() || prev.ptr == self.head.ptr { - if let cmp::Ordering::Less | cmp::Ordering::Equal = version_cmp { - return Some(curr); - } - - return None; - } - - let prev_node = prev.as_ref(); - let prev_key = prev_node.get_key(&self.arena); - if self.cmp.compare(prev_key, curr_key) == cmp::Ordering::Less { - return Some(curr); - } - - let version_cmp = prev_node.get_trailer(&self.arena).version().cmp(&version); - - if version_cmp == cmp::Ordering::Equal { - return Some(prev); - } - - if version_cmp == cmp::Ordering::Greater { - return Some(curr); - } - - curr = prev; - prev = self.get_prev(curr, 0); - } - } - - unsafe fn find_next_max_version(&self, mut curr: NodePtr, version: u64) -> Option> { - let mut next = self.get_next(curr, 0); - - loop { - let curr_node = curr.as_ref(); - let curr_key = curr_node.get_key(&self.arena); - // if the current version is less or equal to the given version, we should return. 
- let version_cmp = curr_node.get_trailer(&self.arena).version().cmp(&version); - if let cmp::Ordering::Less | cmp::Ordering::Equal = version_cmp { - return Some(curr); - } - - if next.is_null() || next.ptr == self.head.ptr { - if let cmp::Ordering::Less | cmp::Ordering::Equal = version_cmp { - return Some(curr); - } - - return None; - } - - let next_node = next.as_ref(); - let next_key = next_node.get_key(&self.arena); - let version_cmp = next_node.get_trailer(&self.arena).version().cmp(&version); - if self.cmp.compare(next_key, curr_key) == cmp::Ordering::Greater { - if let cmp::Ordering::Less | cmp::Ordering::Equal = version_cmp { - return Some(curr); - } - - return None; - } - - if let cmp::Ordering::Less | cmp::Ordering::Equal = version_cmp { - if next.ptr == self.tail.ptr { - return None; - } - - return Some(next); - } - - curr = next; - next = self.get_next(curr, 0); - } - } - - /// finds the node near to key. - /// If less=true, it finds rightmost node such that node.key < key (if allow_equal=false) or - /// node.key <= key (if allow_equal=true). - /// If less=false, it finds leftmost node such that node.key > key (if allow_equal=false) or - /// node.key >= key (if allow_equal=true). - /// Returns the node found. The bool returned is true if the node has key equal to given key. - unsafe fn find_near( - &self, - version: u64, - key: &[u8], - less: bool, - allow_equal: bool, - ) -> (Option>, bool) { - let mut x = self.head; - let mut level = self.height() as usize - 1; - - loop { - // Assume x.key < key. - let next = self.get_next(x, level); - if next.is_null() || next.ptr == self.tail.ptr { - // x.key < key < END OF LIST - if level > 0 { - // Can descend further to iterate closer to the end. - level -= 1; - continue; - } - - // level == 0. Can't descend further. Let's return something that makes sense. - if !less { - return (None, false); - } - - // Try to return x. Make sure it is not a head node. 
- if x.ptr == self.head.ptr { - return (None, false); - } - - return (Some(x), false); - } - - let next_node = next.as_ref(); - let next_key = next_node.get_key(&self.arena); - let cmp = self - .cmp - .compare(key, next_key) - .then_with(|| next_node.get_trailer(&self.arena).version().cmp(&version)); - - match cmp { - cmp::Ordering::Greater => { - // x.key < next.key < key. We can continue to move right. - x = next; - continue; - } - cmp::Ordering::Equal => { - // x.key < key == next.key. - if allow_equal { - return (Some(next), true); - } - - if !less { - // We want >, so go to base level to grab the next bigger node. - return (Some(self.get_next(next, 0)), false); - } - - // We want <. If not base level, we should go closer in the next level. - if level > 0 { - level -= 1; - continue; - } - - // On base level, Return x. - return (Some(x), false); - } - // In other words, x.key < key < next. - cmp::Ordering::Less => { - if level > 0 { - level -= 1; - continue; - } - - // On base level. Need to return something. - if !less { - return (Some(next), false); - } - - // Try to return x. Make sure it is not a head node. - if x.ptr == self.head.ptr { - return (None, false); - } - - return (Some(x), false); - } - } - } - } - - /// ## Safety: - /// - All of splices in the inserter must be contains node ptrs are allocated by the current skip map. - unsafe fn find_splice<'a, 'b: 'a>( - &'a self, - version: u64, - key: &'b [u8], - ins: &mut Inserter, - returned_when_found: bool, - ) -> (bool, Option, Option>) { - let list_height = self.height() as u32; - let mut level = 0; - - let mut prev = self.head; - if ins.height < list_height { - // Our cached height is less than the list height, which means there were - // inserts that increased the height of the list. Recompute the splice from - // scratch. - ins.height = list_height; - level = ins.height as usize; - } else { - // Our cached height is equal to the list height. 
- while level < list_height as usize { - let spl = &ins.spl[level]; - if self.get_next(spl.prev, level).ptr != spl.next.ptr { - level += 1; - // One or more nodes have been inserted between the splice at this - // level. - continue; - } - - if spl.prev.ptr != self.head.ptr && !self.key_is_after_node(spl.prev, version, key) { - // Key lies before splice. - level = list_height as usize; - break; - } - - if spl.next.ptr != self.tail.ptr && !self.key_is_after_node(spl.next, version, key) { - // Key lies after splice. - level = list_height as usize; - break; - } - - // The splice brackets the key! - prev = spl.prev; - break; - } - } - - let mut found = false; - let mut found_key = None; - for lvl in (0..level).rev() { - let mut fr = self.find_splice_for_level(version, key, lvl, prev); - if fr.splice.next.is_null() { - fr.splice.next = self.tail; - } - found = fr.found; - if let Some(key) = fr.found_key { - found_key.get_or_insert(key); - } - if found && returned_when_found { - return (found, found_key, fr.curr); - } - ins.spl[lvl] = fr.splice; - } - - (found, found_key, None) - } - - /// ## Safety - /// - `level` is less than `MAX_HEIGHT`. - /// - `start` must be allocated by self's arena. - unsafe fn find_splice_for_level( - &self, - version: u64, - key: &[u8], - level: usize, - start: NodePtr, - ) -> FindResult { - let mut prev = start; - - loop { - // Assume prev.key < key. - let next = self.get_next(prev, level); - if next.ptr == self.tail.ptr { - // Tail node, so done. - return FindResult { - splice: Splice { prev, next }, - found: false, - found_key: None, - curr: None, - }; - } - - // offset is not zero, so we can safely dereference the next node ptr. 
- let next_node = next.as_ref(); - let next_key = next_node.get_key(&self.arena); - - let cmp = self.cmp.compare(key, next_key); - - let mut found_key = None; - - match cmp { - cmp::Ordering::Equal => { - found_key = Some(Pointer { - offset: next_node.key_offset, - size: next_node.key_size(), - height: Some(next_node.height()), - }); - } - cmp::Ordering::Greater => { - if next_key.starts_with(key) { - found_key = Some(Pointer { - offset: next_node.key_offset, - size: key.len() as u32, - height: Some(next_node.height()), - }); - } - } - _ => {} - } - - match cmp.then_with(|| next_node.get_trailer(&self.arena).version().cmp(&version)) { - // We are done for this level, since prev.key < key < next.key. - cmp::Ordering::Less => { - return FindResult { - splice: Splice { prev, next }, - found: false, - found_key, - curr: None, - }; - } - // Keep moving right on this level. - cmp::Ordering::Greater => prev = next, - cmp::Ordering::Equal => { - return FindResult { - splice: Splice { prev, next }, - found: true, - found_key, - curr: Some(next), - }; - } - } - } - } - - /// ## Safety - /// - The caller must ensure that the node is allocated by the arena. - /// - The caller must ensure that the node is not null. 
- unsafe fn key_is_after_node(&self, nd: NodePtr, version: u64, key: &[u8]) -> bool { - let nd = &*nd.ptr; - let nd_key = self - .arena - .get_bytes(nd.key_offset as usize, nd.key_size() as usize); - - match self - .cmp - .compare(nd_key, key) - // .then_with(|| version.cmp(&nd.version)) - { - cmp::Ordering::Less => true, - cmp::Ordering::Greater => false, - cmp::Ordering::Equal => { - matches!(version.cmp(&nd.get_trailer(&self.arena).version()), cmp::Ordering::Less) - } - } - } - - fn fetch_vacant_key<'a, 'b: 'a, E>( - &'a self, - key_size: u32, - key: impl FnOnce(&mut VacantBuffer<'a>) -> Result<(), E>, - ) -> Result, Either> { - let (key_offset, key_size) = self - .arena - .alloc_bytes(key_size) - .map(|mut b| { - b.detach(); - (b.offset(), b.capacity()) - }) - .map_err(|e| Either::Right(e.into()))?; - - let mut vk = unsafe { - VacantBuffer::new( - key_size, - key_offset as u32, - self.arena.get_bytes_mut(key_offset, key_size), - ) - }; - key(&mut vk) - .map_err(|e| { - unsafe { - self.arena.dealloc(key_offset as u32, key_size as u32); - } - Either::Left(e) - }) - .map(|_| vk) - } - - #[allow(clippy::too_many_arguments)] - fn update<'a, 'b: 'a, E>( - &'a self, - trailer: T, - key: Key<'a, 'b>, - value_size: u32, - f: impl Fn(&mut VacantBuffer<'a>) -> Result<(), E>, - success: Ordering, - failure: Ordering, - ins: &mut Inserter, - upsert: bool, - ) -> Result, Either> { - let version = trailer.version(); - - // Safety: a fresh new Inserter, so safe here - let found_key = unsafe { - let (found, found_key, ptr) = self.find_splice(version, key.as_ref(), ins, true); - if found { - let node_ptr = ptr.expect("the NodePtr cannot be `None` when we found"); - let old = VersionedEntryRef::from_node(node_ptr, &self.arena); - - key.on_fail(&self.arena); - - if upsert { - return self.upsert( - old, node_ptr, &key, trailer, value_size, &f, success, failure, - ); - } - - return Ok(Either::Left(if old.is_removed() { - None - } else { - Some(old) - })); - } - - found_key - }; - - 
#[cfg(all(test, feature = "std"))] - if self.yield_now { - // Add delay to make it easier to test race between this thread - // and another thread that sees the intermediate state between - // finding the splice and using it. - std::thread::yield_now(); - } - - let mut k = match found_key { - None => key, - Some(k) => { - if key.is_remove() { - Key::RemovePointer { - arena: &self.arena, - offset: k.offset, - len: k.size, - } - } else { - Key::Pointer { - arena: &self.arena, - offset: k.offset, - len: k.size, - } - } - } - }; - - let (nd, height, mut deallocator) = - self.new_node(&k, trailer, value_size, &f).map_err(|e| { - k.on_fail(&self.arena); - e - })?; - - // We always insert from the base level and up. After you add a node in base - // level, we cannot create a node in the level above because it would have - // discovered the node in the base level. - let mut invalid_data_splice = false; - - for i in 0..(height as usize) { - let mut prev = ins.spl[i].prev; - let mut next = ins.spl[i].next; - - if prev.is_null() { - // New node increased the height of the skiplist, so assume that the - // new level has not yet been populated. - if !next.is_null() { - panic!("next is expected to be nil, since prev is nil"); - } - - prev = self.head; - next = self.tail; - } - - // +----------------+ +------------+ +----------------+ - // | prev | | nd | | next | - // | prevNextOffset |---->| | | | - // | |<----| prevOffset | | | - // | | | nextOffset |---->| | - // | | | |<----| nextPrevOffset | - // +----------------+ +------------+ +----------------+ - // - // 1. Initialize prevOffset and nextOffset to point to prev and next. - // 2. CAS prevNextOffset to repoint from next to nd. - // 3. CAS nextPrevOffset to repoint from prev to nd. - unsafe { - loop { - let prev_offset = prev.offset; - let next_offset = next.offset; - nd.write_tower(&self.arena, i, prev_offset, next_offset); - - // Check whether next has an updated link to prev. 
If it does not, - // that can mean one of two things: - // 1. The thread that added the next node hasn't yet had a chance - // to add the prev link (but will shortly). - // 2. Another thread has added a new node between prev and next. - // - // Safety: we already check next is not null - let next_prev_offset = next.prev_offset(&self.arena, i); - if next_prev_offset != prev_offset { - // Determine whether #1 or #2 is true by checking whether prev - // is still pointing to next. As long as the atomic operations - // have at least acquire/release semantics (no need for - // sequential consistency), this works, as it is equivalent to - // the "publication safety" pattern. - let prev_next_offset = prev.next_offset(&self.arena, i); - if prev_next_offset == next_offset { - // Ok, case #1 is true, so help the other thread along by - // updating the next node's prev link. - let _ = next.cas_prev_offset( - &self.arena, - i, - next_prev_offset, - prev_offset, - Ordering::SeqCst, - Ordering::Acquire, - ); - } - } - - match prev.cas_next_offset( - &self.arena, - i, - next.offset, - nd.offset, - Ordering::SeqCst, - Ordering::Acquire, - ) { - Ok(_) => { - // Managed to insert nd between prev and next, so update the next - // node's prev link and go to the next level. - #[cfg(all(test, feature = "std"))] - if self.yield_now { - // Add delay to make it easier to test race between this thread - // and another thread that sees the intermediate state between - // setting next and setting prev. - std::thread::yield_now(); - } - - let _ = next.cas_prev_offset( - &self.arena, - i, - prev_offset, - nd.offset, - Ordering::SeqCst, - Ordering::Acquire, - ); - - break; - } - Err(_) => { - // CAS failed. We need to recompute prev and next. It is unlikely to - // be helpful to try to use a different level as we redo the search, - // because it is unlikely that lots of nodes are inserted between prev - // and next. 
- let fr = self.find_splice_for_level(trailer.version(), k.as_ref(), i, prev); - if fr.found { - if i != 0 { - panic!("how can another thread have inserted a node at a non-base level?"); - } - - let node_ptr = fr - .curr - .expect("the current should not be `None` when we found"); - let old = VersionedEntryRef::from_node(node_ptr, &self.arena); - - k.on_fail(&self.arena); - - if upsert { - deallocator.dealloc(&self.arena); - return self.upsert(old, node_ptr, &k, trailer, value_size, &f, success, failure); - } - - deallocator.dealloc(&self.arena); - return Ok(Either::Left(if old.is_removed() { - None - } else { - Some(old) - })); - } - - if let Some(p) = fr.found_key { - k.on_fail(&self.arena); - let node = nd.as_mut(); - node.key_offset = p.offset; - node.key_size_and_height = encode_key_size_and_height(p.size, p.height.unwrap()); - deallocator.key = None; - k = Key::Pointer { - arena: &self.arena, - offset: p.offset, - len: p.size, - }; - } - - invalid_data_splice = true; - prev = fr.splice.prev; - next = fr.splice.next; - } - } - } - } - } - - // If we had to recompute the splice for a level, invalidate the entire - // cached splice. - if invalid_data_splice { - ins.height = 0; - } else { - // The splice was valid. We inserted a node between spl[i].prev and - // spl[i].next. Optimistically update spl[i].prev for use in a subsequent - // call to add. - for i in 0..(height as usize) { - ins.spl[i].prev = nd; - } - } - self.meta().increase_len(); - self.meta().update_max_version(version); - self.meta().update_min_version(version); - - Ok(Either::Left(None)) - } - - #[allow(clippy::too_many_arguments)] - unsafe fn upsert<'a, 'b: 'a, E>( - &'a self, - old: VersionedEntryRef<'a, T>, - node_ptr: NodePtr, - key: &Key<'a, 'b>, - trailer: T, - value_size: u32, - f: &impl Fn(&mut VacantBuffer<'a>) -> Result<(), E>, - success: Ordering, - failure: Ordering, - ) -> Result, Either> { - match key { - Key::Occupied(_) | Key::Vacant(_) | Key::Pointer { .. 
} => node_ptr - .as_ref() - .set_value(&self.arena, trailer, value_size, f) - .map(|_| Either::Left(if old.is_removed() { None } else { Some(old) })), - Key::Remove(_) | Key::RemoveVacant(_) | Key::RemovePointer { .. } => { - let node = node_ptr.as_ref(); - let key = node.get_key(&self.arena); - match node.clear_value(success, failure) { - Ok(_) => Ok(Either::Left(None)), - Err((offset, len)) => { - let trailer = node.get_trailer_by_offset(&self.arena, offset); - let value = node.get_value_by_offset(&self.arena, offset, len); - Ok(Either::Right(Err(VersionedEntryRef { - arena: &self.arena, - key, - trailer, - value, - ptr: node_ptr, - }))) - } - } - } - } - } -} - -/// A helper struct for caching splice information -pub struct Inserter<'a, T> { - spl: [Splice; super::MAX_HEIGHT], - height: u32, - _m: core::marker::PhantomData<&'a ()>, -} - -impl<'a, T: Copy> Default for Inserter<'a, T> { - #[inline] - fn default() -> Self { - Self { - spl: [Splice::default(); super::MAX_HEIGHT], - height: 0, - _m: core::marker::PhantomData, - } - } -} - -#[derive(Debug, Clone, Copy)] -struct Splice { - prev: NodePtr, - next: NodePtr, -} - -impl Default for Splice { - #[inline] - fn default() -> Self { - Self { - prev: NodePtr::NULL, - next: NodePtr::NULL, - } - } -} - -struct Deallocator { - node: Option, - key: Option, - value: Option, -} - -impl Deallocator { - #[inline] - fn dealloc(self, arena: &Arena) { - unsafe { - if let Some(ptr) = self.node { - arena.dealloc(ptr.offset, ptr.size); - } - - if let Some(ptr) = self.key { - arena.dealloc(ptr.offset, ptr.size); - } - - if let Some(ptr) = self.value { - arena.dealloc(ptr.offset, ptr.size); - } - } - } -} - -struct Pointer { - offset: u32, - size: u32, - height: Option, -} - -impl Pointer { - #[inline] - const fn new(offset: u32, size: u32) -> Self { - Self { - offset, - size, - height: None, - } - } -} - -struct FindResult { - // both key and version are equal. - found: bool, - // only key is equal. 
- found_key: Option, - splice: Splice, - curr: Option>, -} - -#[inline] -const fn encode_value_pointer(offset: u32, val_size: u32) -> u64 { - (val_size as u64) << 32 | offset as u64 -} - -#[inline] -const fn decode_value_pointer(value: u64) -> (u32, u32) { - let offset = value as u32; - let val_size = (value >> 32) as u32; - (offset, val_size) -} - -#[inline] -const fn encode_key_size_and_height(key_size: u32, height: u8) -> u32 { - // first 27 bits for key_size, last 5 bits for height. - key_size << 5 | height as u32 -} - -#[inline] -const fn decode_key_size_and_height(size: u32) -> (u32, u8) { - let key_size = size >> 5; - let height = (size & 0b11111) as u8; - (key_size, height) -} - -#[cold] -#[inline(never)] -fn noop(_: &mut VacantBuffer<'_>) -> Result<(), E> { - Ok(()) -} diff --git a/src/map/api.rs b/src/map/api.rs deleted file mode 100644 index 4c23d6d..0000000 --- a/src/map/api.rs +++ /dev/null @@ -1,1127 +0,0 @@ -use rarena_allocator::ArenaOptions; -use ux2::u27; - -use super::*; - -impl SkipMap { - /// Create a new skipmap with default options. - /// - /// **Note:** The capacity stands for how many memory allocated, - /// it does not mean the skiplist can store `cap` entries. - /// - /// - /// - /// **What the difference between this method and [`SkipMap::mmap_anon`]?** - /// - /// 1. This method will use an `AlignedVec` ensures we are working within Rust's memory safety guarantees. - /// Even if we are working with raw pointers with `Box::into_raw`, - /// the backend ARENA will reclaim the ownership of this memory by converting it back to a `Box` - /// when dropping the backend ARENA. Since `AlignedVec` uses heap memory, the data might be more cache-friendly, - /// especially if you're frequently accessing or modifying it. - /// - /// 2. Where as [`SkipMap::mmap_anon`] will use mmap anonymous to require memory from the OS. 
- /// If you require very large contiguous memory regions, `mmap` might be more suitable because - /// it's more direct in requesting large chunks of memory from the OS. - /// - /// [`SkipMap::mmap_anon`]: #method.mmap_anon - pub fn new() -> Result { - Self::with_options(Options::new()) - } - - /// Like [`SkipMap::new`], but with [`Options`]. - #[inline] - pub fn with_options(opts: Options) -> Result { - Self::with_options_and_comparator(opts, Ascend) - } - - /// Create a new memory map file backed with default options. - /// - /// **Note:** The capacity stands for how many memory mmaped, - /// it does not mean the skipmap can store `cap` entries. - /// - /// `lock`: whether to lock the underlying file or not - #[cfg(all(feature = "memmap", not(target_family = "wasm")))] - #[cfg_attr(docsrs, doc(cfg(all(feature = "memmap", not(target_family = "wasm")))))] - pub fn map_mut>( - path: P, - open_options: OpenOptions, - mmap_options: MmapOptions, - ) -> std::io::Result { - Self::map_mut_with_options(path, Options::new(), open_options, mmap_options) - } - - /// Like [`SkipMap::map_mut`], but with [`Options`]. - #[cfg(all(feature = "memmap", not(target_family = "wasm")))] - #[cfg_attr(docsrs, doc(cfg(all(feature = "memmap", not(target_family = "wasm")))))] - pub fn map_mut_with_options>( - path: P, - opts: Options, - open_options: OpenOptions, - mmap_options: MmapOptions, - ) -> std::io::Result { - Self::map_mut_with_options_and_comparator(path, opts, open_options, mmap_options, Ascend) - } - - /// Open an exist file and mmap it to create skipmap. 
- /// - /// `lock`: whether to lock the underlying file or not - #[cfg(all(feature = "memmap", not(target_family = "wasm")))] - #[cfg_attr(docsrs, doc(cfg(all(feature = "memmap", not(target_family = "wasm")))))] - pub fn map>( - path: P, - open_options: OpenOptions, - mmap_options: MmapOptions, - magic_version: u16, - ) -> std::io::Result { - Self::map_with_comparator(path, open_options, mmap_options, Ascend, magic_version) - } - - /// Create a new memory map backed skipmap with default options. - /// - /// **What the difference between this method and [`SkipMap::new`]?** - /// - /// 1. This method will use mmap anonymous to require memory from the OS directly. - /// If you require very large contiguous memory regions, this method might be more suitable because - /// it's more direct in requesting large chunks of memory from the OS. - /// - /// 2. Where as [`SkipMap::new`] will use an `AlignedVec` ensures we are working within Rust's memory safety guarantees. - /// Even if we are working with raw pointers with `Box::into_raw`, - /// the backend ARENA will reclaim the ownership of this memory by converting it back to a `Box` - /// when dropping the backend ARENA. Since `AlignedVec` uses heap memory, the data might be more cache-friendly, - /// especially if you're frequently accessing or modifying it. - /// - /// [`SkipMap::new`]: #method.new - #[cfg(all(feature = "memmap", not(target_family = "wasm")))] - #[cfg_attr(docsrs, doc(cfg(all(feature = "memmap", not(target_family = "wasm")))))] - pub fn map_anon(mmap_options: MmapOptions) -> std::io::Result { - Self::map_anon_with_options_and_comparator(Options::new(), mmap_options, Ascend) - } - - /// Like [`SkipMap::map_anon`], but with [`Options`]. 
- #[cfg(all(feature = "memmap", not(target_family = "wasm")))] - #[cfg_attr(docsrs, doc(cfg(all(feature = "memmap", not(target_family = "wasm")))))] - pub fn map_anon_with_options(opts: Options, mmap_options: MmapOptions) -> std::io::Result { - Self::map_anon_with_options_and_comparator(opts, mmap_options, Ascend) - } -} - -impl SkipMap { - /// Returns the underlying ARENA allocator used by the skipmap. - /// - /// This is a low level API, you should not use this method unless you know what you are doing. - /// - /// By default, `skl` does not do any forward and backward compatibility checks when using file backed memory map, - /// so this will allow the users to access the ARENA allocator directly, and allocate some bytes or structures - /// to help them implement forward and backward compatibility checks. - /// - /// # Example - /// - /// ```ignore - /// use skl::{SkipMap, OpenOptions, MmapOptinos}; - /// - /// const MAGIC_TEXT: u32 = u32::from_le_bytes(*b"al8n"); - /// - /// struct Meta { - /// magic: u32, - /// version: u32, - /// } - /// - /// let map = SkipMap::map_mut( - /// "/path/to/file", - /// OpenOptions::create_new(Some(1000)).read(true).write(true), - /// MmapOptions::default(), - /// ).unwrap(); - /// let arena = map.allocater(); - /// let mut meta = arena.alloc::(); - /// - /// // Safety: Meta does not require any drop, so it is safe to detach it from the ARENA. - /// unsafe { meta.detach(); } - /// meta.write(Meta { magic: MAGIC_TEXT, version: 1 }); // now the meta info is persisted to the file. - /// ``` - #[inline] - pub const fn allocator(&self) -> &Arena { - &self.arena - } - - /// Returns the offset of the data section in the `SkipMap`. - /// - /// By default, `SkipMap` will allocate meta, head node, and tail node in the ARENA, - /// and the data section will be allocated after the tail node. - /// - /// This method will return the offset of the data section in the ARENA. 
- #[inline] - pub const fn data_offset(&self) -> usize { - self.data_offset as usize - } - - /// Returns the version number of the [`SkipMap`]. - #[inline] - pub const fn version(&self) -> u16 { - self.arena.magic_version() - } - - /// Returns the magic version number of the [`SkipMap`]. - /// - /// This value can be used to check the compatibility for application using [`SkipMap`]. - #[inline] - pub const fn magic_version(&self) -> u16 { - self.meta().magic_version() - } - - /// Returns the height of the highest tower within any of the nodes that - /// have ever been allocated as part of this skiplist. - #[inline] - pub fn height(&self) -> u8 { - self.meta().height() - } - - /// Returns the number of remaining bytes can be allocated by the arena. - #[inline] - pub fn remaining(&self) -> usize { - self.arena.remaining() - } - - /// Returns the number of bytes that have allocated from the arena. - #[inline] - pub fn allocated(&self) -> usize { - self.arena.allocated() - } - - /// Returns the capacity of the arena. - #[inline] - pub const fn capacity(&self) -> usize { - self.arena.capacity() - } - - /// Returns the number of entries in the skipmap. - #[inline] - pub fn len(&self) -> usize { - self.meta().len() as usize - } - - /// Returns true if the skipmap is empty. - #[inline] - pub fn is_empty(&self) -> bool { - self.len() == 0 - } - - /// Gets the number of pointers to this `SkipMap` similar to [`Arc::strong_count`](std::sync::Arc::strong_count). - #[inline] - pub fn refs(&self) -> usize { - self.arena.refs() - } - - /// Returns how many bytes are discarded by the ARENA. - #[inline] - pub fn discarded(&self) -> u32 { - self.arena.discarded() - } - - /// Returns the maximum version of all entries in the map. - #[inline] - pub fn max_version(&self) -> u64 { - self.meta().max_version() - } - - /// Returns the minimum version of all entries in the map. 
- #[inline] - pub fn min_version(&self) -> u64 { - self.meta().min_version() - } - - /// Returns the comparator used to compare keys. - #[inline] - pub const fn comparator(&self) -> &C { - &self.cmp - } - - /// Like [`SkipMap::new`], but with a custom [`Comparator`]. - #[inline] - pub fn with_comparator(cmp: C) -> Result { - Self::with_options_and_comparator(Options::new(), cmp) - } - - /// Like [`SkipMap::new`], but with [`Options`] and a custom [`Comparator`]. - #[inline] - pub fn with_options_and_comparator(opts: Options, cmp: C) -> Result { - let arena_opts = ArenaOptions::new() - .with_capacity(opts.capacity()) - .with_maximum_alignment(Node::::ALIGN as usize) - .with_unify(opts.unify()) - .with_magic_version(CURRENT_VERSION) - .with_freelist(opts.freelist()); - let arena = Arena::new(arena_opts); - Self::new_in(arena, cmp, opts) - } - - /// Like [`SkipMap::map_mut`], but with a custom [`Comparator`]. - #[cfg(all(feature = "memmap", not(target_family = "wasm")))] - #[cfg_attr(docsrs, doc(cfg(all(feature = "memmap", not(target_family = "wasm")))))] - #[inline] - pub fn map_mut_with_comparator>( - path: P, - open_options: OpenOptions, - mmap_options: MmapOptions, - cmp: C, - ) -> std::io::Result { - Self::map_mut_with_options_and_comparator(path, Options::new(), open_options, mmap_options, cmp) - } - - /// Like [`SkipMap::map_mut`], but with [`Options`] and a custom [`Comparator`]. 
- #[cfg(all(feature = "memmap", not(target_family = "wasm")))] - #[cfg_attr(docsrs, doc(cfg(all(feature = "memmap", not(target_family = "wasm")))))] - #[inline] - pub fn map_mut_with_options_and_comparator>( - path: P, - opts: Options, - open_options: OpenOptions, - mmap_options: MmapOptions, - cmp: C, - ) -> std::io::Result { - let alignment = Node::::ALIGN as usize; - let arena_opts = ArenaOptions::new() - .with_maximum_alignment(alignment) - .with_magic_version(CURRENT_VERSION) - .with_freelist(opts.freelist()); - let arena = Arena::map_mut(path, arena_opts, open_options, mmap_options)?; - Self::new_in(arena, cmp, opts.with_unify(true)) - .map_err(invalid_data) - .and_then(|map| { - if map.magic_version() != opts.magic_version() { - Err(bad_magic_version()) - } else if map.version() != CURRENT_VERSION { - Err(bad_version()) - } else { - Ok(map) - } - }) - } - - /// Like [`SkipMap::map`], but with a custom [`Comparator`]. - #[cfg(all(feature = "memmap", not(target_family = "wasm")))] - #[cfg_attr(docsrs, doc(cfg(all(feature = "memmap", not(target_family = "wasm")))))] - #[inline] - pub fn map_with_comparator>( - path: P, - open_options: OpenOptions, - mmap_options: MmapOptions, - cmp: C, - magic_version: u16, - ) -> std::io::Result { - let arena = Arena::map(path, open_options, mmap_options, CURRENT_VERSION)?; - Self::new_in( - arena, - cmp, - Options::new() - .with_unify(true) - .with_magic_version(magic_version), - ) - .map_err(invalid_data) - .and_then(|map| { - if map.magic_version() != magic_version { - Err(bad_magic_version()) - } else if map.version() != CURRENT_VERSION { - Err(bad_version()) - } else { - Ok(map) - } - }) - } - - /// Like [`SkipMap::map_anon`], but with a custom [`Comparator`]. 
- #[cfg(all(feature = "memmap", not(target_family = "wasm")))] - #[cfg_attr(docsrs, doc(cfg(all(feature = "memmap", not(target_family = "wasm")))))] - #[inline] - pub fn map_anon_with_comparator(mmap_options: MmapOptions, cmp: C) -> std::io::Result { - Self::map_anon_with_options_and_comparator(Options::new(), mmap_options, cmp) - } - - /// Like [`SkipMap::map_anon`], but with [`Options`] and a custom [`Comparator`]. - #[cfg(all(feature = "memmap", not(target_family = "wasm")))] - #[cfg_attr(docsrs, doc(cfg(all(feature = "memmap", not(target_family = "wasm")))))] - #[inline] - pub fn map_anon_with_options_and_comparator( - opts: Options, - mmap_options: MmapOptions, - cmp: C, - ) -> std::io::Result { - let alignment = Node::::ALIGN as usize; - let arena_opts = ArenaOptions::new() - .with_maximum_alignment(alignment) - .with_unify(opts.unify()) - .with_magic_version(CURRENT_VERSION); - let arena = Arena::map_anon(arena_opts, mmap_options)?; - Self::new_in(arena, cmp, opts).map_err(invalid_data) - } - - /// Clear the skiplist to empty and re-initialize. - /// - /// # Safety - /// - The current pointers get from the ARENA cannot be used anymore after calling this method. - /// - This method is not thread-safe. - /// - /// # Example - /// - /// Undefine behavior: - /// - /// ```ignore - /// let map = SkipMap::new(1000).unwrap(); - /// - /// map.insert(1, b"hello", b"world").unwrap(); - /// - /// let data = map.get(b"hello").unwrap(); - /// - /// map.clear().unwrap(); - /// - /// let w = data[0]; // undefined behavior - /// ``` - pub unsafe fn clear(&mut self) -> Result<(), Error> { - self.arena.clear()?; - - let meta = if self.opts.unify() { - Self::allocate_meta(&self.arena, self.meta().magic_version())? 
- } else { - unsafe { - let magic_version = self.meta().magic_version(); - let _ = Box::from_raw(self.meta.as_ptr()); - NonNull::new_unchecked(Box::into_raw(Box::new(Meta::new(magic_version)))) - } - }; - - self.meta = meta; - - let max_height: u8 = self.opts.max_height().into(); - let head = Self::allocate_full_node(&self.arena, max_height)?; - let tail = Self::allocate_full_node(&self.arena, max_height)?; - - // Safety: - // We will always allocate enough space for the head node and the tail node. - unsafe { - // Link all head/tail levels together. - for i in 0..(max_height as usize) { - let head_link = head.tower(&self.arena, i); - let tail_link = tail.tower(&self.arena, i); - head_link.next_offset.store(tail.offset, Ordering::Relaxed); - tail_link.prev_offset.store(head.offset, Ordering::Relaxed); - } - } - - self.head = head; - self.tail = tail; - Ok(()) - } - - /// Flushes outstanding memory map modifications to disk. - /// - /// When this method returns with a non-error result, - /// all outstanding changes to a file-backed memory map are guaranteed to be durably stored. - /// The file's metadata (including last modification timestamp) may not be updated. - #[cfg(all(feature = "memmap", not(target_family = "wasm")))] - #[cfg_attr(docsrs, doc(cfg(all(feature = "memmap", not(target_family = "wasm")))))] - pub fn flush(&self) -> std::io::Result<()> { - self.arena.flush() - } - - /// Asynchronously flushes outstanding memory map modifications to disk. - /// - /// This method initiates flushing modified pages to durable storage, but it will not wait for - /// the operation to complete before returning. The file's metadata (including last - /// modification timestamp) may not be updated. 
- #[cfg(all(feature = "memmap", not(target_family = "wasm")))] - #[cfg_attr(docsrs, doc(cfg(all(feature = "memmap", not(target_family = "wasm")))))] - pub fn flush_async(&self) -> std::io::Result<()> { - self.arena.flush_async() - } - - #[cfg(all(test, feature = "std"))] - #[inline] - pub(crate) fn with_yield_now(mut self) -> Self { - self.yield_now = true; - self - } -} - -impl SkipMap { - /// Upserts a new key-value pair if it does not yet exist, if the key with the given version already exists, it will update the value. - /// Unlike [`insert`](SkipMap::insert), this method will update the value if the key with the given version already exists. - /// - /// - Returns `Ok(None)` if the key was successfully inserted. - /// - Returns `Ok(Some(old))` if the key with the given version already exists and the value is successfully updated. - pub fn insert<'a, 'b: 'a>( - &'a self, - trailer: T, - key: &'b [u8], - value: &'b [u8], - ) -> Result>, Error> { - if self.arena.read_only() { - return Err(Error::read_only()); - } - - let copy = |buf: &mut VacantBuffer| { - let _ = buf.write(value); - Ok(()) - }; - let val_len = value.len() as u32; - - self - .update::( - trailer, - Key::Occupied(key), - val_len, - copy, - Ordering::Relaxed, - Ordering::Relaxed, - &mut Inserter::default(), - true, - ) - .map(|old| { - old.expect_left("insert must get InsertOk").and_then(|old| { - if old.is_removed() { - None - } else { - Some(EntryRef(old)) - } - }) - }) - .map_err(|e| e.expect_right("must be map::Error")) - } - - /// Upserts a new key if it does not yet exist, if the key with the given version already exists, it will update the value. - /// Unlike [`get_or_insert_with_value`](SkipMap::get_or_insert_with_value), this method will update the value if the key with the given version already exists. - /// - /// This method is useful when you want to insert a key and you know the value size but you do not have the value - /// at this moment. 
- /// - /// A placeholder will be inserted first, then you will get an [`VacantBuffer`], - /// and you must fill the buffer with bytes later in the closure. - /// - /// - Returns `Ok(None)` if the key was successfully inserted. - /// - Returns `Ok(Some(old))` if the key with the given version already exists and the value is successfully updated. - /// - /// # Example - /// - /// ```rust - /// use skl::SkipMap; - /// - /// struct Person { - /// id: u32, - /// name: String, - /// } - /// - /// impl Person { - /// fn encoded_size(&self) -> usize { - /// 4 + self.name.len() - /// } - /// } - /// - /// - /// let alice = Person { - /// id: 1, - /// name: "Alice".to_string(), - /// }; - /// - /// let encoded_size = alice.encoded_size(); - /// - /// let l = SkipMap::new().unwrap(); - /// - /// l.insert_with_value::(1, b"alice", encoded_size as u32, |mut val| { - /// val.write(&alice.id.to_le_bytes()).unwrap(); - /// val.write(alice.name.as_bytes()).unwrap(); - /// Ok(()) - /// }) - /// .unwrap(); - /// ``` - pub fn insert_with_value<'a, 'b: 'a, E>( - &'a self, - trailer: T, - key: &'b [u8], - value_size: u32, - f: impl Fn(&mut VacantBuffer<'a>) -> Result<(), E>, - ) -> Result>, Either> { - if self.arena.read_only() { - return Err(Either::Right(Error::read_only())); - } - - self - .update( - trailer, - Key::Occupied(key), - value_size, - f, - Ordering::Relaxed, - Ordering::Relaxed, - &mut Inserter::default(), - true, - ) - .map(|old| { - old.expect_left("insert must get InsertOk").and_then(|old| { - if old.is_removed() { - None - } else { - Some(EntryRef(old)) - } - }) - }) - } - - /// Inserts a new key-value pair if it does not yet exist. - /// - /// Unlike [`insert`](SkipMap::insert), this method will not update the value if the key with the given version already exists. - /// - /// - Returns `Ok(None)` if the key was successfully get_or_inserted. - /// - Returns `Ok(Some(_))` if the key with the given version already exists. 
- pub fn get_or_insert<'a, 'b: 'a>( - &'a self, - trailer: T, - key: &'b [u8], - value: &'b [u8], - ) -> Result>, Error> { - if self.arena.read_only() { - return Err(Error::read_only()); - } - - let copy = |buf: &mut VacantBuffer| { - let _ = buf.write(value); - Ok(()) - }; - let val_len = value.len() as u32; - - self - .update::( - trailer, - Key::Occupied(key), - val_len, - copy, - Ordering::Relaxed, - Ordering::Relaxed, - &mut Inserter::default(), - false, - ) - .map(|old| { - old.expect_left("insert must get InsertOk").and_then(|old| { - if old.is_removed() { - None - } else { - Some(EntryRef(old)) - } - }) - }) - .map_err(|e| e.expect_right("must be map::Error")) - } - - /// Inserts a new key if it does not yet exist. - /// - /// Unlike [`insert_with_value`](SkipMap::insert_with_value), this method will not update the value if the key with the given version already exists. - /// - /// This method is useful when you want to get_or_insert a key and you know the value size but you do not have the value - /// at this moment. - /// - /// A placeholder will be inserted first, then you will get an [`VacantBuffer`], - /// and you must fill the buffer with bytes later in the closure. - /// - /// - Returns `Ok(None)` if the key was successfully get_or_inserted. - /// - Returns `Ok(Some(_))` if the key with the given version already exists. 
- /// - /// # Example - /// - /// ```rust - /// use skl::SkipMap; - /// - /// struct Person { - /// id: u32, - /// name: String, - /// } - /// - /// impl Person { - /// fn encoded_size(&self) -> usize { - /// 4 + self.name.len() - /// } - /// } - /// - /// - /// let alice = Person { - /// id: 1, - /// name: "Alice".to_string(), - /// }; - /// - /// let encoded_size = alice.encoded_size(); - /// - /// let l = SkipMap::new().unwrap(); - /// - /// l.get_or_insert_with_value::(1, b"alice", encoded_size as u32, |mut val| { - /// val.write(&alice.id.to_le_bytes()).unwrap(); - /// val.write(alice.name.as_bytes()).unwrap(); - /// Ok(()) - /// }) - /// .unwrap(); - /// ``` - pub fn get_or_insert_with_value<'a, 'b: 'a, E>( - &'a self, - trailer: T, - key: &'b [u8], - value_size: u32, - f: impl Fn(&mut VacantBuffer<'a>) -> Result<(), E>, - ) -> Result>, Either> { - if self.arena.read_only() { - return Err(Either::Right(Error::read_only())); - } - - self - .update( - trailer, - Key::Occupied(key), - value_size, - f, - Ordering::Relaxed, - Ordering::Relaxed, - &mut Inserter::default(), - false, - ) - .map(|old| { - old.expect_left("insert must get InsertOk").and_then(|old| { - if old.is_removed() { - None - } else { - Some(EntryRef(old)) - } - }) - }) - } - - /// Upserts a new key if it does not yet exist, if the key with the given version already exists, it will update the value. - /// Unlike [`get_or_insert_with`](SkipMap::get_or_insert_with), this method will update the value if the key with the given version already exists. - /// - /// This method is useful when you want to insert a key and you know the value size but you do not have the value - /// at this moment. - /// - /// A placeholder will be inserted first, then you will get an [`VacantBuffer`], - /// and you must fill the buffer with bytes later in the closure. - /// - /// - Returns `Ok(None)` if the key was successfully inserted. 
- /// - Returns `Ok(Some(old))` if the key with the given version already exists and the value is successfully updated. - /// - /// # Example - /// - /// ```rust - /// use skl::{SkipMap, u27}; - /// - /// struct Person { - /// id: u32, - /// name: String, - /// } - /// - /// impl Person { - /// fn encoded_size(&self) -> usize { - /// 4 + self.name.len() - /// } - /// } - /// - /// - /// let alice = Person { - /// id: 1, - /// name: "Alice".to_string(), - /// }; - /// - /// let encoded_size = alice.encoded_size(); - /// - /// let l = SkipMap::new().unwrap(); - /// - /// l.insert_with::(1, u27::new(5), |key| { - /// key.write(b"alice").unwrap(); - /// Ok(()) - /// }, encoded_size as u32, |mut val| { - /// val.write(&alice.id.to_le_bytes()).unwrap(); - /// val.write(alice.name.as_bytes()).unwrap(); - /// Ok(()) - /// }) - /// .unwrap(); - /// ``` - pub fn insert_with<'a, E>( - &'a self, - trailer: T, - key_size: u27, - key: impl FnOnce(&mut VacantBuffer<'a>) -> Result<(), E>, - val_size: u32, - val: impl Fn(&mut VacantBuffer<'a>) -> Result<(), E>, - ) -> Result>, Either> { - let vk = self.fetch_vacant_key(u32::from(key_size), key)?; - - self - .update( - trailer, - Key::Vacant(vk), - val_size, - val, - Ordering::Relaxed, - Ordering::Relaxed, - &mut Inserter::default(), - true, - ) - .map(|old| { - old.expect_left("insert must get InsertOk").and_then(|old| { - if old.is_removed() { - None - } else { - Some(EntryRef(old)) - } - }) - }) - } - - /// Inserts a new key if it does not yet exist. - /// - /// Unlike [`insert_with`](SkipMap::insert_with), this method will not update the value if the key with the given version already exists. - /// - /// This method is useful when you want to get_or_insert a key and you know the value size but you do not have the value - /// at this moment. - /// - /// A placeholder will be inserted first, then you will get an [`VacantBuffer`], - /// and you must fill the buffer with bytes later in the closure. 
- /// - /// # Example - /// - /// ```rust - /// use skl::{SkipMap, u27}; - /// - /// struct Person { - /// id: u32, - /// name: String, - /// } - /// - /// impl Person { - /// fn encoded_size(&self) -> usize { - /// 4 + self.name.len() - /// } - /// } - /// - /// - /// let alice = Person { - /// id: 1, - /// name: "Alice".to_string(), - /// }; - /// - /// let encoded_size = alice.encoded_size(); - /// - /// let l = SkipMap::new().unwrap(); - /// - /// l.get_or_insert_with::(1, u27::new(5), |key| { - /// key.write(b"alice").unwrap(); - /// Ok(()) - /// }, encoded_size as u32, |mut val| { - /// val.write(&alice.id.to_le_bytes()).unwrap(); - /// val.write(alice.name.as_bytes()).unwrap(); - /// Ok(()) - /// }) - /// .unwrap(); - /// ``` - pub fn get_or_insert_with<'a, E>( - &'a self, - trailer: T, - key_size: u27, - key: impl FnOnce(&mut VacantBuffer<'a>) -> Result<(), E>, - val_size: u32, - val: impl Fn(&mut VacantBuffer<'a>) -> Result<(), E>, - ) -> Result>, Either> { - let vk = self.fetch_vacant_key(u32::from(key_size), key)?; - - self - .update( - trailer, - Key::Vacant(vk), - val_size, - val, - Ordering::Relaxed, - Ordering::Relaxed, - &mut Inserter::default(), - false, - ) - .map(|old| { - old.expect_left("insert must get InsertOk").and_then(|old| { - if old.is_removed() { - None - } else { - Some(EntryRef(old)) - } - }) - }) - } - - /// Removes the key-value pair if it exists. A CAS operation will be used to ensure the operation is atomic. - /// - /// Unlike [`get_or_remove`](SkipMap::get_or_remove), this method will remove the value if the key with the given version already exists. - /// - /// - Returns `Ok(None)`: - /// - if the remove operation is successful or the key is marked in remove status by other threads. - /// - Returns `Ok(Either::Right(current))` if the key with the given version already exists - /// and the entry is not successfully removed because of an update on this entry happens in another thread. 
- pub fn compare_remove<'a, 'b: 'a>( - &'a self, - trailer: T, - key: &'b [u8], - success: Ordering, - failure: Ordering, - ) -> Result>, Error> { - self - .update( - trailer, - Key::Remove(key), - 0, - noop::, - success, - failure, - &mut Inserter::default(), - true, - ) - .map(|res| match res { - Either::Left(_) => None, - Either::Right(res) => match res { - Ok(old) => { - if old.is_removed() { - None - } else { - Some(EntryRef(old)) - } - } - Err(current) => { - if current.is_removed() { - None - } else { - Some(EntryRef(current)) - } - } - }, - }) - .map_err(|e| e.expect_right("must be map::Error")) - } - - /// Gets or removes the key-value pair if it exists. - /// Unlike [`compare_remove`](SkipMap::compare_remove), this method will not remove the value if the key with the given version already exists. - /// - /// - Returns `Ok(None)` if the key does not exist. - /// - Returns `Ok(Some(old))` if the key with the given version already exists. - pub fn get_or_remove<'a, 'b: 'a>( - &'a self, - trailer: T, - key: &'b [u8], - ) -> Result>, Error> { - self - .update( - trailer, - Key::Remove(key), - 0, - noop::, - Ordering::Relaxed, - Ordering::Relaxed, - &mut Inserter::default(), - false, - ) - .map(|res| match res { - Either::Left(old) => match old { - Some(old) => { - if old.is_removed() { - None - } else { - Some(EntryRef(old)) - } - } - None => None, - }, - _ => unreachable!("get_or_remove does not use CAS, so it must return `Either::Left`"), - }) - .map_err(|e| e.expect_right("must be map::Error")) - } - - /// Gets or removes the key-value pair if it exists. - /// Unlike [`compare_remove`](SkipMap::compare_remove), this method will not remove the value if the key with the given version already exists. - /// - /// - Returns `Ok(None)` if the key does not exist. - /// - Returns `Ok(Some(old))` if the key with the given version already exists. 
- /// - /// This method is useful when you want to get_or_remove a key and you know the key size but you do not have the key - /// at this moment. - /// - /// A placeholder will be inserted first, then you will get an [`VacantBuffer`], - /// and you must fill the buffer with bytes later in the closure. - /// - /// # Example - /// - /// ```rust - /// use skl::{SkipMap, u27}; - /// - /// struct Person { - /// id: u32, - /// name: String, - /// } - /// - /// impl Person { - /// fn encoded_size(&self) -> usize { - /// 4 + self.name.len() - /// } - /// } - /// - /// - /// let alice = Person { - /// id: 1, - /// name: "Alice".to_string(), - /// }; - /// - /// let encoded_size = alice.encoded_size(); - /// - /// let l = SkipMap::new().unwrap(); - /// - /// l.get_or_remove_with::(1, u27::new(5), |key| { - /// key.write(b"alice").unwrap(); - /// Ok(()) - /// }) - /// .unwrap(); - /// ``` - pub fn get_or_remove_with<'a, 'b: 'a, E>( - &'a self, - trailer: T, - key_size: u27, - key: impl FnOnce(&mut VacantBuffer<'a>) -> Result<(), E>, - ) -> Result>, Either> { - let vk = self.fetch_vacant_key(u32::from(key_size), key)?; - let key = Key::RemoveVacant(vk); - self - .update( - trailer, - key, - 0, - noop::, - Ordering::Relaxed, - Ordering::Relaxed, - &mut Inserter::default(), - false, - ) - .map(|res| match res { - Either::Left(old) => match old { - Some(old) => { - if old.is_removed() { - None - } else { - Some(EntryRef(old)) - } - } - None => None, - }, - _ => unreachable!("get_or_remove does not use CAS, so it must return `Either::Left`"), - }) - .map_err(|e| Either::Right(e.expect_right("must be map::Error"))) - } - - /// Returns true if the key exists in the map. - #[inline] - pub fn contains_key<'a, 'b: 'a>(&'a self, version: u64, key: &'b [u8]) -> bool { - self.get(version, key).is_some() - } - - /// Returns the first entry in the map. 
- pub fn first(&self, version: u64) -> Option> { - self.iter(version).seek_lower_bound(Bound::Unbounded) - } - - /// Returns the last entry in the map. - pub fn last(&self, version: u64) -> Option> { - self.iter(version).seek_upper_bound(Bound::Unbounded) - } - - /// Returns the value associated with the given key, if it exists. - pub fn get<'a, 'b: 'a>(&'a self, version: u64, key: &'b [u8]) -> Option> { - unsafe { - let (n, eq) = self.find_near(version, key, false, true); // findLessOrEqual. - - let n = n?; - let node = n.as_ref(); - let node_key = node.get_key(&self.arena); - let (trailer, value) = node.get_value_and_trailer(&self.arena); - if eq { - return value.map(|val| { - EntryRef(VersionedEntryRef { - arena: &self.arena, - key: node_key, - trailer, - value: Some(val), - ptr: n, - }) - }); - } - - if !matches!(self.cmp.compare(key, node_key), cmp::Ordering::Equal) { - return None; - } - - if trailer.version() > version { - return None; - } - - value.map(|val| { - EntryRef(VersionedEntryRef { - arena: &self.arena, - key: node_key, - trailer, - value: Some(val), - ptr: n, - }) - }) - } - } - - /// Returns an `EntryRef` pointing to the highest element whose key is below the given bound. - /// If no such element is found then `None` is returned. - pub fn upper_bound<'a, 'b: 'a>( - &'a self, - version: u64, - upper: Bound<&'b [u8]>, - ) -> Option> { - self.iter(version).seek_upper_bound(upper) - } - - /// Returns an `EntryRef` pointing to the lowest element whose key is above the given bound. - /// If no such element is found then `None` is returned. - pub fn lower_bound<'a, 'b: 'a>( - &'a self, - version: u64, - lower: Bound<&'b [u8]>, - ) -> Option> { - self.iter(version).seek_lower_bound(lower) - } - - /// Returns a new iterator, this iterator will yield the latest version of all entries in the map less or equal to the given version. 
- #[inline] - pub const fn iter(&self, version: u64) -> iterator::Iter { - iterator::Iter::new(version, self) - } - - /// Returns a new iterator, this iterator will yield all versions for all entries in the map less or equal to the given version. - #[inline] - pub const fn iter_all_versions(&self, version: u64) -> iterator::AllVersionsIter { - iterator::AllVersionsIter::new(version, self, true) - } - - /// Returns a iterator that within the range, this iterator will yield the latest version of all entries in the range less or equal to the given version. - #[inline] - pub fn range<'a, Q, R>(&'a self, version: u64, range: R) -> iterator::Iter<'a, T, C, Q, R> - where - &'a [u8]: PartialOrd, - Q: ?Sized + PartialOrd<&'a [u8]>, - R: RangeBounds + 'a, - { - iterator::Iter::range(version, self, range) - } - - /// Returns a iterator that within the range, this iterator will yield all versions for all entries in the range less or equal to the given version. - #[inline] - pub fn range_all_versions<'a, Q, R>( - &'a self, - version: u64, - range: R, - ) -> iterator::AllVersionsIter<'a, T, C, Q, R> - where - &'a [u8]: PartialOrd, - Q: ?Sized + PartialOrd<&'a [u8]>, - R: RangeBounds + 'a, - { - iterator::AllVersionsIter::range(version, self, range, true) - } -} diff --git a/src/map/entry.rs b/src/map/entry.rs deleted file mode 100644 index 88154fa..0000000 --- a/src/map/entry.rs +++ /dev/null @@ -1,297 +0,0 @@ -use rarena_allocator::Arena; - -use super::{NodePtr, Trailer}; - -/// A versioned entry reference of the skipmap. -/// -/// Compared to the [`EntryRef`], this one's value can be `None` which means the entry is removed. 
-#[derive(Debug)] -pub struct VersionedEntryRef<'a, T> { - pub(super) arena: &'a Arena, - pub(super) key: &'a [u8], - pub(super) trailer: T, - pub(super) value: Option<&'a [u8]>, - pub(super) ptr: NodePtr, -} - -impl<'a, T: Clone> Clone for VersionedEntryRef<'a, T> { - fn clone(&self) -> Self { - Self { - arena: self.arena, - key: self.key, - trailer: self.trailer.clone(), - value: self.value, - ptr: self.ptr, - } - } -} - -impl<'a, T: Copy> Copy for VersionedEntryRef<'a, T> {} - -impl<'a, T> VersionedEntryRef<'a, T> { - /// Returns the reference to the key - #[inline] - pub const fn key(&self) -> &[u8] { - self.key - } - - /// Returns the reference to the value, `None` means the entry is removed. - #[inline] - pub const fn value(&self) -> Option<&[u8]> { - self.value - } - - /// Returns the trailer of the entry - #[inline] - pub const fn trailer(&self) -> &T { - &self.trailer - } - - /// Returns if the entry is marked as removed - #[inline] - pub const fn is_removed(&self) -> bool { - self.value.is_none() - } - - /// Returns the owned versioned entry, - /// feel free to clone the entry if needed, no allocation and no deep clone will be made. 
- #[inline] - pub fn to_owned(&self) -> VersionedEntry - where - T: Clone, - { - VersionedEntry { - arena: self.arena.clone(), - trailer: self.trailer.clone(), - ptr: self.ptr, - } - } - - /// Returns the version of the entry - #[inline] - pub fn version(&self) -> u64 - where - T: Trailer, - { - self.trailer.version() - } -} - -impl<'a, T: Clone> From> for VersionedEntry { - fn from(entry: VersionedEntryRef<'a, T>) -> Self { - entry.to_owned() - } -} - -impl<'a, T: Copy> VersionedEntryRef<'a, T> { - pub(super) fn from_node(node_ptr: NodePtr, arena: &'a Arena) -> VersionedEntryRef<'a, T> { - unsafe { - let node = node_ptr.as_ref(); - let (trailer, value) = node.get_value_and_trailer(arena); - VersionedEntryRef { - key: node.get_key(arena), - trailer, - value, - arena, - ptr: node_ptr, - } - } - } -} - -/// An owned versioned entry of the skipmap. -/// -/// Compared to the [`Entry`], this one's value can be `None` which means the entry is removed. -#[derive(Debug)] -pub struct VersionedEntry { - pub(super) arena: Arena, - pub(super) trailer: T, - pub(super) ptr: NodePtr, -} - -impl Clone for VersionedEntry { - fn clone(&self) -> Self { - Self { - arena: self.arena.clone(), - trailer: self.trailer.clone(), - ptr: self.ptr, - } - } -} - -impl<'a, T: Clone> From<&'a VersionedEntry> for VersionedEntryRef<'a, T> { - fn from(entry: &'a VersionedEntry) -> VersionedEntryRef<'a, T> { - entry.borrow() - } -} - -impl VersionedEntry { - /// Returns the reference to the key - #[inline] - pub fn key(&self) -> &[u8] { - unsafe { - let node = self.ptr.as_ref(); - node.get_key(&self.arena) - } - } - - /// Returns the reference to the value, `None` means the entry is removed. 
- #[inline] - pub fn value(&self) -> Option<&[u8]> { - unsafe { - let node = self.ptr.as_ref(); - let value = node.get_value(&self.arena); - value - } - } - - /// Returns the trailer of the entry - #[inline] - pub const fn trailer(&self) -> &T { - &self.trailer - } - - /// Returns the borrowed entry reference - #[inline] - pub fn borrow(&self) -> VersionedEntryRef<'_, T> - where - T: Clone, - { - VersionedEntryRef { - arena: &self.arena, - key: self.key(), - trailer: self.trailer.clone(), - value: self.value(), - ptr: self.ptr, - } - } - - /// Returns the version of the entry - #[inline] - pub fn version(&self) -> u64 - where - T: Trailer, - { - self.trailer.version() - } -} - -/// An owned entry of the skipmap. -/// -/// Compared to the [`VersionedEntry`], this one's value cannot be `None`. -#[derive(Debug)] -pub struct Entry(VersionedEntry); - -impl Clone for Entry { - fn clone(&self) -> Self { - Self(self.0.clone()) - } -} - -impl<'a, T: Clone> From<&'a Entry> for EntryRef<'a, T> { - fn from(entry: &'a Entry) -> Self { - entry.borrow() - } -} - -impl Entry { - /// Returns the reference to the key - #[inline] - pub fn key(&self) -> &[u8] { - self.0.key() - } - - /// Returns the reference to the value - #[inline] - pub fn value(&self) -> &[u8] { - match self.0.value() { - Some(value) => value, - None => panic!("Entry's value cannot be `None`"), - } - } - - /// Returns the trailer of the entry - #[inline] - pub fn trailer(&self) -> &T { - self.0.trailer() - } - - /// Returns the borrowed entry reference - #[inline] - pub fn borrow(&self) -> EntryRef<'_, T> - where - T: Clone, - { - EntryRef(self.0.borrow()) - } - - /// Returns the version of the entry - #[inline] - pub fn version(&self) -> u64 - where - T: Trailer, - { - self.0.version() - } -} - -/// An entry reference to the skipmap's entry. -/// -/// Compared to the [`VersionedEntryRef`], this one's value cannot be `None`. 
-#[derive(Debug)] -pub struct EntryRef<'a, T>(pub(crate) VersionedEntryRef<'a, T>); - -impl<'a, T: Clone> Clone for EntryRef<'a, T> { - fn clone(&self) -> Self { - Self(self.0.clone()) - } -} - -impl<'a, T: Copy> Copy for EntryRef<'a, T> {} - -impl<'a, T: Clone> From> for Entry { - fn from(entry: EntryRef<'a, T>) -> Self { - entry.to_owned() - } -} - -impl<'a, T> EntryRef<'a, T> { - /// Returns the reference to the key - #[inline] - pub const fn key(&self) -> &[u8] { - self.0.key() - } - - /// Returns the reference to the value, `None` means the entry is removed. - #[inline] - pub const fn value(&self) -> &[u8] { - match self.0.value() { - Some(value) => value, - None => panic!("EntryRef's value cannot be `None`"), - } - } - - /// Returns the trailer of the entry - #[inline] - pub const fn trailer(&self) -> &T { - self.0.trailer() - } - - /// Returns the owned entry, feel free to clone the entry if needed, no allocation and no deep clone will be made. - #[inline] - pub fn to_owned(&self) -> Entry - where - T: Clone, - { - Entry(self.0.to_owned()) - } - - /// Returns the version of the entry - #[inline] - pub fn version(&self) -> u64 - where - T: Trailer, - { - self.0.version() - } -} diff --git a/src/options.rs b/src/options.rs index 9dffb83..6d288ec 100644 --- a/src/options.rs +++ b/src/options.rs @@ -4,18 +4,31 @@ pub use rarena_allocator::{MmapOptions, OpenOptions}; pub use rarena_allocator::Freelist; -use ux2::{u27, u5}; +use crate::{Height, KeySize}; -/// Options for `SkipMap`. +/// Configuration for the compression policy of the key in [`SkipMap`](super::SkipMap). +#[derive(Debug, Default, Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash)] +#[non_exhaustive] +pub enum CompressionPolicy { + /// Fast compression policy, which only checks if the key is a prefix of the next key. + #[default] + Fast, + /// High compression policy, which checks if the key is a substring of the next key. + High, +} + +/// Options for [`SkipMap`](super::SkipMap). 
#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)] pub struct Options { max_value_size: u32, - max_key_size: u27, - max_height: u5, + max_key_size: KeySize, + max_height: Height, magic_version: u16, capacity: u32, unify: bool, freelist: Freelist, + policy: CompressionPolicy, + reserved: u32, } impl Default for Options { @@ -31,15 +44,42 @@ impl Options { pub const fn new() -> Self { Self { max_value_size: u32::MAX, - max_key_size: u27::MAX, - max_height: u5::new(20), + max_key_size: KeySize::MAX, + max_height: Height::new(), capacity: 1024, unify: false, magic_version: 0, freelist: Freelist::Optimistic, + policy: CompressionPolicy::Fast, + reserved: 0, } } + /// Set the reserved bytes of the ARENA. + /// + /// The reserved is used to configure the start position of the ARENA. This is useful + /// when you want to add some bytes before the ARENA, e.g. when using the memory map file backed ARENA, + /// you can set the reserved to the size to `8` to store a 8 bytes checksum. + /// + /// The default reserved is `0`. + /// + /// # Example + /// + /// ```rust + /// use skl::Options; + /// + /// let opts = Options::new().with_reserved(8); + /// ``` + #[inline] + pub const fn with_reserved(mut self, reserved: u32) -> Self { + self.reserved = if self.capacity <= reserved { + self.capacity + } else { + reserved + }; + self + } + /// Set the magic version of the [`SkipMap`](super::SkipMap). /// /// This is used by the application using [`SkipMap`](super::SkipMap) @@ -78,6 +118,23 @@ impl Options { self } + /// Set the compression policy of the key in [`SkipMap`](super::SkipMap). + /// + /// The default value is [`CompressionPolicy::Fast`]. 
+ /// + /// # Example + /// + /// ``` + /// use skl::{Options, options::CompressionPolicy}; + /// + /// let opts = Options::new().with_compression_policy(CompressionPolicy::High); + /// ``` + #[inline] + pub const fn with_compression_policy(mut self, policy: CompressionPolicy) -> Self { + self.policy = policy; + self + } + /// Set if use the unify memory layout of the [`SkipMap`](super::SkipMap). /// /// File backed [`SkipMap`](super::SkipMap) has different memory layout with other kind backed [`SkipMap`](super::SkipMap), @@ -122,17 +179,17 @@ impl Options { /// /// The maximum size of the key is `u27::MAX`. /// - /// Default is `u27::MAX`. + /// Default is `65535`. /// /// # Example /// /// ``` - /// use skl::{Options, u27}; + /// use skl::{Options, KeySize}; /// - /// let options = Options::new().with_max_key_size(u27::new(1024)); + /// let options = Options::new().with_max_key_size(KeySize::new()); /// ``` #[inline] - pub const fn with_max_key_size(mut self, size: u27) -> Self { + pub const fn with_max_key_size(mut self, size: KeySize) -> Self { self.max_key_size = size; self } @@ -144,12 +201,12 @@ impl Options { /// # Example /// /// ``` - /// use skl::{Options, u5}; + /// use skl::{Options, Height}; /// - /// let options = Options::new().with_max_height(u5::new(20)); + /// let options = Options::new().with_max_height(Height::new()); /// ``` #[inline] - pub const fn with_max_height(mut self, height: u5) -> Self { + pub const fn with_max_height(mut self, height: Height) -> Self { self.max_height = height; self } @@ -171,9 +228,31 @@ impl Options { self } + /// Get the reserved of the ARENA. + /// + /// The reserved is used to configure the start position of the ARENA. This is useful + /// when you want to add some bytes before the ARENA, e.g. when using the memory map file backed ARENA, + /// you can set the reserved to the size to `8` to store a 8 bytes checksum. + /// + /// The default reserved is `0`. 
+ /// + /// # Example + /// + /// ```rust + /// use skl::Options; + /// + /// let opts = Options::new().with_reserved(8); + /// + /// assert_eq!(opts.reserved(), 8); + /// ``` + #[inline] + pub const fn reserved(&self) -> u32 { + self.reserved + } + /// Returns the maximum size of the value. /// - /// Default is `u32::MAX`. + /// Default is `u32::MAX`. The maximum size of the value is `u32::MAX - header`. /// /// # Example /// @@ -191,37 +270,37 @@ impl Options { /// /// The maximum size of the key is `u27::MAX`. /// - /// Default is `u27::MAX`. + /// Default is `65535`. /// /// # Example /// /// ``` - /// use skl::{Options, u27}; + /// use skl::{Options, KeySize}; /// - /// let options = Options::new().with_max_key_size(u27::new(1024)); + /// let options = Options::new().with_max_key_size(KeySize::new()); /// - /// assert_eq!(options.max_key_size(), u27::new(1024)); + /// assert_eq!(options.max_key_size(), u16::MAX); /// ``` #[inline] - pub const fn max_key_size(&self) -> u27 { + pub const fn max_key_size(&self) -> KeySize { self.max_key_size } /// Returns the maximum height. /// - /// Default is `20`. The maximum height is `u5::MAX`. The minimum height is `1`. + /// Default is `20`. The maximum height is `31`. The minimum height is `1`. /// /// # Example /// /// ``` - /// use skl::{Options, u5}; + /// use skl::{Options, Height}; /// - /// let options = Options::new().with_max_height(u5::new(5)); + /// let options = Options::new().with_max_height(Height::from_u8_unchecked(5)); /// - /// assert_eq!(options.max_height(), u5::new(5)); + /// assert_eq!(options.max_height(), 5); /// ``` #[inline] - pub const fn max_height(&self) -> u5 { + pub const fn max_height(&self) -> Height { self.max_height } @@ -304,4 +383,22 @@ impl Options { pub const fn freelist(&self) -> Freelist { self.freelist } + + /// Get the compression policy of the key in [`SkipMap`](super::SkipMap). + /// + /// The default value is [`CompressionPolicy::Fast`]. 
+ /// + /// # Example + /// + /// ```rust + /// use skl::{Options, options::CompressionPolicy}; + /// + /// let opts = Options::new().with_compression_policy(CompressionPolicy::High); + /// + /// assert_eq!(opts.compression_policy(), CompressionPolicy::High); + /// ``` + #[inline] + pub const fn compression_policy(&self) -> CompressionPolicy { + self.policy + } } diff --git a/src/sync.rs b/src/sync.rs new file mode 100644 index 0000000..9f3756d --- /dev/null +++ b/src/sync.rs @@ -0,0 +1,402 @@ +pub use rarena_allocator::sync::Arena; +use rarena_allocator::Allocator as _; + +use core::ops::{Bound, RangeBounds}; + +use super::{ + allocator::{Link as BaseLink, *}, + common::*, + *, +}; +use crate::VacantBuffer; + +use either::Either; + +/// Versioned header of the skiplist. +#[derive(Debug)] +#[repr(C)] +pub struct VersionedMeta { + /// The maximum MVCC version of the skiplist. CAS. + max_version: AtomicU64, + /// The minimum MVCC version of the skiplist. CAS. + min_version: AtomicU64, + len: AtomicU32, + magic_version: u16, + /// Current height. 1 <= height <= 31. CAS. 
+ height: AtomicU8, + reserved_byte: u8, +} + +impl Header for VersionedMeta { + #[inline] + fn new(version: u16) -> Self { + Self { + max_version: AtomicU64::new(0), + min_version: AtomicU64::new(0), + magic_version: version, + height: AtomicU8::new(1), + len: AtomicU32::new(0), + reserved_byte: 0, + } + } + + #[inline] + fn magic_version(&self) -> u16 { + self.magic_version + } + + #[inline] + fn max_version(&self) -> u64 { + self.max_version.load(Ordering::Acquire) + } + + #[inline] + fn min_version(&self) -> u64 { + self.min_version.load(Ordering::Acquire) + } + + #[inline] + fn height(&self) -> u8 { + self.height.load(Ordering::Acquire) + } + + #[inline] + fn len(&self) -> u32 { + self.len.load(Ordering::Acquire) + } + + #[inline] + fn increase_len(&self) { + self.len.fetch_add(1, Ordering::Release); + } + + fn update_max_version(&self, version: Version) { + let mut current = self.max_version.load(Ordering::Acquire); + loop { + if version <= current { + return; + } + + match self.max_version.compare_exchange_weak( + current, + version, + Ordering::SeqCst, + Ordering::Acquire, + ) { + Ok(_) => break, + Err(v) => current = v, + } + } + } + + fn update_min_version(&self, version: Version) { + let mut current = self.min_version.load(Ordering::Acquire); + loop { + if version >= current { + return; + } + + match self.min_version.compare_exchange_weak( + current, + version, + Ordering::SeqCst, + Ordering::Acquire, + ) { + Ok(_) => break, + Err(v) => current = v, + } + } + } + + #[inline] + fn compare_exchange_height_weak( + &self, + current: u8, + new: u8, + success: Ordering, + failure: Ordering, + ) -> Result { + self + .height + .compare_exchange_weak(current, new, success, failure) + } +} + +/// Header of the skipmap. +#[derive(Debug)] +#[repr(C)] +pub struct Meta { + len: AtomicU32, + magic_version: u16, + /// Current height. 1 <= height <= 31. CAS. 
+ height: AtomicU8, + reserved_byte: u8, +} + +impl Header for Meta { + #[inline] + fn new(version: u16) -> Self { + Self { + magic_version: version, + height: AtomicU8::new(1), + len: AtomicU32::new(0), + reserved_byte: 0, + } + } + + #[inline] + fn magic_version(&self) -> u16 { + self.magic_version + } + + #[inline] + fn max_version(&self) -> u64 { + MIN_VERSION + } + + #[inline] + fn min_version(&self) -> u64 { + MIN_VERSION + } + + #[inline] + fn height(&self) -> u8 { + self.height.load(Ordering::Acquire) + } + + #[inline] + fn len(&self) -> u32 { + self.len.load(Ordering::Acquire) + } + + #[inline] + fn increase_len(&self) { + self.len.fetch_add(1, Ordering::Release); + } + + fn update_max_version(&self, _: Version) {} + + fn update_min_version(&self, _: Version) {} + + #[inline] + fn compare_exchange_height_weak( + &self, + current: u8, + new: u8, + success: Ordering, + failure: Ordering, + ) -> Result { + self + .height + .compare_exchange_weak(current, new, success, failure) + } +} + +/// Atomic value pointer. 
+#[repr(C, align(8))] +pub struct AtomicValuePointer(AtomicU64); + +impl core::fmt::Debug for AtomicValuePointer { + fn fmt(&self, f: &mut core::fmt::Formatter<'_>) -> core::fmt::Result { + let (offset, len) = decode_value_pointer(self.0.load(Ordering::Relaxed)); + f.debug_struct("AtomicValuePointer") + .field("offset", &offset) + .field("len", &len) + .finish() + } +} + +impl AtomicValuePointer { + #[inline] + fn new(offset: u32, len: u32) -> Self { + Self(AtomicU64::new(encode_value_pointer(offset, len))) + } + + #[inline] + fn compare_remove(&self, success: Ordering, failure: Ordering) -> Result<(u32, u32), (u32, u32)> { + let old = self.0.load(Ordering::Acquire); + let (offset, _) = decode_value_pointer(old); + let new = encode_value_pointer(offset, REMOVE); + self + .0 + .compare_exchange(old, new, success, failure) + .map(decode_value_pointer) + .map_err(decode_value_pointer) + } +} + +impl ValuePointer for AtomicValuePointer { + const REMOVE: u32 = REMOVE; + + #[inline] + fn load(&self) -> (u32, u32) { + decode_value_pointer(AtomicU64::load(&self.0, Ordering::Acquire)) + } + + #[inline] + fn swap(&self, offset: u32, len: u32) -> (u32, u32) { + decode_value_pointer( + self + .0 + .swap(encode_value_pointer(offset, len), Ordering::AcqRel), + ) + } +} + +/// Link to the previous and next node. +#[derive(Debug)] +#[repr(C)] +pub struct Link { + next_offset: AtomicU32, + prev_offset: AtomicU32, +} + +impl BaseLink for Link { + #[inline] + fn new(next_offset: u32, prev_offset: u32) -> Self { + Self { + next_offset: AtomicU32::new(next_offset), + prev_offset: AtomicU32::new(prev_offset), + } + } + + #[inline] + fn store_next_offset(&self, offset: u32, ordering: Ordering) { + self.next_offset.store(offset, ordering); + } + + #[inline] + fn store_prev_offset(&self, offset: u32, ordering: Ordering) { + self.prev_offset.store(offset, ordering); + } +} + +macro_rules! node_pointer { + ($node: ident $(<$t:ident>)?) 
=> { + #[doc(hidden)] + #[derive(Debug)] + pub struct NodePointer $(<$t>)? { + offset: u32, + _m: core::marker::PhantomData<$node $(<$t>)?>, + } + + impl $(<$t>)? Clone for NodePointer $(<$t>)? { + fn clone(&self) -> Self { + *self + } + } + + impl $(<$t>)? Copy for NodePointer $(<$t>)? {} + + impl $(<$t: $crate::Trailer>)? $crate::allocator::NodePointer for NodePointer $(<$t>)? { + const NULL: Self = Self { + offset: 0, + _m: core::marker::PhantomData, + }; + + type Node = $node $(<$t>)?; + + #[inline] + fn is_null(&self) -> bool { + self.offset == 0 + } + + fn offset(&self) -> u32 { + self.offset + } + + /// ## Safety + /// + /// - The caller must ensure that the node is allocated by the arena. + /// - The caller must ensure that the offset is less than the capacity of the arena and larger than 0. + unsafe fn next_offset(&self, arena: &A, idx: usize) -> u32 { + self.tower(arena, idx).next_offset.load(Ordering::Acquire) + } + + /// ## Safety + /// + /// - The caller must ensure that the node is allocated by the arena. + /// - The caller must ensure that the offset is less than the capacity of the arena and larger than 0. + unsafe fn prev_offset(&self, arena: &A, idx: usize) -> u32 { + self.tower(arena, idx).prev_offset.load(Ordering::Acquire) + } + + /// ## Safety + /// + /// - The caller must ensure that the node is allocated by the arena. + /// - The caller must ensure that the offset is less than the capacity of the arena and larger than 0. + unsafe fn cas_prev_offset( + &self, + arena: &A, + idx: usize, + current: u32, + new: u32, + success: Ordering, + failure: Ordering, + ) -> Result { + self + .tower(arena, idx) + .prev_offset + .compare_exchange(current, new, success, failure) + } + + /// ## Safety + /// + /// - The caller must ensure that the node is allocated by the arena. + /// - The caller must ensure that the offset is less than the capacity of the arena and larger than 0. 
+ unsafe fn cas_next_offset( + &self, + arena: &A, + idx: usize, + current: u32, + new: u32, + success: Ordering, + failure: Ordering, + ) -> Result { + self + .tower(arena, idx) + .next_offset + .compare_exchange(current, new, success, failure) + } + + #[inline] + fn new( + offset: u32 + ) -> Self { + Self { + offset, + _m: core::marker::PhantomData, + } + } + + /// ## Safety + /// - the pointer must be valid + #[inline] + unsafe fn as_ref(&self, arena: &A) -> &Self::Node { + &*(arena.get_pointer(self.offset as usize) as *const Self::Node) + } + + /// ## Safety + /// - the pointer must be valid + #[inline] + unsafe fn as_mut(&self, arena: &A) -> &mut Self::Node { + &mut *(arena.get_pointer_mut(self.offset as usize) as *mut Self::Node) + } + } + }; +} + +/// A lock free ARENA based skiplist. See [`SkipList`](base::SkipList) for more information. +pub mod full; + +/// A skipmap implementation with version support. See [`SkipMap`](versioned::SkipMap) for more information. +pub mod versioned; + +/// A skipmap implementation with trailer support. See [`SkipMap`](trailed::SkipMap) for more information. +pub mod trailed; + +/// A skipmap implementation without trailer and version support. See [`SkipMap`](map::SkipMap) for more information. +pub mod map; + +#[cfg(test)] +mod tests; diff --git a/src/sync/full.rs b/src/sync/full.rs new file mode 100644 index 0000000..7cd0181 --- /dev/null +++ b/src/sync/full.rs @@ -0,0 +1,1527 @@ +use core::{borrow::Borrow, marker::PhantomData}; + +use among::Among; + +use super::*; + +type Allocator = GenericAllocator, Arena>; +type SkipList = base::SkipList, C>; + +node_pointer!(FullNode); + +/// A node that supports both version and trailer. +#[repr(C)] +pub struct FullNode { + // A byte slice is 24 bytes. We are trying to save space here. 
+ /// Multiple parts of the value are encoded as a single u64 so that it + /// can be atomically loaded and stored: + /// value offset: u32 (bits 0-31) + /// value size : u32 (bits 32-63) + value: AtomicValuePointer, + // Immutable. No need to lock to access key. + key_offset: u32, + // Immutable. No need to lock to access key. + key_size_and_height: u32, + version: u64, + trailer: PhantomData, + // ** DO NOT REMOVE BELOW COMMENT** + // The below field will be attached after the node, have to comment out + // this field, because each node will not use the full height, the code will + // not allocate the full size of the tower. + // + // Most nodes do not need to use the full height of the tower, since the + // probability of each successive level decreases exponentially. Because + // these elements are never accessed, they do not need to be allocated. + // Therefore, when a node is allocated in the arena, its memory footprint + // is deliberately truncated to not include unneeded tower elements. + // + // All accesses to elements should use CAS operations, with no need to lock. 
+ // pub(super) tower: [Link; self.opts.max_height], +} + +impl core::fmt::Debug for FullNode { + fn fmt(&self, f: &mut core::fmt::Formatter<'_>) -> core::fmt::Result { + let (key_size, height) = decode_key_size_and_height(self.key_size_and_height); + let (value_offset, value_size) = decode_value_pointer(self.value.0.load(Ordering::Relaxed)); + f.debug_struct("Node") + .field("value_offset", &value_offset) + .field("value_size", &value_size) + .field("key_offset", &self.key_offset) + .field("key_size", &key_size) + .field("height", &height) + .finish() + } +} + +impl WithTrailer for FullNode {} +impl WithVersion for FullNode {} + +impl Node for FullNode { + type Link = Link; + + type Trailer = T; + + type ValuePointer = AtomicValuePointer; + + type Pointer = NodePointer; + + fn full(value_offset: u32, max_height: u8) -> Self { + Self { + value: AtomicValuePointer::new(value_offset, 0), + key_offset: 0, + key_size_and_height: encode_key_size_and_height(0, max_height), + version: MIN_VERSION, + trailer: PhantomData, + } + } + + #[inline] + fn value_pointer(&self) -> &Self::ValuePointer { + &self.value + } + + #[inline] + fn set_value_pointer(&mut self, offset: u32, size: u32) { + self.value = AtomicValuePointer::new(offset, size); + } + + #[inline] + fn clear_value( + &self, + arena: &A, + success: Ordering, + failure: Ordering, + ) -> Result<(), (u32, u32)> { + self + .value + .compare_remove(success, failure) + .map(|(_, old_len)| { + if old_len != REMOVE { + arena.increase_discarded(old_len); + } + }) + } + + #[inline] + fn set_key_size_and_height(&mut self, key_size_and_height: u32) { + self.key_size_and_height = key_size_and_height; + } + + #[inline] + fn set_key_offset(&mut self, key_offset: u32) { + self.key_offset = key_offset; + } + + #[inline] + fn version(&self) -> Version { + self.version + } + + #[inline] + fn set_version(&mut self, version: Version) { + self.version = version; + } + + #[inline] + fn key_size_and_height(&self) -> u32 { + 
 self.key_size_and_height
+  }
+
+  #[inline]
+  fn key_offset(&self) -> u32 {
+    self.key_offset
+  }
+}
+
+/// A fast, lock-free, thread-safe ARENA based `SkipMap` that supports trailed structure, multiple versions, forward and backward iteration.
+///
+/// If you want to use it in a non-concurrent environment, you can use [`unsync::full::SkipMap`].
+#[repr(transparent)]
+pub struct SkipMap(pub(super) SkipList);
+
+impl Clone for SkipMap {
+  fn clone(&self) -> Self {
+    Self(self.0.clone())
+  }
+}
+
+impl SkipMap {
+  /// Create a new skipmap with default options.
+  ///
+  /// **Note:** The capacity stands for how much memory is allocated,
+  /// it does not mean the skiplist can store `cap` entries.
+  ///
+  ///
+  ///
+  /// **What is the difference between this method and [`SkipMap::map_anon`]?**
+  ///
+  /// 1. This method will use an `AlignedVec` which ensures we are working within Rust's memory safety guarantees.
+  /// Even if we are working with raw pointers with `Box::into_raw`,
+  /// the backend ARENA will reclaim the ownership of this memory by converting it back to a `Box`
+  /// when dropping the backend ARENA. Since `AlignedVec` uses heap memory, the data might be more cache-friendly,
+  /// especially if you're frequently accessing or modifying it.
+  ///
+  /// 2. Whereas [`SkipMap::map_anon`] will use mmap anonymous to require memory from the OS.
+  /// If you require very large contiguous memory regions, `mmap` might be more suitable because
+  /// it's more direct in requesting large chunks of memory from the OS.
+  ///
+  /// [`SkipMap::map_anon`]: #method.map_anon
+  pub fn new(opts: Options) -> Result {
+    Self::with_comparator(opts, Ascend)
+  }
+
+  /// Create a new memory map file backed skipmap with default options.
+  ///
+  /// **Note:** The capacity stands for how much memory is mmaped,
+  /// it does not mean the skipmap can store `cap` entries.
+ /// + /// # Safety + /// - If trying to reopens a skiplist, then the trailer type must be the same as the previous one + #[cfg(all(feature = "memmap", not(target_family = "wasm")))] + #[cfg_attr(docsrs, doc(cfg(all(feature = "memmap", not(target_family = "wasm")))))] + pub unsafe fn map_mut>( + path: P, + opts: Options, + open_options: OpenOptions, + mmap_options: MmapOptions, + ) -> std::io::Result { + Self::map_mut_with_comparator(path, opts, open_options, mmap_options, Ascend) + } + + /// Open an exist file and mmap it to create skipmap. + /// + /// # Safety + /// - If trying to reopens a skiplist, then the trailer type must be the same as the previous one. + #[cfg(all(feature = "memmap", not(target_family = "wasm")))] + #[cfg_attr(docsrs, doc(cfg(all(feature = "memmap", not(target_family = "wasm")))))] + pub unsafe fn map>( + path: P, + opts: Options, + open_options: OpenOptions, + mmap_options: MmapOptions, + ) -> std::io::Result { + Self::map_with_comparator(path, opts, open_options, mmap_options, Ascend) + } + + /// Create a new memory map backed skipmap with default options. + /// + /// **What the difference between this method and [`SkipMap::new`]?** + /// + /// 1. This method will use mmap anonymous to require memory from the OS directly. + /// If you require very large contiguous memory regions, this method might be more suitable because + /// it's more direct in requesting large chunks of memory from the OS. + /// + /// 2. Where as [`SkipMap::new`] will use an `AlignedVec` ensures we are working within Rust's memory safety guarantees. + /// Even if we are working with raw pointers with `Box::into_raw`, + /// the backend ARENA will reclaim the ownership of this memory by converting it back to a `Box` + /// when dropping the backend ARENA. Since `AlignedVec` uses heap memory, the data might be more cache-friendly, + /// especially if you're frequently accessing or modifying it. 
+ /// + /// [`SkipMap::new`]: #method.new + #[cfg(all(feature = "memmap", not(target_family = "wasm")))] + #[cfg_attr(docsrs, doc(cfg(all(feature = "memmap", not(target_family = "wasm")))))] + pub fn map_anon(opts: Options, mmap_options: MmapOptions) -> std::io::Result { + Self::map_anon_with_comparator(opts, mmap_options, Ascend) + } +} + +impl SkipMap { + /// Returns the reserved bytes of the allocator specified in the [`ArenaOptions::with_reserved`]. + #[inline] + pub fn reserved_slice(&self) -> &[u8] { + self.0.arena.reserved_slice() + } + + /// Returns the mutable reserved bytes of the allocator specified in the [`ArenaOptions::with_reserved`]. + /// + /// # Safety + /// - The caller need to make sure there is no data-race + /// + /// # Panics + /// - If in read-only mode, it will panic. + #[inline] + #[allow(clippy::mut_from_ref)] + pub unsafe fn reserved_slice_mut(&self) -> &mut [u8] { + self.0.arena.reserved_slice_mut() + } + /// Returns the path of the mmap file, only returns `Some` when the ARENA is backed by a mmap file. + #[cfg(all(feature = "memmap", not(target_family = "wasm")))] + #[cfg_attr(docsrs, doc(cfg(all(feature = "memmap", not(target_family = "wasm")))))] + #[inline] + pub fn path(&self) -> Option<&std::sync::Arc> { + self.0.arena.path() + } + + /// Sets remove on drop, only works on mmap with a file backend. + /// + /// Default is `false`. + /// + /// > **WARNING:** Once set to `true`, the backed file will be removed when the allocator is dropped, even though the file is opened in + /// > read-only mode. + #[cfg(all(feature = "memmap", not(target_family = "wasm")))] + #[cfg_attr(docsrs, doc(cfg(all(feature = "memmap", not(target_family = "wasm")))))] + #[inline] + pub fn remove_on_drop(&self, val: bool) { + self.0.remove_on_drop(val); + } + + /// Returns the offset of the data section in the `SkipMap`. 
+ /// + /// By default, `SkipMap` will allocate meta, head node, and tail node in the ARENA, + /// and the data section will be allocated after the tail node. + /// + /// This method will return the offset of the data section in the ARENA. + #[inline] + pub fn data_offset(&self) -> usize { + self.0.data_offset() + } + + /// Returns the version number of the [`SkipMap`]. + #[inline] + pub fn version(&self) -> u16 { + self.0.version() + } + + /// Returns the magic version number of the [`SkipMap`]. + /// + /// This value can be used to check the compatibility for application using [`SkipMap`]. + #[inline] + pub fn magic_version(&self) -> u16 { + self.0.magic_version() + } + + /// Returns the height of the highest tower within any of the nodes that + /// have ever been allocated as part of this skiplist. + #[inline] + pub fn height(&self) -> u8 { + self.0.height() + } + + /// Returns the number of remaining bytes can be allocated by the arena. + #[inline] + pub fn remaining(&self) -> usize { + self.0.remaining() + } + + /// Returns how many bytes are discarded by the ARENA. + #[inline] + pub fn discarded(&self) -> u32 { + self.0.discarded() + } + + /// Returns the number of bytes that have allocated from the arena. + #[inline] + pub fn allocated(&self) -> usize { + self.0.allocated() + } + + /// Returns the capacity of the arena. + #[inline] + pub fn capacity(&self) -> usize { + self.0.capacity() + } + + /// Returns the number of entries in the skipmap. + #[inline] + pub fn len(&self) -> usize { + self.0.len() + } + + /// Returns true if the skipmap is empty. + #[inline] + pub fn is_empty(&self) -> bool { + self.len() == 0 + } + + /// Gets the number of pointers to this `SkipMap` similar to [`Arc::strong_count`](std::sync::Arc::strong_count). + #[inline] + pub fn refs(&self) -> usize { + self.0.refs() + } + + /// Returns the maximum version of all entries in the map. 
+ #[inline] + pub fn max_version(&self) -> u64 { + self.0.max_version() + } + + /// Returns the minimum version of all entries in the map. + #[inline] + pub fn min_version(&self) -> u64 { + self.0.min_version() + } + + /// Returns the comparator used to compare keys. + #[inline] + pub fn comparator(&self) -> &C { + self.0.comparator() + } + + /// Returns a random generated height. + /// + /// This method is useful when you want to check if the underlying allocator can allocate a node. + /// + /// # Example + /// + /// ```rust + /// use skl::{sync::full::SkipMap, Options, Ascend}; + /// + /// let map = SkipMap::::new(Options::new()).unwrap(); + /// let height = map.random_height(); + /// + /// let needed = SkipMap::::estimated_node_size(height, b"k1".len(), b"k2".len()); + /// ``` + #[inline] + pub fn random_height(&self) -> Height { + self.0.random_height() + } + + /// Returns the estimated size of a node with the given height and key/value sizes. + /// + /// **Note**: The returned size is only an estimate and may not be accurate, which means that the actual size is less than or equal to the returned size. + #[inline] + pub fn estimated_node_size(height: Height, key_size: usize, value_size: usize) -> usize { + SkipList::::estimated_node_size(height, key_size, value_size) + } + + /// Like [`SkipMap::new`], but with a custom [`Comparator`]. + #[inline] + pub fn with_comparator(opts: Options, cmp: C) -> Result { + SkipList::::with_comparator(opts, cmp).map(Self) + } + + /// Like [`SkipMap::map_mut`], but with a custom [`Comparator`]. + /// + /// # Safety + /// - If trying to reopens a skiplist, then the trailer type must be the same as the previous one. 
+ #[cfg(all(feature = "memmap", not(target_family = "wasm")))] + #[cfg_attr(docsrs, doc(cfg(all(feature = "memmap", not(target_family = "wasm")))))] + #[inline] + pub unsafe fn map_mut_with_comparator>( + path: P, + opts: Options, + open_options: OpenOptions, + mmap_options: MmapOptions, + cmp: C, + ) -> std::io::Result { + SkipList::::map_mut_with_comparator(path, opts, open_options, mmap_options, cmp).map(Self) + } + + /// Like [`SkipMap::map_mut`], but with [`Options`], a custom [`Comparator`] and a [`PathBuf`](std::path::PathBuf) builder. + /// + /// # Safety + /// - If trying to reopens a skiplist, then the trailer type must be the same as the previous one. + #[cfg(all(feature = "memmap", not(target_family = "wasm")))] + #[cfg_attr(docsrs, doc(cfg(all(feature = "memmap", not(target_family = "wasm")))))] + #[inline] + pub unsafe fn map_mut_with_comparator_and_path_builder( + path_builder: PB, + opts: Options, + open_options: OpenOptions, + mmap_options: MmapOptions, + cmp: C, + ) -> Result> + where + PB: FnOnce() -> Result, + { + SkipList::::map_mut_with_comparator_and_path_builder( + path_builder, + opts, + open_options, + mmap_options, + cmp, + ) + .map(Self) + } + + /// Like [`SkipMap::map`], but with a custom [`Comparator`]. + /// + /// # Safety + /// - If trying to reopens a skiplist, then the trailer type must be the same as the previous one. + #[cfg(all(feature = "memmap", not(target_family = "wasm")))] + #[cfg_attr(docsrs, doc(cfg(all(feature = "memmap", not(target_family = "wasm")))))] + #[inline] + pub unsafe fn map_with_comparator>( + path: P, + opts: Options, + open_options: OpenOptions, + mmap_options: MmapOptions, + cmp: C, + ) -> std::io::Result { + SkipList::::map_with_comparator(path, opts, open_options, mmap_options, cmp).map(Self) + } + + /// Like [`SkipMap::map`], but with a custom [`Comparator`] and a [`PathBuf`](std::path::PathBuf) builder. 
+ /// + /// # Safety + /// - If trying to reopens a skiplist, then the trailer type must be the same as the previous one. + #[cfg(all(feature = "memmap", not(target_family = "wasm")))] + #[cfg_attr(docsrs, doc(cfg(all(feature = "memmap", not(target_family = "wasm")))))] + #[inline] + pub unsafe fn map_with_comparator_and_path_builder( + path_builder: PB, + opts: Options, + open_options: OpenOptions, + mmap_options: MmapOptions, + cmp: C, + ) -> Result> + where + PB: FnOnce() -> Result, + { + SkipList::::map_with_comparator_and_path_builder( + path_builder, + opts, + open_options, + mmap_options, + cmp, + ) + .map(Self) + } + + /// Like [`SkipMap::map_anon`], but with a custom [`Comparator`]. + #[cfg(all(feature = "memmap", not(target_family = "wasm")))] + #[cfg_attr(docsrs, doc(cfg(all(feature = "memmap", not(target_family = "wasm")))))] + #[inline] + pub fn map_anon_with_comparator( + opts: Options, + mmap_options: MmapOptions, + cmp: C, + ) -> std::io::Result { + SkipList::::map_anon_with_comparator(opts, mmap_options, cmp).map(Self) + } + + /// Clear the skiplist to empty and re-initialize. + /// + /// # Safety + /// - The current pointers get from the ARENA cannot be used anymore after calling this method. + /// - This method is not thread-safe. + /// + /// # Example + /// + /// Undefine behavior: + /// + /// ```ignore + /// let map = SkipMap::new(Options::new()).unwrap(); + /// + /// map.insert(1, b"hello", b"world").unwrap(); + /// + /// let data = map.get(b"hello").unwrap(); + /// + /// map.clear().unwrap(); + /// + /// let w = data[0]; // undefined behavior + /// ``` + pub unsafe fn clear(&mut self) -> Result<(), Error> { + self.0.clear() + } + + /// Flushes outstanding memory map modifications to disk. + /// + /// When this method returns with a non-error result, + /// all outstanding changes to a file-backed memory map are guaranteed to be durably stored. + /// The file's metadata (including last modification timestamp) may not be updated. 
+ #[cfg(all(feature = "memmap", not(target_family = "wasm")))] + #[cfg_attr(docsrs, doc(cfg(all(feature = "memmap", not(target_family = "wasm")))))] + pub fn flush(&self) -> std::io::Result<()> { + self.0.flush() + } + + /// Asynchronously flushes outstanding memory map modifications to disk. + /// + /// This method initiates flushing modified pages to durable storage, but it will not wait for + /// the operation to complete before returning. The file's metadata (including last + /// modification timestamp) may not be updated. + #[cfg(all(feature = "memmap", not(target_family = "wasm")))] + #[cfg_attr(docsrs, doc(cfg(all(feature = "memmap", not(target_family = "wasm")))))] + pub fn flush_async(&self) -> std::io::Result<()> { + self.0.flush_async() + } + + #[cfg(all(test, feature = "std"))] + #[inline] + pub(crate) fn with_yield_now(mut self) -> Self { + self.0 = self.0.with_yield_now(); + self + } +} + +impl SkipMap { + /// Returns `true` if the key exists in the map. + /// + /// This method will return `false` if the entry is marked as removed. If you want to check if the key exists even if it is marked as removed, + /// you can use [`contains_key_versioned`](SkipMap::contains_key_versioned). + /// + /// # Example + /// + /// ```rust + /// use skl::{sync::full::SkipMap, Options}; + /// + /// let map = SkipMap::new(Options::new()).unwrap(); + /// + /// map.insert(0, b"hello", b"world", ()).unwrap(); + /// + /// map.get_or_remove(1, b"hello", ()).unwrap(); + /// + /// assert!(!map.contains_key(1, b"hello")); + /// assert!(map.contains_key_versioned(1, b"hello")); + /// ``` + #[inline] + pub fn contains_key<'a, 'b: 'a>(&'a self, version: Version, key: &'b [u8]) -> bool { + self.get(version, key).is_some() + } + + /// Returns `true` if the key exists in the map, even if it is marked as removed. 
+ /// + /// # Example + /// + /// ```rust + /// use skl::{sync::full::SkipMap, Options}; + /// + /// let map = SkipMap::new(Options::new()).unwrap(); + /// + /// map.insert(0, b"hello", b"world", ()).unwrap(); + /// + /// map.get_or_remove(1, b"hello", ()).unwrap(); + /// + /// assert!(!map.contains_key(1, b"hello")); + /// assert!(map.contains_key_versioned(1, b"hello")); + /// ``` + #[inline] + pub fn contains_key_versioned<'a, 'b: 'a>(&'a self, version: Version, key: &'b [u8]) -> bool { + self.0.contains_key_versioned(version, key) + } + + /// Returns the first entry in the map. + pub fn first(&self, version: Version) -> Option>> { + self.iter(version).seek_lower_bound(Bound::Unbounded) + } + + /// Returns the last entry in the map. + pub fn last(&self, version: Version) -> Option>> { + self.iter(version).seek_upper_bound(Bound::Unbounded) + } + + /// Returns the value associated with the given key, if it exists. + /// + /// This method will return `None` if the entry is marked as removed. If you want to get the entry even if it is marked as removed, + /// you can use [`get_versioned`](SkipMap::get_versioned). + /// + /// # Example + /// + /// ```rust + /// use skl::{sync::full::SkipMap, Options}; + /// + /// let map = SkipMap::new(Options::new()).unwrap(); + /// + /// map.insert(0, b"hello", b"world", ()).unwrap(); + /// + /// let ent = map.get(0, b"hello").unwrap(); + /// assert_eq!(ent.value(), b"world"); + /// + /// map.get_or_remove(1, b"hello", ()).unwrap(); + /// + /// assert!(map.get(1, b"hello").is_none()); + /// ``` + pub fn get<'a, 'b: 'a>( + &'a self, + version: Version, + key: &'b [u8], + ) -> Option>> { + self.0.get(version, key) + } + + /// Returns the value associated with the given key, if it exists. + /// + /// The difference between `get` and `get_versioned` is that `get_versioned` will return the value even if the entry is removed. 
+ /// + /// # Example + /// + /// ```rust + /// use skl::{sync::full::SkipMap, Options}; + /// + /// let map = SkipMap::new(Options::new()).unwrap(); + /// + /// map.insert(0, b"hello", b"world", ()).unwrap(); + /// + /// map.get_or_remove(1, b"hello", ()).unwrap(); + /// + /// assert!(map.get(1, b"hello").is_none()); + /// + /// let ent = map.get_versioned(1, b"hello").unwrap(); + /// // value is None because the entry is marked as removed. + /// assert!(ent.value().is_none()); + /// ``` + pub fn get_versioned<'a, 'b: 'a>( + &'a self, + version: Version, + key: &'b [u8], + ) -> Option>> { + self.0.get_versioned(version, key) + } + + /// Returns an `EntryRef` pointing to the highest element whose key is below the given bound. + /// If no such element is found then `None` is returned. + pub fn upper_bound<'a, 'b: 'a>( + &'a self, + version: Version, + upper: Bound<&'b [u8]>, + ) -> Option>> { + self.iter(version).seek_upper_bound(upper) + } + + /// Returns an `EntryRef` pointing to the lowest element whose key is above the given bound. + /// If no such element is found then `None` is returned. + pub fn lower_bound<'a, 'b: 'a>( + &'a self, + version: Version, + lower: Bound<&'b [u8]>, + ) -> Option>> { + self.iter(version).seek_lower_bound(lower) + } + + /// Returns a new iterator, this iterator will yield the latest version of all entries in the map less or equal to the given version. + #[inline] + pub fn iter(&self, version: Version) -> Iter, C> { + self.0.iter(version) + } + + /// Returns a new iterator, this iterator will yield all versions for all entries in the map less or equal to the given version. + #[inline] + pub fn iter_all_versions(&self, version: Version) -> AllVersionsIter, C> { + self.0.iter_all_versions(version) + } + + /// Returns a iterator that within the range, this iterator will yield the latest version of all entries in the range less or equal to the given version. 
+ #[inline] + pub fn range<'a, Q, R>(&'a self, version: Version, range: R) -> Iter<'a, Allocator, C, Q, R> + where + Q: ?Sized + Borrow<[u8]>, + R: RangeBounds + 'a, + { + self.0.range(version, range) + } + + /// Returns a iterator that within the range, this iterator will yield all versions for all entries in the range less or equal to the given version. + #[inline] + pub fn range_all_versions<'a, Q, R>( + &'a self, + version: Version, + range: R, + ) -> AllVersionsIter<'a, Allocator, C, Q, R> + where + Q: ?Sized + Borrow<[u8]>, + R: RangeBounds + 'a, + { + self.0.range_all_versions(version, range) + } +} + +impl SkipMap { + /// Upserts a new key-value pair if it does not yet exist, if the key with the given version already exists, it will update the value. + /// Unlike [`get_or_insert`](SkipMap::get_or_insert), this method will update the value if the key with the given version already exists. + /// + /// - Returns `Ok(None)` if the key was successfully inserted. + /// - Returns `Ok(Some(old))` if the key with the given version already exists and the value is successfully updated. + #[inline] + pub fn insert<'a, 'b: 'a>( + &'a self, + version: Version, + key: &'b [u8], + value: &'b [u8], + trailer: T, + ) -> Result>>, Error> { + self.insert_at_height(version, self.random_height(), key, value, trailer) + } + + /// Upserts a new key-value pair at the given height if it does not yet exist, if the key with the given version already exists, it will update the value. + /// Unlike [`get_or_insert_at_height`](SkipMap::get_or_insert_at_height), this method will update the value if the key with the given version already exists. + /// + /// - Returns `Ok(None)` if the key was successfully inserted. + /// - Returns `Ok(Some(old))` if the key with the given version already exists and the value is successfully updated. 
+ /// + /// # Example + /// + /// ```rust + /// use skl::{sync::full::SkipMap, Options}; + /// + /// let map = SkipMap::new(Options::new()).unwrap(); + /// + /// let height = map.random_height(); + /// map.insert_at_height(0, height, b"hello", b"world", 10).unwrap(); + /// ``` + pub fn insert_at_height<'a, 'b: 'a>( + &'a self, + version: Version, + height: Height, + key: &'b [u8], + value: &'b [u8], + trailer: T, + ) -> Result>>, Error> { + self + .0 + .insert_at_height(version, height, key, value, trailer) + } + + /// Upserts a new key if it does not yet exist, if the key with the given version already exists, it will update the value. + /// Unlike [`get_or_insert_with_value_builder`](SkipMap::get_or_insert_with_value_builder), this method will update the value if the key with the given version already exists. + /// + /// This method is useful when you want to insert a key and you know the value size but you do not have the value + /// at this moment. + /// + /// A placeholder will be inserted first, then you will get an [`VacantBuffer`], + /// and you must fill the buffer with bytes later in the closure. + /// + /// - Returns `Ok(None)` if the key was successfully inserted. + /// - Returns `Ok(Some(old))` if the key with the given version already exists and the value is successfully updated. 
+ /// + /// # Example + /// + /// ```rust + /// use skl::{sync::full::SkipMap, ValueBuilder, Options}; + /// + /// struct Person { + /// id: u32, + /// name: String, + /// } + /// + /// impl Person { + /// fn encoded_size(&self) -> usize { + /// 4 + self.name.len() + /// } + /// } + /// + /// + /// let alice = Person { + /// id: 1, + /// name: "Alice".to_string(), + /// }; + /// + /// let encoded_size = alice.encoded_size(); + /// + /// let l = SkipMap::new(Options::new()).unwrap(); + /// + /// let vb = ValueBuilder::new(encoded_size as u32, |mut val| { + /// val.put_u32_le(alice.id).unwrap(); + /// val.put_slice(alice.name.as_bytes()).unwrap(); + /// Ok(()) + /// }); + /// + /// l.insert_with_value_builder::(1, b"alice", vb, 10) + /// .unwrap(); + /// ``` + #[inline] + pub fn insert_with_value_builder<'a, 'b: 'a, E>( + &'a self, + version: Version, + key: &'b [u8], + value_builder: ValueBuilder) -> Result<(), E>>, + trailer: T, + ) -> Result>>, Either> { + self.insert_at_height_with_value_builder( + version, + self.random_height(), + key, + value_builder, + trailer, + ) + } + + /// Upserts a new key if it does not yet exist, if the key with the given version already exists, it will update the value. + /// Unlike [`get_or_insert_at_height_with_value_builder`](SkipMap::get_or_insert_at_height_with_value_builder), this method will update the value if the key with the given version already exists. + /// + /// This method is useful when you want to insert a key and you know the value size but you do not have the value + /// at this moment. + /// + /// A placeholder will be inserted first, then you will get an [`VacantBuffer`], + /// and you must fill the buffer with bytes later in the closure. + /// + /// - Returns `Ok(None)` if the key was successfully inserted. + /// - Returns `Ok(Some(old))` if the key with the given version already exists and the value is successfully updated. 
+ /// + /// # Example + /// + /// ```rust + /// use skl::{sync::full::SkipMap, ValueBuilder, Options}; + /// + /// struct Person { + /// id: u32, + /// name: String, + /// } + /// + /// impl Person { + /// fn encoded_size(&self) -> usize { + /// 4 + self.name.len() + /// } + /// } + /// + /// + /// let alice = Person { + /// id: 1, + /// name: "Alice".to_string(), + /// }; + /// + /// let encoded_size = alice.encoded_size(); + /// + /// let l = SkipMap::new(Options::new()).unwrap(); + /// + /// let vb = ValueBuilder::new(encoded_size as u32, |mut val| { + /// val.put_u32_le(alice.id).unwrap(); + /// val.put_slice(alice.name.as_bytes()).unwrap(); + /// Ok(()) + /// }); + /// + /// let height = l.random_height(); + /// l.insert_at_height_with_value_builder::(1, height, b"alice", vb, 10) + /// .unwrap(); + /// ``` + pub fn insert_at_height_with_value_builder<'a, 'b: 'a, E>( + &'a self, + version: Version, + height: Height, + key: &'b [u8], + value_builder: ValueBuilder) -> Result<(), E>>, + trailer: T, + ) -> Result>>, Either> { + self + .0 + .insert_at_height_with_value_builder(version, height, key, value_builder, trailer) + } + + /// Inserts a new key-value pair if it does not yet exist. + /// + /// Unlike [`insert`](SkipMap::insert), this method will not update the value if the key with the given version already exists. + /// + /// - Returns `Ok(None)` if the key was successfully get_or_inserted. + /// - Returns `Ok(Some(_))` if the key with the given version already exists. + #[inline] + pub fn get_or_insert<'a, 'b: 'a>( + &'a self, + version: Version, + key: &'b [u8], + value: &'b [u8], + trailer: T, + ) -> Result>>, Error> { + self.get_or_insert_at_height(version, self.random_height(), key, value, trailer) + } + + /// Inserts a new key-value pair at height if it does not yet exist. + /// + /// Unlike [`insert_at_height`](SkipMap::insert_at_height), this method will not update the value if the key with the given version already exists. 
+ /// + /// - Returns `Ok(None)` if the key was successfully get_or_inserted. + /// - Returns `Ok(Some(_))` if the key with the given version already exists. + pub fn get_or_insert_at_height<'a, 'b: 'a>( + &'a self, + version: Version, + height: Height, + key: &'b [u8], + value: &'b [u8], + trailer: T, + ) -> Result>>, Error> { + self + .0 + .get_or_insert_at_height(version, height, key, value, trailer) + } + + /// Inserts a new key if it does not yet exist. + /// + /// Unlike [`insert_with_value_builder`](SkipMap::insert_with_value_builder), this method will not update the value if the key with the given version already exists. + /// + /// This method is useful when you want to get_or_insert a key and you know the value size but you do not have the value + /// at this moment. + /// + /// A placeholder will be inserted first, then you will get an [`VacantBuffer`], + /// and you must fill the buffer with bytes later in the closure. + /// + /// - Returns `Ok(None)` if the key was successfully get_or_inserted. + /// - Returns `Ok(Some(_))` if the key with the given version already exists. 
+ /// + /// # Example + /// + /// ```rust + /// use skl::{sync::full::SkipMap, ValueBuilder, Options}; + /// + /// struct Person { + /// id: u32, + /// name: String, + /// } + /// + /// impl Person { + /// fn encoded_size(&self) -> usize { + /// 4 + self.name.len() + /// } + /// } + /// + /// + /// let alice = Person { + /// id: 1, + /// name: "Alice".to_string(), + /// }; + /// + /// let encoded_size = alice.encoded_size(); + /// + /// let l = SkipMap::new(Options::new()).unwrap(); + /// + /// let vb = ValueBuilder::new(encoded_size as u32, |mut val| { + /// val.put_u32_le(alice.id).unwrap(); + /// val.put_slice(alice.name.as_bytes()).unwrap(); + /// Ok(()) + /// }); + /// l.get_or_insert_with_value_builder::(1, b"alice", vb, 10) + /// .unwrap(); + /// ``` + #[inline] + pub fn get_or_insert_with_value_builder<'a, 'b: 'a, E>( + &'a self, + version: Version, + key: &'b [u8], + value_builder: ValueBuilder) -> Result<(), E>>, + trailer: T, + ) -> Result>>, Either> { + self.get_or_insert_at_height_with_value_builder( + version, + self.random_height(), + key, + value_builder, + trailer, + ) + } + + /// Inserts a new key if it does not yet exist. + /// + /// Unlike [`insert_at_height_with_value_builder`](SkipMap::insert_at_height_with_value_builder), this method will not update the value if the key with the given version already exists. + /// + /// This method is useful when you want to get_or_insert a key and you know the value size but you do not have the value + /// at this moment. + /// + /// A placeholder will be inserted first, then you will get an [`VacantBuffer`], + /// and you must fill the buffer with bytes later in the closure. + /// + /// - Returns `Ok(None)` if the key was successfully get_or_inserted. + /// - Returns `Ok(Some(_))` if the key with the given version already exists. 
+ /// + /// # Example + /// + /// ```rust + /// use skl::{sync::full::SkipMap, ValueBuilder, Options}; + /// + /// struct Person { + /// id: u32, + /// name: String, + /// } + /// + /// impl Person { + /// fn encoded_size(&self) -> usize { + /// 4 + self.name.len() + /// } + /// } + /// + /// + /// let alice = Person { + /// id: 1, + /// name: "Alice".to_string(), + /// }; + /// + /// let encoded_size = alice.encoded_size(); + /// + /// let l = SkipMap::new(Options::new()).unwrap(); + /// + /// let vb = ValueBuilder::new(encoded_size as u32, |mut val| { + /// val.put_u32_le(alice.id).unwrap(); + /// val.put_slice(alice.name.as_bytes()).unwrap(); + /// Ok(()) + /// }); + /// + /// let height = l.random_height(); + /// l.get_or_insert_at_height_with_value_builder::(1, height, b"alice", vb, 10) + /// .unwrap(); + /// ``` + pub fn get_or_insert_at_height_with_value_builder<'a, 'b: 'a, E>( + &'a self, + version: Version, + height: Height, + key: &'b [u8], + value_builder: ValueBuilder) -> Result<(), E>>, + trailer: T, + ) -> Result>>, Either> { + self + .0 + .get_or_insert_at_height_with_value_builder(version, height, key, value_builder, trailer) + } + + /// Upserts a new key if it does not yet exist, if the key with the given version already exists, it will update the value. + /// Unlike [`get_or_insert_with_builders`](SkipMap::get_or_insert_with_builders), this method will update the value if the key with the given version already exists. + /// + /// This method is useful when you want to insert a key and you know the key size and value size but you do not have the key and value + /// at this moment. + /// + /// A placeholder will be inserted first, then you will get an [`VacantBuffer`], + /// and you must fill the buffer with bytes later in the closure. + /// + /// - Returns `Ok(None)` if the key was successfully inserted. + /// - Returns `Ok(Some(old))` if the key with the given version already exists and the value is successfully updated. 
+ /// + /// # Example + /// + /// ```rust + /// use skl::{sync::full::SkipMap, KeyBuilder, ValueBuilder, Options}; + /// + /// struct Person { + /// id: u32, + /// name: String, + /// } + /// + /// impl Person { + /// fn encoded_size(&self) -> usize { + /// 4 + self.name.len() + /// } + /// } + /// + /// + /// let alice = Person { + /// id: 1, + /// name: "Alice".to_string(), + /// }; + /// + /// let encoded_size = alice.encoded_size(); + /// + /// let l = SkipMap::new(Options::new()).unwrap(); + /// + /// let kb = KeyBuilder::new(5u8.into(), |mut key| { + /// key.put_slice(b"alice").unwrap(); + /// Ok(()) + /// }); + /// + /// let vb = ValueBuilder::new(encoded_size as u32, |mut val| { + /// val.put_u32_le(alice.id).unwrap(); + /// val.put_slice(alice.name.as_bytes()).unwrap(); + /// Ok(()) + /// }); + /// + /// l.insert_with_builders::<(), ()>(1, kb, vb, 10) + /// .unwrap(); + /// ``` + #[inline] + pub fn insert_with_builders<'a, KE, VE>( + &'a self, + version: Version, + key_builder: KeyBuilder) -> Result<(), KE>>, + value_builder: ValueBuilder) -> Result<(), VE>>, + trailer: T, + ) -> Result>>, Among> { + self.insert_at_height_with_builders( + version, + self.random_height(), + key_builder, + value_builder, + trailer, + ) + } + + /// Upserts a new key if it does not yet exist, if the key with the given version already exists, it will update the value. + /// Unlike [`get_or_insert_with_builders`](SkipMap::get_or_insert_with_builders), this method will update the value if the key with the given version already exists. + /// + /// This method is useful when you want to insert a key and you know the key size and value size but you do not have the key and value + /// at this moment. + /// + /// A placeholder will be inserted first, then you will get an [`VacantBuffer`], + /// and you must fill the buffer with bytes later in the closure. + /// + /// - Returns `Ok(None)` if the key was successfully inserted. 
+ /// - Returns `Ok(Some(old))` if the key with the given version already exists and the value is successfully updated. + /// + /// # Example + /// + /// ```rust + /// use skl::{sync::full::SkipMap, KeyBuilder, ValueBuilder, Options}; + /// + /// struct Person { + /// id: u32, + /// name: String, + /// } + /// + /// impl Person { + /// fn encoded_size(&self) -> usize { + /// 4 + self.name.len() + /// } + /// } + /// + /// + /// let alice = Person { + /// id: 1, + /// name: "Alice".to_string(), + /// }; + /// + /// let encoded_size = alice.encoded_size(); + /// + /// let l = SkipMap::new(Options::new()).unwrap(); + /// + /// let kb = KeyBuilder::new(5u8.into(), |mut key| { + /// key.put_slice(b"alice").unwrap(); + /// Ok(()) + /// }); + /// + /// let vb = ValueBuilder::new(encoded_size as u32, |mut val| { + /// val.put_u32_le(alice.id).unwrap(); + /// val.put_slice(alice.name.as_bytes()).unwrap(); + /// Ok(()) + /// }); + /// + /// let height = l.random_height(); + /// l.insert_at_height_with_builders::<(), ()>(1, height, kb, vb, 10) + /// .unwrap(); + /// ``` + pub fn insert_at_height_with_builders<'a, KE, VE>( + &'a self, + version: Version, + height: Height, + key_builder: KeyBuilder) -> Result<(), KE>>, + value_builder: ValueBuilder) -> Result<(), VE>>, + trailer: T, + ) -> Result>>, Among> { + self + .0 + .insert_at_height_with_builders(version, height, key_builder, value_builder, trailer) + } + + /// Inserts a new key if it does not yet exist. + /// + /// Unlike [`insert_with_builders`](SkipMap::insert_with_builders), this method will not update the value if the key with the given version already exists. + /// + /// This method is useful when you want to get_or_insert a key and you know the value size but you do not have the value + /// at this moment. + /// + /// A placeholder will be inserted first, then you will get an [`VacantBuffer`], + /// and you must fill the buffer with bytes later in the closure. 
+ /// + /// # Example + /// + /// ```rust + /// use skl::{sync::full::SkipMap, KeyBuilder, ValueBuilder, Options}; + /// + /// struct Person { + /// id: u32, + /// name: String, + /// } + /// + /// impl Person { + /// fn encoded_size(&self) -> usize { + /// 4 + self.name.len() + /// } + /// } + /// + /// + /// let alice = Person { + /// id: 1, + /// name: "Alice".to_string(), + /// }; + /// + /// let encoded_size = alice.encoded_size(); + /// + /// let l = SkipMap::new(Options::new()).unwrap(); + /// + /// let kb = KeyBuilder::new(5u8.into(), |mut key| { + /// key.put_slice(b"alice").unwrap(); + /// Ok(()) + /// }); + /// + /// let vb = ValueBuilder::new(encoded_size as u32, |mut val| { + /// val.put_u32_le(alice.id).unwrap(); + /// val.put_slice(alice.name.as_bytes()).unwrap(); + /// Ok(()) + /// }); + /// + /// l.get_or_insert_with_builders::<(), ()>(1, kb, vb, 10) + /// .unwrap(); + /// ``` + #[inline] + pub fn get_or_insert_with_builders<'a, KE, VE>( + &'a self, + version: Version, + key_builder: KeyBuilder) -> Result<(), KE>>, + value_builder: ValueBuilder) -> Result<(), VE>>, + trailer: T, + ) -> Result>>, Among> { + self.get_or_insert_at_height_with_builders( + version, + self.random_height(), + key_builder, + value_builder, + trailer, + ) + } + + /// Inserts a new key if it does not yet exist. + /// + /// Unlike [`insert_at_height_with_builders`](SkipMap::insert_at_height_with_builders), this method will not update the value if the key with the given version already exists. + /// + /// This method is useful when you want to get_or_insert a key and you know the value size but you do not have the value + /// at this moment. + /// + /// A placeholder will be inserted first, then you will get an [`VacantBuffer`], + /// and you must fill the buffer with bytes later in the closure. 
+ /// + /// # Example + /// + /// ```rust + /// use skl::{sync::full::SkipMap, KeyBuilder, ValueBuilder, Options}; + /// + /// struct Person { + /// id: u32, + /// name: String, + /// } + /// + /// impl Person { + /// fn encoded_size(&self) -> usize { + /// 4 + self.name.len() + /// } + /// } + /// + /// + /// let alice = Person { + /// id: 1, + /// name: "Alice".to_string(), + /// }; + /// + /// let encoded_size = alice.encoded_size(); + /// + /// let l = SkipMap::new(Options::new()).unwrap(); + /// + /// let kb = KeyBuilder::new(5u8.into(), |mut key| { + /// key.put_slice(b"alice").unwrap(); + /// Ok(()) + /// }); + /// + /// let vb = ValueBuilder::new(encoded_size as u32, |mut val| { + /// val.put_u32_le(alice.id).unwrap(); + /// val.put_slice(alice.name.as_bytes()).unwrap(); + /// Ok(()) + /// }); + /// + /// let height = l.random_height(); + /// l.get_or_insert_at_height_with_builders::<(), ()>(1, height, kb, vb, 10) + /// .unwrap(); + /// ``` + pub fn get_or_insert_at_height_with_builders<'a, KE, VE>( + &'a self, + version: Version, + height: Height, + key_builder: KeyBuilder) -> Result<(), KE>>, + value_builder: ValueBuilder) -> Result<(), VE>>, + trailer: T, + ) -> Result>>, Among> { + self.0.get_or_insert_at_height_with_builders( + version, + height, + key_builder, + value_builder, + trailer, + ) + } + + /// Removes the key-value pair if it exists. A CAS operation will be used to ensure the operation is atomic. + /// + /// Unlike [`get_or_remove`](SkipMap::get_or_remove), this method will remove the value if the key with the given version already exists. + /// + /// - Returns `Ok(None)`: + /// - if the remove operation is successful or the key is marked in remove status by other threads. + /// - Returns `Ok(Either::Right(current))` if the key with the given version already exists + /// and the entry is not successfully removed because of an update on this entry happens in another thread. 
+ #[inline] + pub fn compare_remove<'a, 'b: 'a>( + &'a self, + version: Version, + key: &'b [u8], + trailer: T, + success: Ordering, + failure: Ordering, + ) -> Result>>, Error> { + self.compare_remove_at_height( + version, + self.random_height(), + key, + trailer, + success, + failure, + ) + } + + /// Removes the key-value pair if it exists. A CAS operation will be used to ensure the operation is atomic. + /// + /// Unlike [`get_or_remove_at_height`](SkipMap::get_or_remove_at_height), this method will remove the value if the key with the given version already exists. + /// + /// - Returns `Ok(None)`: + /// - if the remove operation is successful or the key is marked in remove status by other threads. + /// - Returns `Ok(Either::Right(current))` if the key with the given version already exists + /// and the entry is not successfully removed because of an update on this entry happens in another thread. + pub fn compare_remove_at_height<'a, 'b: 'a>( + &'a self, + version: Version, + height: Height, + key: &'b [u8], + trailer: T, + success: Ordering, + failure: Ordering, + ) -> Result>>, Error> { + self + .0 + .compare_remove_at_height(version, height, key, trailer, success, failure) + } + + /// Gets or removes the key-value pair if it exists. + /// Unlike [`compare_remove`](SkipMap::compare_remove), this method will not remove the value if the key with the given version already exists. + /// + /// - Returns `Ok(None)` if the key does not exist. + /// - Returns `Ok(Some(old))` if the key with the given version already exists. + #[inline] + pub fn get_or_remove<'a, 'b: 'a>( + &'a self, + version: Version, + key: &'b [u8], + trailer: T, + ) -> Result>>, Error> { + self.get_or_remove_at_height(version, self.random_height(), key, trailer) + } + + /// Gets or removes the key-value pair if it exists. + /// Unlike [`compare_remove_at_height`](SkipMap::compare_remove_at_height), this method will not remove the value if the key with the given version already exists. 
+ /// + /// - Returns `Ok(None)` if the key does not exist. + /// - Returns `Ok(Some(old))` if the key with the given version already exists. + /// + /// # Example + /// + /// ```rust + /// use skl::{sync::full::SkipMap, Options}; + /// + /// let map = SkipMap::::new(Options::new()).unwrap(); + /// + /// map.insert(0, b"hello", b"world", 10).unwrap(); + /// + /// let height = map.random_height(); + /// map.get_or_remove_at_height(0, height, b"hello", 10).unwrap(); + /// ``` + pub fn get_or_remove_at_height<'a, 'b: 'a>( + &'a self, + version: Version, + height: Height, + key: &'b [u8], + trailer: T, + ) -> Result>>, Error> { + self + .0 + .get_or_remove_at_height(version, height, key, trailer) + } + + /// Gets or removes the key-value pair if it exists. + /// Unlike [`compare_remove`](SkipMap::compare_remove), this method will not remove the value if the key with the given version already exists. + /// + /// - Returns `Ok(None)` if the key does not exist. + /// - Returns `Ok(Some(old))` if the key with the given version already exists. + /// + /// This method is useful when you want to get_or_remove a key and you know the key size but you do not have the key + /// at this moment. + /// + /// A placeholder will be inserted first, then you will get an [`VacantBuffer`], + /// and you must fill the buffer with bytes later in the closure. 
+ /// + /// # Example + /// + /// ```rust + /// use skl::{sync::full::SkipMap, KeyBuilder, Options}; + /// + /// struct Person { + /// id: u32, + /// name: String, + /// } + /// + /// impl Person { + /// fn encoded_size(&self) -> usize { + /// 4 + self.name.len() + /// } + /// } + /// + /// + /// let alice = Person { + /// id: 1, + /// name: "Alice".to_string(), + /// }; + /// + /// let encoded_size = alice.encoded_size(); + /// + /// let l = SkipMap::new(Options::new()).unwrap(); + /// + /// let kb = KeyBuilder::new(5u8.into(), |mut key| { + /// key.put_slice(b"alice").unwrap(); + /// Ok(()) + /// }); + /// l.get_or_remove_with_builder::(1, kb, 10) + /// .unwrap(); + /// ``` + pub fn get_or_remove_with_builder<'a, 'b: 'a, E>( + &'a self, + version: Version, + key_builder: KeyBuilder) -> Result<(), E>>, + trailer: T, + ) -> Result>>, Either> { + self.get_or_remove_at_height_with_builder(version, self.random_height(), key_builder, trailer) + } + + /// Gets or removes the key-value pair if it exists. + /// Unlike [`compare_remove_at_height`](SkipMap::compare_remove_at_height), this method will not remove the value if the key with the given version already exists. + /// + /// - Returns `Ok(None)` if the key does not exist. + /// - Returns `Ok(Some(old))` if the key with the given version already exists. + /// + /// This method is useful when you want to get_or_remove a key and you know the key size but you do not have the key + /// at this moment. + /// + /// A placeholder will be inserted first, then you will get an [`VacantBuffer`], + /// and you must fill the buffer with bytes later in the closure. 
+ /// + /// # Example + /// + /// ```rust + /// use skl::{sync::full::SkipMap, KeyBuilder, Options}; + /// + /// struct Person { + /// id: u32, + /// name: String, + /// } + /// + /// impl Person { + /// fn encoded_size(&self) -> usize { + /// 4 + self.name.len() + /// } + /// } + /// + /// + /// let alice = Person { + /// id: 1, + /// name: "Alice".to_string(), + /// }; + /// + /// let encoded_size = alice.encoded_size(); + /// + /// let l = SkipMap::new(Options::new()).unwrap(); + /// + /// let kb = KeyBuilder::new(5u8.into(), |mut key| { + /// key.put_slice(b"alice").unwrap(); + /// Ok(()) + /// }); + /// let height = l.random_height(); + /// l.get_or_remove_at_height_with_builder::(1, height, kb, 10) + /// .unwrap(); + /// ``` + pub fn get_or_remove_at_height_with_builder<'a, 'b: 'a, E>( + &'a self, + version: Version, + height: Height, + key_builder: KeyBuilder) -> Result<(), E>>, + trailer: T, + ) -> Result>>, Either> { + self + .0 + .get_or_remove_at_height_with_builder(version, height, key_builder, trailer) + } +} diff --git a/src/sync/map.rs b/src/sync/map.rs new file mode 100644 index 0000000..899ff9c --- /dev/null +++ b/src/sync/map.rs @@ -0,0 +1,1375 @@ +use core::borrow::Borrow; + +use super::*; + +use among::Among; +use base::{EntryRef, Iter}; + +type Allocator = GenericAllocator; +type SkipList = base::SkipList; + +node_pointer!(RawNode); + +/// A node that does not support version and trailer. +#[repr(C)] +pub struct RawNode { + // A byte slice is 24 bytes. We are trying to save space here. + /// Multiple parts of the value are encoded as a single u64 so that it + /// can be atomically loaded and stored: + /// value offset: u32 (bits 0-31) + /// value size : u32 (bits 32-63) + value: AtomicValuePointer, + // Immutable. No need to lock to access key. + key_offset: u32, + // Immutable. No need to lock to access key. 
+ key_size_and_height: u32, + // ** DO NOT REMOVE BELOW COMMENT** + // The below field will be attached after the node, have to comment out + // this field, because each node will not use the full height, the code will + // not allocate the full size of the tower. + // + // Most nodes do not need to use the full height of the tower, since the + // probability of each successive level decreases exponentially. Because + // these elements are never accessed, they do not need to be allocated. + // Therefore, when a node is allocated in the arena, its memory footprint + // is deliberately truncated to not include unneeded tower elements. + // + // All accesses to elements should use CAS operations, with no need to lock. + // pub(super) tower: [Link; self.opts.max_height], +} + +impl core::fmt::Debug for RawNode { + fn fmt(&self, f: &mut core::fmt::Formatter<'_>) -> core::fmt::Result { + let (key_size, height) = decode_key_size_and_height(self.key_size_and_height); + let (value_offset, value_size) = decode_value_pointer(self.value.0.load(Ordering::Relaxed)); + f.debug_struct("Node") + .field("value_offset", &value_offset) + .field("value_size", &value_size) + .field("key_offset", &self.key_offset) + .field("key_size", &key_size) + .field("height", &height) + .finish() + } +} + +impl Node for RawNode { + type Link = Link; + + type Trailer = (); + + type ValuePointer = AtomicValuePointer; + + type Pointer = NodePointer; + + fn full(value_offset: u32, max_height: u8) -> Self { + Self { + value: AtomicValuePointer::new(value_offset, 0), + key_offset: 0, + key_size_and_height: encode_key_size_and_height(0, max_height), + } + } + + #[inline] + fn value_pointer(&self) -> &Self::ValuePointer { + &self.value + } + + #[inline] + fn set_value_pointer(&mut self, offset: u32, size: u32) { + self.value = AtomicValuePointer::new(offset, size); + } + + #[inline] + fn clear_value( + &self, + arena: &A, + success: Ordering, + failure: Ordering, + ) -> Result<(), (u32, u32)> { + self + 
.value + .compare_remove(success, failure) + .map(|(_, old_len)| { + if old_len != REMOVE { + arena.increase_discarded(old_len); + } + }) + } + + #[inline] + fn set_key_size_and_height(&mut self, key_size_and_height: u32) { + self.key_size_and_height = key_size_and_height; + } + + #[inline] + fn set_key_offset(&mut self, key_offset: u32) { + self.key_offset = key_offset; + } + + #[inline] + fn version(&self) -> Version { + 0 + } + + #[inline] + fn set_version(&mut self, _: Version) {} + + #[inline] + fn key_size_and_height(&self) -> u32 { + self.key_size_and_height + } + + #[inline] + fn key_offset(&self) -> u32 { + self.key_offset + } +} + +/// A fast, lock-free, thread-safe ARENA based `SkipMap` that supports forward and backward iteration. +/// +/// If you want to use in non-concurrent environment, you can use [`unsync::map::SkipMap`]. +#[repr(transparent)] +pub struct SkipMap(SkipList); + +impl Clone for SkipMap { + #[inline] + fn clone(&self) -> Self { + Self(self.0.clone()) + } +} + +impl SkipMap { + /// Create a new skipmap with default options. + /// + /// **Note:** The capacity stands for how many memory allocated, + /// it does not mean the skiplist can store `cap` entries. + /// + /// + /// + /// **What the difference between this method and [`SkipMap::mmap_anon`]?** + /// + /// 1. This method will use an `AlignedVec` ensures we are working within Rust's memory safety guarantees. + /// Even if we are working with raw pointers with `Box::into_raw`, + /// the backend ARENA will reclaim the ownership of this memory by converting it back to a `Box` + /// when dropping the backend ARENA. Since `AlignedVec` uses heap memory, the data might be more cache-friendly, + /// especially if you're frequently accessing or modifying it. + /// + /// 2. Where as [`SkipMap::mmap_anon`] will use mmap anonymous to require memory from the OS. 
+ /// If you require very large contiguous memory regions, `mmap` might be more suitable because + /// it's more direct in requesting large chunks of memory from the OS. + /// + /// [`SkipMap::mmap_anon`]: #method.mmap_anon + pub fn new(opts: Options) -> Result { + Self::with_comparator(opts, Ascend) + } + + /// Create a new memory map file backed with default options. + /// + /// **Note:** The capacity stands for how many memory mmaped, + /// it does not mean the skipmap can store `cap` entries. + /// + /// # Safety + /// - If trying to reopens a skiplist, then the trailer type must be the same as the previous one + #[cfg(all(feature = "memmap", not(target_family = "wasm")))] + #[cfg_attr(docsrs, doc(cfg(all(feature = "memmap", not(target_family = "wasm")))))] + pub unsafe fn map_mut>( + path: P, + opts: Options, + open_options: OpenOptions, + mmap_options: MmapOptions, + ) -> std::io::Result { + Self::map_mut_with_comparator(path, opts, open_options, mmap_options, Ascend) + } + + /// Open an exist file and mmap it to create skipmap. + /// + /// # Safety + /// - If trying to reopens a skiplist, then the trailer type must be the same as the previous one + #[cfg(all(feature = "memmap", not(target_family = "wasm")))] + #[cfg_attr(docsrs, doc(cfg(all(feature = "memmap", not(target_family = "wasm")))))] + pub unsafe fn map>( + path: P, + opts: Options, + open_options: OpenOptions, + mmap_options: MmapOptions, + ) -> std::io::Result { + Self::map_with_comparator(path, opts, open_options, mmap_options, Ascend) + } + + /// Create a new memory map backed skipmap with default options. + /// + /// **What the difference between this method and [`SkipMap::new`]?** + /// + /// 1. This method will use mmap anonymous to require memory from the OS directly. + /// If you require very large contiguous memory regions, this method might be more suitable because + /// it's more direct in requesting large chunks of memory from the OS. + /// + /// 2. 
Where as [`SkipMap::new`] will use an `AlignedVec` ensures we are working within Rust's memory safety guarantees. + /// Even if we are working with raw pointers with `Box::into_raw`, + /// the backend ARENA will reclaim the ownership of this memory by converting it back to a `Box` + /// when dropping the backend ARENA. Since `AlignedVec` uses heap memory, the data might be more cache-friendly, + /// especially if you're frequently accessing or modifying it. + /// + /// [`SkipMap::new`]: #method.new + #[cfg(all(feature = "memmap", not(target_family = "wasm")))] + #[cfg_attr(docsrs, doc(cfg(all(feature = "memmap", not(target_family = "wasm")))))] + pub fn map_anon(opts: Options, mmap_options: MmapOptions) -> std::io::Result { + Self::map_anon_with_comparator(opts, mmap_options, Ascend) + } +} + +impl SkipMap { + /// Returns the reserved bytes of the allocator specified in the [`ArenaOptions::with_reserved`]. + #[inline] + pub fn reserved_slice(&self) -> &[u8] { + self.0.arena.reserved_slice() + } + + /// Returns the mutable reserved bytes of the allocator specified in the [`ArenaOptions::with_reserved`]. + /// + /// # Safety + /// - The caller need to make sure there is no data-race + /// + /// # Panics + /// - If in read-only mode, it will panic. + #[inline] + #[allow(clippy::mut_from_ref)] + pub unsafe fn reserved_slice_mut(&self) -> &mut [u8] { + self.0.arena.reserved_slice_mut() + } + /// Returns the path of the mmap file, only returns `Some` when the ARENA is backed by a mmap file. + #[cfg(all(feature = "memmap", not(target_family = "wasm")))] + #[cfg_attr(docsrs, doc(cfg(all(feature = "memmap", not(target_family = "wasm")))))] + #[inline] + pub fn path(&self) -> Option<&std::sync::Arc> { + self.0.arena.path() + } + + /// Sets remove on drop, only works on mmap with a file backend. + /// + /// Default is `false`. 
+ /// + /// > **WARNING:** Once set to `true`, the backed file will be removed when the allocator is dropped, even though the file is opened in + /// > read-only mode. + #[cfg(all(feature = "memmap", not(target_family = "wasm")))] + #[cfg_attr(docsrs, doc(cfg(all(feature = "memmap", not(target_family = "wasm")))))] + #[inline] + pub fn remove_on_drop(&self, val: bool) { + self.0.remove_on_drop(val); + } + + /// Returns the offset of the data section in the `SkipMap`. + /// + /// By default, `SkipMap` will allocate meta, head node, and tail node in the ARENA, + /// and the data section will be allocated after the tail node. + /// + /// This method will return the offset of the data section in the ARENA. + #[inline] + pub const fn data_offset(&self) -> usize { + self.0.data_offset() + } + + /// Returns the magic version number of the [`SkipMap`]. + /// + /// This value can be used to check the compatibility for application using [`SkipMap`]. + #[inline] + pub fn magic_version(&self) -> u16 { + self.0.magic_version() + } + + /// Returns the height of the highest tower within any of the nodes that + /// have ever been allocated as part of this skiplist. + #[inline] + pub fn height(&self) -> u8 { + self.0.height() + } + + /// Returns the number of remaining bytes can be allocated by the arena. + #[inline] + pub fn remaining(&self) -> usize { + self.0.remaining() + } + + /// Returns the number of bytes that have allocated from the arena. + #[inline] + pub fn allocated(&self) -> usize { + self.0.allocated() + } + + /// Returns the capacity of the arena. + #[inline] + pub fn capacity(&self) -> usize { + self.0.capacity() + } + + /// Returns the number of entries in the skipmap. + #[inline] + pub fn len(&self) -> usize { + self.0.len() + } + + /// Returns true if the skipmap is empty. + #[inline] + pub fn is_empty(&self) -> bool { + self.0.is_empty() + } + + /// Gets the number of pointers to this `SkipMap` similar to [`Arc::strong_count`](std::sync::Arc::strong_count). 
+ #[inline] + pub fn refs(&self) -> usize { + self.0.refs() + } + + /// Returns how many bytes are discarded by the ARENA. + #[inline] + pub fn discarded(&self) -> u32 { + self.0.discarded() + } + + /// Returns the comparator used to compare keys. + #[inline] + pub const fn comparator(&self) -> &C { + self.0.comparator() + } + + /// Returns a random generated height. + /// + /// This method is useful when you want to check if the underlying allocator can allocate a node. + /// + /// # Example + /// + /// ```rust + /// use skl::{sync::map::SkipMap, Options, Ascend}; + /// + /// let map = SkipMap::new(Options::new()).unwrap(); + /// let height = map.random_height(); + /// + /// let needed = SkipMap::::estimated_node_size(height, b"k1".len(), b"k2".len()); + /// ``` + #[inline] + pub fn random_height(&self) -> Height { + self.0.random_height() + } + + /// Returns the estimated size of a node with the given height and key/value sizes. + /// + /// **Note**: The returned size is only an estimate and may not be accurate, which means that the actual size is less than or equal to the returned size. + #[inline] + pub fn estimated_node_size(height: Height, key_size: usize, value_size: usize) -> usize { + SkipList::::estimated_node_size(height, key_size, value_size) + } + + /// Like [`SkipMap::new`], but with a custom [`Comparator`]. + #[inline] + pub fn with_comparator(opts: Options, cmp: C) -> Result { + SkipList::::with_comparator(opts, cmp).map(Self) + } + + /// Like [`SkipMap::map_mut`], but with a custom [`Comparator`]. 
+ /// + /// # Safety + /// - If trying to reopens a skiplist, then the trailer type must be the same as the previous one + #[cfg(all(feature = "memmap", not(target_family = "wasm")))] + #[cfg_attr(docsrs, doc(cfg(all(feature = "memmap", not(target_family = "wasm")))))] + #[inline] + pub unsafe fn map_mut_with_comparator>( + path: P, + opts: Options, + open_options: OpenOptions, + mmap_options: MmapOptions, + cmp: C, + ) -> std::io::Result { + SkipList::::map_mut_with_comparator(path, opts, open_options, mmap_options, cmp).map(Self) + } + + /// Like [`SkipMap::map_mut`], but with a custom [`Comparator`] and a [`PathBuf`](std::path::PathBuf) builder. + /// + /// # Safety + /// - If trying to reopens a skiplist, then the trailer type must be the same as the previous one + #[cfg(all(feature = "memmap", not(target_family = "wasm")))] + #[cfg_attr(docsrs, doc(cfg(all(feature = "memmap", not(target_family = "wasm")))))] + #[inline] + pub unsafe fn map_mut_with_comparator_and_path_builder( + path_builder: PB, + opts: Options, + open_options: OpenOptions, + mmap_options: MmapOptions, + cmp: C, + ) -> Result> + where + PB: FnOnce() -> Result, + { + SkipList::::map_mut_with_comparator_and_path_builder( + path_builder, + opts, + open_options, + mmap_options, + cmp, + ) + .map(Self) + } + + /// Like [`SkipMap::map`], but with a custom [`Comparator`]. 
+ /// + /// # Safety + /// - If trying to reopens a skiplist, then the trailer type must be the same as the previous one + #[cfg(all(feature = "memmap", not(target_family = "wasm")))] + #[cfg_attr(docsrs, doc(cfg(all(feature = "memmap", not(target_family = "wasm")))))] + #[inline] + pub unsafe fn map_with_comparator>( + path: P, + opts: Options, + open_options: OpenOptions, + mmap_options: MmapOptions, + cmp: C, + ) -> std::io::Result { + SkipList::map_with_comparator(path, opts, open_options, mmap_options, cmp).map(Self) + } + + /// Like [`SkipMap::map`], but with a custom [`Comparator`] and a [`PathBuf`](std::path::PathBuf) builder. + /// + /// # Safety + /// - If trying to reopens a skiplist, then the trailer type must be the same as the previous one + #[cfg(all(feature = "memmap", not(target_family = "wasm")))] + #[cfg_attr(docsrs, doc(cfg(all(feature = "memmap", not(target_family = "wasm")))))] + #[inline] + pub unsafe fn map_with_comparator_and_path_builder( + path_builder: PB, + opts: Options, + open_options: OpenOptions, + mmap_options: MmapOptions, + cmp: C, + ) -> Result> + where + PB: FnOnce() -> Result, + { + SkipList::map_with_comparator_and_path_builder( + path_builder, + opts, + open_options, + mmap_options, + cmp, + ) + .map(Self) + } + + /// Like [`SkipMap::map_anon`], but with a custom [`Comparator`]. + #[cfg(all(feature = "memmap", not(target_family = "wasm")))] + #[cfg_attr(docsrs, doc(cfg(all(feature = "memmap", not(target_family = "wasm")))))] + #[inline] + pub fn map_anon_with_comparator( + opts: Options, + mmap_options: MmapOptions, + cmp: C, + ) -> std::io::Result { + SkipList::map_anon_with_comparator(opts, mmap_options, cmp).map(Self) + } + + /// Clear the skiplist to empty and re-initialize. + /// + /// # Safety + /// - The current pointers get from the ARENA cannot be used anymore after calling this method. + /// - This method is not thread-safe. 
+ /// + /// # Example + /// + /// Undefine behavior: + /// + /// ```ignore + /// let map = SkipMap::new(Options::new()).unwrap(); + /// + /// map.insert(1u8, b"hello", b"world").unwrap(); + /// + /// let data = map.get(b"hello").unwrap(); + /// + /// map.clear().unwrap(); + /// + /// let w = data[0]; // undefined behavior + /// ``` + pub unsafe fn clear(&mut self) -> Result<(), Error> { + self.0.clear() + } + + /// Flushes outstanding memory map modifications to disk. + /// + /// When this method returns with a non-error result, + /// all outstanding changes to a file-backed memory map are guaranteed to be durably stored. + /// The file's metadata (including last modification timestamp) may not be updated. + #[cfg(all(feature = "memmap", not(target_family = "wasm")))] + #[cfg_attr(docsrs, doc(cfg(all(feature = "memmap", not(target_family = "wasm")))))] + pub fn flush(&self) -> std::io::Result<()> { + self.0.flush() + } + + /// Asynchronously flushes outstanding memory map modifications to disk. + /// + /// This method initiates flushing modified pages to durable storage, but it will not wait for + /// the operation to complete before returning. The file's metadata (including last + /// modification timestamp) may not be updated. + #[cfg(all(feature = "memmap", not(target_family = "wasm")))] + #[cfg_attr(docsrs, doc(cfg(all(feature = "memmap", not(target_family = "wasm")))))] + pub fn flush_async(&self) -> std::io::Result<()> { + self.0.flush_async() + } + + #[cfg(all(test, feature = "std"))] + #[inline] + pub(crate) fn with_yield_now(self) -> Self { + Self(self.0.with_yield_now()) + } +} + +impl SkipMap { + /// Returns `true` if the key exists in the map. 
+ /// + /// # Example + /// + /// ```rust + /// use skl::{sync::map::SkipMap, Options}; + /// use core::sync::atomic::Ordering; + /// + /// let map = SkipMap::new(Options::new()).unwrap(); + /// + /// map.insert(b"hello", b"world").unwrap(); + /// + /// map.compare_remove(b"hello", Ordering::Relaxed, Ordering::Relaxed).unwrap(); + /// + /// assert!(!map.contains_key(b"hello")); + /// ``` + #[inline] + pub fn contains_key<'a, 'b: 'a>(&'a self, key: &'b [u8]) -> bool { + self.0.contains_key(MIN_VERSION, key) + } + + /// Returns the first entry in the map. + pub fn first(&self) -> Option> { + self.0.first(MIN_VERSION) + } + + /// Returns the last entry in the map. + pub fn last(&self) -> Option> { + self.0.last(MIN_VERSION) + } + + /// Returns the value associated with the given key, if it exists. + /// + /// # Example + /// + /// ```rust + /// use skl::{sync::map::SkipMap, Options}; + /// use core::sync::atomic::Ordering; + /// + /// let map = SkipMap::new(Options::new()).unwrap(); + /// + /// map.insert(b"hello", b"world").unwrap(); + /// + /// let ent = map.get(b"hello").unwrap(); + /// assert_eq!(ent.value(), b"world"); + /// + /// map.compare_remove(b"hello", Ordering::Relaxed, Ordering::Relaxed).unwrap(); + /// + /// assert!(map.get(b"hello").is_none()); + /// ``` + pub fn get<'a, 'b: 'a>(&'a self, key: &'b [u8]) -> Option> { + self.0.get(MIN_VERSION, key) + } + + /// Returns an `EntryRef` pointing to the highest element whose key is below the given bound. + /// If no such element is found then `None` is returned. + pub fn upper_bound<'a, 'b: 'a>( + &'a self, + upper: Bound<&'b [u8]>, + ) -> Option> { + self.0.upper_bound(MIN_VERSION, upper) + } + + /// Returns an `EntryRef` pointing to the lowest element whose key is above the given bound. + /// If no such element is found then `None` is returned. 
+ pub fn lower_bound<'a, 'b: 'a>( + &'a self, + lower: Bound<&'b [u8]>, + ) -> Option> { + self.0.lower_bound(MIN_VERSION, lower) + } + + /// Returns a new iterator, this iterator will yield the latest version of all entries in the map less or equal to the given version. + #[inline] + pub fn iter(&self) -> Iter { + self.0.iter(MIN_VERSION) + } + + /// Returns a iterator that within the range, this iterator will yield the latest version of all entries in the range less or equal to the given version. + #[inline] + pub fn range<'a, Q, R>(&'a self, range: R) -> Iter<'a, Allocator, C, Q, R> + where + Q: ?Sized + Borrow<[u8]>, + R: RangeBounds + 'a, + { + self.0.range(MIN_VERSION, range) + } +} + +impl SkipMap { + /// Upserts a new key-value pair if it does not yet exist, if the key with the given version already exists, it will update the value. + /// Unlike [`get_or_insert`](SkipMap::get_or_insert), this method will update the value if the key with the given version already exists. + /// + /// - Returns `Ok(None)` if the key was successfully inserted. + /// - Returns `Ok(Some(old))` if the key with the given version already exists and the value is successfully updated. + #[inline] + pub fn insert<'a, 'b: 'a>( + &'a self, + key: &'b [u8], + value: &'b [u8], + ) -> Result>, Error> { + self.0.insert(MIN_VERSION, key, value, ()) + } + + /// Upserts a new key-value pair at the given height if it does not yet exist, if the key with the given version already exists, it will update the value. + /// Unlike [`get_or_insert_at_height`](SkipMap::get_or_insert_at_height), this method will update the value if the key with the given version already exists. + /// + /// - Returns `Ok(None)` if the key was successfully inserted. + /// - Returns `Ok(Some(old))` if the key with the given version already exists and the value is successfully updated. 
+ /// + /// # Example + /// + /// ```rust + /// use skl::{sync::map::SkipMap, Options}; + /// + /// let map = SkipMap::new(Options::new()).unwrap(); + /// + /// let height = map.random_height(); + /// map.insert_at_height(height, b"hello", b"world").unwrap(); + /// ``` + #[inline] + pub fn insert_at_height<'a, 'b: 'a>( + &'a self, + height: Height, + key: &'b [u8], + value: &'b [u8], + ) -> Result>, Error> { + self.0.insert_at_height(MIN_VERSION, height, key, value, ()) + } + + /// Upserts a new key if it does not yet exist, if the key with the given version already exists, it will update the value. + /// Unlike [`get_or_insert_with_value_builder`](SkipMap::get_or_insert_with_value_builder), this method will update the value if the key with the given version already exists. + /// + /// This method is useful when you want to insert a key and you know the value size but you do not have the value + /// at this moment. + /// + /// A placeholder will be inserted first, then you will get an [`VacantBuffer`], + /// and you must fill the buffer with bytes later in the closure. + /// + /// - Returns `Ok(None)` if the key was successfully inserted. + /// - Returns `Ok(Some(old))` if the key with the given version already exists and the value is successfully updated. 
+ /// + /// # Example + /// + /// ```rust + /// use skl::{sync::map::SkipMap, ValueBuilder, Options}; + /// + /// struct Person { + /// id: u32, + /// name: String, + /// } + /// + /// impl Person { + /// fn encoded_size(&self) -> usize { + /// 4 + self.name.len() + /// } + /// } + /// + /// + /// let alice = Person { + /// id: 1, + /// name: "Alice".to_string(), + /// }; + /// + /// let encoded_size = alice.encoded_size(); + /// + /// let l = SkipMap::new(Options::new()).unwrap(); + /// + /// let vb = ValueBuilder::new(encoded_size as u32, |mut val| { + /// val.put_u32_le(alice.id).unwrap(); + /// val.put_slice(alice.name.as_bytes()).unwrap(); + /// Ok(()) + /// }); + /// + /// l.insert_with_value_builder::(b"alice", vb) + /// .unwrap(); + /// ``` + #[inline] + pub fn insert_with_value_builder<'a, 'b: 'a, E>( + &'a self, + key: &'b [u8], + value_builder: ValueBuilder) -> Result<(), E>>, + ) -> Result>, Either> { + self.0.insert_at_height_with_value_builder( + MIN_VERSION, + self.random_height(), + key, + value_builder, + (), + ) + } + + /// Upserts a new key if it does not yet exist, if the key with the given version already exists, it will update the value. + /// Unlike [`get_or_insert_with_value_builder`](SkipMap::get_or_insert_with_value_builder), this method will update the value if the key with the given version already exists. + /// + /// This method is useful when you want to insert a key and you know the value size but you do not have the value + /// at this moment. + /// + /// A placeholder will be inserted first, then you will get an [`VacantBuffer`], + /// and you must fill the buffer with bytes later in the closure. + /// + /// - Returns `Ok(None)` if the key was successfully inserted. + /// - Returns `Ok(Some(old))` if the key with the given version already exists and the value is successfully updated. 
+ /// + /// # Example + /// + /// ```rust + /// use skl::{sync::map::SkipMap, ValueBuilder, Options}; + /// + /// struct Person { + /// id: u32, + /// name: String, + /// } + /// + /// impl Person { + /// fn encoded_size(&self) -> usize { + /// 4 + self.name.len() + /// } + /// } + /// + /// + /// let alice = Person { + /// id: 1, + /// name: "Alice".to_string(), + /// }; + /// + /// let encoded_size = alice.encoded_size(); + /// + /// let l = SkipMap::new(Options::new()).unwrap(); + /// + /// let vb = ValueBuilder::new(encoded_size as u32, |mut val| { + /// val.put_u32_le(alice.id).unwrap(); + /// val.put_slice(alice.name.as_bytes()).unwrap(); + /// Ok(()) + /// }); + /// + /// let height = l.random_height(); + /// l.insert_at_height_with_value_builder::(height, b"alice", vb) + /// .unwrap(); + /// ``` + pub fn insert_at_height_with_value_builder<'a, 'b: 'a, E>( + &'a self, + height: Height, + key: &'b [u8], + value_builder: ValueBuilder) -> Result<(), E>>, + ) -> Result>, Either> { + self + .0 + .insert_at_height_with_value_builder(MIN_VERSION, height, key, value_builder, ()) + } + + /// Inserts a new key-value pair if it does not yet exist. + /// + /// Unlike [`insert`](SkipMap::insert), this method will not update the value if the key with the given version already exists. + /// + /// - Returns `Ok(None)` if the key was successfully get_or_inserted. + /// - Returns `Ok(Some(_))` if the key with the given version already exists. + #[inline] + pub fn get_or_insert<'a, 'b: 'a>( + &'a self, + key: &'b [u8], + value: &'b [u8], + ) -> Result>, Error> { + self + .0 + .get_or_insert_at_height(MIN_VERSION, self.random_height(), key, value, ()) + } + + /// Inserts a new key-value pair at height if it does not yet exist. + /// + /// Unlike [`insert_at_height`](SkipMap::insert_at_height), this method will not update the value if the key with the given version already exists. + /// + /// - Returns `Ok(None)` if the key was successfully get_or_inserted. 
+ /// - Returns `Ok(Some(_))` if the key with the given version already exists. + pub fn get_or_insert_at_height<'a, 'b: 'a>( + &'a self, + height: Height, + key: &'b [u8], + value: &'b [u8], + ) -> Result>, Error> { + self + .0 + .get_or_insert_at_height(MIN_VERSION, height, key, value, ()) + } + + /// Inserts a new key if it does not yet exist. + /// + /// Unlike [`insert_with_value_builder`](SkipMap::insert_with_value_builder), this method will not update the value if the key with the given version already exists. + /// + /// This method is useful when you want to get_or_insert a key and you know the value size but you do not have the value + /// at this moment. + /// + /// A placeholder will be inserted first, then you will get an [`VacantBuffer`], + /// and you must fill the buffer with bytes later in the closure. + /// + /// - Returns `Ok(None)` if the key was successfully get_or_inserted. + /// - Returns `Ok(Some(_))` if the key with the given version already exists. + /// + /// # Example + /// + /// ```rust + /// use skl::{sync::map::SkipMap, ValueBuilder, Options}; + /// + /// struct Person { + /// id: u32, + /// name: String, + /// } + /// + /// impl Person { + /// fn encoded_size(&self) -> usize { + /// 4 + self.name.len() + /// } + /// } + /// + /// + /// let alice = Person { + /// id: 1, + /// name: "Alice".to_string(), + /// }; + /// + /// let encoded_size = alice.encoded_size(); + /// + /// let l = SkipMap::new(Options::new()).unwrap(); + /// + /// let vb = ValueBuilder::new(encoded_size as u32, |mut val| { + /// val.put_u32_le(alice.id).unwrap(); + /// val.put_slice(alice.name.as_bytes()).unwrap(); + /// Ok(()) + /// }); + /// l.get_or_insert_with_value_builder::(b"alice", vb) + /// .unwrap(); + /// ``` + #[inline] + pub fn get_or_insert_with_value_builder<'a, 'b: 'a, E>( + &'a self, + key: &'b [u8], + value_builder: ValueBuilder) -> Result<(), E>>, + ) -> Result>, Either> { + self.get_or_insert_at_height_with_value_builder(self.random_height(), 
key, value_builder) + } + + /// Inserts a new key if it does not yet exist. + /// + /// Unlike [`insert_at_height_with_value_builder`](SkipMap::insert_at_height_with_value_builder), this method will not update the value if the key with the given version already exists. + /// + /// This method is useful when you want to get_or_insert a key and you know the value size but you do not have the value + /// at this moment. + /// + /// A placeholder will be inserted first, then you will get an [`VacantBuffer`], + /// and you must fill the buffer with bytes later in the closure. + /// + /// - Returns `Ok(None)` if the key was successfully get_or_inserted. + /// - Returns `Ok(Some(_))` if the key with the given version already exists. + /// + /// # Example + /// + /// ```rust + /// use skl::{sync::map::SkipMap, ValueBuilder, Options}; + /// + /// struct Person { + /// id: u32, + /// name: String, + /// } + /// + /// impl Person { + /// fn encoded_size(&self) -> usize { + /// 4 + self.name.len() + /// } + /// } + /// + /// + /// let alice = Person { + /// id: 1, + /// name: "Alice".to_string(), + /// }; + /// + /// let encoded_size = alice.encoded_size(); + /// + /// let l = SkipMap::new(Options::new()).unwrap(); + /// + /// let vb = ValueBuilder::new(encoded_size as u32, |mut val| { + /// val.put_u32_le(alice.id).unwrap(); + /// val.put_slice(alice.name.as_bytes()).unwrap(); + /// Ok(()) + /// }); + /// + /// let height = l.random_height(); + /// l.get_or_insert_at_height_with_value_builder::(height, b"alice", vb) + /// .unwrap(); + /// ``` + #[inline] + pub fn get_or_insert_at_height_with_value_builder<'a, 'b: 'a, E>( + &'a self, + height: Height, + key: &'b [u8], + value_builder: ValueBuilder) -> Result<(), E>>, + ) -> Result>, Either> { + self + .0 + .get_or_insert_at_height_with_value_builder(MIN_VERSION, height, key, value_builder, ()) + } + + /// Upserts a new key if it does not yet exist, if the key with the given version already exists, it will update the value. 
+ /// Unlike [`get_or_insert_with_builders`](SkipMap::get_or_insert_with_builders), this method will update the value if the key with the given version already exists. + /// + /// This method is useful when you want to insert a key and you know the key size and value size but you do not have the key and value + /// at this moment. + /// + /// A placeholder will be inserted first, then you will get an [`VacantBuffer`], + /// and you must fill the buffer with bytes later in the closure. + /// + /// - Returns `Ok(None)` if the key was successfully inserted. + /// - Returns `Ok(Some(old))` if the key with the given version already exists and the value is successfully updated. + /// + /// # Example + /// + /// ```rust + /// use skl::{sync::map::SkipMap, KeyBuilder, ValueBuilder, Options}; + /// + /// struct Person { + /// id: u32, + /// name: String, + /// } + /// + /// impl Person { + /// fn encoded_size(&self) -> usize { + /// 4 + self.name.len() + /// } + /// } + /// + /// + /// let alice = Person { + /// id: 1, + /// name: "Alice".to_string(), + /// }; + /// + /// let encoded_size = alice.encoded_size(); + /// + /// let l = SkipMap::new(Options::new()).unwrap(); + /// + /// let kb = KeyBuilder::new(5u8.into(), |mut key| { + /// key.put_slice(b"alice").unwrap(); + /// Ok(()) + /// }); + /// + /// let vb = ValueBuilder::new(encoded_size as u32, |mut val| { + /// val.put_u32_le(alice.id).unwrap(); + /// val.put_slice(alice.name.as_bytes()).unwrap(); + /// Ok(()) + /// }); + /// + /// l.insert_with_builders::<(), ()>(kb, vb) + /// .unwrap(); + /// ``` + #[inline] + pub fn insert_with_builders<'a, KE, VE>( + &'a self, + key_builder: KeyBuilder) -> Result<(), KE>>, + value_builder: ValueBuilder) -> Result<(), VE>>, + ) -> Result>, Among> { + self.0.insert_at_height_with_builders( + MIN_VERSION, + self.random_height(), + key_builder, + value_builder, + (), + ) + } + + /// Upserts a new key if it does not yet exist, if the key with the given version already exists, it will 
update the value. + /// Unlike [`get_or_insert_with_builders_and_trailer`](SkipMap::get_or_insert_with_builders_and_trailer), this method will update the value if the key with the given version already exists. + /// + /// This method is useful when you want to insert a key and you know the key size and value size but you do not have the key and value + /// at this moment. + /// + /// A placeholder will be inserted first, then you will get an [`VacantBuffer`], + /// and you must fill the buffer with bytes later in the closure. + /// + /// - Returns `Ok(None)` if the key was successfully inserted. + /// - Returns `Ok(Some(old))` if the key with the given version already exists and the value is successfully updated. + /// + /// # Example + /// + /// ```rust + /// use skl::{sync::map::SkipMap, KeyBuilder, ValueBuilder, Options}; + /// + /// struct Person { + /// id: u32, + /// name: String, + /// } + /// + /// impl Person { + /// fn encoded_size(&self) -> usize { + /// 4 + self.name.len() + /// } + /// } + /// + /// + /// let alice = Person { + /// id: 1, + /// name: "Alice".to_string(), + /// }; + /// + /// let encoded_size = alice.encoded_size(); + /// + /// let l = SkipMap::new(Options::new()).unwrap(); + /// + /// let kb = KeyBuilder::new(5u8.into(), |mut key| { + /// key.put_slice(b"alice").unwrap(); + /// Ok(()) + /// }); + /// + /// let vb = ValueBuilder::new(encoded_size as u32, |mut val| { + /// val.put_u32_le(alice.id).unwrap(); + /// val.put_slice(alice.name.as_bytes()).unwrap(); + /// Ok(()) + /// }); + /// + /// let height = l.random_height(); + /// l.insert_at_height_with_builders::<(), ()>(height, kb, vb) + /// .unwrap(); + /// ``` + #[inline] + pub fn insert_at_height_with_builders<'a, KE, VE>( + &'a self, + height: Height, + key_builder: KeyBuilder) -> Result<(), KE>>, + value_builder: ValueBuilder) -> Result<(), VE>>, + ) -> Result>, Among> { + self + .0 + .insert_at_height_with_builders(MIN_VERSION, height, key_builder, value_builder, ()) + } + + /// 
Inserts a new key if it does not yet exist. + /// + /// Unlike [`insert_with_builders`](SkipMap::insert_with_builders), this method will not update the value if the key with the given version already exists. + /// + /// This method is useful when you want to get_or_insert a key and you know the value size but you do not have the value + /// at this moment. + /// + /// A placeholder will be inserted first, then you will get an [`VacantBuffer`], + /// and you must fill the buffer with bytes later in the closure. + /// + /// # Example + /// + /// ```rust + /// use skl::{sync::map::SkipMap, KeyBuilder, ValueBuilder, Options}; + /// + /// struct Person { + /// id: u32, + /// name: String, + /// } + /// + /// impl Person { + /// fn encoded_size(&self) -> usize { + /// 4 + self.name.len() + /// } + /// } + /// + /// + /// let alice = Person { + /// id: 1, + /// name: "Alice".to_string(), + /// }; + /// + /// let encoded_size = alice.encoded_size(); + /// + /// let l = SkipMap::new(Options::new()).unwrap(); + /// + /// let kb = KeyBuilder::new(5u8.into(), |mut key| { + /// key.put_slice(b"alice").unwrap(); + /// Ok(()) + /// }); + /// + /// let vb = ValueBuilder::new(encoded_size as u32, |mut val| { + /// val.put_u32_le(alice.id).unwrap(); + /// val.put_slice(alice.name.as_bytes()).unwrap(); + /// Ok(()) + /// }); + /// + /// l.get_or_insert_with_builders::<(), ()>(kb, vb) + /// .unwrap(); + /// ``` + #[inline] + pub fn get_or_insert_with_builders<'a, KE, VE>( + &'a self, + key_builder: KeyBuilder) -> Result<(), KE>>, + value_builder: ValueBuilder) -> Result<(), VE>>, + ) -> Result>, Among> { + self.0.get_or_insert_at_height_with_builders( + MIN_VERSION, + self.random_height(), + key_builder, + value_builder, + (), + ) + } + + /// Inserts a new key if it does not yet exist. + /// + /// Unlike [`insert_at_height_with_builders`](SkipMap::insert_at_height_with_builders), this method will not update the value if the key with the given version already exists. 
+ /// + /// This method is useful when you want to get_or_insert a key and you know the value size but you do not have the value + /// at this moment. + /// + /// A placeholder will be inserted first, then you will get an [`VacantBuffer`], + /// and you must fill the buffer with bytes later in the closure. + /// + /// # Example + /// + /// ```rust + /// use skl::{sync::map::SkipMap, KeyBuilder, ValueBuilder, Options}; + /// + /// struct Person { + /// id: u32, + /// name: String, + /// } + /// + /// impl Person { + /// fn encoded_size(&self) -> usize { + /// 4 + self.name.len() + /// } + /// } + /// + /// + /// let alice = Person { + /// id: 1, + /// name: "Alice".to_string(), + /// }; + /// + /// let encoded_size = alice.encoded_size(); + /// + /// let l = SkipMap::new(Options::new()).unwrap(); + /// + /// let kb = KeyBuilder::new(5u8.into(), |mut key| { + /// key.put_slice(b"alice").unwrap(); + /// Ok(()) + /// }); + /// + /// let vb = ValueBuilder::new(encoded_size as u32, |mut val| { + /// val.put_u32_le(alice.id).unwrap(); + /// val.put_slice(alice.name.as_bytes()).unwrap(); + /// Ok(()) + /// }); + /// + /// let height = l.random_height(); + /// l.get_or_insert_at_height_with_builders::<(), ()>(height, kb, vb) + /// .unwrap(); + /// ``` + pub fn get_or_insert_at_height_with_builders<'a, KE, VE>( + &'a self, + height: Height, + key_builder: KeyBuilder) -> Result<(), KE>>, + value_builder: ValueBuilder) -> Result<(), VE>>, + ) -> Result>, Among> { + self.0.get_or_insert_at_height_with_builders( + MIN_VERSION, + height, + key_builder, + value_builder, + (), + ) + } + + /// Removes the key-value pair if it exists. A CAS operation will be used to ensure the operation is atomic. + /// + /// Unlike [`get_or_remove`](SkipMap::get_or_remove), this method will remove the value if the key with the given version already exists. + /// + /// - Returns `Ok(None)`: + /// - if the remove operation is successful or the key is marked in remove status by other threads. 
+ /// - Returns `Ok(Either::Right(current))` if the key with the given version already exists + /// and the entry is not successfully removed because of an update on this entry happens in another thread. + #[inline] + pub fn compare_remove<'a, 'b: 'a>( + &'a self, + key: &'b [u8], + success: Ordering, + failure: Ordering, + ) -> Result>, Error> { + self.compare_remove_at_height(self.random_height(), key, success, failure) + } + + /// Removes the key-value pair if it exists. A CAS operation will be used to ensure the operation is atomic. + /// + /// Unlike [`get_or_remove_at_height`](SkipMap::get_or_remove_at_height), this method will remove the value if the key with the given version already exists. + /// + /// - Returns `Ok(None)`: + /// - if the remove operation is successful or the key is marked in remove status by other threads. + /// - Returns `Ok(Either::Right(current))` if the key with the given version already exists + /// and the entry is not successfully removed because of an update on this entry happens in another thread. + pub fn compare_remove_at_height<'a, 'b: 'a>( + &'a self, + height: Height, + key: &'b [u8], + success: Ordering, + failure: Ordering, + ) -> Result>, Error> { + self + .0 + .compare_remove_at_height(MIN_VERSION, height, key, (), success, failure) + } + + /// Gets or removes the key-value pair if it exists. + /// Unlike [`compare_remove`](SkipMap::compare_remove), this method will not remove the value if the key with the given version already exists. + /// + /// - Returns `Ok(None)` if the key does not exist. + /// - Returns `Ok(Some(old))` if the key with the given version already exists. + #[inline] + pub fn get_or_remove<'a, 'b: 'a>( + &'a self, + key: &'b [u8], + ) -> Result>, Error> { + self.get_or_remove_at_height(self.random_height(), key) + } + + /// Gets or removes the key-value pair if it exists. 
+ /// Unlike [`compare_remove_at_height`](SkipMap::compare_remove_at_height), this method will not remove the value if the key with the given version already exists. + /// + /// - Returns `Ok(None)` if the key does not exist. + /// - Returns `Ok(Some(old))` if the key with the given version already exists. + /// + /// # Example + /// + /// ```rust + /// use skl::{sync::map::SkipMap, Options}; + /// + /// let map = SkipMap::new(Options::new()).unwrap(); + /// + /// map.insert(b"hello", b"world").unwrap(); + /// + /// let height = map.random_height(); + /// map.get_or_remove_at_height(height, b"hello").unwrap(); + /// ``` + #[inline] + pub fn get_or_remove_at_height<'a, 'b: 'a>( + &'a self, + height: Height, + key: &'b [u8], + ) -> Result>, Error> { + self.0.get_or_remove_at_height(MIN_VERSION, height, key, ()) + } + + /// Gets or removes the key-value pair if it exists. + /// Unlike [`compare_remove_with_builder_and_trailer`](SkipMap::compare_remove_with_builder_and_trailer), this method will not remove the value if the key with the given version already exists. + /// + /// - Returns `Ok(None)` if the key does not exist. + /// - Returns `Ok(Some(old))` if the key with the given version already exists. + /// + /// This method is useful when you want to get_or_remove a key and you know the key size but you do not have the key + /// at this moment. + /// + /// A placeholder will be inserted first, then you will get an [`VacantBuffer`], + /// and you must fill the buffer with bytes later in the closure. 
+ /// + /// # Example + /// + /// ```rust + /// use skl::{sync::map::SkipMap, KeyBuilder, Options}; + /// + /// struct Person { + /// id: u32, + /// name: String, + /// } + /// + /// impl Person { + /// fn encoded_size(&self) -> usize { + /// 4 + self.name.len() + /// } + /// } + /// + /// + /// let alice = Person { + /// id: 1, + /// name: "Alice".to_string(), + /// }; + /// + /// let encoded_size = alice.encoded_size(); + /// + /// let l = SkipMap::new(Options::new()).unwrap(); + /// + /// let kb = KeyBuilder::new(5u8.into(), |mut key| { + /// key.put_slice(b"alice").unwrap(); + /// Ok(()) + /// }); + /// l.get_or_remove_with_builder::(kb) + /// .unwrap(); + /// ``` + pub fn get_or_remove_with_builder<'a, 'b: 'a, E>( + &'a self, + key_builder: KeyBuilder) -> Result<(), E>>, + ) -> Result>, Either> { + self + .0 + .get_or_remove_at_height_with_builder(MIN_VERSION, self.random_height(), key_builder, ()) + } + + /// Gets or removes the key-value pair if it exists. + /// Unlike [`compare_remove_at_height_with_builder_and_trailer`](SkipMap::compare_remove_at_height_with_builder_and_trailer), this method will not remove the value if the key with the given version already exists. + /// + /// - Returns `Ok(None)` if the key does not exist. + /// - Returns `Ok(Some(old))` if the key with the given version already exists. + /// + /// This method is useful when you want to get_or_remove a key and you know the key size but you do not have the key + /// at this moment. + /// + /// A placeholder will be inserted first, then you will get an [`VacantBuffer`], + /// and you must fill the buffer with bytes later in the closure. 
+ /// + /// # Example + /// + /// ```rust + /// use skl::{sync::map::SkipMap, KeyBuilder, Options}; + /// + /// struct Person { + /// id: u32, + /// name: String, + /// } + /// + /// impl Person { + /// fn encoded_size(&self) -> usize { + /// 4 + self.name.len() + /// } + /// } + /// + /// + /// let alice = Person { + /// id: 1, + /// name: "Alice".to_string(), + /// }; + /// + /// let encoded_size = alice.encoded_size(); + /// + /// let l = SkipMap::new(Options::new()).unwrap(); + /// + /// let kb = KeyBuilder::new(5u8.into(), |mut key| { + /// key.put_slice(b"alice").unwrap(); + /// Ok(()) + /// }); + /// let height = l.random_height(); + /// l.get_or_remove_at_height_with_builder::(height, kb) + /// .unwrap(); + /// ``` + pub fn get_or_remove_at_height_with_builder<'a, 'b: 'a, E>( + &'a self, + height: Height, + key_builder: KeyBuilder) -> Result<(), E>>, + ) -> Result>, Either> { + self + .0 + .get_or_remove_at_height_with_builder(MIN_VERSION, height, key_builder, ()) + } +} diff --git a/src/sync/tests.rs b/src/sync/tests.rs new file mode 100644 index 0000000..6f09bfe --- /dev/null +++ b/src/sync/tests.rs @@ -0,0 +1,87 @@ +#![allow(warnings)] + +use super::*; +use crate::Descend; + +use std::format; + +use std::sync::Arc; + +use rarena_allocator::Freelist; +#[cfg(feature = "std")] +use wg::WaitGroup; + +const ARENA_SIZE: usize = 1 << 20; +#[cfg(feature = "std")] +const BIG_ARENA_SIZE: usize = 120 << 20; +const TEST_OPTIONS: Options = Options::new().with_capacity(ARENA_SIZE as u32); +const UNIFY_TEST_OPTIONS: Options = Options::new() + .with_capacity(ARENA_SIZE as u32) + .with_unify(true); +#[cfg(feature = "std")] +const BIG_TEST_OPTIONS: Options = Options::new().with_capacity(BIG_ARENA_SIZE as u32); +#[cfg(feature = "std")] +const UNIFY_BIG_TEST_OPTIONS: Options = Options::new() + .with_capacity(BIG_ARENA_SIZE as u32) + .with_unify(true); + +fn run(f: impl Fn() + Send + Sync + 'static) { + f(); +} + +/// Only used for testing + +pub fn key(i: usize) -> 
std::vec::Vec { + format!("{:05}", i).into_bytes() +} + +/// Only used for testing +#[cfg(feature = "std")] +pub fn big_value(i: usize) -> std::vec::Vec { + format!("{:01048576}", i).into_bytes() +} + +/// Only used for testing +pub fn new_value(i: usize) -> std::vec::Vec { + format!("{:05}", i).into_bytes() +} + +fn make_int_key(i: usize) -> std::vec::Vec { + format!("{:05}", i).into_bytes() +} + +fn make_value(i: usize) -> std::vec::Vec { + format!("v{:05}", i).into_bytes() +} + +#[test] +fn test_encode_decode_key_size() { + // Test cases + let test_cases = [ + (0, 0), // Minimum values + (1, 1), // Small values + (0x1FFFFFF, 0), // Maximum key_size, minimum height + (0, 0b11111), // Minimum key_size, maximum height + (0x1FFFFFF, 0b11111), // Maximum values + (0x1FFFFFF - 1, 0b11111 - 1), // One less than maximum values + (12345678, 31), // Random values + (0, 1), // Edge case: Minimum key_size, small height + (1, 0), // Edge case: Small key_size, minimum height + ]; + + for &(key_size, height) in &test_cases { + let encoded = encode_key_size_and_height(key_size, height); + let (decoded_key_size, decoded_height) = decode_key_size_and_height(encoded); + + assert_eq!(key_size, decoded_key_size); + assert_eq!(height, decoded_height); + } +} + +mod full; + +mod map; + +mod trailed; + +mod versioned; diff --git a/src/sync/tests/full.rs b/src/sync/tests/full.rs new file mode 100644 index 0000000..63e6064 --- /dev/null +++ b/src/sync/tests/full.rs @@ -0,0 +1,2927 @@ +use super::*; + +type SkipList = crate::sync::full::SkipMap; + +type SkipMap = crate::sync::full::SkipMap<(), Ascend>; + +fn empty_in(l: SkipMap) { + let mut it = l.iter_all_versions(MIN_VERSION); + + assert!(it.seek_lower_bound(Bound::Unbounded).is_none()); + assert!(it.seek_upper_bound(Bound::Unbounded).is_none()); + assert!(it.seek_lower_bound(Bound::Included(b"aaa")).is_none()); + assert!(it.seek_upper_bound(Bound::Excluded(b"aaa")).is_none()); + 
assert!(it.seek_lower_bound(Bound::Excluded(b"aaa")).is_none()); + assert!(it.seek_upper_bound(Bound::Included(b"aaa")).is_none()); + assert!(l.first(MIN_VERSION).is_none()); + assert!(l.last(MIN_VERSION).is_none()); + assert!(l.0.ge(MIN_VERSION, b"aaa", false).is_none()); + assert!(l.0.lt(MIN_VERSION, b"aaa", false).is_none()); + assert!(l.0.gt(MIN_VERSION, b"aaa", false).is_none()); + assert!(l.0.le(MIN_VERSION, b"aaa", false).is_none()); + assert!(l.get(MIN_VERSION, b"aaa").is_none()); + assert!(!l.contains_key(MIN_VERSION, b"aaa")); + assert!(l.allocated() > 0); + assert!(l.capacity() > 0); + assert_eq!(l.remaining(), l.capacity() - l.allocated()); +} + +#[test] +fn test_empty() { + run(|| empty_in(SkipList::new(Options::new()).unwrap())); +} + +#[test] +fn test_empty_unify() { + run(|| empty_in(SkipList::new(UNIFY_TEST_OPTIONS).unwrap())); +} + +#[test] +#[cfg(feature = "memmap")] +#[cfg_attr(miri, ignore)] +fn test_empty_map_mut() { + run(|| unsafe { + let dir = tempfile::tempdir().unwrap(); + let p = dir.path().join("test_skipmap_empty_map_mut"); + let open_options = OpenOptions::default() + .create_new(Some(1000)) + .read(true) + .write(true); + let map_options = MmapOptions::default(); + + let x = SkipList::map_mut(p, Options::new(), open_options, map_options).unwrap(); + empty_in(x); + }) +} + +#[test] +#[cfg(feature = "memmap")] +fn test_empty_map_anon() { + run(|| { + let map_options = MmapOptions::default().len(1000); + empty_in(SkipList::map_anon(Options::new(), map_options).unwrap()); + }) +} + +#[test] +#[cfg(feature = "memmap")] +fn test_empty_map_anon_unify() { + run(|| { + let map_options = MmapOptions::default().len(1000); + empty_in(SkipList::map_anon(TEST_OPTIONS, map_options).unwrap()); + }) +} + +fn full_in(l: impl FnOnce(usize) -> SkipMap) { + let l = l(1000); + let mut found_arena_full = false; + + for i in 0..100 { + if let Err(e) = l.get_or_insert(0, &make_int_key(i), &make_value(i), ()) { + assert!(matches!( + e, + 
Error::Arena(ArenaError::InsufficientSpace { .. }) + )); + found_arena_full = true; + break; + } + } + + assert!(found_arena_full); +} + +#[test] +fn test_full() { + run(|| { + full_in(|n| { + SkipList::new( + Options::new() + .with_capacity(n as u32) + .with_freelist(Freelist::None), + ) + .unwrap() + }) + }) +} + +#[test] +fn test_full_unify() { + run(|| { + full_in(|n| { + SkipList::new( + UNIFY_TEST_OPTIONS + .with_capacity(n as u32) + .with_freelist(Freelist::None), + ) + .unwrap() + }) + }) +} + +#[test] +#[cfg(feature = "memmap")] +#[cfg_attr(miri, ignore)] +fn test_full_map_mut() { + run(|| unsafe { + let dir = tempfile::tempdir().unwrap(); + let p = dir.path().join("test_skipmap_full_map_mut"); + + full_in(|n| { + let open_options = OpenOptions::default() + .create_new(Some(n as u32)) + .read(true) + .write(true); + let map_options = MmapOptions::default(); + SkipList::map_mut( + p, + Options::new().with_freelist(Freelist::None), + open_options, + map_options, + ) + .unwrap() + }); + }) +} + +#[test] +#[cfg(feature = "memmap")] +fn test_full_map_anon() { + run(|| { + full_in(|n| { + let map_options = MmapOptions::default().len(n as u32); + SkipList::map_anon(Options::new().with_freelist(Freelist::None), map_options).unwrap() + }); + }) +} + +#[test] +#[cfg(feature = "memmap")] +fn test_full_map_anon_unify() { + run(|| { + full_in(|n| { + let map_options = MmapOptions::default().len(n as u32); + SkipList::map_anon(Options::new().with_freelist(Freelist::None), map_options).unwrap() + }); + }) +} + +fn basic_in(mut l: SkipMap) { + // Try adding values. 
+ l.get_or_insert(0, b"key1", &make_value(1), ()).unwrap(); + l.get_or_insert(0, b"key3", &make_value(3), ()).unwrap(); + l.get_or_insert(0, b"key2", &make_value(2), ()).unwrap(); + assert_eq!(l.comparator(), &Ascend); + + { + let mut it = l.iter_all_versions(0); + let ent = it.seek_lower_bound(Bound::Included(b"key1")).unwrap(); + assert_eq!(ent.key(), b"key1"); + assert_eq!(ent.value().unwrap(), &make_value(1)); + assert_eq!(ent.version(), 0); + + let ent = it.seek_lower_bound(Bound::Included(b"key2")).unwrap(); + assert_eq!(ent.key(), b"key2"); + assert_eq!(ent.value().unwrap(), &make_value(2)); + assert_eq!(ent.version(), 0); + + let ent = it.seek_lower_bound(Bound::Included(b"key3")).unwrap(); + assert_eq!(ent.key(), b"key3"); + assert_eq!(ent.value().unwrap(), &make_value(3)); + assert_eq!(ent.version(), 0); + } + + l.get_or_insert(1, "a".as_bytes(), &[], ()).unwrap(); + l.get_or_insert(2, "a".as_bytes(), &[], ()).unwrap(); + + { + let mut it = l.iter_all_versions(2); + let ent = it.seek_lower_bound(Bound::Included(b"a")).unwrap(); + assert_eq!(ent.key(), b"a"); + assert_eq!(ent.value().unwrap(), &[]); + assert_eq!(ent.version(), 2); + + let ent = it.next().unwrap(); + assert_eq!(ent.key(), b"a"); + assert_eq!(ent.value().unwrap(), &[]); + assert_eq!(ent.version(), 1); + } + + l.get_or_insert(2, "b".as_bytes(), &[], ()).unwrap(); + l.get_or_insert(1, "b".as_bytes(), &[], ()).unwrap(); + + { + let mut it = l.iter_all_versions(2); + let ent = it.seek_lower_bound(Bound::Included(b"b")).unwrap(); + assert_eq!(ent.key(), b"b"); + assert_eq!(ent.value().unwrap(), &[]); + assert_eq!(ent.version(), 2); + + let ent = it.next().unwrap(); + assert_eq!(ent.key(), b"b"); + assert_eq!(ent.value().unwrap(), &[]); + assert_eq!(ent.version(), 1); + + let ent = it.entry().unwrap(); + assert_eq!(ent.key(), b"b"); + assert_eq!(ent.value().unwrap(), &[]); + assert_eq!(ent.version(), 1); + } + + l.get_or_insert(2, b"b", &[], ()).unwrap().unwrap(); + + assert!(l.get_or_insert(2, 
b"c", &[], ()).unwrap().is_none()); + + unsafe { + l.clear().unwrap(); + } + + let l = l.clone(); + { + let mut it = l.iter_all_versions(0); + assert!(it.seek_lower_bound(Bound::Unbounded).is_none()); + assert!(it.seek_upper_bound(Bound::Unbounded).is_none()); + } + assert!(l.is_empty()); + + #[cfg(feature = "memmap")] + l.flush().unwrap(); + + #[cfg(feature = "memmap")] + l.flush_async().unwrap(); +} + +#[test] +fn test_basic() { + run(|| basic_in(SkipList::new(TEST_OPTIONS).unwrap())) +} + +#[test] +fn test_basic_unify() { + run(|| basic_in(SkipList::new(UNIFY_TEST_OPTIONS).unwrap())) +} + +#[test] +#[cfg(feature = "memmap")] +#[cfg_attr(miri, ignore)] +fn test_basic_map_mut() { + run(|| unsafe { + let dir = tempfile::tempdir().unwrap(); + let p = dir.path().join("test_skipmap_basic_map_mut"); + let open_options = OpenOptions::default() + .create_new(Some(ARENA_SIZE as u32)) + .read(true) + .write(true); + let map_options = MmapOptions::default(); + basic_in(SkipList::map_mut(p, Options::new(), open_options, map_options).unwrap()); + }) +} + +#[test] +#[cfg(feature = "memmap")] +fn test_basic_map_anon() { + run(|| { + let map_options = MmapOptions::default().len(ARENA_SIZE as u32); + basic_in(SkipList::map_anon(Options::new(), map_options).unwrap()); + }) +} + +#[test] +#[cfg(feature = "memmap")] +fn test_basic_map_anon_unify() { + run(|| { + let map_options = MmapOptions::default().len(ARENA_SIZE as u32); + basic_in(SkipList::map_anon(TEST_OPTIONS, map_options).unwrap()); + }) +} + +fn iter_all_versions_mvcc(l: SkipMap) { + l.get_or_insert(1, b"a", b"a1", ()).unwrap(); + l.get_or_insert(3, b"a", b"a2", ()).unwrap(); + l.get_or_insert(1, b"c", b"c1", ()).unwrap(); + l.get_or_insert(3, b"c", b"c2", ()).unwrap(); + + let mut it = l.iter_all_versions(0); + let mut num = 0; + while it.next().is_some() { + num += 1; + } + assert_eq!(num, 0); + + let mut it = l.iter_all_versions(1); + let mut num = 0; + while it.next().is_some() { + num += 1; + } + assert_eq!(num, 2); 
+ + let mut it = l.iter_all_versions(2); + let mut num = 0; + while it.next().is_some() { + num += 1; + } + assert_eq!(num, 2); + + let mut it = l.iter_all_versions(3); + let mut num = 0; + while it.next().is_some() { + num += 1; + } + assert_eq!(num, 4); + + let mut it = l.iter_all_versions(0); + assert!(it.seek_lower_bound(Bound::Unbounded).is_none()); + assert!(it.seek_upper_bound(Bound::Unbounded).is_none()); + + let mut it = l.iter_all_versions(1); + let ent = it.seek_lower_bound(Bound::Unbounded).unwrap(); + assert_eq!(ent.key(), b"a"); + assert_eq!(ent.value().unwrap(), b"a1"); + assert_eq!(ent.version(), 1); + + let ent = it.seek_upper_bound(Bound::Unbounded).unwrap(); + assert_eq!(ent.key(), b"c"); + assert_eq!(ent.value().unwrap(), b"c1"); + assert_eq!(ent.version(), 1); + + let mut it = l.iter_all_versions(2); + let ent = it.seek_lower_bound(Bound::Unbounded).unwrap(); + assert_eq!(ent.key(), b"a"); + assert_eq!(ent.value().unwrap(), b"a1"); + assert_eq!(ent.version(), 1); + + let ent = it.seek_upper_bound(Bound::Unbounded).unwrap(); + assert_eq!(ent.key(), b"c"); + assert_eq!(ent.value().unwrap(), b"c1"); + assert_eq!(ent.version(), 1); + + let mut it = l.iter_all_versions(3); + + let ent = it.seek_upper_bound(Bound::Excluded(b"b")).unwrap(); + assert_eq!(ent.key(), b"a"); + assert_eq!(ent.value().unwrap(), b"a2"); + assert_eq!(ent.version(), 3); + + let ent = it.seek_upper_bound(Bound::Included(b"c")).unwrap(); + assert_eq!(ent.key(), b"c"); + assert_eq!(ent.value().unwrap(), b"c2"); + assert_eq!(ent.version(), 3); + + let ent = it.seek_lower_bound(Bound::Excluded(b"b")).unwrap(); + assert_eq!(ent.key(), b"c"); + assert_eq!(ent.value().unwrap(), b"c2"); + assert_eq!(ent.version(), 3); + + let ent = it.seek_lower_bound(Bound::Included(b"c")).unwrap(); + assert_eq!(ent.key(), b"c"); + assert_eq!(ent.value().unwrap(), b"c2"); + assert_eq!(ent.version(), 3); +} + +#[test] +fn test_iter_all_versions_mvcc() { + run(|| 
iter_all_versions_mvcc(SkipList::new(TEST_OPTIONS).unwrap())); +} + +#[test] +fn test_iter_all_versions_mvcc_unify() { + run(|| iter_all_versions_mvcc(SkipList::new(UNIFY_TEST_OPTIONS).unwrap())); +} + +#[test] +#[cfg(feature = "memmap")] +#[cfg_attr(miri, ignore)] +fn test_iter_all_versions_mvcc_map_mut() { + run(|| unsafe { + let dir = tempfile::tempdir().unwrap(); + let p = dir + .path() + .join("test_skipmap_iter_all_versions_mvcc_map_mut"); + let open_options = OpenOptions::default() + .create_new(Some(ARENA_SIZE as u32)) + .read(true) + .write(true); + let map_options = MmapOptions::default(); + iter_all_versions_mvcc( + SkipList::map_mut(p, Options::new(), open_options, map_options).unwrap(), + ); + }); +} + +#[test] +#[cfg(feature = "memmap")] +fn test_iter_all_versions_mvcc_map_anon() { + run(|| { + let map_options = MmapOptions::default().len(ARENA_SIZE as u32); + iter_all_versions_mvcc(SkipList::map_anon(Options::new(), map_options).unwrap()); + }) +} + +#[test] +#[cfg(feature = "memmap")] +fn test_iter_all_versions_mvcc_map_anon_unify() { + run(|| { + let map_options = MmapOptions::default().len(ARENA_SIZE as u32); + iter_all_versions_mvcc(SkipList::map_anon(UNIFY_TEST_OPTIONS, map_options).unwrap()); + }) +} + +fn ordering() { + let l = SkipList::with_comparator(TEST_OPTIONS, Descend).unwrap(); + + l.get_or_insert(1, b"a1", b"a1", ()).unwrap(); + l.get_or_insert(2, b"a2", b"a2", ()).unwrap(); + l.get_or_insert(3, b"a3", b"a3", ()).unwrap(); + + let mut it = l.iter_all_versions(3); + for i in (1..=3).rev() { + let ent = it.next().unwrap(); + assert_eq!(ent.key(), format!("a{i}").as_bytes()); + assert_eq!(ent.value().unwrap(), format!("a{i}").as_bytes()); + } +} + +#[test] +fn test_ordering() { + run(ordering); +} + +fn get_mvcc(l: SkipMap) { + l.get_or_insert(1, b"a", b"a1", ()).unwrap(); + l.get_or_insert(3, b"a", b"a2", ()).unwrap(); + l.get_or_insert(1, b"c", b"c1", ()).unwrap(); + l.get_or_insert(3, b"c", b"c2", ()).unwrap(); + + let ent = l.get(1, 
b"a").unwrap(); + assert_eq!(ent.key(), b"a"); + assert_eq!(ent.value(), b"a1"); + assert_eq!(ent.version(), 1); + + let ent = l.get(2, b"a").unwrap(); + assert_eq!(ent.key(), b"a"); + assert_eq!(ent.value(), b"a1"); + assert_eq!(ent.version(), 1); + + let ent = l.get(3, b"a").unwrap(); + assert_eq!(ent.key(), b"a"); + assert_eq!(ent.value(), b"a2"); + assert_eq!(ent.version(), 3); + + let ent = l.get(4, b"a").unwrap(); + assert_eq!(ent.key(), b"a"); + assert_eq!(ent.value(), b"a2"); + assert_eq!(ent.version(), 3); + + assert!(l.get(0, b"b").is_none()); + assert!(l.get(1, b"b").is_none()); + assert!(l.get(2, b"b").is_none()); + assert!(l.get(3, b"b").is_none()); + assert!(l.get(4, b"b").is_none()); + + let ent = l.get(1, b"c").unwrap(); + assert_eq!(ent.key(), b"c"); + assert_eq!(ent.value(), b"c1"); + assert_eq!(ent.version(), 1); + + let ent = l.get(2, b"c").unwrap(); + assert_eq!(ent.key(), b"c"); + assert_eq!(ent.value(), b"c1"); + assert_eq!(ent.version(), 1); + + let ent = l.get(3, b"c").unwrap(); + assert_eq!(ent.key(), b"c"); + assert_eq!(ent.value(), b"c2"); + assert_eq!(ent.version(), 3); + + let ent = l.get(4, b"c").unwrap(); + assert_eq!(ent.key(), b"c"); + assert_eq!(ent.value(), b"c2"); + assert_eq!(ent.version(), 3); + + assert!(l.get(5, b"d").is_none()); +} + +#[test] +fn test_get_mvcc() { + run(|| get_mvcc(SkipList::new(TEST_OPTIONS).unwrap())); +} + +#[test] +fn test_get_mvcc_unify() { + run(|| get_mvcc(SkipList::new(UNIFY_TEST_OPTIONS).unwrap())); +} + +#[test] +#[cfg(feature = "memmap")] +#[cfg_attr(miri, ignore)] +fn test_get_mvcc_map_mut() { + run(|| unsafe { + let dir = tempfile::tempdir().unwrap(); + let p = dir.path().join("test_skipmap_get_mvcc_map_mut"); + let open_options = OpenOptions::default() + .create_new(Some(ARENA_SIZE as u32)) + .read(true) + .write(true); + let map_options = MmapOptions::default(); + get_mvcc(SkipList::map_mut(p, Options::new(), open_options, map_options).unwrap()); + }) +} + +#[test] +#[cfg(feature = "memmap")] 
+fn test_get_mvcc_map_anon() { + run(|| { + let map_options = MmapOptions::default().len(ARENA_SIZE as u32); + get_mvcc(SkipList::map_anon(Options::new(), map_options).unwrap()); + }) +} + +#[test] +#[cfg(feature = "memmap")] +fn test_get_mvcc_map_anon_unify() { + run(|| { + let map_options = MmapOptions::default().len(ARENA_SIZE as u32); + get_mvcc(SkipList::map_anon(UNIFY_TEST_OPTIONS, map_options).unwrap()); + }) +} + +fn gt_in(l: SkipMap) { + l.get_or_insert(1, b"a", b"a1", ()).unwrap(); + l.get_or_insert(3, b"a", b"a2", ()).unwrap(); + l.get_or_insert(1, b"c", b"c1", ()).unwrap(); + l.get_or_insert(3, b"c", b"c2", ()).unwrap(); + l.get_or_insert(5, b"c", b"c3", ()).unwrap(); + + assert!(l.lower_bound(0, Bound::Excluded(b"a")).is_none()); + assert!(l.lower_bound(0, Bound::Excluded(b"b")).is_none()); + assert!(l.lower_bound(0, Bound::Excluded(b"c")).is_none()); + + let ent = l.lower_bound(1, Bound::Excluded(b"")).unwrap(); + assert_eq!(ent.key(), b"a"); + assert_eq!(ent.value(), b"a1"); + assert_eq!(ent.version(), 1); + + let ent = l.lower_bound(2, Bound::Excluded(b"")).unwrap(); + assert_eq!(ent.key(), b"a"); + assert_eq!(ent.value(), b"a1"); + assert_eq!(ent.version(), 1); + + let ent = l.lower_bound(3, Bound::Excluded(b"")).unwrap(); + assert_eq!(ent.key(), b"a"); + assert_eq!(ent.value(), b"a2"); + assert_eq!(ent.version(), 3); + + let ent = l.lower_bound(1, Bound::Excluded(b"a")).unwrap(); + assert_eq!(ent.key(), b"c"); + assert_eq!(ent.value(), b"c1"); + assert_eq!(ent.version(), 1); + + let ent = l.lower_bound(2, Bound::Excluded(b"a")).unwrap(); + assert_eq!(ent.key(), b"c"); + assert_eq!(ent.value(), b"c1"); + assert_eq!(ent.version(), 1); + + let ent = l.lower_bound(3, Bound::Excluded(b"a")).unwrap(); + assert_eq!(ent.key(), b"c"); + assert_eq!(ent.value(), b"c2"); + assert_eq!(ent.version(), 3); + + let ent = l.lower_bound(1, Bound::Excluded(b"b")).unwrap(); + assert_eq!(ent.key(), b"c"); + assert_eq!(ent.value(), b"c1"); + assert_eq!(ent.version(), 
1); + + let ent = l.lower_bound(2, Bound::Excluded(b"b")).unwrap(); + assert_eq!(ent.key(), b"c"); + assert_eq!(ent.value(), b"c1"); + assert_eq!(ent.version(), 1); + + let ent = l.lower_bound(3, Bound::Excluded(b"b")).unwrap(); + assert_eq!(ent.key(), b"c"); + assert_eq!(ent.value(), b"c2"); + assert_eq!(ent.version(), 3); + + let ent = l.lower_bound(4, Bound::Excluded(b"b")).unwrap(); + assert_eq!(ent.key(), b"c"); + assert_eq!(ent.value(), b"c2"); + assert_eq!(ent.version(), 3); + + let ent = l.lower_bound(5, Bound::Excluded(b"b")).unwrap(); + assert_eq!(ent.key(), b"c"); + assert_eq!(ent.value(), b"c3"); + assert_eq!(ent.version(), 5); + + let ent = l.lower_bound(6, Bound::Excluded(b"b")).unwrap(); + assert_eq!(ent.key(), b"c"); + assert_eq!(ent.value(), b"c3"); + assert_eq!(ent.version(), 5); + + assert!(l.lower_bound(1, Bound::Excluded(b"c")).is_none()); + assert!(l.lower_bound(2, Bound::Excluded(b"c")).is_none()); + assert!(l.lower_bound(3, Bound::Excluded(b"c")).is_none()); + assert!(l.lower_bound(4, Bound::Excluded(b"c")).is_none()); + assert!(l.lower_bound(5, Bound::Excluded(b"c")).is_none()); + assert!(l.lower_bound(6, Bound::Excluded(b"c")).is_none()); +} + +#[test] +fn test_gt() { + run(|| gt_in(SkipList::new(TEST_OPTIONS).unwrap())); +} + +#[test] +fn test_gt_unify() { + run(|| gt_in(SkipList::new(UNIFY_TEST_OPTIONS).unwrap())); +} + +#[test] +#[cfg(feature = "memmap")] +#[cfg_attr(miri, ignore)] +fn test_gt_map_mut() { + run(|| unsafe { + let dir = tempfile::tempdir().unwrap(); + let p = dir.path().join("test_skipmap_gt_map_mut"); + let open_options = OpenOptions::default() + .create_new(Some(ARENA_SIZE as u32)) + .read(true) + .write(true); + let map_options = MmapOptions::default(); + gt_in(SkipList::map_mut(p, Options::new(), open_options, map_options).unwrap()); + }) +} + +#[test] +#[cfg(feature = "memmap")] +fn test_gt_map_anon() { + run(|| { + let map_options = MmapOptions::default().len(ARENA_SIZE as u32); + 
gt_in(SkipList::map_anon(Options::new(), map_options).unwrap()); + }) +} + +#[test] +#[cfg(feature = "memmap")] +fn test_gt_map_anon_unify() { + run(|| { + let map_options = MmapOptions::default().len(ARENA_SIZE as u32); + gt_in(SkipList::map_anon(UNIFY_TEST_OPTIONS, map_options).unwrap()); + }) +} + +fn ge_in(l: SkipMap) { + l.get_or_insert(1, b"a", b"a1", ()).unwrap(); + l.get_or_insert(3, b"a", b"a2", ()).unwrap(); + l.get_or_insert(1, b"c", b"c1", ()).unwrap(); + l.get_or_insert(3, b"c", b"c2", ()).unwrap(); + + assert!(l.lower_bound(MIN_VERSION, Bound::Included(b"a")).is_none()); + assert!(l.lower_bound(MIN_VERSION, Bound::Included(b"b")).is_none()); + assert!(l.lower_bound(MIN_VERSION, Bound::Included(b"c")).is_none()); + + let ent = l.lower_bound(1, Bound::Included(b"a")).unwrap(); + assert_eq!(ent.key(), b"a"); + assert_eq!(ent.value(), b"a1"); + assert_eq!(ent.version(), 1); + + let ent = l.lower_bound(2, Bound::Included(b"a")).unwrap(); + assert_eq!(ent.key(), b"a"); + assert_eq!(ent.value(), b"a1"); + assert_eq!(ent.version(), 1); + + let ent = l.lower_bound(3, Bound::Included(b"a")).unwrap(); + assert_eq!(ent.key(), b"a"); + assert_eq!(ent.value(), b"a2"); + assert_eq!(ent.version(), 3); + + let ent = l.lower_bound(4, Bound::Included(b"a")).unwrap(); + assert_eq!(ent.key(), b"a"); + assert_eq!(ent.value(), b"a2"); + assert_eq!(ent.version(), 3); + + let ent = l.lower_bound(1, Bound::Included(b"b")).unwrap(); + assert_eq!(ent.key(), b"c"); + assert_eq!(ent.value(), b"c1"); + assert_eq!(ent.version(), 1); + + let ent = l.lower_bound(2, Bound::Included(b"b")).unwrap(); + assert_eq!(ent.key(), b"c"); + assert_eq!(ent.value(), b"c1"); + assert_eq!(ent.version(), 1); + + let ent = l.lower_bound(3, Bound::Included(b"b")).unwrap(); + assert_eq!(ent.key(), b"c"); + assert_eq!(ent.value(), b"c2"); + assert_eq!(ent.version(), 3); + + let ent = l.lower_bound(4, Bound::Included(b"b")).unwrap(); + assert_eq!(ent.key(), b"c"); + assert_eq!(ent.value(), b"c2"); + 
assert_eq!(ent.version(), 3); + + let ent = l.lower_bound(1, Bound::Included(b"c")).unwrap(); + assert_eq!(ent.key(), b"c"); + assert_eq!(ent.value(), b"c1"); + assert_eq!(ent.version(), 1); + + let ent = l.lower_bound(2, Bound::Included(b"c")).unwrap(); + assert_eq!(ent.key(), b"c"); + assert_eq!(ent.value(), b"c1"); + assert_eq!(ent.version(), 1); + + let ent = l.lower_bound(3, Bound::Included(b"c")).unwrap(); + assert_eq!(ent.key(), b"c"); + assert_eq!(ent.value(), b"c2"); + assert_eq!(ent.version(), 3); + + let ent = l.lower_bound(4, Bound::Included(b"c")).unwrap(); + assert_eq!(ent.key(), b"c"); + assert_eq!(ent.value(), b"c2"); + assert_eq!(ent.version(), 3); + + assert!(l.lower_bound(MIN_VERSION, Bound::Included(b"d")).is_none()); + assert!(l.lower_bound(1, Bound::Included(b"d")).is_none()); + assert!(l.lower_bound(2, Bound::Included(b"d")).is_none()); + assert!(l.lower_bound(3, Bound::Included(b"d")).is_none()); + assert!(l.lower_bound(4, Bound::Included(b"d")).is_none()); +} + +#[test] +fn test_ge() { + run(|| ge_in(SkipList::new(TEST_OPTIONS).unwrap())); +} + +#[test] +fn test_ge_unify() { + run(|| ge_in(SkipList::new(UNIFY_TEST_OPTIONS).unwrap())); +} + +#[test] +#[cfg(feature = "memmap")] +#[cfg_attr(miri, ignore)] +fn test_ge_map_mut() { + run(|| unsafe { + let dir = tempfile::tempdir().unwrap(); + let p = dir.path().join("test_skipmap_ge_map_mut"); + let open_options = OpenOptions::default() + .create_new(Some(ARENA_SIZE as u32)) + .read(true) + .write(true); + let map_options = MmapOptions::default(); + ge_in(SkipList::map_mut(p, Options::new(), open_options, map_options).unwrap()); + }) +} + +#[test] +#[cfg(feature = "memmap")] +fn test_ge_map_anon() { + run(|| { + let map_options = MmapOptions::default().len(ARENA_SIZE as u32); + ge_in(SkipList::map_anon(Options::new(), map_options).unwrap()); + }) +} + +#[test] +#[cfg(feature = "memmap")] +fn test_ge_map_anon_unify() { + run(|| { + let map_options = MmapOptions::default().len(ARENA_SIZE as u32); + 
ge_in(SkipList::map_anon(UNIFY_TEST_OPTIONS, map_options).unwrap()); + }) +} + +fn le_in(l: SkipMap) { + l.get_or_insert(1, b"a", b"a1", ()).unwrap(); + l.get_or_insert(3, b"a", b"a2", ()).unwrap(); + l.get_or_insert(1, b"c", b"c1", ()).unwrap(); + l.get_or_insert(3, b"c", b"c2", ()).unwrap(); + + assert!(l.upper_bound(MIN_VERSION, Bound::Included(b"a")).is_none()); + assert!(l.upper_bound(MIN_VERSION, Bound::Included(b"b")).is_none()); + assert!(l.upper_bound(MIN_VERSION, Bound::Included(b"c")).is_none()); + + let ent = l.upper_bound(1, Bound::Included(b"a")).unwrap(); + assert_eq!(ent.key(), b"a"); + assert_eq!(ent.value(), b"a1"); + assert_eq!(ent.version(), 1); + + let ent = l.upper_bound(2, Bound::Included(b"a")).unwrap(); + assert_eq!(ent.key(), b"a"); + assert_eq!(ent.value(), b"a1"); + assert_eq!(ent.version(), 1); + + let ent = l.upper_bound(3, Bound::Included(b"a")).unwrap(); + assert_eq!(ent.key(), b"a"); + assert_eq!(ent.value(), b"a2"); + assert_eq!(ent.version(), 3); + + let ent = l.upper_bound(4, Bound::Included(b"a")).unwrap(); + assert_eq!(ent.key(), b"a"); + assert_eq!(ent.value(), b"a2"); + assert_eq!(ent.version(), 3); + + let ent = l.upper_bound(1, Bound::Included(b"b")).unwrap(); + assert_eq!(ent.key(), b"a"); + assert_eq!(ent.value(), b"a1"); + assert_eq!(ent.version(), 1); + + let ent = l.upper_bound(2, Bound::Included(b"b")).unwrap(); + assert_eq!(ent.key(), b"a"); + assert_eq!(ent.value(), b"a1"); + assert_eq!(ent.version(), 1); + + let ent = l.upper_bound(3, Bound::Included(b"b")).unwrap(); + assert_eq!(ent.key(), b"a"); + assert_eq!(ent.value(), b"a2"); + assert_eq!(ent.version(), 3); + + let ent = l.upper_bound(4, Bound::Included(b"b")).unwrap(); + assert_eq!(ent.key(), b"a"); + assert_eq!(ent.value(), b"a2"); + assert_eq!(ent.version(), 3); + + let ent = l.upper_bound(1, Bound::Included(b"c")).unwrap(); + assert_eq!(ent.key(), b"c"); + assert_eq!(ent.value(), b"c1"); + assert_eq!(ent.version(), 1); + + let ent = l.upper_bound(2, 
Bound::Included(b"c")).unwrap(); + assert_eq!(ent.key(), b"c"); + assert_eq!(ent.value(), b"c1"); + assert_eq!(ent.version(), 1); + + let ent = l.upper_bound(3, Bound::Included(b"c")).unwrap(); + assert_eq!(ent.key(), b"c"); + assert_eq!(ent.value(), b"c2"); + assert_eq!(ent.version(), 3); + + let ent = l.upper_bound(4, Bound::Included(b"c")).unwrap(); + assert_eq!(ent.key(), b"c"); + assert_eq!(ent.value(), b"c2"); + assert_eq!(ent.version(), 3); + + let ent = l.upper_bound(1, Bound::Included(b"d")).unwrap(); + assert_eq!(ent.key(), b"c"); + assert_eq!(ent.value(), b"c1"); + assert_eq!(ent.version(), 1); + + let ent = l.upper_bound(2, Bound::Included(b"d")).unwrap(); + assert_eq!(ent.key(), b"c"); + assert_eq!(ent.value(), b"c1"); + assert_eq!(ent.version(), 1); + + let ent = l.upper_bound(3, Bound::Included(b"d")).unwrap(); + assert_eq!(ent.key(), b"c"); + assert_eq!(ent.value(), b"c2"); + assert_eq!(ent.version(), 3); + + let ent = l.upper_bound(4, Bound::Included(b"d")).unwrap(); + assert_eq!(ent.key(), b"c"); + assert_eq!(ent.value(), b"c2"); + assert_eq!(ent.version(), 3); +} + +#[test] +fn test_le() { + run(|| le_in(SkipList::new(TEST_OPTIONS).unwrap())); +} + +#[test] +fn test_le_unify() { + run(|| le_in(SkipList::new(UNIFY_TEST_OPTIONS).unwrap())); +} + +#[test] +#[cfg(feature = "memmap")] +#[cfg_attr(miri, ignore)] +fn test_le_map_mut() { + run(|| unsafe { + let dir = tempfile::tempdir().unwrap(); + let p = dir.path().join("test_skipmap_le_map_mut"); + let open_options = OpenOptions::default() + .create_new(Some(ARENA_SIZE as u32)) + .read(true) + .write(true); + let map_options = MmapOptions::default(); + le_in(SkipList::map_mut(p, Options::new(), open_options, map_options).unwrap()); + }) +} + +#[test] +#[cfg(feature = "memmap")] +fn test_le_map_anon() { + run(|| { + let map_options = MmapOptions::default().len(ARENA_SIZE as u32); + le_in(SkipList::map_anon(Options::new(), map_options).unwrap()); + }) +} + +#[test] +#[cfg(feature = "memmap")] +fn 
test_le_map_anon_unify() { + run(|| { + let map_options = MmapOptions::default().len(ARENA_SIZE as u32); + le_in(SkipList::map_anon(UNIFY_TEST_OPTIONS, map_options).unwrap()); + }) +} + +fn lt_in(l: SkipMap) { + l.get_or_insert(1, b"a", b"a1", ()).unwrap(); + l.get_or_insert(3, b"a", b"a2", ()).unwrap(); + l.get_or_insert(1, b"c", b"c1", ()).unwrap(); + l.get_or_insert(3, b"c", b"c2", ()).unwrap(); + + assert!(l.upper_bound(MIN_VERSION, Bound::Excluded(b"a")).is_none()); + assert!(l.upper_bound(MIN_VERSION, Bound::Excluded(b"b")).is_none()); + assert!(l.upper_bound(MIN_VERSION, Bound::Excluded(b"c")).is_none()); + assert!(l.upper_bound(1, Bound::Excluded(b"a")).is_none()); + assert!(l.upper_bound(2, Bound::Excluded(b"a")).is_none()); + + let ent = l.upper_bound(1, Bound::Excluded(b"b")).unwrap(); + assert_eq!(ent.key(), b"a"); + assert_eq!(ent.value(), b"a1"); + assert_eq!(ent.version(), 1); + + let ent = l.upper_bound(2, Bound::Excluded(b"b")).unwrap(); + assert_eq!(ent.key(), b"a"); + assert_eq!(ent.value(), b"a1"); + assert_eq!(ent.version(), 1); + + let ent = l.upper_bound(3, Bound::Excluded(b"b")).unwrap(); + assert_eq!(ent.key(), b"a"); + assert_eq!(ent.value(), b"a2"); + assert_eq!(ent.version(), 3); + + let ent = l.upper_bound(4, Bound::Excluded(b"b")).unwrap(); + assert_eq!(ent.key(), b"a"); + assert_eq!(ent.value(), b"a2"); + assert_eq!(ent.version(), 3); + + let ent = l.upper_bound(1, Bound::Excluded(b"c")).unwrap(); + assert_eq!(ent.key(), b"a"); + assert_eq!(ent.value(), b"a1"); + assert_eq!(ent.version(), 1); + + let ent = l.upper_bound(2, Bound::Excluded(b"c")).unwrap(); + assert_eq!(ent.key(), b"a"); + assert_eq!(ent.value(), b"a1"); + assert_eq!(ent.version(), 1); + + let ent = l.upper_bound(3, Bound::Excluded(b"c")).unwrap(); + assert_eq!(ent.key(), b"a"); + assert_eq!(ent.value(), b"a2"); + assert_eq!(ent.version(), 3); + + let ent = l.upper_bound(4, Bound::Excluded(b"c")).unwrap(); + assert_eq!(ent.key(), b"a"); + assert_eq!(ent.value(), b"a2"); 
+ assert_eq!(ent.version(), 3); + + let ent = l.upper_bound(1, Bound::Excluded(b"d")).unwrap(); + assert_eq!(ent.key(), b"c"); + assert_eq!(ent.value(), b"c1"); + assert_eq!(ent.version(), 1); + + let ent = l.upper_bound(2, Bound::Excluded(b"d")).unwrap(); + assert_eq!(ent.key(), b"c"); + assert_eq!(ent.value(), b"c1"); + assert_eq!(ent.version(), 1); + + let ent = l.upper_bound(3, Bound::Excluded(b"d")).unwrap(); + assert_eq!(ent.key(), b"c"); + assert_eq!(ent.value(), b"c2"); + assert_eq!(ent.version(), 3); + + let ent = l.upper_bound(4, Bound::Excluded(b"d")).unwrap(); + assert_eq!(ent.key(), b"c"); + assert_eq!(ent.value(), b"c2"); + assert_eq!(ent.version(), 3); +} + +#[test] +fn test_lt() { + run(|| lt_in(SkipList::new(TEST_OPTIONS).unwrap())) +} + +#[test] +fn test_lt_unify() { + run(|| lt_in(SkipList::new(UNIFY_TEST_OPTIONS).unwrap())) +} + +#[test] +#[cfg(feature = "memmap")] +#[cfg_attr(miri, ignore)] +fn test_lt_map_mut() { + run(|| unsafe { + let dir = tempfile::tempdir().unwrap(); + let p = dir.path().join("test_skipmap_lt_map_mut"); + let open_options = OpenOptions::default() + .create_new(Some(ARENA_SIZE as u32)) + .read(true) + .write(true); + let map_options = MmapOptions::default(); + lt_in(SkipList::map_mut(p, Options::new(), open_options, map_options).unwrap()); + }) +} + +#[test] +#[cfg(feature = "memmap")] +fn test_lt_map_anon() { + run(|| { + let map_options = MmapOptions::default().len(ARENA_SIZE as u32); + lt_in(SkipList::map_anon(Options::new(), map_options).unwrap()); + }) +} + +#[test] +#[cfg(feature = "memmap")] +fn test_lt_map_anon_unify() { + run(|| { + let map_options = MmapOptions::default().len(ARENA_SIZE as u32); + lt_in(SkipList::map_anon(UNIFY_TEST_OPTIONS, map_options).unwrap()); + }) +} + +fn test_basic_large_testcases_in(l: SkipMap) { + let n = 1000; + + for i in 0..n { + l.get_or_insert(MIN_VERSION, &key(i), &new_value(i), ()) + .unwrap(); + } + + for i in 0..n { + let k = key(i); + let ent = l.get(MIN_VERSION, &k).unwrap(); + 
assert_eq!(new_value(i), ent.value()); + assert_eq!(ent.version(), 0); + assert_eq!(ent.key(), k); + } + + assert_eq!(n, l.len()); +} + +#[test] +fn test_basic_large_testcases() { + run(|| { + let l = SkipList::new(TEST_OPTIONS).unwrap(); + test_basic_large_testcases_in(l); + }) +} + +#[test] +fn test_basic_large_testcases_unify() { + run(|| { + let l = SkipList::new(UNIFY_TEST_OPTIONS).unwrap(); + test_basic_large_testcases_in(l); + }) +} + +#[test] +#[cfg(feature = "memmap")] +#[cfg_attr(miri, ignore)] +fn test_basic_large_testcases_map_mut() { + run(|| unsafe { + let dir = tempfile::tempdir().unwrap(); + let p = dir + .path() + .join("test_skipmap_basic_large_testcases_map_mut"); + let open_options = OpenOptions::default() + .create_new(Some(ARENA_SIZE as u32)) + .read(true) + .write(true); + let map_options = MmapOptions::default(); + let l = SkipList::map_mut(p, Options::new(), open_options, map_options).unwrap(); + test_basic_large_testcases_in(l); + }) +} + +#[test] +#[cfg(feature = "memmap")] +fn test_basic_large_testcases_map_anon() { + run(|| { + let map_options = MmapOptions::default().len(ARENA_SIZE as u32); + let l = SkipList::map_anon(Options::new(), map_options).unwrap(); + test_basic_large_testcases_in(l); + }) +} + +#[test] +#[cfg(feature = "memmap")] +fn test_basic_large_testcases_map_anon_unify() { + run(|| { + let map_options = MmapOptions::default().len(ARENA_SIZE as u32); + let l = SkipList::map_anon(UNIFY_TEST_OPTIONS, map_options).unwrap(); + test_basic_large_testcases_in(l); + }) +} + +#[cfg(feature = "std")] +fn test_concurrent_basic_runner(l: SkipMap) { + #[cfg(not(any(miri, feature = "loom")))] + const N: usize = 1000; + #[cfg(any(miri, feature = "loom"))] + const N: usize = 5; + + for i in 0..N { + let l = l.clone(); + std::thread::spawn(move || { + l.get_or_insert(MIN_VERSION, &key(i), &new_value(i), ()) + .unwrap(); + }); + } + while l.refs() > 1 {} + for i in 0..N { + let l = l.clone(); + std::thread::spawn(move || { + let k = 
key(i); + assert_eq!( + l.get(MIN_VERSION, &k).unwrap().value(), + new_value(i), + "broken: {i}" + ); + }); + } +} + +#[test] +#[cfg(feature = "std")] +fn test_concurrent_basic() { + run(|| { + let l = SkipList::new(TEST_OPTIONS).unwrap().with_yield_now(); + test_concurrent_basic_runner(l); + }) +} + +#[test] +#[cfg(feature = "std")] +fn test_concurrent_basic_unify() { + run(|| { + let l = SkipList::new(UNIFY_TEST_OPTIONS).unwrap().with_yield_now(); + test_concurrent_basic_runner(l); + }) +} + +#[test] +#[cfg(feature = "memmap")] +#[cfg_attr(miri, ignore)] +fn test_concurrent_basic_map_mut() { + run(|| unsafe { + let dir = tempfile::tempdir().unwrap(); + let p = dir.path().join("test_skipmap_concurrent_basic_map_mut"); + let open_options = OpenOptions::default() + .create_new(Some(ARENA_SIZE as u32)) + .read(true) + .write(true); + let map_options = MmapOptions::default(); + let l = SkipList::map_mut(p, Options::new(), open_options, map_options) + .unwrap() + .with_yield_now(); + test_concurrent_basic_runner(l); + }) +} + +#[test] +#[cfg(feature = "memmap")] +fn test_concurrent_basic_map_anon() { + run(|| { + let map_options = MmapOptions::default().len(ARENA_SIZE as u32); + test_concurrent_basic_runner( + SkipList::map_anon(Options::new(), map_options) + .unwrap() + .with_yield_now(), + ); + }) +} + +#[test] +#[cfg(feature = "memmap")] +fn test_concurrent_basic_map_anon_unify() { + run(|| { + let map_options = MmapOptions::default().len(ARENA_SIZE as u32); + test_concurrent_basic_runner( + SkipList::map_anon(UNIFY_TEST_OPTIONS, map_options) + .unwrap() + .with_yield_now(), + ); + }) +} + +#[cfg(all(feature = "std", not(miri)))] +fn test_concurrent_basic_big_values_runner(l: SkipMap) { + #[cfg(not(any(miri, feature = "loom")))] + const N: usize = 100; + #[cfg(any(miri, feature = "loom"))] + const N: usize = 5; + + for i in 0..N { + let l = l.clone(); + std::thread::spawn(move || { + l.get_or_insert(MIN_VERSION, &key(i), &big_value(i), ()) + .unwrap(); + }); + } + 
while l.refs() > 1 {} + // assert_eq!(N, l.len()); + for i in 0..N { + let l = l.clone(); + std::thread::spawn(move || { + let k = key(i); + assert_eq!( + l.get(MIN_VERSION, &k).unwrap().value(), + big_value(i), + "broken: {i}" + ); + }); + } + while l.refs() > 1 {} +} + +#[test] +#[cfg(all(feature = "std", not(miri)))] +fn test_concurrent_basic_big_values() { + run(|| { + test_concurrent_basic_big_values_runner( + SkipList::new(BIG_TEST_OPTIONS).unwrap().with_yield_now(), + ); + }) +} + +#[test] +#[cfg(all(feature = "std", not(miri)))] +fn test_concurrent_basic_big_values_unify() { + run(|| { + test_concurrent_basic_big_values_runner( + SkipList::new(UNIFY_BIG_TEST_OPTIONS) + .unwrap() + .with_yield_now(), + ); + }) +} + +#[test] +#[cfg(all(feature = "memmap", not(miri)))] +fn test_concurrent_basic_big_values_map_mut() { + run(|| unsafe { + let dir = tempfile::tempdir().unwrap(); + let p = dir + .path() + .join("test_skipmap_concurrent_basic_big_values_map_mut"); + let open_options = OpenOptions::default() + .create_new(Some(120 << 20)) + .read(true) + .write(true); + let map_options = MmapOptions::default(); + test_concurrent_basic_big_values_runner( + SkipList::map_mut(p, Options::new(), open_options, map_options) + .unwrap() + .with_yield_now(), + ); + }) +} + +#[test] +#[cfg(all(feature = "memmap", not(miri)))] +fn test_concurrent_basic_big_values_map_anon() { + run(|| { + let map_options = MmapOptions::default().len(120 << 20); + test_concurrent_basic_big_values_runner( + SkipList::map_anon(Options::new(), map_options) + .unwrap() + .with_yield_now(), + ); + }) +} + +#[test] +#[cfg(all(feature = "memmap", not(miri)))] +fn test_concurrent_basic_big_values_map_anon_unify() { + run(|| { + let map_options = MmapOptions::default().len(120 << 20); + test_concurrent_basic_big_values_runner( + SkipList::map_anon(UNIFY_BIG_TEST_OPTIONS, map_options) + .unwrap() + .with_yield_now(), + ); + }) +} + +#[cfg(feature = "std")] +fn concurrent_one_key(l: SkipMap) { + 
#[cfg(not(any(miri, feature = "loom")))] + const N: usize = 5; + #[cfg(any(miri, feature = "loom"))] + const N: usize = 5; + + let wg = WaitGroup::new(); + for i in 0..N { + let wg = wg.add(1); + let l = l.clone(); + std::thread::spawn(move || { + let _ = l.get_or_insert(MIN_VERSION, b"thekey", &make_value(i), ()); + wg.done(); + }); + } + + wg.wait(); + + let saw_value = Arc::new(crate::sync::AtomicU32::new(0)); + for _ in 0..N { + let wg = wg.add(1); + let l = l.clone(); + let saw_value = saw_value.clone(); + std::thread::spawn(move || { + let ent = l.get(MIN_VERSION, b"thekey").unwrap(); + let val = ent.value(); + let num: usize = core::str::from_utf8(&val[1..]).unwrap().parse().unwrap(); + assert!((0..N).contains(&num)); + + let mut it = l.iter_all_versions(MIN_VERSION); + let ent = it.seek_lower_bound(Bound::Included(b"thekey")).unwrap(); + let val = ent.value().unwrap(); + let num: usize = core::str::from_utf8(&val[1..]).unwrap().parse().unwrap(); + assert!((0..N).contains(&num)); + assert_eq!(ent.key(), b"thekey"); + saw_value.fetch_add(1, Ordering::SeqCst); + wg.done(); + }); + } + + wg.wait(); + + assert_eq!(N, saw_value.load(Ordering::SeqCst) as usize); + assert_eq!(l.len(), 1); +} + +#[test] +#[cfg(feature = "std")] +fn test_concurrent_one_key() { + run(|| { + concurrent_one_key(SkipList::new(TEST_OPTIONS).unwrap().with_yield_now()); + }) +} + +#[test] +#[cfg(feature = "std")] +fn test_concurrent_one_key_unify() { + run(|| { + concurrent_one_key(SkipList::new(UNIFY_TEST_OPTIONS).unwrap().with_yield_now()); + }) +} + +#[test] +#[cfg(feature = "memmap")] +#[cfg_attr(miri, ignore)] +fn test_concurrent_one_key_map_mut() { + run(|| unsafe { + let dir = tempfile::tempdir().unwrap(); + let p = dir.path().join("test_skipmap_concurrent_one_key_map_mut"); + let open_options = OpenOptions::default() + .create(Some(ARENA_SIZE as u32)) + .read(true) + .write(true); + let map_options = MmapOptions::default(); + concurrent_one_key( + SkipList::map_mut(p, 
Options::new(), open_options, map_options) + .unwrap() + .with_yield_now(), + ); + }) +} + +#[test] +#[cfg(feature = "memmap")] +fn test_concurrent_one_key_map_anon() { + run(|| { + let map_options = MmapOptions::default().len(ARENA_SIZE as u32); + concurrent_one_key( + SkipList::map_anon(Options::new(), map_options) + .unwrap() + .with_yield_now(), + ); + }) +} + +#[test] +#[cfg(feature = "memmap")] +fn test_concurrent_one_key_map_anon_unify() { + run(|| { + let map_options = MmapOptions::default().len(ARENA_SIZE as u32); + concurrent_one_key( + SkipList::map_anon(UNIFY_TEST_OPTIONS, map_options) + .unwrap() + .with_yield_now(), + ); + }) +} + +fn iter_all_versions_next(l: SkipMap) { + const N: usize = 100; + + for i in (0..N).rev() { + l.get_or_insert(MIN_VERSION, &make_int_key(i), &make_value(i), ()) + .unwrap(); + } + + let mut it = l.iter_all_versions(MIN_VERSION); + let mut ent = it.seek_lower_bound(Bound::Unbounded).unwrap(); + for i in 0..N { + assert_eq!(ent.key(), make_int_key(i)); + assert_eq!(ent.value().unwrap(), make_value(i)); + if i != N - 1 { + ent = it.next().unwrap(); + } + } + + assert!(it.next().is_none()); +} + +#[test] +fn test_iter_all_versions_next() { + run(|| iter_all_versions_next(SkipList::new(TEST_OPTIONS).unwrap())); +} + +#[test] +fn test_iter_all_versions_next_unify() { + run(|| iter_all_versions_next(SkipList::new(UNIFY_TEST_OPTIONS).unwrap())); +} + +#[test] +#[cfg(feature = "memmap")] +#[cfg_attr(miri, ignore)] +fn test_iter_all_versions_next_map_mut() { + run(|| unsafe { + let dir = tempfile::tempdir().unwrap(); + let p = dir + .path() + .join("test_skipmap_iter_all_versions_next_map_mut"); + let open_options = OpenOptions::default() + .create_new(Some(ARENA_SIZE as u32)) + .read(true) + .write(true); + let map_options = MmapOptions::default(); + iter_all_versions_next( + SkipList::map_mut(p, Options::new(), open_options, map_options).unwrap(), + ); + }) +} + +#[test] +#[cfg(feature = "memmap")] +fn 
test_iter_all_versions_next_map_anon() { + run(|| { + let map_options = MmapOptions::default().len(ARENA_SIZE as u32); + iter_all_versions_next(SkipList::map_anon(Options::new(), map_options).unwrap()); + }) +} + +#[test] +#[cfg(feature = "memmap")] +fn test_iter_all_versions_next_map_anon_unify() { + run(|| { + let map_options = MmapOptions::default().len(ARENA_SIZE as u32); + iter_all_versions_next(SkipList::map_anon(UNIFY_TEST_OPTIONS, map_options).unwrap()); + }) +} + +fn range_next(l: SkipMap) { + const N: usize = 100; + + for i in (0..N).rev() { + l.get_or_insert(MIN_VERSION, &make_int_key(i), &make_value(i), ()) + .unwrap(); + } + + let upper = make_int_key(50); + let mut it = l.range(MIN_VERSION, ..=upper.as_slice()); + let mut ent = it.seek_lower_bound(Bound::Unbounded); + for i in 0..N { + if i <= 50 { + { + let ent = ent.unwrap(); + assert_eq!(ent.key(), make_int_key(i)); + assert_eq!(ent.value(), make_value(i)); + } + ent = it.next(); + } else { + assert!(ent.is_none()); + ent = it.next(); + } + } + + assert!(it.next().is_none()); +} + +#[test] +fn test_range_next() { + run(|| range_next(SkipList::new(TEST_OPTIONS).unwrap())); +} + +#[test] +fn test_range_next_unify() { + run(|| range_next(SkipList::new(UNIFY_TEST_OPTIONS).unwrap())); +} + +#[test] +#[cfg(feature = "memmap")] +#[cfg_attr(miri, ignore)] +fn test_range_next_map_mut() { + run(|| unsafe { + let dir = tempfile::tempdir().unwrap(); + let p = dir.path().join("test_skipmap_range_next_map_mut"); + let open_options = OpenOptions::default() + .create_new(Some(ARENA_SIZE as u32)) + .read(true) + .write(true); + let map_options = MmapOptions::default(); + range_next( + SkipList::map_mut(p, Options::new(), open_options, map_options).unwrap(), + ); + }) +} + +#[test] +#[cfg(feature = "memmap")] +fn test_range_next_map_anon() { + run(|| { + let map_options = MmapOptions::default().len(ARENA_SIZE as u32); + range_next(SkipList::map_anon(Options::new(), map_options).unwrap()); + 
}) +} + +#[test] +#[cfg(feature = "memmap")] +fn test_range_next_map_anon_unify() { + run(|| { + let map_options = MmapOptions::default().len(ARENA_SIZE as u32); + range_next(SkipList::map_anon(UNIFY_TEST_OPTIONS, map_options).unwrap()); + }) +} + +fn iter_all_versions_prev(l: SkipMap) { + const N: usize = 100; + + for i in 0..N { + l.get_or_insert(MIN_VERSION, &make_int_key(i), &make_value(i), ()) + .unwrap(); + } + + let mut it = l.iter_all_versions(MIN_VERSION); + let mut ent = it.seek_upper_bound(Bound::Unbounded).unwrap(); + for i in (0..N).rev() { + assert_eq!(ent.key(), make_int_key(i)); + assert_eq!(ent.value().unwrap(), make_value(i)); + if i != 0 { + ent = it.next_back().unwrap(); + } + } + + assert!(it.next_back().is_none()); +} + +#[test] +fn test_iter_all_versions_next_back() { + run(|| iter_all_versions_prev(SkipList::new(TEST_OPTIONS).unwrap())) +} + +#[test] +#[cfg(feature = "memmap")] +#[cfg_attr(miri, ignore)] +fn test_iter_all_versions_prev_map_mut() { + run(|| unsafe { + let dir = tempfile::tempdir().unwrap(); + let p = dir + .path() + .join("test_skipmap_iter_all_versions_prev_map_mut"); + let open_options = OpenOptions::default() + .create_new(Some(ARENA_SIZE as u32)) + .read(true) + .write(true); + let map_options = MmapOptions::default(); + iter_all_versions_prev( + SkipList::map_mut(p, Options::new(), open_options, map_options).unwrap(), + ); + }) +} + +#[test] +#[cfg(feature = "memmap")] +fn test_iter_all_versions_prev_map_anon() { + run(|| { + let map_options = MmapOptions::default().len(ARENA_SIZE as u32); + iter_all_versions_prev(SkipList::map_anon(Options::new(), map_options).unwrap()); + }) +} + +#[test] +#[cfg(feature = "memmap")] +fn test_iter_all_versions_prev_map_anon_unify() { + run(|| { + let map_options = MmapOptions::default().len(ARENA_SIZE as u32); + iter_all_versions_prev(SkipList::map_anon(UNIFY_TEST_OPTIONS, map_options).unwrap()); + }) +} + +fn range_prev(l: SkipMap) { + const N: usize = 100; + + for i in 
0..N { + l.get_or_insert(MIN_VERSION, &make_int_key(i), &make_value(i), ()) + .unwrap(); + } + + let lower = make_int_key(50); + let mut it = l.range(MIN_VERSION, lower.as_slice()..); + let mut ent = it.seek_upper_bound(Bound::Unbounded); + for i in (0..N).rev() { + if i >= 50 { + { + let ent = ent.unwrap(); + assert_eq!(ent.key(), make_int_key(i)); + assert_eq!(ent.value(), make_value(i)); + } + ent = it.next_back(); + } else { + assert!(ent.is_none()); + ent = it.next_back(); + } + } + + assert!(it.next_back().is_none()); +} + +#[test] +fn test_range_prev() { + run(|| range_prev(SkipList::new(TEST_OPTIONS).unwrap())); +} + +#[test] +fn test_range_prev_unify() { + run(|| range_prev(SkipList::new(UNIFY_TEST_OPTIONS).unwrap())); +} + +#[test] +#[cfg(feature = "memmap")] +#[cfg_attr(miri, ignore)] +fn test_range_prev_map_mut() { + run(|| unsafe { + let dir = tempfile::tempdir().unwrap(); + let p = dir.path().join("test_skipmap_range_prev_map_mut"); + let open_options = OpenOptions::default() + .create_new(Some(ARENA_SIZE as u32)) + .read(true) + .write(true); + let map_options = MmapOptions::default(); + range_prev(SkipList::map_mut(p, Options::new(), open_options, map_options).unwrap()); + }) +} + +#[test] +#[cfg(feature = "memmap")] +fn test_range_prev_map_anon() { + run(|| { + let map_options = MmapOptions::default().len(ARENA_SIZE as u32); + range_prev(SkipList::map_anon(Options::new(), map_options).unwrap()); + }) +} + +#[test] +#[cfg(feature = "memmap")] +fn test_range_prev_map_anon_unify() { + run(|| { + let map_options = MmapOptions::default().len(ARENA_SIZE as u32); + range_prev(SkipList::map_anon(UNIFY_TEST_OPTIONS, map_options).unwrap()); + }) +} + +fn iter_all_versions_seek_ge(l: SkipMap) { + const N: usize = 100; + + for i in (0..N).rev() { + let v = i * 10 + 1000; + l.get_or_insert(MIN_VERSION, &make_int_key(v), &make_value(v), ()) + .unwrap(); + } + + let mut it = l.iter_all_versions(MIN_VERSION); + let ent = 
it.seek_lower_bound(Bound::Included(b"")).unwrap(); + assert_eq!(ent.key(), make_int_key(1000)); + assert_eq!(ent.value().unwrap(), make_value(1000)); + + let ent = it.seek_lower_bound(Bound::Included(b"01000")).unwrap(); + assert_eq!(ent.key(), make_int_key(1000)); + assert_eq!(ent.value().unwrap(), make_value(1000)); + + let ent = it.seek_lower_bound(Bound::Included(b"01005")).unwrap(); + assert_eq!(ent.key(), make_int_key(1010)); + assert_eq!(ent.value().unwrap(), make_value(1010)); + + let ent = it.seek_lower_bound(Bound::Included(b"01010")).unwrap(); + assert_eq!(ent.key(), make_int_key(1010)); + assert_eq!(ent.value().unwrap(), make_value(1010)); + + let ent = it.seek_lower_bound(Bound::Included(b"01020")).unwrap(); + assert_eq!(ent.key(), make_int_key(1020)); + assert_eq!(ent.value().unwrap(), make_value(1020)); + + let ent = it.seek_lower_bound(Bound::Included(b"01200")).unwrap(); + assert_eq!(ent.key(), make_int_key(1200)); + assert_eq!(ent.value().unwrap(), make_value(1200)); + + let ent = it.seek_lower_bound(Bound::Included(b"01100")).unwrap(); + assert_eq!(ent.key(), make_int_key(1100)); + assert_eq!(ent.value().unwrap(), make_value(1100)); + + let ent = it.seek_lower_bound(Bound::Included(b"99999")); + assert!(ent.is_none()); + + l.get_or_insert(MIN_VERSION, &[], &[], ()).unwrap(); + let ent = it.seek_lower_bound(Bound::Included(b"")).unwrap(); + assert_eq!(ent.key(), &[]); + assert_eq!(ent.value().unwrap(), &[]); + + let ent = it.seek_lower_bound(Bound::Included(b"")).unwrap(); + assert_eq!(ent.key(), &[]); + assert_eq!(ent.value().unwrap(), &[]); +} + +#[test] +fn test_iter_all_versions_seek_ge() { + run(|| iter_all_versions_seek_ge(SkipList::new(TEST_OPTIONS).unwrap())); +} + +#[test] +fn test_iter_all_versions_seek_ge_unify() { + run(|| iter_all_versions_seek_ge(SkipList::new(UNIFY_TEST_OPTIONS).unwrap())); +} + +#[test] +#[cfg(feature = "memmap")] +#[cfg_attr(miri, ignore)] +fn test_iter_all_versions_seek_ge_map_mut() { + run(|| unsafe { + let dir 
= tempfile::tempdir().unwrap(); + let p = dir + .path() + .join("test_skipmap_iter_all_versions_seek_ge_map_mut"); + let open_options = OpenOptions::default() + .create_new(Some(ARENA_SIZE as u32)) + .read(true) + .write(true); + let map_options = MmapOptions::default(); + iter_all_versions_seek_ge( + SkipList::map_mut(p, Options::new(), open_options, map_options).unwrap(), + ); + }) +} + +#[test] +#[cfg(feature = "memmap")] +fn test_iter_all_versions_seek_ge_map_anon() { + run(|| { + let map_options = MmapOptions::default().len(ARENA_SIZE as u32); + iter_all_versions_seek_ge(SkipList::map_anon(Options::new(), map_options).unwrap()); + }) +} + +#[test] +#[cfg(feature = "memmap")] +fn test_iter_all_versions_seek_ge_map_anon_unify() { + run(|| { + let map_options = MmapOptions::default().len(ARENA_SIZE as u32); + iter_all_versions_seek_ge(SkipList::map_anon(UNIFY_TEST_OPTIONS, map_options).unwrap()); + }) +} + +fn iter_all_versions_seek_lt(l: SkipMap) { + const N: usize = 100; + + for i in (0..N).rev() { + let v = i * 10 + 1000; + l.get_or_insert(MIN_VERSION, &make_int_key(v), &make_value(v), ()) + .unwrap(); + } + + let mut it = l.iter_all_versions(MIN_VERSION); + assert!(it.seek_upper_bound(Bound::Excluded(b"")).is_none()); + + let ent = it.seek_upper_bound(Bound::Excluded(b"01000")); + assert!(ent.is_none()); + + let ent = it.seek_upper_bound(Bound::Excluded(b"01001")).unwrap(); + assert_eq!(ent.key(), make_int_key(1000)); + assert_eq!(ent.value().unwrap(), make_value(1000)); + + let ent = it.seek_upper_bound(Bound::Excluded(b"01991")).unwrap(); + assert_eq!(ent.key(), make_int_key(1990)); + assert_eq!(ent.value().unwrap(), make_value(1990)); + + let ent = it.seek_upper_bound(Bound::Excluded(b"99999")).unwrap(); + assert_eq!(ent.key(), make_int_key(1990)); + assert_eq!(ent.value().unwrap(), make_value(1990)); + + l.get_or_insert(MIN_VERSION, &[], &[], ()).unwrap(); + assert!(l.0.lt(MIN_VERSION, &[], false).is_none()); + + let ent = 
it.seek_upper_bound(Bound::Excluded(b"")); + assert!(ent.is_none()); + + let ent = it.seek_upper_bound(Bound::Excluded(b"\x01")).unwrap(); + assert_eq!(ent.key(), &[]); + assert_eq!(ent.value().unwrap(), &[]); +} + +#[test] +fn test_iter_all_versions_seek_lt() { + run(|| iter_all_versions_seek_lt(SkipList::new(TEST_OPTIONS).unwrap())) +} + +#[test] +fn test_iter_all_versions_seek_lt_unify() { + run(|| iter_all_versions_seek_lt(SkipList::new(UNIFY_TEST_OPTIONS).unwrap())) +} + +#[test] +#[cfg(feature = "memmap")] +#[cfg_attr(miri, ignore)] +fn test_iter_all_versions_seek_lt_map_mut() { + run(|| unsafe { + let dir = tempfile::tempdir().unwrap(); + let p = dir + .path() + .join("test_skipmap_iter_all_versions_seek_lt_map_mut"); + let open_options = OpenOptions::default() + .create_new(Some(ARENA_SIZE as u32)) + .read(true) + .write(true); + let map_options = MmapOptions::default(); + iter_all_versions_seek_lt( + SkipList::map_mut(p, Options::new(), open_options, map_options).unwrap(), + ); + }) +} + +#[test] +#[cfg(feature = "memmap")] +fn test_iter_all_versions_seek_lt_map_anon() { + run(|| { + let map_options = MmapOptions::default().len(ARENA_SIZE as u32); + iter_all_versions_seek_lt(SkipList::map_anon(Options::new(), map_options).unwrap()); + }) +} + +#[test] +#[cfg(feature = "memmap")] +fn test_iter_all_versions_seek_lt_map_anon_unify() { + run(|| { + let map_options = MmapOptions::default().len(ARENA_SIZE as u32); + iter_all_versions_seek_lt(SkipList::map_anon(UNIFY_TEST_OPTIONS, map_options).unwrap()); + }) +} + +fn range(l: SkipMap) { + for i in 1..10 { + l.get_or_insert(MIN_VERSION, &make_int_key(i), &make_value(i), ()) + .unwrap(); + } + + let k3 = make_int_key(3); + let k7 = make_int_key(7); + let mut it = l.range(MIN_VERSION, k3.as_slice()..k7.as_slice()).clone(); + assert_eq!(it.bounds(), &(k3.as_slice()..k7.as_slice())); + + for i in 3..=6 { + let k = make_int_key(i); + let ent = it.seek_lower_bound(Bound::Included(&k)).unwrap(); + assert_eq!(ent.key(), 
make_int_key(i)); + assert_eq!(ent.value(), make_value(i)); + } + + for i in 1..3 { + let k = make_int_key(i); + let ent = it.seek_lower_bound(Bound::Included(&k)).unwrap(); + assert_eq!(ent.key(), make_int_key(3)); + assert_eq!(ent.value(), make_value(3)); + } + + for i in 7..10 { + let k = make_int_key(i); + assert!(it.seek_lower_bound(Bound::Included(&k)).is_none()); + } + + for i in 7..10 { + let k = make_int_key(i); + let ent = it.seek_upper_bound(Bound::Included(&k)).unwrap(); + assert_eq!(ent.key(), make_int_key(6)); + assert_eq!(ent.value(), make_value(6)); + } + + let ent = it + .seek_lower_bound(Bound::Included(&make_int_key(6))) + .unwrap(); + assert_eq!(ent.key(), make_int_key(6)); + assert_eq!(ent.value(), make_value(6)); + + assert!(it.next().is_none()); + + let ent = it + .seek_upper_bound(Bound::Included(&make_int_key(6))) + .unwrap(); + assert_eq!(ent.key(), make_int_key(6)); + assert_eq!(ent.value(), make_value(6)); + + assert!(it.next().is_none()); + + for i in 4..=7 { + let k = make_int_key(i); + let ent = it.seek_upper_bound(Bound::Excluded(&k)).unwrap(); + assert_eq!(ent.key(), make_int_key(i - 1)); + assert_eq!(ent.value(), make_value(i - 1)); + } + + for i in 7..10 { + let k = make_int_key(i); + let ent = it.seek_upper_bound(Bound::Excluded(&k)).unwrap(); + assert_eq!(ent.key(), make_int_key(6)); + assert_eq!(ent.value(), make_value(6)); + } + + for i in 1..3 { + let k = make_int_key(i); + let ent = it.seek_lower_bound(Bound::Excluded(&k)).unwrap(); + assert_eq!(ent.key(), make_int_key(3)); + assert_eq!(ent.value(), make_value(3)); + } + + for i in 1..4 { + let k = make_int_key(i); + assert!(it.seek_upper_bound(Bound::Excluded(&k)).is_none()); + } + + let ent = it + .seek_upper_bound(Bound::Excluded(&make_int_key(4))) + .unwrap(); + assert_eq!(ent.key(), make_int_key(3)); + assert_eq!(ent.value(), make_value(3)); + + assert!(it.next_back().is_none()); +} + +#[test] +fn test_range() { + run(|| range(SkipList::new(TEST_OPTIONS).unwrap())) +} + 
+#[test] +fn test_range_unify() { + run(|| range(SkipList::new(UNIFY_TEST_OPTIONS).unwrap())) +} + +#[test] +#[cfg(feature = "memmap")] +#[cfg_attr(miri, ignore)] +fn test_range_map_mut() { + run(|| unsafe { + let dir = tempfile::tempdir().unwrap(); + let p = dir.path().join("test_skipmap_range_map_mut"); + let open_options = OpenOptions::default() + .create_new(Some(ARENA_SIZE as u32)) + .read(true) + .write(true); + let map_options = MmapOptions::default(); + range(SkipList::map_mut(p, Options::new(), open_options, map_options).unwrap()); + }) +} + +#[test] +#[cfg(feature = "memmap")] +fn test_range_map_anon() { + run(|| { + let map_options = MmapOptions::default().len(ARENA_SIZE as u32); + range(SkipList::map_anon(Options::new(), map_options).unwrap()); + }) +} + +#[test] +#[cfg(feature = "memmap")] +fn test_range_map_anon_unify() { + run(|| { + let map_options = MmapOptions::default().len(ARENA_SIZE as u32); + range(SkipList::map_anon(UNIFY_TEST_OPTIONS, map_options).unwrap()); + }) +} + +fn iter_latest(l: SkipMap) { + const N: usize = 100; + + for i in 0..N { + l.get_or_insert(MIN_VERSION, &make_int_key(i), &make_value(i), ()) + .unwrap(); + } + + for i in 50..N { + l.get_or_insert(1, &make_int_key(i), &make_value(i + 1000), ()) + .unwrap(); + } + + for i in 0..50 { + l.get_or_insert(2, &make_int_key(i), &make_value(i + 1000), ()) + .unwrap(); + } + + let mut it = l.iter(4); + let mut num = 0; + for i in 0..N { + let ent = it.next().unwrap(); + assert_eq!(ent.key(), make_int_key(i)); + assert_eq!(ent.value(), make_value(i + 1000)); + + num += 1; + } + assert_eq!(num, N); +} + +#[test] +fn test_iter_latest() { + run(|| iter_latest(SkipList::new(TEST_OPTIONS).unwrap())) +} + +#[test] +fn test_iter_latest_unify() { + run(|| iter_latest(SkipList::new(UNIFY_TEST_OPTIONS).unwrap())) +} + +#[test] +#[cfg(feature = "memmap")] +#[cfg_attr(miri, ignore)] +fn test_iter_latest_map_mut() { + run(|| unsafe { + let dir = tempfile::tempdir().unwrap(); + let p = 
dir.path().join("test_skipmap_iter_latest_map_mut"); + let open_options = OpenOptions::default() + .create_new(Some(ARENA_SIZE as u32)) + .read(true) + .write(true); + let map_options = MmapOptions::default(); + iter_latest(SkipList::map_mut(p, Options::new(), open_options, map_options).unwrap()); + }) +} + +#[test] +#[cfg(feature = "memmap")] +fn test_iter_latest_map_anon() { + run(|| { + let map_options = MmapOptions::default().len(ARENA_SIZE as u32); + iter_latest(SkipList::map_anon(Options::new(), map_options).unwrap()); + }) +} + +#[test] +#[cfg(feature = "memmap")] +fn test_iter_latest_map_anon_unify() { + run(|| { + let map_options = MmapOptions::default().len(ARENA_SIZE as u32); + iter_latest(SkipList::map_anon(UNIFY_TEST_OPTIONS, map_options).unwrap()); + }) +} + +fn range_latest(l: SkipMap) { + const N: usize = 100; + + for i in 0..N { + l.get_or_insert(MIN_VERSION, &make_int_key(i), &make_value(i), ()) + .unwrap(); + } + + for i in 50..N { + l.get_or_insert(1, &make_int_key(i), &make_value(i + 1000), ()) + .unwrap(); + } + + for i in 0..50 { + l.get_or_insert(2, &make_int_key(i), &make_value(i + 1000), ()) + .unwrap(); + } + + let mut it = l.range::<[u8], _>(4, ..); + let mut num = 0; + for i in 0..N { + let ent = it.next().unwrap(); + assert_eq!(ent.key(), make_int_key(i)); + assert_eq!(ent.value(), make_value(i + 1000)); + + num += 1; + } + assert_eq!(num, N); +} + +#[test] +fn test_range_latest() { + run(|| range_latest(SkipList::new(TEST_OPTIONS).unwrap())) +} + +#[test] +fn test_range_latest_unify() { + run(|| range_latest(SkipList::new(UNIFY_TEST_OPTIONS).unwrap())) +} + +#[test] +#[cfg(feature = "memmap")] +#[cfg_attr(miri, ignore)] +fn test_range_latest_map_mut() { + run(|| unsafe { + let dir = tempfile::tempdir().unwrap(); + let p = dir.path().join("test_skipmap_range_latest_map_mut"); + let open_options = OpenOptions::default() + .create_new(Some(ARENA_SIZE as u32)) + .read(true) + .write(true); + let map_options = MmapOptions::default(); + 
range_latest(SkipList::map_mut(p, Options::new(), open_options, map_options).unwrap()); + }) +} + +#[test] +#[cfg(feature = "memmap")] +fn test_range_latest_map_anon() { + run(|| { + let map_options = MmapOptions::default().len(ARENA_SIZE as u32); + range_latest(SkipList::map_anon(Options::new(), map_options).unwrap()); + }) +} + +#[test] +#[cfg(feature = "memmap")] +fn test_range_latest_map_anon_unify() { + run(|| { + let map_options = MmapOptions::default().len(ARENA_SIZE as u32); + range_latest(SkipList::map_anon(UNIFY_TEST_OPTIONS, map_options).unwrap()); + }) +} + +#[test] +#[cfg(feature = "memmap")] +#[cfg_attr(miri, ignore)] +fn test_reopen_mmap() { + run(|| unsafe { + let dir = tempfile::tempdir().unwrap(); + let p = dir.path().join("reopen_skipmap"); + { + let open_options = OpenOptions::default() + .create(Some(ARENA_SIZE as u32)) + .read(true) + .write(true); + let map_options = MmapOptions::default(); + let l = SkipMap::map_mut(&p, Options::new(), open_options, map_options).unwrap(); + for i in 0..1000 { + l.get_or_insert(MIN_VERSION, &key(i), &new_value(i), ()) + .unwrap(); + } + l.flush().unwrap(); + } + + let open_options = OpenOptions::default().read(true); + let map_options = MmapOptions::default(); + let l = SkipMap::map(&p, Options::new(), open_options, map_options).unwrap(); + assert_eq!(1000, l.len()); + for i in 0..1000 { + let k = key(i); + let ent = l.get(MIN_VERSION, &k).unwrap(); + assert_eq!(new_value(i), ent.value()); + assert_eq!(ent.version(), 0); + assert_eq!(ent.key(), k); + } + }) +} + +#[test] +#[cfg(feature = "memmap")] +#[cfg_attr(miri, ignore)] +fn test_reopen_mmap2() { + run(|| unsafe { + use rand::seq::SliceRandom; + + let dir = tempfile::tempdir().unwrap(); + let p = dir.path().join("reopen_skipmap2"); + { + let open_options = OpenOptions::default() + .create(Some(ARENA_SIZE as u32)) + .read(true) + .write(true); + let map_options = MmapOptions::default(); + let l = + SkipMap::map_mut_with_comparator(&p, Options::new(), 
open_options, map_options, Ascend) + .unwrap(); + let mut data = (0..1000).collect::<Vec<usize>>(); + data.shuffle(&mut rand::thread_rng()); + for i in &data { + let i = *i; + l.get_or_insert(i as u64, &key(i), &new_value(i), ()) + .unwrap(); + } + l.flush_async().unwrap(); + assert_eq!(l.max_version(), 999); + assert_eq!(l.min_version(), 0); + + for i in data { + let k = key(i); + let ent = l.get(i as u64, &k).unwrap(); + assert_eq!(new_value(i), ent.value()); + assert_eq!(ent.version(), i as u64); + assert_eq!(ent.key(), k); + } + } + + let open_options = OpenOptions::default().read(true); + let map_options = MmapOptions::default(); + let l = + SkipMap::map_with_comparator(&p, Options::new(), open_options, map_options, Ascend).unwrap(); + assert_eq!(1000, l.len()); + let mut data = (0..1000).collect::<Vec<usize>>(); + data.shuffle(&mut rand::thread_rng()); + for i in data { + let k = key(i); + let ent = l.get(i as u64, &k).unwrap(); + assert_eq!(new_value(i), ent.value()); + assert_eq!(ent.version(), i as u64); + assert_eq!(ent.key(), k); + } + assert_eq!(l.max_version(), 999); + assert_eq!(l.min_version(), 0); + }) +} + +struct Person { + id: u32, + name: std::string::String, +} + +impl Person { + fn encoded_size(&self) -> usize { + 4 + self.name.len() + } +} + +fn get_or_insert_with_value(l: SkipMap) { + let alice = Person { + id: 1, + name: std::string::String::from("Alice"), + }; + + let encoded_size = alice.encoded_size() as u32; + + let vb = ValueBuilder::new(encoded_size, |val| { + assert_eq!(val.capacity(), encoded_size as usize); + assert!(val.is_empty()); + val.put_u32_le(alice.id).unwrap(); + assert_eq!(val.len(), 4); + assert_eq!(val.remaining(), encoded_size as usize - 4); + assert_eq!(&*val, alice.id.to_le_bytes()); + val[..4].copy_from_slice(&alice.id.to_be_bytes()); + assert_eq!(&*val, alice.id.to_be_bytes()); + val.put_slice(alice.name.as_bytes()).unwrap(); + assert_eq!(val.len(), encoded_size as usize); + let err = val.put_slice(&[1]).unwrap_err(); + assert_eq!( + 
std::string::ToString::to_string(&err), + "buffer does not have enough space (remaining 0, want 1)" + ); + Ok(()) + }); + + l.get_or_insert_with_value_builder::<()>(1, b"alice", vb, ()) + .unwrap(); +} + +#[test] +fn test_get_or_insert_with_value() { + run(|| { + get_or_insert_with_value(SkipList::new(TEST_OPTIONS).unwrap()); + }) +} + +#[test] +fn test_get_or_insert_with_value_unify() { + run(|| { + get_or_insert_with_value(SkipList::new(UNIFY_TEST_OPTIONS).unwrap()); + }) +} + +#[test] +#[cfg(feature = "memmap")] +#[cfg_attr(miri, ignore)] +fn test_get_or_insert_with_value_map_mut() { + run(|| unsafe { + let dir = tempfile::tempdir().unwrap(); + let p = dir + .path() + .join("test_skipmap_get_or_insert_with_value_map_mut"); + let open_options = OpenOptions::default() + .create_new(Some(ARENA_SIZE as u32)) + .read(true) + .write(true); + let map_options = MmapOptions::default(); + get_or_insert_with_value( + SkipList::map_mut(p, Options::new(), open_options, map_options).unwrap(), + ); + }) +} + +#[test] +#[cfg(feature = "memmap")] +fn test_get_or_insert_with_value_map_anon() { + run(|| { + let map_options = MmapOptions::default().len(ARENA_SIZE as u32); + get_or_insert_with_value(SkipList::map_anon(Options::new(), map_options).unwrap()); + }) +} + +#[test] +#[cfg(feature = "memmap")] +fn test_get_or_insert_with_value_map_anon_unify() { + run(|| { + let map_options = MmapOptions::default().len(ARENA_SIZE as u32); + get_or_insert_with_value(SkipList::map_anon(UNIFY_TEST_OPTIONS, map_options).unwrap()); + }) +} + +fn get_or_insert_with(l: SkipMap) { + let alice = Person { + id: 1, + name: std::string::String::from("Alice"), + }; + + let encoded_size = alice.encoded_size() as u32; + + let kb = KeyBuilder::new(5u8.into(), |key: &mut VacantBuffer| { + key.put_slice(b"alice").unwrap(); + Ok(()) + }); + + let vb = ValueBuilder::new(encoded_size, |val: &mut VacantBuffer| { + assert_eq!(val.capacity(), encoded_size as usize); + assert!(val.is_empty()); + 
val.put_u32_le(alice.id).unwrap(); + assert_eq!(val.len(), 4); + assert_eq!(val.remaining(), encoded_size as usize - 4); + assert_eq!(&*val, alice.id.to_le_bytes()); + val[..4].copy_from_slice(&alice.id.to_be_bytes()); + assert_eq!(&*val, alice.id.to_be_bytes()); + val.put_slice(alice.name.as_bytes()).unwrap(); + assert_eq!(val.len(), encoded_size as usize); + let err = val.put_slice(&[1]).unwrap_err(); + assert_eq!( + std::string::ToString::to_string(&err), + "buffer does not have enough space (remaining 0, want 1)" + ); + Ok(()) + }); + + l.get_or_insert_with_builders::<(), ()>(1, kb, vb, ()) + .unwrap(); +} + +#[test] +fn test_get_or_insert_with() { + run(|| get_or_insert_with(SkipList::new(TEST_OPTIONS).unwrap())) +} + +#[test] +fn test_get_or_insert_with_unify() { + run(|| get_or_insert_with(SkipList::new(UNIFY_TEST_OPTIONS).unwrap())) +} + +#[test] +#[cfg(feature = "memmap")] +#[cfg_attr(miri, ignore)] +fn test_get_or_insert_with_map_mut() { + run(|| unsafe { + let dir = tempfile::tempdir().unwrap(); + let p = dir.path().join("test_skipmap_get_or_insert_with_map_mut"); + let open_options = OpenOptions::default() + .create_new(Some(ARENA_SIZE as u32)) + .read(true) + .write(true); + let map_options = MmapOptions::default(); + get_or_insert_with(SkipList::map_mut(p, Options::new(), open_options, map_options).unwrap()); + }) +} + +#[test] +#[cfg(feature = "memmap")] +fn test_get_or_insert_with_map_anon() { + run(|| { + let map_options = MmapOptions::default().len(ARENA_SIZE as u32); + get_or_insert_with(SkipList::map_anon(Options::new(), map_options).unwrap()); + }) +} + +#[test] +#[cfg(feature = "memmap")] +fn test_get_or_insert_with_map_anon_unify() { + run(|| { + let map_options = MmapOptions::default().len(ARENA_SIZE as u32); + get_or_insert_with(SkipList::map_anon(UNIFY_TEST_OPTIONS, map_options).unwrap()); + }) +} + +fn insert_in(l: SkipMap) { + let k = 0u64.to_le_bytes(); + for i in 0..100 { + let v = new_value(i); + let old = l.insert(MIN_VERSION, &k, 
&v, ()).unwrap(); + if let Some(old) = old { + assert_eq!(old.key(), k); + assert_eq!(old.value(), new_value(i - 1)); + } + } + + let ent = l.get(MIN_VERSION, &k).unwrap(); + assert_eq!(ent.key(), k); + assert_eq!(ent.value(), new_value(99)); +} + +#[test] +fn test_insert_in() { + run(|| { + insert_in(SkipList::new(TEST_OPTIONS).unwrap()); + }) +} + +#[test] +fn test_insert_in_unify() { + run(|| { + insert_in(SkipList::new(UNIFY_TEST_OPTIONS).unwrap()); + }) +} + +#[test] +#[cfg(feature = "memmap")] +#[cfg_attr(miri, ignore)] +fn test_insert_in_map_mut() { + run(|| unsafe { + let dir = tempfile::tempdir().unwrap(); + let p = dir.path().join("test_skipmap_insert_in_map_mut"); + let open_options = OpenOptions::default() + .create_new(Some(ARENA_SIZE as u32)) + .read(true) + .write(true); + let map_options = MmapOptions::default(); + insert_in(SkipList::map_mut(p, Options::new(), open_options, map_options).unwrap()); + }) +} + +#[test] +#[cfg(feature = "memmap")] +fn test_insert_in_map_anon() { + run(|| { + let map_options = MmapOptions::default().len(ARENA_SIZE as u32); + insert_in(SkipList::map_anon(Options::new(), map_options).unwrap()); + }) +} + +#[test] +#[cfg(feature = "memmap")] +fn test_insert_in_map_anon_unify() { + run(|| { + let map_options = MmapOptions::default().len(ARENA_SIZE as u32); + insert_in(SkipList::map_anon(UNIFY_TEST_OPTIONS, map_options).unwrap()); + }) +} + +fn insert_with_value(l: SkipMap) { + let alice = Person { + id: 1, + name: std::string::String::from("Alice"), + }; + + let encoded_size = alice.encoded_size() as u32; + + let vb = ValueBuilder::new(encoded_size, |val| { + assert_eq!(val.capacity(), encoded_size as usize); + assert!(val.is_empty()); + val.put_u32_le(alice.id).unwrap(); + assert_eq!(val.len(), 4); + assert_eq!(val.remaining(), encoded_size as usize - 4); + assert_eq!(val, alice.id.to_le_bytes()); + val[..4].copy_from_slice(&alice.id.to_be_bytes()); + assert_eq!(val, alice.id.to_be_bytes()); + 
val.put_slice(alice.name.as_bytes()).unwrap(); + assert_eq!(val.len(), encoded_size as usize); + let err = val.put_slice(&[1]).unwrap_err(); + assert_eq!( + std::string::ToString::to_string(&err), + "buffer does not have enough space (remaining 0, want 1)" + ); + Ok(()) + }); + + l.insert_with_value_builder::<()>(1, b"alice", vb, ()) + .unwrap(); + + let alice2 = Person { + id: 2, + name: std::string::String::from("Alice"), + }; + + let vb = ValueBuilder::new(encoded_size, |val| { + assert_eq!(val.capacity(), encoded_size as usize); + assert!(val.is_empty()); + val.put_u32_le(alice2.id).unwrap(); + assert_eq!(val.len(), 4); + assert_eq!(val.remaining(), encoded_size as usize - 4); + assert_eq!(&*val, alice2.id.to_le_bytes()); + val[..4].copy_from_slice(&alice2.id.to_be_bytes()); + assert_eq!(&*val, alice2.id.to_be_bytes()); + val.put_slice(alice2.name.as_bytes()).unwrap(); + assert_eq!(val.len(), encoded_size as usize); + let err = val.put_slice(&[1]).unwrap_err(); + assert_eq!( + std::string::ToString::to_string(&err), + "buffer does not have enough space (remaining 0, want 1)" + ); + Ok(()) + }); + + let old = l + .insert_with_value_builder::<()>(1, b"alice", vb, ()) + .unwrap() + .unwrap(); + + assert_eq!(old.key(), b"alice"); + assert!(old.value().starts_with(&alice.id.to_be_bytes())); + + let ent = l.get(1, b"alice").unwrap(); + assert_eq!(ent.key(), b"alice"); + assert!(ent.value().starts_with(&alice2.id.to_be_bytes())); +} + +#[test] +fn test_insert_with_value() { + run(|| insert_with_value(SkipList::new(TEST_OPTIONS).unwrap())); +} + +#[test] +fn test_insert_with_value_unify() { + run(|| insert_with_value(SkipList::new(UNIFY_TEST_OPTIONS).unwrap())); +} + +#[test] +#[cfg(feature = "memmap")] +#[cfg_attr(miri, ignore)] +fn test_insert_with_value_map_mut() { + run(|| unsafe { + let dir = tempfile::tempdir().unwrap(); + let p = dir + .path() + .join("test_skipmap_get_or_insert_with_value_map_mut"); + let open_options = OpenOptions::default() + 
.create_new(Some(ARENA_SIZE as u32)) + .read(true) + .write(true); + let map_options = MmapOptions::default(); + insert_with_value(SkipList::map_mut(p, Options::new(), open_options, map_options).unwrap()); + }) +} + +#[test] +#[cfg(feature = "memmap")] +fn test_insert_with_value_map_anon() { + run(|| { + let map_options = MmapOptions::default().len(ARENA_SIZE as u32); + insert_with_value(SkipList::map_anon(Options::new(), map_options).unwrap()); + }) +} + +#[test] +#[cfg(feature = "memmap")] +fn test_insert_with_value_map_anon_unify() { + run(|| { + let map_options = MmapOptions::default().len(ARENA_SIZE as u32); + insert_with_value(SkipList::map_anon(UNIFY_TEST_OPTIONS, map_options).unwrap()); + }) +} + +fn insert_with(l: SkipMap) { + let alice = Person { + id: 1, + name: std::string::String::from("Alice"), + }; + + let encoded_size = alice.encoded_size() as u32; + + let kb = KeyBuilder::new(5u8.into(), |key: &mut VacantBuffer| { + key.put_slice(b"alice").unwrap(); + Ok(()) + }); + + let vb = ValueBuilder::new(encoded_size as u32, |val: &mut VacantBuffer| { + assert_eq!(val.capacity(), encoded_size as usize); + assert!(val.is_empty()); + val.put_u32_le(alice.id).unwrap(); + assert_eq!(val.len(), 4); + assert_eq!(val.remaining(), encoded_size as usize - 4); + assert_eq!(val, alice.id.to_le_bytes()); + val[..4].copy_from_slice(&alice.id.to_be_bytes()); + assert_eq!(val, alice.id.to_be_bytes()); + val.put_slice(alice.name.as_bytes()).unwrap(); + assert_eq!(val.len(), encoded_size as usize); + let err = val.put_slice(&[1]).unwrap_err(); + assert_eq!( + std::string::ToString::to_string(&err), + "buffer does not have enough space (remaining 0, want 1)" + ); + Ok(()) + }); + + l.insert_with_builders::<(), ()>(1, kb, vb, ()).unwrap(); + + let alice2 = Person { + id: 2, + name: std::string::String::from("Alice"), + }; + + let vb = ValueBuilder::new(encoded_size, |val: &mut VacantBuffer| { + assert_eq!(val.capacity(), encoded_size as usize); + assert!(val.is_empty()); + 
val.put_u32_le(alice2.id).unwrap(); + assert_eq!(val.len(), 4); + assert_eq!(val.remaining(), encoded_size as usize - 4); + assert_eq!(&*val, alice2.id.to_le_bytes()); + val[..4].copy_from_slice(&alice2.id.to_be_bytes()); + assert_eq!(&*val, alice2.id.to_be_bytes()); + val.put_slice(alice2.name.as_bytes()).unwrap(); + assert_eq!(val.len(), encoded_size as usize); + let err = val.put_slice(&[1]).unwrap_err(); + assert_eq!( + std::string::ToString::to_string(&err), + "buffer does not have enough space (remaining 0, want 1)" + ); + Ok(()) + }); + let old = l + .insert_with_builders::<(), ()>(1, kb, vb, ()) + .unwrap() + .unwrap(); + + assert_eq!(old.key(), b"alice"); + assert!(old.value().starts_with(&alice.id.to_be_bytes())); + + let ent = l.get(1, b"alice").unwrap(); + assert_eq!(ent.key(), b"alice"); + assert!(ent.value().starts_with(&alice2.id.to_be_bytes())); +} + +#[test] +fn test_insert_with() { + run(|| insert_with(SkipList::new(TEST_OPTIONS).unwrap())) +} + +#[test] +fn test_insert_with_unify() { + run(|| insert_with(SkipList::new(UNIFY_TEST_OPTIONS).unwrap())) +} + +#[test] +#[cfg(feature = "memmap")] +#[cfg_attr(miri, ignore)] +fn test_insert_with_map_mut() { + run(|| unsafe { + let dir = tempfile::tempdir().unwrap(); + let p = dir.path().join("test_skipmap_insert_with_map_mut"); + let open_options = OpenOptions::default() + .create_new(Some(ARENA_SIZE as u32)) + .read(true) + .write(true); + let map_options = MmapOptions::default(); + insert_with(SkipList::map_mut(p, Options::new(), open_options, map_options).unwrap()); + }) +} + +#[test] +#[cfg(feature = "memmap")] +fn test_insert_with_map_anon() { + run(|| { + let map_options = MmapOptions::default().len(ARENA_SIZE as u32); + insert_with(SkipList::map_anon(Options::new(), map_options).unwrap()); + }) +} + +#[test] +#[cfg(feature = "memmap")] +fn test_insert_with_map_anon_unify() { + run(|| { + let map_options = MmapOptions::default().len(ARENA_SIZE as u32); + 
insert_with(SkipList::map_anon(UNIFY_TEST_OPTIONS, map_options).unwrap()); + }) +} + +fn get_or_remove(l: SkipMap) { + for i in 0..100 { + let v = new_value(i); + l.insert(MIN_VERSION, &key(i), &v, ()).unwrap(); + } + + for i in 0..100 { + let k = key(i); + let old = l.get_or_remove(MIN_VERSION, &k, ()).unwrap().unwrap(); + assert_eq!(old.key(), k); + assert_eq!(old.value(), new_value(i)); + + let old = l.get_or_remove(MIN_VERSION, &k, ()).unwrap().unwrap(); + assert_eq!(old.key(), k); + assert_eq!(old.value(), new_value(i)); + } + + for i in 0..100 { + let k = key(i); + let ent = l.get(MIN_VERSION, &k).unwrap(); + assert_eq!(ent.key(), k); + assert_eq!(ent.value(), new_value(i)); + } +} + +#[test] +fn test_get_or_remove() { + run(|| get_or_remove(SkipList::new(TEST_OPTIONS).unwrap())) +} + +#[test] +fn test_get_or_remove_unify() { + run(|| get_or_remove(SkipList::new(UNIFY_TEST_OPTIONS).unwrap())) +} + +#[test] +#[cfg(feature = "memmap")] +#[cfg_attr(miri, ignore)] +fn test_get_or_remove_map_mut() { + run(|| unsafe { + let dir = tempfile::tempdir().unwrap(); + let p = dir.path().join("test_skipmap_get_or_remove_map_mut"); + let open_options = OpenOptions::default() + .create_new(Some(ARENA_SIZE as u32)) + .read(true) + .write(true); + let map_options = MmapOptions::default(); + get_or_remove(SkipList::map_mut(p, Options::new(), open_options, map_options).unwrap()); + }) +} + +#[test] +#[cfg(feature = "memmap")] +fn test_get_or_remove_map_anon() { + run(|| { + let map_options = MmapOptions::default().len(ARENA_SIZE as u32); + get_or_remove(SkipList::map_anon(Options::new(), map_options).unwrap()); + }) +} + +#[test] +#[cfg(feature = "memmap")] +fn test_get_or_remove_map_anon_unify() { + run(|| { + let map_options = MmapOptions::default().len(ARENA_SIZE as u32); + get_or_remove(SkipList::map_anon(UNIFY_TEST_OPTIONS, map_options).unwrap()); + }) +} + +fn remove(l: SkipMap) { + for i in 0..100 { + let v = new_value(i); + l.insert(MIN_VERSION, &key(i), &v, 
()).unwrap(); + } + + for i in 0..100 { + let k = key(i); + // no race, remove should succeed + let old = l + .compare_remove(MIN_VERSION, &k, (), Ordering::SeqCst, Ordering::Acquire) + .unwrap(); + assert!(old.is_none()); + + // key already removed + let old = l + .compare_remove(MIN_VERSION, &k, (), Ordering::SeqCst, Ordering::Acquire) + .unwrap(); + assert!(old.is_none()); + } + + for i in 0..100 { + let k = key(i); + let ent = l.get(MIN_VERSION, &k); + assert!(ent.is_none()); + } +} + +#[test] +fn test_remove() { + run(|| remove(SkipList::new(TEST_OPTIONS).unwrap())) +} + +#[test] +fn test_remove_unify() { + run(|| remove(SkipList::new(UNIFY_TEST_OPTIONS).unwrap())) +} + +#[test] +#[cfg(feature = "memmap")] +#[cfg_attr(miri, ignore)] +fn test_remove_map_mut() { + run(|| unsafe { + let dir = tempfile::tempdir().unwrap(); + let p = dir.path().join("test_skipmap_remove_map_mut"); + let open_options = OpenOptions::default() + .create_new(Some(ARENA_SIZE as u32)) + .read(true) + .write(true); + let map_options = MmapOptions::default(); + remove(SkipList::map_mut(p, Options::new(), open_options, map_options).unwrap()); + }) +} + +#[test] +#[cfg(feature = "memmap")] +fn test_remove_map_anon() { + run(|| { + let map_options = MmapOptions::default().len(ARENA_SIZE as u32); + remove(SkipList::map_anon(Options::new(), map_options).unwrap()); + }) +} + +#[test] +#[cfg(feature = "memmap")] +fn test_remove_map_anon_unify() { + run(|| { + let map_options = MmapOptions::default().len(ARENA_SIZE as u32); + remove(SkipList::map_anon(UNIFY_TEST_OPTIONS, map_options).unwrap()); + }) +} + +fn remove2(l: SkipMap) { + for i in 0..100 { + let v = new_value(i); + l.insert(MIN_VERSION, &key(i), &v, ()).unwrap(); + } + + for i in 0..100 { + let k = key(i); + // not found, remove should succeed + let old = l + .compare_remove(1, &k, (), Ordering::SeqCst, Ordering::Acquire) + .unwrap(); + assert!(old.is_none()); + + // no-race, remove should succeed + let old = l + 
.compare_remove(MIN_VERSION, &k, (), Ordering::SeqCst, Ordering::Acquire) + .unwrap(); + assert!(old.is_none()); + } + + for i in 0..100 { + let k = key(i); + let ent = l.get(MIN_VERSION, &k); + assert!(ent.is_none()); + } +} + +#[test] +fn test_remove2() { + run(|| remove2(SkipList::new(TEST_OPTIONS).unwrap())) +} + +#[test] +fn test_remove2_unify() { + run(|| remove2(SkipList::new(UNIFY_TEST_OPTIONS).unwrap())) +} + +#[test] +#[cfg(feature = "memmap")] +#[cfg_attr(miri, ignore)] +fn test_remove2_map_mut() { + run(|| unsafe { + let dir = tempfile::tempdir().unwrap(); + let p = dir.path().join("test_skipmap_remove2_map_mut"); + let open_options = OpenOptions::default() + .create_new(Some(ARENA_SIZE as u32)) + .read(true) + .write(true); + let map_options = MmapOptions::default(); + remove2(SkipList::map_mut(p, Options::new(), open_options, map_options).unwrap()); + }) +} + +#[test] +#[cfg(feature = "memmap")] +fn test_remove2_map_anon() { + run(|| unsafe { + let map_options = MmapOptions::default().len(ARENA_SIZE as u32); + remove2(SkipList::map_anon(Options::new(), map_options).unwrap()); + }) +} + +#[test] +#[cfg(feature = "memmap")] +fn test_remove2_map_anon_unify() { + run(|| { + let map_options = MmapOptions::default().len(ARENA_SIZE as u32); + remove2(SkipList::map_anon(UNIFY_TEST_OPTIONS, map_options).unwrap()); + }) +} diff --git a/src/sync/tests/map.rs b/src/sync/tests/map.rs new file mode 100644 index 0000000..d3926a8 --- /dev/null +++ b/src/sync/tests/map.rs @@ -0,0 +1,2539 @@ +use super::*; + +type SkipList = crate::sync::map::SkipMap; + +type SkipMap = crate::sync::map::SkipMap; + +fn empty_in(l: SkipMap) { + let mut it = l.iter(); + + assert!(it.seek_lower_bound(Bound::Unbounded).is_none()); + assert!(it.seek_upper_bound(Bound::Unbounded).is_none()); + assert!(it.seek_lower_bound(Bound::Included(b"aaa")).is_none()); + assert!(it.seek_upper_bound(Bound::Excluded(b"aaa")).is_none()); + assert!(it.seek_lower_bound(Bound::Excluded(b"aaa")).is_none()); + 
assert!(it.seek_upper_bound(Bound::Included(b"aaa")).is_none()); + assert!(l.first().is_none()); + assert!(l.last().is_none()); + assert!(l.get(b"aaa").is_none()); + assert!(!l.contains_key(b"aaa")); + assert!(l.allocated() > 0); + assert!(l.capacity() > 0); + assert_eq!(l.remaining(), l.capacity() - l.allocated()); +} + +#[test] +fn test_empty() { + run(|| empty_in(SkipList::new(Options::new()).unwrap())); +} + +#[test] +fn test_empty_unify() { + run(|| empty_in(SkipList::new(UNIFY_TEST_OPTIONS).unwrap())); +} + +#[test] +#[cfg(feature = "memmap")] +#[cfg_attr(miri, ignore)] +fn test_empty_map_mut() { + run(|| unsafe { + let dir = tempfile::tempdir().unwrap(); + let p = dir.path().join("test_skipmap_empty_map_mut"); + let open_options = OpenOptions::default() + .create_new(Some(1000)) + .read(true) + .write(true); + let map_options = MmapOptions::default(); + + let x = SkipList::map_mut(p, Options::new(), open_options, map_options).unwrap(); + empty_in(x); + }) +} + +#[test] +#[cfg(feature = "memmap")] +fn test_empty_map_anon() { + run(|| { + let map_options = MmapOptions::default().len(1000); + empty_in(SkipList::map_anon(Options::new(), map_options).unwrap()); + }) +} + +#[test] +#[cfg(feature = "memmap")] +fn test_empty_map_anon_unify() { + run(|| { + let map_options = MmapOptions::default().len(1000); + empty_in(SkipList::map_anon(TEST_OPTIONS, map_options).unwrap()); + }) +} + +fn full_in(l: impl FnOnce(usize) -> SkipMap) { + let l = l(1000); + let mut found_arena_full = false; + + for i in 0..100 { + if let Err(e) = l.get_or_insert(&make_int_key(i), &make_value(i)) { + assert!(matches!( + e, + Error::Arena(ArenaError::InsufficientSpace { .. 
}) + )); + found_arena_full = true; + break; + } + } + + assert!(found_arena_full); +} + +#[test] +fn test_full() { + run(|| { + full_in(|n| { + SkipList::new( + Options::new() + .with_capacity(n as u32) + .with_freelist(Freelist::None), + ) + .unwrap() + }) + }) +} + +#[test] +fn test_full_unify() { + run(|| { + full_in(|n| { + SkipList::new( + UNIFY_TEST_OPTIONS + .with_capacity(n as u32) + .with_freelist(Freelist::None), + ) + .unwrap() + }) + }) +} + +#[test] +#[cfg(feature = "memmap")] +#[cfg_attr(miri, ignore)] +fn test_full_map_mut() { + run(|| unsafe { + let dir = tempfile::tempdir().unwrap(); + let p = dir.path().join("test_skipmap_full_map_mut"); + + full_in(|n| { + let open_options = OpenOptions::default() + .create_new(Some(n as u32)) + .read(true) + .write(true); + let map_options = MmapOptions::default(); + SkipList::map_mut( + p, + Options::new().with_freelist(Freelist::None), + open_options, + map_options, + ) + .unwrap() + }); + }) +} + +#[test] +#[cfg(feature = "memmap")] +fn test_full_map_anon() { + run(|| { + full_in(|n| { + let map_options = MmapOptions::default().len(n as u32); + SkipList::map_anon(Options::new().with_freelist(Freelist::None), map_options).unwrap() + }); + }) +} + +#[test] +#[cfg(feature = "memmap")] +fn test_full_map_anon_unify() { + run(|| { + full_in(|n| { + let map_options = MmapOptions::default().len(n as u32); + SkipList::map_anon(Options::new().with_freelist(Freelist::None), map_options).unwrap() + }); + }) +} + +fn basic_in(mut l: SkipMap) { + // Try adding values. 
+ l.get_or_insert(b"key1", &make_value(1)).unwrap(); + l.get_or_insert(b"key3", &make_value(3)).unwrap(); + l.get_or_insert(b"key2", &make_value(2)).unwrap(); + assert_eq!(l.comparator(), &Ascend); + + { + let mut it = l.iter(); + let ent = it.seek_lower_bound(Bound::Included(b"key1")).unwrap(); + assert_eq!(ent.key(), b"key1"); + assert_eq!(ent.value(), &make_value(1)); + + let ent = it.seek_lower_bound(Bound::Included(b"key2")).unwrap(); + assert_eq!(ent.key(), b"key2"); + assert_eq!(ent.value(), &make_value(2)); + + let ent = it.seek_lower_bound(Bound::Included(b"key3")).unwrap(); + assert_eq!(ent.key(), b"key3"); + assert_eq!(ent.value(), &make_value(3)); + } + + l.get_or_insert("a".as_bytes(), &[]).unwrap(); + l.get_or_insert("a".as_bytes(), &[]).unwrap(); + + { + let mut it = l.iter(); + let ent = it.seek_lower_bound(Bound::Included(b"a")).unwrap(); + assert_eq!(ent.key(), b"a"); + assert_eq!(ent.value(), &[]); + + let ent = it.next().unwrap(); + assert_eq!(ent.key(), b"key1"); + assert_eq!(ent.value(), &make_value(1)); + } + + l.get_or_insert("b".as_bytes(), &[]).unwrap(); + l.get_or_insert("b".as_bytes(), &[]).unwrap(); + + { + let mut it = l.iter(); + let ent = it.seek_lower_bound(Bound::Included(b"b")).unwrap(); + assert_eq!(ent.key(), b"b"); + assert_eq!(ent.value(), &[]); + + let ent = it.next().unwrap(); + assert_eq!(ent.key(), b"key1"); + assert_eq!(ent.value(), &make_value(1)); + + let ent = it.entry().unwrap(); + assert_eq!(ent.key(), b"key1"); + assert_eq!(ent.value(), &make_value(1)); + } + + l.get_or_insert(b"b", &[]).unwrap().unwrap(); + + assert!(l.get_or_insert(b"c", &[]).unwrap().is_none()); + + unsafe { + l.clear().unwrap(); + } + + let l = l.clone(); + { + let mut it = l.iter(); + assert!(it.seek_lower_bound(Bound::Unbounded).is_none()); + assert!(it.seek_upper_bound(Bound::Unbounded).is_none()); + } + assert!(l.is_empty()); + + #[cfg(feature = "memmap")] + l.flush().unwrap(); + + #[cfg(feature = "memmap")] + l.flush_async().unwrap(); +} + 
+#[test] +fn test_basic() { + run(|| basic_in(SkipList::new(TEST_OPTIONS).unwrap())) +} + +#[test] +fn test_basic_unify() { + run(|| basic_in(SkipList::new(UNIFY_TEST_OPTIONS).unwrap())) +} + +#[test] +#[cfg(feature = "memmap")] +#[cfg_attr(miri, ignore)] +fn test_basic_map_mut() { + run(|| unsafe { + let dir = tempfile::tempdir().unwrap(); + let p = dir.path().join("test_skipmap_basic_map_mut"); + let open_options = OpenOptions::default() + .create_new(Some(ARENA_SIZE as u32)) + .read(true) + .write(true); + let map_options = MmapOptions::default(); + basic_in(SkipList::map_mut(p, Options::new(), open_options, map_options).unwrap()); + }) +} + +#[test] +#[cfg(feature = "memmap")] +fn test_basic_map_anon() { + run(|| { + let map_options = MmapOptions::default().len(ARENA_SIZE as u32); + basic_in(SkipList::map_anon(Options::new(), map_options).unwrap()); + }) +} + +#[test] +#[cfg(feature = "memmap")] +fn test_basic_map_anon_unify() { + run(|| { + let map_options = MmapOptions::default().len(ARENA_SIZE as u32); + basic_in(SkipList::map_anon(TEST_OPTIONS, map_options).unwrap()); + }) +} + +fn ordering() { + let l = SkipList::with_comparator(TEST_OPTIONS, Descend).unwrap(); + + l.get_or_insert(b"a1", b"a1").unwrap(); + l.get_or_insert(b"a2", b"a2").unwrap(); + l.get_or_insert(b"a3", b"a3").unwrap(); + + let mut it = l.iter(); + for i in (1..=3).rev() { + let ent = it.next().unwrap(); + assert_eq!(ent.key(), format!("a{i}").as_bytes()); + assert_eq!(ent.value(), format!("a{i}").as_bytes()); + } +} + +#[test] +fn test_ordering() { + run(ordering); +} + +fn get(l: SkipMap) { + l.get_or_insert(b"a", b"a1").unwrap(); + l.get_or_insert(b"a", b"a2").unwrap(); + l.get_or_insert(b"c", b"c1").unwrap(); + l.get_or_insert(b"c", b"c2").unwrap(); + + let ent = l.get(b"a").unwrap(); + assert_eq!(ent.key(), b"a"); + assert_eq!(ent.value(), b"a1"); + + let ent = l.get(b"a").unwrap(); + assert_eq!(ent.key(), b"a"); + assert_eq!(ent.value(), b"a1"); + + let ent = l.get(b"a").unwrap(); + 
assert_eq!(ent.key(), b"a"); + assert_eq!(ent.value(), b"a1"); + + l.insert(b"a", b"a2").unwrap(); + + let ent = l.get(b"a").unwrap(); + assert_eq!(ent.key(), b"a"); + assert_eq!(ent.value(), b"a2"); + + assert!(l.get(b"b").is_none()); + assert!(l.get(b"b").is_none()); + assert!(l.get(b"b").is_none()); + assert!(l.get(b"b").is_none()); + assert!(l.get(b"b").is_none()); + + let ent = l.get(b"c").unwrap(); + assert_eq!(ent.key(), b"c"); + assert_eq!(ent.value(), b"c1"); + + let ent = l.get(b"c").unwrap(); + assert_eq!(ent.key(), b"c"); + assert_eq!(ent.value(), b"c1"); + + assert!(l.get(b"d").is_none()); +} + +#[test] +fn test_get() { + run(|| get(SkipList::new(TEST_OPTIONS).unwrap())); +} + +#[test] +fn test_get_unify() { + run(|| get(SkipList::new(UNIFY_TEST_OPTIONS).unwrap())); +} + +#[test] +#[cfg(feature = "memmap")] +#[cfg_attr(miri, ignore)] +fn test_get_map_mut() { + run(|| unsafe { + let dir = tempfile::tempdir().unwrap(); + let p = dir.path().join("test_skipmap_get_map_mut"); + let open_options = OpenOptions::default() + .create_new(Some(ARENA_SIZE as u32)) + .read(true) + .write(true); + let map_options = MmapOptions::default(); + get(SkipList::map_mut(p, Options::new(), open_options, map_options).unwrap()); + }) +} + +#[test] +#[cfg(feature = "memmap")] +fn test_get_map_anon() { + run(|| { + let map_options = MmapOptions::default().len(ARENA_SIZE as u32); + get(SkipList::map_anon(Options::new(), map_options).unwrap()); + }) +} + +#[test] +#[cfg(feature = "memmap")] +fn test_get_map_anon_unify() { + run(|| { + let map_options = MmapOptions::default().len(ARENA_SIZE as u32); + get(SkipList::map_anon(UNIFY_TEST_OPTIONS, map_options).unwrap()); + }) +} + +fn gt_in(l: SkipMap) { + l.get_or_insert(b"a", b"a1").unwrap(); + l.get_or_insert(b"a", b"a2").unwrap(); + l.get_or_insert(b"c", b"c1").unwrap(); + l.get_or_insert(b"c", b"c2").unwrap(); + l.get_or_insert(b"c", b"c3").unwrap(); + + assert!(l.lower_bound(Bound::Excluded(b"a")).is_some()); + 
assert!(l.lower_bound(Bound::Excluded(b"b")).is_some()); + assert!(l.lower_bound(Bound::Excluded(b"c")).is_none()); + + let ent = l.lower_bound(Bound::Excluded(b"")).unwrap(); + assert_eq!(ent.key(), b"a"); + assert_eq!(ent.value(), b"a1"); + + let ent = l.lower_bound(Bound::Excluded(b"")).unwrap(); + assert_eq!(ent.key(), b"a"); + assert_eq!(ent.value(), b"a1"); + + let ent = l.lower_bound(Bound::Excluded(b"a")).unwrap(); + assert_eq!(ent.key(), b"c"); + assert_eq!(ent.value(), b"c1"); + + let ent = l.lower_bound(Bound::Excluded(b"b")).unwrap(); + assert_eq!(ent.key(), b"c"); + assert_eq!(ent.value(), b"c1"); + + let ent = l.lower_bound(Bound::Excluded(b"b")).unwrap(); + assert_eq!(ent.key(), b"c"); + assert_eq!(ent.value(), b"c1"); + + assert!(l.lower_bound(Bound::Excluded(b"c")).is_none()); +} + +#[test] +fn test_gt() { + run(|| gt_in(SkipList::new(TEST_OPTIONS).unwrap())); +} + +#[test] +fn test_gt_unify() { + run(|| gt_in(SkipList::new(UNIFY_TEST_OPTIONS).unwrap())); +} + +#[test] +#[cfg(feature = "memmap")] +#[cfg_attr(miri, ignore)] +fn test_gt_map_mut() { + run(|| unsafe { + let dir = tempfile::tempdir().unwrap(); + let p = dir.path().join("test_skipmap_gt_map_mut"); + let open_options = OpenOptions::default() + .create_new(Some(ARENA_SIZE as u32)) + .read(true) + .write(true); + let map_options = MmapOptions::default(); + gt_in(SkipList::map_mut(p, Options::new(), open_options, map_options).unwrap()); + }) +} + +#[test] +#[cfg(feature = "memmap")] +fn test_gt_map_anon() { + run(|| { + let map_options = MmapOptions::default().len(ARENA_SIZE as u32); + gt_in(SkipList::map_anon(Options::new(), map_options).unwrap()); + }) +} + +#[test] +#[cfg(feature = "memmap")] +fn test_gt_map_anon_unify() { + run(|| { + let map_options = MmapOptions::default().len(ARENA_SIZE as u32); + gt_in(SkipList::map_anon(UNIFY_TEST_OPTIONS, map_options).unwrap()); + }) +} + +fn ge_in(l: SkipMap) { + l.get_or_insert(b"a", b"a1").unwrap(); + l.get_or_insert(b"a", b"a2").unwrap(); + 
l.get_or_insert(b"c", b"c1").unwrap(); + l.get_or_insert(b"c", b"c2").unwrap(); + + assert!(l.lower_bound(Bound::Included(b"a")).is_some()); + assert!(l.lower_bound(Bound::Included(b"b")).is_some()); + assert!(l.lower_bound(Bound::Included(b"c")).is_some()); + + let ent = l.lower_bound(Bound::Included(b"a")).unwrap(); + assert_eq!(ent.key(), b"a"); + assert_eq!(ent.value(), b"a1"); + + let ent = l.lower_bound(Bound::Included(b"a")).unwrap(); + assert_eq!(ent.key(), b"a"); + assert_eq!(ent.value(), b"a1"); + + l.insert(b"a", b"a2").unwrap(); + + let ent = l.lower_bound(Bound::Included(b"a")).unwrap(); + assert_eq!(ent.key(), b"a"); + assert_eq!(ent.value(), b"a2"); + + let ent = l.lower_bound(Bound::Included(b"b")).unwrap(); + assert_eq!(ent.key(), b"c"); + assert_eq!(ent.value(), b"c1"); + + let ent = l.lower_bound(Bound::Included(b"b")).unwrap(); + assert_eq!(ent.key(), b"c"); + assert_eq!(ent.value(), b"c1"); + + let ent = l.lower_bound(Bound::Included(b"c")).unwrap(); + assert_eq!(ent.key(), b"c"); + assert_eq!(ent.value(), b"c1"); + + let ent = l.lower_bound(Bound::Included(b"c")).unwrap(); + assert_eq!(ent.key(), b"c"); + assert_eq!(ent.value(), b"c1"); + + assert!(l.lower_bound(Bound::Included(b"d")).is_none()); +} + +#[test] +fn test_ge() { + run(|| ge_in(SkipList::new(TEST_OPTIONS).unwrap())); +} + +#[test] +fn test_ge_unify() { + run(|| ge_in(SkipList::new(UNIFY_TEST_OPTIONS).unwrap())); +} + +#[test] +#[cfg(feature = "memmap")] +#[cfg_attr(miri, ignore)] +fn test_ge_map_mut() { + run(|| unsafe { + let dir = tempfile::tempdir().unwrap(); + let p = dir.path().join("test_skipmap_ge_map_mut"); + let open_options = OpenOptions::default() + .create_new(Some(ARENA_SIZE as u32)) + .read(true) + .write(true); + let map_options = MmapOptions::default(); + ge_in(SkipList::map_mut(p, Options::new(), open_options, map_options).unwrap()); + }) +} + +#[test] +#[cfg(feature = "memmap")] +fn test_ge_map_anon() { + run(|| { + let map_options = 
MmapOptions::default().len(ARENA_SIZE as u32);
+ ge_in(SkipList::map_anon(Options::new(), map_options).unwrap());
+ })
+}
+
+#[test]
+#[cfg(feature = "memmap")]
+fn test_ge_map_anon_unify() {
+ run(|| {
+ let map_options = MmapOptions::default().len(ARENA_SIZE as u32);
+ ge_in(SkipList::map_anon(UNIFY_TEST_OPTIONS, map_options).unwrap());
+ })
+}
+
+fn le_in(l: SkipMap) {
+ l.get_or_insert(b"a", b"a1").unwrap();
+ l.get_or_insert(b"a", b"a2").unwrap();
+ l.get_or_insert(b"c", b"c1").unwrap();
+ l.get_or_insert(b"c", b"c2").unwrap();
+
+ assert!(l.upper_bound(Bound::Included(b"a")).is_some());
+ assert!(l.upper_bound(Bound::Included(b"b")).is_some());
+ assert!(l.upper_bound(Bound::Included(b"c")).is_some());
+
+ let ent = l.upper_bound(Bound::Included(b"a")).unwrap();
+ assert_eq!(ent.key(), b"a");
+ assert_eq!(ent.value(), b"a1");
+
+ let ent = l.upper_bound(Bound::Included(b"b")).unwrap();
+ assert_eq!(ent.key(), b"a");
+ assert_eq!(ent.value(), b"a1");
+
+ let ent = l.upper_bound(Bound::Included(b"c")).unwrap();
+ assert_eq!(ent.key(), b"c");
+ assert_eq!(ent.value(), b"c1");
+
+ let ent = l.upper_bound(Bound::Included(b"d")).unwrap();
+ assert_eq!(ent.key(), b"c");
+ assert_eq!(ent.value(), b"c1");
+}
+
+#[test]
+fn test_le() {
+ run(|| le_in(SkipList::new(TEST_OPTIONS).unwrap()));
+}
+
+#[test]
+fn test_le_unify() {
+ run(|| le_in(SkipList::new(UNIFY_TEST_OPTIONS).unwrap()));
+}
+
+#[test]
+#[cfg(feature = "memmap")]
+#[cfg_attr(miri, ignore)]
+fn test_le_map_mut() {
+ run(|| unsafe {
+ let dir = tempfile::tempdir().unwrap();
+ let p = dir.path().join("test_skipmap_le_map_mut");
+ let open_options = OpenOptions::default()
+ .create_new(Some(ARENA_SIZE as u32))
+ .read(true)
+ .write(true);
+ let map_options = MmapOptions::default();
+ le_in(SkipList::map_mut(p, Options::new(), open_options, map_options).unwrap());
+ })
+}
+
+#[test]
+#[cfg(feature = "memmap")]
+fn test_le_map_anon() {
+ run(|| {
+ let map_options = MmapOptions::default().len(ARENA_SIZE as 
u32);
+ le_in(SkipList::map_anon(Options::new(), map_options).unwrap());
+ })
+}
+
+#[test]
+#[cfg(feature = "memmap")]
+fn test_le_map_anon_unify() {
+ run(|| {
+ let map_options = MmapOptions::default().len(ARENA_SIZE as u32);
+ le_in(SkipList::map_anon(UNIFY_TEST_OPTIONS, map_options).unwrap());
+ })
+}
+
+fn lt_in(l: SkipMap) {
+ l.get_or_insert(b"a", b"a1").unwrap();
+ l.get_or_insert(b"a", b"a2").unwrap();
+ l.get_or_insert(b"c", b"c1").unwrap();
+ l.get_or_insert(b"c", b"c2").unwrap();
+
+ assert!(l.upper_bound(Bound::Excluded(b"a")).is_none());
+ assert!(l.upper_bound(Bound::Excluded(b"b")).is_some());
+ assert!(l.upper_bound(Bound::Excluded(b"c")).is_some());
+
+ let ent = l.upper_bound(Bound::Excluded(b"b")).unwrap();
+ assert_eq!(ent.key(), b"a");
+ assert_eq!(ent.value(), b"a1");
+
+ let ent = l.upper_bound(Bound::Excluded(b"c")).unwrap();
+ assert_eq!(ent.key(), b"a");
+ assert_eq!(ent.value(), b"a1");
+
+ let ent = l.upper_bound(Bound::Excluded(b"d")).unwrap();
+ assert_eq!(ent.key(), b"c");
+ assert_eq!(ent.value(), b"c1");
+}
+
+#[test]
+fn test_lt() {
+ run(|| lt_in(SkipList::new(TEST_OPTIONS).unwrap()))
+}
+
+#[test]
+fn test_lt_unify() {
+ run(|| lt_in(SkipList::new(UNIFY_TEST_OPTIONS).unwrap()))
+}
+
+#[test]
+#[cfg(feature = "memmap")]
+#[cfg_attr(miri, ignore)]
+fn test_lt_map_mut() {
+ let dir = tempfile::tempdir().unwrap();
+ let p = dir.path().join("test_skipmap_lt_map_mut");
+ let open_options = OpenOptions::default()
+ .create_new(Some(ARENA_SIZE as u32))
+ .read(true)
+ .write(true);
+ let map_options = MmapOptions::default();
+ lt_in(unsafe { SkipList::map_mut(p, Options::new(), open_options, map_options).unwrap() });
+}
+
+#[test]
+#[cfg(feature = "memmap")]
+
+fn test_lt_map_anon() {
+ run(|| {
+ let map_options = MmapOptions::default().len(ARENA_SIZE as u32);
+ lt_in(SkipList::map_anon(Options::new(), map_options).unwrap());
+ })
+}
+
+#[test]
+#[cfg(feature = "memmap")]
+fn test_lt_map_anon_unify() {
+ run(|| {
+ let map_options = 
MmapOptions::default().len(ARENA_SIZE as u32); + lt_in(SkipList::map_anon(UNIFY_TEST_OPTIONS, map_options).unwrap()); + }) +} + +fn test_basic_large_testcases_in(l: SkipMap) { + let n = 1000; + + for i in 0..n { + l.get_or_insert(&key(i), &new_value(i)).unwrap(); + } + + for i in 0..n { + let k = key(i); + let ent = l.get(&k).unwrap(); + assert_eq!(new_value(i), ent.value()); + + assert_eq!(ent.key(), k); + } + + assert_eq!(n, l.len()); +} + +#[test] +fn test_basic_large_testcases() { + run(|| { + let l = SkipList::new(TEST_OPTIONS).unwrap(); + test_basic_large_testcases_in(l); + }) +} + +#[test] +fn test_basic_large_testcases_unify() { + run(|| { + let l = SkipList::new(UNIFY_TEST_OPTIONS).unwrap(); + test_basic_large_testcases_in(l); + }) +} + +#[test] +#[cfg(feature = "memmap")] +#[cfg_attr(miri, ignore)] +fn test_basic_large_testcases_map_mut() { + run(|| unsafe { + let dir = tempfile::tempdir().unwrap(); + let p = dir + .path() + .join("test_skipmap_basic_large_testcases_map_mut"); + let open_options = OpenOptions::default() + .create_new(Some(ARENA_SIZE as u32)) + .read(true) + .write(true); + let map_options = MmapOptions::default(); + let l = SkipList::map_mut(p, Options::new(), open_options, map_options).unwrap(); + test_basic_large_testcases_in(l); + }) +} + +#[test] +#[cfg(feature = "memmap")] +fn test_basic_large_testcases_map_anon() { + run(|| { + let map_options = MmapOptions::default().len(ARENA_SIZE as u32); + let l = SkipList::map_anon(Options::new(), map_options).unwrap(); + test_basic_large_testcases_in(l); + }) +} + +#[test] +#[cfg(feature = "memmap")] +fn test_basic_large_testcases_map_anon_unify() { + run(|| { + let map_options = MmapOptions::default().len(ARENA_SIZE as u32); + let l = SkipList::map_anon(UNIFY_TEST_OPTIONS, map_options).unwrap(); + test_basic_large_testcases_in(l); + }) +} + +#[cfg(feature = "std")] +fn test_concurrent_basic_runner(l: SkipMap) { + #[cfg(not(any(miri, feature = "loom")))] + const N: usize = 1000; + 
#[cfg(any(miri, feature = "loom"))] + const N: usize = 5; + + for i in 0..N { + let l = l.clone(); + std::thread::spawn(move || { + l.get_or_insert(&key(i), &new_value(i)).unwrap(); + }); + } + while l.refs() > 1 {} + for i in 0..N { + let l = l.clone(); + std::thread::spawn(move || { + let k = key(i); + assert_eq!(l.get(&k).unwrap().value(), new_value(i), "broken: {i}"); + }); + } +} + +#[test] +#[cfg(feature = "std")] +fn test_concurrent_basic() { + run(|| { + let l = SkipList::new(TEST_OPTIONS).unwrap().with_yield_now(); + test_concurrent_basic_runner(l); + }) +} + +#[test] +#[cfg(feature = "std")] +fn test_concurrent_basic_unify() { + run(|| { + let l = SkipList::new(UNIFY_TEST_OPTIONS).unwrap().with_yield_now(); + test_concurrent_basic_runner(l); + }) +} + +#[test] +#[cfg(feature = "memmap")] +#[cfg_attr(miri, ignore)] +fn test_concurrent_basic_map_mut() { + run(|| unsafe { + let dir = tempfile::tempdir().unwrap(); + let p = dir.path().join("test_skipmap_concurrent_basic_map_mut"); + let open_options = OpenOptions::default() + .create_new(Some(ARENA_SIZE as u32)) + .read(true) + .write(true); + let map_options = MmapOptions::default(); + let l = SkipList::map_mut(p, Options::new(), open_options, map_options) + .unwrap() + .with_yield_now(); + test_concurrent_basic_runner(l); + }) +} + +#[test] +#[cfg(feature = "memmap")] +fn test_concurrent_basic_map_anon() { + run(|| { + let map_options = MmapOptions::default().len(ARENA_SIZE as u32); + test_concurrent_basic_runner( + SkipList::map_anon(Options::new(), map_options) + .unwrap() + .with_yield_now(), + ); + }) +} + +#[test] +#[cfg(feature = "memmap")] +fn test_concurrent_basic_map_anon_unify() { + run(|| { + let map_options = MmapOptions::default().len(ARENA_SIZE as u32); + test_concurrent_basic_runner( + SkipList::map_anon(UNIFY_TEST_OPTIONS, map_options) + .unwrap() + .with_yield_now(), + ); + }) +} + +#[cfg(all(feature = "std", not(miri)))] +fn test_concurrent_basic_big_values_runner(l: SkipMap) { + 
#[cfg(not(any(miri, feature = "loom")))] + const N: usize = 100; + #[cfg(any(miri, feature = "loom"))] + const N: usize = 5; + + for i in 0..N { + let l = l.clone(); + std::thread::spawn(move || { + l.get_or_insert(&key(i), &big_value(i)).unwrap(); + }); + } + while l.refs() > 1 {} + // assert_eq!(N, l.len()); + for i in 0..N { + let l = l.clone(); + std::thread::spawn(move || { + let k = key(i); + assert_eq!(l.get(&k).unwrap().value(), big_value(i), "broken: {i}"); + }); + } + while l.refs() > 1 {} +} + +#[test] +#[cfg(all(feature = "std", not(miri)))] +fn test_concurrent_basic_big_values() { + run(|| { + test_concurrent_basic_big_values_runner( + SkipList::new(BIG_TEST_OPTIONS).unwrap().with_yield_now(), + ); + }) +} + +#[test] +#[cfg(all(feature = "std", not(miri)))] +fn test_concurrent_basic_big_values_unify() { + run(|| { + test_concurrent_basic_big_values_runner( + SkipList::new(UNIFY_BIG_TEST_OPTIONS) + .unwrap() + .with_yield_now(), + ); + }) +} + +#[test] +#[cfg(all(feature = "memmap", not(miri)))] +fn test_concurrent_basic_big_values_map_mut() { + run(|| unsafe { + let dir = tempfile::tempdir().unwrap(); + let p = dir + .path() + .join("test_skipmap_concurrent_basic_big_values_map_mut"); + let open_options = OpenOptions::default() + .create_new(Some(120 << 20)) + .read(true) + .write(true); + let map_options = MmapOptions::default(); + test_concurrent_basic_big_values_runner( + SkipList::map_mut(p, Options::new(), open_options, map_options) + .unwrap() + .with_yield_now(), + ); + }) +} + +#[test] +#[cfg(all(feature = "memmap", not(miri)))] +fn test_concurrent_basic_big_values_map_anon() { + run(|| { + let map_options = MmapOptions::default().len(120 << 20); + test_concurrent_basic_big_values_runner( + SkipList::map_anon(Options::new(), map_options) + .unwrap() + .with_yield_now(), + ); + }) +} + +#[test] +#[cfg(all(feature = "memmap", not(miri)))] +fn test_concurrent_basic_big_values_map_anon_unify() { + run(|| { + let map_options = 
MmapOptions::default().len(120 << 20); + test_concurrent_basic_big_values_runner( + SkipList::map_anon(UNIFY_BIG_TEST_OPTIONS, map_options) + .unwrap() + .with_yield_now(), + ); + }) +} + +#[cfg(feature = "std")] +fn concurrent_one_key(l: SkipMap) { + #[cfg(not(any(miri, feature = "loom")))] + const N: usize = 5; + #[cfg(any(miri, feature = "loom"))] + const N: usize = 5; + + let wg = WaitGroup::new(); + for i in 0..N { + let wg = wg.add(1); + let l = l.clone(); + std::thread::spawn(move || { + let _ = l.get_or_insert(b"thekey", &make_value(i)); + wg.done(); + }); + } + + wg.wait(); + + let saw_value = Arc::new(crate::sync::AtomicU32::new(0)); + for _ in 0..N { + let wg = wg.add(1); + let l = l.clone(); + let saw_value = saw_value.clone(); + std::thread::spawn(move || { + let ent = l.get(b"thekey").unwrap(); + let val = ent.value(); + let num: usize = core::str::from_utf8(&val[1..]).unwrap().parse().unwrap(); + assert!((0..N).contains(&num)); + + let mut it = l.iter(); + let ent = it.seek_lower_bound(Bound::Included(b"thekey")).unwrap(); + let val = ent.value(); + let num: usize = core::str::from_utf8(&val[1..]).unwrap().parse().unwrap(); + assert!((0..N).contains(&num)); + assert_eq!(ent.key(), b"thekey"); + saw_value.fetch_add(1, Ordering::SeqCst); + wg.done(); + }); + } + + wg.wait(); + + assert_eq!(N, saw_value.load(Ordering::SeqCst) as usize); + assert_eq!(l.len(), 1); +} + +#[test] +#[cfg(feature = "std")] +fn test_concurrent_one_key() { + run(|| { + concurrent_one_key(SkipList::new(TEST_OPTIONS).unwrap().with_yield_now()); + }) +} + +#[test] +#[cfg(feature = "std")] +fn test_concurrent_one_key_unify() { + run(|| { + concurrent_one_key(SkipList::new(UNIFY_TEST_OPTIONS).unwrap().with_yield_now()); + }) +} + +#[test] +#[cfg(feature = "memmap")] +#[cfg_attr(miri, ignore)] +fn test_concurrent_one_key_map_mut() { + run(|| unsafe { + let dir = tempfile::tempdir().unwrap(); + let p = dir.path().join("test_skipmap_concurrent_one_key_map_mut"); + let open_options = 
OpenOptions::default() + .create(Some(ARENA_SIZE as u32)) + .read(true) + .write(true); + let map_options = MmapOptions::default(); + concurrent_one_key( + SkipList::map_mut(p, Options::new(), open_options, map_options) + .unwrap() + .with_yield_now(), + ); + }) +} + +#[test] +#[cfg(feature = "memmap")] +fn test_concurrent_one_key_map_anon() { + run(|| { + let map_options = MmapOptions::default().len(ARENA_SIZE as u32); + concurrent_one_key( + SkipList::map_anon(Options::new(), map_options) + .unwrap() + .with_yield_now(), + ); + }) +} + +#[test] +#[cfg(feature = "memmap")] +fn test_concurrent_one_key_map_anon_unify() { + run(|| { + let map_options = MmapOptions::default().len(ARENA_SIZE as u32); + concurrent_one_key( + SkipList::map_anon(UNIFY_TEST_OPTIONS, map_options) + .unwrap() + .with_yield_now(), + ); + }) +} + +fn iter_all_versions_next(l: SkipMap) { + const N: usize = 100; + + for i in (0..N).rev() { + l.get_or_insert(&make_int_key(i), &make_value(i)).unwrap(); + } + + let mut it = l.iter(); + let mut ent = it.seek_lower_bound(Bound::Unbounded).unwrap(); + for i in 0..N { + assert_eq!(ent.key(), make_int_key(i)); + assert_eq!(ent.value(), make_value(i)); + if i != N - 1 { + ent = it.next().unwrap(); + } + } + + assert!(it.next().is_none()); +} + +#[test] +fn test_iter_all_versions_next() { + run(|| iter_all_versions_next(SkipList::new(TEST_OPTIONS).unwrap())); +} + +#[test] +fn test_iter_all_versions_next_unify() { + run(|| iter_all_versions_next(SkipList::new(UNIFY_TEST_OPTIONS).unwrap())); +} + +#[test] +#[cfg(feature = "memmap")] +#[cfg_attr(miri, ignore)] +fn test_iter_all_versions_next_map_mut() { + run(|| unsafe { + let dir = tempfile::tempdir().unwrap(); + let p = dir + .path() + .join("test_skipmap_iter_all_versions_next_map_mut"); + let open_options = OpenOptions::default() + .create_new(Some(ARENA_SIZE as u32)) + .read(true) + .write(true); + let map_options = MmapOptions::default(); + iter_all_versions_next( + SkipList::map_mut(p, 
Options::new(), open_options, map_options).unwrap(),
+ );
+ })
+}
+
+#[test]
+#[cfg(feature = "memmap")]
+fn test_iter_all_versions_next_map_anon() {
+ run(|| {
+ let map_options = MmapOptions::default().len(ARENA_SIZE as u32);
+ iter_all_versions_next(SkipList::map_anon(Options::new(), map_options).unwrap());
+ })
+}
+
+#[test]
+#[cfg(feature = "memmap")]
+fn test_iter_all_versions_next_map_anon_unify() {
+ run(|| {
+ let map_options = MmapOptions::default().len(ARENA_SIZE as u32);
+ iter_all_versions_next(SkipList::map_anon(UNIFY_TEST_OPTIONS, map_options).unwrap());
+ })
+}
+
+fn range_next(l: SkipMap) {
+ const N: usize = 100;
+
+ for i in (0..N).rev() {
+ l.get_or_insert(&make_int_key(i), &make_value(i)).unwrap();
+ }
+
+ let upper = make_int_key(50);
+ let mut it = l.range(..=upper.as_slice());
+ let mut ent = it.seek_lower_bound(Bound::Unbounded);
+ for i in 0..N {
+ if i <= 50 {
+ {
+ let ent = ent.unwrap();
+ assert_eq!(ent.key(), make_int_key(i));
+ assert_eq!(ent.value(), make_value(i));
+ }
+ ent = it.next();
+ } else {
+ assert!(ent.is_none());
+ ent = it.next();
+ }
+ }
+
+ assert!(it.next().is_none());
+}
+
+#[test]
+fn test_range_next() {
+ run(|| range_next(SkipList::new(TEST_OPTIONS).unwrap()));
+}
+
+#[test]
+fn test_range_next_unify() {
+ run(|| range_next(SkipList::new(UNIFY_TEST_OPTIONS).unwrap()));
+}
+
+#[test]
+#[cfg(feature = "memmap")]
+#[cfg_attr(miri, ignore)]
+fn test_range_next_map_mut() {
+ run(|| unsafe {
+ let dir = tempfile::tempdir().unwrap();
+ let p = dir.path().join("test_skipmap_range_next_map_mut");
+ let open_options = OpenOptions::default()
+ .create_new(Some(ARENA_SIZE as u32))
+ .read(true)
+ .write(true);
+ let map_options = MmapOptions::default();
+ range_next(
+ SkipList::map_mut(p, Options::new(), open_options, map_options).unwrap(),
+ );
+ })
+}
+
+#[test]
+#[cfg(feature = "memmap")]
+fn test_range_next_map_anon() {
+ run(|| {
+ let map_options = MmapOptions::default().len(ARENA_SIZE as u32);
+ 
range_next(SkipList::map_anon(Options::new(), map_options).unwrap());
+ })
+}
+
+#[test]
+#[cfg(feature = "memmap")]
+fn test_range_next_map_anon_unify() {
+ run(|| {
+ let map_options = MmapOptions::default().len(ARENA_SIZE as u32);
+ range_next(SkipList::map_anon(UNIFY_TEST_OPTIONS, map_options).unwrap());
+ })
+}
+
+fn iter_all_versions_prev(l: SkipMap) {
+ const N: usize = 100;
+
+ for i in 0..N {
+ l.get_or_insert(&make_int_key(i), &make_value(i)).unwrap();
+ }
+
+ let mut it = l.iter();
+ let mut ent = it.seek_upper_bound(Bound::Unbounded).unwrap();
+ for i in (0..N).rev() {
+ assert_eq!(ent.key(), make_int_key(i));
+ assert_eq!(ent.value(), make_value(i));
+ if i != 0 {
+ ent = it.next_back().unwrap();
+ }
+ }
+
+ assert!(it.next_back().is_none());
+}
+
+#[test]
+fn test_iter_all_versions_next_back() {
+ run(|| iter_all_versions_prev(SkipList::new(TEST_OPTIONS).unwrap()))
+}
+
+#[test]
+#[cfg(feature = "memmap")]
+#[cfg_attr(miri, ignore)]
+fn test_iter_all_versions_prev_map_mut() {
+ run(|| unsafe {
+ let dir = tempfile::tempdir().unwrap();
+ let p = dir
+ .path()
+ .join("test_skipmap_iter_all_versions_prev_map_mut");
+ let open_options = OpenOptions::default()
+ .create_new(Some(ARENA_SIZE as u32))
+ .read(true)
+ .write(true);
+ let map_options = MmapOptions::default();
+ iter_all_versions_prev(
+ SkipList::map_mut(p, Options::new(), open_options, map_options).unwrap(),
+ );
+ })
+}
+
+#[test]
+#[cfg(feature = "memmap")]
+fn test_iter_all_versions_prev_map_anon() {
+ run(|| {
+ let map_options = MmapOptions::default().len(ARENA_SIZE as u32);
+ iter_all_versions_prev(SkipList::map_anon(Options::new(), map_options).unwrap());
+ })
+}
+
+#[test]
+#[cfg(feature = "memmap")]
+fn test_iter_all_versions_prev_map_anon_unify() {
+ run(|| {
+ let map_options = MmapOptions::default().len(ARENA_SIZE as u32);
+ iter_all_versions_prev(SkipList::map_anon(UNIFY_TEST_OPTIONS, map_options).unwrap());
+ })
+}
+
+fn range_prev(l: SkipMap) {
+ const 
N: usize = 100; + + for i in 0..N { + l.get_or_insert(&make_int_key(i), &make_value(i)).unwrap(); + } + + let lower = make_int_key(50); + let mut it = l.range(lower.as_slice()..); + let mut ent = it.seek_upper_bound(Bound::Unbounded); + for i in (0..N).rev() { + if i >= 50 { + { + let ent = ent.unwrap(); + assert_eq!(ent.key(), make_int_key(i)); + assert_eq!(ent.value(), make_value(i)); + } + ent = it.next_back(); + } else { + assert!(ent.is_none()); + ent = it.next_back(); + } + } + + assert!(it.next_back().is_none()); +} + +#[test] +fn test_range_prev() { + run(|| range_prev(SkipList::new(TEST_OPTIONS).unwrap())); +} + +#[test] +fn test_range_prev_unify() { + run(|| range_prev(SkipList::new(UNIFY_TEST_OPTIONS).unwrap())); +} + +#[test] +#[cfg(feature = "memmap")] +#[cfg_attr(miri, ignore)] +fn test_range_prev_map_mut() { + run(|| unsafe { + let dir = tempfile::tempdir().unwrap(); + let p = dir.path().join("test_skipmap_range_prev_map_mut"); + let open_options = OpenOptions::default() + .create_new(Some(ARENA_SIZE as u32)) + .read(true) + .write(true); + let map_options = MmapOptions::default(); + range_prev(SkipList::map_mut(p, Options::new(), open_options, map_options).unwrap()); + }) +} + +#[test] +#[cfg(feature = "memmap")] +fn test_range_prev_map_anon() { + run(|| { + let map_options = MmapOptions::default().len(ARENA_SIZE as u32); + range_prev(SkipList::map_anon(Options::new(), map_options).unwrap()); + }) +} + +#[test] +#[cfg(feature = "memmap")] +fn test_range_prev_map_anon_unify() { + run(|| { + let map_options = MmapOptions::default().len(ARENA_SIZE as u32); + range_prev(SkipList::map_anon(UNIFY_TEST_OPTIONS, map_options).unwrap()); + }) +} + +fn iter_all_versions_seek_ge(l: SkipMap) { + const N: usize = 100; + + for i in (0..N).rev() { + let v = i * 10 + 1000; + l.get_or_insert(&make_int_key(v), &make_value(v)).unwrap(); + } + + let mut it = l.iter(); + let ent = it.seek_lower_bound(Bound::Included(b"")).unwrap(); + assert_eq!(ent.key(), 
make_int_key(1000)); + assert_eq!(ent.value(), make_value(1000)); + + let ent = it.seek_lower_bound(Bound::Included(b"01000")).unwrap(); + assert_eq!(ent.key(), make_int_key(1000)); + assert_eq!(ent.value(), make_value(1000)); + + let ent = it.seek_lower_bound(Bound::Included(b"01005")).unwrap(); + assert_eq!(ent.key(), make_int_key(1010)); + assert_eq!(ent.value(), make_value(1010)); + + let ent = it.seek_lower_bound(Bound::Included(b"01010")).unwrap(); + assert_eq!(ent.key(), make_int_key(1010)); + assert_eq!(ent.value(), make_value(1010)); + + let ent = it.seek_lower_bound(Bound::Included(b"01020")).unwrap(); + assert_eq!(ent.key(), make_int_key(1020)); + assert_eq!(ent.value(), make_value(1020)); + + let ent = it.seek_lower_bound(Bound::Included(b"01200")).unwrap(); + assert_eq!(ent.key(), make_int_key(1200)); + assert_eq!(ent.value(), make_value(1200)); + + let ent = it.seek_lower_bound(Bound::Included(b"01100")).unwrap(); + assert_eq!(ent.key(), make_int_key(1100)); + assert_eq!(ent.value(), make_value(1100)); + + let ent = it.seek_lower_bound(Bound::Included(b"99999")); + assert!(ent.is_none()); + + l.get_or_insert(&[], &[]).unwrap(); + let ent = it.seek_lower_bound(Bound::Included(b"")).unwrap(); + assert_eq!(ent.key(), &[]); + assert_eq!(ent.value(), &[]); + + let ent = it.seek_lower_bound(Bound::Included(b"")).unwrap(); + assert_eq!(ent.key(), &[]); + assert_eq!(ent.value(), &[]); +} + +#[test] +fn test_iter_all_versions_seek_ge() { + run(|| iter_all_versions_seek_ge(SkipList::new(TEST_OPTIONS).unwrap())); +} + +#[test] +fn test_iter_all_versions_seek_ge_unify() { + run(|| iter_all_versions_seek_ge(SkipList::new(UNIFY_TEST_OPTIONS).unwrap())); +} + +#[test] +#[cfg(feature = "memmap")] +#[cfg_attr(miri, ignore)] +fn test_iter_all_versions_seek_ge_map_mut() { + run(|| unsafe { + let dir = tempfile::tempdir().unwrap(); + let p = dir + .path() + .join("test_skipmap_iter_all_versions_seek_ge_map_mut"); + let open_options = OpenOptions::default() + 
.create_new(Some(ARENA_SIZE as u32)) + .read(true) + .write(true); + let map_options = MmapOptions::default(); + iter_all_versions_seek_ge( + SkipList::map_mut(p, Options::new(), open_options, map_options).unwrap(), + ); + }) +} + +#[test] +#[cfg(feature = "memmap")] +fn test_iter_all_versions_seek_ge_map_anon() { + run(|| { + let map_options = MmapOptions::default().len(ARENA_SIZE as u32); + iter_all_versions_seek_ge(SkipList::map_anon(Options::new(), map_options).unwrap()); + }) +} + +#[test] +#[cfg(feature = "memmap")] +fn test_iter_all_versions_seek_ge_map_anon_unify() { + run(|| { + let map_options = MmapOptions::default().len(ARENA_SIZE as u32); + iter_all_versions_seek_ge(SkipList::map_anon(UNIFY_TEST_OPTIONS, map_options).unwrap()); + }) +} + +fn iter_all_versions_seek_lt(l: SkipMap) { + const N: usize = 100; + + for i in (0..N).rev() { + let v = i * 10 + 1000; + l.get_or_insert(&make_int_key(v), &make_value(v)).unwrap(); + } + + let mut it = l.iter(); + assert!(it.seek_upper_bound(Bound::Excluded(b"")).is_none()); + + let ent = it.seek_upper_bound(Bound::Excluded(b"01000")); + assert!(ent.is_none()); + + let ent = it.seek_upper_bound(Bound::Excluded(b"01001")).unwrap(); + assert_eq!(ent.key(), make_int_key(1000)); + assert_eq!(ent.value(), make_value(1000)); + + let ent = it.seek_upper_bound(Bound::Excluded(b"01991")).unwrap(); + assert_eq!(ent.key(), make_int_key(1990)); + assert_eq!(ent.value(), make_value(1990)); + + let ent = it.seek_upper_bound(Bound::Excluded(b"99999")).unwrap(); + assert_eq!(ent.key(), make_int_key(1990)); + assert_eq!(ent.value(), make_value(1990)); + + l.get_or_insert(&[], &[]).unwrap(); + + let ent = it.seek_upper_bound(Bound::Excluded(b"")); + assert!(ent.is_none()); + + let ent = it.seek_upper_bound(Bound::Excluded(b"\x01")).unwrap(); + assert_eq!(ent.key(), &[]); + assert_eq!(ent.value(), &[]); +} + +#[test] +fn test_iter_all_versions_seek_lt() { + run(|| iter_all_versions_seek_lt(SkipList::new(TEST_OPTIONS).unwrap())) +} + 
+#[test] +fn test_iter_all_versions_seek_lt_unify() { + run(|| iter_all_versions_seek_lt(SkipList::new(UNIFY_TEST_OPTIONS).unwrap())) +} + +#[test] +#[cfg(feature = "memmap")] +#[cfg_attr(miri, ignore)] +fn test_iter_all_versions_seek_lt_map_mut() { + run(|| unsafe { + let dir = tempfile::tempdir().unwrap(); + let p = dir + .path() + .join("test_skipmap_iter_all_versions_seek_lt_map_mut"); + let open_options = OpenOptions::default() + .create_new(Some(ARENA_SIZE as u32)) + .read(true) + .write(true); + let map_options = MmapOptions::default(); + iter_all_versions_seek_lt( + SkipList::map_mut(p, Options::new(), open_options, map_options).unwrap(), + ); + }) +} + +#[test] +#[cfg(feature = "memmap")] +fn test_iter_all_versions_seek_lt_map_anon() { + run(|| { + let map_options = MmapOptions::default().len(ARENA_SIZE as u32); + iter_all_versions_seek_lt(SkipList::map_anon(Options::new(), map_options).unwrap()); + }) +} + +#[test] +#[cfg(feature = "memmap")] +fn test_iter_all_versions_seek_lt_map_anon_unify() { + run(|| { + let map_options = MmapOptions::default().len(ARENA_SIZE as u32); + iter_all_versions_seek_lt(SkipList::map_anon(UNIFY_TEST_OPTIONS, map_options).unwrap()); + }) +} + +fn range(l: SkipMap) { + for i in 1..10 { + l.get_or_insert(&make_int_key(i), &make_value(i)).unwrap(); + } + + let k3 = make_int_key(3); + let k7 = make_int_key(7); + let mut it = l.range(k3.as_slice()..k7.as_slice()).clone(); + assert_eq!(it.bounds(), &(k3.as_slice()..k7.as_slice())); + + for i in 3..=6 { + let k = make_int_key(i); + let ent = it.seek_lower_bound(Bound::Included(&k)).unwrap(); + assert_eq!(ent.key(), make_int_key(i)); + assert_eq!(ent.value(), make_value(i)); + } + + for i in 1..3 { + let k = make_int_key(i); + let ent = it.seek_lower_bound(Bound::Included(&k)).unwrap(); + assert_eq!(ent.key(), make_int_key(3)); + assert_eq!(ent.value(), make_value(3)); + } + + for i in 7..10 { + let k = make_int_key(i); + assert!(it.seek_lower_bound(Bound::Included(&k)).is_none()); + 
} + + for i in 7..10 { + let k = make_int_key(i); + let ent = it.seek_upper_bound(Bound::Included(&k)).unwrap(); + assert_eq!(ent.key(), make_int_key(6)); + assert_eq!(ent.value(), make_value(6)); + } + + let ent = it + .seek_lower_bound(Bound::Included(&make_int_key(6))) + .unwrap(); + assert_eq!(ent.key(), make_int_key(6)); + assert_eq!(ent.value(), make_value(6)); + + assert!(it.next().is_none()); + + let ent = it + .seek_upper_bound(Bound::Included(&make_int_key(6))) + .unwrap(); + assert_eq!(ent.key(), make_int_key(6)); + assert_eq!(ent.value(), make_value(6)); + + assert!(it.next().is_none()); + + for i in 4..=7 { + let k = make_int_key(i); + let ent = it.seek_upper_bound(Bound::Excluded(&k)).unwrap(); + assert_eq!(ent.key(), make_int_key(i - 1)); + assert_eq!(ent.value(), make_value(i - 1)); + } + + for i in 7..10 { + let k = make_int_key(i); + let ent = it.seek_upper_bound(Bound::Excluded(&k)).unwrap(); + assert_eq!(ent.key(), make_int_key(6)); + assert_eq!(ent.value(), make_value(6)); + } + + for i in 1..3 { + let k = make_int_key(i); + let ent = it.seek_lower_bound(Bound::Excluded(&k)).unwrap(); + assert_eq!(ent.key(), make_int_key(3)); + assert_eq!(ent.value(), make_value(3)); + } + + for i in 1..4 { + let k = make_int_key(i); + assert!(it.seek_upper_bound(Bound::Excluded(&k)).is_none()); + } + + let ent = it + .seek_upper_bound(Bound::Excluded(&make_int_key(4))) + .unwrap(); + assert_eq!(ent.key(), make_int_key(3)); + assert_eq!(ent.value(), make_value(3)); + + assert!(it.next_back().is_none()); +} + +#[test] +fn test_range() { + run(|| range(SkipList::new(TEST_OPTIONS).unwrap())) +} + +#[test] +fn test_range_unify() { + run(|| range(SkipList::new(UNIFY_TEST_OPTIONS).unwrap())) +} + +#[test] +#[cfg(feature = "memmap")] +#[cfg_attr(miri, ignore)] +fn test_range_map_mut() { + run(|| unsafe { + let dir = tempfile::tempdir().unwrap(); + let p = dir.path().join("test_skipmap_range_map_mut"); + let open_options = OpenOptions::default() + 
.create_new(Some(ARENA_SIZE as u32)) + .read(true) + .write(true); + let map_options = MmapOptions::default(); + range(SkipList::map_mut(p, Options::new(), open_options, map_options).unwrap()); + }) +} + +#[test] +#[cfg(feature = "memmap")] +fn test_range_map_anon() { + run(|| { + let map_options = MmapOptions::default().len(ARENA_SIZE as u32); + range(SkipList::map_anon(Options::new(), map_options).unwrap()); + }) +} + +#[test] +#[cfg(feature = "memmap")] +fn test_range_map_anon_unify() { + run(|| { + let map_options = MmapOptions::default().len(ARENA_SIZE as u32); + range(SkipList::map_anon(UNIFY_TEST_OPTIONS, map_options).unwrap()); + }) +} + +fn iter_latest(l: SkipMap) { + const N: usize = 100; + + for i in 0..N { + l.get_or_insert(&make_int_key(i), &make_value(i)).unwrap(); + } + + for i in 50..N { + l.insert(&make_int_key(i), &make_value(i + 1000)).unwrap(); + } + + for i in 0..50 { + l.insert(&make_int_key(i), &make_value(i + 1000)).unwrap(); + } + + let mut it = l.iter(); + let mut num = 0; + for i in 0..N { + let ent = it.next().unwrap(); + assert_eq!(ent.key(), make_int_key(i)); + assert_eq!(ent.value(), make_value(i + 1000)); + + num += 1; + } + assert_eq!(num, N); +} + +#[test] +fn test_iter_latest() { + run(|| iter_latest(SkipList::new(TEST_OPTIONS).unwrap())) +} + +#[test] +fn test_iter_latest_unify() { + run(|| iter_latest(SkipList::new(UNIFY_TEST_OPTIONS).unwrap())) +} + +#[test] +#[cfg(feature = "memmap")] +#[cfg_attr(miri, ignore)] +fn test_iter_latest_map_mut() { + run(|| unsafe { + let dir = tempfile::tempdir().unwrap(); + let p = dir.path().join("test_skipmap_iter_latest_map_mut"); + let open_options = OpenOptions::default() + .create_new(Some(ARENA_SIZE as u32)) + .read(true) + .write(true); + let map_options = MmapOptions::default(); + iter_latest(SkipList::map_mut(p, Options::new(), open_options, map_options).unwrap()); + }) +} + +#[test] +#[cfg(feature = "memmap")] +fn test_iter_latest_map_anon() { + run(|| { + let map_options = 
MmapOptions::default().len(ARENA_SIZE as u32); + iter_latest(SkipList::map_anon(Options::new(), map_options).unwrap()); + }) +} + +#[test] +#[cfg(feature = "memmap")] +fn test_iter_latest_map_anon_unify() { + run(|| { + let map_options = MmapOptions::default().len(ARENA_SIZE as u32); + iter_latest(SkipList::map_anon(UNIFY_TEST_OPTIONS, map_options).unwrap()); + }) +} + +fn range_latest(l: SkipMap) { + const N: usize = 100; + + for i in 0..N { + l.get_or_insert(&make_int_key(i), &make_value(i)).unwrap(); + } + + for i in 50..N { + l.insert(&make_int_key(i), &make_value(i + 1000)).unwrap(); + } + + for i in 0..50 { + l.insert(&make_int_key(i), &make_value(i + 1000)).unwrap(); + } + + let mut it = l.range::<[u8], _>(..); + let mut num = 0; + for i in 0..N { + let ent = it.next().unwrap(); + assert_eq!(ent.key(), make_int_key(i)); + assert_eq!(ent.value(), make_value(i + 1000)); + + num += 1; + } + assert_eq!(num, N); +} + +#[test] +fn test_range_latest() { + run(|| range_latest(SkipList::new(TEST_OPTIONS).unwrap())) +} + +#[test] +fn test_range_latest_unify() { + run(|| range_latest(SkipList::new(UNIFY_TEST_OPTIONS).unwrap())) +} + +#[test] +#[cfg(feature = "memmap")] +#[cfg_attr(miri, ignore)] +fn test_range_latest_map_mut() { + run(|| unsafe { + let dir = tempfile::tempdir().unwrap(); + let p = dir.path().join("test_skipmap_range_latest_map_mut"); + let open_options = OpenOptions::default() + .create_new(Some(ARENA_SIZE as u32)) + .read(true) + .write(true); + let map_options = MmapOptions::default(); + range_latest(SkipList::map_mut(p, Options::new(), open_options, map_options).unwrap()); + }) +} + +#[test] +#[cfg(feature = "memmap")] +fn test_range_latest_map_anon() { + run(|| { + let map_options = MmapOptions::default().len(ARENA_SIZE as u32); + range_latest(SkipList::map_anon(Options::new(), map_options).unwrap()); + }) +} + +#[test] +#[cfg(feature = "memmap")] +fn test_range_latest_map_anon_unify() { + run(|| { + let map_options = 
MmapOptions::default().len(ARENA_SIZE as u32); + range_latest(SkipMap::map_anon(UNIFY_TEST_OPTIONS, map_options).unwrap()); + }) +} + +#[test] +#[cfg(feature = "memmap")] +#[cfg_attr(miri, ignore)] +fn test_reopen_mmap() { + run(|| unsafe { + let dir = tempfile::tempdir().unwrap(); + let p = dir.path().join("reopen_skipmap"); + { + let open_options = OpenOptions::default() + .create(Some(ARENA_SIZE as u32)) + .read(true) + .write(true); + let map_options = MmapOptions::default(); + let l = SkipMap::map_mut(&p, Options::new(), open_options, map_options).unwrap(); + for i in 0..1000 { + l.get_or_insert(&key(i), &new_value(i)).unwrap(); + } + l.flush().unwrap(); + } + + let open_options = OpenOptions::default().read(true); + let map_options = MmapOptions::default(); + let l = SkipMap::map(&p, Options::new(), open_options, map_options).unwrap(); + assert_eq!(1000, l.len()); + for i in 0..1000 { + let k = key(i); + let ent = l.get(&k).unwrap(); + assert_eq!(new_value(i), ent.value()); + + assert_eq!(ent.key(), k); + } + }) +} + +#[test] +#[cfg(feature = "memmap")] +#[cfg_attr(miri, ignore)] +fn test_reopen_mmap2() { + run(|| unsafe { + use rand::seq::SliceRandom; + + let dir = tempfile::tempdir().unwrap(); + let p = dir.path().join("reopen_skipmap2"); + { + let open_options = OpenOptions::default() + .create(Some(ARENA_SIZE as u32)) + .read(true) + .write(true); + let map_options = MmapOptions::default(); + let l = + SkipMap::map_mut_with_comparator(&p, Options::new(), open_options, map_options, Ascend) + .unwrap(); + let mut data = (0..1000).collect::<std::vec::Vec<usize>>(); + data.shuffle(&mut rand::thread_rng()); + for i in &data { + let i = *i; + l.get_or_insert(&key(i), &new_value(i)).unwrap(); + } + l.flush_async().unwrap(); + + for i in data { + let k = key(i); + let ent = l.get(&k).unwrap(); + assert_eq!(new_value(i), ent.value()); + assert_eq!(ent.key(), k); + } + } + + let open_options = OpenOptions::default().read(true); + let map_options = MmapOptions::default(); + let l = 
SkipMap::map_with_comparator(&p, Options::new(), open_options, map_options, Ascend).unwrap(); + assert_eq!(1000, l.len()); + let mut data = (0..1000).collect::<std::vec::Vec<usize>>(); + data.shuffle(&mut rand::thread_rng()); + for i in data { + let k = key(i); + let ent = l.get(&k).unwrap(); + assert_eq!(new_value(i), ent.value()); + assert_eq!(ent.key(), k); + } + }) +} + +struct Person { + id: u32, + name: std::string::String, +} + +impl Person { + fn encoded_size(&self) -> usize { + 4 + self.name.len() + } +} + +fn get_or_insert_with_value(l: SkipMap) { + let alice = Person { + id: 1, + name: std::string::String::from("Alice"), + }; + + let encoded_size = alice.encoded_size() as u32; + + let vb = ValueBuilder::new(encoded_size, |val| { + assert_eq!(val.capacity(), encoded_size as usize); + assert!(val.is_empty()); + val.put_u32_le(alice.id).unwrap(); + assert_eq!(val.len(), 4); + assert_eq!(val.remaining(), encoded_size as usize - 4); + assert_eq!(&*val, alice.id.to_le_bytes()); + val[..4].copy_from_slice(&alice.id.to_be_bytes()); + assert_eq!(&*val, alice.id.to_be_bytes()); + val.put_slice(alice.name.as_bytes()).unwrap(); + assert_eq!(val.len(), encoded_size as usize); + let err = val.put_slice(&[1]).unwrap_err(); + assert_eq!( + std::string::ToString::to_string(&err), + "buffer does not have enough space (remaining 0, want 1)" + ); + Ok(()) + }); + + l.get_or_insert_with_value_builder::<()>(b"alice", vb) + .unwrap(); +} + +#[test] +fn test_get_or_insert_with_value() { + run(|| { + get_or_insert_with_value(SkipList::new(TEST_OPTIONS).unwrap()); + }) +} + +#[test] +fn test_get_or_insert_with_value_unify() { + run(|| { + get_or_insert_with_value(SkipList::new(UNIFY_TEST_OPTIONS).unwrap()); + }) +} + +#[test] +#[cfg(feature = "memmap")] +#[cfg_attr(miri, ignore)] +fn test_get_or_insert_with_value_map_mut() { + run(|| unsafe { + let dir = tempfile::tempdir().unwrap(); + let p = dir + .path() + .join("test_skipmap_get_or_insert_with_value_map_mut"); + let open_options = 
OpenOptions::default() + .create_new(Some(ARENA_SIZE as u32)) + .read(true) + .write(true); + let map_options = MmapOptions::default(); + get_or_insert_with_value( + SkipList::map_mut(p, Options::new(), open_options, map_options).unwrap(), + ); + }) +} + +#[test] +#[cfg(feature = "memmap")] +fn test_get_or_insert_with_value_map_anon() { + run(|| { + let map_options = MmapOptions::default().len(ARENA_SIZE as u32); + get_or_insert_with_value(SkipList::map_anon(Options::new(), map_options).unwrap()); + }) +} + +#[test] +#[cfg(feature = "memmap")] +fn test_get_or_insert_with_value_map_anon_unify() { + run(|| { + let map_options = MmapOptions::default().len(ARENA_SIZE as u32); + get_or_insert_with_value(SkipList::map_anon(UNIFY_TEST_OPTIONS, map_options).unwrap()); + }) +} + +fn get_or_insert_with(l: SkipMap) { + let alice = Person { + id: 1, + name: std::string::String::from("Alice"), + }; + + let encoded_size = alice.encoded_size() as u32; + + let kb = KeyBuilder::new(5u8.into(), |key: &mut VacantBuffer| { + key.put_slice(b"alice").unwrap(); + Ok(()) + }); + + let vb = ValueBuilder::new(encoded_size, |val: &mut VacantBuffer| { + assert_eq!(val.capacity(), encoded_size as usize); + assert!(val.is_empty()); + val.put_u32_le(alice.id).unwrap(); + assert_eq!(val.len(), 4); + assert_eq!(val.remaining(), encoded_size as usize - 4); + assert_eq!(&*val, alice.id.to_le_bytes()); + val[..4].copy_from_slice(&alice.id.to_be_bytes()); + assert_eq!(&*val, alice.id.to_be_bytes()); + val.put_slice(alice.name.as_bytes()).unwrap(); + assert_eq!(val.len(), encoded_size as usize); + let err = val.put_slice(&[1]).unwrap_err(); + assert_eq!( + std::string::ToString::to_string(&err), + "buffer does not have enough space (remaining 0, want 1)" + ); + Ok(()) + }); + + l.get_or_insert_with_builders::<(), ()>(kb, vb).unwrap(); +} + +#[test] +fn test_get_or_insert_with() { + run(|| get_or_insert_with(SkipList::new(TEST_OPTIONS).unwrap())) +} + +#[test] +fn test_get_or_insert_with_unify() { + 
run(|| get_or_insert_with(SkipList::new(UNIFY_TEST_OPTIONS).unwrap())) +} + +#[test] +#[cfg(feature = "memmap")] +#[cfg_attr(miri, ignore)] +fn test_get_or_insert_with_map_mut() { + run(|| unsafe { + let dir = tempfile::tempdir().unwrap(); + let p = dir.path().join("test_skipmap_get_or_insert_with_map_mut"); + let open_options = OpenOptions::default() + .create_new(Some(ARENA_SIZE as u32)) + .read(true) + .write(true); + let map_options = MmapOptions::default(); + get_or_insert_with(SkipList::map_mut(p, Options::new(), open_options, map_options).unwrap()); + }) +} + +#[test] +#[cfg(feature = "memmap")] +fn test_get_or_insert_with_map_anon() { + run(|| { + let map_options = MmapOptions::default().len(ARENA_SIZE as u32); + get_or_insert_with(SkipList::map_anon(Options::new(), map_options).unwrap()); + }) +} + +#[test] +#[cfg(feature = "memmap")] +fn test_get_or_insert_with_map_anon_unify() { + run(|| { + let map_options = MmapOptions::default().len(ARENA_SIZE as u32); + get_or_insert_with(SkipList::map_anon(UNIFY_TEST_OPTIONS, map_options).unwrap()); + }) +} + +fn insert_in(l: SkipMap) { + let k = 0u64.to_le_bytes(); + for i in 0..100 { + let v = new_value(i); + let old = l.insert(&k, &v).unwrap(); + if let Some(old) = old { + assert_eq!(old.key(), k); + assert_eq!(old.value(), new_value(i - 1)); + } + } + + let ent = l.get(&k).unwrap(); + assert_eq!(ent.key(), k); + assert_eq!(ent.value(), new_value(99)); +} + +#[test] +fn test_insert_in() { + run(|| { + insert_in(SkipList::new(TEST_OPTIONS).unwrap()); + }) +} + +#[test] +fn test_insert_in_unify() { + run(|| { + insert_in(SkipList::new(UNIFY_TEST_OPTIONS).unwrap()); + }) +} + +#[test] +#[cfg(feature = "memmap")] +#[cfg_attr(miri, ignore)] +fn test_insert_in_map_mut() { + run(|| unsafe { + let dir = tempfile::tempdir().unwrap(); + let p = dir.path().join("test_skipmap_insert_in_map_mut"); + let open_options = OpenOptions::default() + .create_new(Some(ARENA_SIZE as u32)) + .read(true) + .write(true); + let map_options 
= MmapOptions::default(); + insert_in(SkipList::map_mut(p, Options::new(), open_options, map_options).unwrap()); + }) +} + +#[test] +#[cfg(feature = "memmap")] +fn test_insert_in_map_anon() { + run(|| { + let map_options = MmapOptions::default().len(ARENA_SIZE as u32); + insert_in(SkipList::map_anon(Options::new(), map_options).unwrap()); + }) +} + +#[test] +#[cfg(feature = "memmap")] +fn test_insert_in_map_anon_unify() { + run(|| { + let map_options = MmapOptions::default().len(ARENA_SIZE as u32); + insert_in(SkipList::map_anon(UNIFY_TEST_OPTIONS, map_options).unwrap()); + }) +} + +fn insert_with_value(l: SkipMap) { + let alice = Person { + id: 1, + name: std::string::String::from("Alice"), + }; + + let encoded_size = alice.encoded_size() as u32; + + let vb = ValueBuilder::new(encoded_size, |val| { + assert_eq!(val.capacity(), encoded_size as usize); + assert!(val.is_empty()); + val.put_u32_le(alice.id).unwrap(); + assert_eq!(val.len(), 4); + assert_eq!(val.remaining(), encoded_size as usize - 4); + assert_eq!(val, alice.id.to_le_bytes()); + val[..4].copy_from_slice(&alice.id.to_be_bytes()); + assert_eq!(val, alice.id.to_be_bytes()); + val.put_slice(alice.name.as_bytes()).unwrap(); + assert_eq!(val.len(), encoded_size as usize); + let err = val.put_slice(&[1]).unwrap_err(); + assert_eq!( + std::string::ToString::to_string(&err), + "buffer does not have enough space (remaining 0, want 1)" + ); + Ok(()) + }); + + l.insert_with_value_builder::<()>(b"alice", vb).unwrap(); + + let alice2 = Person { + id: 2, + name: std::string::String::from("Alice"), + }; + + let vb = ValueBuilder::new(encoded_size, |val| { + assert_eq!(val.capacity(), encoded_size as usize); + assert!(val.is_empty()); + val.put_u32_le(alice2.id).unwrap(); + assert_eq!(val.len(), 4); + assert_eq!(val.remaining(), encoded_size as usize - 4); + assert_eq!(&*val, alice2.id.to_le_bytes()); + val[..4].copy_from_slice(&alice2.id.to_be_bytes()); + assert_eq!(&*val, alice2.id.to_be_bytes()); + 
val.put_slice(alice2.name.as_bytes()).unwrap(); + assert_eq!(val.len(), encoded_size as usize); + let err = val.put_slice(&[1]).unwrap_err(); + assert_eq!( + std::string::ToString::to_string(&err), + "buffer does not have enough space (remaining 0, want 1)" + ); + Ok(()) + }); + + let old = l + .insert_with_value_builder::<()>(b"alice", vb) + .unwrap() + .unwrap(); + + assert_eq!(old.key(), b"alice"); + assert!(old.value().starts_with(&alice.id.to_be_bytes())); + + let ent = l.get(b"alice").unwrap(); + assert_eq!(ent.key(), b"alice"); + assert!(ent.value().starts_with(&alice2.id.to_be_bytes())); +} + +#[test] +fn test_insert_with_value() { + run(|| insert_with_value(SkipList::new(TEST_OPTIONS).unwrap())); +} + +#[test] +fn test_insert_with_value_unify() { + run(|| insert_with_value(SkipList::new(UNIFY_TEST_OPTIONS).unwrap())); +} + +#[test] +#[cfg(feature = "memmap")] +#[cfg_attr(miri, ignore)] +fn test_insert_with_value_map_mut() { + run(|| unsafe { + let dir = tempfile::tempdir().unwrap(); + let p = dir + .path() + .join("test_skipmap_get_or_insert_with_value_map_mut"); + let open_options = OpenOptions::default() + .create_new(Some(ARENA_SIZE as u32)) + .read(true) + .write(true); + let map_options = MmapOptions::default(); + insert_with_value(SkipList::map_mut(p, Options::new(), open_options, map_options).unwrap()); + }) +} + +#[test] +#[cfg(feature = "memmap")] +fn test_insert_with_value_map_anon() { + run(|| { + let map_options = MmapOptions::default().len(ARENA_SIZE as u32); + insert_with_value(SkipList::map_anon(Options::new(), map_options).unwrap()); + }) +} + +#[test] +#[cfg(feature = "memmap")] +fn test_insert_with_value_map_anon_unify() { + run(|| { + let map_options = MmapOptions::default().len(ARENA_SIZE as u32); + insert_with_value(SkipList::map_anon(UNIFY_TEST_OPTIONS, map_options).unwrap()); + }) +} + +fn insert_with(l: SkipMap) { + let alice = Person { + id: 1, + name: std::string::String::from("Alice"), + }; + + let encoded_size = 
alice.encoded_size() as u32; + + let kb = KeyBuilder::new(5u8.into(), |key: &mut VacantBuffer| { + key.put_slice(b"alice").unwrap(); + Ok(()) + }); + + let vb = ValueBuilder::new(encoded_size as u32, |val: &mut VacantBuffer| { + assert_eq!(val.capacity(), encoded_size as usize); + assert!(val.is_empty()); + val.put_u32_le(alice.id).unwrap(); + assert_eq!(val.len(), 4); + assert_eq!(val.remaining(), encoded_size as usize - 4); + assert_eq!(val, alice.id.to_le_bytes()); + val[..4].copy_from_slice(&alice.id.to_be_bytes()); + assert_eq!(val, alice.id.to_be_bytes()); + val.put_slice(alice.name.as_bytes()).unwrap(); + assert_eq!(val.len(), encoded_size as usize); + let err = val.put_slice(&[1]).unwrap_err(); + assert_eq!( + std::string::ToString::to_string(&err), + "buffer does not have enough space (remaining 0, want 1)" + ); + Ok(()) + }); + + l.insert_with_builders::<(), ()>(kb, vb).unwrap(); + + let alice2 = Person { + id: 2, + name: std::string::String::from("Alice"), + }; + + let vb = ValueBuilder::new(encoded_size, |val: &mut VacantBuffer| { + assert_eq!(val.capacity(), encoded_size as usize); + assert!(val.is_empty()); + val.put_u32_le(alice2.id).unwrap(); + assert_eq!(val.len(), 4); + assert_eq!(val.remaining(), encoded_size as usize - 4); + assert_eq!(&*val, alice2.id.to_le_bytes()); + val[..4].copy_from_slice(&alice2.id.to_be_bytes()); + assert_eq!(&*val, alice2.id.to_be_bytes()); + val.put_slice(alice2.name.as_bytes()).unwrap(); + assert_eq!(val.len(), encoded_size as usize); + let err = val.put_slice(&[1]).unwrap_err(); + assert_eq!( + std::string::ToString::to_string(&err), + "buffer does not have enough space (remaining 0, want 1)" + ); + Ok(()) + }); + let old = l.insert_with_builders::<(), ()>(kb, vb).unwrap().unwrap(); + + assert_eq!(old.key(), b"alice"); + assert!(old.value().starts_with(&alice.id.to_be_bytes())); + + let ent = l.get(b"alice").unwrap(); + assert_eq!(ent.key(), b"alice"); + assert!(ent.value().starts_with(&alice2.id.to_be_bytes())); +} 
+ +#[test] +fn test_insert_with() { + run(|| insert_with(SkipList::new(TEST_OPTIONS).unwrap())) +} + +#[test] +fn test_insert_with_unify() { + run(|| insert_with(SkipList::new(UNIFY_TEST_OPTIONS).unwrap())) +} + +#[test] +#[cfg(feature = "memmap")] +#[cfg_attr(miri, ignore)] +fn test_insert_with_map_mut() { + run(|| unsafe { + let dir = tempfile::tempdir().unwrap(); + let p = dir.path().join("test_skipmap_insert_with_map_mut"); + let open_options = OpenOptions::default() + .create_new(Some(ARENA_SIZE as u32)) + .read(true) + .write(true); + let map_options = MmapOptions::default(); + insert_with(SkipList::map_mut(p, Options::new(), open_options, map_options).unwrap()); + }) +} + +#[test] +#[cfg(feature = "memmap")] +fn test_insert_with_map_anon() { + run(|| { + let map_options = MmapOptions::default().len(ARENA_SIZE as u32); + insert_with(SkipList::map_anon(Options::new(), map_options).unwrap()); + }) +} + +#[test] +#[cfg(feature = "memmap")] +fn test_insert_with_map_anon_unify() { + run(|| { + let map_options = MmapOptions::default().len(ARENA_SIZE as u32); + insert_with(SkipList::map_anon(UNIFY_TEST_OPTIONS, map_options).unwrap()); + }) +} + +fn get_or_remove(l: SkipMap) { + for i in 0..100 { + let v = new_value(i); + l.insert(&key(i), &v).unwrap(); + } + + for i in 0..100 { + let k = key(i); + let old = l.get_or_remove(&k).unwrap().unwrap(); + assert_eq!(old.key(), k); + assert_eq!(old.value(), new_value(i)); + + let old = l.get_or_remove(&k).unwrap().unwrap(); + assert_eq!(old.key(), k); + assert_eq!(old.value(), new_value(i)); + } + + for i in 0..100 { + let k = key(i); + let ent = l.get(&k).unwrap(); + assert_eq!(ent.key(), k); + assert_eq!(ent.value(), new_value(i)); + } +} + +#[test] +fn test_get_or_remove() { + run(|| get_or_remove(SkipList::new(TEST_OPTIONS).unwrap())) +} + +#[test] +fn test_get_or_remove_unify() { + run(|| get_or_remove(SkipList::new(UNIFY_TEST_OPTIONS).unwrap())) +} + +#[test] +#[cfg(feature = "memmap")] +#[cfg_attr(miri, ignore)] +fn 
test_get_or_remove_map_mut() { + run(|| unsafe { + let dir = tempfile::tempdir().unwrap(); + let p = dir.path().join("test_skipmap_get_or_remove_map_mut"); + let open_options = OpenOptions::default() + .create_new(Some(ARENA_SIZE as u32)) + .read(true) + .write(true); + let map_options = MmapOptions::default(); + get_or_remove(SkipList::map_mut(p, Options::new(), open_options, map_options).unwrap()); + }) +} + +#[test] +#[cfg(feature = "memmap")] +fn test_get_or_remove_map_anon() { + run(|| { + let map_options = MmapOptions::default().len(ARENA_SIZE as u32); + get_or_remove(SkipList::map_anon(Options::new(), map_options).unwrap()); + }) +} + +#[test] +#[cfg(feature = "memmap")] +fn test_get_or_remove_map_anon_unify() { + run(|| { + let map_options = MmapOptions::default().len(ARENA_SIZE as u32); + get_or_remove(SkipList::map_anon(UNIFY_TEST_OPTIONS, map_options).unwrap()); + }) +} + +fn remove(l: SkipMap) { + for i in 0..100 { + let v = new_value(i); + l.insert(&key(i), &v).unwrap(); + } + + for i in 0..100 { + let k = key(i); + // no race, remove should succeed + let old = l + .compare_remove(&k, Ordering::SeqCst, Ordering::Acquire) + .unwrap(); + assert!(old.is_none()); + + // key already removed + let old = l + .compare_remove(&k, Ordering::SeqCst, Ordering::Acquire) + .unwrap(); + assert!(old.is_none()); + } + + for i in 0..100 { + let k = key(i); + let ent = l.get(&k); + assert!(ent.is_none()); + } +} + +#[test] +fn test_remove() { + run(|| remove(SkipList::new(TEST_OPTIONS).unwrap())) +} + +#[test] +fn test_remove_unify() { + run(|| remove(SkipList::new(UNIFY_TEST_OPTIONS).unwrap())) +} + +#[test] +#[cfg(feature = "memmap")] +#[cfg_attr(miri, ignore)] +fn test_remove_map_mut() { + run(|| unsafe { + let dir = tempfile::tempdir().unwrap(); + let p = dir.path().join("test_skipmap_remove_map_mut"); + let open_options = OpenOptions::default() + .create_new(Some(ARENA_SIZE as u32)) + .read(true) + .write(true); + let map_options = MmapOptions::default(); + 
remove(SkipList::map_mut(p, Options::new(), open_options, map_options).unwrap()); + }) +} + +#[test] +#[cfg(feature = "memmap")] +fn test_remove_map_anon() { + run(|| { + let map_options = MmapOptions::default().len(ARENA_SIZE as u32); + remove(SkipList::map_anon(Options::new(), map_options).unwrap()); + }) +} + +#[test] +#[cfg(feature = "memmap")] +fn test_remove_map_anon_unify() { + run(|| { + let map_options = MmapOptions::default().len(ARENA_SIZE as u32); + remove(SkipList::map_anon(UNIFY_TEST_OPTIONS, map_options).unwrap()); + }) +} + +fn remove2(l: SkipMap) { + for i in 0..100 { + let v = new_value(i); + l.insert(&key(i), &v).unwrap(); + } + + for i in 0..100 { + let k = key(i); + // not found, remove should succeed + let old = l + .compare_remove(&k, Ordering::SeqCst, Ordering::Acquire) + .unwrap(); + assert!(old.is_none()); + + // no-race, remove should succeed + let old = l + .compare_remove(&k, Ordering::SeqCst, Ordering::Acquire) + .unwrap(); + assert!(old.is_none()); + } + + for i in 0..100 { + let k = key(i); + let ent = l.get(&k); + assert!(ent.is_none()); + } +} + +#[test] +fn test_remove2() { + run(|| remove2(SkipList::new(TEST_OPTIONS).unwrap())) +} + +#[test] +fn test_remove2_unify() { + run(|| remove2(SkipList::new(UNIFY_TEST_OPTIONS).unwrap())) +} + +#[test] +#[cfg(feature = "memmap")] +#[cfg_attr(miri, ignore)] +fn test_remove2_map_mut() { + run(|| unsafe { + let dir = tempfile::tempdir().unwrap(); + let p = dir.path().join("test_skipmap_remove2_map_mut"); + let open_options = OpenOptions::default() + .create_new(Some(ARENA_SIZE as u32)) + .read(true) + .write(true); + let map_options = MmapOptions::default(); + remove2(SkipList::map_mut(p, Options::new(), open_options, map_options).unwrap()); + }) +} + +#[test] +#[cfg(feature = "memmap")] +fn test_remove2_map_anon() { + run(|| unsafe { + let map_options = MmapOptions::default().len(ARENA_SIZE as u32); + remove2(SkipList::map_anon(Options::new(), map_options).unwrap()); + }) +} + +#[test] 
+#[cfg(feature = "memmap")] +fn test_remove2_map_anon_unify() { + run(|| { + let map_options = MmapOptions::default().len(ARENA_SIZE as u32); + remove2(SkipList::map_anon(UNIFY_TEST_OPTIONS, map_options).unwrap()); + }) +} diff --git a/src/sync/tests/trailed.rs b/src/sync/tests/trailed.rs new file mode 100644 index 0000000..9923a32 --- /dev/null +++ b/src/sync/tests/trailed.rs @@ -0,0 +1,2561 @@ +use super::*; + +type SkipList = crate::sync::trailed::SkipMap; + +type SkipMap = crate::sync::trailed::SkipMap; + +fn trailer() -> u64 { + 123456789 +} + +fn empty_in(l: SkipMap) { + let mut it = l.iter(); + + assert!(it.seek_lower_bound(Bound::Unbounded).is_none()); + assert!(it.seek_upper_bound(Bound::Unbounded).is_none()); + assert!(it.seek_lower_bound(Bound::Included(b"aaa")).is_none()); + assert!(it.seek_upper_bound(Bound::Excluded(b"aaa")).is_none()); + assert!(it.seek_lower_bound(Bound::Excluded(b"aaa")).is_none()); + assert!(it.seek_upper_bound(Bound::Included(b"aaa")).is_none()); + assert!(l.first().is_none()); + assert!(l.last().is_none()); + assert!(l.get(b"aaa").is_none()); + assert!(!l.contains_key(b"aaa")); + assert!(l.allocated() > 0); + assert!(l.capacity() > 0); + assert_eq!(l.remaining(), l.capacity() - l.allocated()); +} + +#[test] +fn test_empty() { + run(|| empty_in(SkipList::new(Options::new()).unwrap())); +} + +#[test] +fn test_empty_unify() { + run(|| empty_in(SkipList::new(UNIFY_TEST_OPTIONS).unwrap())); +} + +#[test] +#[cfg(feature = "memmap")] +#[cfg_attr(miri, ignore)] +fn test_empty_map_mut() { + run(|| unsafe { + let dir = tempfile::tempdir().unwrap(); + let p = dir.path().join("test_skipmap_empty_map_mut"); + let open_options = OpenOptions::default() + .create_new(Some(1000)) + .read(true) + .write(true); + let map_options = MmapOptions::default(); + + let x = SkipList::map_mut(p, Options::new(), open_options, map_options).unwrap(); + empty_in(x); + }) +} + +#[test] +#[cfg(feature = "memmap")] +fn test_empty_map_anon() { + run(|| { + let 
map_options = MmapOptions::default().len(1000); + empty_in(SkipList::map_anon(Options::new(), map_options).unwrap()); + }) +} + +#[test] +#[cfg(feature = "memmap")] +fn test_empty_map_anon_unify() { + run(|| { + let map_options = MmapOptions::default().len(1000); + empty_in(SkipList::map_anon(TEST_OPTIONS, map_options).unwrap()); + }) +} + +fn full_in(l: impl FnOnce(usize) -> SkipMap) { + let l = l(1000); + let mut found_arena_full = false; + + for i in 0..100 { + if let Err(e) = l.get_or_insert(&make_int_key(i), &make_value(i), trailer()) { + assert!(matches!( + e, + Error::Arena(ArenaError::InsufficientSpace { .. }) + )); + found_arena_full = true; + break; + } + } + + assert!(found_arena_full); +} + +#[test] +fn test_full() { + run(|| { + full_in(|n| { + SkipList::new( + Options::new() + .with_capacity(n as u32) + .with_freelist(Freelist::None), + ) + .unwrap() + }) + }) +} + +#[test] +fn test_full_unify() { + run(|| { + full_in(|n| { + SkipList::new( + UNIFY_TEST_OPTIONS + .with_capacity(n as u32) + .with_freelist(Freelist::None), + ) + .unwrap() + }) + }) +} + +#[test] +#[cfg(feature = "memmap")] +#[cfg_attr(miri, ignore)] +fn test_full_map_mut() { + run(|| unsafe { + let dir = tempfile::tempdir().unwrap(); + let p = dir.path().join("test_skipmap_full_map_mut"); + + full_in(|n| { + let open_options = OpenOptions::default() + .create_new(Some(n as u32)) + .read(true) + .write(true); + let map_options = MmapOptions::default(); + SkipList::map_mut( + p, + Options::new().with_freelist(Freelist::None), + open_options, + map_options, + ) + .unwrap() + }); + }) +} + +#[test] +#[cfg(feature = "memmap")] +fn test_full_map_anon() { + run(|| { + full_in(|n| { + let map_options = MmapOptions::default().len(n as u32); + SkipList::map_anon(Options::new().with_freelist(Freelist::None), map_options).unwrap() + }); + }) +} + +#[test] +#[cfg(feature = "memmap")] +fn test_full_map_anon_unify() { + run(|| { + full_in(|n| { + let map_options = MmapOptions::default().len(n as u32); 
+ SkipList::map_anon(Options::new().with_freelist(Freelist::None), map_options).unwrap() + }); + }) +} + +fn basic_in(mut l: SkipMap) { + // Try adding values. + l.get_or_insert(b"key1", &make_value(1), trailer()).unwrap(); + l.get_or_insert(b"key3", &make_value(3), trailer()).unwrap(); + l.get_or_insert(b"key2", &make_value(2), trailer()).unwrap(); + assert_eq!(l.comparator(), &Ascend); + + { + let mut it = l.iter(); + let ent = it.seek_lower_bound(Bound::Included(b"key1")).unwrap(); + assert_eq!(ent.key(), b"key1"); + assert_eq!(ent.value(), &make_value(1)); + + let ent = it.seek_lower_bound(Bound::Included(b"key2")).unwrap(); + assert_eq!(ent.key(), b"key2"); + assert_eq!(ent.value(), &make_value(2)); + + let ent = it.seek_lower_bound(Bound::Included(b"key3")).unwrap(); + assert_eq!(ent.key(), b"key3"); + assert_eq!(ent.value(), &make_value(3)); + } + + l.get_or_insert("a".as_bytes(), &[], trailer()).unwrap(); + l.get_or_insert("a".as_bytes(), &[], trailer()).unwrap(); + + { + let mut it = l.iter(); + let ent = it.seek_lower_bound(Bound::Included(b"a")).unwrap(); + assert_eq!(ent.key(), b"a"); + assert_eq!(ent.value(), &[]); + + let ent = it.next().unwrap(); + assert_eq!(ent.key(), b"key1"); + assert_eq!(ent.value(), &make_value(1)); + } + + l.get_or_insert("b".as_bytes(), &[], trailer()).unwrap(); + l.get_or_insert("b".as_bytes(), &[], trailer()).unwrap(); + + { + let mut it = l.iter(); + let ent = it.seek_lower_bound(Bound::Included(b"b")).unwrap(); + assert_eq!(ent.key(), b"b"); + assert_eq!(ent.value(), &[]); + + let ent = it.next().unwrap(); + assert_eq!(ent.key(), b"key1"); + assert_eq!(ent.value(), &make_value(1)); + + let ent = it.entry().unwrap(); + assert_eq!(ent.key(), b"key1"); + assert_eq!(ent.value(), &make_value(1)); + } + + l.get_or_insert(b"b", &[], trailer()).unwrap().unwrap(); + + assert!(l.get_or_insert(b"c", &[], trailer()).unwrap().is_none()); + + unsafe { + l.clear().unwrap(); + } + + let l = l.clone(); + { + let mut it = l.iter(); + 
assert!(it.seek_lower_bound(Bound::Unbounded).is_none()); + assert!(it.seek_upper_bound(Bound::Unbounded).is_none()); + } + assert!(l.is_empty()); + + #[cfg(feature = "memmap")] + l.flush().unwrap(); + + #[cfg(feature = "memmap")] + l.flush_async().unwrap(); +} + +#[test] +fn test_basic() { + run(|| basic_in(SkipList::new(TEST_OPTIONS).unwrap())) +} + +#[test] +fn test_basic_unify() { + run(|| basic_in(SkipList::new(UNIFY_TEST_OPTIONS).unwrap())) +} + +#[test] +#[cfg(feature = "memmap")] +#[cfg_attr(miri, ignore)] +fn test_basic_map_mut() { + run(|| unsafe { + let dir = tempfile::tempdir().unwrap(); + let p = dir.path().join("test_skipmap_basic_map_mut"); + let open_options = OpenOptions::default() + .create_new(Some(ARENA_SIZE as u32)) + .read(true) + .write(true); + let map_options = MmapOptions::default(); + basic_in(SkipList::map_mut(p, Options::new(), open_options, map_options).unwrap()); + }) +} + +#[test] +#[cfg(feature = "memmap")] +fn test_basic_map_anon() { + run(|| { + let map_options = MmapOptions::default().len(ARENA_SIZE as u32); + basic_in(SkipList::map_anon(Options::new(), map_options).unwrap()); + }) +} + +#[test] +#[cfg(feature = "memmap")] +fn test_basic_map_anon_unify() { + run(|| { + let map_options = MmapOptions::default().len(ARENA_SIZE as u32); + basic_in(SkipList::map_anon(TEST_OPTIONS, map_options).unwrap()); + }) +} + +fn ordering() { + let l = SkipList::with_comparator(TEST_OPTIONS, Descend).unwrap(); + + l.get_or_insert(b"a1", b"a1", trailer()).unwrap(); + l.get_or_insert(b"a2", b"a2", trailer()).unwrap(); + l.get_or_insert(b"a3", b"a3", trailer()).unwrap(); + + let mut it = l.iter(); + for i in (1..=3).rev() { + let ent = it.next().unwrap(); + assert_eq!(ent.key(), format!("a{i}").as_bytes()); + assert_eq!(ent.value(), format!("a{i}").as_bytes()); + } +} + +#[test] +fn test_ordering() { + run(ordering); +} + +fn get(l: SkipMap) { + l.get_or_insert(b"a", b"a1", trailer()).unwrap(); + l.get_or_insert(b"a", b"a2", trailer()).unwrap(); + 
l.get_or_insert(b"c", b"c1", trailer()).unwrap(); + l.get_or_insert(b"c", b"c2", trailer()).unwrap(); + + let ent = l.get(b"a").unwrap(); + assert_eq!(ent.key(), b"a"); + assert_eq!(ent.value(), b"a1"); + + let ent = l.get(b"a").unwrap(); + assert_eq!(ent.key(), b"a"); + assert_eq!(ent.value(), b"a1"); + + let ent = l.get(b"a").unwrap(); + assert_eq!(ent.key(), b"a"); + assert_eq!(ent.value(), b"a1"); + + l.insert(b"a", b"a2", trailer()).unwrap(); + + let ent = l.get(b"a").unwrap(); + assert_eq!(ent.key(), b"a"); + assert_eq!(ent.value(), b"a2"); + + assert!(l.get(b"b").is_none()); + assert!(l.get(b"b").is_none()); + assert!(l.get(b"b").is_none()); + assert!(l.get(b"b").is_none()); + assert!(l.get(b"b").is_none()); + + let ent = l.get(b"c").unwrap(); + assert_eq!(ent.key(), b"c"); + assert_eq!(ent.value(), b"c1"); + + let ent = l.get(b"c").unwrap(); + assert_eq!(ent.key(), b"c"); + assert_eq!(ent.value(), b"c1"); + + assert!(l.get(b"d").is_none()); +} + +#[test] +fn test_get() { + run(|| get(SkipList::new(TEST_OPTIONS).unwrap())); +} + +#[test] +fn test_get_unify() { + run(|| get(SkipList::new(UNIFY_TEST_OPTIONS).unwrap())); +} + +#[test] +#[cfg(feature = "memmap")] +#[cfg_attr(miri, ignore)] +fn test_get_map_mut() { + run(|| unsafe { + let dir = tempfile::tempdir().unwrap(); + let p = dir.path().join("test_skipmap_get_map_mut"); + let open_options = OpenOptions::default() + .create_new(Some(ARENA_SIZE as u32)) + .read(true) + .write(true); + let map_options = MmapOptions::default(); + get(SkipList::map_mut(p, Options::new(), open_options, map_options).unwrap()); + }) +} + +#[test] +#[cfg(feature = "memmap")] +fn test_get_map_anon() { + run(|| { + let map_options = MmapOptions::default().len(ARENA_SIZE as u32); + get(SkipList::map_anon(Options::new(), map_options).unwrap()); + }) +} + +#[test] +#[cfg(feature = "memmap")] +fn test_get_map_anon_unify() { + run(|| { + let map_options = MmapOptions::default().len(ARENA_SIZE as u32); + 
get(SkipList::map_anon(UNIFY_TEST_OPTIONS, map_options).unwrap()); + }) +} + +fn gt_in(l: SkipMap) { + l.get_or_insert(b"a", b"a1", trailer()).unwrap(); + l.get_or_insert(b"a", b"a2", trailer()).unwrap(); + l.get_or_insert(b"c", b"c1", trailer()).unwrap(); + l.get_or_insert(b"c", b"c2", trailer()).unwrap(); + l.get_or_insert(b"c", b"c3", trailer()).unwrap(); + + assert!(l.lower_bound(Bound::Excluded(b"a")).is_some()); + assert!(l.lower_bound(Bound::Excluded(b"b")).is_some()); + assert!(l.lower_bound(Bound::Excluded(b"c")).is_none()); + + let ent = l.lower_bound(Bound::Excluded(b"")).unwrap(); + assert_eq!(ent.key(), b"a"); + assert_eq!(ent.value(), b"a1"); + + let ent = l.lower_bound(Bound::Excluded(b"")).unwrap(); + assert_eq!(ent.key(), b"a"); + assert_eq!(ent.value(), b"a1"); + + let ent = l.lower_bound(Bound::Excluded(b"a")).unwrap(); + assert_eq!(ent.key(), b"c"); + assert_eq!(ent.value(), b"c1"); + + let ent = l.lower_bound(Bound::Excluded(b"b")).unwrap(); + assert_eq!(ent.key(), b"c"); + assert_eq!(ent.value(), b"c1"); + + let ent = l.lower_bound(Bound::Excluded(b"b")).unwrap(); + assert_eq!(ent.key(), b"c"); + assert_eq!(ent.value(), b"c1"); + + assert!(l.lower_bound(Bound::Excluded(b"c")).is_none()); +} + +#[test] +fn test_gt() { + run(|| gt_in(SkipList::new(TEST_OPTIONS).unwrap())); +} + +#[test] +fn test_gt_unify() { + run(|| gt_in(SkipList::new(UNIFY_TEST_OPTIONS).unwrap())); +} + +#[test] +#[cfg(feature = "memmap")] +#[cfg_attr(miri, ignore)] +fn test_gt_map_mut() { + run(|| unsafe { + let dir = tempfile::tempdir().unwrap(); + let p = dir.path().join("test_skipmap_gt_map_mut"); + let open_options = OpenOptions::default() + .create_new(Some(ARENA_SIZE as u32)) + .read(true) + .write(true); + let map_options = MmapOptions::default(); + gt_in(SkipList::map_mut(p, Options::new(), open_options, map_options).unwrap()); + }) +} + +#[test] +#[cfg(feature = "memmap")] +fn test_gt_map_anon() { + run(|| { + let map_options = MmapOptions::default().len(ARENA_SIZE 
as u32); + gt_in(SkipList::map_anon(Options::new(), map_options).unwrap()); + }) +} + +#[test] +#[cfg(feature = "memmap")] +fn test_gt_map_anon_unify() { + run(|| { + let map_options = MmapOptions::default().len(ARENA_SIZE as u32); + gt_in(SkipList::map_anon(UNIFY_TEST_OPTIONS, map_options).unwrap()); + }) +} + +fn ge_in(l: SkipMap) { + l.get_or_insert(b"a", b"a1", trailer()).unwrap(); + l.get_or_insert(b"a", b"a2", trailer()).unwrap(); + l.get_or_insert(b"c", b"c1", trailer()).unwrap(); + l.get_or_insert(b"c", b"c2", trailer()).unwrap(); + + assert!(l.lower_bound(Bound::Included(b"a")).is_some()); + assert!(l.lower_bound(Bound::Included(b"b")).is_some()); + assert!(l.lower_bound(Bound::Included(b"c")).is_some()); + + let ent = l.lower_bound(Bound::Included(b"a")).unwrap(); + assert_eq!(ent.key(), b"a"); + assert_eq!(ent.value(), b"a1"); + + let ent = l.lower_bound(Bound::Included(b"a")).unwrap(); + assert_eq!(ent.key(), b"a"); + assert_eq!(ent.value(), b"a1"); + + l.insert(b"a", b"a2", trailer()).unwrap(); + + let ent = l.lower_bound(Bound::Included(b"a")).unwrap(); + assert_eq!(ent.key(), b"a"); + assert_eq!(ent.value(), b"a2"); + + let ent = l.lower_bound(Bound::Included(b"b")).unwrap(); + assert_eq!(ent.key(), b"c"); + assert_eq!(ent.value(), b"c1"); + + let ent = l.lower_bound(Bound::Included(b"b")).unwrap(); + assert_eq!(ent.key(), b"c"); + assert_eq!(ent.value(), b"c1"); + + let ent = l.lower_bound(Bound::Included(b"c")).unwrap(); + assert_eq!(ent.key(), b"c"); + assert_eq!(ent.value(), b"c1"); + + let ent = l.lower_bound(Bound::Included(b"c")).unwrap(); + assert_eq!(ent.key(), b"c"); + assert_eq!(ent.value(), b"c1"); + + assert!(l.lower_bound(Bound::Included(b"d")).is_none()); +} + +#[test] +fn test_ge() { + run(|| ge_in(SkipList::new(TEST_OPTIONS).unwrap())); +} + +#[test] +fn test_ge_unify() { + run(|| ge_in(SkipList::new(UNIFY_TEST_OPTIONS).unwrap())); +} + +#[test] +#[cfg(feature = "memmap")] +#[cfg_attr(miri, ignore)] +fn test_ge_map_mut() { + run(|| 
unsafe { + let dir = tempfile::tempdir().unwrap(); + let p = dir.path().join("test_skipmap_ge_map_mut"); + let open_options = OpenOptions::default() + .create_new(Some(ARENA_SIZE as u32)) + .read(true) + .write(true); + let map_options = MmapOptions::default(); + ge_in(SkipList::map_mut(p, Options::new(), open_options, map_options).unwrap()); + }) +} + +#[test] +#[cfg(feature = "memmap")] +fn test_ge_map_anon() { + run(|| { + let map_options = MmapOptions::default().len(ARENA_SIZE as u32); + ge_in(SkipList::map_anon(Options::new(), map_options).unwrap()); + }) +} + +#[test] +#[cfg(feature = "memmap")] +fn test_ge_map_anon_unify() { + run(|| { + let map_options = MmapOptions::default().len(ARENA_SIZE as u32); + ge_in(SkipList::map_anon(UNIFY_TEST_OPTIONS, map_options).unwrap()); + }) +} + +fn le_in(l: SkipMap) { + l.get_or_insert(b"a", b"a1", trailer()).unwrap(); + l.get_or_insert(b"a", b"a2", trailer()).unwrap(); + l.get_or_insert(b"c", b"c1", trailer()).unwrap(); + l.get_or_insert(b"c", b"c2", trailer()).unwrap(); + + assert!(l.upper_bound(Bound::Included(b"a")).is_some()); + assert!(l.upper_bound(Bound::Included(b"b")).is_some()); + assert!(l.upper_bound(Bound::Included(b"c")).is_some()); + + let ent = l.upper_bound(Bound::Included(b"a")).unwrap(); + assert_eq!(ent.key(), b"a"); + assert_eq!(ent.value(), b"a1"); + + let ent = l.upper_bound(Bound::Included(b"b")).unwrap(); + assert_eq!(ent.key(), b"a"); + assert_eq!(ent.value(), b"a1"); + + let ent = l.upper_bound(Bound::Included(b"c")).unwrap(); + assert_eq!(ent.key(), b"c"); + assert_eq!(ent.value(), b"c1"); + + let ent = l.upper_bound(Bound::Included(b"d")).unwrap(); + assert_eq!(ent.key(), b"c"); + assert_eq!(ent.value(), b"c1"); +} + +#[test] +fn test_le() { + run(|| le_in(SkipList::new(TEST_OPTIONS).unwrap())); +} + +#[test] +fn test_le_unify() { + run(|| le_in(SkipList::new(UNIFY_TEST_OPTIONS).unwrap())); +} + +#[test] +#[cfg(feature = "memmap")] +#[cfg_attr(miri, ignore)] +fn test_le_map_mut() { + run(|| 
unsafe {
+ let dir = tempfile::tempdir().unwrap();
+ let p = dir.path().join("test_skipmap_le_map_mut");
+ let open_options = OpenOptions::default()
+ .create_new(Some(ARENA_SIZE as u32))
+ .read(true)
+ .write(true);
+ let map_options = MmapOptions::default();
+ le_in(SkipList::map_mut(p, Options::new(), open_options, map_options).unwrap());
+ })
+}
+
+#[test]
+#[cfg(feature = "memmap")]
+fn test_le_map_anon() {
+ run(|| {
+ let map_options = MmapOptions::default().len(ARENA_SIZE as u32);
+ le_in(SkipList::map_anon(Options::new(), map_options).unwrap());
+ })
+}
+
+#[test]
+#[cfg(feature = "memmap")]
+fn test_le_map_anon_unify() {
+ run(|| {
+ let map_options = MmapOptions::default().len(ARENA_SIZE as u32);
+ le_in(SkipList::map_anon(UNIFY_TEST_OPTIONS, map_options).unwrap());
+ })
+}
+
+fn lt_in(l: SkipMap) {
+ l.get_or_insert(b"a", b"a1", trailer()).unwrap();
+ l.get_or_insert(b"a", b"a2", trailer()).unwrap();
+ l.get_or_insert(b"c", b"c1", trailer()).unwrap();
+ l.get_or_insert(b"c", b"c2", trailer()).unwrap();
+
+ assert!(l.upper_bound(Bound::Excluded(b"a")).is_none());
+ assert!(l.upper_bound(Bound::Excluded(b"b")).is_some());
+ assert!(l.upper_bound(Bound::Excluded(b"c")).is_some());
+
+ let ent = l.upper_bound(Bound::Excluded(b"b")).unwrap();
+ assert_eq!(ent.key(), b"a");
+ assert_eq!(ent.value(), b"a1");
+
+ let ent = l.upper_bound(Bound::Excluded(b"c")).unwrap();
+ assert_eq!(ent.key(), b"a");
+ assert_eq!(ent.value(), b"a1");
+
+ let ent = l.upper_bound(Bound::Excluded(b"d")).unwrap();
+ assert_eq!(ent.key(), b"c");
+ assert_eq!(ent.value(), b"c1");
+}
+
+#[test]
+fn test_lt() {
+ run(|| lt_in(SkipList::new(TEST_OPTIONS).unwrap()))
+}
+
+#[test]
+fn test_lt_unify() {
+ run(|| lt_in(SkipList::new(UNIFY_TEST_OPTIONS).unwrap()))
+}
+
+#[test]
+#[cfg(feature = "memmap")]
+#[cfg_attr(miri, ignore)]
+fn test_lt_map_mut() {
+ let dir = tempfile::tempdir().unwrap();
+ let p = dir.path().join("test_skipmap_lt_map_mut");
+ let open_options = 
OpenOptions::default() + .create_new(Some(ARENA_SIZE as u32)) + .read(true) + .write(true); + let map_options = MmapOptions::default(); + lt_in(unsafe { SkipList::map_mut(p, Options::new(), open_options, map_options).unwrap() }); +} + +#[test] +#[cfg(feature = "memmap")] + +fn test_lt_map_anon() { + run(|| { + let map_options = MmapOptions::default().len(ARENA_SIZE as u32); + lt_in(SkipList::map_anon(Options::new(), map_options).unwrap()); + }) +} + +#[test] +#[cfg(feature = "memmap")] +fn test_lt_map_anon_unify() { + run(|| { + let map_options = MmapOptions::default().len(ARENA_SIZE as u32); + lt_in(SkipList::map_anon(UNIFY_TEST_OPTIONS, map_options).unwrap()); + }) +} + +fn test_basic_large_testcases_in(l: SkipMap) { + let n = 1000; + + for i in 0..n { + l.get_or_insert(&key(i), &new_value(i), trailer()).unwrap(); + } + + for i in 0..n { + let k = key(i); + let ent = l.get(&k).unwrap(); + assert_eq!(new_value(i), ent.value()); + + assert_eq!(ent.key(), k); + } + + assert_eq!(n, l.len()); +} + +#[test] +fn test_basic_large_testcases() { + run(|| { + let l = SkipList::new(TEST_OPTIONS).unwrap(); + test_basic_large_testcases_in(l); + }) +} + +#[test] +fn test_basic_large_testcases_unify() { + run(|| { + let l = SkipList::new(UNIFY_TEST_OPTIONS).unwrap(); + test_basic_large_testcases_in(l); + }) +} + +#[test] +#[cfg(feature = "memmap")] +#[cfg_attr(miri, ignore)] +fn test_basic_large_testcases_map_mut() { + run(|| unsafe { + let dir = tempfile::tempdir().unwrap(); + let p = dir + .path() + .join("test_skipmap_basic_large_testcases_map_mut"); + let open_options = OpenOptions::default() + .create_new(Some(ARENA_SIZE as u32)) + .read(true) + .write(true); + let map_options = MmapOptions::default(); + let l = SkipList::map_mut(p, Options::new(), open_options, map_options).unwrap(); + test_basic_large_testcases_in(l); + }) +} + +#[test] +#[cfg(feature = "memmap")] +fn test_basic_large_testcases_map_anon() { + run(|| { + let map_options = 
MmapOptions::default().len(ARENA_SIZE as u32); + let l = SkipList::map_anon(Options::new(), map_options).unwrap(); + test_basic_large_testcases_in(l); + }) +} + +#[test] +#[cfg(feature = "memmap")] +fn test_basic_large_testcases_map_anon_unify() { + run(|| { + let map_options = MmapOptions::default().len(ARENA_SIZE as u32); + let l = SkipList::map_anon(UNIFY_TEST_OPTIONS, map_options).unwrap(); + test_basic_large_testcases_in(l); + }) +} + +#[cfg(feature = "std")] +fn test_concurrent_basic_runner(l: SkipMap) { + #[cfg(not(any(miri, feature = "loom")))] + const N: usize = 1000; + #[cfg(any(miri, feature = "loom"))] + const N: usize = 5; + + for i in 0..N { + let l = l.clone(); + std::thread::spawn(move || { + l.get_or_insert(&key(i), &new_value(i), trailer()).unwrap(); + }); + } + while l.refs() > 1 {} + for i in 0..N { + let l = l.clone(); + std::thread::spawn(move || { + let k = key(i); + assert_eq!(l.get(&k).unwrap().value(), new_value(i), "broken: {i}"); + }); + } +} + +#[test] +#[cfg(feature = "std")] +fn test_concurrent_basic() { + run(|| { + let l = SkipList::new(TEST_OPTIONS).unwrap().with_yield_now(); + test_concurrent_basic_runner(l); + }) +} + +#[test] +#[cfg(feature = "std")] +fn test_concurrent_basic_unify() { + run(|| { + let l = SkipList::new(UNIFY_TEST_OPTIONS).unwrap().with_yield_now(); + test_concurrent_basic_runner(l); + }) +} + +#[test] +#[cfg(feature = "memmap")] +#[cfg_attr(miri, ignore)] +fn test_concurrent_basic_map_mut() { + run(|| unsafe { + let dir = tempfile::tempdir().unwrap(); + let p = dir.path().join("test_skipmap_concurrent_basic_map_mut"); + let open_options = OpenOptions::default() + .create_new(Some(ARENA_SIZE as u32)) + .read(true) + .write(true); + let map_options = MmapOptions::default(); + let l = SkipList::map_mut(p, Options::new(), open_options, map_options) + .unwrap() + .with_yield_now(); + test_concurrent_basic_runner(l); + }) +} + +#[test] +#[cfg(feature = "memmap")] +fn test_concurrent_basic_map_anon() { + run(|| { + 
let map_options = MmapOptions::default().len(ARENA_SIZE as u32); + test_concurrent_basic_runner( + SkipList::map_anon(Options::new(), map_options) + .unwrap() + .with_yield_now(), + ); + }) +} + +#[test] +#[cfg(feature = "memmap")] +fn test_concurrent_basic_map_anon_unify() { + run(|| { + let map_options = MmapOptions::default().len(ARENA_SIZE as u32); + test_concurrent_basic_runner( + SkipList::map_anon(UNIFY_TEST_OPTIONS, map_options) + .unwrap() + .with_yield_now(), + ); + }) +} + +#[cfg(all(feature = "std", not(miri)))] +fn test_concurrent_basic_big_values_runner(l: SkipMap) { + #[cfg(not(any(miri, feature = "loom")))] + const N: usize = 100; + #[cfg(any(miri, feature = "loom"))] + const N: usize = 5; + + for i in 0..N { + let l = l.clone(); + std::thread::spawn(move || { + l.get_or_insert(&key(i), &big_value(i), trailer()).unwrap(); + }); + } + while l.refs() > 1 {} + // assert_eq!(N, l.len()); + for i in 0..N { + let l = l.clone(); + std::thread::spawn(move || { + let k = key(i); + assert_eq!(l.get(&k).unwrap().value(), big_value(i), "broken: {i}"); + }); + } + while l.refs() > 1 {} +} + +#[test] +#[cfg(all(feature = "std", not(miri)))] +fn test_concurrent_basic_big_values() { + run(|| { + test_concurrent_basic_big_values_runner( + SkipList::new(BIG_TEST_OPTIONS).unwrap().with_yield_now(), + ); + }) +} + +#[test] +#[cfg(all(feature = "std", not(miri)))] +fn test_concurrent_basic_big_values_unify() { + run(|| { + test_concurrent_basic_big_values_runner( + SkipList::new(UNIFY_BIG_TEST_OPTIONS) + .unwrap() + .with_yield_now(), + ); + }) +} + +#[test] +#[cfg(all(feature = "memmap", not(miri)))] +fn test_concurrent_basic_big_values_map_mut() { + run(|| unsafe { + let dir = tempfile::tempdir().unwrap(); + let p = dir + .path() + .join("test_skipmap_concurrent_basic_big_values_map_mut"); + let open_options = OpenOptions::default() + .create_new(Some(120 << 20)) + .read(true) + .write(true); + let map_options = MmapOptions::default(); + 
test_concurrent_basic_big_values_runner( + SkipList::map_mut(p, Options::new(), open_options, map_options) + .unwrap() + .with_yield_now(), + ); + }) +} + +#[test] +#[cfg(all(feature = "memmap", not(miri)))] +fn test_concurrent_basic_big_values_map_anon() { + run(|| { + let map_options = MmapOptions::default().len(120 << 20); + test_concurrent_basic_big_values_runner( + SkipList::map_anon(Options::new(), map_options) + .unwrap() + .with_yield_now(), + ); + }) +} + +#[test] +#[cfg(all(feature = "memmap", not(miri)))] +fn test_concurrent_basic_big_values_map_anon_unify() { + run(|| { + let map_options = MmapOptions::default().len(120 << 20); + test_concurrent_basic_big_values_runner( + SkipList::map_anon(UNIFY_BIG_TEST_OPTIONS, map_options) + .unwrap() + .with_yield_now(), + ); + }) +} + +#[cfg(feature = "std")] +fn concurrent_one_key(l: SkipMap) { + #[cfg(not(any(miri, feature = "loom")))] + const N: usize = 5; + #[cfg(any(miri, feature = "loom"))] + const N: usize = 5; + + let wg = WaitGroup::new(); + for i in 0..N { + let wg = wg.add(1); + let l = l.clone(); + std::thread::spawn(move || { + let _ = l.get_or_insert(b"thekey", &make_value(i), trailer()); + wg.done(); + }); + } + + wg.wait(); + + let saw_value = Arc::new(crate::sync::AtomicU32::new(0)); + for _ in 0..N { + let wg = wg.add(1); + let l = l.clone(); + let saw_value = saw_value.clone(); + std::thread::spawn(move || { + let ent = l.get(b"thekey").unwrap(); + let val = ent.value(); + let num: usize = core::str::from_utf8(&val[1..]).unwrap().parse().unwrap(); + assert!((0..N).contains(&num)); + + let mut it = l.iter(); + let ent = it.seek_lower_bound(Bound::Included(b"thekey")).unwrap(); + let val = ent.value(); + let num: usize = core::str::from_utf8(&val[1..]).unwrap().parse().unwrap(); + assert!((0..N).contains(&num)); + assert_eq!(ent.key(), b"thekey"); + saw_value.fetch_add(1, Ordering::SeqCst); + wg.done(); + }); + } + + wg.wait(); + + assert_eq!(N, saw_value.load(Ordering::SeqCst) as usize); + 
assert_eq!(l.len(), 1); +} + +#[test] +#[cfg(feature = "std")] +fn test_concurrent_one_key() { + run(|| { + concurrent_one_key(SkipList::new(TEST_OPTIONS).unwrap().with_yield_now()); + }) +} + +#[test] +#[cfg(feature = "std")] +fn test_concurrent_one_key_unify() { + run(|| { + concurrent_one_key(SkipList::new(UNIFY_TEST_OPTIONS).unwrap().with_yield_now()); + }) +} + +#[test] +#[cfg(feature = "memmap")] +#[cfg_attr(miri, ignore)] +fn test_concurrent_one_key_map_mut() { + run(|| unsafe { + let dir = tempfile::tempdir().unwrap(); + let p = dir.path().join("test_skipmap_concurrent_one_key_map_mut"); + let open_options = OpenOptions::default() + .create(Some(ARENA_SIZE as u32)) + .read(true) + .write(true); + let map_options = MmapOptions::default(); + concurrent_one_key( + SkipList::map_mut(p, Options::new(), open_options, map_options) + .unwrap() + .with_yield_now(), + ); + }) +} + +#[test] +#[cfg(feature = "memmap")] +fn test_concurrent_one_key_map_anon() { + run(|| { + let map_options = MmapOptions::default().len(ARENA_SIZE as u32); + concurrent_one_key( + SkipList::map_anon(Options::new(), map_options) + .unwrap() + .with_yield_now(), + ); + }) +} + +#[test] +#[cfg(feature = "memmap")] +fn test_concurrent_one_key_map_anon_unify() { + run(|| { + let map_options = MmapOptions::default().len(ARENA_SIZE as u32); + concurrent_one_key( + SkipList::map_anon(UNIFY_TEST_OPTIONS, map_options) + .unwrap() + .with_yield_now(), + ); + }) +} + +fn iter_all_versions_next(l: SkipMap) { + const N: usize = 100; + + for i in (0..N).rev() { + l.get_or_insert(&make_int_key(i), &make_value(i), trailer()) + .unwrap(); + } + + let mut it = l.iter(); + let mut ent = it.seek_lower_bound(Bound::Unbounded).unwrap(); + for i in 0..N { + assert_eq!(ent.key(), make_int_key(i)); + assert_eq!(ent.value(), make_value(i)); + if i != N - 1 { + ent = it.next().unwrap(); + } + } + + assert!(it.next().is_none()); +} + +#[test] +fn test_iter_all_versions_next() { + run(|| 
iter_all_versions_next(SkipList::new(TEST_OPTIONS).unwrap())); +} + +#[test] +fn test_iter_all_versions_next_unify() { + run(|| iter_all_versions_next(SkipList::new(UNIFY_TEST_OPTIONS).unwrap())); +} + +#[test] +#[cfg(feature = "memmap")] +#[cfg_attr(miri, ignore)] +fn test_iter_all_versions_next_map_mut() { + run(|| unsafe { + let dir = tempfile::tempdir().unwrap(); + let p = dir + .path() + .join("test_skipmap_iter_all_versions_next_map_mut"); + let open_options = OpenOptions::default() + .create_new(Some(ARENA_SIZE as u32)) + .read(true) + .write(true); + let map_options = MmapOptions::default(); + iter_all_versions_next( + SkipList::map_mut(p, Options::new(), open_options, map_options).unwrap(), + ); + }) +} + +#[test] +#[cfg(feature = "memmap")] +fn test_iter_all_versions_next_map_anon() { + run(|| { + let map_options = MmapOptions::default().len(ARENA_SIZE as u32); + iter_all_versions_next(SkipList::map_anon(Options::new(), map_options).unwrap()); + }) +} + +#[test] +#[cfg(feature = "memmap")] +fn test_iter_all_versions_next_map_anon_unify() { + run(|| { + let map_options = MmapOptions::default().len(ARENA_SIZE as u32); + iter_all_versions_next(SkipList::map_anon(UNIFY_TEST_OPTIONS, map_options).unwrap()); + }) +} + +fn range_next(l: SkipMap) { + const N: usize = 100; + + for i in (0..N).rev() { + l.get_or_insert(&make_int_key(i), &make_value(i), trailer()) + .unwrap(); + } + + let upper = make_int_key(50); + let mut it = l.range(..=upper.as_slice()); + let mut ent = it.seek_lower_bound(Bound::Unbounded); + for i in 0..N { + if i <= 50 { + { + let ent = ent.unwrap(); + assert_eq!(ent.key(), make_int_key(i)); + assert_eq!(ent.value(), make_value(i)); + } + ent = it.next(); + } else { + assert!(ent.is_none()); + ent = it.next(); + } + } + + assert!(it.next().is_none()); +} + +#[test] +fn test_range_next() { + run(|| range_next(SkipList::new(TEST_OPTIONS).unwrap())); +} + +#[test] +fn test_range_next_unify() { + run(|| 
range_next(SkipList::new(UNIFY_TEST_OPTIONS).unwrap()));
+}
+
+#[test]
+#[cfg(feature = "memmap")]
+#[cfg_attr(miri, ignore)]
+fn test_range_next_map_mut() {
+ run(|| unsafe {
+ let dir = tempfile::tempdir().unwrap();
+ let p = dir.path().join("test_skipmap_range_next_map_mut");
+ let open_options = OpenOptions::default()
+ .create_new(Some(ARENA_SIZE as u32))
+ .read(true)
+ .write(true);
+ let map_options = MmapOptions::default();
+ range_next(
+ SkipList::map_mut(p, Options::new(), open_options, map_options).unwrap(),
+ );
+ })
+}
+
+#[test]
+#[cfg(feature = "memmap")]
+fn test_range_next_map_anon() {
+ run(|| {
+ let map_options = MmapOptions::default().len(ARENA_SIZE as u32);
+ range_next(SkipList::map_anon(Options::new(), map_options).unwrap());
+ })
+}
+
+#[test]
+#[cfg(feature = "memmap")]
+fn test_range_next_map_anon_unify() {
+ run(|| {
+ let map_options = MmapOptions::default().len(ARENA_SIZE as u32);
+ range_next(SkipList::map_anon(UNIFY_TEST_OPTIONS, map_options).unwrap());
+ })
+}
+
+fn iter_all_versions_prev(l: SkipMap) {
+ const N: usize = 100;
+
+ for i in 0..N {
+ l.get_or_insert(&make_int_key(i), &make_value(i), trailer())
+ .unwrap();
+ }
+
+ let mut it = l.iter();
+ let mut ent = it.seek_upper_bound(Bound::Unbounded).unwrap();
+ for i in (0..N).rev() {
+ assert_eq!(ent.key(), make_int_key(i));
+ assert_eq!(ent.value(), make_value(i));
+ if i != 0 {
+ ent = it.next_back().unwrap();
+ }
+ }
+
+ assert!(it.next_back().is_none());
+}
+
+#[test]
+fn test_iter_all_versions_next_back() {
+ run(|| iter_all_versions_prev(SkipList::new(TEST_OPTIONS).unwrap()))
+}
+
+#[test]
+#[cfg(feature = "memmap")]
+#[cfg_attr(miri, ignore)]
+fn test_iter_all_versions_prev_map_mut() {
+ run(|| unsafe {
+ let dir = tempfile::tempdir().unwrap();
+ let p = dir
+ .path()
+ .join("test_skipmap_iter_all_versions_prev_map_mut");
+ let open_options = OpenOptions::default()
+ .create_new(Some(ARENA_SIZE as u32))
+ .read(true)
+ .write(true); 
+ let map_options = MmapOptions::default(); + iter_all_versions_prev( + SkipList::map_mut(p, Options::new(), open_options, map_options).unwrap(), + ); + }) +} + +#[test] +#[cfg(feature = "memmap")] +fn test_iter_all_versions_prev_map_anon() { + run(|| { + let map_options = MmapOptions::default().len(ARENA_SIZE as u32); + iter_all_versions_prev(SkipList::map_anon(Options::new(), map_options).unwrap()); + }) +} + +#[test] +#[cfg(feature = "memmap")] +fn test_iter_all_versions_prev_map_anon_unify() { + run(|| { + let map_options = MmapOptions::default().len(ARENA_SIZE as u32); + iter_all_versions_prev(SkipList::map_anon(UNIFY_TEST_OPTIONS, map_options).unwrap()); + }) +} + +fn range_prev(l: SkipMap) { + const N: usize = 100; + + for i in 0..N { + l.get_or_insert(&make_int_key(i), &make_value(i), trailer()) + .unwrap(); + } + + let lower = make_int_key(50); + let mut it = l.range(lower.as_slice()..); + let mut ent = it.seek_upper_bound(Bound::Unbounded); + for i in (0..N).rev() { + if i >= 50 { + { + let ent = ent.unwrap(); + assert_eq!(ent.key(), make_int_key(i)); + assert_eq!(ent.value(), make_value(i)); + } + ent = it.next_back(); + } else { + assert!(ent.is_none()); + ent = it.next_back(); + } + } + + assert!(it.next_back().is_none()); +} + +#[test] +fn test_range_prev() { + run(|| range_prev(SkipList::new(TEST_OPTIONS).unwrap())); +} + +#[test] +fn test_range_prev_unify() { + run(|| range_prev(SkipList::new(UNIFY_TEST_OPTIONS).unwrap())); +} + +#[test] +#[cfg(feature = "memmap")] +#[cfg_attr(miri, ignore)] +fn test_range_prev_map_mut() { + run(|| unsafe { + let dir = tempfile::tempdir().unwrap(); + let p = dir.path().join("test_skipmap_range_prev_map_mut"); + let open_options = OpenOptions::default() + .create_new(Some(ARENA_SIZE as u32)) + .read(true) + .write(true); + let map_options = MmapOptions::default(); + range_prev(SkipList::map_mut(p, Options::new(), open_options, map_options).unwrap()); + }) +} + +#[test] +#[cfg(feature = "memmap")] +fn 
test_range_prev_map_anon() { + run(|| { + let map_options = MmapOptions::default().len(ARENA_SIZE as u32); + range_prev(SkipList::map_anon(Options::new(), map_options).unwrap()); + }) +} + +#[test] +#[cfg(feature = "memmap")] +fn test_range_prev_map_anon_unify() { + run(|| { + let map_options = MmapOptions::default().len(ARENA_SIZE as u32); + range_prev(SkipList::map_anon(UNIFY_TEST_OPTIONS, map_options).unwrap()); + }) +} + +fn iter_all_versions_seek_ge(l: SkipMap) { + const N: usize = 100; + + for i in (0..N).rev() { + let v = i * 10 + 1000; + l.get_or_insert(&make_int_key(v), &make_value(v), trailer()) + .unwrap(); + } + + let mut it = l.iter(); + let ent = it.seek_lower_bound(Bound::Included(b"")).unwrap(); + assert_eq!(ent.key(), make_int_key(1000)); + assert_eq!(ent.value(), make_value(1000)); + + let ent = it.seek_lower_bound(Bound::Included(b"01000")).unwrap(); + assert_eq!(ent.key(), make_int_key(1000)); + assert_eq!(ent.value(), make_value(1000)); + + let ent = it.seek_lower_bound(Bound::Included(b"01005")).unwrap(); + assert_eq!(ent.key(), make_int_key(1010)); + assert_eq!(ent.value(), make_value(1010)); + + let ent = it.seek_lower_bound(Bound::Included(b"01010")).unwrap(); + assert_eq!(ent.key(), make_int_key(1010)); + assert_eq!(ent.value(), make_value(1010)); + + let ent = it.seek_lower_bound(Bound::Included(b"01020")).unwrap(); + assert_eq!(ent.key(), make_int_key(1020)); + assert_eq!(ent.value(), make_value(1020)); + + let ent = it.seek_lower_bound(Bound::Included(b"01200")).unwrap(); + assert_eq!(ent.key(), make_int_key(1200)); + assert_eq!(ent.value(), make_value(1200)); + + let ent = it.seek_lower_bound(Bound::Included(b"01100")).unwrap(); + assert_eq!(ent.key(), make_int_key(1100)); + assert_eq!(ent.value(), make_value(1100)); + + let ent = it.seek_lower_bound(Bound::Included(b"99999")); + assert!(ent.is_none()); + + l.get_or_insert(&[], &[], trailer()).unwrap(); + let ent = it.seek_lower_bound(Bound::Included(b"")).unwrap(); + 
assert_eq!(ent.key(), &[]); + assert_eq!(ent.value(), &[]); + + let ent = it.seek_lower_bound(Bound::Included(b"")).unwrap(); + assert_eq!(ent.key(), &[]); + assert_eq!(ent.value(), &[]); +} + +#[test] +fn test_iter_all_versions_seek_ge() { + run(|| iter_all_versions_seek_ge(SkipList::new(TEST_OPTIONS).unwrap())); +} + +#[test] +fn test_iter_all_versions_seek_ge_unify() { + run(|| iter_all_versions_seek_ge(SkipList::new(UNIFY_TEST_OPTIONS).unwrap())); +} + +#[test] +#[cfg(feature = "memmap")] +#[cfg_attr(miri, ignore)] +fn test_iter_all_versions_seek_ge_map_mut() { + run(|| unsafe { + let dir = tempfile::tempdir().unwrap(); + let p = dir + .path() + .join("test_skipmap_iter_all_versions_seek_ge_map_mut"); + let open_options = OpenOptions::default() + .create_new(Some(ARENA_SIZE as u32)) + .read(true) + .write(true); + let map_options = MmapOptions::default(); + iter_all_versions_seek_ge( + SkipList::map_mut(p, Options::new(), open_options, map_options).unwrap(), + ); + }) +} + +#[test] +#[cfg(feature = "memmap")] +fn test_iter_all_versions_seek_ge_map_anon() { + run(|| { + let map_options = MmapOptions::default().len(ARENA_SIZE as u32); + iter_all_versions_seek_ge(SkipList::map_anon(Options::new(), map_options).unwrap()); + }) +} + +#[test] +#[cfg(feature = "memmap")] +fn test_iter_all_versions_seek_ge_map_anon_unify() { + run(|| { + let map_options = MmapOptions::default().len(ARENA_SIZE as u32); + iter_all_versions_seek_ge(SkipList::map_anon(UNIFY_TEST_OPTIONS, map_options).unwrap()); + }) +} + +fn iter_all_versions_seek_lt(l: SkipMap) { + const N: usize = 100; + + for i in (0..N).rev() { + let v = i * 10 + 1000; + l.get_or_insert(&make_int_key(v), &make_value(v), trailer()) + .unwrap(); + } + + let mut it = l.iter(); + assert!(it.seek_upper_bound(Bound::Excluded(b"")).is_none()); + + let ent = it.seek_upper_bound(Bound::Excluded(b"01000")); + assert!(ent.is_none()); + + let ent = it.seek_upper_bound(Bound::Excluded(b"01001")).unwrap(); + assert_eq!(ent.key(), 
make_int_key(1000)); + assert_eq!(ent.value(), make_value(1000)); + + let ent = it.seek_upper_bound(Bound::Excluded(b"01991")).unwrap(); + assert_eq!(ent.key(), make_int_key(1990)); + assert_eq!(ent.value(), make_value(1990)); + + let ent = it.seek_upper_bound(Bound::Excluded(b"99999")).unwrap(); + assert_eq!(ent.key(), make_int_key(1990)); + assert_eq!(ent.value(), make_value(1990)); + + l.get_or_insert(&[], &[], trailer()).unwrap(); + + let ent = it.seek_upper_bound(Bound::Excluded(b"")); + assert!(ent.is_none()); + + let ent = it.seek_upper_bound(Bound::Excluded(b"\x01")).unwrap(); + assert_eq!(ent.key(), &[]); + assert_eq!(ent.value(), &[]); +} + +#[test] +fn test_iter_all_versions_seek_lt() { + run(|| iter_all_versions_seek_lt(SkipList::new(TEST_OPTIONS).unwrap())) +} + +#[test] +fn test_iter_all_versions_seek_lt_unify() { + run(|| iter_all_versions_seek_lt(SkipList::new(UNIFY_TEST_OPTIONS).unwrap())) +} + +#[test] +#[cfg(feature = "memmap")] +#[cfg_attr(miri, ignore)] +fn test_iter_all_versions_seek_lt_map_mut() { + run(|| unsafe { + let dir = tempfile::tempdir().unwrap(); + let p = dir + .path() + .join("test_skipmap_iter_all_versions_seek_lt_map_mut"); + let open_options = OpenOptions::default() + .create_new(Some(ARENA_SIZE as u32)) + .read(true) + .write(true); + let map_options = MmapOptions::default(); + iter_all_versions_seek_lt( + SkipList::map_mut(p, Options::new(), open_options, map_options).unwrap(), + ); + }) +} + +#[test] +#[cfg(feature = "memmap")] +fn test_iter_all_versions_seek_lt_map_anon() { + run(|| { + let map_options = MmapOptions::default().len(ARENA_SIZE as u32); + iter_all_versions_seek_lt(SkipList::map_anon(Options::new(), map_options).unwrap()); + }) +} + +#[test] +#[cfg(feature = "memmap")] +fn test_iter_all_versions_seek_lt_map_anon_unify() { + run(|| { + let map_options = MmapOptions::default().len(ARENA_SIZE as u32); + iter_all_versions_seek_lt(SkipList::map_anon(UNIFY_TEST_OPTIONS, map_options).unwrap()); + }) +} + +fn range(l: 
SkipMap) { + for i in 1..10 { + l.get_or_insert(&make_int_key(i), &make_value(i), trailer()) + .unwrap(); + } + + let k3 = make_int_key(3); + let k7 = make_int_key(7); + let mut it = l.range(k3.as_slice()..k7.as_slice()).clone(); + assert_eq!(it.bounds(), &(k3.as_slice()..k7.as_slice())); + + for i in 3..=6 { + let k = make_int_key(i); + let ent = it.seek_lower_bound(Bound::Included(&k)).unwrap(); + assert_eq!(ent.key(), make_int_key(i)); + assert_eq!(ent.value(), make_value(i)); + } + + for i in 1..3 { + let k = make_int_key(i); + let ent = it.seek_lower_bound(Bound::Included(&k)).unwrap(); + assert_eq!(ent.key(), make_int_key(3)); + assert_eq!(ent.value(), make_value(3)); + } + + for i in 7..10 { + let k = make_int_key(i); + assert!(it.seek_lower_bound(Bound::Included(&k)).is_none()); + } + + for i in 7..10 { + let k = make_int_key(i); + let ent = it.seek_upper_bound(Bound::Included(&k)).unwrap(); + assert_eq!(ent.key(), make_int_key(6)); + assert_eq!(ent.value(), make_value(6)); + } + + let ent = it + .seek_lower_bound(Bound::Included(&make_int_key(6))) + .unwrap(); + assert_eq!(ent.key(), make_int_key(6)); + assert_eq!(ent.value(), make_value(6)); + + assert!(it.next().is_none()); + + let ent = it + .seek_upper_bound(Bound::Included(&make_int_key(6))) + .unwrap(); + assert_eq!(ent.key(), make_int_key(6)); + assert_eq!(ent.value(), make_value(6)); + + assert!(it.next().is_none()); + + for i in 4..=7 { + let k = make_int_key(i); + let ent = it.seek_upper_bound(Bound::Excluded(&k)).unwrap(); + assert_eq!(ent.key(), make_int_key(i - 1)); + assert_eq!(ent.value(), make_value(i - 1)); + } + + for i in 7..10 { + let k = make_int_key(i); + let ent = it.seek_upper_bound(Bound::Excluded(&k)).unwrap(); + assert_eq!(ent.key(), make_int_key(6)); + assert_eq!(ent.value(), make_value(6)); + } + + for i in 1..3 { + let k = make_int_key(i); + let ent = it.seek_lower_bound(Bound::Excluded(&k)).unwrap(); + assert_eq!(ent.key(), make_int_key(3)); + assert_eq!(ent.value(), 
make_value(3)); + } + + for i in 1..4 { + let k = make_int_key(i); + assert!(it.seek_upper_bound(Bound::Excluded(&k)).is_none()); + } + + let ent = it + .seek_upper_bound(Bound::Excluded(&make_int_key(4))) + .unwrap(); + assert_eq!(ent.key(), make_int_key(3)); + assert_eq!(ent.value(), make_value(3)); + + assert!(it.next_back().is_none()); +} + +#[test] +fn test_range() { + run(|| range(SkipList::new(TEST_OPTIONS).unwrap())) +} + +#[test] +fn test_range_unify() { + run(|| range(SkipList::new(UNIFY_TEST_OPTIONS).unwrap())) +} + +#[test] +#[cfg(feature = "memmap")] +#[cfg_attr(miri, ignore)] +fn test_range_map_mut() { + run(|| unsafe { + let dir = tempfile::tempdir().unwrap(); + let p = dir.path().join("test_skipmap_range_map_mut"); + let open_options = OpenOptions::default() + .create_new(Some(ARENA_SIZE as u32)) + .read(true) + .write(true); + let map_options = MmapOptions::default(); + range(SkipList::map_mut(p, Options::new(), open_options, map_options).unwrap()); + }) +} + +#[test] +#[cfg(feature = "memmap")] +fn test_range_map_anon() { + run(|| { + let map_options = MmapOptions::default().len(ARENA_SIZE as u32); + range(SkipList::map_anon(Options::new(), map_options).unwrap()); + }) +} + +#[test] +#[cfg(feature = "memmap")] +fn test_range_map_anon_unify() { + run(|| { + let map_options = MmapOptions::default().len(ARENA_SIZE as u32); + range(SkipList::map_anon(UNIFY_TEST_OPTIONS, map_options).unwrap()); + }) +} + +fn iter_latest(l: SkipMap) { + const N: usize = 100; + + for i in 0..N { + l.get_or_insert(&make_int_key(i), &make_value(i), trailer()) + .unwrap(); + } + + for i in 50..N { + l.insert(&make_int_key(i), &make_value(i + 1000), trailer()) + .unwrap(); + } + + for i in 0..50 { + l.insert(&make_int_key(i), &make_value(i + 1000), trailer()) + .unwrap(); + } + + let mut it = l.iter(); + let mut num = 0; + for i in 0..N { + let ent = it.next().unwrap(); + assert_eq!(ent.key(), make_int_key(i)); + assert_eq!(ent.value(), make_value(i + 1000)); + + num += 1; + 
} + assert_eq!(num, N); +} + +#[test] +fn test_iter_latest() { + run(|| iter_latest(SkipList::new(TEST_OPTIONS).unwrap())) +} + +#[test] +fn test_iter_latest_unify() { + run(|| iter_latest(SkipList::new(UNIFY_TEST_OPTIONS).unwrap())) +} + +#[test] +#[cfg(feature = "memmap")] +#[cfg_attr(miri, ignore)] +fn test_iter_latest_map_mut() { + run(|| unsafe { + let dir = tempfile::tempdir().unwrap(); + let p = dir.path().join("test_skipmap_iter_latest_map_mut"); + let open_options = OpenOptions::default() + .create_new(Some(ARENA_SIZE as u32)) + .read(true) + .write(true); + let map_options = MmapOptions::default(); + iter_latest(SkipList::map_mut(p, Options::new(), open_options, map_options).unwrap()); + }) +} + +#[test] +#[cfg(feature = "memmap")] +fn test_iter_latest_map_anon() { + run(|| { + let map_options = MmapOptions::default().len(ARENA_SIZE as u32); + iter_latest(SkipList::map_anon(Options::new(), map_options).unwrap()); + }) +} + +#[test] +#[cfg(feature = "memmap")] +fn test_iter_latest_map_anon_unify() { + run(|| { + let map_options = MmapOptions::default().len(ARENA_SIZE as u32); + iter_latest(SkipList::map_anon(UNIFY_TEST_OPTIONS, map_options).unwrap()); + }) +} + +fn range_latest(l: SkipMap) { + const N: usize = 100; + + for i in 0..N { + l.get_or_insert(&make_int_key(i), &make_value(i), trailer()) + .unwrap(); + } + + for i in 50..N { + l.insert(&make_int_key(i), &make_value(i + 1000), trailer()) + .unwrap(); + } + + for i in 0..50 { + l.insert(&make_int_key(i), &make_value(i + 1000), trailer()) + .unwrap(); + } + + let mut it = l.range::<[u8], _>(..); + let mut num = 0; + for i in 0..N { + let ent = it.next().unwrap(); + assert_eq!(ent.key(), make_int_key(i)); + assert_eq!(ent.value(), make_value(i + 1000)); + + num += 1; + } + assert_eq!(num, N); +} + +#[test] +fn test_range_latest() { + run(|| range_latest(SkipList::new(TEST_OPTIONS).unwrap())) +} + +#[test] +fn test_range_latest_unify() { + run(|| 
range_latest(SkipList::new(UNIFY_TEST_OPTIONS).unwrap())) +} + +#[test] +#[cfg(feature = "memmap")] +#[cfg_attr(miri, ignore)] +fn test_range_latest_map_mut() { + run(|| unsafe { + let dir = tempfile::tempdir().unwrap(); + let p = dir.path().join("test_skipmap_range_latest_map_mut"); + let open_options = OpenOptions::default() + .create_new(Some(ARENA_SIZE as u32)) + .read(true) + .write(true); + let map_options = MmapOptions::default(); + range_latest(SkipList::map_mut(p, Options::new(), open_options, map_options).unwrap()); + }) +} + +#[test] +#[cfg(feature = "memmap")] +fn test_range_latest_map_anon() { + run(|| { + let map_options = MmapOptions::default().len(ARENA_SIZE as u32); + range_latest(SkipList::map_anon(Options::new(), map_options).unwrap()); + }) +} + +#[test] +#[cfg(feature = "memmap")] +fn test_range_latest_map_anon_unify() { + run(|| { + let map_options = MmapOptions::default().len(ARENA_SIZE as u32); + range_latest(SkipList::map_anon(UNIFY_TEST_OPTIONS, map_options).unwrap()); + }) +} + +#[test] +#[cfg(feature = "memmap")] +#[cfg_attr(miri, ignore)] +fn test_reopen_mmap() { + run(|| unsafe { + let dir = tempfile::tempdir().unwrap(); + let p = dir.path().join("reopen_skipmap"); + { + let open_options = OpenOptions::default() + .create(Some(ARENA_SIZE as u32)) + .read(true) + .write(true); + let map_options = MmapOptions::default(); + let l = SkipMap::map_mut(&p, Options::new(), open_options, map_options).unwrap(); + for i in 0..1000 { + l.get_or_insert(&key(i), &new_value(i), trailer()).unwrap(); + } + l.flush().unwrap(); + } + + let open_options = OpenOptions::default().read(true); + let map_options = MmapOptions::default(); + let l = SkipMap::map(&p, Options::new(), open_options, map_options).unwrap(); + assert_eq!(1000, l.len()); + for i in 0..1000 { + let k = key(i); + let ent = l.get(&k).unwrap(); + assert_eq!(new_value(i), ent.value()); + + assert_eq!(ent.key(), k); + } + }) +} + +#[test] +#[cfg(feature = "memmap")] +#[cfg_attr(miri, ignore)] 
+fn test_reopen_mmap2() { + run(|| unsafe { + use rand::seq::SliceRandom; + + let dir = tempfile::tempdir().unwrap(); + let p = dir.path().join("reopen_skipmap2"); + { + let open_options = OpenOptions::default() + .create(Some(ARENA_SIZE as u32)) + .read(true) + .write(true); + let map_options = MmapOptions::default(); + let l = + SkipMap::map_mut_with_comparator(&p, Options::new(), open_options, map_options, Ascend) + .unwrap(); + let mut data = (0..1000).collect::>(); + data.shuffle(&mut rand::thread_rng()); + for i in &data { + let i = *i; + l.get_or_insert(&key(i), &new_value(i), trailer()).unwrap(); + } + l.flush_async().unwrap(); + + for i in data { + let k = key(i); + let ent = l.get(&k).unwrap(); + assert_eq!(new_value(i), ent.value()); + assert_eq!(ent.key(), k); + } + } + + let open_options = OpenOptions::default().read(true); + let map_options = MmapOptions::default(); + let l = + SkipMap::map_with_comparator(&p, Options::new(), open_options, map_options, Ascend).unwrap(); + assert_eq!(1000, l.len()); + let mut data = (0..1000).collect::>(); + data.shuffle(&mut rand::thread_rng()); + for i in data { + let k = key(i); + let ent = l.get(&k).unwrap(); + assert_eq!(new_value(i), ent.value()); + assert_eq!(ent.key(), k); + } + }) +} + +struct Person { + id: u32, + name: std::string::String, +} + +impl Person { + fn encoded_size(&self) -> usize { + 4 + self.name.len() + } +} + +fn get_or_insert_with_value(l: SkipMap) { + let alice = Person { + id: 1, + name: std::string::String::from("Alice"), + }; + + let encoded_size = alice.encoded_size() as u32; + + let vb = ValueBuilder::new(encoded_size, |val| { + assert_eq!(val.capacity(), encoded_size as usize); + assert!(val.is_empty()); + val.put_u32_le(alice.id).unwrap(); + assert_eq!(val.len(), 4); + assert_eq!(val.remaining(), encoded_size as usize - 4); + assert_eq!(&*val, alice.id.to_le_bytes()); + val[..4].copy_from_slice(&alice.id.to_be_bytes()); + assert_eq!(&*val, alice.id.to_be_bytes()); + 
val.put_slice(alice.name.as_bytes()).unwrap(); + assert_eq!(val.len(), encoded_size as usize); + let err = val.put_slice(&[1]).unwrap_err(); + assert_eq!( + std::string::ToString::to_string(&err), + "buffer does not have enough space (remaining 0, want 1)" + ); + Ok(()) + }); + + l.get_or_insert_with_value_builder::<()>(b"alice", vb, trailer()) + .unwrap(); +} + +#[test] +fn test_get_or_insert_with_value() { + run(|| { + get_or_insert_with_value(SkipList::new(TEST_OPTIONS).unwrap()); + }) +} + +#[test] +fn test_get_or_insert_with_value_unify() { + run(|| { + get_or_insert_with_value(SkipList::new(UNIFY_TEST_OPTIONS).unwrap()); + }) +} + +#[test] +#[cfg(feature = "memmap")] +#[cfg_attr(miri, ignore)] +fn test_get_or_insert_with_value_map_mut() { + run(|| unsafe { + let dir = tempfile::tempdir().unwrap(); + let p = dir + .path() + .join("test_skipmap_get_or_insert_with_value_map_mut"); + let open_options = OpenOptions::default() + .create_new(Some(ARENA_SIZE as u32)) + .read(true) + .write(true); + let map_options = MmapOptions::default(); + get_or_insert_with_value( + SkipList::map_mut(p, Options::new(), open_options, map_options).unwrap(), + ); + }) +} + +#[test] +#[cfg(feature = "memmap")] +fn test_get_or_insert_with_value_map_anon() { + run(|| { + let map_options = MmapOptions::default().len(ARENA_SIZE as u32); + get_or_insert_with_value(SkipList::map_anon(Options::new(), map_options).unwrap()); + }) +} + +#[test] +#[cfg(feature = "memmap")] +fn test_get_or_insert_with_value_map_anon_unify() { + run(|| { + let map_options = MmapOptions::default().len(ARENA_SIZE as u32); + get_or_insert_with_value(SkipList::map_anon(UNIFY_TEST_OPTIONS, map_options).unwrap()); + }) +} + +fn get_or_insert_with(l: SkipMap) { + let alice = Person { + id: 1, + name: std::string::String::from("Alice"), + }; + + let encoded_size = alice.encoded_size() as u32; + + let kb = KeyBuilder::new(5u8.into(), |key: &mut VacantBuffer| { + key.put_slice(b"alice").unwrap(); + Ok(()) + }); + + let vb 
= ValueBuilder::new(encoded_size, |val: &mut VacantBuffer| { + assert_eq!(val.capacity(), encoded_size as usize); + assert!(val.is_empty()); + val.put_u32_le(alice.id).unwrap(); + assert_eq!(val.len(), 4); + assert_eq!(val.remaining(), encoded_size as usize - 4); + assert_eq!(&*val, alice.id.to_le_bytes()); + val[..4].copy_from_slice(&alice.id.to_be_bytes()); + assert_eq!(&*val, alice.id.to_be_bytes()); + val.put_slice(alice.name.as_bytes()).unwrap(); + assert_eq!(val.len(), encoded_size as usize); + let err = val.put_slice(&[1]).unwrap_err(); + assert_eq!( + std::string::ToString::to_string(&err), + "buffer does not have enough space (remaining 0, want 1)" + ); + Ok(()) + }); + + l.get_or_insert_with_builders::<(), ()>(kb, vb, trailer()) + .unwrap(); +} + +#[test] +fn test_get_or_insert_with() { + run(|| get_or_insert_with(SkipList::new(TEST_OPTIONS).unwrap())) +} + +#[test] +fn test_get_or_insert_with_unify() { + run(|| get_or_insert_with(SkipList::new(UNIFY_TEST_OPTIONS).unwrap())) +} + +#[test] +#[cfg(feature = "memmap")] +#[cfg_attr(miri, ignore)] +fn test_get_or_insert_with_map_mut() { + run(|| unsafe { + let dir = tempfile::tempdir().unwrap(); + let p = dir.path().join("test_skipmap_get_or_insert_with_map_mut"); + let open_options = OpenOptions::default() + .create_new(Some(ARENA_SIZE as u32)) + .read(true) + .write(true); + let map_options = MmapOptions::default(); + get_or_insert_with(SkipList::map_mut(p, Options::new(), open_options, map_options).unwrap()); + }) +} + +#[test] +#[cfg(feature = "memmap")] +fn test_get_or_insert_with_map_anon() { + run(|| { + let map_options = MmapOptions::default().len(ARENA_SIZE as u32); + get_or_insert_with(SkipList::map_anon(Options::new(), map_options).unwrap()); + }) +} + +#[test] +#[cfg(feature = "memmap")] +fn test_get_or_insert_with_map_anon_unify() { + run(|| { + let map_options = MmapOptions::default().len(ARENA_SIZE as u32); + get_or_insert_with(SkipList::map_anon(UNIFY_TEST_OPTIONS, map_options).unwrap()); + }) 
+} + +fn insert_in(l: SkipMap) { + let k = 0u64.to_le_bytes(); + for i in 0..100 { + let v = new_value(i); + let old = l.insert(&k, &v, trailer()).unwrap(); + if let Some(old) = old { + assert_eq!(old.key(), k); + assert_eq!(old.value(), new_value(i - 1)); + } + } + + let ent = l.get(&k).unwrap(); + assert_eq!(ent.key(), k); + assert_eq!(ent.value(), new_value(99)); +} + +#[test] +fn test_insert_in() { + run(|| { + insert_in(SkipList::new(TEST_OPTIONS).unwrap()); + }) +} + +#[test] +fn test_insert_in_unify() { + run(|| { + insert_in(SkipList::new(UNIFY_TEST_OPTIONS).unwrap()); + }) +} + +#[test] +#[cfg(feature = "memmap")] +#[cfg_attr(miri, ignore)] +fn test_insert_in_map_mut() { + run(|| unsafe { + let dir = tempfile::tempdir().unwrap(); + let p = dir.path().join("test_skipmap_insert_in_map_mut"); + let open_options = OpenOptions::default() + .create_new(Some(ARENA_SIZE as u32)) + .read(true) + .write(true); + let map_options = MmapOptions::default(); + insert_in(SkipList::map_mut(p, Options::new(), open_options, map_options).unwrap()); + }) +} + +#[test] +#[cfg(feature = "memmap")] +fn test_insert_in_map_anon() { + run(|| { + let map_options = MmapOptions::default().len(ARENA_SIZE as u32); + insert_in(SkipList::map_anon(Options::new(), map_options).unwrap()); + }) +} + +#[test] +#[cfg(feature = "memmap")] +fn test_insert_in_map_anon_unify() { + run(|| { + let map_options = MmapOptions::default().len(ARENA_SIZE as u32); + insert_in(SkipList::map_anon(UNIFY_TEST_OPTIONS, map_options).unwrap()); + }) +} + +fn insert_with_value(l: SkipMap) { + let alice = Person { + id: 1, + name: std::string::String::from("Alice"), + }; + + let encoded_size = alice.encoded_size() as u32; + + let vb = ValueBuilder::new(encoded_size, |val| { + assert_eq!(val.capacity(), encoded_size as usize); + assert!(val.is_empty()); + val.put_u32_le(alice.id).unwrap(); + assert_eq!(val.len(), 4); + assert_eq!(val.remaining(), encoded_size as usize - 4); + assert_eq!(val, alice.id.to_le_bytes()); + 
val[..4].copy_from_slice(&alice.id.to_be_bytes()); + assert_eq!(val, alice.id.to_be_bytes()); + val.put_slice(alice.name.as_bytes()).unwrap(); + assert_eq!(val.len(), encoded_size as usize); + let err = val.put_slice(&[1]).unwrap_err(); + assert_eq!( + std::string::ToString::to_string(&err), + "buffer does not have enough space (remaining 0, want 1)" + ); + Ok(()) + }); + + l.insert_with_value_builder::<()>(b"alice", vb, trailer()) + .unwrap(); + + let alice2 = Person { + id: 2, + name: std::string::String::from("Alice"), + }; + + let vb = ValueBuilder::new(encoded_size, |val| { + assert_eq!(val.capacity(), encoded_size as usize); + assert!(val.is_empty()); + val.put_u32_le(alice2.id).unwrap(); + assert_eq!(val.len(), 4); + assert_eq!(val.remaining(), encoded_size as usize - 4); + assert_eq!(&*val, alice2.id.to_le_bytes()); + val[..4].copy_from_slice(&alice2.id.to_be_bytes()); + assert_eq!(&*val, alice2.id.to_be_bytes()); + val.put_slice(alice2.name.as_bytes()).unwrap(); + assert_eq!(val.len(), encoded_size as usize); + let err = val.put_slice(&[1]).unwrap_err(); + assert_eq!( + std::string::ToString::to_string(&err), + "buffer does not have enough space (remaining 0, want 1)" + ); + Ok(()) + }); + + let old = l + .insert_with_value_builder::<()>(b"alice", vb, trailer()) + .unwrap() + .unwrap(); + + assert_eq!(old.key(), b"alice"); + assert!(old.value().starts_with(&alice.id.to_be_bytes())); + + let ent = l.get(b"alice").unwrap(); + assert_eq!(ent.key(), b"alice"); + assert!(ent.value().starts_with(&alice2.id.to_be_bytes())); +} + +#[test] +fn test_insert_with_value() { + run(|| insert_with_value(SkipList::new(TEST_OPTIONS).unwrap())); +} + +#[test] +fn test_insert_with_value_unify() { + run(|| insert_with_value(SkipList::new(UNIFY_TEST_OPTIONS).unwrap())); +} + +#[test] +#[cfg(feature = "memmap")] +#[cfg_attr(miri, ignore)] +fn test_insert_with_value_map_mut() { + run(|| unsafe { + let dir = tempfile::tempdir().unwrap(); + let p = dir + .path() + 
.join("test_skipmap_get_or_insert_with_value_map_mut"); + let open_options = OpenOptions::default() + .create_new(Some(ARENA_SIZE as u32)) + .read(true) + .write(true); + let map_options = MmapOptions::default(); + insert_with_value(SkipList::map_mut(p, Options::new(), open_options, map_options).unwrap()); + }) +} + +#[test] +#[cfg(feature = "memmap")] +fn test_insert_with_value_map_anon() { + run(|| { + let map_options = MmapOptions::default().len(ARENA_SIZE as u32); + insert_with_value(SkipList::map_anon(Options::new(), map_options).unwrap()); + }) +} + +#[test] +#[cfg(feature = "memmap")] +fn test_insert_with_value_map_anon_unify() { + run(|| { + let map_options = MmapOptions::default().len(ARENA_SIZE as u32); + insert_with_value(SkipList::map_anon(UNIFY_TEST_OPTIONS, map_options).unwrap()); + }) +} + +fn insert_with(l: SkipMap) { + let alice = Person { + id: 1, + name: std::string::String::from("Alice"), + }; + + let encoded_size = alice.encoded_size() as u32; + + let kb = KeyBuilder::new(5u8.into(), |key: &mut VacantBuffer| { + key.put_slice(b"alice").unwrap(); + Ok(()) + }); + + let vb = ValueBuilder::new(encoded_size as u32, |val: &mut VacantBuffer| { + assert_eq!(val.capacity(), encoded_size as usize); + assert!(val.is_empty()); + val.put_u32_le(alice.id).unwrap(); + assert_eq!(val.len(), 4); + assert_eq!(val.remaining(), encoded_size as usize - 4); + assert_eq!(val, alice.id.to_le_bytes()); + val[..4].copy_from_slice(&alice.id.to_be_bytes()); + assert_eq!(val, alice.id.to_be_bytes()); + val.put_slice(alice.name.as_bytes()).unwrap(); + assert_eq!(val.len(), encoded_size as usize); + let err = val.put_slice(&[1]).unwrap_err(); + assert_eq!( + std::string::ToString::to_string(&err), + "buffer does not have enough space (remaining 0, want 1)" + ); + Ok(()) + }); + + l.insert_with_builders::<(), ()>(kb, vb, trailer()).unwrap(); + + let alice2 = Person { + id: 2, + name: std::string::String::from("Alice"), + }; + + let vb = ValueBuilder::new(encoded_size, |val: 
&mut VacantBuffer| { + assert_eq!(val.capacity(), encoded_size as usize); + assert!(val.is_empty()); + val.put_u32_le(alice2.id).unwrap(); + assert_eq!(val.len(), 4); + assert_eq!(val.remaining(), encoded_size as usize - 4); + assert_eq!(&*val, alice2.id.to_le_bytes()); + val[..4].copy_from_slice(&alice2.id.to_be_bytes()); + assert_eq!(&*val, alice2.id.to_be_bytes()); + val.put_slice(alice2.name.as_bytes()).unwrap(); + assert_eq!(val.len(), encoded_size as usize); + let err = val.put_slice(&[1]).unwrap_err(); + assert_eq!( + std::string::ToString::to_string(&err), + "buffer does not have enough space (remaining 0, want 1)" + ); + Ok(()) + }); + let old = l + .insert_with_builders::<(), ()>(kb, vb, trailer()) + .unwrap() + .unwrap(); + + assert_eq!(old.key(), b"alice"); + assert!(old.value().starts_with(&alice.id.to_be_bytes())); + + let ent = l.get(b"alice").unwrap(); + assert_eq!(ent.key(), b"alice"); + assert!(ent.value().starts_with(&alice2.id.to_be_bytes())); +} + +#[test] +fn test_insert_with() { + run(|| insert_with(SkipList::new(TEST_OPTIONS).unwrap())) +} + +#[test] +fn test_insert_with_unify() { + run(|| insert_with(SkipList::new(UNIFY_TEST_OPTIONS).unwrap())) +} + +#[test] +#[cfg(feature = "memmap")] +#[cfg_attr(miri, ignore)] +fn test_insert_with_map_mut() { + run(|| unsafe { + let dir = tempfile::tempdir().unwrap(); + let p = dir.path().join("test_skipmap_insert_with_map_mut"); + let open_options = OpenOptions::default() + .create_new(Some(ARENA_SIZE as u32)) + .read(true) + .write(true); + let map_options = MmapOptions::default(); + insert_with(SkipList::map_mut(p, Options::new(), open_options, map_options).unwrap()); + }) +} + +#[test] +#[cfg(feature = "memmap")] +fn test_insert_with_map_anon() { + run(|| { + let map_options = MmapOptions::default().len(ARENA_SIZE as u32); + insert_with(SkipList::map_anon(Options::new(), map_options).unwrap()); + }) +} + +#[test] +#[cfg(feature = "memmap")] +fn test_insert_with_map_anon_unify() { + run(|| { + let 
map_options = MmapOptions::default().len(ARENA_SIZE as u32); + insert_with(SkipList::map_anon(UNIFY_TEST_OPTIONS, map_options).unwrap()); + }) +} + +fn get_or_remove(l: SkipMap) { + for i in 0..100 { + let v = new_value(i); + l.insert(&key(i), &v, trailer()).unwrap(); + } + + for i in 0..100 { + let k = key(i); + let old = l.get_or_remove(&k, trailer()).unwrap().unwrap(); + assert_eq!(old.key(), k); + assert_eq!(old.value(), new_value(i)); + + let old = l.get_or_remove(&k, trailer()).unwrap().unwrap(); + assert_eq!(old.key(), k); + assert_eq!(old.value(), new_value(i)); + } + + for i in 0..100 { + let k = key(i); + let ent = l.get(&k).unwrap(); + assert_eq!(ent.key(), k); + assert_eq!(ent.value(), new_value(i)); + } +} + +#[test] +fn test_get_or_remove() { + run(|| get_or_remove(SkipList::new(TEST_OPTIONS).unwrap())) +} + +#[test] +fn test_get_or_remove_unify() { + run(|| get_or_remove(SkipList::new(UNIFY_TEST_OPTIONS).unwrap())) +} + +#[test] +#[cfg(feature = "memmap")] +#[cfg_attr(miri, ignore)] +fn test_get_or_remove_map_mut() { + run(|| unsafe { + let dir = tempfile::tempdir().unwrap(); + let p = dir.path().join("test_skipmap_get_or_remove_map_mut"); + let open_options = OpenOptions::default() + .create_new(Some(ARENA_SIZE as u32)) + .read(true) + .write(true); + let map_options = MmapOptions::default(); + get_or_remove(SkipList::map_mut(p, Options::new(), open_options, map_options).unwrap()); + }) +} + +#[test] +#[cfg(feature = "memmap")] +fn test_get_or_remove_map_anon() { + run(|| { + let map_options = MmapOptions::default().len(ARENA_SIZE as u32); + get_or_remove(SkipList::map_anon(Options::new(), map_options).unwrap()); + }) +} + +#[test] +#[cfg(feature = "memmap")] +fn test_get_or_remove_map_anon_unify() { + run(|| { + let map_options = MmapOptions::default().len(ARENA_SIZE as u32); + get_or_remove(SkipList::map_anon(UNIFY_TEST_OPTIONS, map_options).unwrap()); + }) +} + +fn remove(l: SkipMap) { + for i in 0..100 { + let v = new_value(i); + 
l.insert(&key(i), &v, trailer()).unwrap(); + } + + for i in 0..100 { + let k = key(i); + // no race, remove should succeed + let old = l + .compare_remove(&k, trailer(), Ordering::SeqCst, Ordering::Acquire) + .unwrap(); + assert!(old.is_none()); + + // key already removed + let old = l + .compare_remove(&k, trailer(), Ordering::SeqCst, Ordering::Acquire) + .unwrap(); + assert!(old.is_none()); + } + + for i in 0..100 { + let k = key(i); + let ent = l.get(&k); + assert!(ent.is_none()); + } +} + +#[test] +fn test_remove() { + run(|| remove(SkipList::new(TEST_OPTIONS).unwrap())) +} + +#[test] +fn test_remove_unify() { + run(|| remove(SkipList::new(UNIFY_TEST_OPTIONS).unwrap())) +} + +#[test] +#[cfg(feature = "memmap")] +#[cfg_attr(miri, ignore)] +fn test_remove_map_mut() { + run(|| unsafe { + let dir = tempfile::tempdir().unwrap(); + let p = dir.path().join("test_skipmap_remove_map_mut"); + let open_options = OpenOptions::default() + .create_new(Some(ARENA_SIZE as u32)) + .read(true) + .write(true); + let map_options = MmapOptions::default(); + remove(SkipList::map_mut(p, Options::new(), open_options, map_options).unwrap()); + }) +} + +#[test] +#[cfg(feature = "memmap")] +fn test_remove_map_anon() { + run(|| { + let map_options = MmapOptions::default().len(ARENA_SIZE as u32); + remove(SkipList::map_anon(Options::new(), map_options).unwrap()); + }) +} + +#[test] +#[cfg(feature = "memmap")] +fn test_remove_map_anon_unify() { + run(|| { + let map_options = MmapOptions::default().len(ARENA_SIZE as u32); + remove(SkipList::map_anon(UNIFY_TEST_OPTIONS, map_options).unwrap()); + }) +} + +fn remove2(l: SkipMap) { + for i in 0..100 { + let v = new_value(i); + l.insert(&key(i), &v, trailer()).unwrap(); + } + + for i in 0..100 { + let k = key(i); + // not found, remove should succeed + let old = l + .compare_remove(&k, trailer(), Ordering::SeqCst, Ordering::Acquire) + .unwrap(); + assert!(old.is_none()); + + // no-race, remove should succeed + let old = l + .compare_remove(&k, 
trailer(), Ordering::SeqCst, Ordering::Acquire) + .unwrap(); + assert!(old.is_none()); + } + + for i in 0..100 { + let k = key(i); + let ent = l.get(&k); + assert!(ent.is_none()); + } +} + +#[test] +fn test_remove2() { + run(|| remove2(SkipList::new(TEST_OPTIONS).unwrap())) +} + +#[test] +fn test_remove2_unify() { + run(|| remove2(SkipList::new(UNIFY_TEST_OPTIONS).unwrap())) +} + +#[test] +#[cfg(feature = "memmap")] +#[cfg_attr(miri, ignore)] +fn test_remove2_map_mut() { + run(|| unsafe { + let dir = tempfile::tempdir().unwrap(); + let p = dir.path().join("test_skipmap_remove2_map_mut"); + let open_options = OpenOptions::default() + .create_new(Some(ARENA_SIZE as u32)) + .read(true) + .write(true); + let map_options = MmapOptions::default(); + remove2(SkipList::map_mut(p, Options::new(), open_options, map_options).unwrap()); + }) +} + +#[test] +#[cfg(feature = "memmap")] +fn test_remove2_map_anon() { + run(|| unsafe { + let map_options = MmapOptions::default().len(ARENA_SIZE as u32); + remove2(SkipList::map_anon(Options::new(), map_options).unwrap()); + }) +} + +#[test] +#[cfg(feature = "memmap")] +fn test_remove2_map_anon_unify() { + run(|| { + let map_options = MmapOptions::default().len(ARENA_SIZE as u32); + remove2(SkipList::map_anon(UNIFY_TEST_OPTIONS, map_options).unwrap()); + }) +} diff --git a/src/map/tests.rs b/src/sync/tests/versioned.rs similarity index 69% rename from src/map/tests.rs rename to src/sync/tests/versioned.rs index 8a301ac..00130c2 100644 --- a/src/map/tests.rs +++ b/src/sync/tests/versioned.rs @@ -1,94 +1,11 @@ use super::*; -use crate::Descend; -use std::format; +type SkipList = crate::sync::versioned::SkipMap; -use std::sync::Arc; - -use rarena_allocator::Freelist; -#[cfg(feature = "std")] -use wg::WaitGroup; - -const ARENA_SIZE: usize = 1 << 20; -#[cfg(feature = "std")] -const BIG_ARENA_SIZE: usize = 120 << 20; -const TEST_OPTIONS: Options = Options::new().with_capacity(ARENA_SIZE as u32); -const UNIFY_TEST_OPTIONS: Options = 
Options::new() - .with_capacity(ARENA_SIZE as u32) - .with_unify(true); -#[cfg(feature = "std")] -const BIG_TEST_OPTIONS: Options = Options::new().with_capacity(BIG_ARENA_SIZE as u32); -#[cfg(feature = "std")] -const UNIFY_BIG_TEST_OPTIONS: Options = Options::new() - .with_capacity(BIG_ARENA_SIZE as u32) - .with_unify(true); - -fn run(f: impl Fn() + Send + Sync + 'static) { - f(); -} - -/// Only used for testing - -pub fn key(i: usize) -> std::vec::Vec { - format!("{:05}", i).into_bytes() -} - -/// Only used for testing -#[cfg(all(feature = "std", not(miri)))] -pub fn big_value(i: usize) -> std::vec::Vec { - format!("{:01048576}", i).into_bytes() -} -#[cfg(all(feature = "std", miri))] -pub fn big_value(i: usize) -> std::vec::Vec { - format!("{:01024}", i).into_bytes() -} - -/// Only used for testing -pub fn new_value(i: usize) -> std::vec::Vec { - format!("{:05}", i).into_bytes() -} - -fn make_int_key(i: usize) -> std::vec::Vec { - format!("{:05}", i).into_bytes() -} - -fn make_value(i: usize) -> std::vec::Vec { - format!("v{:05}", i).into_bytes() -} - -#[test] -fn test_node_ptr_clone() { - let node_ptr = NodePtr::::NULL; - #[allow(clippy::clone_on_copy)] - let _ = node_ptr.clone(); -} - -#[test] -fn test_encode_decode_key_size() { - // Test cases - let test_cases = [ - (0, 0), // Minimum values - (1, 1), // Small values - (0x1FFFFFF, 0), // Maximum key_size, minimum height - (0, 0b11111), // Minimum key_size, maximum height - (0x1FFFFFF, 0b11111), // Maximum values - (0x1FFFFFF - 1, 0b11111 - 1), // One less than maximum values - (12345678, 31), // Random values - (0, 1), // Edge case: Minimum key_size, small height - (1, 0), // Edge case: Small key_size, minimum height - ]; - - for &(key_size, height) in &test_cases { - let encoded = encode_key_size_and_height(key_size, height); - let (decoded_key_size, decoded_height) = decode_key_size_and_height(encoded); - - assert_eq!(key_size, decoded_key_size); - assert_eq!(height, decoded_height); - } -} +type SkipMap = 
crate::sync::versioned::SkipMap; fn empty_in(l: SkipMap) { - let mut it = l.iter_all_versions(0); + let mut it = l.iter_all_versions(MIN_VERSION); assert!(it.seek_lower_bound(Bound::Unbounded).is_none()); assert!(it.seek_upper_bound(Bound::Unbounded).is_none()); @@ -96,14 +13,10 @@ fn empty_in(l: SkipMap) { assert!(it.seek_upper_bound(Bound::Excluded(b"aaa")).is_none()); assert!(it.seek_lower_bound(Bound::Excluded(b"aaa")).is_none()); assert!(it.seek_upper_bound(Bound::Included(b"aaa")).is_none()); - assert!(l.first(0).is_none()); - assert!(l.last(0).is_none()); - assert!(l.ge(0, b"aaa").is_none()); - assert!(l.lt(0, b"aaa").is_none()); - assert!(l.gt(0, b"aaa").is_none()); - assert!(l.le(0, b"aaa").is_none()); - assert!(l.get(0, b"aaa").is_none()); - assert!(!l.contains_key(0, b"aaa")); + assert!(l.first(MIN_VERSION).is_none()); + assert!(l.last(MIN_VERSION).is_none()); + assert!(l.get(MIN_VERSION, b"aaa").is_none()); + assert!(!l.contains_key(MIN_VERSION, b"aaa")); assert!(l.allocated() > 0); assert!(l.capacity() > 0); assert_eq!(l.remaining(), l.capacity() - l.allocated()); @@ -111,19 +24,19 @@ fn empty_in(l: SkipMap) { #[test] fn test_empty() { - run(|| empty_in(SkipMap::new().unwrap())); + run(|| empty_in(SkipList::new(Options::new()).unwrap())); } #[test] fn test_empty_unify() { - run(|| empty_in(SkipMap::with_options(UNIFY_TEST_OPTIONS).unwrap())); + run(|| empty_in(SkipList::new(UNIFY_TEST_OPTIONS).unwrap())); } #[test] #[cfg(feature = "memmap")] #[cfg_attr(miri, ignore)] fn test_empty_map_mut() { - run(|| { + run(|| unsafe { let dir = tempfile::tempdir().unwrap(); let p = dir.path().join("test_skipmap_empty_map_mut"); let open_options = OpenOptions::default() @@ -132,7 +45,8 @@ fn test_empty_map_mut() { .write(true); let map_options = MmapOptions::default(); - empty_in(SkipMap::map_mut(p, open_options, map_options).unwrap()); + let x = SkipList::map_mut(p, Options::new(), open_options, map_options).unwrap(); + empty_in(x); }) } @@ -141,7 +55,7 @@ fn 
test_empty_map_mut() { fn test_empty_map_anon() { run(|| { let map_options = MmapOptions::default().len(1000); - empty_in(SkipMap::map_anon(map_options).unwrap()); + empty_in(SkipList::map_anon(Options::new(), map_options).unwrap()); }) } @@ -150,7 +64,7 @@ fn test_empty_map_anon() { fn test_empty_map_anon_unify() { run(|| { let map_options = MmapOptions::default().len(1000); - empty_in(SkipMap::map_anon_with_options(TEST_OPTIONS, map_options).unwrap()); + empty_in(SkipList::map_anon(TEST_OPTIONS, map_options).unwrap()); }) } @@ -176,7 +90,7 @@ fn full_in(l: impl FnOnce(usize) -> SkipMap) { fn test_full() { run(|| { full_in(|n| { - SkipMap::with_options( + SkipList::new( Options::new() .with_capacity(n as u32) .with_freelist(Freelist::None), @@ -190,7 +104,7 @@ fn test_full() { fn test_full_unify() { run(|| { full_in(|n| { - SkipMap::with_options( + SkipList::new( UNIFY_TEST_OPTIONS .with_capacity(n as u32) .with_freelist(Freelist::None), @@ -204,7 +118,7 @@ fn test_full_unify() { #[cfg(feature = "memmap")] #[cfg_attr(miri, ignore)] fn test_full_map_mut() { - run(|| { + run(|| unsafe { let dir = tempfile::tempdir().unwrap(); let p = dir.path().join("test_skipmap_full_map_mut"); @@ -214,7 +128,7 @@ fn test_full_map_mut() { .read(true) .write(true); let map_options = MmapOptions::default(); - SkipMap::map_mut_with_options( + SkipList::map_mut( p, Options::new().with_freelist(Freelist::None), open_options, @@ -231,8 +145,7 @@ fn test_full_map_anon() { run(|| { full_in(|n| { let map_options = MmapOptions::default().len(n as u32); - SkipMap::map_anon_with_options(Options::new().with_freelist(Freelist::None), map_options) - .unwrap() + SkipList::map_anon(Options::new().with_freelist(Freelist::None), map_options).unwrap() }); }) } @@ -243,8 +156,7 @@ fn test_full_map_anon_unify() { run(|| { full_in(|n| { let map_options = MmapOptions::default().len(n as u32); - SkipMap::map_anon_with_options(Options::new().with_freelist(Freelist::None), map_options) - .unwrap() + 
SkipList::map_anon(Options::new().with_freelist(Freelist::None), map_options).unwrap() }); }) } @@ -261,17 +173,17 @@ fn basic_in(mut l: SkipMap) { let ent = it.seek_lower_bound(Bound::Included(b"key1")).unwrap(); assert_eq!(ent.key(), b"key1"); assert_eq!(ent.value().unwrap(), &make_value(1)); - assert_eq!(ent.trailer().version(), 0); + assert_eq!(ent.version(), 0); let ent = it.seek_lower_bound(Bound::Included(b"key2")).unwrap(); assert_eq!(ent.key(), b"key2"); assert_eq!(ent.value().unwrap(), &make_value(2)); - assert_eq!(ent.trailer().version(), 0); + assert_eq!(ent.version(), 0); let ent = it.seek_lower_bound(Bound::Included(b"key3")).unwrap(); assert_eq!(ent.key(), b"key3"); assert_eq!(ent.value().unwrap(), &make_value(3)); - assert_eq!(ent.trailer().version(), 0); + assert_eq!(ent.version(), 0); } l.get_or_insert(1, "a".as_bytes(), &[]).unwrap(); @@ -282,12 +194,12 @@ fn basic_in(mut l: SkipMap) { let ent = it.seek_lower_bound(Bound::Included(b"a")).unwrap(); assert_eq!(ent.key(), b"a"); assert_eq!(ent.value().unwrap(), &[]); - assert_eq!(ent.trailer().version(), 2); + assert_eq!(ent.version(), 2); let ent = it.next().unwrap(); assert_eq!(ent.key(), b"a"); assert_eq!(ent.value().unwrap(), &[]); - assert_eq!(ent.trailer().version(), 1); + assert_eq!(ent.version(), 1); } l.get_or_insert(2, "b".as_bytes(), &[]).unwrap(); @@ -298,17 +210,17 @@ fn basic_in(mut l: SkipMap) { let ent = it.seek_lower_bound(Bound::Included(b"b")).unwrap(); assert_eq!(ent.key(), b"b"); assert_eq!(ent.value().unwrap(), &[]); - assert_eq!(ent.trailer().version(), 2); + assert_eq!(ent.version(), 2); let ent = it.next().unwrap(); assert_eq!(ent.key(), b"b"); assert_eq!(ent.value().unwrap(), &[]); - assert_eq!(ent.trailer().version(), 1); + assert_eq!(ent.version(), 1); let ent = it.entry().unwrap(); assert_eq!(ent.key(), b"b"); assert_eq!(ent.value().unwrap(), &[]); - assert_eq!(ent.trailer().version(), 1); + assert_eq!(ent.version(), 1); } l.get_or_insert(2, b"b", &[]).unwrap().unwrap(); 
@@ -336,19 +248,19 @@ fn basic_in(mut l: SkipMap) { #[test] fn test_basic() { - run(|| basic_in(SkipMap::with_options(TEST_OPTIONS).unwrap())) + run(|| basic_in(SkipList::new(TEST_OPTIONS).unwrap())) } #[test] fn test_basic_unify() { - run(|| basic_in(SkipMap::with_options(UNIFY_TEST_OPTIONS).unwrap())) + run(|| basic_in(SkipList::new(UNIFY_TEST_OPTIONS).unwrap())) } #[test] #[cfg(feature = "memmap")] #[cfg_attr(miri, ignore)] fn test_basic_map_mut() { - run(|| { + run(|| unsafe { let dir = tempfile::tempdir().unwrap(); let p = dir.path().join("test_skipmap_basic_map_mut"); let open_options = OpenOptions::default() @@ -356,7 +268,7 @@ fn test_basic_map_mut() { .read(true) .write(true); let map_options = MmapOptions::default(); - basic_in(SkipMap::map_mut(p, open_options, map_options).unwrap()); + basic_in(SkipList::map_mut(p, Options::new(), open_options, map_options).unwrap()); }) } @@ -365,7 +277,7 @@ fn test_basic_map_mut() { fn test_basic_map_anon() { run(|| { let map_options = MmapOptions::default().len(ARENA_SIZE as u32); - basic_in(SkipMap::map_anon(map_options).unwrap()); + basic_in(SkipList::map_anon(Options::new(), map_options).unwrap()); }) } @@ -374,7 +286,7 @@ fn test_basic_map_anon() { fn test_basic_map_anon_unify() { run(|| { let map_options = MmapOptions::default().len(ARENA_SIZE as u32); - basic_in(SkipMap::map_anon_with_options(TEST_OPTIONS, map_options).unwrap()); + basic_in(SkipList::map_anon(TEST_OPTIONS, map_options).unwrap()); }) } @@ -420,62 +332,62 @@ fn iter_all_versions_mvcc(l: SkipMap) { let ent = it.seek_lower_bound(Bound::Unbounded).unwrap(); assert_eq!(ent.key(), b"a"); assert_eq!(ent.value().unwrap(), b"a1"); - assert_eq!(ent.trailer().version(), 1); + assert_eq!(ent.version(), 1); let ent = it.seek_upper_bound(Bound::Unbounded).unwrap(); assert_eq!(ent.key(), b"c"); assert_eq!(ent.value().unwrap(), b"c1"); - assert_eq!(ent.trailer().version(), 1); + assert_eq!(ent.version(), 1); let mut it = l.iter_all_versions(2); let ent = 
it.seek_lower_bound(Bound::Unbounded).unwrap(); assert_eq!(ent.key(), b"a"); assert_eq!(ent.value().unwrap(), b"a1"); - assert_eq!(ent.trailer().version(), 1); + assert_eq!(ent.version(), 1); let ent = it.seek_upper_bound(Bound::Unbounded).unwrap(); assert_eq!(ent.key(), b"c"); assert_eq!(ent.value().unwrap(), b"c1"); - assert_eq!(ent.trailer().version(), 1); + assert_eq!(ent.version(), 1); let mut it = l.iter_all_versions(3); let ent = it.seek_upper_bound(Bound::Excluded(b"b")).unwrap(); assert_eq!(ent.key(), b"a"); assert_eq!(ent.value().unwrap(), b"a2"); - assert_eq!(ent.trailer().version(), 3); + assert_eq!(ent.version(), 3); let ent = it.seek_upper_bound(Bound::Included(b"c")).unwrap(); assert_eq!(ent.key(), b"c"); assert_eq!(ent.value().unwrap(), b"c2"); - assert_eq!(ent.trailer().version(), 3); + assert_eq!(ent.version(), 3); let ent = it.seek_lower_bound(Bound::Excluded(b"b")).unwrap(); assert_eq!(ent.key(), b"c"); assert_eq!(ent.value().unwrap(), b"c2"); - assert_eq!(ent.trailer().version(), 3); + assert_eq!(ent.version(), 3); let ent = it.seek_lower_bound(Bound::Included(b"c")).unwrap(); assert_eq!(ent.key(), b"c"); assert_eq!(ent.value().unwrap(), b"c2"); - assert_eq!(ent.trailer().version(), 3); + assert_eq!(ent.version(), 3); } #[test] fn test_iter_all_versions_mvcc() { - run(|| iter_all_versions_mvcc(SkipMap::with_options(TEST_OPTIONS).unwrap())); + run(|| iter_all_versions_mvcc(SkipList::new(TEST_OPTIONS).unwrap())); } #[test] fn test_iter_all_versions_mvcc_unify() { - run(|| iter_all_versions_mvcc(SkipMap::with_options(UNIFY_TEST_OPTIONS).unwrap())); + run(|| iter_all_versions_mvcc(SkipList::new(UNIFY_TEST_OPTIONS).unwrap())); } #[test] #[cfg(feature = "memmap")] #[cfg_attr(miri, ignore)] fn test_iter_all_versions_mvcc_map_mut() { - run(|| { + run(|| unsafe { let dir = tempfile::tempdir().unwrap(); let p = dir .path() @@ -485,7 +397,9 @@ fn test_iter_all_versions_mvcc_map_mut() { .read(true) .write(true); let map_options = MmapOptions::default(); - 
iter_all_versions_mvcc(SkipMap::map_mut(p, open_options, map_options).unwrap()); + iter_all_versions_mvcc( + SkipList::map_mut(p, Options::new(), open_options, map_options).unwrap(), + ); }); } @@ -494,7 +408,7 @@ fn test_iter_all_versions_mvcc_map_mut() { fn test_iter_all_versions_mvcc_map_anon() { run(|| { let map_options = MmapOptions::default().len(ARENA_SIZE as u32); - iter_all_versions_mvcc(SkipMap::map_anon(map_options).unwrap()); + iter_all_versions_mvcc(SkipList::map_anon(Options::new(), map_options).unwrap()); }) } @@ -503,14 +417,12 @@ fn test_iter_all_versions_mvcc_map_anon() { fn test_iter_all_versions_mvcc_map_anon_unify() { run(|| { let map_options = MmapOptions::default().len(ARENA_SIZE as u32); - iter_all_versions_mvcc( - SkipMap::map_anon_with_options(UNIFY_TEST_OPTIONS, map_options).unwrap(), - ); + iter_all_versions_mvcc(SkipList::map_anon(UNIFY_TEST_OPTIONS, map_options).unwrap()); }) } fn ordering() { - let l = SkipMap::with_options_and_comparator(TEST_OPTIONS, Descend).unwrap(); + let l = SkipList::with_comparator(TEST_OPTIONS, Descend).unwrap(); l.get_or_insert(1, b"a1", b"a1").unwrap(); l.get_or_insert(2, b"a2", b"a2").unwrap(); @@ -538,22 +450,22 @@ fn get_mvcc(l: SkipMap) { let ent = l.get(1, b"a").unwrap(); assert_eq!(ent.key(), b"a"); assert_eq!(ent.value(), b"a1"); - assert_eq!(ent.trailer().version(), 1); + assert_eq!(ent.version(), 1); let ent = l.get(2, b"a").unwrap(); assert_eq!(ent.key(), b"a"); assert_eq!(ent.value(), b"a1"); - assert_eq!(ent.trailer().version(), 1); + assert_eq!(ent.version(), 1); let ent = l.get(3, b"a").unwrap(); assert_eq!(ent.key(), b"a"); assert_eq!(ent.value(), b"a2"); - assert_eq!(ent.trailer().version(), 3); + assert_eq!(ent.version(), 3); let ent = l.get(4, b"a").unwrap(); assert_eq!(ent.key(), b"a"); assert_eq!(ent.value(), b"a2"); - assert_eq!(ent.trailer().version(), 3); + assert_eq!(ent.version(), 3); assert!(l.get(0, b"b").is_none()); assert!(l.get(1, b"b").is_none()); @@ -564,41 +476,41 @@ fn 
get_mvcc(l: SkipMap) { let ent = l.get(1, b"c").unwrap(); assert_eq!(ent.key(), b"c"); assert_eq!(ent.value(), b"c1"); - assert_eq!(ent.trailer().version(), 1); + assert_eq!(ent.version(), 1); let ent = l.get(2, b"c").unwrap(); assert_eq!(ent.key(), b"c"); assert_eq!(ent.value(), b"c1"); - assert_eq!(ent.trailer().version(), 1); + assert_eq!(ent.version(), 1); let ent = l.get(3, b"c").unwrap(); assert_eq!(ent.key(), b"c"); assert_eq!(ent.value(), b"c2"); - assert_eq!(ent.trailer().version(), 3); + assert_eq!(ent.version(), 3); let ent = l.get(4, b"c").unwrap(); assert_eq!(ent.key(), b"c"); assert_eq!(ent.value(), b"c2"); - assert_eq!(ent.trailer().version(), 3); + assert_eq!(ent.version(), 3); assert!(l.get(5, b"d").is_none()); } #[test] fn test_get_mvcc() { - run(|| get_mvcc(SkipMap::with_options(TEST_OPTIONS).unwrap())); + run(|| get_mvcc(SkipList::new(TEST_OPTIONS).unwrap())); } #[test] fn test_get_mvcc_unify() { - run(|| get_mvcc(SkipMap::with_options(UNIFY_TEST_OPTIONS).unwrap())); + run(|| get_mvcc(SkipList::new(UNIFY_TEST_OPTIONS).unwrap())); } #[test] #[cfg(feature = "memmap")] #[cfg_attr(miri, ignore)] fn test_get_mvcc_map_mut() { - run(|| { + run(|| unsafe { let dir = tempfile::tempdir().unwrap(); let p = dir.path().join("test_skipmap_get_mvcc_map_mut"); let open_options = OpenOptions::default() @@ -606,7 +518,7 @@ fn test_get_mvcc_map_mut() { .read(true) .write(true); let map_options = MmapOptions::default(); - get_mvcc(SkipMap::map_mut(p, open_options, map_options).unwrap()); + get_mvcc(SkipList::map_mut(p, Options::new(), open_options, map_options).unwrap()); }) } @@ -615,7 +527,7 @@ fn test_get_mvcc_map_mut() { fn test_get_mvcc_map_anon() { run(|| { let map_options = MmapOptions::default().len(ARENA_SIZE as u32); - get_mvcc(SkipMap::map_anon(map_options).unwrap()); + get_mvcc(SkipList::map_anon(Options::new(), map_options).unwrap()); }) } @@ -624,7 +536,7 @@ fn test_get_mvcc_map_anon() { fn test_get_mvcc_map_anon_unify() { run(|| { let map_options = 
MmapOptions::default().len(ARENA_SIZE as u32); - get_mvcc(SkipMap::map_anon_with_options(UNIFY_TEST_OPTIONS, map_options).unwrap()); + get_mvcc(SkipList::map_anon(UNIFY_TEST_OPTIONS, map_options).unwrap()); }) } @@ -642,62 +554,62 @@ fn gt_in(l: SkipMap) { let ent = l.lower_bound(1, Bound::Excluded(b"")).unwrap(); assert_eq!(ent.key(), b"a"); assert_eq!(ent.value(), b"a1"); - assert_eq!(ent.trailer().version(), 1); + assert_eq!(ent.version(), 1); let ent = l.lower_bound(2, Bound::Excluded(b"")).unwrap(); assert_eq!(ent.key(), b"a"); assert_eq!(ent.value(), b"a1"); - assert_eq!(ent.trailer().version(), 1); + assert_eq!(ent.version(), 1); let ent = l.lower_bound(3, Bound::Excluded(b"")).unwrap(); assert_eq!(ent.key(), b"a"); assert_eq!(ent.value(), b"a2"); - assert_eq!(ent.trailer().version(), 3); + assert_eq!(ent.version(), 3); let ent = l.lower_bound(1, Bound::Excluded(b"a")).unwrap(); assert_eq!(ent.key(), b"c"); assert_eq!(ent.value(), b"c1"); - assert_eq!(ent.trailer().version(), 1); + assert_eq!(ent.version(), 1); let ent = l.lower_bound(2, Bound::Excluded(b"a")).unwrap(); assert_eq!(ent.key(), b"c"); assert_eq!(ent.value(), b"c1"); - assert_eq!(ent.trailer().version(), 1); + assert_eq!(ent.version(), 1); let ent = l.lower_bound(3, Bound::Excluded(b"a")).unwrap(); assert_eq!(ent.key(), b"c"); assert_eq!(ent.value(), b"c2"); - assert_eq!(ent.trailer().version(), 3); + assert_eq!(ent.version(), 3); let ent = l.lower_bound(1, Bound::Excluded(b"b")).unwrap(); assert_eq!(ent.key(), b"c"); assert_eq!(ent.value(), b"c1"); - assert_eq!(ent.trailer().version(), 1); + assert_eq!(ent.version(), 1); let ent = l.lower_bound(2, Bound::Excluded(b"b")).unwrap(); assert_eq!(ent.key(), b"c"); assert_eq!(ent.value(), b"c1"); - assert_eq!(ent.trailer().version(), 1); + assert_eq!(ent.version(), 1); let ent = l.lower_bound(3, Bound::Excluded(b"b")).unwrap(); assert_eq!(ent.key(), b"c"); assert_eq!(ent.value(), b"c2"); - assert_eq!(ent.trailer().version(), 3); + 
assert_eq!(ent.version(), 3); let ent = l.lower_bound(4, Bound::Excluded(b"b")).unwrap(); assert_eq!(ent.key(), b"c"); assert_eq!(ent.value(), b"c2"); - assert_eq!(ent.trailer().version(), 3); + assert_eq!(ent.version(), 3); let ent = l.lower_bound(5, Bound::Excluded(b"b")).unwrap(); assert_eq!(ent.key(), b"c"); assert_eq!(ent.value(), b"c3"); - assert_eq!(ent.trailer().version(), 5); + assert_eq!(ent.version(), 5); let ent = l.lower_bound(6, Bound::Excluded(b"b")).unwrap(); assert_eq!(ent.key(), b"c"); assert_eq!(ent.value(), b"c3"); - assert_eq!(ent.trailer().version(), 5); + assert_eq!(ent.version(), 5); assert!(l.lower_bound(1, Bound::Excluded(b"c")).is_none()); assert!(l.lower_bound(2, Bound::Excluded(b"c")).is_none()); @@ -709,19 +621,19 @@ fn gt_in(l: SkipMap) { #[test] fn test_gt() { - run(|| gt_in(SkipMap::with_options(TEST_OPTIONS).unwrap())); + run(|| gt_in(SkipList::new(TEST_OPTIONS).unwrap())); } #[test] fn test_gt_unify() { - run(|| gt_in(SkipMap::with_options(UNIFY_TEST_OPTIONS).unwrap())); + run(|| gt_in(SkipList::new(UNIFY_TEST_OPTIONS).unwrap())); } #[test] #[cfg(feature = "memmap")] #[cfg_attr(miri, ignore)] fn test_gt_map_mut() { - run(|| { + run(|| unsafe { let dir = tempfile::tempdir().unwrap(); let p = dir.path().join("test_skipmap_gt_map_mut"); let open_options = OpenOptions::default() @@ -729,7 +641,7 @@ fn test_gt_map_mut() { .read(true) .write(true); let map_options = MmapOptions::default(); - gt_in(SkipMap::map_mut(p, open_options, map_options).unwrap()); + gt_in(SkipList::map_mut(p, Options::new(), open_options, map_options).unwrap()); }) } @@ -738,7 +650,7 @@ fn test_gt_map_mut() { fn test_gt_map_anon() { run(|| { let map_options = MmapOptions::default().len(ARENA_SIZE as u32); - gt_in(SkipMap::map_anon(map_options).unwrap()); + gt_in(SkipList::map_anon(Options::new(), map_options).unwrap()); }) } @@ -747,7 +659,7 @@ fn test_gt_map_anon() { fn test_gt_map_anon_unify() { run(|| { let map_options = MmapOptions::default().len(ARENA_SIZE 
as u32); - gt_in(SkipMap::map_anon_with_options(UNIFY_TEST_OPTIONS, map_options).unwrap()); + gt_in(SkipList::map_anon(UNIFY_TEST_OPTIONS, map_options).unwrap()); }) } @@ -757,71 +669,71 @@ fn ge_in(l: SkipMap) { l.get_or_insert(1, b"c", b"c1").unwrap(); l.get_or_insert(3, b"c", b"c2").unwrap(); - assert!(l.lower_bound(0, Bound::Included(b"a")).is_none()); - assert!(l.lower_bound(0, Bound::Included(b"b")).is_none()); - assert!(l.lower_bound(0, Bound::Included(b"c")).is_none()); + assert!(l.lower_bound(MIN_VERSION, Bound::Included(b"a")).is_none()); + assert!(l.lower_bound(MIN_VERSION, Bound::Included(b"b")).is_none()); + assert!(l.lower_bound(MIN_VERSION, Bound::Included(b"c")).is_none()); let ent = l.lower_bound(1, Bound::Included(b"a")).unwrap(); assert_eq!(ent.key(), b"a"); assert_eq!(ent.value(), b"a1"); - assert_eq!(ent.trailer().version(), 1); + assert_eq!(ent.version(), 1); let ent = l.lower_bound(2, Bound::Included(b"a")).unwrap(); assert_eq!(ent.key(), b"a"); assert_eq!(ent.value(), b"a1"); - assert_eq!(ent.trailer().version(), 1); + assert_eq!(ent.version(), 1); let ent = l.lower_bound(3, Bound::Included(b"a")).unwrap(); assert_eq!(ent.key(), b"a"); assert_eq!(ent.value(), b"a2"); - assert_eq!(ent.trailer().version(), 3); + assert_eq!(ent.version(), 3); let ent = l.lower_bound(4, Bound::Included(b"a")).unwrap(); assert_eq!(ent.key(), b"a"); assert_eq!(ent.value(), b"a2"); - assert_eq!(ent.trailer().version(), 3); + assert_eq!(ent.version(), 3); let ent = l.lower_bound(1, Bound::Included(b"b")).unwrap(); assert_eq!(ent.key(), b"c"); assert_eq!(ent.value(), b"c1"); - assert_eq!(ent.trailer().version(), 1); + assert_eq!(ent.version(), 1); let ent = l.lower_bound(2, Bound::Included(b"b")).unwrap(); assert_eq!(ent.key(), b"c"); assert_eq!(ent.value(), b"c1"); - assert_eq!(ent.trailer().version(), 1); + assert_eq!(ent.version(), 1); let ent = l.lower_bound(3, Bound::Included(b"b")).unwrap(); assert_eq!(ent.key(), b"c"); assert_eq!(ent.value(), b"c2"); - 
assert_eq!(ent.trailer().version(), 3); + assert_eq!(ent.version(), 3); let ent = l.lower_bound(4, Bound::Included(b"b")).unwrap(); assert_eq!(ent.key(), b"c"); assert_eq!(ent.value(), b"c2"); - assert_eq!(ent.trailer().version(), 3); + assert_eq!(ent.version(), 3); let ent = l.lower_bound(1, Bound::Included(b"c")).unwrap(); assert_eq!(ent.key(), b"c"); assert_eq!(ent.value(), b"c1"); - assert_eq!(ent.trailer().version(), 1); + assert_eq!(ent.version(), 1); let ent = l.lower_bound(2, Bound::Included(b"c")).unwrap(); assert_eq!(ent.key(), b"c"); assert_eq!(ent.value(), b"c1"); - assert_eq!(ent.trailer().version(), 1); + assert_eq!(ent.version(), 1); let ent = l.lower_bound(3, Bound::Included(b"c")).unwrap(); assert_eq!(ent.key(), b"c"); assert_eq!(ent.value(), b"c2"); - assert_eq!(ent.trailer().version(), 3); + assert_eq!(ent.version(), 3); let ent = l.lower_bound(4, Bound::Included(b"c")).unwrap(); assert_eq!(ent.key(), b"c"); assert_eq!(ent.value(), b"c2"); - assert_eq!(ent.trailer().version(), 3); + assert_eq!(ent.version(), 3); - assert!(l.lower_bound(0, Bound::Included(b"d")).is_none()); + assert!(l.lower_bound(MIN_VERSION, Bound::Included(b"d")).is_none()); assert!(l.lower_bound(1, Bound::Included(b"d")).is_none()); assert!(l.lower_bound(2, Bound::Included(b"d")).is_none()); assert!(l.lower_bound(3, Bound::Included(b"d")).is_none()); @@ -830,19 +742,19 @@ fn ge_in(l: SkipMap) { #[test] fn test_ge() { - run(|| ge_in(SkipMap::with_options(TEST_OPTIONS).unwrap())); + run(|| ge_in(SkipList::new(TEST_OPTIONS).unwrap())); } #[test] fn test_ge_unify() { - run(|| ge_in(SkipMap::with_options(UNIFY_TEST_OPTIONS).unwrap())); + run(|| ge_in(SkipList::new(UNIFY_TEST_OPTIONS).unwrap())); } #[test] #[cfg(feature = "memmap")] #[cfg_attr(miri, ignore)] fn test_ge_map_mut() { - run(|| { + run(|| unsafe { let dir = tempfile::tempdir().unwrap(); let p = dir.path().join("test_skipmap_ge_map_mut"); let open_options = OpenOptions::default() @@ -850,7 +762,7 @@ fn test_ge_map_mut() { 
.read(true) .write(true); let map_options = MmapOptions::default(); - ge_in(SkipMap::map_mut(p, open_options, map_options).unwrap()); + ge_in(SkipList::map_mut(p, Options::new(), open_options, map_options).unwrap()); }) } @@ -859,7 +771,7 @@ fn test_ge_map_mut() { fn test_ge_map_anon() { run(|| { let map_options = MmapOptions::default().len(ARENA_SIZE as u32); - ge_in(SkipMap::map_anon(map_options).unwrap()); + ge_in(SkipList::map_anon(Options::new(), map_options).unwrap()); }) } @@ -868,7 +780,7 @@ fn test_ge_map_anon() { fn test_ge_map_anon_unify() { run(|| { let map_options = MmapOptions::default().len(ARENA_SIZE as u32); - ge_in(SkipMap::map_anon_with_options(UNIFY_TEST_OPTIONS, map_options).unwrap()); + ge_in(SkipList::map_anon(UNIFY_TEST_OPTIONS, map_options).unwrap()); }) } @@ -878,106 +790,106 @@ fn le_in(l: SkipMap) { l.get_or_insert(1, b"c", b"c1").unwrap(); l.get_or_insert(3, b"c", b"c2").unwrap(); - assert!(l.upper_bound(0, Bound::Included(b"a")).is_none()); - assert!(l.upper_bound(0, Bound::Included(b"b")).is_none()); - assert!(l.upper_bound(0, Bound::Included(b"c")).is_none()); + assert!(l.upper_bound(MIN_VERSION, Bound::Included(b"a")).is_none()); + assert!(l.upper_bound(MIN_VERSION, Bound::Included(b"b")).is_none()); + assert!(l.upper_bound(MIN_VERSION, Bound::Included(b"c")).is_none()); let ent = l.upper_bound(1, Bound::Included(b"a")).unwrap(); assert_eq!(ent.key(), b"a"); assert_eq!(ent.value(), b"a1"); - assert_eq!(ent.trailer().version(), 1); + assert_eq!(ent.version(), 1); let ent = l.upper_bound(2, Bound::Included(b"a")).unwrap(); assert_eq!(ent.key(), b"a"); assert_eq!(ent.value(), b"a1"); - assert_eq!(ent.trailer().version(), 1); + assert_eq!(ent.version(), 1); let ent = l.upper_bound(3, Bound::Included(b"a")).unwrap(); assert_eq!(ent.key(), b"a"); assert_eq!(ent.value(), b"a2"); - assert_eq!(ent.trailer().version(), 3); + assert_eq!(ent.version(), 3); let ent = l.upper_bound(4, Bound::Included(b"a")).unwrap(); assert_eq!(ent.key(), b"a"); 
assert_eq!(ent.value(), b"a2"); - assert_eq!(ent.trailer().version(), 3); + assert_eq!(ent.version(), 3); let ent = l.upper_bound(1, Bound::Included(b"b")).unwrap(); assert_eq!(ent.key(), b"a"); assert_eq!(ent.value(), b"a1"); - assert_eq!(ent.trailer().version(), 1); + assert_eq!(ent.version(), 1); let ent = l.upper_bound(2, Bound::Included(b"b")).unwrap(); assert_eq!(ent.key(), b"a"); assert_eq!(ent.value(), b"a1"); - assert_eq!(ent.trailer().version(), 1); + assert_eq!(ent.version(), 1); let ent = l.upper_bound(3, Bound::Included(b"b")).unwrap(); assert_eq!(ent.key(), b"a"); assert_eq!(ent.value(), b"a2"); - assert_eq!(ent.trailer().version(), 3); + assert_eq!(ent.version(), 3); let ent = l.upper_bound(4, Bound::Included(b"b")).unwrap(); assert_eq!(ent.key(), b"a"); assert_eq!(ent.value(), b"a2"); - assert_eq!(ent.trailer().version(), 3); + assert_eq!(ent.version(), 3); let ent = l.upper_bound(1, Bound::Included(b"c")).unwrap(); assert_eq!(ent.key(), b"c"); assert_eq!(ent.value(), b"c1"); - assert_eq!(ent.trailer().version(), 1); + assert_eq!(ent.version(), 1); let ent = l.upper_bound(2, Bound::Included(b"c")).unwrap(); assert_eq!(ent.key(), b"c"); assert_eq!(ent.value(), b"c1"); - assert_eq!(ent.trailer().version(), 1); + assert_eq!(ent.version(), 1); let ent = l.upper_bound(3, Bound::Included(b"c")).unwrap(); assert_eq!(ent.key(), b"c"); assert_eq!(ent.value(), b"c2"); - assert_eq!(ent.trailer().version(), 3); + assert_eq!(ent.version(), 3); let ent = l.upper_bound(4, Bound::Included(b"c")).unwrap(); assert_eq!(ent.key(), b"c"); assert_eq!(ent.value(), b"c2"); - assert_eq!(ent.trailer().version(), 3); + assert_eq!(ent.version(), 3); let ent = l.upper_bound(1, Bound::Included(b"d")).unwrap(); assert_eq!(ent.key(), b"c"); assert_eq!(ent.value(), b"c1"); - assert_eq!(ent.trailer().version(), 1); + assert_eq!(ent.version(), 1); let ent = l.upper_bound(2, Bound::Included(b"d")).unwrap(); assert_eq!(ent.key(), b"c"); assert_eq!(ent.value(), b"c1"); - 
assert_eq!(ent.trailer().version(), 1); + assert_eq!(ent.version(), 1); let ent = l.upper_bound(3, Bound::Included(b"d")).unwrap(); assert_eq!(ent.key(), b"c"); assert_eq!(ent.value(), b"c2"); - assert_eq!(ent.trailer().version(), 3); + assert_eq!(ent.version(), 3); let ent = l.upper_bound(4, Bound::Included(b"d")).unwrap(); assert_eq!(ent.key(), b"c"); assert_eq!(ent.value(), b"c2"); - assert_eq!(ent.trailer().version(), 3); + assert_eq!(ent.version(), 3); } #[test] fn test_le() { - run(|| le_in(SkipMap::with_options(TEST_OPTIONS).unwrap())); + run(|| le_in(SkipList::new(TEST_OPTIONS).unwrap())); } #[test] fn test_le_unify() { - run(|| le_in(SkipMap::with_options(UNIFY_TEST_OPTIONS).unwrap())); + run(|| le_in(SkipList::new(UNIFY_TEST_OPTIONS).unwrap())); } #[test] #[cfg(feature = "memmap")] #[cfg_attr(miri, ignore)] fn test_le_map_mut() { - run(|| { + run(|| unsafe { let dir = tempfile::tempdir().unwrap(); let p = dir.path().join("test_skipmap_le_map_mut"); let open_options = OpenOptions::default() @@ -985,7 +897,7 @@ fn test_le_map_mut() { .read(true) .write(true); let map_options = MmapOptions::default(); - gt_in(SkipMap::map_mut(p, open_options, map_options).unwrap()); + gt_in(SkipList::map_mut(p, Options::new(), open_options, map_options).unwrap()); }) } @@ -994,7 +906,7 @@ fn test_le_map_mut() { fn test_le_map_anon() { run(|| { let map_options = MmapOptions::default().len(ARENA_SIZE as u32); - gt_in(SkipMap::map_anon(map_options).unwrap()); + gt_in(SkipList::map_anon(Options::new(), map_options).unwrap()); }) } @@ -1003,7 +915,7 @@ fn test_le_map_anon() { fn test_le_map_anon_unify() { run(|| { let map_options = MmapOptions::default().len(ARENA_SIZE as u32); - gt_in(SkipMap::map_anon_with_options(UNIFY_TEST_OPTIONS, map_options).unwrap()); + gt_in(SkipList::map_anon(UNIFY_TEST_OPTIONS, map_options).unwrap()); }) } @@ -1013,81 +925,81 @@ fn lt_in(l: SkipMap) { l.get_or_insert(1, b"c", b"c1").unwrap(); l.get_or_insert(3, b"c", b"c2").unwrap(); - 
assert!(l.upper_bound(0, Bound::Excluded(b"a")).is_none()); - assert!(l.upper_bound(0, Bound::Excluded(b"b")).is_none()); - assert!(l.upper_bound(0, Bound::Excluded(b"c")).is_none()); + assert!(l.upper_bound(MIN_VERSION, Bound::Excluded(b"a")).is_none()); + assert!(l.upper_bound(MIN_VERSION, Bound::Excluded(b"b")).is_none()); + assert!(l.upper_bound(MIN_VERSION, Bound::Excluded(b"c")).is_none()); assert!(l.upper_bound(1, Bound::Excluded(b"a")).is_none()); assert!(l.upper_bound(2, Bound::Excluded(b"a")).is_none()); let ent = l.upper_bound(1, Bound::Excluded(b"b")).unwrap(); assert_eq!(ent.key(), b"a"); assert_eq!(ent.value(), b"a1"); - assert_eq!(ent.trailer().version(), 1); + assert_eq!(ent.version(), 1); let ent = l.upper_bound(2, Bound::Excluded(b"b")).unwrap(); assert_eq!(ent.key(), b"a"); assert_eq!(ent.value(), b"a1"); - assert_eq!(ent.trailer().version(), 1); + assert_eq!(ent.version(), 1); let ent = l.upper_bound(3, Bound::Excluded(b"b")).unwrap(); assert_eq!(ent.key(), b"a"); assert_eq!(ent.value(), b"a2"); - assert_eq!(ent.trailer().version(), 3); + assert_eq!(ent.version(), 3); let ent = l.upper_bound(4, Bound::Excluded(b"b")).unwrap(); assert_eq!(ent.key(), b"a"); assert_eq!(ent.value(), b"a2"); - assert_eq!(ent.trailer().version(), 3); + assert_eq!(ent.version(), 3); let ent = l.upper_bound(1, Bound::Excluded(b"c")).unwrap(); assert_eq!(ent.key(), b"a"); assert_eq!(ent.value(), b"a1"); - assert_eq!(ent.trailer().version(), 1); + assert_eq!(ent.version(), 1); let ent = l.upper_bound(2, Bound::Excluded(b"c")).unwrap(); assert_eq!(ent.key(), b"a"); assert_eq!(ent.value(), b"a1"); - assert_eq!(ent.trailer().version(), 1); + assert_eq!(ent.version(), 1); let ent = l.upper_bound(3, Bound::Excluded(b"c")).unwrap(); assert_eq!(ent.key(), b"a"); assert_eq!(ent.value(), b"a2"); - assert_eq!(ent.trailer().version(), 3); + assert_eq!(ent.version(), 3); let ent = l.upper_bound(4, Bound::Excluded(b"c")).unwrap(); assert_eq!(ent.key(), b"a"); assert_eq!(ent.value(), 
b"a2"); - assert_eq!(ent.trailer().version(), 3); + assert_eq!(ent.version(), 3); let ent = l.upper_bound(1, Bound::Excluded(b"d")).unwrap(); assert_eq!(ent.key(), b"c"); assert_eq!(ent.value(), b"c1"); - assert_eq!(ent.trailer().version(), 1); + assert_eq!(ent.version(), 1); let ent = l.upper_bound(2, Bound::Excluded(b"d")).unwrap(); assert_eq!(ent.key(), b"c"); assert_eq!(ent.value(), b"c1"); - assert_eq!(ent.trailer().version(), 1); + assert_eq!(ent.version(), 1); let ent = l.upper_bound(3, Bound::Excluded(b"d")).unwrap(); assert_eq!(ent.key(), b"c"); assert_eq!(ent.value(), b"c2"); - assert_eq!(ent.trailer().version(), 3); + assert_eq!(ent.version(), 3); let ent = l.upper_bound(4, Bound::Excluded(b"d")).unwrap(); assert_eq!(ent.key(), b"c"); assert_eq!(ent.value(), b"c2"); - assert_eq!(ent.trailer().version(), 3); + assert_eq!(ent.version(), 3); } #[test] fn test_lt() { - run(|| lt_in(SkipMap::with_options(TEST_OPTIONS).unwrap())) + run(|| lt_in(SkipList::new(TEST_OPTIONS).unwrap())) } #[test] fn test_lt_unify() { - run(|| lt_in(SkipMap::with_options(UNIFY_TEST_OPTIONS).unwrap())) + run(|| lt_in(SkipList::new(UNIFY_TEST_OPTIONS).unwrap())) } #[test] @@ -1101,7 +1013,7 @@ fn test_lt_map_mut() { .read(true) .write(true); let map_options = MmapOptions::default(); - lt_in(SkipMap::map_mut(p, open_options, map_options).unwrap()); + lt_in(unsafe { SkipList::map_mut(p, Options::new(), open_options, map_options).unwrap() }); } #[test] @@ -1110,7 +1022,7 @@ fn test_lt_map_mut() { fn test_lt_map_anon() { run(|| { let map_options = MmapOptions::default().len(ARENA_SIZE as u32); - lt_in(SkipMap::map_anon(map_options).unwrap()); + lt_in(SkipList::map_anon(Options::new(), map_options).unwrap()); }) } @@ -1119,25 +1031,25 @@ fn test_lt_map_anon() { fn test_lt_map_anon_unify() { run(|| { let map_options = MmapOptions::default().len(ARENA_SIZE as u32); - lt_in(SkipMap::map_anon_with_options(UNIFY_TEST_OPTIONS, map_options).unwrap()); + 
lt_in(SkipList::map_anon(UNIFY_TEST_OPTIONS, map_options).unwrap()); }) } -fn test_basic_large_testcases_in(l: Arc) { - #[cfg(not(miri))] +fn test_basic_large_testcases_in(l: SkipMap) { let n = 1000; #[cfg(miri)] let n = 200; //takes about 30s on miri, that's large enough for i in 0..n { - l.get_or_insert(0, &key(i), &new_value(i)).unwrap(); + l.get_or_insert(MIN_VERSION, &key(i), &new_value(i)) + .unwrap(); } for i in 0..n { let k = key(i); - let ent = l.get(0, &k).unwrap(); + let ent = l.get(MIN_VERSION, &k).unwrap(); assert_eq!(new_value(i), ent.value()); - assert_eq!(ent.trailer().version(), 0); + assert_eq!(ent.version(), 0); assert_eq!(ent.key(), k); } @@ -1147,7 +1059,7 @@ fn test_basic_large_testcases_in(l: Arc) { #[test] fn test_basic_large_testcases() { run(|| { - let l = Arc::new(SkipMap::with_options(TEST_OPTIONS).unwrap()); + let l = SkipList::new(TEST_OPTIONS).unwrap(); test_basic_large_testcases_in(l); }) } @@ -1155,7 +1067,7 @@ fn test_basic_large_testcases() { #[test] fn test_basic_large_testcases_unify() { run(|| { - let l = Arc::new(SkipMap::with_options(UNIFY_TEST_OPTIONS).unwrap()); + let l = SkipList::new(UNIFY_TEST_OPTIONS).unwrap(); test_basic_large_testcases_in(l); }) } @@ -1164,7 +1076,7 @@ fn test_basic_large_testcases_unify() { #[cfg(feature = "memmap")] #[cfg_attr(miri, ignore)] fn test_basic_large_testcases_map_mut() { - run(|| { + run(|| unsafe { let dir = tempfile::tempdir().unwrap(); let p = dir .path() @@ -1174,7 +1086,7 @@ fn test_basic_large_testcases_map_mut() { .read(true) .write(true); let map_options = MmapOptions::default(); - let l = Arc::new(SkipMap::map_mut(p, open_options, map_options).unwrap()); + let l = SkipList::map_mut(p, Options::new(), open_options, map_options).unwrap(); test_basic_large_testcases_in(l); }) } @@ -1184,7 +1096,7 @@ fn test_basic_large_testcases_map_mut() { fn test_basic_large_testcases_map_anon() { run(|| { let map_options = MmapOptions::default().len(ARENA_SIZE as u32); - let l = 
Arc::new(SkipMap::map_anon(map_options).unwrap()); + let l = SkipList::map_anon(Options::new(), map_options).unwrap(); test_basic_large_testcases_in(l); }) } @@ -1194,35 +1106,35 @@ fn test_basic_large_testcases_map_anon() { fn test_basic_large_testcases_map_anon_unify() { run(|| { let map_options = MmapOptions::default().len(ARENA_SIZE as u32); - let l = Arc::new(SkipMap::map_anon_with_options(UNIFY_TEST_OPTIONS, map_options).unwrap()); + let l = SkipList::map_anon(UNIFY_TEST_OPTIONS, map_options).unwrap(); test_basic_large_testcases_in(l); }) } #[cfg(feature = "std")] -fn test_concurrent_basic_runner(l: Arc) { +fn test_concurrent_basic_runner(l: SkipMap) { #[cfg(not(any(miri, feature = "loom")))] const N: usize = 1000; #[cfg(any(miri, feature = "loom"))] const N: usize = 5; - let mut wg = Arc::new(()); for i in 0..N { - let w = wg.clone(); let l = l.clone(); std::thread::spawn(move || { - l.get_or_insert(0, &key(i), &new_value(i)).unwrap(); - drop(w); + l.get_or_insert(MIN_VERSION, &key(i), &new_value(i)) + .unwrap(); }); } - while Arc::get_mut(&mut wg).is_none() {} + while l.refs() > 1 {} for i in 0..N { - let w = wg.clone(); let l = l.clone(); std::thread::spawn(move || { let k = key(i); - assert_eq!(l.get(0, &k).unwrap().value(), new_value(i), "broken: {i}"); - drop(w); + assert_eq!( + l.get(MIN_VERSION, &k).unwrap().value(), + new_value(i), + "broken: {i}" + ); }); } } @@ -1231,11 +1143,7 @@ fn test_concurrent_basic_runner(l: Arc) { #[cfg(feature = "std")] fn test_concurrent_basic() { run(|| { - let l = Arc::new( - SkipMap::with_options(TEST_OPTIONS) - .unwrap() - .with_yield_now(), - ); + let l = SkipList::new(TEST_OPTIONS).unwrap().with_yield_now(); test_concurrent_basic_runner(l); }) } @@ -1244,11 +1152,7 @@ fn test_concurrent_basic() { #[cfg(feature = "std")] fn test_concurrent_basic_unify() { run(|| { - let l = Arc::new( - SkipMap::with_options(UNIFY_TEST_OPTIONS) - .unwrap() - .with_yield_now(), - ); + let l = 
SkipList::new(UNIFY_TEST_OPTIONS).unwrap().with_yield_now(); test_concurrent_basic_runner(l); }) } @@ -1257,7 +1161,7 @@ fn test_concurrent_basic_unify() { #[cfg(feature = "memmap")] #[cfg_attr(miri, ignore)] fn test_concurrent_basic_map_mut() { - run(|| { + run(|| unsafe { let dir = tempfile::tempdir().unwrap(); let p = dir.path().join("test_skipmap_concurrent_basic_map_mut"); let open_options = OpenOptions::default() @@ -1265,11 +1169,9 @@ fn test_concurrent_basic_map_mut() { .read(true) .write(true); let map_options = MmapOptions::default(); - let l = Arc::new( - SkipMap::map_mut(p, open_options, map_options) - .unwrap() - .with_yield_now(), - ); + let l = SkipList::map_mut(p, Options::new(), open_options, map_options) + .unwrap() + .with_yield_now(); test_concurrent_basic_runner(l); }) } @@ -1279,9 +1181,11 @@ fn test_concurrent_basic_map_mut() { fn test_concurrent_basic_map_anon() { run(|| { let map_options = MmapOptions::default().len(ARENA_SIZE as u32); - test_concurrent_basic_runner(Arc::new( - SkipMap::map_anon(map_options).unwrap().with_yield_now(), - )); + test_concurrent_basic_runner( + SkipList::map_anon(Options::new(), map_options) + .unwrap() + .with_yield_now(), + ); }) } @@ -1290,16 +1194,16 @@ fn test_concurrent_basic_map_anon() { fn test_concurrent_basic_map_anon_unify() { run(|| { let map_options = MmapOptions::default().len(ARENA_SIZE as u32); - test_concurrent_basic_runner(Arc::new( - SkipMap::map_anon_with_options(UNIFY_TEST_OPTIONS, map_options) + test_concurrent_basic_runner( + SkipList::map_anon(UNIFY_TEST_OPTIONS, map_options) .unwrap() .with_yield_now(), - )); + ); }) } -#[cfg(feature = "std")] -fn test_concurrent_basic_big_values_runner(mut l: Arc) { +#[cfg(all(feature = "std", not(miri)))] +fn test_concurrent_basic_big_values_runner(l: SkipMap) { #[cfg(not(any(miri, feature = "loom")))] const N: usize = 100; #[cfg(any(miri, feature = "loom"))] @@ -1308,50 +1212,52 @@ fn test_concurrent_basic_big_values_runner(mut l: Arc) { for i in 
0..N { let l = l.clone(); std::thread::spawn(move || { - l.get_or_insert(0, &key(i), &big_value(i)).unwrap(); + l.get_or_insert(MIN_VERSION, &key(i), &big_value(i)) + .unwrap(); }); } - while Arc::get_mut(&mut l).is_none() {} + while l.refs() > 1 {} // assert_eq!(N, l.len()); for i in 0..N { let l = l.clone(); std::thread::spawn(move || { let k = key(i); - assert_eq!(l.get(0, &k).unwrap().value(), big_value(i), "broken: {i}"); + assert_eq!( + l.get(MIN_VERSION, &k).unwrap().value(), + big_value(i), + "broken: {i}" + ); }); } - while Arc::get_mut(&mut l).is_none() {} + while l.refs() > 1 {} } #[test] -#[cfg(feature = "std")] +#[cfg(all(feature = "std", not(miri)))] fn test_concurrent_basic_big_values() { run(|| { - test_concurrent_basic_big_values_runner(Arc::new( - SkipMap::with_options(BIG_TEST_OPTIONS) - .unwrap() - .with_yield_now(), - )); + test_concurrent_basic_big_values_runner( + SkipList::new(BIG_TEST_OPTIONS).unwrap().with_yield_now(), + ); }) } #[test] -#[cfg(feature = "std")] +#[cfg(all(feature = "std", not(miri)))] fn test_concurrent_basic_big_values_unify() { run(|| { - test_concurrent_basic_big_values_runner(Arc::new( - SkipMap::with_options(UNIFY_BIG_TEST_OPTIONS) + test_concurrent_basic_big_values_runner( + SkipList::new(UNIFY_BIG_TEST_OPTIONS) .unwrap() .with_yield_now(), - )); + ); }) } #[test] -#[cfg(feature = "memmap")] -#[cfg_attr(miri, ignore)] +#[cfg(all(feature = "memmap", not(miri)))] fn test_concurrent_basic_big_values_map_mut() { - run(|| { + run(|| unsafe { let dir = tempfile::tempdir().unwrap(); let p = dir .path() @@ -1361,42 +1267,44 @@ fn test_concurrent_basic_big_values_map_mut() { .read(true) .write(true); let map_options = MmapOptions::default(); - test_concurrent_basic_big_values_runner(Arc::new( - SkipMap::map_mut(p, open_options, map_options) + test_concurrent_basic_big_values_runner( + SkipList::map_mut(p, Options::new(), open_options, map_options) .unwrap() .with_yield_now(), - )); + ); }) } #[test] -#[cfg(feature = 
"memmap")] +#[cfg(all(feature = "memmap", not(miri)))] fn test_concurrent_basic_big_values_map_anon() { run(|| { let map_options = MmapOptions::default().len(120 << 20); - test_concurrent_basic_big_values_runner(Arc::new( - SkipMap::map_anon(map_options).unwrap().with_yield_now(), - )); + test_concurrent_basic_big_values_runner( + SkipList::map_anon(Options::new(), map_options) + .unwrap() + .with_yield_now(), + ); }) } #[test] -#[cfg(feature = "memmap")] +#[cfg(all(feature = "memmap", not(miri)))] fn test_concurrent_basic_big_values_map_anon_unify() { run(|| { let map_options = MmapOptions::default().len(120 << 20); - test_concurrent_basic_big_values_runner(Arc::new( - SkipMap::map_anon_with_options(UNIFY_BIG_TEST_OPTIONS, map_options) + test_concurrent_basic_big_values_runner( + SkipList::map_anon(UNIFY_BIG_TEST_OPTIONS, map_options) .unwrap() .with_yield_now(), - )); + ); }) } #[cfg(feature = "std")] -fn concurrent_one_key(l: Arc) { +fn concurrent_one_key(l: SkipMap) { #[cfg(not(any(miri, feature = "loom")))] - const N: usize = 100; + const N: usize = 5; #[cfg(any(miri, feature = "loom"))] const N: usize = 5; @@ -1405,7 +1313,7 @@ fn concurrent_one_key(l: Arc) { let wg = wg.add(1); let l = l.clone(); std::thread::spawn(move || { - let _ = l.get_or_insert(0, b"thekey", &make_value(i)); + let _ = l.get_or_insert(MIN_VERSION, b"thekey", &make_value(i)); wg.done(); }); } @@ -1418,12 +1326,12 @@ fn concurrent_one_key(l: Arc) { let l = l.clone(); let saw_value = saw_value.clone(); std::thread::spawn(move || { - let ent = l.get(0, b"thekey").unwrap(); + let ent = l.get(MIN_VERSION, b"thekey").unwrap(); let val = ent.value(); let num: usize = core::str::from_utf8(&val[1..]).unwrap().parse().unwrap(); assert!((0..N).contains(&num)); - let mut it = l.iter_all_versions(0); + let mut it = l.iter_all_versions(MIN_VERSION); let ent = it.seek_lower_bound(Bound::Included(b"thekey")).unwrap(); let val = ent.value().unwrap(); let num: usize = 
core::str::from_utf8(&val[1..]).unwrap().parse().unwrap(); @@ -1444,11 +1352,7 @@ fn concurrent_one_key(l: Arc) { #[cfg(feature = "std")] fn test_concurrent_one_key() { run(|| { - concurrent_one_key(Arc::new( - SkipMap::with_options(TEST_OPTIONS) - .unwrap() - .with_yield_now(), - )); + concurrent_one_key(SkipList::new(TEST_OPTIONS).unwrap().with_yield_now()); }) } @@ -1456,11 +1360,7 @@ fn test_concurrent_one_key() { #[cfg(feature = "std")] fn test_concurrent_one_key_unify() { run(|| { - concurrent_one_key(Arc::new( - SkipMap::with_options(UNIFY_TEST_OPTIONS) - .unwrap() - .with_yield_now(), - )); + concurrent_one_key(SkipList::new(UNIFY_TEST_OPTIONS).unwrap().with_yield_now()); }) } @@ -1468,7 +1368,7 @@ fn test_concurrent_one_key_unify() { #[cfg(feature = "memmap")] #[cfg_attr(miri, ignore)] fn test_concurrent_one_key_map_mut() { - run(|| { + run(|| unsafe { let dir = tempfile::tempdir().unwrap(); let p = dir.path().join("test_skipmap_concurrent_one_key_map_mut"); let open_options = OpenOptions::default() @@ -1476,11 +1376,11 @@ fn test_concurrent_one_key_map_mut() { .read(true) .write(true); let map_options = MmapOptions::default(); - concurrent_one_key(Arc::new( - SkipMap::map_mut(p, open_options, map_options) + concurrent_one_key( + SkipList::map_mut(p, Options::new(), open_options, map_options) .unwrap() .with_yield_now(), - )); + ); }) } @@ -1489,9 +1389,11 @@ fn test_concurrent_one_key_map_mut() { fn test_concurrent_one_key_map_anon() { run(|| { let map_options = MmapOptions::default().len(ARENA_SIZE as u32); - concurrent_one_key(Arc::new( - SkipMap::map_anon(map_options).unwrap().with_yield_now(), - )); + concurrent_one_key( + SkipList::map_anon(Options::new(), map_options) + .unwrap() + .with_yield_now(), + ); }) } @@ -1500,11 +1402,11 @@ fn test_concurrent_one_key_map_anon() { fn test_concurrent_one_key_map_anon_unify() { run(|| { let map_options = MmapOptions::default().len(ARENA_SIZE as u32); - concurrent_one_key(Arc::new( - 
SkipMap::map_anon_with_options(UNIFY_TEST_OPTIONS, map_options) + concurrent_one_key( + SkipList::map_anon(UNIFY_TEST_OPTIONS, map_options) .unwrap() .with_yield_now(), - )); + ); }) } @@ -1512,11 +1414,11 @@ fn iter_all_versions_next(l: SkipMap) { const N: usize = 100; for i in (0..N).rev() { - l.get_or_insert(0, &make_int_key(i), &make_value(i)) + l.get_or_insert(MIN_VERSION, &make_int_key(i), &make_value(i)) .unwrap(); } - let mut it = l.iter_all_versions(0); + let mut it = l.iter_all_versions(MIN_VERSION); let mut ent = it.seek_lower_bound(Bound::Unbounded).unwrap(); for i in 0..N { assert_eq!(ent.key(), make_int_key(i)); @@ -1531,19 +1433,19 @@ fn iter_all_versions_next(l: SkipMap) { #[test] fn test_iter_all_versions_next() { - run(|| iter_all_versions_next(SkipMap::with_options(TEST_OPTIONS).unwrap())); + run(|| iter_all_versions_next(SkipList::new(TEST_OPTIONS).unwrap())); } #[test] fn test_iter_all_versions_next_unify() { - run(|| iter_all_versions_next(SkipMap::with_options(UNIFY_TEST_OPTIONS).unwrap())); + run(|| iter_all_versions_next(SkipList::new(UNIFY_TEST_OPTIONS).unwrap())); } #[test] #[cfg(feature = "memmap")] #[cfg_attr(miri, ignore)] fn test_iter_all_versions_next_map_mut() { - run(|| { + run(|| unsafe { let dir = tempfile::tempdir().unwrap(); let p = dir .path() @@ -1553,7 +1455,9 @@ fn test_iter_all_versions_next_map_mut() { .read(true) .write(true); let map_options = MmapOptions::default(); - iter_all_versions_next(SkipMap::map_mut(p, open_options, map_options).unwrap()); + iter_all_versions_next( + SkipList::map_mut(p, Options::new(), open_options, map_options).unwrap(), + ); }) } @@ -1562,7 +1466,7 @@ fn test_iter_all_versions_next_map_mut() { fn test_iter_all_versions_next_map_anon() { run(|| { let map_options = MmapOptions::default().len(ARENA_SIZE as u32); - iter_all_versions_next(SkipMap::map_anon(map_options).unwrap()); + iter_all_versions_next(SkipList::map_anon(Options::new(), map_options).unwrap()); }) } @@ -1571,9 +1475,7 @@ fn 
test_iter_all_versions_next_map_anon() { fn test_iter_all_versions_next_map_anon_unify() { run(|| { let map_options = MmapOptions::default().len(ARENA_SIZE as u32); - iter_all_versions_next( - SkipMap::map_anon_with_options(UNIFY_TEST_OPTIONS, map_options).unwrap(), - ); + iter_all_versions_next(SkipList::map_anon(UNIFY_TEST_OPTIONS, map_options).unwrap()); }) } @@ -1581,12 +1483,12 @@ fn range_next(l: SkipMap) { const N: usize = 100; for i in (0..N).rev() { - l.get_or_insert(0, &make_int_key(i), &make_value(i)) + l.get_or_insert(MIN_VERSION, &make_int_key(i), &make_value(i)) .unwrap(); } let upper = make_int_key(50); - let mut it = l.range(0, ..=upper.as_slice()); + let mut it = l.range(MIN_VERSION, ..=upper.as_slice()); let mut ent = it.seek_lower_bound(Bound::Unbounded); for i in 0..N { if i <= 50 { @@ -1607,19 +1509,19 @@ fn range_next(l: SkipMap) { #[test] fn test_range_next() { - run(|| range_next(SkipMap::with_options(TEST_OPTIONS).unwrap())); + run(|| range_next(SkipList::new(TEST_OPTIONS).unwrap())); } #[test] fn test_range_next_unify() { - run(|| range_next(SkipMap::with_options(UNIFY_TEST_OPTIONS).unwrap())); + run(|| range_next(SkipList::new(UNIFY_TEST_OPTIONS).unwrap())); } #[test] #[cfg(feature = "memmap")] #[cfg_attr(miri, ignore)] fn test_range_next_map_mut() { - run(|| { + run(|| unsafe { let dir = tempfile::tempdir().unwrap(); let p = dir.path().join("test_skipmap_range_next_map_mut"); let open_options = OpenOptions::default() @@ -1627,7 +1529,9 @@ fn test_range_next_map_mut() { .read(true) .write(true); let map_options = MmapOptions::default(); - iter_all_versions_next(SkipMap::map_mut(p, open_options, map_options).unwrap()); + iter_all_versions_next( + SkipList::map_mut(p, Options::new(), open_options, map_options).unwrap(), + ); }) } @@ -1636,7 +1540,7 @@ fn test_range_next_map_mut() { fn test_range_next_map_anon() { run(|| { let map_options = MmapOptions::default().len(ARENA_SIZE as u32); - 
iter_all_versions_next(SkipMap::map_anon(map_options).unwrap()); + iter_all_versions_next(SkipList::map_anon(Options::new(), map_options).unwrap()); }) } @@ -1645,9 +1549,7 @@ fn test_range_next_map_anon() { fn test_range_next_map_anon_unify() { run(|| { let map_options = MmapOptions::default().len(ARENA_SIZE as u32); - iter_all_versions_next( - SkipMap::map_anon_with_options(UNIFY_TEST_OPTIONS, map_options).unwrap(), - ); + iter_all_versions_next(SkipList::map_anon(UNIFY_TEST_OPTIONS, map_options).unwrap()); }) } @@ -1655,11 +1557,11 @@ fn iter_all_versions_prev(l: SkipMap) { const N: usize = 100; for i in 0..N { - l.get_or_insert(0, &make_int_key(i), &make_value(i)) + l.get_or_insert(MIN_VERSION, &make_int_key(i), &make_value(i)) .unwrap(); } - let mut it = l.iter_all_versions(0); + let mut it = l.iter_all_versions(MIN_VERSION); let mut ent = it.seek_upper_bound(Bound::Unbounded).unwrap(); for i in (0..N).rev() { assert_eq!(ent.key(), make_int_key(i)); @@ -1674,14 +1576,14 @@ fn iter_all_versions_prev(l: SkipMap) { #[test] fn test_iter_all_versions_next_back() { - run(|| iter_all_versions_prev(SkipMap::with_options(TEST_OPTIONS).unwrap())) + run(|| iter_all_versions_prev(SkipList::new(TEST_OPTIONS).unwrap())) } #[test] #[cfg(feature = "memmap")] #[cfg_attr(miri, ignore)] fn test_iter_all_versions_prev_map_mut() { - run(|| { + run(|| unsafe { let dir = tempfile::tempdir().unwrap(); let p = dir .path() @@ -1691,7 +1593,9 @@ fn test_iter_all_versions_prev_map_mut() { .read(true) .write(true); let map_options = MmapOptions::default(); - iter_all_versions_prev(SkipMap::map_mut(p, open_options, map_options).unwrap()); + iter_all_versions_prev( + SkipList::map_mut(p, Options::new(), open_options, map_options).unwrap(), + ); }) } @@ -1700,7 +1604,7 @@ fn test_iter_all_versions_prev_map_mut() { fn test_iter_all_versions_prev_map_anon() { run(|| { let map_options = MmapOptions::default().len(ARENA_SIZE as u32); - 
iter_all_versions_prev(SkipMap::map_anon(map_options).unwrap()); + iter_all_versions_prev(SkipList::map_anon(Options::new(), map_options).unwrap()); }) } @@ -1709,9 +1613,7 @@ fn test_iter_all_versions_prev_map_anon() { fn test_iter_all_versions_prev_map_anon_unify() { run(|| { let map_options = MmapOptions::default().len(ARENA_SIZE as u32); - iter_all_versions_prev( - SkipMap::map_anon_with_options(UNIFY_TEST_OPTIONS, map_options).unwrap(), - ); + iter_all_versions_prev(SkipList::map_anon(UNIFY_TEST_OPTIONS, map_options).unwrap()); }) } @@ -1719,12 +1621,12 @@ fn range_prev(l: SkipMap) { const N: usize = 100; for i in 0..N { - l.get_or_insert(0, &make_int_key(i), &make_value(i)) + l.get_or_insert(MIN_VERSION, &make_int_key(i), &make_value(i)) .unwrap(); } let lower = make_int_key(50); - let mut it = l.range(0, lower.as_slice()..); + let mut it = l.range(MIN_VERSION, lower.as_slice()..); let mut ent = it.seek_upper_bound(Bound::Unbounded); for i in (0..N).rev() { if i >= 50 { @@ -1745,19 +1647,19 @@ fn range_prev(l: SkipMap) { #[test] fn test_range_prev() { - run(|| range_prev(SkipMap::with_options(TEST_OPTIONS).unwrap())); + run(|| range_prev(SkipList::new(TEST_OPTIONS).unwrap())); } #[test] fn test_range_prev_unify() { - run(|| range_prev(SkipMap::with_options(UNIFY_TEST_OPTIONS).unwrap())); + run(|| range_prev(SkipList::new(UNIFY_TEST_OPTIONS).unwrap())); } #[test] #[cfg(feature = "memmap")] #[cfg_attr(miri, ignore)] fn test_range_prev_map_mut() { - run(|| { + run(|| unsafe { let dir = tempfile::tempdir().unwrap(); let p = dir.path().join("test_skipmap_range_prev_map_mut"); let open_options = OpenOptions::default() @@ -1765,7 +1667,7 @@ fn test_range_prev_map_mut() { .read(true) .write(true); let map_options = MmapOptions::default(); - range_prev(SkipMap::map_mut(p, open_options, map_options).unwrap()); + range_prev(SkipList::map_mut(p, Options::new(), open_options, map_options).unwrap()); }) } @@ -1774,7 +1676,7 @@ fn test_range_prev_map_mut() { fn 
test_range_prev_map_anon() { run(|| { let map_options = MmapOptions::default().len(ARENA_SIZE as u32); - range_prev(SkipMap::map_anon(map_options).unwrap()); + range_prev(SkipList::map_anon(Options::new(), map_options).unwrap()); }) } @@ -1783,7 +1685,7 @@ fn test_range_prev_map_anon() { fn test_range_prev_map_anon_unify() { run(|| { let map_options = MmapOptions::default().len(ARENA_SIZE as u32); - range_prev(SkipMap::map_anon_with_options(UNIFY_TEST_OPTIONS, map_options).unwrap()); + range_prev(SkipList::map_anon(UNIFY_TEST_OPTIONS, map_options).unwrap()); }) } @@ -1792,11 +1694,11 @@ fn iter_all_versions_seek_ge(l: SkipMap) { for i in (0..N).rev() { let v = i * 10 + 1000; - l.get_or_insert(0, &make_int_key(v), &make_value(v)) + l.get_or_insert(MIN_VERSION, &make_int_key(v), &make_value(v)) .unwrap(); } - let mut it = l.iter_all_versions(0); + let mut it = l.iter_all_versions(MIN_VERSION); let ent = it.seek_lower_bound(Bound::Included(b"")).unwrap(); assert_eq!(ent.key(), make_int_key(1000)); assert_eq!(ent.value().unwrap(), make_value(1000)); @@ -1828,7 +1730,7 @@ fn iter_all_versions_seek_ge(l: SkipMap) { let ent = it.seek_lower_bound(Bound::Included(b"99999")); assert!(ent.is_none()); - l.get_or_insert(0, &[], &[]).unwrap(); + l.get_or_insert(MIN_VERSION, &[], &[]).unwrap(); let ent = it.seek_lower_bound(Bound::Included(b"")).unwrap(); assert_eq!(ent.key(), &[]); assert_eq!(ent.value().unwrap(), &[]); @@ -1840,19 +1742,19 @@ fn iter_all_versions_seek_ge(l: SkipMap) { #[test] fn test_iter_all_versions_seek_ge() { - run(|| iter_all_versions_seek_ge(SkipMap::with_options(TEST_OPTIONS).unwrap())); + run(|| iter_all_versions_seek_ge(SkipList::new(TEST_OPTIONS).unwrap())); } #[test] fn test_iter_all_versions_seek_ge_unify() { - run(|| iter_all_versions_seek_ge(SkipMap::with_options(UNIFY_TEST_OPTIONS).unwrap())); + run(|| iter_all_versions_seek_ge(SkipList::new(UNIFY_TEST_OPTIONS).unwrap())); } #[test] #[cfg(feature = "memmap")] #[cfg_attr(miri, ignore)] fn 
test_iter_all_versions_seek_ge_map_mut() { - run(|| { + run(|| unsafe { let dir = tempfile::tempdir().unwrap(); let p = dir .path() @@ -1862,7 +1764,9 @@ fn test_iter_all_versions_seek_ge_map_mut() { .read(true) .write(true); let map_options = MmapOptions::default(); - iter_all_versions_seek_ge(SkipMap::map_mut(p, open_options, map_options).unwrap()); + iter_all_versions_seek_ge( + SkipList::map_mut(p, Options::new(), open_options, map_options).unwrap(), + ); }) } @@ -1871,7 +1775,7 @@ fn test_iter_all_versions_seek_ge_map_mut() { fn test_iter_all_versions_seek_ge_map_anon() { run(|| { let map_options = MmapOptions::default().len(ARENA_SIZE as u32); - iter_all_versions_seek_ge(SkipMap::map_anon(map_options).unwrap()); + iter_all_versions_seek_ge(SkipList::map_anon(Options::new(), map_options).unwrap()); }) } @@ -1880,9 +1784,7 @@ fn test_iter_all_versions_seek_ge_map_anon() { fn test_iter_all_versions_seek_ge_map_anon_unify() { run(|| { let map_options = MmapOptions::default().len(ARENA_SIZE as u32); - iter_all_versions_seek_ge( - SkipMap::map_anon_with_options(UNIFY_TEST_OPTIONS, map_options).unwrap(), - ); + iter_all_versions_seek_ge(SkipList::map_anon(UNIFY_TEST_OPTIONS, map_options).unwrap()); }) } @@ -1891,11 +1793,11 @@ fn iter_all_versions_seek_lt(l: SkipMap) { for i in (0..N).rev() { let v = i * 10 + 1000; - l.get_or_insert(0, &make_int_key(v), &make_value(v)) + l.get_or_insert(MIN_VERSION, &make_int_key(v), &make_value(v)) .unwrap(); } - let mut it = l.iter_all_versions(0); + let mut it = l.iter_all_versions(MIN_VERSION); assert!(it.seek_upper_bound(Bound::Excluded(b"")).is_none()); let ent = it.seek_upper_bound(Bound::Excluded(b"01000")); @@ -1913,8 +1815,7 @@ fn iter_all_versions_seek_lt(l: SkipMap) { assert_eq!(ent.key(), make_int_key(1990)); assert_eq!(ent.value().unwrap(), make_value(1990)); - l.get_or_insert(0, &[], &[]).unwrap(); - assert!(l.lt(0, &[]).is_none()); + l.get_or_insert(MIN_VERSION, &[], &[]).unwrap(); let ent = 
it.seek_upper_bound(Bound::Excluded(b"")); assert!(ent.is_none()); @@ -1926,19 +1827,19 @@ fn iter_all_versions_seek_lt(l: SkipMap) { #[test] fn test_iter_all_versions_seek_lt() { - run(|| iter_all_versions_seek_lt(SkipMap::with_options(TEST_OPTIONS).unwrap())) + run(|| iter_all_versions_seek_lt(SkipList::new(TEST_OPTIONS).unwrap())) } #[test] fn test_iter_all_versions_seek_lt_unify() { - run(|| iter_all_versions_seek_lt(SkipMap::with_options(UNIFY_TEST_OPTIONS).unwrap())) + run(|| iter_all_versions_seek_lt(SkipList::new(UNIFY_TEST_OPTIONS).unwrap())) } #[test] #[cfg(feature = "memmap")] #[cfg_attr(miri, ignore)] fn test_iter_all_versions_seek_lt_map_mut() { - run(|| { + run(|| unsafe { let dir = tempfile::tempdir().unwrap(); let p = dir .path() @@ -1948,7 +1849,9 @@ fn test_iter_all_versions_seek_lt_map_mut() { .read(true) .write(true); let map_options = MmapOptions::default(); - iter_all_versions_seek_lt(SkipMap::map_mut(p, open_options, map_options).unwrap()); + iter_all_versions_seek_lt( + SkipList::map_mut(p, Options::new(), open_options, map_options).unwrap(), + ); }) } @@ -1957,7 +1860,7 @@ fn test_iter_all_versions_seek_lt_map_mut() { fn test_iter_all_versions_seek_lt_map_anon() { run(|| { let map_options = MmapOptions::default().len(ARENA_SIZE as u32); - iter_all_versions_seek_lt(SkipMap::map_anon(map_options).unwrap()); + iter_all_versions_seek_lt(SkipList::map_anon(Options::new(), map_options).unwrap()); }) } @@ -1966,21 +1869,19 @@ fn test_iter_all_versions_seek_lt_map_anon() { fn test_iter_all_versions_seek_lt_map_anon_unify() { run(|| { let map_options = MmapOptions::default().len(ARENA_SIZE as u32); - iter_all_versions_seek_lt( - SkipMap::map_anon_with_options(UNIFY_TEST_OPTIONS, map_options).unwrap(), - ); + iter_all_versions_seek_lt(SkipList::map_anon(UNIFY_TEST_OPTIONS, map_options).unwrap()); }) } fn range(l: SkipMap) { for i in 1..10 { - l.get_or_insert(0, &make_int_key(i), &make_value(i)) + l.get_or_insert(MIN_VERSION, &make_int_key(i), 
&make_value(i)) .unwrap(); } let k3 = make_int_key(3); let k7 = make_int_key(7); - let mut it = l.range(0, k3.as_slice()..k7.as_slice()).clone(); + let mut it = l.range(MIN_VERSION, k3.as_slice()..k7.as_slice()).clone(); assert_eq!(it.bounds(), &(k3.as_slice()..k7.as_slice())); for i in 3..=6 { @@ -2062,19 +1963,19 @@ fn range(l: SkipMap) { #[test] fn test_range() { - run(|| range(SkipMap::with_options(TEST_OPTIONS).unwrap())) + run(|| range(SkipList::new(TEST_OPTIONS).unwrap())) } #[test] fn test_range_unify() { - run(|| range(SkipMap::with_options(UNIFY_TEST_OPTIONS).unwrap())) + run(|| range(SkipList::new(UNIFY_TEST_OPTIONS).unwrap())) } #[test] #[cfg(feature = "memmap")] #[cfg_attr(miri, ignore)] fn test_range_map_mut() { - run(|| { + run(|| unsafe { let dir = tempfile::tempdir().unwrap(); let p = dir.path().join("test_skipmap_range_map_mut"); let open_options = OpenOptions::default() @@ -2082,7 +1983,7 @@ fn test_range_map_mut() { .read(true) .write(true); let map_options = MmapOptions::default(); - range(SkipMap::map_mut(p, open_options, map_options).unwrap()); + range(SkipList::map_mut(p, Options::new(), open_options, map_options).unwrap()); }) } @@ -2091,7 +1992,7 @@ fn test_range_map_mut() { fn test_range_map_anon() { run(|| { let map_options = MmapOptions::default().len(ARENA_SIZE as u32); - range(SkipMap::map_anon(map_options).unwrap()); + range(SkipList::map_anon(Options::new(), map_options).unwrap()); }) } @@ -2100,7 +2001,7 @@ fn test_range_map_anon() { fn test_range_map_anon_unify() { run(|| { let map_options = MmapOptions::default().len(ARENA_SIZE as u32); - range(SkipMap::map_anon_with_options(UNIFY_TEST_OPTIONS, map_options).unwrap()); + range(SkipList::map_anon(UNIFY_TEST_OPTIONS, map_options).unwrap()); }) } @@ -2108,7 +2009,7 @@ fn iter_latest(l: SkipMap) { const N: usize = 100; for i in 0..N { - l.get_or_insert(0, &make_int_key(i), &make_value(i)) + l.get_or_insert(MIN_VERSION, &make_int_key(i), &make_value(i)) .unwrap(); } @@ -2136,19 
+2037,19 @@ fn iter_latest(l: SkipMap) { #[test] fn test_iter_latest() { - run(|| iter_latest(SkipMap::with_options(TEST_OPTIONS).unwrap())) + run(|| iter_latest(SkipList::new(TEST_OPTIONS).unwrap())) } #[test] fn test_iter_latest_unify() { - run(|| iter_latest(SkipMap::with_options(UNIFY_TEST_OPTIONS).unwrap())) + run(|| iter_latest(SkipList::new(UNIFY_TEST_OPTIONS).unwrap())) } #[test] #[cfg(feature = "memmap")] #[cfg_attr(miri, ignore)] fn test_iter_latest_map_mut() { - run(|| { + run(|| unsafe { let dir = tempfile::tempdir().unwrap(); let p = dir.path().join("test_skipmap_iter_latest_map_mut"); let open_options = OpenOptions::default() @@ -2156,7 +2057,7 @@ fn test_iter_latest_map_mut() { .read(true) .write(true); let map_options = MmapOptions::default(); - iter_latest(SkipMap::map_mut(p, open_options, map_options).unwrap()); + iter_latest(SkipList::map_mut(p, Options::new(), open_options, map_options).unwrap()); }) } @@ -2165,7 +2066,7 @@ fn test_iter_latest_map_mut() { fn test_iter_latest_map_anon() { run(|| { let map_options = MmapOptions::default().len(ARENA_SIZE as u32); - iter_latest(SkipMap::map_anon(map_options).unwrap()); + iter_latest(SkipList::map_anon(Options::new(), map_options).unwrap()); }) } @@ -2174,7 +2075,7 @@ fn test_iter_latest_map_anon() { fn test_iter_latest_map_anon_unify() { run(|| { let map_options = MmapOptions::default().len(ARENA_SIZE as u32); - iter_latest(SkipMap::map_anon_with_options(UNIFY_TEST_OPTIONS, map_options).unwrap()); + iter_latest(SkipList::map_anon(UNIFY_TEST_OPTIONS, map_options).unwrap()); }) } @@ -2182,7 +2083,7 @@ fn range_latest(l: SkipMap) { const N: usize = 100; for i in 0..N { - l.get_or_insert(0, &make_int_key(i), &make_value(i)) + l.get_or_insert(MIN_VERSION, &make_int_key(i), &make_value(i)) .unwrap(); } @@ -2196,7 +2097,7 @@ fn range_latest(l: SkipMap) { .unwrap(); } - let mut it = l.range(4, ..); + let mut it = l.range::<[u8], _>(4, ..); let mut num = 0; for i in 0..N { let ent = it.next().unwrap(); @@ 
-2210,19 +2111,19 @@ fn range_latest(l: SkipMap) { #[test] fn test_range_latest() { - run(|| range_latest(SkipMap::with_options(TEST_OPTIONS).unwrap())) + run(|| range_latest(SkipList::new(TEST_OPTIONS).unwrap())) } #[test] fn test_range_latest_unify() { - run(|| range_latest(SkipMap::with_options(UNIFY_TEST_OPTIONS).unwrap())) + run(|| range_latest(SkipList::new(UNIFY_TEST_OPTIONS).unwrap())) } #[test] #[cfg(feature = "memmap")] #[cfg_attr(miri, ignore)] fn test_range_latest_map_mut() { - run(|| { + run(|| unsafe { let dir = tempfile::tempdir().unwrap(); let p = dir.path().join("test_skipmap_range_latest_map_mut"); let open_options = OpenOptions::default() @@ -2230,7 +2131,7 @@ fn test_range_latest_map_mut() { .read(true) .write(true); let map_options = MmapOptions::default(); - range_latest(SkipMap::map_mut(p, open_options, map_options).unwrap()); + range_latest(SkipList::map_mut(p, Options::new(), open_options, map_options).unwrap()); }) } @@ -2239,7 +2140,7 @@ fn test_range_latest_map_mut() { fn test_range_latest_map_anon() { run(|| { let map_options = MmapOptions::default().len(ARENA_SIZE as u32); - range_latest(SkipMap::map_anon(map_options).unwrap()); + range_latest(SkipList::map_anon(Options::new(), map_options).unwrap()); }) } @@ -2248,7 +2149,7 @@ fn test_range_latest_map_anon() { fn test_range_latest_map_anon_unify() { run(|| { let map_options = MmapOptions::default().len(ARENA_SIZE as u32); - range_latest(SkipMap::map_anon_with_options(UNIFY_TEST_OPTIONS, map_options).unwrap()); + range_latest(SkipList::map_anon(UNIFY_TEST_OPTIONS, map_options).unwrap()); }) } @@ -2256,7 +2157,7 @@ fn test_range_latest_map_anon_unify() { #[cfg(feature = "memmap")] #[cfg_attr(miri, ignore)] fn test_reopen_mmap() { - run(|| { + run(|| unsafe { let dir = tempfile::tempdir().unwrap(); let p = dir.path().join("reopen_skipmap"); { @@ -2265,32 +2166,91 @@ fn test_reopen_mmap() { .read(true) .write(true); let map_options = MmapOptions::default(); - let l = SkipMap::map_mut(&p, 
open_options, map_options).unwrap(); + let l = SkipMap::map_mut(&p, Options::new(), open_options, map_options).unwrap(); for i in 0..1000 { - l.get_or_insert(0, &key(i), &new_value(i)).unwrap(); + l.get_or_insert(MIN_VERSION, &key(i), &new_value(i)) + .unwrap(); } l.flush().unwrap(); } let open_options = OpenOptions::default().read(true); let map_options = MmapOptions::default(); - let l = SkipMap::::map(&p, open_options, map_options, 0).unwrap(); + let l = SkipMap::map(&p, Options::new(), open_options, map_options).unwrap(); assert_eq!(1000, l.len()); for i in 0..1000 { let k = key(i); - let ent = l.get(0, &k).unwrap(); + let ent = l.get(MIN_VERSION, &k).unwrap(); assert_eq!(new_value(i), ent.value()); - assert_eq!(ent.trailer().version(), 0); + assert_eq!(ent.version(), 0); assert_eq!(ent.key(), k); } }) } +#[test] +#[cfg(feature = "memmap")] +#[cfg_attr(miri, ignore)] +fn test_reopen_mmap_with_reserved() { + run(|| unsafe { + let dir = tempfile::tempdir().unwrap(); + let p = dir.path().join("reopen_skipmap_with_reserved"); + { + let open_options = OpenOptions::default() + .create(Some(ARENA_SIZE as u32)) + .read(true) + .write(true); + let map_options = MmapOptions::default(); + let l = SkipMap::map_mut( + &p, + Options::new().with_reserved(5), + open_options, + map_options, + ) + .unwrap(); + for i in 0..1000 { + l.get_or_insert(MIN_VERSION, &key(i), &new_value(i)) + .unwrap(); + } + l.flush().unwrap(); + let slice = l.reserved_slice_mut(); + assert_eq!(slice.len(), 5); + for i in 0..5 { + slice[i] = i as u8; + } + } + + let open_options = OpenOptions::default().read(true); + let map_options = MmapOptions::default(); + let l = SkipMap::map( + &p, + Options::new().with_reserved(5), + open_options, + map_options, + ) + .unwrap(); + assert_eq!(1000, l.len()); + for i in 0..1000 { + let k = key(i); + let ent = l.get(MIN_VERSION, &k).unwrap(); + assert_eq!(new_value(i), ent.value()); + assert_eq!(ent.version(), 0); + assert_eq!(ent.key(), k); + } + + let slice = 
l.reserved_slice(); + assert_eq!(slice.len(), 5); + for i in 0..5 { + assert_eq!(slice[i], i as u8); + } + }) +} + #[test] #[cfg(feature = "memmap")] #[cfg_attr(miri, ignore)] fn test_reopen_mmap2() { - run(|| { + run(|| unsafe { use rand::seq::SliceRandom; let dir = tempfile::tempdir().unwrap(); @@ -2301,21 +2261,32 @@ fn test_reopen_mmap2() { .read(true) .write(true); let map_options = MmapOptions::default(); - let l = SkipMap::map_mut_with_comparator(&p, open_options, map_options, Ascend).unwrap(); + let l = + SkipMap::map_mut_with_comparator(&p, Options::new(), open_options, map_options, Ascend) + .unwrap(); let mut data = (0..1000).collect::>(); data.shuffle(&mut rand::thread_rng()); - for i in data { + for i in &data { + let i = *i; l.get_or_insert(i as u64, &key(i), &new_value(i)).unwrap(); } l.flush_async().unwrap(); assert_eq!(l.max_version(), 999); assert_eq!(l.min_version(), 0); + + for i in data { + let k = key(i); + let ent = l.get(i as u64, &k).unwrap(); + assert_eq!(new_value(i), ent.value()); + assert_eq!(ent.version(), i as u64); + assert_eq!(ent.key(), k); + } } let open_options = OpenOptions::default().read(true); let map_options = MmapOptions::default(); - let l = SkipMap::::map_with_comparator(&p, open_options, map_options, Ascend, 0) - .unwrap(); + let l = + SkipMap::map_with_comparator(&p, Options::new(), open_options, map_options, Ascend).unwrap(); assert_eq!(1000, l.len()); let mut data = (0..1000).collect::>(); data.shuffle(&mut rand::thread_rng()); @@ -2323,7 +2294,7 @@ fn test_reopen_mmap2() { let k = key(i); let ent = l.get(i as u64, &k).unwrap(); assert_eq!(new_value(i), ent.value()); - assert_eq!(ent.trailer().version(), i as u64); + assert_eq!(ent.version(), i as u64); assert_eq!(ent.key(), k); } assert_eq!(l.max_version(), 999); @@ -2350,38 +2321,40 @@ fn get_or_insert_with_value(l: SkipMap) { let encoded_size = alice.encoded_size() as u32; - l.get_or_insert_with_value::<()>(1, b"alice", encoded_size, |val| { + let vb = 
ValueBuilder::new(encoded_size, |val| { assert_eq!(val.capacity(), encoded_size as usize); assert!(val.is_empty()); - val.write(&alice.id.to_le_bytes()).unwrap(); + val.put_u32_le(alice.id).unwrap(); assert_eq!(val.len(), 4); assert_eq!(val.remaining(), encoded_size as usize - 4); assert_eq!(&*val, alice.id.to_le_bytes()); val[..4].copy_from_slice(&alice.id.to_be_bytes()); assert_eq!(&*val, alice.id.to_be_bytes()); - val.write(alice.name.as_bytes()).unwrap(); + val.put_slice(alice.name.as_bytes()).unwrap(); assert_eq!(val.len(), encoded_size as usize); - let err = val.write(&[1]).unwrap_err(); + let err = val.put_slice(&[1]).unwrap_err(); assert_eq!( std::string::ToString::to_string(&err), "buffer does not have enough space (remaining 0, want 1)" ); Ok(()) - }) - .unwrap(); + }); + + l.get_or_insert_with_value_builder::<()>(1, b"alice", vb) + .unwrap(); } #[test] fn test_get_or_insert_with_value() { run(|| { - get_or_insert_with_value(SkipMap::with_options(TEST_OPTIONS).unwrap()); + get_or_insert_with_value(SkipList::new(TEST_OPTIONS).unwrap()); }) } #[test] fn test_get_or_insert_with_value_unify() { run(|| { - get_or_insert_with_value(SkipMap::with_options(UNIFY_TEST_OPTIONS).unwrap()); + get_or_insert_with_value(SkipList::new(UNIFY_TEST_OPTIONS).unwrap()); }) } @@ -2389,7 +2362,7 @@ fn test_get_or_insert_with_value_unify() { #[cfg(feature = "memmap")] #[cfg_attr(miri, ignore)] fn test_get_or_insert_with_value_map_mut() { - run(|| { + run(|| unsafe { let dir = tempfile::tempdir().unwrap(); let p = dir .path() @@ -2399,7 +2372,9 @@ fn test_get_or_insert_with_value_map_mut() { .read(true) .write(true); let map_options = MmapOptions::default(); - get_or_insert_with_value(SkipMap::map_mut(p, open_options, map_options).unwrap()); + get_or_insert_with_value( + SkipList::map_mut(p, Options::new(), open_options, map_options).unwrap(), + ); }) } @@ -2408,7 +2383,7 @@ fn test_get_or_insert_with_value_map_mut() { fn test_get_or_insert_with_value_map_anon() { run(|| { let 
map_options = MmapOptions::default().len(ARENA_SIZE as u32); - get_or_insert_with_value(SkipMap::map_anon(map_options).unwrap()); + get_or_insert_with_value(SkipList::map_anon(Options::new(), map_options).unwrap()); }) } @@ -2417,9 +2392,7 @@ fn test_get_or_insert_with_value_map_anon() { fn test_get_or_insert_with_value_map_anon_unify() { run(|| { let map_options = MmapOptions::default().len(ARENA_SIZE as u32); - get_or_insert_with_value( - SkipMap::map_anon_with_options(UNIFY_TEST_OPTIONS, map_options).unwrap(), - ); + get_or_insert_with_value(SkipList::map_anon(UNIFY_TEST_OPTIONS, map_options).unwrap()); }) } @@ -2431,51 +2404,48 @@ fn get_or_insert_with(l: SkipMap) { let encoded_size = alice.encoded_size() as u32; - l.get_or_insert_with::<()>( - 1, - u27::new(5), - |key| { - key.write(b"alice").unwrap(); - Ok(()) - }, - encoded_size, - |val| { - assert_eq!(val.capacity(), encoded_size as usize); - assert!(val.is_empty()); - val.write(&alice.id.to_le_bytes()).unwrap(); - assert_eq!(val.len(), 4); - assert_eq!(val.remaining(), encoded_size as usize - 4); - assert_eq!(&*val, alice.id.to_le_bytes()); - val[..4].copy_from_slice(&alice.id.to_be_bytes()); - assert_eq!(&*val, alice.id.to_be_bytes()); - val.write(alice.name.as_bytes()).unwrap(); - assert_eq!(val.len(), encoded_size as usize); - let err = val.write(&[1]).unwrap_err(); - assert_eq!( - std::string::ToString::to_string(&err), - "buffer does not have enough space (remaining 0, want 1)" - ); - Ok(()) - }, - ) - .unwrap(); + let kb = KeyBuilder::new(5u8.into(), |key: &mut VacantBuffer| { + key.put_slice(b"alice").unwrap(); + Ok(()) + }); + + let vb = ValueBuilder::new(encoded_size, |val: &mut VacantBuffer| { + assert_eq!(val.capacity(), encoded_size as usize); + assert!(val.is_empty()); + val.put_u32_le(alice.id).unwrap(); + assert_eq!(val.len(), 4); + assert_eq!(val.remaining(), encoded_size as usize - 4); + assert_eq!(&*val, alice.id.to_le_bytes()); + val[..4].copy_from_slice(&alice.id.to_be_bytes()); + 
assert_eq!(&*val, alice.id.to_be_bytes()); + val.put_slice(alice.name.as_bytes()).unwrap(); + assert_eq!(val.len(), encoded_size as usize); + let err = val.put_slice(&[1]).unwrap_err(); + assert_eq!( + std::string::ToString::to_string(&err), + "buffer does not have enough space (remaining 0, want 1)" + ); + Ok(()) + }); + + l.get_or_insert_with_builders::<(), ()>(1, kb, vb).unwrap(); } #[test] fn test_get_or_insert_with() { - run(|| get_or_insert_with(SkipMap::with_options(TEST_OPTIONS).unwrap())) + run(|| get_or_insert_with(SkipList::new(TEST_OPTIONS).unwrap())) } #[test] fn test_get_or_insert_with_unify() { - run(|| get_or_insert_with(SkipMap::with_options(UNIFY_TEST_OPTIONS).unwrap())) + run(|| get_or_insert_with(SkipList::new(UNIFY_TEST_OPTIONS).unwrap())) } #[test] #[cfg(feature = "memmap")] #[cfg_attr(miri, ignore)] fn test_get_or_insert_with_map_mut() { - run(|| { + run(|| unsafe { let dir = tempfile::tempdir().unwrap(); let p = dir.path().join("test_skipmap_get_or_insert_with_map_mut"); let open_options = OpenOptions::default() @@ -2483,7 +2453,7 @@ fn test_get_or_insert_with_map_mut() { .read(true) .write(true); let map_options = MmapOptions::default(); - get_or_insert_with(SkipMap::map_mut(p, open_options, map_options).unwrap()); + get_or_insert_with(SkipList::map_mut(p, Options::new(), open_options, map_options).unwrap()); }) } @@ -2492,7 +2462,7 @@ fn test_get_or_insert_with_map_mut() { fn test_get_or_insert_with_map_anon() { run(|| { let map_options = MmapOptions::default().len(ARENA_SIZE as u32); - get_or_insert_with(SkipMap::map_anon(map_options).unwrap()); + get_or_insert_with(SkipList::map_anon(Options::new(), map_options).unwrap()); }) } @@ -2501,7 +2471,7 @@ fn test_get_or_insert_with_map_anon() { fn test_get_or_insert_with_map_anon_unify() { run(|| { let map_options = MmapOptions::default().len(ARENA_SIZE as u32); - get_or_insert_with(SkipMap::map_anon_with_options(UNIFY_TEST_OPTIONS, map_options).unwrap()); + 
get_or_insert_with(SkipList::map_anon(UNIFY_TEST_OPTIONS, map_options).unwrap()); }) } @@ -2509,14 +2479,14 @@ fn insert_in(l: SkipMap) { let k = 0u64.to_le_bytes(); for i in 0..100 { let v = new_value(i); - let old = l.insert(0, &k, &v).unwrap(); + let old = l.insert(MIN_VERSION, &k, &v).unwrap(); if let Some(old) = old { assert_eq!(old.key(), k); assert_eq!(old.value(), new_value(i - 1)); } } - let ent = l.get(0, &k).unwrap(); + let ent = l.get(MIN_VERSION, &k).unwrap(); assert_eq!(ent.key(), k); assert_eq!(ent.value(), new_value(99)); } @@ -2524,14 +2494,14 @@ fn insert_in(l: SkipMap) { #[test] fn test_insert_in() { run(|| { - insert_in(SkipMap::with_options(TEST_OPTIONS).unwrap()); + insert_in(SkipList::new(TEST_OPTIONS).unwrap()); }) } #[test] fn test_insert_in_unify() { run(|| { - insert_in(SkipMap::with_options(UNIFY_TEST_OPTIONS).unwrap()); + insert_in(SkipList::new(UNIFY_TEST_OPTIONS).unwrap()); }) } @@ -2539,7 +2509,7 @@ fn test_insert_in_unify() { #[cfg(feature = "memmap")] #[cfg_attr(miri, ignore)] fn test_insert_in_map_mut() { - run(|| { + run(|| unsafe { let dir = tempfile::tempdir().unwrap(); let p = dir.path().join("test_skipmap_insert_in_map_mut"); let open_options = OpenOptions::default() @@ -2547,7 +2517,7 @@ fn test_insert_in_map_mut() { .read(true) .write(true); let map_options = MmapOptions::default(); - insert_in(SkipMap::map_mut(p, open_options, map_options).unwrap()); + insert_in(SkipList::map_mut(p, Options::new(), open_options, map_options).unwrap()); }) } @@ -2556,7 +2526,7 @@ fn test_insert_in_map_mut() { fn test_insert_in_map_anon() { run(|| { let map_options = MmapOptions::default().len(ARENA_SIZE as u32); - insert_in(SkipMap::map_anon(map_options).unwrap()); + insert_in(SkipList::map_anon(Options::new(), map_options).unwrap()); }) } @@ -2565,7 +2535,7 @@ fn test_insert_in_map_anon() { fn test_insert_in_map_anon_unify() { run(|| { let map_options = MmapOptions::default().len(ARENA_SIZE as u32); - 
insert_in(SkipMap::map_anon_with_options(UNIFY_TEST_OPTIONS, map_options).unwrap()); + insert_in(SkipList::map_anon(UNIFY_TEST_OPTIONS, map_options).unwrap()); }) } @@ -2577,50 +2547,53 @@ fn insert_with_value(l: SkipMap) { let encoded_size = alice.encoded_size() as u32; - l.insert_with_value::<()>(1, b"alice", encoded_size, |val| { + let vb = ValueBuilder::new(encoded_size, |val| { assert_eq!(val.capacity(), encoded_size as usize); assert!(val.is_empty()); - val.write(&alice.id.to_le_bytes()).unwrap(); + val.put_u32_le(alice.id).unwrap(); assert_eq!(val.len(), 4); assert_eq!(val.remaining(), encoded_size as usize - 4); assert_eq!(val, alice.id.to_le_bytes()); val[..4].copy_from_slice(&alice.id.to_be_bytes()); assert_eq!(val, alice.id.to_be_bytes()); - val.write(alice.name.as_bytes()).unwrap(); + val.put_slice(alice.name.as_bytes()).unwrap(); assert_eq!(val.len(), encoded_size as usize); - let err = val.write(&[1]).unwrap_err(); + let err = val.put_slice(&[1]).unwrap_err(); assert_eq!( std::string::ToString::to_string(&err), "buffer does not have enough space (remaining 0, want 1)" ); Ok(()) - }) - .unwrap(); + }); + + l.insert_with_value_builder::<()>(1, b"alice", vb).unwrap(); let alice2 = Person { id: 2, name: std::string::String::from("Alice"), }; + let vb = ValueBuilder::new(encoded_size, |val| { + assert_eq!(val.capacity(), encoded_size as usize); + assert!(val.is_empty()); + val.put_u32_le(alice2.id).unwrap(); + assert_eq!(val.len(), 4); + assert_eq!(val.remaining(), encoded_size as usize - 4); + assert_eq!(&*val, alice2.id.to_le_bytes()); + val[..4].copy_from_slice(&alice2.id.to_be_bytes()); + assert_eq!(&*val, alice2.id.to_be_bytes()); + val.put_slice(alice2.name.as_bytes()).unwrap(); + assert_eq!(val.len(), encoded_size as usize); + let err = val.put_slice(&[1]).unwrap_err(); + assert_eq!( + std::string::ToString::to_string(&err), + "buffer does not have enough space (remaining 0, want 1)" + ); + Ok(()) + }); + let old = l - .insert_with_value::<()>(1, 
b"alice", encoded_size, |val| { - assert_eq!(val.capacity(), encoded_size as usize); - assert!(val.is_empty()); - val.write(&alice2.id.to_le_bytes()).unwrap(); - assert_eq!(val.len(), 4); - assert_eq!(val.remaining(), encoded_size as usize - 4); - assert_eq!(&*val, alice2.id.to_le_bytes()); - val[..4].copy_from_slice(&alice2.id.to_be_bytes()); - assert_eq!(&*val, alice2.id.to_be_bytes()); - val.write(alice2.name.as_bytes()).unwrap(); - assert_eq!(val.len(), encoded_size as usize); - let err = val.write(&[1]).unwrap_err(); - assert_eq!( - std::string::ToString::to_string(&err), - "buffer does not have enough space (remaining 0, want 1)" - ); - Ok(()) - }) + .insert_with_value_builder::<()>(1, b"alice", vb) .unwrap() .unwrap(); @@ -2634,19 +2607,19 @@ fn insert_with_value(l: SkipMap) { #[test] fn test_insert_with_value() { - run(|| insert_with_value(SkipMap::with_options(TEST_OPTIONS).unwrap())); + run(|| insert_with_value(SkipList::new(TEST_OPTIONS).unwrap())); } #[test] fn test_insert_with_value_unify() { - run(|| insert_with_value(SkipMap::with_options(UNIFY_TEST_OPTIONS).unwrap())); + run(|| insert_with_value(SkipList::new(UNIFY_TEST_OPTIONS).unwrap())); } #[test] #[cfg(feature = "memmap")] #[cfg_attr(miri, ignore)] fn test_insert_with_value_map_mut() { - run(|| { + run(|| unsafe { let dir = tempfile::tempdir().unwrap(); let p = dir .path() @@ -2656,7 +2629,7 @@ fn test_insert_with_value_map_mut() { .read(true) .write(true); let map_options = MmapOptions::default(); - insert_with_value(SkipMap::map_mut(p, open_options, map_options).unwrap()); + insert_with_value(SkipList::map_mut(p, Options::new(), open_options, map_options).unwrap()); }) } @@ -2665,7 +2638,7 @@ fn test_insert_with_value_map_mut() { fn test_insert_with_value_map_anon() { run(|| { let map_options = MmapOptions::default().len(ARENA_SIZE as u32); - insert_with_value(SkipMap::map_anon(map_options).unwrap()); + insert_with_value(SkipList::map_anon(Options::new(), map_options).unwrap()); }) } @@ 
-2674,7 +2647,7 @@ fn test_insert_with_value_map_anon() { fn test_insert_with_value_map_anon_unify() { run(|| { let map_options = MmapOptions::default().len(ARENA_SIZE as u32); - insert_with_value(SkipMap::map_anon_with_options(UNIFY_TEST_OPTIONS, map_options).unwrap()); + insert_with_value(SkipList::map_anon(UNIFY_TEST_OPTIONS, map_options).unwrap()); }) } @@ -2686,68 +2659,57 @@ fn insert_with(l: SkipMap) { let encoded_size = alice.encoded_size() as u32; - l.insert_with::<()>( - 1, - u27::new(5), - |key| { - key.write(b"alice").unwrap(); - Ok(()) - }, - encoded_size, - |val| { - assert_eq!(val.capacity(), encoded_size as usize); - assert!(val.is_empty()); - val.write(&alice.id.to_le_bytes()).unwrap(); - assert_eq!(val.len(), 4); - assert_eq!(val.remaining(), encoded_size as usize - 4); - assert_eq!(val, alice.id.to_le_bytes()); - val[..4].copy_from_slice(&alice.id.to_be_bytes()); - assert_eq!(val, alice.id.to_be_bytes()); - val.write(alice.name.as_bytes()).unwrap(); - assert_eq!(val.len(), encoded_size as usize); - let err = val.write(&[1]).unwrap_err(); - assert_eq!( - std::string::ToString::to_string(&err), - "buffer does not have enough space (remaining 0, want 1)" - ); - Ok(()) - }, - ) - .unwrap(); + let kb = KeyBuilder::new(5u8.into(), |key: &mut VacantBuffer| { + key.put_slice(b"alice").unwrap(); + Ok(()) + }); + + let vb = ValueBuilder::new(encoded_size as u32, |val: &mut VacantBuffer| { + assert_eq!(val.capacity(), encoded_size as usize); + assert!(val.is_empty()); + val.put_u32_le(alice.id).unwrap(); + assert_eq!(val.len(), 4); + assert_eq!(val.remaining(), encoded_size as usize - 4); + assert_eq!(val, alice.id.to_le_bytes()); + val[..4].copy_from_slice(&alice.id.to_be_bytes()); + assert_eq!(val, alice.id.to_be_bytes()); + val.put_slice(alice.name.as_bytes()).unwrap(); + assert_eq!(val.len(), encoded_size as usize); + let err = val.put_slice(&[1]).unwrap_err(); + assert_eq!( + std::string::ToString::to_string(&err), + "buffer does not have enough space 
(remaining 0, want 1)" + ); + Ok(()) + }); + + l.insert_with_builders::<(), ()>(1, kb, vb).unwrap(); let alice2 = Person { id: 2, name: std::string::String::from("Alice"), }; + let vb = ValueBuilder::new(encoded_size, |val: &mut VacantBuffer| { + assert_eq!(val.capacity(), encoded_size as usize); + assert!(val.is_empty()); + val.put_u32_le(alice2.id).unwrap(); + assert_eq!(val.len(), 4); + assert_eq!(val.remaining(), encoded_size as usize - 4); + assert_eq!(&*val, alice2.id.to_le_bytes()); + val[..4].copy_from_slice(&alice2.id.to_be_bytes()); + assert_eq!(&*val, alice2.id.to_be_bytes()); + val.put_slice(alice2.name.as_bytes()).unwrap(); + assert_eq!(val.len(), encoded_size as usize); + let err = val.put_slice(&[1]).unwrap_err(); + assert_eq!( + std::string::ToString::to_string(&err), + "buffer does not have enough space (remaining 0, want 1)" + ); + Ok(()) + }); let old = l - .insert_with::<()>( - 1, - u27::new(5), - |key| { - key.write(b"alice").unwrap(); - Ok(()) - }, - encoded_size, - |val| { - assert_eq!(val.capacity(), encoded_size as usize); - assert!(val.is_empty()); - val.write(&alice2.id.to_le_bytes()).unwrap(); - assert_eq!(val.len(), 4); - assert_eq!(val.remaining(), encoded_size as usize - 4); - assert_eq!(&*val, alice2.id.to_le_bytes()); - val[..4].copy_from_slice(&alice2.id.to_be_bytes()); - assert_eq!(&*val, alice2.id.to_be_bytes()); - val.write(alice2.name.as_bytes()).unwrap(); - assert_eq!(val.len(), encoded_size as usize); - let err = val.write(&[1]).unwrap_err(); - assert_eq!( - std::string::ToString::to_string(&err), - "buffer does not have enough space (remaining 0, want 1)" - ); - Ok(()) - }, - ) + .insert_with_builders::<(), ()>(1, kb, vb) .unwrap() .unwrap(); @@ -2761,19 +2723,19 @@ fn insert_with(l: SkipMap) { #[test] fn test_insert_with() { - run(|| insert_with(SkipMap::with_options(TEST_OPTIONS).unwrap())) + run(|| insert_with(SkipList::new(TEST_OPTIONS).unwrap())) } #[test] fn test_insert_with_unify() { - run(|| 
insert_with(SkipMap::with_options(UNIFY_TEST_OPTIONS).unwrap())) + run(|| insert_with(SkipList::new(UNIFY_TEST_OPTIONS).unwrap())) } #[test] #[cfg(feature = "memmap")] #[cfg_attr(miri, ignore)] fn test_insert_with_map_mut() { - run(|| { + run(|| unsafe { let dir = tempfile::tempdir().unwrap(); let p = dir.path().join("test_skipmap_insert_with_map_mut"); let open_options = OpenOptions::default() @@ -2781,7 +2743,7 @@ fn test_insert_with_map_mut() { .read(true) .write(true); let map_options = MmapOptions::default(); - insert_with(SkipMap::map_mut(p, open_options, map_options).unwrap()); + insert_with(SkipList::map_mut(p, Options::new(), open_options, map_options).unwrap()); }) } @@ -2790,7 +2752,7 @@ fn test_insert_with_map_mut() { fn test_insert_with_map_anon() { run(|| { let map_options = MmapOptions::default().len(ARENA_SIZE as u32); - insert_with(SkipMap::map_anon(map_options).unwrap()); + insert_with(SkipList::map_anon(Options::new(), map_options).unwrap()); }) } @@ -2799,30 +2761,30 @@ fn test_insert_with_map_anon() { fn test_insert_with_map_anon_unify() { run(|| { let map_options = MmapOptions::default().len(ARENA_SIZE as u32); - insert_with(SkipMap::map_anon_with_options(UNIFY_TEST_OPTIONS, map_options).unwrap()); + insert_with(SkipList::map_anon(UNIFY_TEST_OPTIONS, map_options).unwrap()); }) } fn get_or_remove(l: SkipMap) { for i in 0..100 { let v = new_value(i); - l.insert(0, &key(i), &v).unwrap(); + l.insert(MIN_VERSION, &key(i), &v).unwrap(); } for i in 0..100 { let k = key(i); - let old = l.get_or_remove(0, &k).unwrap().unwrap(); + let old = l.get_or_remove(MIN_VERSION, &k).unwrap().unwrap(); assert_eq!(old.key(), k); assert_eq!(old.value(), new_value(i)); - let old = l.get_or_remove(0, &k).unwrap().unwrap(); + let old = l.get_or_remove(MIN_VERSION, &k).unwrap().unwrap(); assert_eq!(old.key(), k); assert_eq!(old.value(), new_value(i)); } for i in 0..100 { let k = key(i); - let ent = l.get(0, &k).unwrap(); + let ent = l.get(MIN_VERSION, &k).unwrap(); 
assert_eq!(ent.key(), k); assert_eq!(ent.value(), new_value(i)); } @@ -2830,19 +2792,19 @@ fn get_or_remove(l: SkipMap) { #[test] fn test_get_or_remove() { - run(|| get_or_remove(SkipMap::with_options(TEST_OPTIONS).unwrap())) + run(|| get_or_remove(SkipList::new(TEST_OPTIONS).unwrap())) } #[test] fn test_get_or_remove_unify() { - run(|| get_or_remove(SkipMap::with_options(UNIFY_TEST_OPTIONS).unwrap())) + run(|| get_or_remove(SkipList::new(UNIFY_TEST_OPTIONS).unwrap())) } #[test] #[cfg(feature = "memmap")] #[cfg_attr(miri, ignore)] fn test_get_or_remove_map_mut() { - run(|| { + run(|| unsafe { let dir = tempfile::tempdir().unwrap(); let p = dir.path().join("test_skipmap_get_or_remove_map_mut"); let open_options = OpenOptions::default() @@ -2850,7 +2812,7 @@ fn test_get_or_remove_map_mut() { .read(true) .write(true); let map_options = MmapOptions::default(); - get_or_remove(SkipMap::map_mut(p, open_options, map_options).unwrap()); + get_or_remove(SkipList::map_mut(p, Options::new(), open_options, map_options).unwrap()); }) } @@ -2859,7 +2821,7 @@ fn test_get_or_remove_map_mut() { fn test_get_or_remove_map_anon() { run(|| { let map_options = MmapOptions::default().len(ARENA_SIZE as u32); - get_or_remove(SkipMap::map_anon(map_options).unwrap()); + get_or_remove(SkipList::map_anon(Options::new(), map_options).unwrap()); }) } @@ -2868,53 +2830,53 @@ fn test_get_or_remove_map_anon() { fn test_get_or_remove_map_anon_unify() { run(|| { let map_options = MmapOptions::default().len(ARENA_SIZE as u32); - get_or_remove(SkipMap::map_anon_with_options(UNIFY_TEST_OPTIONS, map_options).unwrap()); + get_or_remove(SkipList::map_anon(UNIFY_TEST_OPTIONS, map_options).unwrap()); }) } fn remove(l: SkipMap) { for i in 0..100 { let v = new_value(i); - l.insert(0, &key(i), &v).unwrap(); + l.insert(MIN_VERSION, &key(i), &v).unwrap(); } for i in 0..100 { let k = key(i); // no race, remove should succeed let old = l - .compare_remove(0, &k, Ordering::SeqCst, Ordering::Acquire) + 
.compare_remove(MIN_VERSION, &k, Ordering::SeqCst, Ordering::Acquire) .unwrap(); assert!(old.is_none()); // key already removed let old = l - .compare_remove(0, &k, Ordering::SeqCst, Ordering::Acquire) + .compare_remove(MIN_VERSION, &k, Ordering::SeqCst, Ordering::Acquire) .unwrap(); assert!(old.is_none()); } for i in 0..100 { let k = key(i); - let ent = l.get(0, &k); + let ent = l.get(MIN_VERSION, &k); assert!(ent.is_none()); } } #[test] fn test_remove() { - run(|| remove(SkipMap::with_options(TEST_OPTIONS).unwrap())) + run(|| remove(SkipList::new(TEST_OPTIONS).unwrap())) } #[test] fn test_remove_unify() { - run(|| remove(SkipMap::with_options(UNIFY_TEST_OPTIONS).unwrap())) + run(|| remove(SkipList::new(UNIFY_TEST_OPTIONS).unwrap())) } #[test] #[cfg(feature = "memmap")] #[cfg_attr(miri, ignore)] fn test_remove_map_mut() { - run(|| { + run(|| unsafe { let dir = tempfile::tempdir().unwrap(); let p = dir.path().join("test_skipmap_remove_map_mut"); let open_options = OpenOptions::default() @@ -2922,7 +2884,7 @@ fn test_remove_map_mut() { .read(true) .write(true); let map_options = MmapOptions::default(); - remove(SkipMap::map_mut(p, open_options, map_options).unwrap()); + remove(SkipList::map_mut(p, Options::new(), open_options, map_options).unwrap()); }) } @@ -2931,7 +2893,7 @@ fn test_remove_map_mut() { fn test_remove_map_anon() { run(|| { let map_options = MmapOptions::default().len(ARENA_SIZE as u32); - remove(SkipMap::map_anon(map_options).unwrap()); + remove(SkipList::map_anon(Options::new(), map_options).unwrap()); }) } @@ -2940,14 +2902,14 @@ fn test_remove_map_anon() { fn test_remove_map_anon_unify() { run(|| { let map_options = MmapOptions::default().len(ARENA_SIZE as u32); - remove(SkipMap::map_anon_with_options(UNIFY_TEST_OPTIONS, map_options).unwrap()); + remove(SkipList::map_anon(UNIFY_TEST_OPTIONS, map_options).unwrap()); }) } fn remove2(l: SkipMap) { for i in 0..100 { let v = new_value(i); - l.insert(0, &key(i), &v).unwrap(); + l.insert(MIN_VERSION, 
&key(i), &v).unwrap(); } for i in 0..100 { @@ -2960,33 +2922,33 @@ fn remove2(l: SkipMap) { // no-race, remove should succeed let old = l - .compare_remove(0, &k, Ordering::SeqCst, Ordering::Acquire) + .compare_remove(MIN_VERSION, &k, Ordering::SeqCst, Ordering::Acquire) .unwrap(); assert!(old.is_none()); } for i in 0..100 { let k = key(i); - let ent = l.get(0, &k); + let ent = l.get(MIN_VERSION, &k); assert!(ent.is_none()); } } #[test] fn test_remove2() { - run(|| remove2(SkipMap::with_options(TEST_OPTIONS).unwrap())) + run(|| remove2(SkipList::new(TEST_OPTIONS).unwrap())) } #[test] fn test_remove2_unify() { - run(|| remove2(SkipMap::with_options(UNIFY_TEST_OPTIONS).unwrap())) + run(|| remove2(SkipList::new(UNIFY_TEST_OPTIONS).unwrap())) } #[test] #[cfg(feature = "memmap")] #[cfg_attr(miri, ignore)] fn test_remove2_map_mut() { - run(|| { + run(|| unsafe { let dir = tempfile::tempdir().unwrap(); let p = dir.path().join("test_skipmap_remove2_map_mut"); let open_options = OpenOptions::default() @@ -2994,16 +2956,16 @@ fn test_remove2_map_mut() { .read(true) .write(true); let map_options = MmapOptions::default(); - remove2(SkipMap::map_mut(p, open_options, map_options).unwrap()); + remove2(SkipList::map_mut(p, Options::new(), open_options, map_options).unwrap()); }) } #[test] #[cfg(feature = "memmap")] fn test_remove2_map_anon() { - run(|| { + run(|| unsafe { let map_options = MmapOptions::default().len(ARENA_SIZE as u32); - remove2(SkipMap::map_anon(map_options).unwrap()); + remove2(SkipList::map_anon(Options::new(), map_options).unwrap()); }) } @@ -3012,6 +2974,6 @@ fn test_remove2_map_anon() { fn test_remove2_map_anon_unify() { run(|| { let map_options = MmapOptions::default().len(ARENA_SIZE as u32); - remove2(SkipMap::map_anon_with_options(UNIFY_TEST_OPTIONS, map_options).unwrap()); + remove2(SkipList::map_anon(UNIFY_TEST_OPTIONS, map_options).unwrap()); }) } diff --git a/src/sync/trailed.rs b/src/sync/trailed.rs new file mode 100644 index 0000000..673df88 --- 
/dev/null +++ b/src/sync/trailed.rs @@ -0,0 +1,1424 @@ +use core::{borrow::Borrow, marker::PhantomData}; + +use super::*; + +use among::Among; +use base::{EntryRef, Iter}; + +type Allocator = GenericAllocator, Arena>; +type SkipList = base::SkipList, C>; + +node_pointer!(TrailedNode); + +/// A node that supports trailer. +#[repr(C)] +pub struct TrailedNode { + // A byte slice is 24 bytes. We are trying to save space here. + /// Multiple parts of the value are encoded as a single u64 so that it + /// can be atomically loaded and stored: + /// value offset: u32 (bits 0-31) + /// value size : u32 (bits 32-63) + value: AtomicValuePointer, + // Immutable. No need to lock to access key. + key_offset: u32, + // Immutable. No need to lock to access key. + key_size_and_height: u32, + trailer: PhantomData, + // ** DO NOT REMOVE BELOW COMMENT** + // The below field will be attached after the node, have to comment out + // this field, because each node will not use the full height, the code will + // not allocate the full size of the tower. + // + // Most nodes do not need to use the full height of the tower, since the + // probability of each successive level decreases exponentially. Because + // these elements are never accessed, they do not need to be allocated. + // Therefore, when a node is allocated in the arena, its memory footprint + // is deliberately truncated to not include unneeded tower elements. + // + // All accesses to elements should use CAS operations, with no need to lock. 
+ // pub(super) tower: [Link; self.opts.max_height], +} + +impl core::fmt::Debug for TrailedNode { + fn fmt(&self, f: &mut core::fmt::Formatter<'_>) -> core::fmt::Result { + let (key_size, height) = decode_key_size_and_height(self.key_size_and_height); + let (value_offset, value_size) = decode_value_pointer(self.value.0.load(Ordering::Relaxed)); + f.debug_struct("Node") + .field("value_offset", &value_offset) + .field("value_size", &value_size) + .field("key_offset", &self.key_offset) + .field("key_size", &key_size) + .field("height", &height) + .finish() + } +} + +impl WithTrailer for TrailedNode {} + +impl Node for TrailedNode { + type Link = Link; + + type Trailer = T; + + type ValuePointer = AtomicValuePointer; + + type Pointer = NodePointer; + + fn full(value_offset: u32, max_height: u8) -> Self { + Self { + value: AtomicValuePointer::new(value_offset, 0), + key_offset: 0, + key_size_and_height: encode_key_size_and_height(0, max_height), + trailer: PhantomData, + } + } + + #[inline] + fn value_pointer(&self) -> &Self::ValuePointer { + &self.value + } + + #[inline] + fn set_value_pointer(&mut self, offset: u32, size: u32) { + self.value = AtomicValuePointer::new(offset, size); + } + + #[inline] + fn clear_value( + &self, + arena: &A, + success: Ordering, + failure: Ordering, + ) -> Result<(), (u32, u32)> { + self + .value + .compare_remove(success, failure) + .map(|(_, old_len)| { + if old_len != REMOVE { + arena.increase_discarded(old_len); + } + }) + } + + #[inline] + fn set_key_size_and_height(&mut self, key_size_and_height: u32) { + self.key_size_and_height = key_size_and_height; + } + + #[inline] + fn set_key_offset(&mut self, key_offset: u32) { + self.key_offset = key_offset; + } + + #[inline] + fn version(&self) -> Version { + 0 + } + + #[inline] + fn set_version(&mut self, _: Version) {} + + #[inline] + fn key_size_and_height(&self) -> u32 { + self.key_size_and_height + } + + #[inline] + fn key_offset(&self) -> u32 { + self.key_offset + } +} + +/// A 
fast, lock-free, thread-safe ARENA based `SkipMap` that supports trailed structure, forward and backward iteration. +/// +/// If you want to use in non-concurrent environment, you can use [`unsync::trailed::SkipMap`]. +#[repr(transparent)] +pub struct SkipMap(SkipList); + +impl Clone for SkipMap { + #[inline] + fn clone(&self) -> Self { + Self(self.0.clone()) + } +} + +impl SkipMap { + /// Create a new skipmap with default options. + /// + /// **Note:** The capacity stands for how many memory allocated, + /// it does not mean the skiplist can store `cap` entries. + /// + /// + /// + /// **What the difference between this method and [`SkipMap::mmap_anon`]?** + /// + /// 1. This method will use an `AlignedVec` ensures we are working within Rust's memory safety guarantees. + /// Even if we are working with raw pointers with `Box::into_raw`, + /// the backend ARENA will reclaim the ownership of this memory by converting it back to a `Box` + /// when dropping the backend ARENA. Since `AlignedVec` uses heap memory, the data might be more cache-friendly, + /// especially if you're frequently accessing or modifying it. + /// + /// 2. Where as [`SkipMap::mmap_anon`] will use mmap anonymous to require memory from the OS. + /// If you require very large contiguous memory regions, `mmap` might be more suitable because + /// it's more direct in requesting large chunks of memory from the OS. + /// + /// [`SkipMap::mmap_anon`]: #method.mmap_anon + pub fn new(opts: Options) -> Result { + Self::with_comparator(opts, Ascend) + } + + /// Create a new memory map file backed with default options. + /// + /// **Note:** The capacity stands for how many memory mmaped, + /// it does not mean the skipmap can store `cap` entries. 
+ /// + /// # Safety + /// - If trying to reopens a skiplist, then the trailer type must be the same as the previous one + #[cfg(all(feature = "memmap", not(target_family = "wasm")))] + #[cfg_attr(docsrs, doc(cfg(all(feature = "memmap", not(target_family = "wasm")))))] + pub unsafe fn map_mut>( + path: P, + opts: Options, + open_options: OpenOptions, + mmap_options: MmapOptions, + ) -> std::io::Result { + Self::map_mut_with_comparator(path, opts, open_options, mmap_options, Ascend) + } + + /// Open an exist file and mmap it to create skipmap. + /// + /// # Safety + /// - If trying to reopens a skiplist, then the trailer type must be the same as the previous one + #[cfg(all(feature = "memmap", not(target_family = "wasm")))] + #[cfg_attr(docsrs, doc(cfg(all(feature = "memmap", not(target_family = "wasm")))))] + pub unsafe fn map>( + path: P, + opts: Options, + open_options: OpenOptions, + mmap_options: MmapOptions, + ) -> std::io::Result { + Self::map_with_comparator(path, opts, open_options, mmap_options, Ascend) + } + + /// Create a new memory map backed skipmap with default options. + /// + /// **What the difference between this method and [`SkipMap::new`]?** + /// + /// 1. This method will use mmap anonymous to require memory from the OS directly. + /// If you require very large contiguous memory regions, this method might be more suitable because + /// it's more direct in requesting large chunks of memory from the OS. + /// + /// 2. Where as [`SkipMap::new`] will use an `AlignedVec` ensures we are working within Rust's memory safety guarantees. + /// Even if we are working with raw pointers with `Box::into_raw`, + /// the backend ARENA will reclaim the ownership of this memory by converting it back to a `Box` + /// when dropping the backend ARENA. Since `AlignedVec` uses heap memory, the data might be more cache-friendly, + /// especially if you're frequently accessing or modifying it. 
+ /// + /// # Safety + /// - If trying to reopens a skiplist, then the trailer type must be the same as the previous one + /// + /// [`SkipMap::new`]: #method.new + #[cfg(all(feature = "memmap", not(target_family = "wasm")))] + #[cfg_attr(docsrs, doc(cfg(all(feature = "memmap", not(target_family = "wasm")))))] + pub fn map_anon(opts: Options, mmap_options: MmapOptions) -> std::io::Result { + Self::map_anon_with_comparator(opts, mmap_options, Ascend) + } + + #[cfg(all(test, feature = "std"))] + #[inline] + pub(crate) fn with_yield_now(self) -> Self { + Self(self.0.with_yield_now()) + } +} + +impl SkipMap { + /// Returns the reserved bytes of the allocator specified in the [`ArenaOptions::with_reserved`]. + #[inline] + pub fn reserved_slice(&self) -> &[u8] { + self.0.arena.reserved_slice() + } + + /// Returns the mutable reserved bytes of the allocator specified in the [`ArenaOptions::with_reserved`]. + /// + /// # Safety + /// - The caller need to make sure there is no data-race + /// + /// # Panics + /// - If in read-only mode, it will panic. + #[inline] + #[allow(clippy::mut_from_ref)] + pub unsafe fn reserved_slice_mut(&self) -> &mut [u8] { + self.0.arena.reserved_slice_mut() + } + /// Returns the path of the mmap file, only returns `Some` when the ARENA is backed by a mmap file. + #[cfg(all(feature = "memmap", not(target_family = "wasm")))] + #[cfg_attr(docsrs, doc(cfg(all(feature = "memmap", not(target_family = "wasm")))))] + #[inline] + pub fn path(&self) -> Option<&std::sync::Arc> { + self.0.arena.path() + } + + /// Sets remove on drop, only works on mmap with a file backend. + /// + /// Default is `false`. + /// + /// > **WARNING:** Once set to `true`, the backed file will be removed when the allocator is dropped, even though the file is opened in + /// > read-only mode. 
+ #[cfg(all(feature = "memmap", not(target_family = "wasm")))] + #[cfg_attr(docsrs, doc(cfg(all(feature = "memmap", not(target_family = "wasm")))))] + #[inline] + pub fn remove_on_drop(&self, val: bool) { + self.0.remove_on_drop(val); + } + + /// Returns the offset of the data section in the `SkipMap`. + /// + /// By default, `SkipMap` will allocate meta, head node, and tail node in the ARENA, + /// and the data section will be allocated after the tail node. + /// + /// This method will return the offset of the data section in the ARENA. + #[inline] + pub fn data_offset(&self) -> usize { + self.0.data_offset() + } + + /// Returns the magic version number of the [`SkipMap`]. + /// + /// This value can be used to check the compatibility for application using [`SkipMap`]. + #[inline] + pub fn magic_version(&self) -> u16 { + self.0.magic_version() + } + + /// Returns the height of the highest tower within any of the nodes that + /// have ever been allocated as part of this skiplist. + #[inline] + pub fn height(&self) -> u8 { + self.0.height() + } + + /// Returns the number of remaining bytes can be allocated by the arena. + #[inline] + pub fn remaining(&self) -> usize { + self.0.remaining() + } + + /// Returns the number of bytes that have allocated from the arena. + #[inline] + pub fn allocated(&self) -> usize { + self.0.allocated() + } + + /// Returns the capacity of the arena. + #[inline] + pub fn capacity(&self) -> usize { + self.0.capacity() + } + + /// Returns the number of entries in the skipmap. + #[inline] + pub fn len(&self) -> usize { + self.0.len() + } + + /// Returns true if the skipmap is empty. + #[inline] + pub fn is_empty(&self) -> bool { + self.0.is_empty() + } + + /// Gets the number of pointers to this `SkipMap` similar to [`Arc::strong_count`](std::sync::Arc::strong_count). + #[inline] + pub fn refs(&self) -> usize { + self.0.refs() + } + + /// Returns how many bytes are discarded by the ARENA. 
+ #[inline] + pub fn discarded(&self) -> u32 { + self.0.discarded() + } + + /// Returns the comparator used to compare keys. + #[inline] + pub fn comparator(&self) -> &C { + self.0.comparator() + } + + /// Returns a random generated height. + /// + /// This method is useful when you want to check if the underlying allocator can allocate a node. + /// + /// # Example + /// + /// ```rust + /// use skl::{sync::trailed::SkipMap, Options}; + /// + /// let map = SkipMap::::new(Options::new()).unwrap(); + /// let height = map.random_height(); + /// + /// let needed = SkipMap::::estimated_node_size(height, b"k1".len(), b"k2".len()); + /// ``` + #[inline] + pub fn random_height(&self) -> Height { + self.0.random_height() + } + + /// Returns the estimated size of a node with the given height and key/value sizes. + /// + /// **Note**: The returned size is only an estimate and may not be accurate, which means that the actual size is less than or equal to the returned size. + #[inline] + pub fn estimated_node_size(height: Height, key_size: usize, value_size: usize) -> usize { + SkipList::::estimated_node_size(height, key_size, value_size) + } + + /// Like [`SkipMap::new`], but with a custom [`Comparator`]. + #[inline] + pub fn with_comparator(opts: Options, cmp: C) -> Result { + SkipList::with_comparator(opts, cmp).map(Self) + } + + /// Like [`SkipMap::map_mut`], but with a custom [`Comparator`]. 
+ /// + /// # Safety + /// - If trying to reopens a skiplist, then the trailer type must be the same as the previous one + #[cfg(all(feature = "memmap", not(target_family = "wasm")))] + #[cfg_attr(docsrs, doc(cfg(all(feature = "memmap", not(target_family = "wasm")))))] + #[inline] + pub unsafe fn map_mut_with_comparator>( + path: P, + opts: Options, + open_options: OpenOptions, + mmap_options: MmapOptions, + cmp: C, + ) -> std::io::Result { + SkipList::map_mut_with_comparator(path, opts, open_options, mmap_options, cmp).map(Self) + } + + /// Like [`SkipMap::map_mut`], but with [`Options`], a custom [`Comparator`] and a [`PathBuf`](std::path::PathBuf) builder. + /// + /// # Safety + /// - If trying to reopens a skiplist, then the trailer type must be the same as the previous one + #[cfg(all(feature = "memmap", not(target_family = "wasm")))] + #[cfg_attr(docsrs, doc(cfg(all(feature = "memmap", not(target_family = "wasm")))))] + #[inline] + pub unsafe fn map_mut_with_comparator_and_path_builder( + path_builder: PB, + opts: Options, + open_options: OpenOptions, + mmap_options: MmapOptions, + cmp: C, + ) -> Result> + where + PB: FnOnce() -> Result, + { + SkipList::map_mut_with_comparator_and_path_builder( + path_builder, + opts, + open_options, + mmap_options, + cmp, + ) + .map(Self) + } + + /// Like [`SkipMap::map`], but with a custom [`Comparator`]. 
+ /// + /// # Safety + /// - If trying to reopens a skiplist, then the trailer type must be the same as the previous one + #[cfg(all(feature = "memmap", not(target_family = "wasm")))] + #[cfg_attr(docsrs, doc(cfg(all(feature = "memmap", not(target_family = "wasm")))))] + #[inline] + pub unsafe fn map_with_comparator>( + path: P, + opts: Options, + open_options: OpenOptions, + mmap_options: MmapOptions, + cmp: C, + ) -> std::io::Result { + SkipList::map_with_comparator(path, opts, open_options, mmap_options, cmp).map(Self) + } + + /// Like [`SkipMap::map`], but with a custom [`Comparator`] and a [`PathBuf`](std::path::PathBuf) builder. + /// + /// # Safety + /// - If trying to reopens a skiplist, then the trailer type must be the same as the previous one + #[cfg(all(feature = "memmap", not(target_family = "wasm")))] + #[cfg_attr(docsrs, doc(cfg(all(feature = "memmap", not(target_family = "wasm")))))] + #[inline] + pub unsafe fn map_with_comparator_and_path_builder( + path_builder: PB, + opts: Options, + open_options: OpenOptions, + mmap_options: MmapOptions, + cmp: C, + ) -> Result> + where + PB: FnOnce() -> Result, + { + SkipList::map_with_comparator_and_path_builder( + path_builder, + opts, + open_options, + mmap_options, + cmp, + ) + .map(Self) + } + + /// Like [`SkipMap::map_anon`], but with a custom [`Comparator`]. + #[cfg(all(feature = "memmap", not(target_family = "wasm")))] + #[cfg_attr(docsrs, doc(cfg(all(feature = "memmap", not(target_family = "wasm")))))] + #[inline] + pub fn map_anon_with_comparator( + opts: Options, + mmap_options: MmapOptions, + cmp: C, + ) -> std::io::Result { + SkipList::map_anon_with_comparator(opts, mmap_options, cmp).map(Self) + } + + /// Clear the skiplist to empty and re-initialize. + /// + /// # Safety + /// - The current pointers get from the ARENA cannot be used anymore after calling this method. + /// - This method is not thread-safe. 
+ /// + /// # Example + /// + /// Undefine behavior: + /// + /// ```ignore + /// let map = SkipMap::new(Options::new()).unwrap(); + /// + /// map.insert(b"hello", b"world").unwrap(); + /// + /// let data = map.get(b"hello").unwrap(); + /// + /// map.clear().unwrap(); + /// + /// let w = data[0]; // undefined behavior + /// ``` + pub unsafe fn clear(&mut self) -> Result<(), Error> { + self.0.clear() + } + + /// Flushes outstanding memory map modifications to disk. + /// + /// When this method returns with a non-error result, + /// all outstanding changes to a file-backed memory map are guaranteed to be durably stored. + /// The file's metadata (including last modification timestamp) may not be updated. + #[cfg(all(feature = "memmap", not(target_family = "wasm")))] + #[cfg_attr(docsrs, doc(cfg(all(feature = "memmap", not(target_family = "wasm")))))] + pub fn flush(&self) -> std::io::Result<()> { + self.0.flush() + } + + /// Asynchronously flushes outstanding memory map modifications to disk. + /// + /// This method initiates flushing modified pages to durable storage, but it will not wait for + /// the operation to complete before returning. The file's metadata (including last + /// modification timestamp) may not be updated. + #[cfg(all(feature = "memmap", not(target_family = "wasm")))] + #[cfg_attr(docsrs, doc(cfg(all(feature = "memmap", not(target_family = "wasm")))))] + pub fn flush_async(&self) -> std::io::Result<()> { + self.0.flush_async() + } +} + +impl SkipMap { + /// Upserts a new key-value pair if it does not yet exist, if the key with the given version already exists, it will update the value. + /// Unlike [`get_or_insert`](SkipMap::get_or_insert), this method will update the value if the key with the given version already exists. + /// + /// - Returns `Ok(None)` if the key was successfully inserted. + /// - Returns `Ok(Some(old))` if the key with the given version already exists and the value is successfully updated. 
+ #[inline] + pub fn insert<'a, 'b: 'a>( + &'a self, + key: &'b [u8], + value: &'b [u8], + trailer: T, + ) -> Result>>, Error> { + self + .0 + .insert_at_height(MIN_VERSION, self.random_height(), key, value, trailer) + } + + /// Upserts a new key-value pair at the given height if it does not yet exist, if the key with the given version already exists, it will update the value. + /// Unlike [`get_or_insert_at_height`](SkipMap::get_or_insert_at_height), this method will update the value if the key with the given version already exists. + /// + /// - Returns `Ok(None)` if the key was successfully inserted. + /// - Returns `Ok(Some(old))` if the key with the given version already exists and the value is successfully updated. + /// + /// # Example + /// + /// ```rust + /// use skl::{sync::trailed::SkipMap, Options}; + /// + /// let map = SkipMap::::new(Options::new()).unwrap(); + /// + /// let height = map.random_height(); + /// map.insert_at_height(height, b"hello", b"world", 10).unwrap(); + /// ``` + pub fn insert_at_height<'a, 'b: 'a>( + &'a self, + height: Height, + key: &'b [u8], + value: &'b [u8], + trailer: T, + ) -> Result>>, Error> { + self + .0 + .insert_at_height(MIN_VERSION, height, key, value, trailer) + } + + /// Upserts a new key if it does not yet exist, if the key with the given version already exists, it will update the value. + /// Unlike [`get_or_insert_with_value_builder`](SkipMap::get_or_insert_with_value_builder), this method will update the value if the key with the given version already exists. + /// + /// This method is useful when you want to insert a key and you know the value size but you do not have the value + /// at this moment. + /// + /// A placeholder will be inserted first, then you will get an [`VacantBuffer`], + /// and you must fill the buffer with bytes later in the closure. + /// + /// - Returns `Ok(None)` if the key was successfully inserted. 
+ /// - Returns `Ok(Some(old))` if the key with the given version already exists and the value is successfully updated. + /// + /// # Example + /// + /// ```rust + /// use skl::{sync::trailed::SkipMap, ValueBuilder, Options}; + /// + /// struct Person { + /// id: u32, + /// name: String, + /// } + /// + /// impl Person { + /// fn encoded_size(&self) -> usize { + /// 4 + self.name.len() + /// } + /// } + /// + /// + /// let alice = Person { + /// id: 1, + /// name: "Alice".to_string(), + /// }; + /// + /// let encoded_size = alice.encoded_size(); + /// + /// let l = SkipMap::::new(Options::new()).unwrap(); + /// + /// let vb = ValueBuilder::new(encoded_size as u32, |mut val| { + /// val.put_u32_le(alice.id).unwrap(); + /// val.put_slice(alice.name.as_bytes()).unwrap(); + /// Ok(()) + /// }); + /// + /// l.insert_with_value_builder::(b"alice", vb, 10) + /// .unwrap(); + /// ``` + #[inline] + pub fn insert_with_value_builder<'a, 'b: 'a, E>( + &'a self, + key: &'b [u8], + value_builder: ValueBuilder) -> Result<(), E>>, + trailer: T, + ) -> Result>>, Either> { + self.0.insert_at_height_with_value_builder( + MIN_VERSION, + self.random_height(), + key, + value_builder, + trailer, + ) + } + + /// Upserts a new key if it does not yet exist, if the key with the given version already exists, it will update the value. + /// Unlike [`get_or_insert_at_height_with_value_builder`](SkipMap::get_or_insert_at_height_with_value_builder), this method will update the value if the key with the given version already exists. + /// + /// This method is useful when you want to insert a key and you know the value size but you do not have the value + /// at this moment. + /// + /// A placeholder will be inserted first, then you will get an [`VacantBuffer`], + /// and you must fill the buffer with bytes later in the closure. + /// + /// - Returns `Ok(None)` if the key was successfully inserted. 
+ /// - Returns `Ok(Some(old))` if the key with the given version already exists and the value is successfully updated. + /// + /// # Example + /// + /// ```rust + /// use skl::{sync::trailed::SkipMap, ValueBuilder, Options}; + /// + /// struct Person { + /// id: u32, + /// name: String, + /// } + /// + /// impl Person { + /// fn encoded_size(&self) -> usize { + /// 4 + self.name.len() + /// } + /// } + /// + /// + /// let alice = Person { + /// id: 1, + /// name: "Alice".to_string(), + /// }; + /// + /// let encoded_size = alice.encoded_size(); + /// + /// let l = SkipMap::::new(Options::new()).unwrap(); + /// + /// let vb = ValueBuilder::new(encoded_size as u32, |mut val| { + /// val.put_u32_le(alice.id).unwrap(); + /// val.put_slice(alice.name.as_bytes()).unwrap(); + /// Ok(()) + /// }); + /// + /// let height = l.random_height(); + /// l.insert_at_height_with_value_builder::(height, b"alice", vb, 10) + /// .unwrap(); + /// ``` + pub fn insert_at_height_with_value_builder<'a, 'b: 'a, E>( + &'a self, + height: Height, + key: &'b [u8], + value_builder: ValueBuilder) -> Result<(), E>>, + trailer: T, + ) -> Result>>, Either> { + self + .0 + .insert_at_height_with_value_builder(MIN_VERSION, height, key, value_builder, trailer) + } + + /// Inserts a new key-value pair if it does not yet exist. + /// + /// Unlike [`insert`](SkipMap::insert), this method will not update the value if the key with the given version already exists. + /// + /// - Returns `Ok(None)` if the key was successfully get_or_inserted. + /// - Returns `Ok(Some(_))` if the key with the given version already exists. + #[inline] + pub fn get_or_insert<'a, 'b: 'a>( + &'a self, + key: &'b [u8], + value: &'b [u8], + trailer: T, + ) -> Result>>, Error> { + self + .0 + .get_or_insert_at_height(MIN_VERSION, self.random_height(), key, value, trailer) + } + + /// Inserts a new key-value pair at height if it does not yet exist. 
+ /// + /// Unlike [`insert_at_height`](SkipMap::insert_at_height), this method will not update the value if the key with the given version already exists. + /// + /// - Returns `Ok(None)` if the key was successfully get_or_inserted. + /// - Returns `Ok(Some(_))` if the key with the given version already exists. + pub fn get_or_insert_at_height<'a, 'b: 'a>( + &'a self, + height: Height, + key: &'b [u8], + value: &'b [u8], + trailer: T, + ) -> Result>>, Error> { + self + .0 + .get_or_insert_at_height(MIN_VERSION, height, key, value, trailer) + } + + /// Inserts a new key if it does not yet exist. + /// + /// Unlike [`insert_with_value_builder`](SkipMap::insert_with_value_builder), this method will not update the value if the key with the given version already exists. + /// + /// This method is useful when you want to get_or_insert a key and you know the value size but you do not have the value + /// at this moment. + /// + /// A placeholder will be inserted first, then you will get an [`VacantBuffer`], + /// and you must fill the buffer with bytes later in the closure. + /// + /// - Returns `Ok(None)` if the key was successfully get_or_inserted. + /// - Returns `Ok(Some(_))` if the key with the given version already exists. 
+ /// + /// # Example + /// + /// ```rust + /// use skl::{sync::trailed::SkipMap, ValueBuilder, Options}; + /// + /// struct Person { + /// id: u32, + /// name: String, + /// } + /// + /// impl Person { + /// fn encoded_size(&self) -> usize { + /// 4 + self.name.len() + /// } + /// } + /// + /// + /// let alice = Person { + /// id: 1, + /// name: "Alice".to_string(), + /// }; + /// + /// let encoded_size = alice.encoded_size(); + /// + /// let l = SkipMap::::new(Options::new()).unwrap(); + /// + /// let vb = ValueBuilder::new(encoded_size as u32, |mut val| { + /// val.put_u32_le(alice.id).unwrap(); + /// val.put_slice(alice.name.as_bytes()).unwrap(); + /// Ok(()) + /// }); + /// l.get_or_insert_with_value_builder::(b"alice", vb, 10) + /// .unwrap(); + /// ``` + #[inline] + pub fn get_or_insert_with_value_builder<'a, 'b: 'a, E>( + &'a self, + key: &'b [u8], + value_builder: ValueBuilder) -> Result<(), E>>, + trailer: T, + ) -> Result>>, Either> { + self.0.get_or_insert_at_height_with_value_builder( + MIN_VERSION, + self.random_height(), + key, + value_builder, + trailer, + ) + } + + /// Inserts a new key if it does not yet exist. + /// + /// Unlike [`insert_at_height_with_value_builder`](SkipMap::insert_at_height_with_value_builder), this method will not update the value if the key with the given version already exists. + /// + /// This method is useful when you want to get_or_insert a key and you know the value size but you do not have the value + /// at this moment. + /// + /// A placeholder will be inserted first, then you will get an [`VacantBuffer`], + /// and you must fill the buffer with bytes later in the closure. + /// + /// - Returns `Ok(None)` if the key was successfully get_or_inserted. + /// - Returns `Ok(Some(_))` if the key with the given version already exists. 
+ /// + /// # Example + /// + /// ```rust + /// use skl::{sync::trailed::SkipMap, ValueBuilder, Options}; + /// + /// struct Person { + /// id: u32, + /// name: String, + /// } + /// + /// impl Person { + /// fn encoded_size(&self) -> usize { + /// 4 + self.name.len() + /// } + /// } + /// + /// + /// let alice = Person { + /// id: 1, + /// name: "Alice".to_string(), + /// }; + /// + /// let encoded_size = alice.encoded_size(); + /// + /// let l = SkipMap::::new(Options::new()).unwrap(); + /// + /// let vb = ValueBuilder::new(encoded_size as u32, |mut val| { + /// val.put_u32_le(alice.id).unwrap(); + /// val.put_slice(alice.name.as_bytes()).unwrap(); + /// Ok(()) + /// }); + /// + /// let height = l.random_height(); + /// l.get_or_insert_at_height_with_value_builder::(height, b"alice", vb, 10) + /// .unwrap(); + /// ``` + pub fn get_or_insert_at_height_with_value_builder<'a, 'b: 'a, E>( + &'a self, + height: Height, + key: &'b [u8], + value_builder: ValueBuilder) -> Result<(), E>>, + trailer: T, + ) -> Result>>, Either> { + self.0.get_or_insert_at_height_with_value_builder( + MIN_VERSION, + height, + key, + value_builder, + trailer, + ) + } + + /// Upserts a new key if it does not yet exist, if the key with the given version already exists, it will update the value. + /// Unlike [`get_or_insert_with_builders`](SkipMap::get_or_insert_with_builders), this method will update the value if the key with the given version already exists. + /// + /// This method is useful when you want to insert a key and you know the key size and value size but you do not have the key and value + /// at this moment. + /// + /// A placeholder will be inserted first, then you will get an [`VacantBuffer`], + /// and you must fill the buffer with bytes later in the closure. + /// + /// - Returns `Ok(None)` if the key was successfully inserted. + /// - Returns `Ok(Some(old))` if the key with the given version already exists and the value is successfully updated. 
+ /// + /// # Example + /// + /// ```rust + /// use skl::{sync::trailed::SkipMap, KeyBuilder, ValueBuilder, Options}; + /// + /// struct Person { + /// id: u32, + /// name: String, + /// } + /// + /// impl Person { + /// fn encoded_size(&self) -> usize { + /// 4 + self.name.len() + /// } + /// } + /// + /// + /// let alice = Person { + /// id: 1, + /// name: "Alice".to_string(), + /// }; + /// + /// let encoded_size = alice.encoded_size(); + /// + /// let l = SkipMap::::new(Options::new()).unwrap(); + /// + /// let kb = KeyBuilder::new(5u8.into(), |mut key| { + /// key.put_slice(b"alice").unwrap(); + /// Ok(()) + /// }); + /// + /// let vb = ValueBuilder::new(encoded_size as u32, |mut val| { + /// val.put_u32_le(alice.id).unwrap(); + /// val.put_slice(alice.name.as_bytes()).unwrap(); + /// Ok(()) + /// }); + /// + /// l.insert_with_builders::<(), ()>(kb, vb, 10) + /// .unwrap(); + /// ``` + #[inline] + pub fn insert_with_builders<'a, KE, VE>( + &'a self, + key_builder: KeyBuilder) -> Result<(), KE>>, + value_builder: ValueBuilder) -> Result<(), VE>>, + trailer: T, + ) -> Result>>, Among> { + self.0.insert_at_height_with_builders( + MIN_VERSION, + self.random_height(), + key_builder, + value_builder, + trailer, + ) + } + + /// Upserts a new key if it does not yet exist, if the key with the given version already exists, it will update the value. + /// Unlike [`get_or_insert_with_builders`](SkipMap::get_or_insert_with_builders), this method will update the value if the key with the given version already exists. + /// + /// This method is useful when you want to insert a key and you know the key size and value size but you do not have the key and value + /// at this moment. + /// + /// A placeholder will be inserted first, then you will get an [`VacantBuffer`], + /// and you must fill the buffer with bytes later in the closure. + /// + /// - Returns `Ok(None)` if the key was successfully inserted. 
+ /// - Returns `Ok(Some(old))` if the key with the given version already exists and the value is successfully updated. + /// + /// # Example + /// + /// ```rust + /// use skl::{sync::trailed::SkipMap, KeyBuilder, ValueBuilder, Options}; + /// + /// struct Person { + /// id: u32, + /// name: String, + /// } + /// + /// impl Person { + /// fn encoded_size(&self) -> usize { + /// 4 + self.name.len() + /// } + /// } + /// + /// + /// let alice = Person { + /// id: 1, + /// name: "Alice".to_string(), + /// }; + /// + /// let encoded_size = alice.encoded_size(); + /// + /// let l = SkipMap::::new(Options::new()).unwrap(); + /// + /// let kb = KeyBuilder::new(5u8.into(), |mut key| { + /// key.put_slice(b"alice").unwrap(); + /// Ok(()) + /// }); + /// + /// let vb = ValueBuilder::new(encoded_size as u32, |mut val| { + /// val.put_u32_le(alice.id).unwrap(); + /// val.put_slice(alice.name.as_bytes()).unwrap(); + /// Ok(()) + /// }); + /// + /// let height = l.random_height(); + /// l.insert_at_height_with_builders::<(), ()>(height, kb, vb, 10) + /// .unwrap(); + /// ``` + pub fn insert_at_height_with_builders<'a, KE, VE>( + &'a self, + height: Height, + key_builder: KeyBuilder) -> Result<(), KE>>, + value_builder: ValueBuilder) -> Result<(), VE>>, + trailer: T, + ) -> Result>>, Among> { + self + .0 + .insert_at_height_with_builders(MIN_VERSION, height, key_builder, value_builder, trailer) + } + + /// Inserts a new key if it does not yet exist. + /// + /// Unlike [`insert_with_builders`](SkipMap::insert_with_builders), this method will not update the value if the key with the given version already exists. + /// + /// This method is useful when you want to get_or_insert a key and you know the value size but you do not have the value + /// at this moment. + /// + /// A placeholder will be inserted first, then you will get an [`VacantBuffer`], + /// and you must fill the buffer with bytes later in the closure. 
+ /// + /// # Example + /// + /// ```rust + /// use skl::{sync::trailed::SkipMap, KeyBuilder, ValueBuilder, Options}; + /// + /// struct Person { + /// id: u32, + /// name: String, + /// } + /// + /// impl Person { + /// fn encoded_size(&self) -> usize { + /// 4 + self.name.len() + /// } + /// } + /// + /// + /// let alice = Person { + /// id: 1, + /// name: "Alice".to_string(), + /// }; + /// + /// let encoded_size = alice.encoded_size(); + /// + /// let l = SkipMap::::new(Options::new()).unwrap(); + /// + /// let kb = KeyBuilder::new(5u8.into(), |mut key| { + /// key.put_slice(b"alice").unwrap(); + /// Ok(()) + /// }); + /// + /// let vb = ValueBuilder::new(encoded_size as u32, |mut val| { + /// val.put_u32_le(alice.id).unwrap(); + /// val.put_slice(alice.name.as_bytes()).unwrap(); + /// Ok(()) + /// }); + /// + /// l.get_or_insert_with_builders::<(), ()>(kb, vb, 10) + /// .unwrap(); + /// ``` + #[inline] + pub fn get_or_insert_with_builders<'a, KE, VE>( + &'a self, + key_builder: KeyBuilder) -> Result<(), KE>>, + value_builder: ValueBuilder) -> Result<(), VE>>, + trailer: T, + ) -> Result>>, Among> { + self.0.get_or_insert_at_height_with_builders( + MIN_VERSION, + self.random_height(), + key_builder, + value_builder, + trailer, + ) + } + + /// Inserts a new key if it does not yet exist. + /// + /// Unlike [`insert_at_height_with_builders`](SkipMap::insert_at_height_with_builders), this method will not update the value if the key with the given version already exists. + /// + /// This method is useful when you want to get_or_insert a key and you know the value size but you do not have the value + /// at this moment. + /// + /// A placeholder will be inserted first, then you will get an [`VacantBuffer`], + /// and you must fill the buffer with bytes later in the closure. 
+ /// + /// # Example + /// + /// ```rust + /// use skl::{sync::trailed::SkipMap, KeyBuilder, ValueBuilder, Options}; + /// + /// struct Person { + /// id: u32, + /// name: String, + /// } + /// + /// impl Person { + /// fn encoded_size(&self) -> usize { + /// 4 + self.name.len() + /// } + /// } + /// + /// + /// let alice = Person { + /// id: 1, + /// name: "Alice".to_string(), + /// }; + /// + /// let encoded_size = alice.encoded_size(); + /// + /// let l = SkipMap::::new(Options::new()).unwrap(); + /// + /// let kb = KeyBuilder::new(5u8.into(), |mut key| { + /// key.put_slice(b"alice").unwrap(); + /// Ok(()) + /// }); + /// + /// let vb = ValueBuilder::new(encoded_size as u32, |mut val| { + /// val.put_u32_le(alice.id).unwrap(); + /// val.put_slice(alice.name.as_bytes()).unwrap(); + /// Ok(()) + /// }); + /// + /// let height = l.random_height(); + /// l.get_or_insert_at_height_with_builders::<(), ()>(height, kb, vb, 10) + /// .unwrap(); + /// ``` + pub fn get_or_insert_at_height_with_builders<'a, KE, VE>( + &'a self, + height: Height, + key_builder: KeyBuilder) -> Result<(), KE>>, + value_builder: ValueBuilder) -> Result<(), VE>>, + trailer: T, + ) -> Result>>, Among> { + self.0.get_or_insert_at_height_with_builders( + MIN_VERSION, + height, + key_builder, + value_builder, + trailer, + ) + } + + /// Removes the key-value pair if it exists. A CAS operation will be used to ensure the operation is atomic. + /// + /// Unlike [`get_or_remove`](SkipMap::get_or_remove), this method will remove the value if the key with the given version already exists. + /// + /// - Returns `Ok(None)`: + /// - if the remove operation is successful or the key is marked in remove status by other threads. + /// - Returns `Ok(Either::Right(current))` if the key with the given version already exists + /// and the entry is not successfully removed because of an update on this entry happens in another thread. 
+ #[inline] + pub fn compare_remove<'a, 'b: 'a>( + &'a self, + key: &'b [u8], + trailer: T, + success: Ordering, + failure: Ordering, + ) -> Result>>, Error> { + self.0.compare_remove_at_height( + MIN_VERSION, + self.random_height(), + key, + trailer, + success, + failure, + ) + } + + /// Removes the key-value pair if it exists. A CAS operation will be used to ensure the operation is atomic. + /// + /// Unlike [`get_or_remove_at_height`](SkipMap::get_or_remove_at_height), this method will remove the value if the key with the given version already exists. + /// + /// - Returns `Ok(None)`: + /// - if the remove operation is successful or the key is marked in remove status by other threads. + /// - Returns `Ok(Either::Right(current))` if the key with the given version already exists + /// and the entry is not successfully removed because of an update on this entry happens in another thread. + pub fn compare_remove_at_height<'a, 'b: 'a>( + &'a self, + height: Height, + key: &'b [u8], + trailer: T, + success: Ordering, + failure: Ordering, + ) -> Result>>, Error> { + self + .0 + .compare_remove_at_height(MIN_VERSION, height, key, trailer, success, failure) + } + + /// Gets or removes the key-value pair if it exists. + /// Unlike [`compare_remove`](SkipMap::compare_remove), this method will not remove the value if the key with the given version already exists. + /// + /// - Returns `Ok(None)` if the key does not exist. + /// - Returns `Ok(Some(old))` if the key with the given version already exists. + #[inline] + pub fn get_or_remove<'a, 'b: 'a>( + &'a self, + key: &'b [u8], + trailer: T, + ) -> Result>>, Error> { + self + .0 + .get_or_remove_at_height(MIN_VERSION, self.random_height(), key, trailer) + } + + /// Gets or removes the key-value pair if it exists. + /// Unlike [`compare_remove_at_height`](SkipMap::compare_remove_at_height), this method will not remove the value if the key with the given version already exists. 
+ /// + /// - Returns `Ok(None)` if the key does not exist. + /// - Returns `Ok(Some(old))` if the key with the given version already exists. + /// + /// # Example + /// + /// ```rust + /// use skl::{sync::trailed::SkipMap, Options}; + /// + /// let map = SkipMap::::new(Options::new()).unwrap(); + /// + /// map.insert(b"hello", b"world", 10).unwrap(); + /// + /// let height = map.random_height(); + /// map.get_or_remove_at_height(height, b"hello", 10).unwrap(); + /// ``` + pub fn get_or_remove_at_height<'a, 'b: 'a>( + &'a self, + height: Height, + key: &'b [u8], + trailer: T, + ) -> Result>>, Error> { + self + .0 + .get_or_remove_at_height(MIN_VERSION, height, key, trailer) + } + + /// Gets or removes the key-value pair if it exists. + /// Unlike [`compare_remove`](SkipMap::compare_remove), this method will not remove the value if the key with the given version already exists. + /// + /// - Returns `Ok(None)` if the key does not exist. + /// - Returns `Ok(Some(old))` if the key with the given version already exists. + /// + /// This method is useful when you want to get_or_remove a key and you know the key size but you do not have the key + /// at this moment. + /// + /// A placeholder will be inserted first, then you will get an [`VacantBuffer`], + /// and you must fill the buffer with bytes later in the closure. 
+ /// + /// # Example + /// + /// ```rust + /// use skl::{sync::trailed::SkipMap, KeyBuilder, Options}; + /// + /// struct Person { + /// id: u32, + /// name: String, + /// } + /// + /// impl Person { + /// fn encoded_size(&self) -> usize { + /// 4 + self.name.len() + /// } + /// } + /// + /// + /// let alice = Person { + /// id: 1, + /// name: "Alice".to_string(), + /// }; + /// + /// let encoded_size = alice.encoded_size(); + /// + /// let l = SkipMap::::new(Options::new()).unwrap(); + /// + /// let kb = KeyBuilder::new(5u8.into(), |mut key| { + /// key.put_slice(b"alice").unwrap(); + /// Ok(()) + /// }); + /// l.get_or_remove_with_builder::(kb, 10) + /// .unwrap(); + /// ``` + pub fn get_or_remove_with_builder<'a, 'b: 'a, E>( + &'a self, + key_builder: KeyBuilder) -> Result<(), E>>, + trailer: T, + ) -> Result>>, Either> { + self.0.get_or_remove_at_height_with_builder( + MIN_VERSION, + self.random_height(), + key_builder, + trailer, + ) + } + + /// Gets or removes the key-value pair if it exists. + /// Unlike [`compare_remove_at_height`](SkipMap::compare_remove_at_height), this method will not remove the value if the key with the given version already exists. + /// + /// - Returns `Ok(None)` if the key does not exist. + /// - Returns `Ok(Some(old))` if the key with the given version already exists. + /// + /// This method is useful when you want to get_or_remove a key and you know the key size but you do not have the key + /// at this moment. + /// + /// A placeholder will be inserted first, then you will get an [`VacantBuffer`], + /// and you must fill the buffer with bytes later in the closure. 
+ /// + /// # Example + /// + /// ```rust + /// use skl::{sync::trailed::SkipMap, KeyBuilder, Options}; + /// + /// struct Person { + /// id: u32, + /// name: String, + /// } + /// + /// impl Person { + /// fn encoded_size(&self) -> usize { + /// 4 + self.name.len() + /// } + /// } + /// + /// + /// let alice = Person { + /// id: 1, + /// name: "Alice".to_string(), + /// }; + /// + /// let encoded_size = alice.encoded_size(); + /// + /// let l = SkipMap::::new(Options::new()).unwrap(); + /// + /// let kb = KeyBuilder::new(5u8.into(), |mut key| { + /// key.put_slice(b"alice").unwrap(); + /// Ok(()) + /// }); + /// let height = l.random_height(); + /// l.get_or_remove_at_height_with_builder::(height, kb, 10) + /// .unwrap(); + /// ``` + pub fn get_or_remove_at_height_with_builder<'a, 'b: 'a, E>( + &'a self, + height: Height, + key_builder: KeyBuilder) -> Result<(), E>>, + trailer: T, + ) -> Result>>, Either> { + self + .0 + .get_or_remove_at_height_with_builder(MIN_VERSION, height, key_builder, trailer) + } +} + +impl SkipMap { + /// Returns `true` if the key exists in the map. + /// + /// # Example + /// + /// ```rust + /// use skl::{sync::trailed::SkipMap, Options}; + /// use core::sync::atomic::Ordering; + /// + /// let map = SkipMap::::new(Options::new()).unwrap(); + /// + /// map.insert(b"hello", b"world", 10).unwrap(); + /// + /// map.compare_remove(b"hello", 10, Ordering::Relaxed, Ordering::Relaxed).unwrap(); + /// + /// assert!(!map.contains_key(b"hello")); + /// ``` + #[inline] + pub fn contains_key<'a, 'b: 'a>(&'a self, key: &'b [u8]) -> bool { + self.0.contains_key(MIN_VERSION, key) + } + + /// Returns the first entry in the map. + pub fn first(&self) -> Option>> { + self.0.first(MIN_VERSION) + } + + /// Returns the last entry in the map. + pub fn last(&self) -> Option>> { + self.0.last(MIN_VERSION) + } + + /// Returns the value associated with the given key, if it exists. 
+ /// + /// # Example + /// + /// ```rust + /// use skl::{sync::trailed::SkipMap, Options}; + /// use core::sync::atomic::Ordering; + /// + /// let map = SkipMap::::new(Options::new()).unwrap(); + /// + /// map.insert(b"hello", b"world", 10).unwrap(); + /// + /// let ent = map.get(b"hello").unwrap(); + /// assert_eq!(ent.value(), b"world"); + /// + /// map.compare_remove(b"hello", 10, Ordering::Relaxed, Ordering::Relaxed).unwrap(); + /// + /// assert!(map.get(b"hello").is_none()); + /// ``` + pub fn get<'a, 'b: 'a>(&'a self, key: &'b [u8]) -> Option>> { + self.0.get(MIN_VERSION, key) + } + + /// Returns an `EntryRef` pointing to the highest element whose key is below the given bound. + /// If no such element is found then `None` is returned. + pub fn upper_bound<'a, 'b: 'a>( + &'a self, + upper: Bound<&'b [u8]>, + ) -> Option>> { + self.0.upper_bound(MIN_VERSION, upper) + } + + /// Returns an `EntryRef` pointing to the lowest element whose key is above the given bound. + /// If no such element is found then `None` is returned. + pub fn lower_bound<'a, 'b: 'a>( + &'a self, + lower: Bound<&'b [u8]>, + ) -> Option>> { + self.0.lower_bound(MIN_VERSION, lower) + } + + /// Returns a new iterator, this iterator will yield the latest version of all entries in the map less or equal to the given version. + #[inline] + pub fn iter(&self) -> Iter, C> { + self.0.iter(MIN_VERSION) + } + + /// Returns a iterator that within the range, this iterator will yield the latest version of all entries in the range less or equal to the given version. 
+ #[inline] + pub fn range<'a, Q, R>(&'a self, range: R) -> Iter<'a, Allocator, C, Q, R> + where + Q: ?Sized + Borrow<[u8]>, + R: RangeBounds + 'a, + { + self.0.range(MIN_VERSION, range) + } +} diff --git a/src/sync/versioned.rs b/src/sync/versioned.rs new file mode 100644 index 0000000..8501c8f --- /dev/null +++ b/src/sync/versioned.rs @@ -0,0 +1,1499 @@ +use core::borrow::Borrow; + +use super::*; + +use among::Among; +use base::{AllVersionsIter, EntryRef, Iter, VersionedEntryRef}; + +type Allocator = GenericAllocator; +type SkipList = base::SkipList; + +node_pointer!(VersionedNode); + +/// A node that supports version. +#[repr(C)] +pub struct VersionedNode { + // A byte slice is 24 bytes. We are trying to save space here. + /// Multiple parts of the value are encoded as a single u64 so that it + /// can be atomically loaded and stored: + /// value offset: u32 (bits 0-31) + /// value size : u32 (bits 32-63) + value: AtomicValuePointer, + // Immutable. No need to lock to access key. + key_offset: u32, + // Immutable. No need to lock to access key. + key_size_and_height: u32, + version: u64, + // ** DO NOT REMOVE BELOW COMMENT** + // The below field will be attached after the node, have to comment out + // this field, because each node will not use the full height, the code will + // not allocate the full size of the tower. + // + // Most nodes do not need to use the full height of the tower, since the + // probability of each successive level decreases exponentially. Because + // these elements are never accessed, they do not need to be allocated. + // Therefore, when a node is allocated in the arena, its memory footprint + // is deliberately truncated to not include unneeded tower elements. + // + // All accesses to elements should use CAS operations, with no need to lock. 
+ // pub(super) tower: [Link; self.opts.max_height], +} + +impl core::fmt::Debug for VersionedNode { + fn fmt(&self, f: &mut core::fmt::Formatter<'_>) -> core::fmt::Result { + let (key_size, height) = decode_key_size_and_height(self.key_size_and_height); + let (value_offset, value_size) = decode_value_pointer(self.value.0.load(Ordering::Relaxed)); + f.debug_struct("Node") + .field("value_offset", &value_offset) + .field("value_size", &value_size) + .field("key_offset", &self.key_offset) + .field("key_size", &key_size) + .field("height", &height) + .finish() + } +} + +impl WithVersion for VersionedNode {} + +impl Node for VersionedNode { + type Link = Link; + + type Trailer = (); + + type ValuePointer = AtomicValuePointer; + + type Pointer = NodePointer; + + fn full(value_offset: u32, max_height: u8) -> Self { + Self { + value: AtomicValuePointer::new(value_offset, 0), + key_offset: 0, + key_size_and_height: encode_key_size_and_height(0, max_height), + version: MIN_VERSION, + } + } + + #[inline] + fn value_pointer(&self) -> &Self::ValuePointer { + &self.value + } + + #[inline] + fn set_value_pointer(&mut self, offset: u32, size: u32) { + self.value = AtomicValuePointer::new(offset, size); + } + + #[inline] + fn clear_value( + &self, + arena: &A, + success: Ordering, + failure: Ordering, + ) -> Result<(), (u32, u32)> { + self + .value + .compare_remove(success, failure) + .map(|(_, old_len)| { + if old_len != REMOVE { + arena.increase_discarded(old_len); + } + }) + } + + #[inline] + fn set_key_size_and_height(&mut self, key_size_and_height: u32) { + self.key_size_and_height = key_size_and_height; + } + + #[inline] + fn set_key_offset(&mut self, key_offset: u32) { + self.key_offset = key_offset; + } + + #[inline] + fn version(&self) -> Version { + self.version + } + + #[inline] + fn set_version(&mut self, version: Version) { + self.version = version; + } + + #[inline] + fn key_size_and_height(&self) -> u32 { + self.key_size_and_height + } + + #[inline] + fn 
key_offset(&self) -> u32 { + self.key_offset + } +} + +/// A fast, lock-free, thread-safe ARENA based `SkipMap` that supports multiple versions, forward and backward iteration. +/// +/// If you want to use in non-concurrent environment, you can use [`unsync::versioned::SkipMap`]. +#[repr(transparent)] +pub struct SkipMap(SkipList); + +impl Clone for SkipMap { + #[inline] + fn clone(&self) -> Self { + Self(self.0.clone()) + } +} + +impl SkipMap { + /// Create a new skipmap with default options. + /// + /// **Note:** The capacity stands for how many memory allocated, + /// it does not mean the skiplist can store `cap` entries. + /// + /// + /// + /// **What the difference between this method and [`SkipMap::mmap_anon`]?** + /// + /// 1. This method will use an `AlignedVec` ensures we are working within Rust's memory safety guarantees. + /// Even if we are working with raw pointers with `Box::into_raw`, + /// the backend ARENA will reclaim the ownership of this memory by converting it back to a `Box` + /// when dropping the backend ARENA. Since `AlignedVec` uses heap memory, the data might be more cache-friendly, + /// especially if you're frequently accessing or modifying it. + /// + /// 2. Where as [`SkipMap::mmap_anon`] will use mmap anonymous to require memory from the OS. + /// If you require very large contiguous memory regions, `mmap` might be more suitable because + /// it's more direct in requesting large chunks of memory from the OS. + /// + /// [`SkipMap::mmap_anon`]: #method.mmap_anon + pub fn new(opts: Options) -> Result { + Self::with_comparator(opts, Ascend) + } + + /// Create a new memory map file backed with default options. + /// + /// **Note:** The capacity stands for how many memory mmaped, + /// it does not mean the skipmap can store `cap` entries. 
+ /// + /// # Safety + /// - If trying to reopens a skiplist, then the trailer type must be the same as the previous one + #[cfg(all(feature = "memmap", not(target_family = "wasm")))] + #[cfg_attr(docsrs, doc(cfg(all(feature = "memmap", not(target_family = "wasm")))))] + pub unsafe fn map_mut>( + path: P, + opts: Options, + open_options: OpenOptions, + mmap_options: MmapOptions, + ) -> std::io::Result { + Self::map_mut_with_comparator(path, opts, open_options, mmap_options, Ascend) + } + + /// Open an exist file and mmap it to create skipmap. + /// + /// # Safety + /// - If trying to reopens a skiplist, then the trailer type must be the same as the previous one + #[cfg(all(feature = "memmap", not(target_family = "wasm")))] + #[cfg_attr(docsrs, doc(cfg(all(feature = "memmap", not(target_family = "wasm")))))] + pub unsafe fn map>( + path: P, + opts: Options, + open_options: OpenOptions, + mmap_options: MmapOptions, + ) -> std::io::Result { + Self::map_with_comparator(path, opts, open_options, mmap_options, Ascend) + } + + /// Create a new memory map backed skipmap with default options. + /// + /// **What the difference between this method and [`SkipMap::new`]?** + /// + /// 1. This method will use mmap anonymous to require memory from the OS directly. + /// If you require very large contiguous memory regions, this method might be more suitable because + /// it's more direct in requesting large chunks of memory from the OS. + /// + /// 2. Where as [`SkipMap::new`] will use an `AlignedVec` ensures we are working within Rust's memory safety guarantees. + /// Even if we are working with raw pointers with `Box::into_raw`, + /// the backend ARENA will reclaim the ownership of this memory by converting it back to a `Box` + /// when dropping the backend ARENA. Since `AlignedVec` uses heap memory, the data might be more cache-friendly, + /// especially if you're frequently accessing or modifying it. 
+ /// + /// [`SkipMap::new`]: #method.new + #[cfg(all(feature = "memmap", not(target_family = "wasm")))] + #[cfg_attr(docsrs, doc(cfg(all(feature = "memmap", not(target_family = "wasm")))))] + pub fn map_anon(opts: Options, mmap_options: MmapOptions) -> std::io::Result { + Self::map_anon_with_comparator(opts, mmap_options, Ascend) + } +} + +impl SkipMap { + /// Returns the reserved bytes of the allocator specified in the [`ArenaOptions::with_reserved`]. + #[inline] + pub fn reserved_slice(&self) -> &[u8] { + self.0.arena.reserved_slice() + } + + /// Returns the mutable reserved bytes of the allocator specified in the [`ArenaOptions::with_reserved`]. + /// + /// # Safety + /// - The caller need to make sure there is no data-race + /// + /// # Panics + /// - If in read-only mode, it will panic. + #[inline] + #[allow(clippy::mut_from_ref)] + pub unsafe fn reserved_slice_mut(&self) -> &mut [u8] { + self.0.arena.reserved_slice_mut() + } + /// Returns the path of the mmap file, only returns `Some` when the ARENA is backed by a mmap file. + #[cfg(all(feature = "memmap", not(target_family = "wasm")))] + #[cfg_attr(docsrs, doc(cfg(all(feature = "memmap", not(target_family = "wasm")))))] + #[inline] + pub fn path(&self) -> Option<&std::sync::Arc> { + self.0.arena.path() + } + + /// Sets remove on drop, only works on mmap with a file backend. + /// + /// Default is `false`. + /// + /// > **WARNING:** Once set to `true`, the backed file will be removed when the allocator is dropped, even though the file is opened in + /// > read-only mode. + #[cfg(all(feature = "memmap", not(target_family = "wasm")))] + #[cfg_attr(docsrs, doc(cfg(all(feature = "memmap", not(target_family = "wasm")))))] + #[inline] + pub fn remove_on_drop(&self, val: bool) { + self.0.remove_on_drop(val); + } + + /// Returns the offset of the data section in the `SkipMap`. 
+ /// + /// By default, `SkipMap` will allocate meta, head node, and tail node in the ARENA, + /// and the data section will be allocated after the tail node. + /// + /// This method will return the offset of the data section in the ARENA. + #[inline] + pub const fn data_offset(&self) -> usize { + self.0.data_offset() + } + + /// Returns the version number of the [`SkipMap`]. + #[inline] + pub fn version(&self) -> u16 { + self.0.magic_version() + } + + /// Returns the magic version number of the [`SkipMap`]. + /// + /// This value can be used to check the compatibility for application using [`SkipMap`]. + #[inline] + pub fn magic_version(&self) -> u16 { + self.0.magic_version() + } + + /// Returns the height of the highest tower within any of the nodes that + /// have ever been allocated as part of this skiplist. + #[inline] + pub fn height(&self) -> u8 { + self.0.height() + } + + /// Returns the number of remaining bytes can be allocated by the arena. + #[inline] + pub fn remaining(&self) -> usize { + self.0.remaining() + } + + /// Returns the number of bytes that have allocated from the arena. + #[inline] + pub fn allocated(&self) -> usize { + self.0.allocated() + } + + /// Returns the capacity of the arena. + #[inline] + pub fn capacity(&self) -> usize { + self.0.capacity() + } + + /// Returns the number of entries in the skipmap. + #[inline] + pub fn len(&self) -> usize { + self.0.len() + } + + /// Returns true if the skipmap is empty. + #[inline] + pub fn is_empty(&self) -> bool { + self.0.is_empty() + } + + /// Gets the number of pointers to this `SkipMap` similar to [`Arc::strong_count`](std::sync::Arc::strong_count). + #[inline] + pub fn refs(&self) -> usize { + self.0.refs() + } + + /// Returns how many bytes are discarded by the ARENA. + #[inline] + pub fn discarded(&self) -> u32 { + self.0.discarded() + } + + /// Returns the maximum version of all entries in the map. 
+ #[inline] + pub fn max_version(&self) -> u64 { + self.0.max_version() + } + + /// Returns the minimum version of all entries in the map. + #[inline] + pub fn min_version(&self) -> u64 { + self.0.min_version() + } + + /// Returns the comparator used to compare keys. + #[inline] + pub const fn comparator(&self) -> &C { + self.0.comparator() + } + + /// Returns a random generated height. + /// + /// This method is useful when you want to check if the underlying allocator can allocate a node. + /// + /// # Example + /// + /// ```rust + /// use skl::{sync::versioned::SkipMap, Ascend, Options}; + /// + /// let map = SkipMap::new(Options::new()).unwrap(); + /// let height = map.random_height(); + /// + /// let needed = SkipMap::::estimated_node_size(height, b"k1".len(), b"k2".len()); + /// ``` + #[inline] + pub fn random_height(&self) -> Height { + self.0.random_height() + } + + /// Returns the estimated size of a node with the given height and key/value sizes. + /// + /// **Note**: The returned size is only an estimate and may not be accurate, which means that the actual size is less than or equal to the returned size. + #[inline] + pub fn estimated_node_size(height: Height, key_size: usize, value_size: usize) -> usize { + SkipList::::estimated_node_size(height, key_size, value_size) + } + + /// Like [`SkipMap::new`], but with a custom [`Comparator`]. + #[inline] + pub fn with_comparator(opts: Options, cmp: C) -> Result { + SkipList::with_comparator(opts, cmp).map(Self) + } + + /// Like [`SkipMap::map_mut`], but with a custom [`Comparator`]. 
+ /// + /// # Safety + /// - If trying to reopens a skiplist, then the trailer type must be the same as the previous one + #[cfg(all(feature = "memmap", not(target_family = "wasm")))] + #[cfg_attr(docsrs, doc(cfg(all(feature = "memmap", not(target_family = "wasm")))))] + #[inline] + pub unsafe fn map_mut_with_comparator>( + path: P, + opts: Options, + open_options: OpenOptions, + mmap_options: MmapOptions, + cmp: C, + ) -> std::io::Result { + SkipList::map_mut_with_comparator(path, opts, open_options, mmap_options, cmp).map(Self) + } + + /// Like [`SkipMap::map_mut`], but with [`Options`], a custom [`Comparator`] and a [`PathBuf`](std::path::PathBuf) builder. + /// + /// # Safety + /// - If trying to reopens a skiplist, then the trailer type must be the same as the previous one + #[cfg(all(feature = "memmap", not(target_family = "wasm")))] + #[cfg_attr(docsrs, doc(cfg(all(feature = "memmap", not(target_family = "wasm")))))] + #[inline] + pub unsafe fn map_mut_with_comparator_and_path_builder( + path_builder: PB, + opts: Options, + open_options: OpenOptions, + mmap_options: MmapOptions, + cmp: C, + ) -> Result> + where + PB: FnOnce() -> Result, + { + SkipList::map_mut_with_comparator_and_path_builder( + path_builder, + opts, + open_options, + mmap_options, + cmp, + ) + .map(Self) + } + + /// Like [`SkipMap::map`], but with a custom [`Comparator`]. 
+ /// + /// # Safety + /// - If trying to reopens a skiplist, then the trailer type must be the same as the previous one + #[cfg(all(feature = "memmap", not(target_family = "wasm")))] + #[cfg_attr(docsrs, doc(cfg(all(feature = "memmap", not(target_family = "wasm")))))] + #[inline] + pub unsafe fn map_with_comparator>( + path: P, + opts: Options, + open_options: OpenOptions, + mmap_options: MmapOptions, + cmp: C, + ) -> std::io::Result { + SkipList::map_with_comparator(path, opts, open_options, mmap_options, cmp).map(Self) + } + + /// Like [`SkipMap::map`], but with a custom [`Comparator`] and a [`PathBuf`](std::path::PathBuf) builder. + /// + /// # Safety + /// - If trying to reopens a skiplist, then the trailer type must be the same as the previous one + #[cfg(all(feature = "memmap", not(target_family = "wasm")))] + #[cfg_attr(docsrs, doc(cfg(all(feature = "memmap", not(target_family = "wasm")))))] + #[inline] + pub unsafe fn map_with_comparator_and_path_builder( + path_builder: PB, + opts: Options, + open_options: OpenOptions, + mmap_options: MmapOptions, + cmp: C, + ) -> Result> + where + PB: FnOnce() -> Result, + { + SkipList::map_with_comparator_and_path_builder( + path_builder, + opts, + open_options, + mmap_options, + cmp, + ) + .map(Self) + } + + /// Like [`SkipMap::map_anon`], but with a custom [`Comparator`]. + #[cfg(all(feature = "memmap", not(target_family = "wasm")))] + #[cfg_attr(docsrs, doc(cfg(all(feature = "memmap", not(target_family = "wasm")))))] + #[inline] + pub fn map_anon_with_comparator( + opts: Options, + mmap_options: MmapOptions, + cmp: C, + ) -> std::io::Result { + SkipList::map_anon_with_comparator(opts, mmap_options, cmp).map(Self) + } + + /// Clear the skiplist to empty and re-initialize. + /// + /// # Safety + /// - The current pointers get from the ARENA cannot be used anymore after calling this method. + /// - This method is not thread-safe. 
+ /// + /// # Example + /// + /// Undefine behavior: + /// + /// ```ignore + /// let map = SkipMap::new(Options::new()).unwrap(); + /// + /// map.insert(1u8, b"hello", b"world").unwrap(); + /// + /// let data = map.get(b"hello").unwrap(); + /// + /// map.clear().unwrap(); + /// + /// let w = data[0]; // undefined behavior + /// ``` + pub unsafe fn clear(&mut self) -> Result<(), Error> { + self.0.clear() + } + + /// Flushes outstanding memory map modifications to disk. + /// + /// When this method returns with a non-error result, + /// all outstanding changes to a file-backed memory map are guaranteed to be durably stored. + /// The file's metadata (including last modification timestamp) may not be updated. + #[cfg(all(feature = "memmap", not(target_family = "wasm")))] + #[cfg_attr(docsrs, doc(cfg(all(feature = "memmap", not(target_family = "wasm")))))] + pub fn flush(&self) -> std::io::Result<()> { + self.0.flush() + } + + /// Asynchronously flushes outstanding memory map modifications to disk. + /// + /// This method initiates flushing modified pages to durable storage, but it will not wait for + /// the operation to complete before returning. The file's metadata (including last + /// modification timestamp) may not be updated. + #[cfg(all(feature = "memmap", not(target_family = "wasm")))] + #[cfg_attr(docsrs, doc(cfg(all(feature = "memmap", not(target_family = "wasm")))))] + pub fn flush_async(&self) -> std::io::Result<()> { + self.0.flush_async() + } + + #[cfg(all(test, feature = "std"))] + #[inline] + pub(crate) fn with_yield_now(self) -> Self { + Self(self.0.with_yield_now()) + } +} + +impl SkipMap { + /// Returns `true` if the key exists in the map. + /// + /// This method will return `false` if the entry is marked as removed. If you want to check if the key exists even if it is marked as removed, + /// you can use [`contains_key_versioned`](SkipMap::contains_key_versioned). 
+ /// + /// # Example + /// + /// ```rust + /// use skl::{sync::versioned::SkipMap, Options}; + /// + /// let map = SkipMap::new(Options::new()).unwrap(); + /// + /// map.insert(0, b"hello", b"world").unwrap(); + /// + /// map.get_or_remove(1, b"hello").unwrap(); + /// + /// assert!(!map.contains_key(1, b"hello")); + /// assert!(map.contains_key_versioned(1, b"hello")); + /// ``` + #[inline] + pub fn contains_key<'a, 'b: 'a>(&'a self, version: Version, key: &'b [u8]) -> bool { + self.0.contains_key(version, key) + } + + /// Returns `true` if the key exists in the map, even if it is marked as removed. + /// + /// # Example + /// + /// ```rust + /// use skl::{sync::versioned::SkipMap, Options}; + /// + /// let map = SkipMap::new(Options::new()).unwrap(); + /// + /// map.insert(0, b"hello", b"world").unwrap(); + /// + /// map.get_or_remove(1, b"hello").unwrap(); + /// + /// assert!(!map.contains_key(1, b"hello")); + /// assert!(map.contains_key_versioned(1, b"hello")); + /// ``` + #[inline] + pub fn contains_key_versioned<'a, 'b: 'a>(&'a self, version: Version, key: &'b [u8]) -> bool { + self.0.contains_key_versioned(version, key) + } + + /// Returns the first entry in the map. + pub fn first(&self, version: Version) -> Option> { + self.0.first(version) + } + + /// Returns the last entry in the map. + pub fn last(&self, version: Version) -> Option> { + self.0.last(version) + } + + /// Returns the value associated with the given key, if it exists. + /// + /// This method will return `None` if the entry is marked as removed. If you want to get the entry even if it is marked as removed, + /// you can use [`get_versioned`](SkipMap::get_versioned). 
+ /// + /// # Example + /// + /// ```rust + /// use skl::{sync::versioned::SkipMap, Options}; + /// + /// let map = SkipMap::new(Options::new()).unwrap(); + /// + /// map.insert(0, b"hello", b"world").unwrap(); + /// + /// let ent = map.get(0, b"hello").unwrap(); + /// assert_eq!(ent.value(), b"world"); + /// + /// map.get_or_remove(1, b"hello").unwrap(); + /// + /// assert!(map.get(1, b"hello").is_none()); + /// ``` + pub fn get<'a, 'b: 'a>( + &'a self, + version: Version, + key: &'b [u8], + ) -> Option> { + self.0.get(version, key) + } + + /// Returns the value associated with the given key, if it exists. + /// + /// The difference between `get` and `get_versioned` is that `get_versioned` will return the value even if the entry is removed. + /// + /// # Example + /// + /// ```rust + /// use skl::{sync::versioned::SkipMap, Options}; + /// + /// let map = SkipMap::new(Options::new()).unwrap(); + /// + /// map.insert(0, b"hello", b"world").unwrap(); + /// + /// map.get_or_remove(1, b"hello").unwrap(); + /// + /// assert!(map.get(1, b"hello").is_none()); + /// + /// let ent = map.get_versioned(1, b"hello").unwrap(); + /// // value is None because the entry is marked as removed. + /// assert!(ent.value().is_none()); + /// ``` + pub fn get_versioned<'a, 'b: 'a>( + &'a self, + version: Version, + key: &'b [u8], + ) -> Option> { + self.0.get_versioned(version, key) + } + + /// Returns an `EntryRef` pointing to the highest element whose key is below the given bound. + /// If no such element is found then `None` is returned. + pub fn upper_bound<'a, 'b: 'a>( + &'a self, + version: Version, + upper: Bound<&'b [u8]>, + ) -> Option> { + self.0.upper_bound(version, upper) + } + + /// Returns an `EntryRef` pointing to the lowest element whose key is above the given bound. + /// If no such element is found then `None` is returned. 
+ pub fn lower_bound<'a, 'b: 'a>( + &'a self, + version: Version, + lower: Bound<&'b [u8]>, + ) -> Option> { + self.0.lower_bound(version, lower) + } + + /// Returns a new iterator, this iterator will yield the latest version of all entries in the map less or equal to the given version. + #[inline] + pub fn iter(&self, version: Version) -> Iter { + self.0.iter(version) + } + + /// Returns a new iterator, this iterator will yield all versions for all entries in the map less or equal to the given version. + #[inline] + pub fn iter_all_versions(&self, version: Version) -> AllVersionsIter { + self.0.iter_all_versions(version) + } + + /// Returns a iterator that within the range, this iterator will yield the latest version of all entries in the range less or equal to the given version. + #[inline] + pub fn range<'a, Q, R>(&'a self, version: Version, range: R) -> Iter<'a, Allocator, C, Q, R> + where + Q: ?Sized + Borrow<[u8]>, + R: RangeBounds + 'a, + { + self.0.range(version, range) + } + + /// Returns a iterator that within the range, this iterator will yield all versions for all entries in the range less or equal to the given version. + #[inline] + pub fn range_all_versions<'a, Q, R>( + &'a self, + version: Version, + range: R, + ) -> AllVersionsIter<'a, Allocator, C, Q, R> + where + Q: ?Sized + Borrow<[u8]>, + R: RangeBounds + 'a, + { + self.0.range_all_versions(version, range) + } +} + +impl SkipMap { + /// Upserts a new key-value pair if it does not yet exist, if the key with the given version already exists, it will update the value. + /// Unlike [`get_or_insert`](SkipMap::get_or_insert), this method will update the value if the key with the given version already exists. + /// + /// - Returns `Ok(None)` if the key was successfully inserted. + /// - Returns `Ok(Some(old))` if the key with the given version already exists and the value is successfully updated. 
+ #[inline] + pub fn insert<'a, 'b: 'a>( + &'a self, + version: Version, + key: &'b [u8], + value: &'b [u8], + ) -> Result>, Error> { + self.0.insert(version, key, value, ()) + } + + /// Upserts a new key-value pair at the given height if it does not yet exist, if the key with the given version already exists, it will update the value. + /// Unlike [`get_or_insert_at_height`](SkipMap::get_or_insert_at_height), this method will update the value if the key with the given version already exists. + /// + /// - Returns `Ok(None)` if the key was successfully inserted. + /// - Returns `Ok(Some(old))` if the key with the given version already exists and the value is successfully updated. + /// + /// # Example + /// + /// ```rust + /// use skl::{sync::versioned::SkipMap, Options}; + /// + /// let map = SkipMap::new(Options::new()).unwrap(); + /// + /// let height = map.random_height(); + /// map.insert_at_height(0, height, b"hello", b"world").unwrap(); + /// ``` + #[inline] + pub fn insert_at_height<'a, 'b: 'a>( + &'a self, + version: Version, + height: Height, + key: &'b [u8], + value: &'b [u8], + ) -> Result>, Error> { + self.0.insert_at_height(version, height, key, value, ()) + } + + /// Upserts a new key if it does not yet exist, if the key with the given version already exists, it will update the value. + /// Unlike [`get_or_insert_with_value_builder`](SkipMap::get_or_insert_with_value_builder), this method will update the value if the key with the given version already exists. + /// + /// This method is useful when you want to insert a key and you know the value size but you do not have the value + /// at this moment. + /// + /// A placeholder will be inserted first, then you will get an [`VacantBuffer`], + /// and you must fill the buffer with bytes later in the closure. + /// + /// - Returns `Ok(None)` if the key was successfully inserted. + /// - Returns `Ok(Some(old))` if the key with the given version already exists and the value is successfully updated. 
+ /// + /// # Example + /// + /// ```rust + /// use skl::{sync::versioned::SkipMap, ValueBuilder, Options}; + /// + /// struct Person { + /// id: u32, + /// name: String, + /// } + /// + /// impl Person { + /// fn encoded_size(&self) -> usize { + /// 4 + self.name.len() + /// } + /// } + /// + /// + /// let alice = Person { + /// id: 1, + /// name: "Alice".to_string(), + /// }; + /// + /// let encoded_size = alice.encoded_size(); + /// + /// let l = SkipMap::new(Options::new()).unwrap(); + /// + /// let vb = ValueBuilder::new(encoded_size as u32, |mut val| { + /// val.put_u32_le(alice.id).unwrap(); + /// val.put_slice(alice.name.as_bytes()).unwrap(); + /// Ok(()) + /// }); + /// + /// l.insert_with_value_builder::(1, b"alice", vb) + /// .unwrap(); + /// ``` + #[inline] + pub fn insert_with_value_builder<'a, 'b: 'a, E>( + &'a self, + version: Version, + key: &'b [u8], + value_builder: ValueBuilder) -> Result<(), E>>, + ) -> Result>, Either> { + self.0.insert_at_height_with_value_builder( + version, + self.random_height(), + key, + value_builder, + (), + ) + } + + /// Upserts a new key if it does not yet exist, if the key with the given version already exists, it will update the value. + /// Unlike [`get_or_insert_with_value_builder`](SkipMap::get_or_insert_with_value_builder), this method will update the value if the key with the given version already exists. + /// + /// This method is useful when you want to insert a key and you know the value size but you do not have the value + /// at this moment. + /// + /// A placeholder will be inserted first, then you will get an [`VacantBuffer`], + /// and you must fill the buffer with bytes later in the closure. + /// + /// - Returns `Ok(None)` if the key was successfully inserted. + /// - Returns `Ok(Some(old))` if the key with the given version already exists and the value is successfully updated. 
+ /// + /// # Example + /// + /// ```rust + /// use skl::{sync::versioned::SkipMap, ValueBuilder, Options}; + /// + /// struct Person { + /// id: u32, + /// name: String, + /// } + /// + /// impl Person { + /// fn encoded_size(&self) -> usize { + /// 4 + self.name.len() + /// } + /// } + /// + /// + /// let alice = Person { + /// id: 1, + /// name: "Alice".to_string(), + /// }; + /// + /// let encoded_size = alice.encoded_size(); + /// + /// let l = SkipMap::new(Options::new()).unwrap(); + /// + /// let vb = ValueBuilder::new(encoded_size as u32, |mut val| { + /// val.put_u32_le(alice.id).unwrap(); + /// val.put_slice(alice.name.as_bytes()).unwrap(); + /// Ok(()) + /// }); + /// + /// let height = l.random_height(); + /// l.insert_at_height_with_value_builder::(1, height, b"alice", vb) + /// .unwrap(); + /// ``` + pub fn insert_at_height_with_value_builder<'a, 'b: 'a, E>( + &'a self, + version: Version, + height: Height, + key: &'b [u8], + value_builder: ValueBuilder) -> Result<(), E>>, + ) -> Result>, Either> { + self + .0 + .insert_at_height_with_value_builder(version, height, key, value_builder, ()) + } + + /// Inserts a new key-value pair if it does not yet exist. + /// + /// Unlike [`insert`](SkipMap::insert), this method will not update the value if the key with the given version already exists. + /// + /// - Returns `Ok(None)` if the key was successfully get_or_inserted. + /// - Returns `Ok(Some(_))` if the key with the given version already exists. + #[inline] + pub fn get_or_insert<'a, 'b: 'a>( + &'a self, + version: Version, + key: &'b [u8], + value: &'b [u8], + ) -> Result>, Error> { + self + .0 + .get_or_insert_at_height(version, self.random_height(), key, value, ()) + } + + /// Inserts a new key-value pair at height if it does not yet exist. + /// + /// Unlike [`insert_at_height`](SkipMap::insert_at_height), this method will not update the value if the key with the given version already exists. 
+ /// + /// - Returns `Ok(None)` if the key was successfully get_or_inserted. + /// - Returns `Ok(Some(_))` if the key with the given version already exists. + pub fn get_or_insert_at_height<'a, 'b: 'a>( + &'a self, + version: Version, + height: Height, + key: &'b [u8], + value: &'b [u8], + ) -> Result>, Error> { + self + .0 + .get_or_insert_at_height(version, height, key, value, ()) + } + + /// Inserts a new key if it does not yet exist. + /// + /// Unlike [`insert_with_value_builder`](SkipMap::insert_with_value_builder), this method will not update the value if the key with the given version already exists. + /// + /// This method is useful when you want to get_or_insert a key and you know the value size but you do not have the value + /// at this moment. + /// + /// A placeholder will be inserted first, then you will get an [`VacantBuffer`], + /// and you must fill the buffer with bytes later in the closure. + /// + /// - Returns `Ok(None)` if the key was successfully get_or_inserted. + /// - Returns `Ok(Some(_))` if the key with the given version already exists. 
+ /// + /// # Example + /// + /// ```rust + /// use skl::{sync::versioned::SkipMap, ValueBuilder, Options}; + /// + /// struct Person { + /// id: u32, + /// name: String, + /// } + /// + /// impl Person { + /// fn encoded_size(&self) -> usize { + /// 4 + self.name.len() + /// } + /// } + /// + /// + /// let alice = Person { + /// id: 1, + /// name: "Alice".to_string(), + /// }; + /// + /// let encoded_size = alice.encoded_size(); + /// + /// let l = SkipMap::new(Options::new()).unwrap(); + /// + /// let vb = ValueBuilder::new(encoded_size as u32, |mut val| { + /// val.put_u32_le(alice.id).unwrap(); + /// val.put_slice(alice.name.as_bytes()).unwrap(); + /// Ok(()) + /// }); + /// l.get_or_insert_with_value_builder::(1, b"alice", vb) + /// .unwrap(); + /// ``` + #[inline] + pub fn get_or_insert_with_value_builder<'a, 'b: 'a, E>( + &'a self, + version: Version, + key: &'b [u8], + value_builder: ValueBuilder) -> Result<(), E>>, + ) -> Result>, Either> { + self.get_or_insert_at_height_with_value_builder( + version, + self.random_height(), + key, + value_builder, + ) + } + + /// Inserts a new key if it does not yet exist. + /// + /// Unlike [`insert_at_height_with_value_builder`](SkipMap::insert_at_height_with_value_builder), this method will not update the value if the key with the given version already exists. + /// + /// This method is useful when you want to get_or_insert a key and you know the value size but you do not have the value + /// at this moment. + /// + /// A placeholder will be inserted first, then you will get an [`VacantBuffer`], + /// and you must fill the buffer with bytes later in the closure. + /// + /// - Returns `Ok(None)` if the key was successfully get_or_inserted. + /// - Returns `Ok(Some(_))` if the key with the given version already exists. 
+ /// + /// # Example + /// + /// ```rust + /// use skl::{sync::versioned::SkipMap, ValueBuilder, Options}; + /// + /// struct Person { + /// id: u32, + /// name: String, + /// } + /// + /// impl Person { + /// fn encoded_size(&self) -> usize { + /// 4 + self.name.len() + /// } + /// } + /// + /// + /// let alice = Person { + /// id: 1, + /// name: "Alice".to_string(), + /// }; + /// + /// let encoded_size = alice.encoded_size(); + /// + /// let l = SkipMap::new(Options::new()).unwrap(); + /// + /// let vb = ValueBuilder::new(encoded_size as u32, |mut val| { + /// val.put_u32_le(alice.id).unwrap(); + /// val.put_slice(alice.name.as_bytes()).unwrap(); + /// Ok(()) + /// }); + /// + /// let height = l.random_height(); + /// l.get_or_insert_at_height_with_value_builder::(1, height, b"alice", vb) + /// .unwrap(); + /// ``` + #[inline] + pub fn get_or_insert_at_height_with_value_builder<'a, 'b: 'a, E>( + &'a self, + version: Version, + height: Height, + key: &'b [u8], + value_builder: ValueBuilder) -> Result<(), E>>, + ) -> Result>, Either> { + self + .0 + .get_or_insert_at_height_with_value_builder(version, height, key, value_builder, ()) + } + + /// Upserts a new key if it does not yet exist, if the key with the given version already exists, it will update the value. + /// Unlike [`get_or_insert_with_builders`](SkipMap::get_or_insert_with_builders), this method will update the value if the key with the given version already exists. + /// + /// This method is useful when you want to insert a key and you know the key size and value size but you do not have the key and value + /// at this moment. + /// + /// A placeholder will be inserted first, then you will get an [`VacantBuffer`], + /// and you must fill the buffer with bytes later in the closure. + /// + /// - Returns `Ok(None)` if the key was successfully inserted. + /// - Returns `Ok(Some(old))` if the key with the given version already exists and the value is successfully updated. 
+ /// + /// # Example + /// + /// ```rust + /// use skl::{sync::versioned::SkipMap, KeyBuilder, ValueBuilder, Options}; + /// + /// struct Person { + /// id: u32, + /// name: String, + /// } + /// + /// impl Person { + /// fn encoded_size(&self) -> usize { + /// 4 + self.name.len() + /// } + /// } + /// + /// + /// let alice = Person { + /// id: 1, + /// name: "Alice".to_string(), + /// }; + /// + /// let encoded_size = alice.encoded_size(); + /// + /// let l = SkipMap::new(Options::new()).unwrap(); + /// + /// let kb = KeyBuilder::new(5u8.into(), |mut key| { + /// key.put_slice(b"alice").unwrap(); + /// Ok(()) + /// }); + /// + /// let vb = ValueBuilder::new(encoded_size as u32, |mut val| { + /// val.put_u32_le(alice.id).unwrap(); + /// val.put_slice(alice.name.as_bytes()).unwrap(); + /// Ok(()) + /// }); + /// + /// l.insert_with_builders::<(), ()>(1, kb, vb) + /// .unwrap(); + /// ``` + #[inline] + pub fn insert_with_builders<'a, KE, VE>( + &'a self, + version: Version, + key_builder: KeyBuilder) -> Result<(), KE>>, + value_builder: ValueBuilder) -> Result<(), VE>>, + ) -> Result>, Among> { + self.0.insert_at_height_with_builders( + version, + self.random_height(), + key_builder, + value_builder, + (), + ) + } + + /// Upserts a new key if it does not yet exist, if the key with the given version already exists, it will update the value. + /// Unlike [`get_or_insert_with_builders_and_trailer`](SkipMap::get_or_insert_with_builders_and_trailer), this method will update the value if the key with the given version already exists. + /// + /// This method is useful when you want to insert a key and you know the key size and value size but you do not have the key and value + /// at this moment. + /// + /// A placeholder will be inserted first, then you will get an [`VacantBuffer`], + /// and you must fill the buffer with bytes later in the closure. + /// + /// - Returns `Ok(None)` if the key was successfully inserted. 
+ /// - Returns `Ok(Some(old))` if the key with the given version already exists and the value is successfully updated. + /// + /// # Example + /// + /// ```rust + /// use skl::{sync::versioned::SkipMap, KeyBuilder, ValueBuilder, Options}; + /// + /// struct Person { + /// id: u32, + /// name: String, + /// } + /// + /// impl Person { + /// fn encoded_size(&self) -> usize { + /// 4 + self.name.len() + /// } + /// } + /// + /// + /// let alice = Person { + /// id: 1, + /// name: "Alice".to_string(), + /// }; + /// + /// let encoded_size = alice.encoded_size(); + /// + /// let l = SkipMap::new(Options::new()).unwrap(); + /// + /// let kb = KeyBuilder::new(5u8.into(), |mut key| { + /// key.put_slice(b"alice").unwrap(); + /// Ok(()) + /// }); + /// + /// let vb = ValueBuilder::new(encoded_size as u32, |mut val| { + /// val.put_u32_le(alice.id).unwrap(); + /// val.put_slice(alice.name.as_bytes()).unwrap(); + /// Ok(()) + /// }); + /// + /// let height = l.random_height(); + /// l.insert_at_height_with_builders::<(), ()>(1, height, kb, vb) + /// .unwrap(); + /// ``` + #[inline] + pub fn insert_at_height_with_builders<'a, KE, VE>( + &'a self, + version: Version, + height: Height, + key_builder: KeyBuilder) -> Result<(), KE>>, + value_builder: ValueBuilder) -> Result<(), VE>>, + ) -> Result>, Among> { + self + .0 + .insert_at_height_with_builders(version, height, key_builder, value_builder, ()) + } + + /// Inserts a new key if it does not yet exist. + /// + /// Unlike [`insert_with_builders`](SkipMap::insert_with_builders), this method will not update the value if the key with the given version already exists. + /// + /// This method is useful when you want to get_or_insert a key and you know the value size but you do not have the value + /// at this moment. + /// + /// A placeholder will be inserted first, then you will get an [`VacantBuffer`], + /// and you must fill the buffer with bytes later in the closure. 
+ /// + /// # Example + /// + /// ```rust + /// use skl::{sync::versioned::SkipMap, KeyBuilder, ValueBuilder, Options}; + /// + /// struct Person { + /// id: u32, + /// name: String, + /// } + /// + /// impl Person { + /// fn encoded_size(&self) -> usize { + /// 4 + self.name.len() + /// } + /// } + /// + /// + /// let alice = Person { + /// id: 1, + /// name: "Alice".to_string(), + /// }; + /// + /// let encoded_size = alice.encoded_size(); + /// + /// let l = SkipMap::new(Options::new()).unwrap(); + /// + /// let kb = KeyBuilder::new(5u8.into(), |mut key| { + /// key.put_slice(b"alice").unwrap(); + /// Ok(()) + /// }); + /// + /// let vb = ValueBuilder::new(encoded_size as u32, |mut val| { + /// val.put_u32_le(alice.id).unwrap(); + /// val.put_slice(alice.name.as_bytes()).unwrap(); + /// Ok(()) + /// }); + /// + /// l.get_or_insert_with_builders::<(), ()>(1, kb, vb) + /// .unwrap(); + /// ``` + #[inline] + pub fn get_or_insert_with_builders<'a, KE, VE>( + &'a self, + version: Version, + key_builder: KeyBuilder) -> Result<(), KE>>, + value_builder: ValueBuilder) -> Result<(), VE>>, + ) -> Result>, Among> { + self.0.get_or_insert_at_height_with_builders( + version, + self.random_height(), + key_builder, + value_builder, + (), + ) + } + + /// Inserts a new key if it does not yet exist. + /// + /// Unlike [`insert_at_height_with_builders`](SkipMap::insert_at_height_with_builders), this method will not update the value if the key with the given version already exists. + /// + /// This method is useful when you want to get_or_insert a key and you know the value size but you do not have the value + /// at this moment. + /// + /// A placeholder will be inserted first, then you will get an [`VacantBuffer`], + /// and you must fill the buffer with bytes later in the closure. 
+ /// + /// # Example + /// + /// ```rust + /// use skl::{sync::versioned::SkipMap, KeyBuilder, ValueBuilder, Options}; + /// + /// struct Person { + /// id: u32, + /// name: String, + /// } + /// + /// impl Person { + /// fn encoded_size(&self) -> usize { + /// 4 + self.name.len() + /// } + /// } + /// + /// + /// let alice = Person { + /// id: 1, + /// name: "Alice".to_string(), + /// }; + /// + /// let encoded_size = alice.encoded_size(); + /// + /// let l = SkipMap::new(Options::new()).unwrap(); + /// + /// let kb = KeyBuilder::new(5u8.into(), |mut key| { + /// key.put_slice(b"alice").unwrap(); + /// Ok(()) + /// }); + /// + /// let vb = ValueBuilder::new(encoded_size as u32, |mut val| { + /// val.put_u32_le(alice.id).unwrap(); + /// val.put_slice(alice.name.as_bytes()).unwrap(); + /// Ok(()) + /// }); + /// + /// let height = l.random_height(); + /// l.get_or_insert_at_height_with_builders::<(), ()>(1, height, kb, vb) + /// .unwrap(); + /// ``` + pub fn get_or_insert_at_height_with_builders<'a, KE, VE>( + &'a self, + version: Version, + height: Height, + key_builder: KeyBuilder) -> Result<(), KE>>, + value_builder: ValueBuilder) -> Result<(), VE>>, + ) -> Result>, Among> { + self + .0 + .get_or_insert_at_height_with_builders(version, height, key_builder, value_builder, ()) + } + + /// Removes the key-value pair if it exists. A CAS operation will be used to ensure the operation is atomic. + /// + /// Unlike [`get_or_remove`](SkipMap::get_or_remove), this method will remove the value if the key with the given version already exists. + /// + /// - Returns `Ok(None)`: + /// - if the remove operation is successful or the key is marked in remove status by other threads. + /// - Returns `Ok(Either::Right(current))` if the key with the given version already exists + /// and the entry is not successfully removed because of an update on this entry happens in another thread. 
+ #[inline] + pub fn compare_remove<'a, 'b: 'a>( + &'a self, + version: Version, + key: &'b [u8], + success: Ordering, + failure: Ordering, + ) -> Result>, Error> { + self.compare_remove_at_height(version, self.random_height(), key, success, failure) + } + + /// Removes the key-value pair if it exists. A CAS operation will be used to ensure the operation is atomic. + /// + /// Unlike [`get_or_remove_at_height`](SkipMap::get_or_remove_at_height), this method will remove the value if the key with the given version already exists. + /// + /// - Returns `Ok(None)`: + /// - if the remove operation is successful or the key is marked in remove status by other threads. + /// - Returns `Ok(Either::Right(current))` if the key with the given version already exists + /// and the entry is not successfully removed because of an update on this entry happens in another thread. + pub fn compare_remove_at_height<'a, 'b: 'a>( + &'a self, + version: Version, + height: Height, + key: &'b [u8], + success: Ordering, + failure: Ordering, + ) -> Result>, Error> { + self + .0 + .compare_remove_at_height(version, height, key, (), success, failure) + } + + /// Gets or removes the key-value pair if it exists. + /// Unlike [`compare_remove`](SkipMap::compare_remove), this method will not remove the value if the key with the given version already exists. + /// + /// - Returns `Ok(None)` if the key does not exist. + /// - Returns `Ok(Some(old))` if the key with the given version already exists. + #[inline] + pub fn get_or_remove<'a, 'b: 'a>( + &'a self, + version: Version, + key: &'b [u8], + ) -> Result>, Error> { + self.get_or_remove_at_height(version, self.random_height(), key) + } + + /// Gets or removes the key-value pair if it exists. + /// Unlike [`compare_remove_at_height`](SkipMap::compare_remove_at_height), this method will not remove the value if the key with the given version already exists. + /// + /// - Returns `Ok(None)` if the key does not exist. 
+ /// - Returns `Ok(Some(old))` if the key with the given version already exists. + /// + /// # Example + /// + /// ```rust + /// use skl::{sync::versioned::SkipMap, Options}; + /// + /// let map = SkipMap::new(Options::new()).unwrap(); + /// + /// map.insert(0, b"hello", b"world").unwrap(); + /// + /// let height = map.random_height(); + /// map.get_or_remove_at_height(0, height, b"hello").unwrap(); + /// ``` + #[inline] + pub fn get_or_remove_at_height<'a, 'b: 'a>( + &'a self, + version: Version, + height: Height, + key: &'b [u8], + ) -> Result>, Error> { + self.0.get_or_remove_at_height(version, height, key, ()) + } + + /// Gets or removes the key-value pair if it exists. + /// Unlike [`compare_remove_with_builder_and_trailer`](SkipMap::compare_remove_with_builder_and_trailer), this method will not remove the value if the key with the given version already exists. + /// + /// - Returns `Ok(None)` if the key does not exist. + /// - Returns `Ok(Some(old))` if the key with the given version already exists. + /// + /// This method is useful when you want to get_or_remove a key and you know the key size but you do not have the key + /// at this moment. + /// + /// A placeholder will be inserted first, then you will get an [`VacantBuffer`], + /// and you must fill the buffer with bytes later in the closure. 
+ /// + /// # Example + /// + /// ```rust + /// use skl::{sync::versioned::SkipMap, KeyBuilder, Options}; + /// + /// struct Person { + /// id: u32, + /// name: String, + /// } + /// + /// impl Person { + /// fn encoded_size(&self) -> usize { + /// 4 + self.name.len() + /// } + /// } + /// + /// + /// let alice = Person { + /// id: 1, + /// name: "Alice".to_string(), + /// }; + /// + /// let encoded_size = alice.encoded_size(); + /// + /// let l = SkipMap::new(Options::new()).unwrap(); + /// + /// let kb = KeyBuilder::new(5u8.into(), |mut key| { + /// key.put_slice(b"alice").unwrap(); + /// Ok(()) + /// }); + /// l.get_or_remove_with_builder::(1, kb) + /// .unwrap(); + /// ``` + pub fn get_or_remove_with_builder<'a, 'b: 'a, E>( + &'a self, + version: Version, + key_builder: KeyBuilder) -> Result<(), E>>, + ) -> Result>, Either> { + self + .0 + .get_or_remove_at_height_with_builder(version, self.random_height(), key_builder, ()) + } + + /// Gets or removes the key-value pair if it exists. + /// Unlike [`compare_remove_at_height_with_builder_and_trailer`](SkipMap::compare_remove_at_height_with_builder_and_trailer), this method will not remove the value if the key with the given version already exists. + /// + /// - Returns `Ok(None)` if the key does not exist. + /// - Returns `Ok(Some(old))` if the key with the given version already exists. + /// + /// This method is useful when you want to get_or_remove a key and you know the key size but you do not have the key + /// at this moment. + /// + /// A placeholder will be inserted first, then you will get an [`VacantBuffer`], + /// and you must fill the buffer with bytes later in the closure. 
+ /// + /// # Example + /// + /// ```rust + /// use skl::{sync::versioned::SkipMap, KeyBuilder, Options}; + /// + /// struct Person { + /// id: u32, + /// name: String, + /// } + /// + /// impl Person { + /// fn encoded_size(&self) -> usize { + /// 4 + self.name.len() + /// } + /// } + /// + /// + /// let alice = Person { + /// id: 1, + /// name: "Alice".to_string(), + /// }; + /// + /// let encoded_size = alice.encoded_size(); + /// + /// let l = SkipMap::new(Options::new()).unwrap(); + /// + /// let kb = KeyBuilder::new(5u8.into(), |mut key| { + /// key.put_slice(b"alice").unwrap(); + /// Ok(()) + /// }); + /// let height = l.random_height(); + /// l.get_or_remove_at_height_with_builder::(1, height, kb) + /// .unwrap(); + /// ``` + pub fn get_or_remove_at_height_with_builder<'a, 'b: 'a, E>( + &'a self, + version: Version, + height: Height, + key_builder: KeyBuilder) -> Result<(), E>>, + ) -> Result>, Either> { + self + .0 + .get_or_remove_at_height_with_builder(version, height, key_builder, ()) + } +} diff --git a/src/types.rs b/src/types.rs index b648537..c77a305 100644 --- a/src/types.rs +++ b/src/types.rs @@ -1,234 +1,431 @@ -/// Returns when the bytes are too large to be written to the vacant buffer. -#[derive(Debug, Default, Clone, Copy)] -pub struct TooLarge { - remaining: usize, - write: usize, -} +use core::ops::{Add, AddAssign, Sub, SubAssign}; -impl core::fmt::Display for TooLarge { - fn fmt(&self, f: &mut core::fmt::Formatter<'_>) -> core::fmt::Result { - write!( - f, - "buffer does not have enough space (remaining {}, want {})", - self.remaining, self.write - ) - } -} +use arbitrary_int::{u27, u5, Number, TryNewError}; +pub use dbutils::buffer::*; -#[cfg(feature = "std")] -impl std::error::Error for TooLarge {} - -/// A vacant buffer in the skiplist. 
-#[must_use = "vacant buffer must be filled with bytes."] -#[derive(Debug)] -pub struct VacantBuffer<'a> { - value: &'a mut [u8], - len: usize, - cap: usize, - pub(crate) offset: u32, -} +const MAX_U5: u8 = (1 << 5) - 1; +const MAX_U27: u32 = (1 << 27) - 1; -impl<'a> VacantBuffer<'a> { - /// Fill the remaining space with the given byte. - pub fn fill(&mut self, byte: u8) { - self.len = self.cap; - self.value[self.len..].fill(byte); - } - - /// Write bytes to the vacant value. - pub fn write(&mut self, bytes: &[u8]) -> Result<(), TooLarge> { - let len = bytes.len(); - let remaining = self.cap - self.len; - if len > remaining { - return Err(TooLarge { - remaining, - write: len, - }); - } - - self.value[self.len..self.len + len].copy_from_slice(bytes); - self.len += len; - Ok(()) - } - - /// Write bytes to the vacant value without bounds checking. - /// - /// # Panics - /// - If a slice is larger than the remaining space. - pub fn write_unchecked(&mut self, bytes: &[u8]) { - let len = bytes.len(); - self.value[self.len..self.len + len].copy_from_slice(bytes); - self.len += len; - } - - /// Returns the capacity of the vacant value. - #[inline] - pub const fn capacity(&self) -> usize { - self.cap - } - - /// Returns the length of the vacant value. - #[inline] - pub const fn len(&self) -> usize { - self.len - } - - /// Returns `true` if the vacant value is empty. - #[inline] - pub const fn is_empty(&self) -> bool { - self.len == 0 - } - - /// Returns the remaining space of the vacant value. - #[inline] - pub const fn remaining(&self) -> usize { - self.cap - self.len - } - - #[inline] - pub(crate) fn new(cap: usize, offset: u32, value: &'a mut [u8]) -> Self { - Self { - value, - len: 0, - cap, - offset, - } - } -} +/// Version, used for MVCC purpose, it is a 56-bit unsigned integer. +pub type Version = u64; -impl<'a> core::ops::Deref for VacantBuffer<'a> { - type Target = [u8]; +macro_rules! 
impl_eq_and_ord { + ($name:ident($inner:ident < $upper:ident) -> [$($target:ident),+ $(,)?]) => { + $( + paste::paste! { + impl PartialEq<$target> for $name { + #[inline] + fn eq(&self, other: &$target) -> bool { + let val: $upper = self.0.into(); + val.eq(&(*other as $upper)) + } + } - fn deref(&self) -> &Self::Target { - &self.value[..self.len] - } + impl PartialOrd<$target> for $name { + #[inline] + fn partial_cmp(&self, other: &$target) -> Option { + let val: $upper = self.0.into(); + val.partial_cmp(&(*other as $upper)) + } + } + } + )* + }; } -impl<'a> core::ops::DerefMut for VacantBuffer<'a> { - fn deref_mut(&mut self) -> &mut Self::Target { - &mut self.value[..self.len] - } -} +macro_rules! impl_signed_eq_and_ord { + ($name:ident($inner:ident < $upper:ident) -> [$($target:ident),+ $(,)?]) => { + $( + paste::paste! { + impl PartialEq<$target> for $name { + #[inline] + fn eq(&self, other: &$target) -> bool { + let val: $upper = self.0.into(); + (val as i64).eq(&(*other as i64)) + } + } -impl<'a> AsRef<[u8]> for VacantBuffer<'a> { - fn as_ref(&self) -> &[u8] { - &self.value[..self.len] - } -} + impl PartialOrd<$target> for $name { + #[inline] + fn partial_cmp(&self, other: &$target) -> Option { + let val: $upper = self.0.into(); + (val as i64).partial_cmp(&(*other as i64)) + } + } -impl<'a> AsMut<[u8]> for VacantBuffer<'a> { - fn as_mut(&mut self) -> &mut [u8] { - &mut self.value[..self.len] - } -} + impl PartialEq<$name> for $target { + #[inline] + fn eq(&self, other: &$name) -> bool { + let val: $upper = other.0.into(); + (*self as i64).eq(&(val as i64)) + } + } -impl<'a> PartialEq<[u8]> for VacantBuffer<'a> { - fn eq(&self, other: &[u8]) -> bool { - self.value[..self.len].eq(other) - } + impl PartialOrd<$name> for $target { + #[inline] + fn partial_cmp(&self, other: &$name) -> Option { + let val: $upper = other.0.into(); + (*self as i64).partial_cmp(&(val as i64)) + } + } + } + )* + }; } -impl<'a> PartialEq> for [u8] { - fn eq(&self, other: 
&VacantBuffer<'a>) -> bool { - self.eq(&other.value[..other.len]) - } -} +macro_rules! impl_ops_for_ux_wrapper { + ($name:ident($inner:ident < $upper:ident) -> [$($target:ident),+ $(,)?]) => { + $( + paste::paste! { + impl Add<$target> for $name { + type Output = Self; -impl<'a> PartialEq<[u8]> for &VacantBuffer<'a> { - fn eq(&self, other: &[u8]) -> bool { - self.value[..self.len].eq(other) - } -} + fn add(self, rhs: $target) -> Self::Output { + let res = rhs as $upper + $upper::from(self.0); -impl<'a> PartialEq<&VacantBuffer<'a>> for [u8] { - fn eq(&self, other: &&VacantBuffer<'a>) -> bool { - self.eq(&other.value[..other.len]) - } -} + if res > [] { + panic!("attempt to add with overflow"); + } -impl<'a, const N: usize> PartialEq<[u8; N]> for VacantBuffer<'a> { - fn eq(&self, other: &[u8; N]) -> bool { - self.value[..self.len].eq(other.as_ref()) - } -} + Self($inner::new(res)) + } + } -impl<'a, const N: usize> PartialEq> for [u8; N] { - fn eq(&self, other: &VacantBuffer<'a>) -> bool { - self.as_ref().eq(&other.value[..other.len]) - } -} + impl AddAssign<$target> for $name { + fn add_assign(&mut self, rhs: $target) { + let res = rhs as $upper + $upper::from(self.0); -impl<'a, const N: usize> PartialEq<&VacantBuffer<'a>> for [u8; N] { - fn eq(&self, other: &&VacantBuffer<'a>) -> bool { - self.as_ref().eq(&other.value[..other.len]) - } -} + if res > [] { + panic!("attempt to add with overflow"); + } + + self.0 = $inner::new(res); + } + } + + impl Sub<$target> for $name { + type Output = Self; -impl<'a, const N: usize> PartialEq<[u8; N]> for &VacantBuffer<'a> { - fn eq(&self, other: &[u8; N]) -> bool { - self.value[..self.len].eq(other.as_ref()) - } + fn sub(self, rhs: $target) -> Self::Output { + let res = rhs as $upper - $upper::from(self.0); + + if res > [] { + panic!("attempt to substract with overflow"); + } + + Self($inner::new(res)) + } + } + + impl SubAssign<$target> for $name { + fn sub_assign(&mut self, rhs: $target) { + let res = rhs as $upper - 
$upper::from(self.0); + + if res > [] { + panic!("attempt to substract with overflow"); + } + + self.0 = $inner::new(res); + } + } + } + )* + }; } -impl<'a, const N: usize> PartialEq<&mut VacantBuffer<'a>> for [u8; N] { - fn eq(&self, other: &&mut VacantBuffer<'a>) -> bool { - self.as_ref().eq(&other.value[..other.len]) - } +macro_rules! impl_try_from_for_ux_wrapper { + ($name:ident($inner:ident < $upper:ident) -> [$($target:ident),+ $(,)?]) => { + $( + paste::paste! { + impl TryFrom<$target> for $name { + type Error = TryNewError; + + #[inline] + fn try_from(value: $target) -> Result { + $inner::try_new(value as $upper).map(Self) + } + } + + impl $name { + #[doc = "Try to create a" $name " from the given `" $target "`"] + #[inline] + pub fn [< try_from_ $target >](val: $target) -> Result { + $inner::try_new(val as $upper).map(Self) + } + + #[doc = " Creates a new " $name " from the given `" $target "`."] + /// + /// # Panics + #[doc = "- If the given value is greater than `" $inner "::MAX`."] + #[inline] + pub const fn [< from_ $target _unchecked>](val: $target) -> Self { + Self($inner::new(val as $upper)) + } + } + } + )* + }; } -impl<'a, const N: usize> PartialEq<[u8; N]> for &mut VacantBuffer<'a> { - fn eq(&self, other: &[u8; N]) -> bool { - self.value[..self.len].eq(other.as_ref()) - } +macro_rules! impl_from_for_ux_wrapper { + ($name:ident($inner:ident < $upper:ident) -> [$($target:ident),+ $(,)?]) => { + $( + paste::paste! 
{ + impl From<$target> for $name { + #[inline] + fn from(version: $target) -> Self { + Self($inner::from(version)) + } + } + + impl $name { + #[doc = "Creates a new " $name " from the given `" $target "`."] + #[inline] + pub const fn [< from_ $target>](version: $target) -> Self { + Self($inner::new(version as $upper)) + } + } + } + )* + }; } -pub(crate) enum Key<'a, 'b: 'a> { - Occupied(&'b [u8]), - Vacant(VacantBuffer<'a>), - Pointer { - arena: &'a super::Arena, - offset: u32, - len: u32, - }, - Remove(&'b [u8]), - #[allow(dead_code)] - RemoveVacant(VacantBuffer<'a>), - RemovePointer { - arena: &'a super::Arena, - offset: u32, - len: u32, - }, +macro_rules! impl_into_for_ux_wrapper { + ($name:ident($inner:ident < $upper:ident) -> [$($target:ident),+ $(,)?]) => { + $( + paste::paste! { + impl From<$name> for $target { + #[inline] + fn from(version: $name) -> Self { + version.[< to_ $target>]() + } + } + + impl $name { + #[doc = "Converts the " $name " to a `" $target "`."] + #[inline] + pub fn [< to_ $target>](&self) -> $target { + let val: $upper = self.0.into(); + val as $target + } + } + } + )* + }; } -impl<'a, 'b: 'a> Key<'a, 'b> { - #[inline] - pub(crate) fn on_fail(&self, arena: &super::Arena) { - match self { - Self::Occupied(_) | Self::Remove(_) | Self::Pointer { .. } | Self::RemovePointer { .. } => {} - Self::Vacant(key) | Self::RemoveVacant(key) => unsafe { - arena.dealloc(key.offset, key.cap as u32); - }, - } - } - - #[inline] - pub(crate) fn is_remove(&self) -> bool { - matches!( - self, - Self::Remove(_) | Self::RemoveVacant(_) | Self::RemovePointer { .. } - ) - } +macro_rules! ux_wrapper { + ( + $( + $([$meta:meta])* + $name:ident($inner:ident < $upper:ident) { + min: $min:expr, + default: $default:expr, + $(bits: $bits:expr,)? + $(ord: [$($ord_target:ident),* $(,)?],)? + $(signed_ord: [$($signed_ord_target:ident),* $(,)?],)? + $(ops: [$($ops_target:ident),* $(,)?],)? + $(try_from: [$($try_from_target:ident),* $(,)?],)? 
+ $(from: [$($from_target:ident),* $(,)?],)? + $(into: [$($into_target:ident),* $(,)?],)? + } + ),+ $(,)? + ) => { + $( + $(#[$meta])* + #[derive( + Debug, derive_more::Display, Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash, + )] + pub struct $name($inner); + + paste::paste! { + impl $name { + #[doc = "The maximum value of the " $name "."] + pub const MAX: Self = Self($inner::MAX); + + #[doc = "The minimum value of the " $name "."] + pub const MIN: Self = Self($inner::new($min)); + + #[doc = "Creates a new " $name " with the default value."] + #[inline] + pub const fn new() -> Self { + Self($inner::new($default)) + } + + /// Checked integer addition. Computes `self + rhs`, returning `None` if overflow occurred. + #[inline] + pub fn checked_add(self, rhs: Self) -> Option { + self.0.checked_add(rhs.0).map(Self) + } + + /// Checked integer subtraction. Computes `self - rhs`, returning `None` if overflow occurred. + #[inline] + pub fn checked_sub(self, rhs: Self) -> Option { + self.0.checked_sub(rhs.0).and_then(|val| { + if val < $inner::new($min) { + None + } else { + Some(Self(val)) + } + }) + } + + /// Wrapping (modular) addition. Computes `self + rhs`, wrapping around at the boundary of the type. + #[inline] + pub fn wrapping_add(self, rhs: Self) -> Self { + Self(self.0.wrapping_add(rhs.0).max($inner::new($min))) + } + + /// Wrapping (modular) subtraction. Computes `self - rhs`, wrapping around at the boundary of the type. + #[inline] + pub fn wrapping_sub(self, rhs: Self) -> Self { + let val = self.0.wrapping_sub(rhs.0); + if val < $inner::MIN { + Self::MAX + } else { + Self(val) + } + } + + $( + /// Create a native endian integer value from its representation as a byte array in big endian. + #[inline] + pub const fn from_be_bytes(bytes: [u8; { $bits >> 3 }]) -> Self { + Self(UInt::<$upper, $bits>::from_be_bytes(bytes)) + } + + /// Create a native endian integer value from its representation as a byte array in little endian. 
+ #[inline] + pub const fn from_le_bytes(bytes: [u8; { $bits >> 3 }]) -> Self { + Self(UInt::<$upper, $bits>::from_le_bytes(bytes)) + } + + /// Returns the native endian representation of the integer as a byte array in big endian. + #[inline] + pub const fn to_be_bytes(self) -> [u8; { $bits >> 3 }] { + self.0.to_be_bytes() + } + + /// Returns the native endian representation of the integer as a byte array in little endian. + #[inline] + pub const fn to_le_bytes(self) -> [u8; { $bits >> 3 }] { + self.0.to_le_bytes() + } + )? + } + } + + impl Default for $name { + #[inline] + fn default() -> Self { + Self($inner::new($default)) + } + } + + $( + impl From<[u8; { $bits >> 3 }]> for $name { + #[inline] + fn from(bytes: [u8; { $bits >> 3 }]) -> Self { + Self($inner::from_be_bytes(bytes)) + } + } + + impl From<$name> for [u8; { $bits >> 3 }] { + #[inline] + fn from(value: $name) -> Self { + value.to_be_bytes() + } + } + )? + + impl From<$inner> for $name { + #[inline] + fn from(val: $inner) -> Self { + Self(val) + } + } + + impl From<$name> for $inner { + #[inline] + fn from(value: $name) -> Self { + value.0 + } + } + + impl Add for $name { + type Output = Self; + + #[inline] + fn add(self, rhs: Self) -> Self::Output { + Self(self.0.checked_add(rhs.0).expect("attempt to add with overflow")) + } + } + + impl AddAssign for $name { + #[inline] + fn add_assign(&mut self, rhs: Self) { + self.0 = self.0.checked_add(rhs.0).expect("attempt to add with overflow"); + } + } + + impl Sub for $name { + type Output = Self; + + fn sub(self, rhs: Self) -> Self::Output { + let val = self.0.checked_sub(rhs.0).expect("attempt to subtract with overflow"); + if val < $inner::MIN { + panic!("attempt to subtract with overflow"); + } + + Self(val) + } + } + + impl SubAssign for $name { + fn sub_assign(&mut self, rhs: Self) { + let val = self.0.checked_sub(rhs.0).expect("attempt to subtract with overflow"); + if val < $inner::MIN { + panic!("attempt to subtract with overflow"); + } + self.0 = 
val; + } + } + + $(impl_eq_and_ord!($name($inner < $upper) -> [$($ord_target),*]);)? + + $(impl_signed_eq_and_ord!($name($inner < $upper) -> [$($signed_ord_target),*]);)? + + $(impl_ops_for_ux_wrapper!($name($inner < $upper) -> [$($ops_target),*]);)? + + $(impl_try_from_for_ux_wrapper!($name($inner < $upper) -> [$($try_from_target),*]);)? + + $(impl_from_for_ux_wrapper!($name($inner < $upper) -> [$($from_target),*]);)? + + $(impl_into_for_ux_wrapper!($name($inner < $upper) -> [$($into_target),*]);)? + )* + }; } -impl<'a, 'b: 'a> AsRef<[u8]> for Key<'a, 'b> { - #[inline] - fn as_ref(&self) -> &[u8] { - match self { - Self::Occupied(key) | Self::Remove(key) => key, - Self::Vacant(key) | Self::RemoveVacant(key) => key.as_ref(), - Self::Pointer { arena, offset, len } | Self::RemovePointer { arena, offset, len } => unsafe { - arena.get_bytes(*offset as usize, *len as usize) - }, - } - } +ux_wrapper! { + [doc = "Height which is used to configure the maximum tower height of a skiplist, it is a 5-bit unsigned integer."] + Height(u5 < u8) { + min: 1, + default: 20, + ord: [u8, u16, u32, u64, usize], + signed_ord: [i8, i16, i32, i64, isize], + ops: [u8, u16, u32, u64, usize], + try_from: [u8, u16, u32, u64, usize], + into: [u8, u16, u32, u64, usize, u128], + }, + [doc = "KeySize which is used to represent a length of a key stored in the skiplist, it is a 27-bit unsigned integer."] + KeySize(u27 < u32) { + min: 0, + default: u16::MAX as u32, + ord: [u8, u16, u32, u64, usize], + signed_ord: [i8, i16, i32, i64, isize], + ops: [u8, u16, u32, u64, usize], + try_from: [u32, usize], + from: [u8, u16], + into: [u32, u64, usize], + }, } diff --git a/src/unsync.rs b/src/unsync.rs new file mode 100644 index 0000000..6993161 --- /dev/null +++ b/src/unsync.rs @@ -0,0 +1,419 @@ +pub use rarena_allocator::unsync::Arena; +use rarena_allocator::Allocator as _; + +use core::{ + cell::UnsafeCell, + ops::{Bound, RangeBounds}, +}; + +use super::{ + allocator::{Link as BaseLink, *}, + common::*, 
+ *, +}; +use crate::VacantBuffer; + +use either::Either; + +/// Versioned header of the skipmap. +#[derive(Debug)] +#[repr(C)] +pub struct VersionedMeta { + /// The maximum MVCC version of the skiplist. + max_version: UnsafeCell, + /// The minimum MVCC version of the skiplist. + min_version: UnsafeCell, + len: UnsafeCell, + magic_version: u16, + /// Current height. 1 <= height <= 31. + height: UnsafeCell, + reserved_byte: u8, +} + +impl Header for VersionedMeta { + #[inline] + fn new(version: u16) -> Self { + Self { + max_version: UnsafeCell::new(0), + min_version: UnsafeCell::new(0), + magic_version: version, + height: UnsafeCell::new(1), + len: UnsafeCell::new(0), + reserved_byte: 0, + } + } + + #[inline] + fn magic_version(&self) -> u16 { + self.magic_version + } + + #[inline] + fn max_version(&self) -> u64 { + unsafe { *self.max_version.get() } + } + + #[inline] + fn min_version(&self) -> u64 { + unsafe { *self.min_version.get() } + } + + #[inline] + fn height(&self) -> u8 { + unsafe { *self.height.get() } + } + + #[inline] + fn len(&self) -> u32 { + unsafe { *self.len.get() } + } + + #[inline] + fn increase_len(&self) { + unsafe { + *self.len.get() += 1; + } + } + + fn update_max_version(&self, version: Version) { + unsafe { + let current = *self.max_version.get(); + if version > current { + *self.max_version.get() = version; + } + } + } + + fn update_min_version(&self, version: Version) { + unsafe { + let current = *self.min_version.get(); + if version < current { + *self.min_version.get() = version; + } + } + } + + #[inline] + fn compare_exchange_height_weak( + &self, + current: u8, + new: u8, + _: Ordering, + _: Ordering, + ) -> Result { + unsafe { + let height = self.height.get(); + assert_eq!( + current, *height, + "current height is not equal to the actual height in unsync version `VersionedMeta`" + ); + *height = new; + Ok(current) + } + } +} + +/// Header of the skipmap. 
+#[derive(Debug)] +#[repr(C)] +pub struct Meta { + len: UnsafeCell, + magic_version: u16, + /// Current height. 1 <= height <= 31. + height: UnsafeCell, + reserved_byte: u8, +} + +impl Header for Meta { + #[inline] + fn new(version: u16) -> Self { + Self { + magic_version: version, + height: UnsafeCell::new(1), + len: UnsafeCell::new(0), + reserved_byte: 0, + } + } + + #[inline] + fn magic_version(&self) -> u16 { + self.magic_version + } + + #[inline] + fn max_version(&self) -> u64 { + MIN_VERSION + } + + #[inline] + fn min_version(&self) -> u64 { + MIN_VERSION + } + + #[inline] + fn height(&self) -> u8 { + unsafe { *self.height.get() } + } + + #[inline] + fn len(&self) -> u32 { + unsafe { *self.len.get() } + } + + #[inline] + fn increase_len(&self) { + unsafe { + *self.len.get() += 1; + } + } + + fn update_max_version(&self, _: Version) {} + + fn update_min_version(&self, _: Version) {} + + #[inline] + fn compare_exchange_height_weak( + &self, + current: u8, + new: u8, + _: Ordering, + _: Ordering, + ) -> Result { + unsafe { + let height = self.height.get(); + assert_eq!( + current, *height, + "current height is not equal to the actual height in unsync version `Meta`" + ); + *height = new; + Ok(current) + } + } +} + +/// Atomic value pointer. 
+#[repr(C, align(8))] +pub struct UnsyncValuePointer(UnsafeCell); + +impl core::fmt::Debug for UnsyncValuePointer { + fn fmt(&self, f: &mut core::fmt::Formatter<'_>) -> core::fmt::Result { + let (offset, len) = decode_value_pointer(unsafe { *self.0.get() }); + f.debug_struct("UnsyncValuePointer") + .field("offset", &offset) + .field("len", &len) + .finish() + } +} + +impl UnsyncValuePointer { + #[inline] + fn new(offset: u32, len: u32) -> Self { + Self(UnsafeCell::new(encode_value_pointer(offset, len))) + } + + #[inline] + fn compare_remove(&self, _: Ordering, _: Ordering) -> Result<(u32, u32), (u32, u32)> { + unsafe { + let ptr = self.0.get(); + let old = *ptr; + + let (offset, size) = decode_value_pointer(old); + *ptr = encode_value_pointer(offset, REMOVE); + + Ok((offset, size)) + } + } +} + +impl ValuePointer for UnsyncValuePointer { + const REMOVE: u32 = REMOVE; + + #[inline] + fn load(&self) -> (u32, u32) { + decode_value_pointer(unsafe { *self.0.get() }) + } + + #[inline] + fn swap(&self, offset: u32, len: u32) -> (u32, u32) { + let new = encode_value_pointer(offset, len); + unsafe { + let old = *self.0.get(); + *self.0.get() = new; + decode_value_pointer(old) + } + } +} + +/// Link to the previous and next node. +#[derive(Debug)] +#[repr(C)] +pub struct Link { + next_offset: UnsafeCell, + prev_offset: UnsafeCell, +} + +impl BaseLink for Link { + #[inline] + fn new(next_offset: u32, prev_offset: u32) -> Self { + Self { + next_offset: UnsafeCell::new(next_offset), + prev_offset: UnsafeCell::new(prev_offset), + } + } + + #[inline] + fn store_next_offset(&self, offset: u32, _: Ordering) { + unsafe { + *self.next_offset.get() = offset; + } + } + + #[inline] + fn store_prev_offset(&self, offset: u32, _: Ordering) { + unsafe { + *self.prev_offset.get() = offset; + } + } +} + +macro_rules! node_pointer { + ($node: ident $(<$t:ident>)?) => { + #[doc(hidden)] + #[derive(Debug)] + pub struct NodePointer $(<$t>)? 
{ + offset: u32, + _m: core::marker::PhantomData<$node $(<$t>)?>, + } + + impl $(<$t>)? Clone for NodePointer $(<$t>)? { + fn clone(&self) -> Self { + *self + } + } + + impl $(<$t>)? Copy for NodePointer $(<$t>)? {} + + impl $(<$t: $crate::Trailer>)? $crate::allocator::NodePointer for NodePointer $(<$t>)? { + const NULL: Self = Self { + offset: 0, + _m: core::marker::PhantomData, + }; + + type Node = $node $(<$t>)?; + + #[inline] + fn is_null(&self) -> bool { + self.offset == 0 + } + + fn offset(&self) -> u32 { + self.offset + } + + /// ## Safety + /// + /// - The caller must ensure that the node is allocated by the arena. + /// - The caller must ensure that the offset is less than the capacity of the arena and larger than 0. + unsafe fn next_offset(&self, arena: &A, idx: usize) -> u32 { + unsafe { *self.tower(arena, idx).next_offset.get() } + } + + /// ## Safety + /// + /// - The caller must ensure that the node is allocated by the arena. + /// - The caller must ensure that the offset is less than the capacity of the arena and larger than 0. + unsafe fn prev_offset(&self, arena: &A, idx: usize) -> u32 { + unsafe { *self.tower(arena, idx).prev_offset.get() } + } + + /// ## Safety + /// + /// - The caller must ensure that the node is allocated by the arena. + /// - The caller must ensure that the offset is less than the capacity of the arena and larger than 0. + unsafe fn cas_prev_offset( + &self, + arena: &A, + idx: usize, + current: u32, + new: u32, + _: Ordering, + _: Ordering, + ) -> Result { + unsafe { + let tower = self.tower(arena, idx); + let ptr = tower.prev_offset.get(); + + let old = *ptr; + + assert_eq!(old, current, "current prev_offset is not equal to the actual prev_offset in unsync version `NodePointer`, it seems that you are using unsync version in concurrent environment"); + + *ptr = new; + Ok(old) + } + } + + /// ## Safety + /// + /// - The caller must ensure that the node is allocated by the arena. 
+ /// - The caller must ensure that the offset is less than the capacity of the arena and larger than 0. + unsafe fn cas_next_offset( + &self, + arena: &A, + idx: usize, + current: u32, + new: u32, + _: Ordering, + _: Ordering, + ) -> Result { + unsafe { + let tower = self.tower(arena, idx); + let ptr = tower.next_offset.get(); + + let old = *ptr; + + assert_eq!(old, current, "current next_offset is not equal to the actual next_offset in unsync version `NodePointer`, it seems that you are using unsync version in concurrent environment"); + + *ptr = new; + Ok(old) + } + } + + #[inline] + fn new(offset: u32) -> Self { + Self { + offset, + _m: core::marker::PhantomData, + } + } + + /// ## Safety + /// - the pointer must be valid + #[inline] + unsafe fn as_ref(&self, arena: &A) -> &Self::Node { + &*(arena.get_pointer(self.offset as usize) as *const Self::Node) + } + + /// ## Safety + /// - the pointer must be valid + #[inline] + unsafe fn as_mut(&self, arena: &A) -> &mut Self::Node { + &mut *(arena.get_pointer_mut(self.offset as usize) as *mut Self::Node) + } + } + }; +} + +/// A lock free ARENA based skiplist. See [`SkipList`](base::SkipList) for more information. +pub mod full; + +/// A skipmap implementation with version support. See [`SkipMap`](versioned::SkipMap) for more information. +pub mod versioned; + +/// A skipmap implementation with trailer support. See [`SkipMap`](trailed::SkipMap) for more information. +pub mod trailed; + +/// A skipmap implementation without trailer and version support. See [`SkipMap`](map::SkipMap) for more information. 
+pub mod map; + +#[cfg(test)] +mod tests; diff --git a/src/unsync/full.rs b/src/unsync/full.rs new file mode 100644 index 0000000..f04c704 --- /dev/null +++ b/src/unsync/full.rs @@ -0,0 +1,1506 @@ +use core::{borrow::Borrow, marker::PhantomData}; + +use among::Among; +use base::{AllVersionsIter, EntryRef, Iter, VersionedEntryRef}; + +use super::*; + +type Allocator = GenericAllocator, Arena>; +type SkipList = base::SkipList, C>; + +node_pointer!(FullNode); + +/// A raw node that supports both version and trailer. +#[repr(C)] +pub struct FullNode { + // A byte slice is 24 bytes. We are trying to save space here. + /// Multiple parts of the value are encoded as a single u64 so that it + /// can be atomically loaded and stored: + /// value offset: u32 (bits 0-31) + /// value size : u32 (bits 32-63) + value: UnsyncValuePointer, + // Immutable. No need to lock to access key. + key_offset: u32, + // Immutable. No need to lock to access key. + key_size_and_height: u32, + version: u64, + trailer: PhantomData, + // ** DO NOT REMOVE BELOW COMMENT** + // The below field will be attached after the node, have to comment out + // this field, because each node will not use the full height, the code will + // not allocate the full size of the tower. + // + // Most nodes do not need to use the full height of the tower, since the + // probability of each successive level decreases exponentially. Because + // these elements are never accessed, they do not need to be allocated. + // Therefore, when a node is allocated in the arena, its memory footprint + // is deliberately truncated to not include unneeded tower elements. + // + // All accesses to elements should use CAS operations, with no need to lock. 
+ // pub(super) tower: [Link; self.opts.max_height], +} + +impl core::fmt::Debug for FullNode { + fn fmt(&self, f: &mut core::fmt::Formatter<'_>) -> core::fmt::Result { + let (key_size, height) = decode_key_size_and_height(self.key_size_and_height); + let (value_offset, value_size) = self.value.load(); + f.debug_struct("Node") + .field("value_offset", &value_offset) + .field("value_size", &value_size) + .field("key_offset", &self.key_offset) + .field("key_size", &key_size) + .field("height", &height) + .finish() + } +} + +impl WithTrailer for FullNode {} +impl WithVersion for FullNode {} + +impl Node for FullNode { + type Link = Link; + + type Trailer = T; + + type ValuePointer = UnsyncValuePointer; + + type Pointer = NodePointer; + + fn full(value_offset: u32, max_height: u8) -> Self { + Self { + value: UnsyncValuePointer::new(value_offset, 0), + key_offset: 0, + key_size_and_height: encode_key_size_and_height(0, max_height), + version: MIN_VERSION, + trailer: PhantomData, + } + } + + #[inline] + fn value_pointer(&self) -> &Self::ValuePointer { + &self.value + } + + #[inline] + fn set_value_pointer(&mut self, offset: u32, size: u32) { + self.value = UnsyncValuePointer::new(offset, size); + } + + #[inline] + fn clear_value( + &self, + arena: &A, + success: Ordering, + failure: Ordering, + ) -> Result<(), (u32, u32)> { + self + .value + .compare_remove(success, failure) + .map(|(_, old_len)| { + if old_len != REMOVE { + arena.increase_discarded(old_len); + } + }) + } + + #[inline] + fn set_key_size_and_height(&mut self, key_size_and_height: u32) { + self.key_size_and_height = key_size_and_height; + } + + #[inline] + fn set_key_offset(&mut self, key_offset: u32) { + self.key_offset = key_offset; + } + + #[inline] + fn version(&self) -> Version { + self.version + } + + #[inline] + fn set_version(&mut self, version: Version) { + self.version = version; + } + + #[inline] + fn key_size_and_height(&self) -> u32 { + self.key_size_and_height + } + + #[inline] + fn 
key_offset(&self) -> u32 { + self.key_offset + } +} + +/// A fast, ARENA based `SkipMap` that supports trailed structure, multiple versions, forward and backward iteration. +/// +/// If you want to use in concurrent environment, you can use [`sync::full::SkipMap`]. +#[repr(transparent)] +pub struct SkipMap(pub(super) SkipList); + +impl Clone for SkipMap { + fn clone(&self) -> Self { + Self(self.0.clone()) + } +} + +impl SkipMap { + /// Create a new skipmap with default options. + /// + /// **Note:** The capacity stands for how many memory allocated, + /// it does not mean the skiplist can store `cap` entries. + /// + /// + /// + /// **What the difference between this method and [`SkipMap::mmap_anon`]?** + /// + /// 1. This method will use an `AlignedVec` ensures we are working within Rust's memory safety guarantees. + /// Even if we are working with raw pointers with `Box::into_raw`, + /// the backend ARENA will reclaim the ownership of this memory by converting it back to a `Box` + /// when dropping the backend ARENA. Since `AlignedVec` uses heap memory, the data might be more cache-friendly, + /// especially if you're frequently accessing or modifying it. + /// + /// 2. Where as [`SkipMap::mmap_anon`] will use mmap anonymous to require memory from the OS. + /// If you require very large contiguous memory regions, `mmap` might be more suitable because + /// it's more direct in requesting large chunks of memory from the OS. + /// + /// [`SkipMap::mmap_anon`]: #method.mmap_anon + pub fn new(opts: Options) -> Result { + Self::with_comparator(opts, Ascend) + } + + /// Create a new memory map file backed with default options. + /// + /// **Note:** The capacity stands for how many memory mmaped, + /// it does not mean the skipmap can store `cap` entries. 
+ /// + /// # Safety + /// - If trying to reopens a skiplist, then the trailer type must be the same as the previous one + #[cfg(all(feature = "memmap", not(target_family = "wasm")))] + #[cfg_attr(docsrs, doc(cfg(all(feature = "memmap", not(target_family = "wasm")))))] + pub unsafe fn map_mut>( + path: P, + opts: Options, + open_options: OpenOptions, + mmap_options: MmapOptions, + ) -> std::io::Result { + Self::map_mut_with_comparator(path, opts, open_options, mmap_options, Ascend) + } + + /// Open an exist file and mmap it to create skipmap. + /// + /// # Safety + /// - If trying to reopens a skiplist, then the trailer type must be the same as the previous one. + #[cfg(all(feature = "memmap", not(target_family = "wasm")))] + #[cfg_attr(docsrs, doc(cfg(all(feature = "memmap", not(target_family = "wasm")))))] + pub unsafe fn map>( + path: P, + opts: Options, + open_options: OpenOptions, + mmap_options: MmapOptions, + ) -> std::io::Result { + Self::map_with_comparator(path, opts, open_options, mmap_options, Ascend) + } + + /// Create a new memory map backed skipmap with default options. + /// + /// **What the difference between this method and [`SkipMap::new`]?** + /// + /// 1. This method will use mmap anonymous to require memory from the OS directly. + /// If you require very large contiguous memory regions, this method might be more suitable because + /// it's more direct in requesting large chunks of memory from the OS. + /// + /// 2. Where as [`SkipMap::new`] will use an `AlignedVec` ensures we are working within Rust's memory safety guarantees. + /// Even if we are working with raw pointers with `Box::into_raw`, + /// the backend ARENA will reclaim the ownership of this memory by converting it back to a `Box` + /// when dropping the backend ARENA. Since `AlignedVec` uses heap memory, the data might be more cache-friendly, + /// especially if you're frequently accessing or modifying it. 
+ /// + /// [`SkipMap::new`]: #method.new + #[cfg(all(feature = "memmap", not(target_family = "wasm")))] + #[cfg_attr(docsrs, doc(cfg(all(feature = "memmap", not(target_family = "wasm")))))] + pub fn map_anon(opts: Options, mmap_options: MmapOptions) -> std::io::Result { + Self::map_anon_with_comparator(opts, mmap_options, Ascend) + } +} + +impl SkipMap { + /// Returns the reserved bytes of the allocator specified in the [`ArenaOptions::with_reserved`]. + #[inline] + pub fn reserved_slice(&self) -> &[u8] { + self.0.arena.reserved_slice() + } + + /// Returns the mutable reserved bytes of the allocator specified in the [`ArenaOptions::with_reserved`]. + /// + /// # Safety + /// - The caller need to make sure there is no data-race + /// + /// # Panics + /// - If in read-only mode, it will panic. + #[inline] + #[allow(clippy::mut_from_ref)] + pub unsafe fn reserved_slice_mut(&self) -> &mut [u8] { + self.0.arena.reserved_slice_mut() + } + + /// Returns the path of the mmap file, only returns `Some` when the ARENA is backed by a mmap file. + #[cfg(all(feature = "memmap", not(target_family = "wasm")))] + #[cfg_attr(docsrs, doc(cfg(all(feature = "memmap", not(target_family = "wasm")))))] + #[inline] + pub fn path(&self) -> Option<&std::rc::Rc> { + self.0.arena.path() + } + + /// Sets remove on drop, only works on mmap with a file backend. + /// + /// Default is `false`. + /// + /// > **WARNING:** Once set to `true`, the backed file will be removed when the allocator is dropped, even though the file is opened in + /// > read-only mode. + #[cfg(all(feature = "memmap", not(target_family = "wasm")))] + #[cfg_attr(docsrs, doc(cfg(all(feature = "memmap", not(target_family = "wasm")))))] + #[inline] + pub fn remove_on_drop(&self, val: bool) { + self.0.remove_on_drop(val); + } + + /// Returns the offset of the data section in the `SkipMap`. 
+ /// + /// By default, `SkipMap` will allocate meta, head node, and tail node in the ARENA, + /// and the data section will be allocated after the tail node. + /// + /// This method will return the offset of the data section in the ARENA. + #[inline] + pub fn data_offset(&self) -> usize { + self.0.data_offset() + } + + /// Returns the version number of the [`SkipMap`]. + #[inline] + pub fn version(&self) -> u16 { + self.0.version() + } + + /// Returns the magic version number of the [`SkipMap`]. + /// + /// This value can be used to check the compatibility for application using [`SkipMap`]. + #[inline] + pub fn magic_version(&self) -> u16 { + self.0.magic_version() + } + + /// Returns the height of the highest tower within any of the nodes that + /// have ever been allocated as part of this skiplist. + #[inline] + pub fn height(&self) -> u8 { + self.0.height() + } + + /// Returns the number of remaining bytes can be allocated by the arena. + #[inline] + pub fn remaining(&self) -> usize { + self.0.remaining() + } + + /// Returns how many bytes are discarded by the ARENA. + #[inline] + pub fn discarded(&self) -> u32 { + self.0.discarded() + } + + /// Returns the number of bytes that have allocated from the arena. + #[inline] + pub fn allocated(&self) -> usize { + self.0.allocated() + } + + /// Returns the capacity of the arena. + #[inline] + pub fn capacity(&self) -> usize { + self.0.capacity() + } + + /// Returns the number of entries in the skipmap. + #[inline] + pub fn len(&self) -> usize { + self.0.len() + } + + /// Returns true if the skipmap is empty. + #[inline] + pub fn is_empty(&self) -> bool { + self.len() == 0 + } + + /// Gets the number of pointers to this `SkipMap` similar to [`Arc::strong_count`](std::sync::Arc::strong_count). + #[inline] + pub fn refs(&self) -> usize { + self.0.refs() + } + + /// Returns the maximum version of all entries in the map. 
+ #[inline] + pub fn max_version(&self) -> u64 { + self.0.max_version() + } + + /// Returns the minimum version of all entries in the map. + #[inline] + pub fn min_version(&self) -> u64 { + self.0.min_version() + } + + /// Returns the comparator used to compare keys. + #[inline] + pub fn comparator(&self) -> &C { + self.0.comparator() + } + + /// Returns a random generated height. + /// + /// This method is useful when you want to check if the underlying allocator can allocate a node. + /// + /// # Example + /// + /// ```rust + /// use skl::{unsync::full::SkipMap, Options, Ascend}; + /// + /// let map = SkipMap::::new(Options::new()).unwrap(); + /// let height = map.random_height(); + /// + /// let needed = SkipMap::::estimated_node_size(height, b"k1".len(), b"k2".len()); + /// ``` + #[inline] + pub fn random_height(&self) -> Height { + self.0.random_height() + } + + /// Returns the estimated size of a node with the given height and key/value sizes. + /// + /// **Note**: The returned size is only an estimate and may not be accurate, which means that the actual size is less than or equal to the returned size. + #[inline] + pub fn estimated_node_size(height: Height, key_size: usize, value_size: usize) -> usize { + SkipList::::estimated_node_size(height, key_size, value_size) + } + + /// Like [`SkipMap::new`], but with a custom [`Comparator`]. + #[inline] + pub fn with_comparator(opts: Options, cmp: C) -> Result { + SkipList::::with_comparator(opts, cmp).map(Self) + } + + /// Like [`SkipMap::map_mut`], but with a custom [`Comparator`]. + /// + /// # Safety + /// - If trying to reopens a skiplist, then the trailer type must be the same as the previous one. 
+ #[cfg(all(feature = "memmap", not(target_family = "wasm")))] + #[cfg_attr(docsrs, doc(cfg(all(feature = "memmap", not(target_family = "wasm")))))] + #[inline] + pub unsafe fn map_mut_with_comparator>( + path: P, + opts: Options, + open_options: OpenOptions, + mmap_options: MmapOptions, + cmp: C, + ) -> std::io::Result { + SkipList::::map_mut_with_comparator(path, opts, open_options, mmap_options, cmp).map(Self) + } + + /// Like [`SkipMap::map_mut`], but with [`Options`], a custom [`Comparator`] and a [`PathBuf`](std::path::PathBuf) builder. + /// + /// # Safety + /// - If trying to reopens a skiplist, then the trailer type must be the same as the previous one. + #[cfg(all(feature = "memmap", not(target_family = "wasm")))] + #[cfg_attr(docsrs, doc(cfg(all(feature = "memmap", not(target_family = "wasm")))))] + #[inline] + pub unsafe fn map_mut_with_comparator_and_path_builder( + path_builder: PB, + opts: Options, + open_options: OpenOptions, + mmap_options: MmapOptions, + cmp: C, + ) -> Result> + where + PB: FnOnce() -> Result, + { + SkipList::::map_mut_with_comparator_and_path_builder( + path_builder, + opts, + open_options, + mmap_options, + cmp, + ) + .map(Self) + } + + /// Like [`SkipMap::map`], but with a custom [`Comparator`]. + /// + /// # Safety + /// - If trying to reopens a skiplist, then the trailer type must be the same as the previous one. + #[cfg(all(feature = "memmap", not(target_family = "wasm")))] + #[cfg_attr(docsrs, doc(cfg(all(feature = "memmap", not(target_family = "wasm")))))] + #[inline] + pub unsafe fn map_with_comparator>( + path: P, + opts: Options, + open_options: OpenOptions, + mmap_options: MmapOptions, + cmp: C, + ) -> std::io::Result { + SkipList::::map_with_comparator(path, opts, open_options, mmap_options, cmp).map(Self) + } + + /// Like [`SkipMap::map`], but with a custom [`Comparator`] and a [`PathBuf`](std::path::PathBuf) builder. 
+ /// + /// # Safety + /// - If trying to reopens a skiplist, then the trailer type must be the same as the previous one. + #[cfg(all(feature = "memmap", not(target_family = "wasm")))] + #[cfg_attr(docsrs, doc(cfg(all(feature = "memmap", not(target_family = "wasm")))))] + #[inline] + pub unsafe fn map_with_comparator_and_path_builder( + path_builder: PB, + opts: Options, + open_options: OpenOptions, + mmap_options: MmapOptions, + cmp: C, + ) -> Result> + where + PB: FnOnce() -> Result, + { + SkipList::::map_with_comparator_and_path_builder( + path_builder, + opts, + open_options, + mmap_options, + cmp, + ) + .map(Self) + } + + /// Like [`SkipMap::map_anon`], but with a custom [`Comparator`]. + #[cfg(all(feature = "memmap", not(target_family = "wasm")))] + #[cfg_attr(docsrs, doc(cfg(all(feature = "memmap", not(target_family = "wasm")))))] + #[inline] + pub fn map_anon_with_comparator( + opts: Options, + mmap_options: MmapOptions, + cmp: C, + ) -> std::io::Result { + SkipList::::map_anon_with_comparator(opts, mmap_options, cmp).map(Self) + } + + /// Clear the skiplist to empty and re-initialize. + /// + /// # Safety + /// - The current pointers get from the ARENA cannot be used anymore after calling this method. + /// - This method is not thread-safe. + /// + /// # Example + /// + /// Undefine behavior: + /// + /// ```ignore + /// let map = SkipMap::new(Options::new()).unwrap(); + /// + /// map.insert(1, b"hello", b"world").unwrap(); + /// + /// let data = map.get(b"hello").unwrap(); + /// + /// map.clear().unwrap(); + /// + /// let w = data[0]; // undefined behavior + /// ``` + pub unsafe fn clear(&mut self) -> Result<(), Error> { + self.0.clear() + } + + /// Flushes outstanding memory map modifications to disk. + /// + /// When this method returns with a non-error result, + /// all outstanding changes to a file-backed memory map are guaranteed to be durably stored. + /// The file's metadata (including last modification timestamp) may not be updated. 
+ #[cfg(all(feature = "memmap", not(target_family = "wasm")))] + #[cfg_attr(docsrs, doc(cfg(all(feature = "memmap", not(target_family = "wasm")))))] + pub fn flush(&self) -> std::io::Result<()> { + self.0.flush() + } + + /// Asynchronously flushes outstanding memory map modifications to disk. + /// + /// This method initiates flushing modified pages to durable storage, but it will not wait for + /// the operation to complete before returning. The file's metadata (including last + /// modification timestamp) may not be updated. + #[cfg(all(feature = "memmap", not(target_family = "wasm")))] + #[cfg_attr(docsrs, doc(cfg(all(feature = "memmap", not(target_family = "wasm")))))] + pub fn flush_async(&self) -> std::io::Result<()> { + self.0.flush_async() + } +} + +impl SkipMap { + /// Returns `true` if the key exists in the map. + /// + /// This method will return `false` if the entry is marked as removed. If you want to check if the key exists even if it is marked as removed, + /// you can use [`contains_key_versioned`](SkipMap::contains_key_versioned). + /// + /// # Example + /// + /// ```rust + /// use skl::{unsync::full::SkipMap, Options}; + /// + /// let map = SkipMap::new(Options::new()).unwrap(); + /// + /// map.insert(0, b"hello", b"world", ()).unwrap(); + /// + /// map.get_or_remove(1, b"hello", ()).unwrap(); + /// + /// assert!(!map.contains_key(1, b"hello")); + /// assert!(map.contains_key_versioned(1, b"hello")); + /// ``` + #[inline] + pub fn contains_key<'a, 'b: 'a>(&'a self, version: Version, key: &'b [u8]) -> bool { + self.get(version, key).is_some() + } + + /// Returns `true` if the key exists in the map, even if it is marked as removed. 
+ /// + /// # Example + /// + /// ```rust + /// use skl::{unsync::full::SkipMap, Options}; + /// + /// let map = SkipMap::new(Options::new()).unwrap(); + /// + /// map.insert(0, b"hello", b"world", ()).unwrap(); + /// + /// map.get_or_remove(1, b"hello", ()).unwrap(); + /// + /// assert!(!map.contains_key(1, b"hello")); + /// assert!(map.contains_key_versioned(1, b"hello")); + /// ``` + #[inline] + pub fn contains_key_versioned<'a, 'b: 'a>(&'a self, version: Version, key: &'b [u8]) -> bool { + self.0.contains_key_versioned(version, key) + } + + /// Returns the first entry in the map. + pub fn first(&self, version: Version) -> Option>> { + self.iter(version).seek_lower_bound(Bound::Unbounded) + } + + /// Returns the last entry in the map. + pub fn last(&self, version: Version) -> Option>> { + self.iter(version).seek_upper_bound(Bound::Unbounded) + } + + /// Returns the value associated with the given key, if it exists. + /// + /// This method will return `None` if the entry is marked as removed. If you want to get the entry even if it is marked as removed, + /// you can use [`get_versioned`](SkipMap::get_versioned). + /// + /// # Example + /// + /// ```rust + /// use skl::{unsync::full::SkipMap, Options}; + /// + /// let map = SkipMap::new(Options::new()).unwrap(); + /// + /// map.insert(0, b"hello", b"world", ()).unwrap(); + /// + /// let ent = map.get(0, b"hello").unwrap(); + /// assert_eq!(ent.value(), b"world"); + /// + /// map.get_or_remove(1, b"hello", ()).unwrap(); + /// + /// assert!(map.get(1, b"hello").is_none()); + /// ``` + pub fn get<'a, 'b: 'a>( + &'a self, + version: Version, + key: &'b [u8], + ) -> Option>> { + self.0.get(version, key) + } + + /// Returns the value associated with the given key, if it exists. + /// + /// The difference between `get` and `get_versioned` is that `get_versioned` will return the value even if the entry is removed. 
+ /// + /// # Example + /// + /// ```rust + /// use skl::{unsync::full::SkipMap, Options}; + /// + /// let map = SkipMap::new(Options::new()).unwrap(); + /// + /// map.insert(0, b"hello", b"world", ()).unwrap(); + /// + /// map.get_or_remove(1, b"hello", ()).unwrap(); + /// + /// assert!(map.get(1, b"hello").is_none()); + /// + /// let ent = map.get_versioned(1, b"hello").unwrap(); + /// // value is None because the entry is marked as removed. + /// assert!(ent.value().is_none()); + /// ``` + pub fn get_versioned<'a, 'b: 'a>( + &'a self, + version: Version, + key: &'b [u8], + ) -> Option>> { + self.0.get_versioned(version, key) + } + + /// Returns an `EntryRef` pointing to the highest element whose key is below the given bound. + /// If no such element is found then `None` is returned. + pub fn upper_bound<'a, 'b: 'a>( + &'a self, + version: Version, + upper: Bound<&'b [u8]>, + ) -> Option>> { + self.iter(version).seek_upper_bound(upper) + } + + /// Returns an `EntryRef` pointing to the lowest element whose key is above the given bound. + /// If no such element is found then `None` is returned. + pub fn lower_bound<'a, 'b: 'a>( + &'a self, + version: Version, + lower: Bound<&'b [u8]>, + ) -> Option>> { + self.iter(version).seek_lower_bound(lower) + } + + /// Returns a new iterator, this iterator will yield the latest version of all entries in the map less or equal to the given version. + #[inline] + pub fn iter(&self, version: Version) -> Iter, C> { + self.0.iter(version) + } + + /// Returns a new iterator, this iterator will yield all versions for all entries in the map less or equal to the given version. + #[inline] + pub fn iter_all_versions(&self, version: Version) -> AllVersionsIter, C> { + self.0.iter_all_versions(version) + } + + /// Returns a iterator that within the range, this iterator will yield the latest version of all entries in the range less or equal to the given version. 
+ #[inline] + pub fn range<'a, Q, R>(&'a self, version: Version, range: R) -> Iter<'a, Allocator, C, Q, R> + where + Q: ?Sized + Borrow<[u8]>, + R: RangeBounds + 'a, + { + self.0.range(version, range) + } + + /// Returns a iterator that within the range, this iterator will yield all versions for all entries in the range less or equal to the given version. + #[inline] + pub fn range_all_versions<'a, Q, R>( + &'a self, + version: Version, + range: R, + ) -> AllVersionsIter<'a, Allocator, C, Q, R> + where + Q: ?Sized + Borrow<[u8]>, + R: RangeBounds + 'a, + { + self.0.range_all_versions(version, range) + } +} + +impl SkipMap { + /// Upserts a new key-value pair if it does not yet exist, if the key with the given version already exists, it will update the value. + /// Unlike [`get_or_insert`](SkipMap::get_or_insert), this method will update the value if the key with the given version already exists. + /// + /// - Returns `Ok(None)` if the key was successfully inserted. + /// - Returns `Ok(Some(old))` if the key with the given version already exists and the value is successfully updated. + #[inline] + pub fn insert<'a, 'b: 'a>( + &'a self, + version: Version, + key: &'b [u8], + value: &'b [u8], + trailer: T, + ) -> Result>>, Error> { + self.insert_at_height(version, self.random_height(), key, value, trailer) + } + + /// Upserts a new key-value pair at the given height if it does not yet exist, if the key with the given version already exists, it will update the value. + /// Unlike [`get_or_insert_at_height`](SkipMap::get_or_insert_at_height), this method will update the value if the key with the given version already exists. + /// + /// - Returns `Ok(None)` if the key was successfully inserted. + /// - Returns `Ok(Some(old))` if the key with the given version already exists and the value is successfully updated. 
+ /// + /// # Example + /// + /// ```rust + /// use skl::{unsync::full::SkipMap, Options}; + /// + /// let map = SkipMap::::new(Options::new()).unwrap(); + /// + /// let height = map.random_height(); + /// map.insert_at_height(0, height, b"hello", b"world", 10).unwrap(); + /// ``` + pub fn insert_at_height<'a, 'b: 'a>( + &'a self, + version: Version, + height: Height, + key: &'b [u8], + value: &'b [u8], + trailer: T, + ) -> Result>>, Error> { + self + .0 + .insert_at_height(version, height, key, value, trailer) + } + + /// Upserts a new key if it does not yet exist, if the key with the given version already exists, it will update the value. + /// Unlike [`get_or_insert_with_value_builder`](SkipMap::get_or_insert_with_value_builder), this method will update the value if the key with the given version already exists. + /// + /// This method is useful when you want to insert a key and you know the value size but you do not have the value + /// at this moment. + /// + /// A placeholder will be inserted first, then you will get an [`VacantBuffer`], + /// and you must fill the buffer with bytes later in the closure. + /// + /// - Returns `Ok(None)` if the key was successfully inserted. + /// - Returns `Ok(Some(old))` if the key with the given version already exists and the value is successfully updated. 
+ /// + /// # Example + /// + /// ```rust + /// use skl::{unsync::full::SkipMap, ValueBuilder, Options}; + /// + /// struct Person { + /// id: u32, + /// name: String, + /// } + /// + /// impl Person { + /// fn encoded_size(&self) -> usize { + /// 4 + self.name.len() + /// } + /// } + /// + /// + /// let alice = Person { + /// id: 1, + /// name: "Alice".to_string(), + /// }; + /// + /// let encoded_size = alice.encoded_size(); + /// + /// let l = SkipMap::::new(Options::new()).unwrap(); + /// + /// let vb = ValueBuilder::new(encoded_size as u32, |mut val| { + /// val.put_u32_le(alice.id).unwrap(); + /// val.put_slice(alice.name.as_bytes()).unwrap(); + /// Ok(()) + /// }); + /// + /// l.insert_with_value_builder::(1, b"alice", vb, 10) + /// .unwrap(); + /// ``` + #[inline] + pub fn insert_with_value_builder<'a, 'b: 'a, E>( + &'a self, + version: Version, + key: &'b [u8], + value_builder: ValueBuilder) -> Result<(), E>>, + trailer: T, + ) -> Result>>, Either> { + self.insert_at_height_with_value_builder( + version, + self.random_height(), + key, + value_builder, + trailer, + ) + } + + /// Upserts a new key if it does not yet exist, if the key with the given version already exists, it will update the value. + /// Unlike [`get_or_insert_at_height_with_value_builder`](SkipMap::get_or_insert_at_height_with_value_builder), this method will update the value if the key with the given version already exists. + /// + /// This method is useful when you want to insert a key and you know the value size but you do not have the value + /// at this moment. + /// + /// A placeholder will be inserted first, then you will get an [`VacantBuffer`], + /// and you must fill the buffer with bytes later in the closure. + /// + /// - Returns `Ok(None)` if the key was successfully inserted. + /// - Returns `Ok(Some(old))` if the key with the given version already exists and the value is successfully updated. 
+ /// + /// # Example + /// + /// ```rust + /// use skl::{unsync::full::SkipMap, ValueBuilder, Options}; + /// + /// struct Person { + /// id: u32, + /// name: String, + /// } + /// + /// impl Person { + /// fn encoded_size(&self) -> usize { + /// 4 + self.name.len() + /// } + /// } + /// + /// + /// let alice = Person { + /// id: 1, + /// name: "Alice".to_string(), + /// }; + /// + /// let encoded_size = alice.encoded_size(); + /// + /// let l = SkipMap::::new(Options::new()).unwrap(); + /// + /// let vb = ValueBuilder::new(encoded_size as u32, |mut val| { + /// val.put_u32_le(alice.id).unwrap(); + /// val.put_slice(alice.name.as_bytes()).unwrap(); + /// Ok(()) + /// }); + /// + /// let height = l.random_height(); + /// l.insert_at_height_with_value_builder::(1, height, b"alice", vb, 10) + /// .unwrap(); + /// ``` + pub fn insert_at_height_with_value_builder<'a, 'b: 'a, E>( + &'a self, + version: Version, + height: Height, + key: &'b [u8], + value_builder: ValueBuilder) -> Result<(), E>>, + trailer: T, + ) -> Result>>, Either> { + self + .0 + .insert_at_height_with_value_builder(version, height, key, value_builder, trailer) + } + + /// Inserts a new key-value pair if it does not yet exist. + /// + /// Unlike [`insert`](SkipMap::insert), this method will not update the value if the key with the given version already exists. + /// + /// - Returns `Ok(None)` if the key was successfully get_or_inserted. + /// - Returns `Ok(Some(_))` if the key with the given version already exists. + #[inline] + pub fn get_or_insert<'a, 'b: 'a>( + &'a self, + version: Version, + key: &'b [u8], + value: &'b [u8], + trailer: T, + ) -> Result>>, Error> { + self.get_or_insert_at_height(version, self.random_height(), key, value, trailer) + } + + /// Inserts a new key-value pair at height if it does not yet exist. + /// + /// Unlike [`insert_at_height`](SkipMap::insert_at_height), this method will not update the value if the key with the given version already exists. 
+ /// + /// - Returns `Ok(None)` if the key was successfully get_or_inserted. + /// - Returns `Ok(Some(_))` if the key with the given version already exists. + pub fn get_or_insert_at_height<'a, 'b: 'a>( + &'a self, + version: Version, + height: Height, + key: &'b [u8], + value: &'b [u8], + trailer: T, + ) -> Result>>, Error> { + self + .0 + .get_or_insert_at_height(version, height, key, value, trailer) + } + + /// Inserts a new key if it does not yet exist. + /// + /// Unlike [`insert_with_value_builder`](SkipMap::insert_with_value_builder), this method will not update the value if the key with the given version already exists. + /// + /// This method is useful when you want to get_or_insert a key and you know the value size but you do not have the value + /// at this moment. + /// + /// A placeholder will be inserted first, then you will get an [`VacantBuffer`], + /// and you must fill the buffer with bytes later in the closure. + /// + /// - Returns `Ok(None)` if the key was successfully get_or_inserted. + /// - Returns `Ok(Some(_))` if the key with the given version already exists. 
+ /// + /// # Example + /// + /// ```rust + /// use skl::{unsync::full::SkipMap, ValueBuilder, Options}; + /// + /// struct Person { + /// id: u32, + /// name: String, + /// } + /// + /// impl Person { + /// fn encoded_size(&self) -> usize { + /// 4 + self.name.len() + /// } + /// } + /// + /// + /// let alice = Person { + /// id: 1, + /// name: "Alice".to_string(), + /// }; + /// + /// let encoded_size = alice.encoded_size(); + /// + /// let l = SkipMap::::new(Options::new()).unwrap(); + /// + /// let vb = ValueBuilder::new(encoded_size as u32, |mut val| { + /// val.put_u32_le(alice.id).unwrap(); + /// val.put_slice(alice.name.as_bytes()).unwrap(); + /// Ok(()) + /// }); + /// l.get_or_insert_with_value_builder::(1, b"alice", vb, 10) + /// .unwrap(); + /// ``` + #[inline] + pub fn get_or_insert_with_value_builder<'a, 'b: 'a, E>( + &'a self, + version: Version, + key: &'b [u8], + value_builder: ValueBuilder) -> Result<(), E>>, + trailer: T, + ) -> Result>>, Either> { + self.get_or_insert_at_height_with_value_builder( + version, + self.random_height(), + key, + value_builder, + trailer, + ) + } + + /// Inserts a new key if it does not yet exist. + /// + /// Unlike [`insert_at_height_with_value_builder`](SkipMap::insert_at_height_with_value_builder), this method will not update the value if the key with the given version already exists. + /// + /// This method is useful when you want to get_or_insert a key and you know the value size but you do not have the value + /// at this moment. + /// + /// A placeholder will be inserted first, then you will get an [`VacantBuffer`], + /// and you must fill the buffer with bytes later in the closure. + /// + /// - Returns `Ok(None)` if the key was successfully get_or_inserted. + /// - Returns `Ok(Some(_))` if the key with the given version already exists. 
+ /// + /// # Example + /// + /// ```rust + /// use skl::{unsync::full::SkipMap, ValueBuilder, Options}; + /// + /// struct Person { + /// id: u32, + /// name: String, + /// } + /// + /// impl Person { + /// fn encoded_size(&self) -> usize { + /// 4 + self.name.len() + /// } + /// } + /// + /// + /// let alice = Person { + /// id: 1, + /// name: "Alice".to_string(), + /// }; + /// + /// let encoded_size = alice.encoded_size(); + /// + /// let l = SkipMap::::new(Options::new()).unwrap(); + /// + /// let vb = ValueBuilder::new(encoded_size as u32, |mut val| { + /// val.put_u32_le(alice.id).unwrap(); + /// val.put_slice(alice.name.as_bytes()).unwrap(); + /// Ok(()) + /// }); + /// + /// let height = l.random_height(); + /// l.get_or_insert_at_height_with_value_builder::(1, height, b"alice", vb, 10) + /// .unwrap(); + /// ``` + pub fn get_or_insert_at_height_with_value_builder<'a, 'b: 'a, E>( + &'a self, + version: Version, + height: Height, + key: &'b [u8], + value_builder: ValueBuilder) -> Result<(), E>>, + trailer: T, + ) -> Result>>, Either> { + self + .0 + .get_or_insert_at_height_with_value_builder(version, height, key, value_builder, trailer) + } + + /// Upserts a new key if it does not yet exist, if the key with the given version already exists, it will update the value. + /// Unlike [`get_or_insert_with_builders`](SkipMap::get_or_insert_with_builders), this method will update the value if the key with the given version already exists. + /// + /// This method is useful when you want to insert a key and you know the key size and value size but you do not have the key and value + /// at this moment. + /// + /// A placeholder will be inserted first, then you will get an [`VacantBuffer`], + /// and you must fill the buffer with bytes later in the closure. + /// + /// - Returns `Ok(None)` if the key was successfully inserted. + /// - Returns `Ok(Some(old))` if the key with the given version already exists and the value is successfully updated. 
+ /// + /// # Example + /// + /// ```rust + /// use skl::{unsync::full::SkipMap, KeyBuilder, ValueBuilder, Options}; + /// + /// struct Person { + /// id: u32, + /// name: String, + /// } + /// + /// impl Person { + /// fn encoded_size(&self) -> usize { + /// 4 + self.name.len() + /// } + /// } + /// + /// + /// let alice = Person { + /// id: 1, + /// name: "Alice".to_string(), + /// }; + /// + /// let encoded_size = alice.encoded_size(); + /// + /// let l = SkipMap::new(Options::new()).unwrap(); + /// + /// let kb = KeyBuilder::new(5u8.into(), |mut key| { + /// key.put_slice(b"alice").unwrap(); + /// Ok(()) + /// }); + /// + /// let vb = ValueBuilder::new(encoded_size as u32, |mut val| { + /// val.put_u32_le(alice.id).unwrap(); + /// val.put_slice(alice.name.as_bytes()).unwrap(); + /// Ok(()) + /// }); + /// + /// l.insert_with_builders::<(), ()>(1, kb, vb, 10) + /// .unwrap(); + /// ``` + #[inline] + pub fn insert_with_builders<'a, KE, VE>( + &'a self, + version: Version, + key_builder: KeyBuilder) -> Result<(), KE>>, + value_builder: ValueBuilder) -> Result<(), VE>>, + trailer: T, + ) -> Result>>, Among> { + self.insert_at_height_with_builders( + version, + self.random_height(), + key_builder, + value_builder, + trailer, + ) + } + + /// Upserts a new key if it does not yet exist, if the key with the given version already exists, it will update the value. + /// Unlike [`get_or_insert_with_builders`](SkipMap::get_or_insert_with_builders), this method will update the value if the key with the given version already exists. + /// + /// This method is useful when you want to insert a key and you know the key size and value size but you do not have the key and value + /// at this moment. + /// + /// A placeholder will be inserted first, then you will get an [`VacantBuffer`], + /// and you must fill the buffer with bytes later in the closure. + /// + /// - Returns `Ok(None)` if the key was successfully inserted. 
+ /// - Returns `Ok(Some(old))` if the key with the given version already exists and the value is successfully updated. + /// + /// # Example + /// + /// ```rust + /// use skl::{unsync::full::SkipMap, KeyBuilder, ValueBuilder, Options}; + /// + /// struct Person { + /// id: u32, + /// name: String, + /// } + /// + /// impl Person { + /// fn encoded_size(&self) -> usize { + /// 4 + self.name.len() + /// } + /// } + /// + /// + /// let alice = Person { + /// id: 1, + /// name: "Alice".to_string(), + /// }; + /// + /// let encoded_size = alice.encoded_size(); + /// + /// let l = SkipMap::::new(Options::new()).unwrap(); + /// + /// let kb = KeyBuilder::new(5u8.into(), |mut key| { + /// key.put_slice(b"alice").unwrap(); + /// Ok(()) + /// }); + /// + /// let vb = ValueBuilder::new(encoded_size as u32, |mut val| { + /// val.put_u32_le(alice.id).unwrap(); + /// val.put_slice(alice.name.as_bytes()).unwrap(); + /// Ok(()) + /// }); + /// + /// let height = l.random_height(); + /// l.insert_at_height_with_builders::<(), ()>(1, height, kb, vb, 10) + /// .unwrap(); + /// ``` + pub fn insert_at_height_with_builders<'a, KE, VE>( + &'a self, + version: Version, + height: Height, + key_builder: KeyBuilder) -> Result<(), KE>>, + value_builder: ValueBuilder) -> Result<(), VE>>, + trailer: T, + ) -> Result>>, Among> { + self + .0 + .insert_at_height_with_builders(version, height, key_builder, value_builder, trailer) + } + + /// Inserts a new key if it does not yet exist. + /// + /// Unlike [`insert_with_builders`](SkipMap::insert_with_builders), this method will not update the value if the key with the given version already exists. + /// + /// This method is useful when you want to get_or_insert a key and you know the value size but you do not have the value + /// at this moment. + /// + /// A placeholder will be inserted first, then you will get an [`VacantBuffer`], + /// and you must fill the buffer with bytes later in the closure. 
+ /// + /// # Example + /// + /// ```rust + /// use skl::{unsync::full::SkipMap, KeyBuilder, ValueBuilder, Options}; + /// + /// struct Person { + /// id: u32, + /// name: String, + /// } + /// + /// impl Person { + /// fn encoded_size(&self) -> usize { + /// 4 + self.name.len() + /// } + /// } + /// + /// + /// let alice = Person { + /// id: 1, + /// name: "Alice".to_string(), + /// }; + /// + /// let encoded_size = alice.encoded_size(); + /// + /// let l = SkipMap::::new(Options::new()).unwrap(); + /// + /// let kb = KeyBuilder::new(5u8.into(), |mut key| { + /// key.put_slice(b"alice").unwrap(); + /// Ok(()) + /// }); + /// + /// let vb = ValueBuilder::new(encoded_size as u32, |mut val| { + /// val.put_u32_le(alice.id).unwrap(); + /// val.put_slice(alice.name.as_bytes()).unwrap(); + /// Ok(()) + /// }); + /// + /// l.get_or_insert_with_builders::<(), ()>(1, kb, vb, 10) + /// .unwrap(); + /// ``` + #[inline] + pub fn get_or_insert_with_builders<'a, KE, VE>( + &'a self, + version: Version, + key_builder: KeyBuilder) -> Result<(), KE>>, + value_builder: ValueBuilder) -> Result<(), VE>>, + trailer: T, + ) -> Result>>, Among> { + self.get_or_insert_at_height_with_builders( + version, + self.random_height(), + key_builder, + value_builder, + trailer, + ) + } + + /// Inserts a new key if it does not yet exist. + /// + /// Unlike [`insert_at_height_with_builders`](SkipMap::insert_at_height_with_builders), this method will not update the value if the key with the given version already exists. + /// + /// This method is useful when you want to get_or_insert a key and you know the value size but you do not have the value + /// at this moment. + /// + /// A placeholder will be inserted first, then you will get an [`VacantBuffer`], + /// and you must fill the buffer with bytes later in the closure. 
+ /// + /// # Example + /// + /// ```rust + /// use skl::{unsync::full::SkipMap, KeyBuilder, ValueBuilder, Options}; + /// + /// struct Person { + /// id: u32, + /// name: String, + /// } + /// + /// impl Person { + /// fn encoded_size(&self) -> usize { + /// 4 + self.name.len() + /// } + /// } + /// + /// + /// let alice = Person { + /// id: 1, + /// name: "Alice".to_string(), + /// }; + /// + /// let encoded_size = alice.encoded_size(); + /// + /// let l = SkipMap::::new(Options::new()).unwrap(); + /// + /// let kb = KeyBuilder::new(5u8.into(), |mut key| { + /// key.put_slice(b"alice").unwrap(); + /// Ok(()) + /// }); + /// + /// let vb = ValueBuilder::new(encoded_size as u32, |mut val| { + /// val.put_u32_le(alice.id).unwrap(); + /// val.put_slice(alice.name.as_bytes()).unwrap(); + /// Ok(()) + /// }); + /// + /// let height = l.random_height(); + /// l.get_or_insert_at_height_with_builders::<(), ()>(1, height, kb, vb, 10) + /// .unwrap(); + /// ``` + pub fn get_or_insert_at_height_with_builders<'a, KE, VE>( + &'a self, + version: Version, + height: Height, + key_builder: KeyBuilder) -> Result<(), KE>>, + value_builder: ValueBuilder) -> Result<(), VE>>, + trailer: T, + ) -> Result>>, Among> { + self.0.get_or_insert_at_height_with_builders( + version, + height, + key_builder, + value_builder, + trailer, + ) + } + + /// Removes the key-value pair if it exists. A CAS operation will be used to ensure the operation is atomic. + #[inline] + pub fn remove<'a, 'b: 'a>( + &'a self, + version: Version, + key: &'b [u8], + trailer: T, + ) -> Result>>, Error> { + self.remove_at_height( + version, + self.random_height(), + key, + trailer, + Ordering::Relaxed, + Ordering::Relaxed, + ) + } + + /// Removes the key-value pair if it exists. A CAS operation will be used to ensure the operation is atomic. 
+ pub fn remove_at_height<'a, 'b: 'a>( + &'a self, + version: Version, + height: Height, + key: &'b [u8], + trailer: T, + success: Ordering, + failure: Ordering, + ) -> Result>>, Error> { + self + .0 + .compare_remove_at_height(version, height, key, trailer, success, failure) + } + + /// Gets or removes the key-value pair if it exists. + /// Unlike [`remove`](SkipMap::remove), this method will not remove the value if the key with the given version already exists. + /// + /// - Returns `Ok(None)` if the key does not exist. + /// - Returns `Ok(Some(old))` if the key with the given version already exists. + #[inline] + pub fn get_or_remove<'a, 'b: 'a>( + &'a self, + version: Version, + key: &'b [u8], + trailer: T, + ) -> Result>>, Error> { + self.get_or_remove_at_height(version, self.random_height(), key, trailer) + } + + /// Gets or removes the key-value pair if it exists. + /// Unlike [`remove_at_height`](SkipMap::remove_at_height), this method will not remove the value if the key with the given version already exists. + /// + /// - Returns `Ok(None)` if the key does not exist. + /// - Returns `Ok(Some(old))` if the key with the given version already exists. + /// + /// # Example + /// + /// ```rust + /// use skl::{unsync::full::SkipMap, Options}; + /// + /// let map = SkipMap::::new(Options::new()).unwrap(); + /// + /// map.insert(0, b"hello", b"world", 10).unwrap(); + /// + /// let height = map.random_height(); + /// map.get_or_remove_at_height(0, height, b"hello", 10).unwrap(); + /// ``` + pub fn get_or_remove_at_height<'a, 'b: 'a>( + &'a self, + version: Version, + height: Height, + key: &'b [u8], + trailer: T, + ) -> Result>>, Error> { + self + .0 + .get_or_remove_at_height(version, height, key, trailer) + } + + /// Gets or removes the key-value pair if it exists. + /// Unlike [`compare_remove`](SkipMap::compare_remove), this method will not remove the value if the key with the given version already exists. 
+ /// + /// - Returns `Ok(None)` if the key does not exist. + /// - Returns `Ok(Some(old))` if the key with the given version already exists. + /// + /// This method is useful when you want to get_or_remove a key and you know the key size but you do not have the key + /// at this moment. + /// + /// A placeholder will be inserted first, then you will get an [`VacantBuffer`], + /// and you must fill the buffer with bytes later in the closure. + /// + /// # Example + /// + /// ```rust + /// use skl::{unsync::full::SkipMap, KeyBuilder, Options}; + /// + /// struct Person { + /// id: u32, + /// name: String, + /// } + /// + /// impl Person { + /// fn encoded_size(&self) -> usize { + /// 4 + self.name.len() + /// } + /// } + /// + /// + /// let alice = Person { + /// id: 1, + /// name: "Alice".to_string(), + /// }; + /// + /// let encoded_size = alice.encoded_size(); + /// + /// let l = SkipMap::::new(Options::new()).unwrap(); + /// + /// let kb = KeyBuilder::new(5u8.into(), |mut key| { + /// key.put_slice(b"alice").unwrap(); + /// Ok(()) + /// }); + /// l.get_or_remove_with_builder::(1, kb, 10) + /// .unwrap(); + /// ``` + pub fn get_or_remove_with_builder<'a, 'b: 'a, E>( + &'a self, + version: Version, + key_builder: KeyBuilder) -> Result<(), E>>, + trailer: T, + ) -> Result>>, Either> { + self.get_or_remove_at_height_with_builder(version, self.random_height(), key_builder, trailer) + } + + /// Gets or removes the key-value pair if it exists. + /// Unlike [`compare_remove_at_height`](SkipMap::compare_remove_at_height), this method will not remove the value if the key with the given version already exists. + /// + /// - Returns `Ok(None)` if the key does not exist. + /// - Returns `Ok(Some(old))` if the key with the given version already exists. + /// + /// This method is useful when you want to get_or_remove a key and you know the key size but you do not have the key + /// at this moment. 
+ /// + /// A placeholder will be inserted first, then you will get an [`VacantBuffer`], + /// and you must fill the buffer with bytes later in the closure. + /// + /// # Example + /// + /// ```rust + /// use skl::{unsync::full::SkipMap, KeyBuilder, Options}; + /// + /// struct Person { + /// id: u32, + /// name: String, + /// } + /// + /// impl Person { + /// fn encoded_size(&self) -> usize { + /// 4 + self.name.len() + /// } + /// } + /// + /// + /// let alice = Person { + /// id: 1, + /// name: "Alice".to_string(), + /// }; + /// + /// let encoded_size = alice.encoded_size(); + /// + /// let l = SkipMap::::new(Options::new()).unwrap(); + /// + /// let kb = KeyBuilder::new(5u8.into(), |mut key| { + /// key.put_slice(b"alice").unwrap(); + /// Ok(()) + /// }); + /// let height = l.random_height(); + /// l.get_or_remove_at_height_with_builder::(1, height, kb, 10) + /// .unwrap(); + /// ``` + pub fn get_or_remove_at_height_with_builder<'a, 'b: 'a, E>( + &'a self, + version: Version, + height: Height, + key_builder: KeyBuilder) -> Result<(), E>>, + trailer: T, + ) -> Result>>, Either> { + self + .0 + .get_or_remove_at_height_with_builder(version, height, key_builder, trailer) + } +} diff --git a/src/unsync/map.rs b/src/unsync/map.rs new file mode 100644 index 0000000..4a2b9a8 --- /dev/null +++ b/src/unsync/map.rs @@ -0,0 +1,1205 @@ +use core::borrow::Borrow; + +use super::*; + +use among::Among; +use base::{EntryRef, Iter}; + +type Allocator = GenericAllocator; +type SkipList = base::SkipList; + +node_pointer!(RawNode); + +/// A raw node that does not support version and trailer. +#[repr(C)] +pub struct RawNode { + // A byte slice is 24 bytes. We are trying to save space here. + /// Multiple parts of the value are encoded as a single u64 so that it + /// can be atomically loaded and stored: + /// value offset: u32 (bits 0-31) + /// value size : u32 (bits 32-63) + value: UnsyncValuePointer, + // Immutable. No need to lock to access key. 
+ key_offset: u32, + // Immutable. No need to lock to access key. + key_size_and_height: u32, + // ** DO NOT REMOVE BELOW COMMENT** + // The below field will be attached after the node, have to comment out + // this field, because each node will not use the full height, the code will + // not allocate the full size of the tower. + // + // Most nodes do not need to use the full height of the tower, since the + // probability of each successive level decreases exponentially. Because + // these elements are never accessed, they do not need to be allocated. + // Therefore, when a node is allocated in the arena, its memory footprint + // is deliberately truncated to not include unneeded tower elements. + // + // All accesses to elements should use CAS operations, with no need to lock. + // pub(super) tower: [Link; self.opts.max_height], +} + +impl core::fmt::Debug for RawNode { + fn fmt(&self, f: &mut core::fmt::Formatter<'_>) -> core::fmt::Result { + let (key_size, height) = decode_key_size_and_height(self.key_size_and_height); + let (value_offset, value_size) = self.value.load(); + f.debug_struct("Node") + .field("value_offset", &value_offset) + .field("value_size", &value_size) + .field("key_offset", &self.key_offset) + .field("key_size", &key_size) + .field("height", &height) + .finish() + } +} + +impl Node for RawNode { + type Link = Link; + + type Trailer = (); + + type ValuePointer = UnsyncValuePointer; + + type Pointer = NodePointer; + + fn full(value_offset: u32, max_height: u8) -> Self { + Self { + value: UnsyncValuePointer::new(value_offset, 0), + key_offset: 0, + key_size_and_height: encode_key_size_and_height(0, max_height), + } + } + + #[inline] + fn value_pointer(&self) -> &Self::ValuePointer { + &self.value + } + + #[inline] + fn set_value_pointer(&mut self, offset: u32, size: u32) { + self.value = UnsyncValuePointer::new(offset, size); + } + + #[inline] + fn clear_value( + &self, + arena: &A, + success: Ordering, + failure: Ordering, + ) -> Result<(), 
(u32, u32)> { + self + .value + .compare_remove(success, failure) + .map(|(_, old_len)| { + if old_len != REMOVE { + arena.increase_discarded(old_len); + } + }) + } + + #[inline] + fn set_key_size_and_height(&mut self, key_size_and_height: u32) { + self.key_size_and_height = key_size_and_height; + } + + #[inline] + fn set_key_offset(&mut self, key_offset: u32) { + self.key_offset = key_offset; + } + + #[inline] + fn version(&self) -> Version { + 0 + } + + #[inline] + fn set_version(&mut self, _: Version) {} + + #[inline] + fn key_size_and_height(&self) -> u32 { + self.key_size_and_height + } + + #[inline] + fn key_offset(&self) -> u32 { + self.key_offset + } +} + +/// A fast, ARENA based `SkipMap` that supports forward and backward iteration. +/// +/// If you want to use in concurrent environment, you can use [`sync::map::SkipMap`]. +#[repr(transparent)] +pub struct SkipMap(SkipList); + +impl Clone for SkipMap { + #[inline] + fn clone(&self) -> Self { + Self(self.0.clone()) + } +} + +impl SkipMap { + /// Create a new skipmap with default options. + /// + /// **Note:** The capacity stands for how many memory allocated, + /// it does not mean the skiplist can store `cap` entries. + /// + /// + /// + /// **What the difference between this method and [`SkipMap::mmap_anon`]?** + /// + /// 1. This method will use an `AlignedVec` ensures we are working within Rust's memory safety guarantees. + /// Even if we are working with raw pointers with `Box::into_raw`, + /// the backend ARENA will reclaim the ownership of this memory by converting it back to a `Box` + /// when dropping the backend ARENA. Since `AlignedVec` uses heap memory, the data might be more cache-friendly, + /// especially if you're frequently accessing or modifying it. + /// + /// 2. Where as [`SkipMap::mmap_anon`] will use mmap anonymous to require memory from the OS. 
+ /// If you require very large contiguous memory regions, `mmap` might be more suitable because + /// it's more direct in requesting large chunks of memory from the OS. + /// + /// [`SkipMap::mmap_anon`]: #method.mmap_anon + pub fn new(opts: Options) -> Result { + Self::with_comparator(opts, Ascend) + } + + /// Create a new memory map file backed with default options. + /// + /// **Note:** The capacity stands for how many memory mmaped, + /// it does not mean the skipmap can store `cap` entries. + /// + /// # Safety + /// - If trying to reopens a skiplist, then the trailer type must be the same as the previous one + #[cfg(all(feature = "memmap", not(target_family = "wasm")))] + #[cfg_attr(docsrs, doc(cfg(all(feature = "memmap", not(target_family = "wasm")))))] + pub unsafe fn map_mut>( + path: P, + opts: Options, + open_options: OpenOptions, + mmap_options: MmapOptions, + ) -> std::io::Result { + Self::map_mut_with_comparator(path, opts, open_options, mmap_options, Ascend) + } + + /// Open an exist file and mmap it to create skipmap. + /// + /// # Safety + /// - If trying to reopens a skiplist, then the trailer type must be the same as the previous one + #[cfg(all(feature = "memmap", not(target_family = "wasm")))] + #[cfg_attr(docsrs, doc(cfg(all(feature = "memmap", not(target_family = "wasm")))))] + pub unsafe fn map>( + path: P, + opts: Options, + open_options: OpenOptions, + mmap_options: MmapOptions, + ) -> std::io::Result { + Self::map_with_comparator(path, opts, open_options, mmap_options, Ascend) + } + + /// Create a new memory map backed skipmap with default options. + /// + /// **What the difference between this method and [`SkipMap::new`]?** + /// + /// 1. This method will use mmap anonymous to require memory from the OS directly. + /// If you require very large contiguous memory regions, this method might be more suitable because + /// it's more direct in requesting large chunks of memory from the OS. + /// + /// 2. 
Where as [`SkipMap::new`] will use an `AlignedVec` ensures we are working within Rust's memory safety guarantees. + /// Even if we are working with raw pointers with `Box::into_raw`, + /// the backend ARENA will reclaim the ownership of this memory by converting it back to a `Box` + /// when dropping the backend ARENA. Since `AlignedVec` uses heap memory, the data might be more cache-friendly, + /// especially if you're frequently accessing or modifying it. + /// + /// [`SkipMap::new`]: #method.new + #[cfg(all(feature = "memmap", not(target_family = "wasm")))] + #[cfg_attr(docsrs, doc(cfg(all(feature = "memmap", not(target_family = "wasm")))))] + pub fn map_anon(opts: Options, mmap_options: MmapOptions) -> std::io::Result { + Self::map_anon_with_comparator(opts, mmap_options, Ascend) + } +} + +impl SkipMap { + /// Returns the reserved bytes of the allocator specified in the [`ArenaOptions::with_reserved`]. + #[inline] + pub fn reserved_slice(&self) -> &[u8] { + self.0.arena.reserved_slice() + } + + /// Returns the mutable reserved bytes of the allocator specified in the [`ArenaOptions::with_reserved`]. + /// + /// # Safety + /// - The caller need to make sure there is no data-race + /// + /// # Panics + /// - If in read-only mode, it will panic. + #[inline] + #[allow(clippy::mut_from_ref)] + pub unsafe fn reserved_slice_mut(&self) -> &mut [u8] { + self.0.arena.reserved_slice_mut() + } + + /// Returns the path of the mmap file, only returns `Some` when the ARENA is backed by a mmap file. + #[cfg(all(feature = "memmap", not(target_family = "wasm")))] + #[cfg_attr(docsrs, doc(cfg(all(feature = "memmap", not(target_family = "wasm")))))] + #[inline] + pub fn path(&self) -> Option<&std::rc::Rc> { + self.0.arena.path() + } + + /// Sets remove on drop, only works on mmap with a file backend. + /// + /// Default is `false`. 
+ /// + /// > **WARNING:** Once set to `true`, the backed file will be removed when the allocator is dropped, even though the file is opened in + /// > read-only mode. + #[cfg(all(feature = "memmap", not(target_family = "wasm")))] + #[cfg_attr(docsrs, doc(cfg(all(feature = "memmap", not(target_family = "wasm")))))] + #[inline] + pub fn remove_on_drop(&self, val: bool) { + self.0.remove_on_drop(val); + } + + /// Returns the offset of the data section in the `SkipMap`. + /// + /// By default, `SkipMap` will allocate meta, head node, and tail node in the ARENA, + /// and the data section will be allocated after the tail node. + /// + /// This method will return the offset of the data section in the ARENA. + #[inline] + pub const fn data_offset(&self) -> usize { + self.0.data_offset() + } + + /// Returns the magic version number of the [`SkipMap`]. + /// + /// This value can be used to check the compatibility for application using [`SkipMap`]. + #[inline] + pub fn magic_version(&self) -> u16 { + self.0.magic_version() + } + + /// Returns the height of the highest tower within any of the nodes that + /// have ever been allocated as part of this skiplist. + #[inline] + pub fn height(&self) -> u8 { + self.0.height() + } + + /// Returns the number of remaining bytes can be allocated by the arena. + #[inline] + pub fn remaining(&self) -> usize { + self.0.remaining() + } + + /// Returns the number of bytes that have allocated from the arena. + #[inline] + pub fn allocated(&self) -> usize { + self.0.allocated() + } + + /// Returns the capacity of the arena. + #[inline] + pub fn capacity(&self) -> usize { + self.0.capacity() + } + + /// Returns the number of entries in the skipmap. + #[inline] + pub fn len(&self) -> usize { + self.0.len() + } + + /// Returns true if the skipmap is empty. + #[inline] + pub fn is_empty(&self) -> bool { + self.0.is_empty() + } + + /// Gets the number of pointers to this `SkipMap` similar to [`Arc::strong_count`](std::sync::Arc::strong_count). 
+ #[inline] + pub fn refs(&self) -> usize { + self.0.refs() + } + + /// Returns how many bytes are discarded by the ARENA. + #[inline] + pub fn discarded(&self) -> u32 { + self.0.discarded() + } + + /// Returns the comparator used to compare keys. + #[inline] + pub const fn comparator(&self) -> &C { + self.0.comparator() + } + + /// Returns a random generated height. + /// + /// This method is useful when you want to check if the underlying allocator can allocate a node. + /// + /// # Example + /// + /// ```rust + /// use skl::{unsync::map::SkipMap, Options, Ascend}; + /// + /// let map = SkipMap::new(Options::new()).unwrap(); + /// let height = map.random_height(); + /// + /// let needed = SkipMap::::estimated_node_size(height, b"k1".len(), b"k2".len()); + /// ``` + #[inline] + pub fn random_height(&self) -> Height { + self.0.random_height() + } + + /// Returns the estimated size of a node with the given height and key/value sizes. + /// + /// **Note**: The returned size is only an estimate and may not be accurate, which means that the actual size is less than or equal to the returned size. + #[inline] + pub fn estimated_node_size(height: Height, key_size: usize, value_size: usize) -> usize { + SkipList::::estimated_node_size(height, key_size, value_size) + } + + /// Like [`SkipMap::new`], but with a custom [`Comparator`]. + #[inline] + pub fn with_comparator(opts: Options, cmp: C) -> Result { + SkipList::with_comparator(opts, cmp).map(Self) + } + + /// Like [`SkipMap::map_mut`], but with a custom [`Comparator`]. 
+ /// + /// # Safety + /// - If trying to reopens a skiplist, then the trailer type must be the same as the previous one + #[cfg(all(feature = "memmap", not(target_family = "wasm")))] + #[cfg_attr(docsrs, doc(cfg(all(feature = "memmap", not(target_family = "wasm")))))] + #[inline] + pub unsafe fn map_mut_with_comparator>( + path: P, + opts: Options, + open_options: OpenOptions, + mmap_options: MmapOptions, + cmp: C, + ) -> std::io::Result { + SkipList::map_mut_with_comparator(path, opts, open_options, mmap_options, cmp).map(Self) + } + + /// Like [`SkipMap::map_mut`], but with [`Options`], a custom [`Comparator`] and a [`PathBuf`](std::path::PathBuf) builder. + /// + /// # Safety + /// - If trying to reopens a skiplist, then the trailer type must be the same as the previous one + #[cfg(all(feature = "memmap", not(target_family = "wasm")))] + #[cfg_attr(docsrs, doc(cfg(all(feature = "memmap", not(target_family = "wasm")))))] + #[inline] + pub unsafe fn map_mut_with_comparator_and_path_builder( + path_builder: PB, + opts: Options, + open_options: OpenOptions, + mmap_options: MmapOptions, + cmp: C, + ) -> Result> + where + PB: FnOnce() -> Result, + { + SkipList::map_mut_with_comparator_and_path_builder( + path_builder, + opts, + open_options, + mmap_options, + cmp, + ) + .map(Self) + } + + /// Like [`SkipMap::map`], but with a custom [`Comparator`]. 
+ /// + /// # Safety + /// - If trying to reopens a skiplist, then the trailer type must be the same as the previous one + #[cfg(all(feature = "memmap", not(target_family = "wasm")))] + #[cfg_attr(docsrs, doc(cfg(all(feature = "memmap", not(target_family = "wasm")))))] + #[inline] + pub unsafe fn map_with_comparator>( + path: P, + opts: Options, + open_options: OpenOptions, + mmap_options: MmapOptions, + cmp: C, + ) -> std::io::Result { + SkipList::map_with_comparator(path, opts, open_options, mmap_options, cmp).map(Self) + } + + /// Like [`SkipMap::map`], but with a custom [`Comparator`] and a [`PathBuf`](std::path::PathBuf) builder. + /// + /// # Safety + /// - If trying to reopens a skiplist, then the trailer type must be the same as the previous one + #[cfg(all(feature = "memmap", not(target_family = "wasm")))] + #[cfg_attr(docsrs, doc(cfg(all(feature = "memmap", not(target_family = "wasm")))))] + #[inline] + pub unsafe fn map_with_comparator_and_path_builder( + path_builder: PB, + opts: Options, + open_options: OpenOptions, + mmap_options: MmapOptions, + cmp: C, + ) -> Result> + where + PB: FnOnce() -> Result, + { + SkipList::map_with_comparator_and_path_builder( + path_builder, + opts, + open_options, + mmap_options, + cmp, + ) + .map(Self) + } + + /// Like [`SkipMap::map_anon`], but with a custom [`Comparator`]. + #[cfg(all(feature = "memmap", not(target_family = "wasm")))] + #[cfg_attr(docsrs, doc(cfg(all(feature = "memmap", not(target_family = "wasm")))))] + #[inline] + pub fn map_anon_with_comparator( + opts: Options, + mmap_options: MmapOptions, + cmp: C, + ) -> std::io::Result { + SkipList::map_anon_with_comparator(opts, mmap_options, cmp).map(Self) + } + + /// Clear the skiplist to empty and re-initialize. + /// + /// # Safety + /// - The current pointers get from the ARENA cannot be used anymore after calling this method. + /// - This method is not thread-safe. 
+ /// + /// # Example + /// + /// Undefine behavior: + /// + /// ```ignore + /// let map = SkipMap::new(Options::new()).unwrap(); + /// + /// map.insert(1u8, b"hello", b"world").unwrap(); + /// + /// let data = map.get(b"hello").unwrap(); + /// + /// map.clear().unwrap(); + /// + /// let w = data[0]; // undefined behavior + /// ``` + pub unsafe fn clear(&mut self) -> Result<(), Error> { + self.0.clear() + } + + /// Flushes outstanding memory map modifications to disk. + /// + /// When this method returns with a non-error result, + /// all outstanding changes to a file-backed memory map are guaranteed to be durably stored. + /// The file's metadata (including last modification timestamp) may not be updated. + #[cfg(all(feature = "memmap", not(target_family = "wasm")))] + #[cfg_attr(docsrs, doc(cfg(all(feature = "memmap", not(target_family = "wasm")))))] + pub fn flush(&self) -> std::io::Result<()> { + self.0.flush() + } + + /// Asynchronously flushes outstanding memory map modifications to disk. + /// + /// This method initiates flushing modified pages to durable storage, but it will not wait for + /// the operation to complete before returning. The file's metadata (including last + /// modification timestamp) may not be updated. + #[cfg(all(feature = "memmap", not(target_family = "wasm")))] + #[cfg_attr(docsrs, doc(cfg(all(feature = "memmap", not(target_family = "wasm")))))] + pub fn flush_async(&self) -> std::io::Result<()> { + self.0.flush_async() + } +} + +impl SkipMap { + /// Returns `true` if the key exists in the map. 
+ /// + /// # Example + /// + /// ```rust + /// use skl::{unsync::map::SkipMap, Options}; + /// + /// let map = SkipMap::new(Options::new()).unwrap(); + /// + /// map.insert(b"hello", b"world").unwrap(); + /// + /// map.remove(b"hello").unwrap(); + /// + /// assert!(!map.contains_key(b"hello")); + /// ``` + #[inline] + pub fn contains_key<'a, 'b: 'a>(&'a self, key: &'b [u8]) -> bool { + self.0.contains_key(MIN_VERSION, key) + } + + /// Returns the first entry in the map. + pub fn first(&self) -> Option> { + self.0.first(MIN_VERSION) + } + + /// Returns the last entry in the map. + pub fn last(&self) -> Option> { + self.0.last(MIN_VERSION) + } + + /// Returns the value associated with the given key, if it exists. + /// + /// # Example + /// + /// ```rust + /// use skl::{unsync::map::SkipMap, Options}; + /// + /// let map = SkipMap::new(Options::new()).unwrap(); + /// + /// map.insert(b"hello", b"world").unwrap(); + /// + /// let ent = map.get(b"hello").unwrap(); + /// assert_eq!(ent.value(), b"world"); + /// + /// map.remove(b"hello").unwrap(); + /// + /// assert!(map.get(b"hello").is_none()); + /// ``` + pub fn get<'a, 'b: 'a>(&'a self, key: &'b [u8]) -> Option> { + self.0.get(MIN_VERSION, key) + } + + /// Returns an `EntryRef` pointing to the highest element whose key is below the given bound. + /// If no such element is found then `None` is returned. + pub fn upper_bound<'a, 'b: 'a>( + &'a self, + upper: Bound<&'b [u8]>, + ) -> Option> { + self.0.upper_bound(MIN_VERSION, upper) + } + + /// Returns an `EntryRef` pointing to the lowest element whose key is above the given bound. + /// If no such element is found then `None` is returned. + pub fn lower_bound<'a, 'b: 'a>( + &'a self, + lower: Bound<&'b [u8]>, + ) -> Option> { + self.0.lower_bound(MIN_VERSION, lower) + } + + /// Returns a new iterator, this iterator will yield the latest version of all entries in the map less or equal to the given version. 
+ #[inline] + pub fn iter(&self) -> Iter { + self.0.iter(MIN_VERSION) + } + + /// Returns a iterator that within the range, this iterator will yield the latest version of all entries in the range less or equal to the given version. + #[inline] + pub fn range<'a, Q, R>(&'a self, range: R) -> Iter<'a, Allocator, C, Q, R> + where + Q: ?Sized + Borrow<[u8]>, + R: RangeBounds + 'a, + { + self.0.range(MIN_VERSION, range) + } +} + +impl SkipMap { + /// Upserts a new key-value pair if it does not yet exist, if the key with the given version already exists, it will update the value. + /// Unlike [`get_or_insert`](SkipMap::get_or_insert), this method will update the value if the key with the given version already exists. + /// + /// - Returns `Ok(None)` if the key was successfully inserted. + /// - Returns `Ok(Some(old))` if the key with the given version already exists and the value is successfully updated. + #[inline] + pub fn insert<'a, 'b: 'a>( + &'a self, + key: &'b [u8], + value: &'b [u8], + ) -> Result>, Error> { + self.0.insert(MIN_VERSION, key, value, ()) + } + + /// Upserts a new key-value pair at the given height if it does not yet exist, if the key with the given version already exists, it will update the value. + /// Unlike [`get_or_insert_at_height`](SkipMap::get_or_insert_at_height), this method will update the value if the key with the given version already exists. + /// + /// - Returns `Ok(None)` if the key was successfully inserted. + /// - Returns `Ok(Some(old))` if the key with the given version already exists and the value is successfully updated. 
+ /// + /// # Example + /// + /// ```rust + /// use skl::{unsync::map::SkipMap, Options}; + /// + /// let map = SkipMap::new(Options::new()).unwrap(); + /// + /// let height = map.random_height(); + /// map.insert_at_height(height, b"hello", b"world").unwrap(); + /// ``` + #[inline] + pub fn insert_at_height<'a, 'b: 'a>( + &'a self, + height: Height, + key: &'b [u8], + value: &'b [u8], + ) -> Result>, Error> { + self.0.insert_at_height(MIN_VERSION, height, key, value, ()) + } + + /// Upserts a new key if it does not yet exist, if the key with the given version already exists, it will update the value. + /// Unlike [`get_or_insert_with_value_builder`](SkipMap::get_or_insert_with_value_builder), this method will update the value if the key with the given version already exists. + /// + /// This method is useful when you want to insert a key and you know the value size but you do not have the value + /// at this moment. + /// + /// A placeholder will be inserted first, then you will get an [`VacantBuffer`], + /// and you must fill the buffer with bytes later in the closure. + /// + /// - Returns `Ok(None)` if the key was successfully inserted. + /// - Returns `Ok(Some(old))` if the key with the given version already exists and the value is successfully updated. 
+ /// + /// # Example + /// + /// ```rust + /// use skl::{unsync::map::SkipMap, ValueBuilder, Options}; + /// + /// struct Person { + /// id: u32, + /// name: String, + /// } + /// + /// impl Person { + /// fn encoded_size(&self) -> usize { + /// 4 + self.name.len() + /// } + /// } + /// + /// + /// let alice = Person { + /// id: 1, + /// name: "Alice".to_string(), + /// }; + /// + /// let encoded_size = alice.encoded_size(); + /// + /// let l = SkipMap::new(Options::new()).unwrap(); + /// + /// let vb = ValueBuilder::new(encoded_size as u32, |mut val| { + /// val.put_u32_le(alice.id).unwrap(); + /// val.put_slice(alice.name.as_bytes()).unwrap(); + /// Ok(()) + /// }); + /// + /// l.insert_with_value_builder::(b"alice", vb) + /// .unwrap(); + /// ``` + #[inline] + pub fn insert_with_value_builder<'a, 'b: 'a, E>( + &'a self, + key: &'b [u8], + value_builder: ValueBuilder) -> Result<(), E>>, + ) -> Result>, Either> { + self.0.insert_at_height_with_value_builder( + MIN_VERSION, + self.random_height(), + key, + value_builder, + (), + ) + } + + /// Upserts a new key if it does not yet exist, if the key with the given version already exists, it will update the value. + /// Unlike [`get_or_insert_with_value_builder`](SkipMap::get_or_insert_with_value_builder), this method will update the value if the key with the given version already exists. + /// + /// This method is useful when you want to insert a key and you know the value size but you do not have the value + /// at this moment. + /// + /// A placeholder will be inserted first, then you will get an [`VacantBuffer`], + /// and you must fill the buffer with bytes later in the closure. + /// + /// - Returns `Ok(None)` if the key was successfully inserted. + /// - Returns `Ok(Some(old))` if the key with the given version already exists and the value is successfully updated. 
+ /// + /// # Example + /// + /// ```rust + /// use skl::{unsync::map::SkipMap, ValueBuilder, Options}; + /// + /// struct Person { + /// id: u32, + /// name: String, + /// } + /// + /// impl Person { + /// fn encoded_size(&self) -> usize { + /// 4 + self.name.len() + /// } + /// } + /// + /// + /// let alice = Person { + /// id: 1, + /// name: "Alice".to_string(), + /// }; + /// + /// let encoded_size = alice.encoded_size(); + /// + /// let l = SkipMap::new(Options::new()).unwrap(); + /// + /// let vb = ValueBuilder::new(encoded_size as u32, |mut val| { + /// val.put_u32_le(alice.id).unwrap(); + /// val.put_slice(alice.name.as_bytes()).unwrap(); + /// Ok(()) + /// }); + /// + /// let height = l.random_height(); + /// l.insert_at_height_with_value_builder::(height, b"alice", vb) + /// .unwrap(); + /// ``` + pub fn insert_at_height_with_value_builder<'a, 'b: 'a, E>( + &'a self, + height: Height, + key: &'b [u8], + value_builder: ValueBuilder) -> Result<(), E>>, + ) -> Result>, Either> { + self + .0 + .insert_at_height_with_value_builder(MIN_VERSION, height, key, value_builder, ()) + } + + /// Inserts a new key-value pair if it does not yet exist. + /// + /// Unlike [`insert`](SkipMap::insert), this method will not update the value if the key with the given version already exists. + /// + /// - Returns `Ok(None)` if the key was successfully get_or_inserted. + /// - Returns `Ok(Some(_))` if the key with the given version already exists. + #[inline] + pub fn get_or_insert<'a, 'b: 'a>( + &'a self, + key: &'b [u8], + value: &'b [u8], + ) -> Result>, Error> { + self + .0 + .get_or_insert_at_height(MIN_VERSION, self.random_height(), key, value, ()) + } + + /// Inserts a new key-value pair at height if it does not yet exist. + /// + /// Unlike [`insert_at_height`](SkipMap::insert_at_height), this method will not update the value if the key with the given version already exists. + /// + /// - Returns `Ok(None)` if the key was successfully get_or_inserted. 
+ /// - Returns `Ok(Some(_))` if the key with the given version already exists. + pub fn get_or_insert_at_height<'a, 'b: 'a>( + &'a self, + height: Height, + key: &'b [u8], + value: &'b [u8], + ) -> Result>, Error> { + self + .0 + .get_or_insert_at_height(MIN_VERSION, height, key, value, ()) + } + + /// Inserts a new key if it does not yet exist. + /// + /// Unlike [`insert_with_value_builder`](SkipMap::insert_with_value_builder), this method will not update the value if the key with the given version already exists. + /// + /// This method is useful when you want to get_or_insert a key and you know the value size but you do not have the value + /// at this moment. + /// + /// A placeholder will be inserted first, then you will get an [`VacantBuffer`], + /// and you must fill the buffer with bytes later in the closure. + /// + /// - Returns `Ok(None)` if the key was successfully get_or_inserted. + /// - Returns `Ok(Some(_))` if the key with the given version already exists. + /// + /// # Example + /// + /// ```rust + /// use skl::{unsync::map::SkipMap, ValueBuilder, Options}; + /// + /// struct Person { + /// id: u32, + /// name: String, + /// } + /// + /// impl Person { + /// fn encoded_size(&self) -> usize { + /// 4 + self.name.len() + /// } + /// } + /// + /// + /// let alice = Person { + /// id: 1, + /// name: "Alice".to_string(), + /// }; + /// + /// let encoded_size = alice.encoded_size(); + /// + /// let l = SkipMap::new(Options::new()).unwrap(); + /// + /// let vb = ValueBuilder::new(encoded_size as u32, |mut val| { + /// val.put_u32_le(alice.id).unwrap(); + /// val.put_slice(alice.name.as_bytes()).unwrap(); + /// Ok(()) + /// }); + /// l.get_or_insert_with_value_builder::(b"alice", vb) + /// .unwrap(); + /// ``` + #[inline] + pub fn get_or_insert_with_value_builder<'a, 'b: 'a, E>( + &'a self, + key: &'b [u8], + value_builder: ValueBuilder) -> Result<(), E>>, + ) -> Result>, Either> { + self.get_or_insert_at_height_with_value_builder(self.random_height(), 
key, value_builder) + } + + /// Inserts a new key if it does not yet exist. + /// + /// Unlike [`insert_at_height_with_value_builder`](SkipMap::insert_at_height_with_value_builder), this method will not update the value if the key with the given version already exists. + /// + /// This method is useful when you want to get_or_insert a key and you know the value size but you do not have the value + /// at this moment. + /// + /// A placeholder will be inserted first, then you will get an [`VacantBuffer`], + /// and you must fill the buffer with bytes later in the closure. + /// + /// - Returns `Ok(None)` if the key was successfully get_or_inserted. + /// - Returns `Ok(Some(_))` if the key with the given version already exists. + /// + /// # Example + /// + /// ```rust + /// use skl::{unsync::map::SkipMap, ValueBuilder, Options}; + /// + /// struct Person { + /// id: u32, + /// name: String, + /// } + /// + /// impl Person { + /// fn encoded_size(&self) -> usize { + /// 4 + self.name.len() + /// } + /// } + /// + /// + /// let alice = Person { + /// id: 1, + /// name: "Alice".to_string(), + /// }; + /// + /// let encoded_size = alice.encoded_size(); + /// + /// let l = SkipMap::new(Options::new()).unwrap(); + /// + /// let vb = ValueBuilder::new(encoded_size as u32, |mut val| { + /// val.put_u32_le(alice.id).unwrap(); + /// val.put_slice(alice.name.as_bytes()).unwrap(); + /// Ok(()) + /// }); + /// + /// let height = l.random_height(); + /// l.get_or_insert_at_height_with_value_builder::(height, b"alice", vb) + /// .unwrap(); + /// ``` + #[inline] + pub fn get_or_insert_at_height_with_value_builder<'a, 'b: 'a, E>( + &'a self, + height: Height, + key: &'b [u8], + value_builder: ValueBuilder) -> Result<(), E>>, + ) -> Result>, Either> { + self + .0 + .get_or_insert_at_height_with_value_builder(MIN_VERSION, height, key, value_builder, ()) + } + + /// Upserts a new key if it does not yet exist, if the key with the given version already exists, it will update the value. 
+ /// Unlike [`get_or_insert_with_builders`](SkipMap::get_or_insert_with_builders), this method will update the value if the key with the given version already exists. + /// + /// This method is useful when you want to insert a key and you know the key size and value size but you do not have the key and value + /// at this moment. + /// + /// A placeholder will be inserted first, then you will get an [`VacantBuffer`], + /// and you must fill the buffer with bytes later in the closure. + /// + /// - Returns `Ok(None)` if the key was successfully inserted. + /// - Returns `Ok(Some(old))` if the key with the given version already exists and the value is successfully updated. + /// + /// # Example + /// + /// ```rust + /// use skl::{unsync::map::SkipMap, KeyBuilder, ValueBuilder, Options}; + /// + /// struct Person { + /// id: u32, + /// name: String, + /// } + /// + /// impl Person { + /// fn encoded_size(&self) -> usize { + /// 4 + self.name.len() + /// } + /// } + /// + /// + /// let alice = Person { + /// id: 1, + /// name: "Alice".to_string(), + /// }; + /// + /// let encoded_size = alice.encoded_size(); + /// + /// let l = SkipMap::new(Options::new()).unwrap(); + /// + /// let kb = KeyBuilder::new(5u8.into(), |mut key| { + /// key.put_slice(b"alice").unwrap(); + /// Ok(()) + /// }); + /// + /// let vb = ValueBuilder::new(encoded_size as u32, |mut val| { + /// val.put_u32_le(alice.id).unwrap(); + /// val.put_slice(alice.name.as_bytes()).unwrap(); + /// Ok(()) + /// }); + /// + /// l.insert_with_builders::<(), ()>(kb, vb) + /// .unwrap(); + /// ``` + #[inline] + pub fn insert_with_builders<'a, KE, VE>( + &'a self, + key_builder: KeyBuilder) -> Result<(), KE>>, + value_builder: ValueBuilder) -> Result<(), VE>>, + ) -> Result>, Among> { + self.0.insert_at_height_with_builders( + MIN_VERSION, + self.random_height(), + key_builder, + value_builder, + (), + ) + } + + /// Upserts a new key if it does not yet exist, if the key with the given version already exists, it 
will update the value. + /// Unlike [`get_or_insert_with_builders_and_trailer`](SkipMap::get_or_insert_with_builders_and_trailer), this method will update the value if the key with the given version already exists. + /// + /// This method is useful when you want to insert a key and you know the key size and value size but you do not have the key and value + /// at this moment. + /// + /// A placeholder will be inserted first, then you will get an [`VacantBuffer`], + /// and you must fill the buffer with bytes later in the closure. + /// + /// - Returns `Ok(None)` if the key was successfully inserted. + /// - Returns `Ok(Some(old))` if the key with the given version already exists and the value is successfully updated. + /// + /// # Example + /// + /// ```rust + /// use skl::{unsync::map::SkipMap, KeyBuilder, ValueBuilder, Options}; + /// + /// struct Person { + /// id: u32, + /// name: String, + /// } + /// + /// impl Person { + /// fn encoded_size(&self) -> usize { + /// 4 + self.name.len() + /// } + /// } + /// + /// + /// let alice = Person { + /// id: 1, + /// name: "Alice".to_string(), + /// }; + /// + /// let encoded_size = alice.encoded_size(); + /// + /// let l = SkipMap::new(Options::new()).unwrap(); + /// + /// let kb = KeyBuilder::new(5u8.into(), |mut key| { + /// key.put_slice(b"alice").unwrap(); + /// Ok(()) + /// }); + /// + /// let vb = ValueBuilder::new(encoded_size as u32, |mut val| { + /// val.put_u32_le(alice.id).unwrap(); + /// val.put_slice(alice.name.as_bytes()).unwrap(); + /// Ok(()) + /// }); + /// + /// let height = l.random_height(); + /// l.insert_at_height_with_builders::<(), ()>(height, kb, vb) + /// .unwrap(); + /// ``` + #[inline] + pub fn insert_at_height_with_builders<'a, KE, VE>( + &'a self, + height: Height, + key_builder: KeyBuilder) -> Result<(), KE>>, + value_builder: ValueBuilder) -> Result<(), VE>>, + ) -> Result>, Among> { + self + .0 + .insert_at_height_with_builders(MIN_VERSION, height, key_builder, value_builder, ()) + } 
+ + /// Inserts a new key if it does not yet exist. + /// + /// Unlike [`insert_with_builders`](SkipMap::insert_with_builders), this method will not update the value if the key with the given version already exists. + /// + /// This method is useful when you want to get_or_insert a key and you know the value size but you do not have the value + /// at this moment. + /// + /// A placeholder will be inserted first, then you will get an [`VacantBuffer`], + /// and you must fill the buffer with bytes later in the closure. + /// + /// # Example + /// + /// ```rust + /// use skl::{unsync::map::SkipMap, KeyBuilder, ValueBuilder, Options}; + /// + /// struct Person { + /// id: u32, + /// name: String, + /// } + /// + /// impl Person { + /// fn encoded_size(&self) -> usize { + /// 4 + self.name.len() + /// } + /// } + /// + /// + /// let alice = Person { + /// id: 1, + /// name: "Alice".to_string(), + /// }; + /// + /// let encoded_size = alice.encoded_size(); + /// + /// let l = SkipMap::new(Options::new()).unwrap(); + /// + /// let kb = KeyBuilder::new(5u8.into(), |mut key| { + /// key.put_slice(b"alice").unwrap(); + /// Ok(()) + /// }); + /// + /// let vb = ValueBuilder::new(encoded_size as u32, |mut val| { + /// val.put_u32_le(alice.id).unwrap(); + /// val.put_slice(alice.name.as_bytes()).unwrap(); + /// Ok(()) + /// }); + /// + /// l.get_or_insert_with_builders::<(), ()>(kb, vb) + /// .unwrap(); + /// ``` + #[inline] + pub fn get_or_insert_with_builders<'a, KE, VE>( + &'a self, + key_builder: KeyBuilder) -> Result<(), KE>>, + value_builder: ValueBuilder) -> Result<(), VE>>, + ) -> Result>, Among> { + self.0.get_or_insert_at_height_with_builders( + MIN_VERSION, + self.random_height(), + key_builder, + value_builder, + (), + ) + } + + /// Inserts a new key if it does not yet exist. + /// + /// Unlike [`insert_at_height_with_builders`](SkipMap::insert_at_height_with_builders), this method will not update the value if the key with the given version already exists. 
+ /// + /// This method is useful when you want to get_or_insert a key and you know the value size but you do not have the value + /// at this moment. + /// + /// A placeholder will be inserted first, then you will get an [`VacantBuffer`], + /// and you must fill the buffer with bytes later in the closure. + /// + /// # Example + /// + /// ```rust + /// use skl::{unsync::map::SkipMap, KeyBuilder, ValueBuilder, Options}; + /// + /// struct Person { + /// id: u32, + /// name: String, + /// } + /// + /// impl Person { + /// fn encoded_size(&self) -> usize { + /// 4 + self.name.len() + /// } + /// } + /// + /// + /// let alice = Person { + /// id: 1, + /// name: "Alice".to_string(), + /// }; + /// + /// let encoded_size = alice.encoded_size(); + /// + /// let l = SkipMap::new(Options::new()).unwrap(); + /// + /// let kb = KeyBuilder::new(5u8.into(), |mut key| { + /// key.put_slice(b"alice").unwrap(); + /// Ok(()) + /// }); + /// + /// let vb = ValueBuilder::new(encoded_size as u32, |mut val| { + /// val.put_u32_le(alice.id).unwrap(); + /// val.put_slice(alice.name.as_bytes()).unwrap(); + /// Ok(()) + /// }); + /// + /// let height = l.random_height(); + /// l.get_or_insert_at_height_with_builders::<(), ()>(height, kb, vb) + /// .unwrap(); + /// ``` + pub fn get_or_insert_at_height_with_builders<'a, KE, VE>( + &'a self, + height: Height, + key_builder: KeyBuilder) -> Result<(), KE>>, + value_builder: ValueBuilder) -> Result<(), VE>>, + ) -> Result>, Among> { + self.0.get_or_insert_at_height_with_builders( + MIN_VERSION, + height, + key_builder, + value_builder, + (), + ) + } + + /// Removes the key-value pair if it exists. + #[inline] + pub fn remove<'a, 'b: 'a>( + &'a self, + key: &'b [u8], + ) -> Result>, Error> { + self.remove_at_height(self.random_height(), key) + } + + /// Removes the key-value pair if it exists. 
+ pub fn remove_at_height<'a, 'b: 'a>( + &'a self, + height: Height, + key: &'b [u8], + ) -> Result>, Error> { + self.0.compare_remove_at_height( + MIN_VERSION, + height, + key, + (), + Ordering::Relaxed, + Ordering::Relaxed, + ) + } +} diff --git a/src/unsync/tests.rs b/src/unsync/tests.rs new file mode 100644 index 0000000..6f09bfe --- /dev/null +++ b/src/unsync/tests.rs @@ -0,0 +1,87 @@ +#![allow(warnings)] + +use super::*; +use crate::Descend; + +use std::format; + +use std::sync::Arc; + +use rarena_allocator::Freelist; +#[cfg(feature = "std")] +use wg::WaitGroup; + +const ARENA_SIZE: usize = 1 << 20; +#[cfg(feature = "std")] +const BIG_ARENA_SIZE: usize = 120 << 20; +const TEST_OPTIONS: Options = Options::new().with_capacity(ARENA_SIZE as u32); +const UNIFY_TEST_OPTIONS: Options = Options::new() + .with_capacity(ARENA_SIZE as u32) + .with_unify(true); +#[cfg(feature = "std")] +const BIG_TEST_OPTIONS: Options = Options::new().with_capacity(BIG_ARENA_SIZE as u32); +#[cfg(feature = "std")] +const UNIFY_BIG_TEST_OPTIONS: Options = Options::new() + .with_capacity(BIG_ARENA_SIZE as u32) + .with_unify(true); + +fn run(f: impl Fn() + Send + Sync + 'static) { + f(); +} + +/// Only used for testing + +pub fn key(i: usize) -> std::vec::Vec { + format!("{:05}", i).into_bytes() +} + +/// Only used for testing +#[cfg(feature = "std")] +pub fn big_value(i: usize) -> std::vec::Vec { + format!("{:01048576}", i).into_bytes() +} + +/// Only used for testing +pub fn new_value(i: usize) -> std::vec::Vec { + format!("{:05}", i).into_bytes() +} + +fn make_int_key(i: usize) -> std::vec::Vec { + format!("{:05}", i).into_bytes() +} + +fn make_value(i: usize) -> std::vec::Vec { + format!("v{:05}", i).into_bytes() +} + +#[test] +fn test_encode_decode_key_size() { + // Test cases + let test_cases = [ + (0, 0), // Minimum values + (1, 1), // Small values + (0x1FFFFFF, 0), // Maximum key_size, minimum height + (0, 0b11111), // Minimum key_size, maximum height + (0x1FFFFFF, 0b11111), // 
Maximum values + (0x1FFFFFF - 1, 0b11111 - 1), // One less than maximum values + (12345678, 31), // Random values + (0, 1), // Edge case: Minimum key_size, small height + (1, 0), // Edge case: Small key_size, minimum height + ]; + + for &(key_size, height) in &test_cases { + let encoded = encode_key_size_and_height(key_size, height); + let (decoded_key_size, decoded_height) = decode_key_size_and_height(encoded); + + assert_eq!(key_size, decoded_key_size); + assert_eq!(height, decoded_height); + } +} + +mod full; + +mod map; + +mod trailed; + +mod versioned; diff --git a/src/unsync/tests/full.rs b/src/unsync/tests/full.rs new file mode 100644 index 0000000..885faed --- /dev/null +++ b/src/unsync/tests/full.rs @@ -0,0 +1,2551 @@ +use super::*; + +type SkipList = crate::unsync::full::SkipMap; + +type SkipMap = crate::unsync::full::SkipMap<(), Ascend>; + +fn empty_in(l: SkipMap) { + let mut it = l.iter_all_versions(MIN_VERSION); + + assert!(it.seek_lower_bound(Bound::Unbounded).is_none()); + assert!(it.seek_upper_bound(Bound::Unbounded).is_none()); + assert!(it.seek_lower_bound(Bound::Included(b"aaa")).is_none()); + assert!(it.seek_upper_bound(Bound::Excluded(b"aaa")).is_none()); + assert!(it.seek_lower_bound(Bound::Excluded(b"aaa")).is_none()); + assert!(it.seek_upper_bound(Bound::Included(b"aaa")).is_none()); + assert!(l.first(MIN_VERSION).is_none()); + assert!(l.last(MIN_VERSION).is_none()); + assert!(l.0.ge(MIN_VERSION, b"aaa", false).is_none()); + assert!(l.0.lt(MIN_VERSION, b"aaa", false).is_none()); + assert!(l.0.gt(MIN_VERSION, b"aaa", false).is_none()); + assert!(l.0.le(MIN_VERSION, b"aaa", false).is_none()); + assert!(l.get(MIN_VERSION, b"aaa").is_none()); + assert!(!l.contains_key(MIN_VERSION, b"aaa")); + assert!(l.allocated() > 0); + assert!(l.capacity() > 0); + assert_eq!(l.remaining(), l.capacity() - l.allocated()); +} + +#[test] +fn test_empty() { + run(|| empty_in(SkipList::new(Options::new()).unwrap())); +} + +#[test] +fn test_empty_unify() { + run(|| 
empty_in(SkipList::new(UNIFY_TEST_OPTIONS).unwrap())); +} + +#[test] +#[cfg(feature = "memmap")] +#[cfg_attr(miri, ignore)] +fn test_empty_map_mut() { + run(|| unsafe { + let dir = tempfile::tempdir().unwrap(); + let p = dir.path().join("test_skipmap_empty_map_mut"); + let open_options = OpenOptions::default() + .create_new(Some(1000)) + .read(true) + .write(true); + let map_options = MmapOptions::default(); + + let x = SkipList::map_mut(p, Options::new(), open_options, map_options).unwrap(); + empty_in(x); + }) +} + +#[test] +#[cfg(feature = "memmap")] +fn test_empty_map_anon() { + run(|| { + let map_options = MmapOptions::default().len(1000); + empty_in(SkipList::map_anon(Options::new(), map_options).unwrap()); + }) +} + +#[test] +#[cfg(feature = "memmap")] +fn test_empty_map_anon_unify() { + run(|| { + let map_options = MmapOptions::default().len(1000); + empty_in(SkipList::map_anon(TEST_OPTIONS, map_options).unwrap()); + }) +} + +fn full_in(l: impl FnOnce(usize) -> SkipMap) { + let l = l(1000); + let mut found_arena_full = false; + + for i in 0..100 { + if let Err(e) = l.get_or_insert(0, &make_int_key(i), &make_value(i), ()) { + assert!(matches!( + e, + Error::Arena(ArenaError::InsufficientSpace { .. 
}) + )); + found_arena_full = true; + break; + } + } + + assert!(found_arena_full); +} + +#[test] +fn test_full() { + run(|| { + full_in(|n| { + SkipList::new( + Options::new() + .with_capacity(n as u32) + .with_freelist(Freelist::None), + ) + .unwrap() + }) + }) +} + +#[test] +fn test_full_unify() { + run(|| { + full_in(|n| { + SkipList::new( + UNIFY_TEST_OPTIONS + .with_capacity(n as u32) + .with_freelist(Freelist::None), + ) + .unwrap() + }) + }) +} + +#[test] +#[cfg(feature = "memmap")] +#[cfg_attr(miri, ignore)] +fn test_full_map_mut() { + run(|| unsafe { + let dir = tempfile::tempdir().unwrap(); + let p = dir.path().join("test_skipmap_full_map_mut"); + + full_in(|n| { + let open_options = OpenOptions::default() + .create_new(Some(n as u32)) + .read(true) + .write(true); + let map_options = MmapOptions::default(); + SkipList::map_mut( + p, + Options::new().with_freelist(Freelist::None), + open_options, + map_options, + ) + .unwrap() + }); + }) +} + +#[test] +#[cfg(feature = "memmap")] +fn test_full_map_anon() { + run(|| { + full_in(|n| { + let map_options = MmapOptions::default().len(n as u32); + SkipList::map_anon(Options::new().with_freelist(Freelist::None), map_options).unwrap() + }); + }) +} + +#[test] +#[cfg(feature = "memmap")] +fn test_full_map_anon_unify() { + run(|| { + full_in(|n| { + let map_options = MmapOptions::default().len(n as u32); + SkipList::map_anon(Options::new().with_freelist(Freelist::None), map_options).unwrap() + }); + }) +} + +fn basic_in(mut l: SkipMap) { + // Try adding values. 
+ l.get_or_insert(0, b"key1", &make_value(1), ()).unwrap(); + l.get_or_insert(0, b"key3", &make_value(3), ()).unwrap(); + l.get_or_insert(0, b"key2", &make_value(2), ()).unwrap(); + assert_eq!(l.comparator(), &Ascend); + + { + let mut it = l.iter_all_versions(0); + let ent = it.seek_lower_bound(Bound::Included(b"key1")).unwrap(); + assert_eq!(ent.key(), b"key1"); + assert_eq!(ent.value().unwrap(), &make_value(1)); + assert_eq!(ent.version(), 0); + + let ent = it.seek_lower_bound(Bound::Included(b"key2")).unwrap(); + assert_eq!(ent.key(), b"key2"); + assert_eq!(ent.value().unwrap(), &make_value(2)); + assert_eq!(ent.version(), 0); + + let ent = it.seek_lower_bound(Bound::Included(b"key3")).unwrap(); + assert_eq!(ent.key(), b"key3"); + assert_eq!(ent.value().unwrap(), &make_value(3)); + assert_eq!(ent.version(), 0); + } + + l.get_or_insert(1, "a".as_bytes(), &[], ()).unwrap(); + l.get_or_insert(2, "a".as_bytes(), &[], ()).unwrap(); + + { + let mut it = l.iter_all_versions(2); + let ent = it.seek_lower_bound(Bound::Included(b"a")).unwrap(); + assert_eq!(ent.key(), b"a"); + assert_eq!(ent.value().unwrap(), &[]); + assert_eq!(ent.version(), 2); + + let ent = it.next().unwrap(); + assert_eq!(ent.key(), b"a"); + assert_eq!(ent.value().unwrap(), &[]); + assert_eq!(ent.version(), 1); + } + + l.get_or_insert(2, "b".as_bytes(), &[], ()).unwrap(); + l.get_or_insert(1, "b".as_bytes(), &[], ()).unwrap(); + + { + let mut it = l.iter_all_versions(2); + let ent = it.seek_lower_bound(Bound::Included(b"b")).unwrap(); + assert_eq!(ent.key(), b"b"); + assert_eq!(ent.value().unwrap(), &[]); + assert_eq!(ent.version(), 2); + + let ent = it.next().unwrap(); + assert_eq!(ent.key(), b"b"); + assert_eq!(ent.value().unwrap(), &[]); + assert_eq!(ent.version(), 1); + + let ent = it.entry().unwrap(); + assert_eq!(ent.key(), b"b"); + assert_eq!(ent.value().unwrap(), &[]); + assert_eq!(ent.version(), 1); + } + + l.get_or_insert(2, b"b", &[], ()).unwrap().unwrap(); + + assert!(l.get_or_insert(2, 
b"c", &[], ()).unwrap().is_none()); + + unsafe { + l.clear().unwrap(); + } + + let l = l.clone(); + { + let mut it = l.iter_all_versions(0); + assert!(it.seek_lower_bound(Bound::Unbounded).is_none()); + assert!(it.seek_upper_bound(Bound::Unbounded).is_none()); + } + assert!(l.is_empty()); + + #[cfg(feature = "memmap")] + l.flush().unwrap(); + + #[cfg(feature = "memmap")] + l.flush_async().unwrap(); +} + +#[test] +fn test_basic() { + run(|| basic_in(SkipList::new(TEST_OPTIONS).unwrap())) +} + +#[test] +fn test_basic_unify() { + run(|| basic_in(SkipList::new(UNIFY_TEST_OPTIONS).unwrap())) +} + +#[test] +#[cfg(feature = "memmap")] +#[cfg_attr(miri, ignore)] +fn test_basic_map_mut() { + run(|| unsafe { + let dir = tempfile::tempdir().unwrap(); + let p = dir.path().join("test_skipmap_basic_map_mut"); + let open_options = OpenOptions::default() + .create_new(Some(ARENA_SIZE as u32)) + .read(true) + .write(true); + let map_options = MmapOptions::default(); + basic_in(SkipList::map_mut(p, Options::new(), open_options, map_options).unwrap()); + }) +} + +#[test] +#[cfg(feature = "memmap")] +fn test_basic_map_anon() { + run(|| { + let map_options = MmapOptions::default().len(ARENA_SIZE as u32); + basic_in(SkipList::map_anon(Options::new(), map_options).unwrap()); + }) +} + +#[test] +#[cfg(feature = "memmap")] +fn test_basic_map_anon_unify() { + run(|| { + let map_options = MmapOptions::default().len(ARENA_SIZE as u32); + basic_in(SkipList::map_anon(TEST_OPTIONS, map_options).unwrap()); + }) +} + +fn iter_all_versions_mvcc(l: SkipMap) { + l.get_or_insert(1, b"a", b"a1", ()).unwrap(); + l.get_or_insert(3, b"a", b"a2", ()).unwrap(); + l.get_or_insert(1, b"c", b"c1", ()).unwrap(); + l.get_or_insert(3, b"c", b"c2", ()).unwrap(); + + let mut it = l.iter_all_versions(0); + let mut num = 0; + while it.next().is_some() { + num += 1; + } + assert_eq!(num, 0); + + let mut it = l.iter_all_versions(1); + let mut num = 0; + while it.next().is_some() { + num += 1; + } + assert_eq!(num, 2); 
+ + let mut it = l.iter_all_versions(2); + let mut num = 0; + while it.next().is_some() { + num += 1; + } + assert_eq!(num, 2); + + let mut it = l.iter_all_versions(3); + let mut num = 0; + while it.next().is_some() { + num += 1; + } + assert_eq!(num, 4); + + let mut it = l.iter_all_versions(0); + assert!(it.seek_lower_bound(Bound::Unbounded).is_none()); + assert!(it.seek_upper_bound(Bound::Unbounded).is_none()); + + let mut it = l.iter_all_versions(1); + let ent = it.seek_lower_bound(Bound::Unbounded).unwrap(); + assert_eq!(ent.key(), b"a"); + assert_eq!(ent.value().unwrap(), b"a1"); + assert_eq!(ent.version(), 1); + + let ent = it.seek_upper_bound(Bound::Unbounded).unwrap(); + assert_eq!(ent.key(), b"c"); + assert_eq!(ent.value().unwrap(), b"c1"); + assert_eq!(ent.version(), 1); + + let mut it = l.iter_all_versions(2); + let ent = it.seek_lower_bound(Bound::Unbounded).unwrap(); + assert_eq!(ent.key(), b"a"); + assert_eq!(ent.value().unwrap(), b"a1"); + assert_eq!(ent.version(), 1); + + let ent = it.seek_upper_bound(Bound::Unbounded).unwrap(); + assert_eq!(ent.key(), b"c"); + assert_eq!(ent.value().unwrap(), b"c1"); + assert_eq!(ent.version(), 1); + + let mut it = l.iter_all_versions(3); + + let ent = it.seek_upper_bound(Bound::Excluded(b"b")).unwrap(); + assert_eq!(ent.key(), b"a"); + assert_eq!(ent.value().unwrap(), b"a2"); + assert_eq!(ent.version(), 3); + + let ent = it.seek_upper_bound(Bound::Included(b"c")).unwrap(); + assert_eq!(ent.key(), b"c"); + assert_eq!(ent.value().unwrap(), b"c2"); + assert_eq!(ent.version(), 3); + + let ent = it.seek_lower_bound(Bound::Excluded(b"b")).unwrap(); + assert_eq!(ent.key(), b"c"); + assert_eq!(ent.value().unwrap(), b"c2"); + assert_eq!(ent.version(), 3); + + let ent = it.seek_lower_bound(Bound::Included(b"c")).unwrap(); + assert_eq!(ent.key(), b"c"); + assert_eq!(ent.value().unwrap(), b"c2"); + assert_eq!(ent.version(), 3); +} + +#[test] +fn test_iter_all_versions_mvcc() { + run(|| 
iter_all_versions_mvcc(SkipList::new(TEST_OPTIONS).unwrap())); +} + +#[test] +fn test_iter_all_versions_mvcc_unify() { + run(|| iter_all_versions_mvcc(SkipList::new(UNIFY_TEST_OPTIONS).unwrap())); +} + +#[test] +#[cfg(feature = "memmap")] +#[cfg_attr(miri, ignore)] +fn test_iter_all_versions_mvcc_map_mut() { + run(|| unsafe { + let dir = tempfile::tempdir().unwrap(); + let p = dir + .path() + .join("test_skipmap_iter_all_versions_mvcc_map_mut"); + let open_options = OpenOptions::default() + .create_new(Some(ARENA_SIZE as u32)) + .read(true) + .write(true); + let map_options = MmapOptions::default(); + iter_all_versions_mvcc( + SkipList::map_mut(p, Options::new(), open_options, map_options).unwrap(), + ); + }); +} + +#[test] +#[cfg(feature = "memmap")] +fn test_iter_all_versions_mvcc_map_anon() { + run(|| { + let map_options = MmapOptions::default().len(ARENA_SIZE as u32); + iter_all_versions_mvcc(SkipList::map_anon(Options::new(), map_options).unwrap()); + }) +} + +#[test] +#[cfg(feature = "memmap")] +fn test_iter_all_versions_mvcc_map_anon_unify() { + run(|| { + let map_options = MmapOptions::default().len(ARENA_SIZE as u32); + iter_all_versions_mvcc(SkipList::map_anon(UNIFY_TEST_OPTIONS, map_options).unwrap()); + }) +} + +fn ordering() { + let l = SkipList::with_comparator(TEST_OPTIONS, Descend).unwrap(); + + l.get_or_insert(1, b"a1", b"a1", ()).unwrap(); + l.get_or_insert(2, b"a2", b"a2", ()).unwrap(); + l.get_or_insert(3, b"a3", b"a3", ()).unwrap(); + + let mut it = l.iter_all_versions(3); + for i in (1..=3).rev() { + let ent = it.next().unwrap(); + assert_eq!(ent.key(), format!("a{i}").as_bytes()); + assert_eq!(ent.value().unwrap(), format!("a{i}").as_bytes()); + } +} + +#[test] +fn test_ordering() { + run(ordering); +} + +fn get_mvcc(l: SkipMap) { + l.get_or_insert(1, b"a", b"a1", ()).unwrap(); + l.get_or_insert(3, b"a", b"a2", ()).unwrap(); + l.get_or_insert(1, b"c", b"c1", ()).unwrap(); + l.get_or_insert(3, b"c", b"c2", ()).unwrap(); + + let ent = l.get(1, 
b"a").unwrap(); + assert_eq!(ent.key(), b"a"); + assert_eq!(ent.value(), b"a1"); + assert_eq!(ent.version(), 1); + + let ent = l.get(2, b"a").unwrap(); + assert_eq!(ent.key(), b"a"); + assert_eq!(ent.value(), b"a1"); + assert_eq!(ent.version(), 1); + + let ent = l.get(3, b"a").unwrap(); + assert_eq!(ent.key(), b"a"); + assert_eq!(ent.value(), b"a2"); + assert_eq!(ent.version(), 3); + + let ent = l.get(4, b"a").unwrap(); + assert_eq!(ent.key(), b"a"); + assert_eq!(ent.value(), b"a2"); + assert_eq!(ent.version(), 3); + + assert!(l.get(0, b"b").is_none()); + assert!(l.get(1, b"b").is_none()); + assert!(l.get(2, b"b").is_none()); + assert!(l.get(3, b"b").is_none()); + assert!(l.get(4, b"b").is_none()); + + let ent = l.get(1, b"c").unwrap(); + assert_eq!(ent.key(), b"c"); + assert_eq!(ent.value(), b"c1"); + assert_eq!(ent.version(), 1); + + let ent = l.get(2, b"c").unwrap(); + assert_eq!(ent.key(), b"c"); + assert_eq!(ent.value(), b"c1"); + assert_eq!(ent.version(), 1); + + let ent = l.get(3, b"c").unwrap(); + assert_eq!(ent.key(), b"c"); + assert_eq!(ent.value(), b"c2"); + assert_eq!(ent.version(), 3); + + let ent = l.get(4, b"c").unwrap(); + assert_eq!(ent.key(), b"c"); + assert_eq!(ent.value(), b"c2"); + assert_eq!(ent.version(), 3); + + assert!(l.get(5, b"d").is_none()); +} + +#[test] +fn test_get_mvcc() { + run(|| get_mvcc(SkipList::new(TEST_OPTIONS).unwrap())); +} + +#[test] +fn test_get_mvcc_unify() { + run(|| get_mvcc(SkipList::new(UNIFY_TEST_OPTIONS).unwrap())); +} + +#[test] +#[cfg(feature = "memmap")] +#[cfg_attr(miri, ignore)] +fn test_get_mvcc_map_mut() { + run(|| unsafe { + let dir = tempfile::tempdir().unwrap(); + let p = dir.path().join("test_skipmap_get_mvcc_map_mut"); + let open_options = OpenOptions::default() + .create_new(Some(ARENA_SIZE as u32)) + .read(true) + .write(true); + let map_options = MmapOptions::default(); + get_mvcc(SkipList::map_mut(p, Options::new(), open_options, map_options).unwrap()); + }) +} + +#[test] +#[cfg(feature = "memmap")] 
+fn test_get_mvcc_map_anon() { + run(|| { + let map_options = MmapOptions::default().len(ARENA_SIZE as u32); + get_mvcc(SkipList::map_anon(Options::new(), map_options).unwrap()); + }) +} + +#[test] +#[cfg(feature = "memmap")] +fn test_get_mvcc_map_anon_unify() { + run(|| { + let map_options = MmapOptions::default().len(ARENA_SIZE as u32); + get_mvcc(SkipList::map_anon(UNIFY_TEST_OPTIONS, map_options).unwrap()); + }) +} + +fn gt_in(l: SkipMap) { + l.get_or_insert(1, b"a", b"a1", ()).unwrap(); + l.get_or_insert(3, b"a", b"a2", ()).unwrap(); + l.get_or_insert(1, b"c", b"c1", ()).unwrap(); + l.get_or_insert(3, b"c", b"c2", ()).unwrap(); + l.get_or_insert(5, b"c", b"c3", ()).unwrap(); + + assert!(l.lower_bound(0, Bound::Excluded(b"a")).is_none()); + assert!(l.lower_bound(0, Bound::Excluded(b"b")).is_none()); + assert!(l.lower_bound(0, Bound::Excluded(b"c")).is_none()); + + let ent = l.lower_bound(1, Bound::Excluded(b"")).unwrap(); + assert_eq!(ent.key(), b"a"); + assert_eq!(ent.value(), b"a1"); + assert_eq!(ent.version(), 1); + + let ent = l.lower_bound(2, Bound::Excluded(b"")).unwrap(); + assert_eq!(ent.key(), b"a"); + assert_eq!(ent.value(), b"a1"); + assert_eq!(ent.version(), 1); + + let ent = l.lower_bound(3, Bound::Excluded(b"")).unwrap(); + assert_eq!(ent.key(), b"a"); + assert_eq!(ent.value(), b"a2"); + assert_eq!(ent.version(), 3); + + let ent = l.lower_bound(1, Bound::Excluded(b"a")).unwrap(); + assert_eq!(ent.key(), b"c"); + assert_eq!(ent.value(), b"c1"); + assert_eq!(ent.version(), 1); + + let ent = l.lower_bound(2, Bound::Excluded(b"a")).unwrap(); + assert_eq!(ent.key(), b"c"); + assert_eq!(ent.value(), b"c1"); + assert_eq!(ent.version(), 1); + + let ent = l.lower_bound(3, Bound::Excluded(b"a")).unwrap(); + assert_eq!(ent.key(), b"c"); + assert_eq!(ent.value(), b"c2"); + assert_eq!(ent.version(), 3); + + let ent = l.lower_bound(1, Bound::Excluded(b"b")).unwrap(); + assert_eq!(ent.key(), b"c"); + assert_eq!(ent.value(), b"c1"); + assert_eq!(ent.version(), 
1); + + let ent = l.lower_bound(2, Bound::Excluded(b"b")).unwrap(); + assert_eq!(ent.key(), b"c"); + assert_eq!(ent.value(), b"c1"); + assert_eq!(ent.version(), 1); + + let ent = l.lower_bound(3, Bound::Excluded(b"b")).unwrap(); + assert_eq!(ent.key(), b"c"); + assert_eq!(ent.value(), b"c2"); + assert_eq!(ent.version(), 3); + + let ent = l.lower_bound(4, Bound::Excluded(b"b")).unwrap(); + assert_eq!(ent.key(), b"c"); + assert_eq!(ent.value(), b"c2"); + assert_eq!(ent.version(), 3); + + let ent = l.lower_bound(5, Bound::Excluded(b"b")).unwrap(); + assert_eq!(ent.key(), b"c"); + assert_eq!(ent.value(), b"c3"); + assert_eq!(ent.version(), 5); + + let ent = l.lower_bound(6, Bound::Excluded(b"b")).unwrap(); + assert_eq!(ent.key(), b"c"); + assert_eq!(ent.value(), b"c3"); + assert_eq!(ent.version(), 5); + + assert!(l.lower_bound(1, Bound::Excluded(b"c")).is_none()); + assert!(l.lower_bound(2, Bound::Excluded(b"c")).is_none()); + assert!(l.lower_bound(3, Bound::Excluded(b"c")).is_none()); + assert!(l.lower_bound(4, Bound::Excluded(b"c")).is_none()); + assert!(l.lower_bound(5, Bound::Excluded(b"c")).is_none()); + assert!(l.lower_bound(6, Bound::Excluded(b"c")).is_none()); +} + +#[test] +fn test_gt() { + run(|| gt_in(SkipList::new(TEST_OPTIONS).unwrap())); +} + +#[test] +fn test_gt_unify() { + run(|| gt_in(SkipList::new(UNIFY_TEST_OPTIONS).unwrap())); +} + +#[test] +#[cfg(feature = "memmap")] +#[cfg_attr(miri, ignore)] +fn test_gt_map_mut() { + run(|| unsafe { + let dir = tempfile::tempdir().unwrap(); + let p = dir.path().join("test_skipmap_gt_map_mut"); + let open_options = OpenOptions::default() + .create_new(Some(ARENA_SIZE as u32)) + .read(true) + .write(true); + let map_options = MmapOptions::default(); + gt_in(SkipList::map_mut(p, Options::new(), open_options, map_options).unwrap()); + }) +} + +#[test] +#[cfg(feature = "memmap")] +fn test_gt_map_anon() { + run(|| { + let map_options = MmapOptions::default().len(ARENA_SIZE as u32); + 
gt_in(SkipList::map_anon(Options::new(), map_options).unwrap()); + }) +} + +#[test] +#[cfg(feature = "memmap")] +fn test_gt_map_anon_unify() { + run(|| { + let map_options = MmapOptions::default().len(ARENA_SIZE as u32); + gt_in(SkipList::map_anon(UNIFY_TEST_OPTIONS, map_options).unwrap()); + }) +} + +fn ge_in(l: SkipMap) { + l.get_or_insert(1, b"a", b"a1", ()).unwrap(); + l.get_or_insert(3, b"a", b"a2", ()).unwrap(); + l.get_or_insert(1, b"c", b"c1", ()).unwrap(); + l.get_or_insert(3, b"c", b"c2", ()).unwrap(); + + assert!(l.lower_bound(MIN_VERSION, Bound::Included(b"a")).is_none()); + assert!(l.lower_bound(MIN_VERSION, Bound::Included(b"b")).is_none()); + assert!(l.lower_bound(MIN_VERSION, Bound::Included(b"c")).is_none()); + + let ent = l.lower_bound(1, Bound::Included(b"a")).unwrap(); + assert_eq!(ent.key(), b"a"); + assert_eq!(ent.value(), b"a1"); + assert_eq!(ent.version(), 1); + + let ent = l.lower_bound(2, Bound::Included(b"a")).unwrap(); + assert_eq!(ent.key(), b"a"); + assert_eq!(ent.value(), b"a1"); + assert_eq!(ent.version(), 1); + + let ent = l.lower_bound(3, Bound::Included(b"a")).unwrap(); + assert_eq!(ent.key(), b"a"); + assert_eq!(ent.value(), b"a2"); + assert_eq!(ent.version(), 3); + + let ent = l.lower_bound(4, Bound::Included(b"a")).unwrap(); + assert_eq!(ent.key(), b"a"); + assert_eq!(ent.value(), b"a2"); + assert_eq!(ent.version(), 3); + + let ent = l.lower_bound(1, Bound::Included(b"b")).unwrap(); + assert_eq!(ent.key(), b"c"); + assert_eq!(ent.value(), b"c1"); + assert_eq!(ent.version(), 1); + + let ent = l.lower_bound(2, Bound::Included(b"b")).unwrap(); + assert_eq!(ent.key(), b"c"); + assert_eq!(ent.value(), b"c1"); + assert_eq!(ent.version(), 1); + + let ent = l.lower_bound(3, Bound::Included(b"b")).unwrap(); + assert_eq!(ent.key(), b"c"); + assert_eq!(ent.value(), b"c2"); + assert_eq!(ent.version(), 3); + + let ent = l.lower_bound(4, Bound::Included(b"b")).unwrap(); + assert_eq!(ent.key(), b"c"); + assert_eq!(ent.value(), b"c2"); + 
assert_eq!(ent.version(), 3); + + let ent = l.lower_bound(1, Bound::Included(b"c")).unwrap(); + assert_eq!(ent.key(), b"c"); + assert_eq!(ent.value(), b"c1"); + assert_eq!(ent.version(), 1); + + let ent = l.lower_bound(2, Bound::Included(b"c")).unwrap(); + assert_eq!(ent.key(), b"c"); + assert_eq!(ent.value(), b"c1"); + assert_eq!(ent.version(), 1); + + let ent = l.lower_bound(3, Bound::Included(b"c")).unwrap(); + assert_eq!(ent.key(), b"c"); + assert_eq!(ent.value(), b"c2"); + assert_eq!(ent.version(), 3); + + let ent = l.lower_bound(4, Bound::Included(b"c")).unwrap(); + assert_eq!(ent.key(), b"c"); + assert_eq!(ent.value(), b"c2"); + assert_eq!(ent.version(), 3); + + assert!(l.lower_bound(MIN_VERSION, Bound::Included(b"d")).is_none()); + assert!(l.lower_bound(1, Bound::Included(b"d")).is_none()); + assert!(l.lower_bound(2, Bound::Included(b"d")).is_none()); + assert!(l.lower_bound(3, Bound::Included(b"d")).is_none()); + assert!(l.lower_bound(4, Bound::Included(b"d")).is_none()); +} + +#[test] +fn test_ge() { + run(|| ge_in(SkipList::new(TEST_OPTIONS).unwrap())); +} + +#[test] +fn test_ge_unify() { + run(|| ge_in(SkipList::new(UNIFY_TEST_OPTIONS).unwrap())); +} + +#[test] +#[cfg(feature = "memmap")] +#[cfg_attr(miri, ignore)] +fn test_ge_map_mut() { + run(|| unsafe { + let dir = tempfile::tempdir().unwrap(); + let p = dir.path().join("test_skipmap_ge_map_mut"); + let open_options = OpenOptions::default() + .create_new(Some(ARENA_SIZE as u32)) + .read(true) + .write(true); + let map_options = MmapOptions::default(); + ge_in(SkipList::map_mut(p, Options::new(), open_options, map_options).unwrap()); + }) +} + +#[test] +#[cfg(feature = "memmap")] +fn test_ge_map_anon() { + run(|| { + let map_options = MmapOptions::default().len(ARENA_SIZE as u32); + ge_in(SkipList::map_anon(Options::new(), map_options).unwrap()); + }) +} + +#[test] +#[cfg(feature = "memmap")] +fn test_ge_map_anon_unify() { + run(|| { + let map_options = MmapOptions::default().len(ARENA_SIZE as u32); + 
ge_in(SkipList::map_anon(UNIFY_TEST_OPTIONS, map_options).unwrap()); + }) +} + +fn le_in(l: SkipMap) { + l.get_or_insert(1, b"a", b"a1", ()).unwrap(); + l.get_or_insert(3, b"a", b"a2", ()).unwrap(); + l.get_or_insert(1, b"c", b"c1", ()).unwrap(); + l.get_or_insert(3, b"c", b"c2", ()).unwrap(); + + assert!(l.upper_bound(MIN_VERSION, Bound::Included(b"a")).is_none()); + assert!(l.upper_bound(MIN_VERSION, Bound::Included(b"b")).is_none()); + assert!(l.upper_bound(MIN_VERSION, Bound::Included(b"c")).is_none()); + + let ent = l.upper_bound(1, Bound::Included(b"a")).unwrap(); + assert_eq!(ent.key(), b"a"); + assert_eq!(ent.value(), b"a1"); + assert_eq!(ent.version(), 1); + + let ent = l.upper_bound(2, Bound::Included(b"a")).unwrap(); + assert_eq!(ent.key(), b"a"); + assert_eq!(ent.value(), b"a1"); + assert_eq!(ent.version(), 1); + + let ent = l.upper_bound(3, Bound::Included(b"a")).unwrap(); + assert_eq!(ent.key(), b"a"); + assert_eq!(ent.value(), b"a2"); + assert_eq!(ent.version(), 3); + + let ent = l.upper_bound(4, Bound::Included(b"a")).unwrap(); + assert_eq!(ent.key(), b"a"); + assert_eq!(ent.value(), b"a2"); + assert_eq!(ent.version(), 3); + + let ent = l.upper_bound(1, Bound::Included(b"b")).unwrap(); + assert_eq!(ent.key(), b"a"); + assert_eq!(ent.value(), b"a1"); + assert_eq!(ent.version(), 1); + + let ent = l.upper_bound(2, Bound::Included(b"b")).unwrap(); + assert_eq!(ent.key(), b"a"); + assert_eq!(ent.value(), b"a1"); + assert_eq!(ent.version(), 1); + + let ent = l.upper_bound(3, Bound::Included(b"b")).unwrap(); + assert_eq!(ent.key(), b"a"); + assert_eq!(ent.value(), b"a2"); + assert_eq!(ent.version(), 3); + + let ent = l.upper_bound(4, Bound::Included(b"b")).unwrap(); + assert_eq!(ent.key(), b"a"); + assert_eq!(ent.value(), b"a2"); + assert_eq!(ent.version(), 3); + + let ent = l.upper_bound(1, Bound::Included(b"c")).unwrap(); + assert_eq!(ent.key(), b"c"); + assert_eq!(ent.value(), b"c1"); + assert_eq!(ent.version(), 1); + + let ent = l.upper_bound(2, 
Bound::Included(b"c")).unwrap();
  assert_eq!(ent.key(), b"c");
  assert_eq!(ent.value(), b"c1");
  assert_eq!(ent.version(), 1);

  let ent = l.upper_bound(3, Bound::Included(b"c")).unwrap();
  assert_eq!(ent.key(), b"c");
  assert_eq!(ent.value(), b"c2");
  assert_eq!(ent.version(), 3);

  let ent = l.upper_bound(4, Bound::Included(b"c")).unwrap();
  assert_eq!(ent.key(), b"c");
  assert_eq!(ent.value(), b"c2");
  assert_eq!(ent.version(), 3);

  // b"d" is past the last key, so Included(b"d") resolves to b"c"
  // at whichever version is visible.
  let ent = l.upper_bound(1, Bound::Included(b"d")).unwrap();
  assert_eq!(ent.key(), b"c");
  assert_eq!(ent.value(), b"c1");
  assert_eq!(ent.version(), 1);

  let ent = l.upper_bound(2, Bound::Included(b"d")).unwrap();
  assert_eq!(ent.key(), b"c");
  assert_eq!(ent.value(), b"c1");
  assert_eq!(ent.version(), 1);

  let ent = l.upper_bound(3, Bound::Included(b"d")).unwrap();
  assert_eq!(ent.key(), b"c");
  assert_eq!(ent.value(), b"c2");
  assert_eq!(ent.version(), 3);

  let ent = l.upper_bound(4, Bound::Included(b"d")).unwrap();
  assert_eq!(ent.key(), b"c");
  assert_eq!(ent.value(), b"c2");
  assert_eq!(ent.version(), 3);
}

#[test]
fn test_le() {
  run(|| le_in(SkipList::new(TEST_OPTIONS).unwrap()));
}

#[test]
fn test_le_unify() {
  run(|| le_in(SkipList::new(UNIFY_TEST_OPTIONS).unwrap()));
}

#[test]
#[cfg(feature = "memmap")]
#[cfg_attr(miri, ignore)]
fn test_le_map_mut() {
  run(|| unsafe {
    let dir = tempfile::tempdir().unwrap();
    let p = dir.path().join("test_skipmap_le_map_mut");
    let open_options = OpenOptions::default()
      .create_new(Some(ARENA_SIZE as u32))
      .read(true)
      .write(true);
    let map_options = MmapOptions::default();
    // Fix: this test invoked `gt_in` (copy-paste from the gt tests), so the
    // `le` (upper_bound Included) semantics were never tested on map_mut.
    le_in(SkipList::map_mut(p, Options::new(), open_options, map_options).unwrap());
  })
}

#[test]
#[cfg(feature = "memmap")]
fn test_le_map_anon() {
  run(|| {
    let map_options = MmapOptions::default().len(ARENA_SIZE as u32);
    // Fix: was `gt_in` — must exercise `le_in` on the anon mmap backend.
    le_in(SkipList::map_anon(Options::new(), map_options).unwrap());
  })
}

#[test]
#[cfg(feature = "memmap")]
fn test_le_map_anon_unify() {
  run(|| {
    let map_options = MmapOptions::default().len(ARENA_SIZE as u32);
    // Fix: was `gt_in` — must exercise `le_in` on the anon+unify mmap backend.
    le_in(SkipList::map_anon(UNIFY_TEST_OPTIONS, map_options).unwrap());
  })
}

// Exercises `upper_bound` with an Excluded bound (strictly-less-than lookup)
// across versions 0..=4 for keys written at versions 1 and 3.
fn lt_in(l: SkipMap) {
  l.get_or_insert(1, b"a", b"a1", ()).unwrap();
  l.get_or_insert(3, b"a", b"a2", ()).unwrap();
  l.get_or_insert(1, b"c", b"c1", ()).unwrap();
  l.get_or_insert(3, b"c", b"c2", ()).unwrap();

  assert!(l.upper_bound(MIN_VERSION, Bound::Excluded(b"a")).is_none());
  assert!(l.upper_bound(MIN_VERSION, Bound::Excluded(b"b")).is_none());
  assert!(l.upper_bound(MIN_VERSION, Bound::Excluded(b"c")).is_none());
  // Nothing sorts strictly before b"a", regardless of version.
  assert!(l.upper_bound(1, Bound::Excluded(b"a")).is_none());
  assert!(l.upper_bound(2, Bound::Excluded(b"a")).is_none());

  let ent = l.upper_bound(1, Bound::Excluded(b"b")).unwrap();
  assert_eq!(ent.key(), b"a");
  assert_eq!(ent.value(), b"a1");
  assert_eq!(ent.version(), 1);

  let ent = l.upper_bound(2, Bound::Excluded(b"b")).unwrap();
  assert_eq!(ent.key(), b"a");
  assert_eq!(ent.value(), b"a1");
  assert_eq!(ent.version(), 1);

  let ent = l.upper_bound(3, Bound::Excluded(b"b")).unwrap();
  assert_eq!(ent.key(), b"a");
  assert_eq!(ent.value(), b"a2");
  assert_eq!(ent.version(), 3);

  let ent = l.upper_bound(4, Bound::Excluded(b"b")).unwrap();
  assert_eq!(ent.key(), b"a");
  assert_eq!(ent.value(), b"a2");
  assert_eq!(ent.version(), 3);

  let ent = l.upper_bound(1, Bound::Excluded(b"c")).unwrap();
  assert_eq!(ent.key(), b"a");
  assert_eq!(ent.value(), b"a1");
  assert_eq!(ent.version(), 1);

  let ent = l.upper_bound(2, Bound::Excluded(b"c")).unwrap();
  assert_eq!(ent.key(), b"a");
  assert_eq!(ent.value(), b"a1");
  assert_eq!(ent.version(), 1);

  let ent = l.upper_bound(3, Bound::Excluded(b"c")).unwrap();
  assert_eq!(ent.key(), b"a");
  assert_eq!(ent.value(), b"a2");
  assert_eq!(ent.version(), 3);

  let ent = l.upper_bound(4, Bound::Excluded(b"c")).unwrap();
  assert_eq!(ent.key(), b"a");
  assert_eq!(ent.value(), b"a2");
  assert_eq!(ent.version(), 3);

  let ent = l.upper_bound(1, Bound::Excluded(b"d")).unwrap();
  assert_eq!(ent.key(), b"c");
  assert_eq!(ent.value(), b"c1");
  assert_eq!(ent.version(), 1);

  let ent = l.upper_bound(2, Bound::Excluded(b"d")).unwrap();
  assert_eq!(ent.key(), b"c");
  assert_eq!(ent.value(), b"c1");
  assert_eq!(ent.version(), 1);

  let ent = l.upper_bound(3, Bound::Excluded(b"d")).unwrap();
  assert_eq!(ent.key(), b"c");
  assert_eq!(ent.value(), b"c2");
  assert_eq!(ent.version(), 3);

  let ent = l.upper_bound(4, Bound::Excluded(b"d")).unwrap();
  assert_eq!(ent.key(), b"c");
  assert_eq!(ent.value(), b"c2");
  assert_eq!(ent.version(), 3);
}

#[test]
fn test_lt() {
  run(|| lt_in(SkipList::new(TEST_OPTIONS).unwrap()))
}

#[test]
fn test_lt_unify() {
  run(|| lt_in(SkipList::new(UNIFY_TEST_OPTIONS).unwrap()))
}

#[test]
#[cfg(feature = "memmap")]
#[cfg_attr(miri, ignore)]
fn test_lt_map_mut() {
  // Consistency fix: this was the only `*_map_mut` test not wrapped in
  // `run(|| …)`, bypassing the suite's common setup/teardown harness.
  run(|| unsafe {
    let dir = tempfile::tempdir().unwrap();
    let p = dir.path().join("test_skipmap_lt_map_mut");
    let open_options = OpenOptions::default()
      .create_new(Some(ARENA_SIZE as u32))
      .read(true)
      .write(true);
    let map_options = MmapOptions::default();
    lt_in(SkipList::map_mut(p, Options::new(), open_options, map_options).unwrap());
  })
}

#[test]
#[cfg(feature = "memmap")]
fn test_lt_map_anon() {
  run(|| {
    let map_options = MmapOptions::default().len(ARENA_SIZE as u32);
    lt_in(SkipList::map_anon(Options::new(), map_options).unwrap());
  })
}

#[test]
#[cfg(feature = "memmap")]
fn test_lt_map_anon_unify() {
  run(|| {
    let map_options = MmapOptions::default().len(ARENA_SIZE as u32);
    lt_in(SkipList::map_anon(UNIFY_TEST_OPTIONS, map_options).unwrap());
  })
}

// Inserts `n` generated key/value pairs and verifies each one reads back
// with the expected value, version, and key, and that `len()` matches.
fn test_basic_large_testcases_in(l: SkipMap) {
  let n = 1000;

  for i in 0..n {
    l.get_or_insert(MIN_VERSION, &key(i), &new_value(i), ())
      .unwrap();
  }

  for i in 0..n {
    let k = key(i);
    let ent = l.get(MIN_VERSION, &k).unwrap();
assert_eq!(new_value(i), ent.value()); + assert_eq!(ent.version(), 0); + assert_eq!(ent.key(), k); + } + + assert_eq!(n, l.len()); +} + +#[test] +fn test_basic_large_testcases() { + run(|| { + let l = SkipList::new(TEST_OPTIONS).unwrap(); + test_basic_large_testcases_in(l); + }) +} + +#[test] +fn test_basic_large_testcases_unify() { + run(|| { + let l = SkipList::new(UNIFY_TEST_OPTIONS).unwrap(); + test_basic_large_testcases_in(l); + }) +} + +#[test] +#[cfg(feature = "memmap")] +#[cfg_attr(miri, ignore)] +fn test_basic_large_testcases_map_mut() { + run(|| unsafe { + let dir = tempfile::tempdir().unwrap(); + let p = dir + .path() + .join("test_skipmap_basic_large_testcases_map_mut"); + let open_options = OpenOptions::default() + .create_new(Some(ARENA_SIZE as u32)) + .read(true) + .write(true); + let map_options = MmapOptions::default(); + let l = SkipList::map_mut(p, Options::new(), open_options, map_options).unwrap(); + test_basic_large_testcases_in(l); + }) +} + +#[test] +#[cfg(feature = "memmap")] +fn test_basic_large_testcases_map_anon() { + run(|| { + let map_options = MmapOptions::default().len(ARENA_SIZE as u32); + let l = SkipList::map_anon(Options::new(), map_options).unwrap(); + test_basic_large_testcases_in(l); + }) +} + +#[test] +#[cfg(feature = "memmap")] +fn test_basic_large_testcases_map_anon_unify() { + run(|| { + let map_options = MmapOptions::default().len(ARENA_SIZE as u32); + let l = SkipList::map_anon(UNIFY_TEST_OPTIONS, map_options).unwrap(); + test_basic_large_testcases_in(l); + }) +} + +fn iter_all_versions_next(l: SkipMap) { + const N: usize = 100; + + for i in (0..N).rev() { + l.get_or_insert(MIN_VERSION, &make_int_key(i), &make_value(i), ()) + .unwrap(); + } + + let mut it = l.iter_all_versions(MIN_VERSION); + let mut ent = it.seek_lower_bound(Bound::Unbounded).unwrap(); + for i in 0..N { + assert_eq!(ent.key(), make_int_key(i)); + assert_eq!(ent.value().unwrap(), make_value(i)); + if i != N - 1 { + ent = it.next().unwrap(); + } + } + + 
assert!(it.next().is_none()); +} + +#[test] +fn test_iter_all_versions_next() { + run(|| iter_all_versions_next(SkipList::new(TEST_OPTIONS).unwrap())); +} + +#[test] +fn test_iter_all_versions_next_unify() { + run(|| iter_all_versions_next(SkipList::new(UNIFY_TEST_OPTIONS).unwrap())); +} + +#[test] +#[cfg(feature = "memmap")] +#[cfg_attr(miri, ignore)] +fn test_iter_all_versions_next_map_mut() { + run(|| unsafe { + let dir = tempfile::tempdir().unwrap(); + let p = dir + .path() + .join("test_skipmap_iter_all_versions_next_map_mut"); + let open_options = OpenOptions::default() + .create_new(Some(ARENA_SIZE as u32)) + .read(true) + .write(true); + let map_options = MmapOptions::default(); + iter_all_versions_next( + SkipList::map_mut(p, Options::new(), open_options, map_options).unwrap(), + ); + }) +} + +#[test] +#[cfg(feature = "memmap")] +fn test_iter_all_versions_next_map_anon() { + run(|| { + let map_options = MmapOptions::default().len(ARENA_SIZE as u32); + iter_all_versions_next(SkipList::map_anon(Options::new(), map_options).unwrap()); + }) +} + +#[test] +#[cfg(feature = "memmap")] +fn test_iter_all_versions_next_map_anon_unify() { + run(|| { + let map_options = MmapOptions::default().len(ARENA_SIZE as u32); + iter_all_versions_next(SkipList::map_anon(UNIFY_TEST_OPTIONS, map_options).unwrap()); + }) +} + +fn range_next(l: SkipMap) { + const N: usize = 100; + + for i in (0..N).rev() { + l.get_or_insert(MIN_VERSION, &make_int_key(i), &make_value(i), ()) + .unwrap(); + } + + let upper = make_int_key(50); + let mut it = l.range(MIN_VERSION, ..=upper.as_slice()); + let mut ent = it.seek_lower_bound(Bound::Unbounded); + for i in 0..N { + if i <= 50 { + { + let ent = ent.unwrap(); + assert_eq!(ent.key(), make_int_key(i)); + assert_eq!(ent.value(), make_value(i)); + } + ent = it.next(); + } else { + assert!(ent.is_none()); + ent = it.next(); + } + } + + assert!(it.next().is_none()); +} + +#[test] +fn test_range_next() { + run(|| 
range_next(SkipList::new(TEST_OPTIONS).unwrap()));
}

#[test]
fn test_range_next_unify() {
  run(|| range_next(SkipList::new(UNIFY_TEST_OPTIONS).unwrap()));
}

#[test]
#[cfg(feature = "memmap")]
#[cfg_attr(miri, ignore)]
fn test_range_next_map_mut() {
  run(|| unsafe {
    let dir = tempfile::tempdir().unwrap();
    let p = dir.path().join("test_skipmap_range_next_map_mut");
    let open_options = OpenOptions::default()
      .create_new(Some(ARENA_SIZE as u32))
      .read(true)
      .write(true);
    let map_options = MmapOptions::default();
    // Fix: this test invoked `iter_all_versions_next` (copy-paste from the
    // iter tests), so bounded-range forward iteration was never tested on
    // the map_mut backend.
    range_next(SkipList::map_mut(p, Options::new(), open_options, map_options).unwrap());
  })
}

#[test]
#[cfg(feature = "memmap")]
fn test_range_next_map_anon() {
  run(|| {
    let map_options = MmapOptions::default().len(ARENA_SIZE as u32);
    // Fix: was `iter_all_versions_next` — must exercise `range_next`.
    range_next(SkipList::map_anon(Options::new(), map_options).unwrap());
  })
}

#[test]
#[cfg(feature = "memmap")]
fn test_range_next_map_anon_unify() {
  run(|| {
    let map_options = MmapOptions::default().len(ARENA_SIZE as u32);
    // Fix: was `iter_all_versions_next` — must exercise `range_next`.
    range_next(SkipList::map_anon(UNIFY_TEST_OPTIONS, map_options).unwrap());
  })
}

// Inserts N keys in ascending order, then walks the all-versions iterator
// backwards from the unbounded upper bound, checking each entry in reverse.
fn iter_all_versions_prev(l: SkipMap) {
  const N: usize = 100;

  for i in 0..N {
    l.get_or_insert(MIN_VERSION, &make_int_key(i), &make_value(i), ())
      .unwrap();
  }

  let mut it = l.iter_all_versions(MIN_VERSION);
  let mut ent = it.seek_upper_bound(Bound::Unbounded).unwrap();
  for i in (0..N).rev() {
    assert_eq!(ent.key(), make_int_key(i));
    assert_eq!(ent.value().unwrap(), make_value(i));
    if i != 0 {
      ent = it.next_back().unwrap();
    }
  }

  // The iterator must be exhausted after visiting all N entries.
  assert!(it.next_back().is_none());
}

#[test]
fn test_iter_all_versions_next_back() {
  run(|| iter_all_versions_prev(SkipList::new(TEST_OPTIONS).unwrap()))
}

#[test]
#[cfg(feature = "memmap")]
#[cfg_attr(miri, ignore)]
fn test_iter_all_versions_prev_map_mut() {
  run(|| unsafe {
    let dir = tempfile::tempdir().unwrap();
    let p = dir
      .path()
.join("test_skipmap_iter_all_versions_prev_map_mut"); + let open_options = OpenOptions::default() + .create_new(Some(ARENA_SIZE as u32)) + .read(true) + .write(true); + let map_options = MmapOptions::default(); + iter_all_versions_prev( + SkipList::map_mut(p, Options::new(), open_options, map_options).unwrap(), + ); + }) +} + +#[test] +#[cfg(feature = "memmap")] +fn test_iter_all_versions_prev_map_anon() { + run(|| { + let map_options = MmapOptions::default().len(ARENA_SIZE as u32); + iter_all_versions_prev(SkipList::map_anon(Options::new(), map_options).unwrap()); + }) +} + +#[test] +#[cfg(feature = "memmap")] +fn test_iter_all_versions_prev_map_anon_unify() { + run(|| { + let map_options = MmapOptions::default().len(ARENA_SIZE as u32); + iter_all_versions_prev(SkipList::map_anon(UNIFY_TEST_OPTIONS, map_options).unwrap()); + }) +} + +fn range_prev(l: SkipMap) { + const N: usize = 100; + + for i in 0..N { + l.get_or_insert(MIN_VERSION, &make_int_key(i), &make_value(i), ()) + .unwrap(); + } + + let lower = make_int_key(50); + let mut it = l.range(MIN_VERSION, lower.as_slice()..); + let mut ent = it.seek_upper_bound(Bound::Unbounded); + for i in (0..N).rev() { + if i >= 50 { + { + let ent = ent.unwrap(); + assert_eq!(ent.key(), make_int_key(i)); + assert_eq!(ent.value(), make_value(i)); + } + ent = it.next_back(); + } else { + assert!(ent.is_none()); + ent = it.next_back(); + } + } + + assert!(it.next_back().is_none()); +} + +#[test] +fn test_range_prev() { + run(|| range_prev(SkipList::new(TEST_OPTIONS).unwrap())); +} + +#[test] +fn test_range_prev_unify() { + run(|| range_prev(SkipList::new(UNIFY_TEST_OPTIONS).unwrap())); +} + +#[test] +#[cfg(feature = "memmap")] +#[cfg_attr(miri, ignore)] +fn test_range_prev_map_mut() { + run(|| unsafe { + let dir = tempfile::tempdir().unwrap(); + let p = dir.path().join("test_skipmap_range_prev_map_mut"); + let open_options = OpenOptions::default() + .create_new(Some(ARENA_SIZE as u32)) + .read(true) + .write(true); + let 
map_options = MmapOptions::default(); + range_prev(SkipList::map_mut(p, Options::new(), open_options, map_options).unwrap()); + }) +} + +#[test] +#[cfg(feature = "memmap")] +fn test_range_prev_map_anon() { + run(|| { + let map_options = MmapOptions::default().len(ARENA_SIZE as u32); + range_prev(SkipList::map_anon(Options::new(), map_options).unwrap()); + }) +} + +#[test] +#[cfg(feature = "memmap")] +fn test_range_prev_map_anon_unify() { + run(|| { + let map_options = MmapOptions::default().len(ARENA_SIZE as u32); + range_prev(SkipList::map_anon(UNIFY_TEST_OPTIONS, map_options).unwrap()); + }) +} + +fn iter_all_versions_seek_ge(l: SkipMap) { + const N: usize = 100; + + for i in (0..N).rev() { + let v = i * 10 + 1000; + l.get_or_insert(MIN_VERSION, &make_int_key(v), &make_value(v), ()) + .unwrap(); + } + + let mut it = l.iter_all_versions(MIN_VERSION); + let ent = it.seek_lower_bound(Bound::Included(b"")).unwrap(); + assert_eq!(ent.key(), make_int_key(1000)); + assert_eq!(ent.value().unwrap(), make_value(1000)); + + let ent = it.seek_lower_bound(Bound::Included(b"01000")).unwrap(); + assert_eq!(ent.key(), make_int_key(1000)); + assert_eq!(ent.value().unwrap(), make_value(1000)); + + let ent = it.seek_lower_bound(Bound::Included(b"01005")).unwrap(); + assert_eq!(ent.key(), make_int_key(1010)); + assert_eq!(ent.value().unwrap(), make_value(1010)); + + let ent = it.seek_lower_bound(Bound::Included(b"01010")).unwrap(); + assert_eq!(ent.key(), make_int_key(1010)); + assert_eq!(ent.value().unwrap(), make_value(1010)); + + let ent = it.seek_lower_bound(Bound::Included(b"01020")).unwrap(); + assert_eq!(ent.key(), make_int_key(1020)); + assert_eq!(ent.value().unwrap(), make_value(1020)); + + let ent = it.seek_lower_bound(Bound::Included(b"01200")).unwrap(); + assert_eq!(ent.key(), make_int_key(1200)); + assert_eq!(ent.value().unwrap(), make_value(1200)); + + let ent = it.seek_lower_bound(Bound::Included(b"01100")).unwrap(); + assert_eq!(ent.key(), make_int_key(1100)); + 
assert_eq!(ent.value().unwrap(), make_value(1100)); + + let ent = it.seek_lower_bound(Bound::Included(b"99999")); + assert!(ent.is_none()); + + l.get_or_insert(MIN_VERSION, &[], &[], ()).unwrap(); + let ent = it.seek_lower_bound(Bound::Included(b"")).unwrap(); + assert_eq!(ent.key(), &[]); + assert_eq!(ent.value().unwrap(), &[]); + + let ent = it.seek_lower_bound(Bound::Included(b"")).unwrap(); + assert_eq!(ent.key(), &[]); + assert_eq!(ent.value().unwrap(), &[]); +} + +#[test] +fn test_iter_all_versions_seek_ge() { + run(|| iter_all_versions_seek_ge(SkipList::new(TEST_OPTIONS).unwrap())); +} + +#[test] +fn test_iter_all_versions_seek_ge_unify() { + run(|| iter_all_versions_seek_ge(SkipList::new(UNIFY_TEST_OPTIONS).unwrap())); +} + +#[test] +#[cfg(feature = "memmap")] +#[cfg_attr(miri, ignore)] +fn test_iter_all_versions_seek_ge_map_mut() { + run(|| unsafe { + let dir = tempfile::tempdir().unwrap(); + let p = dir + .path() + .join("test_skipmap_iter_all_versions_seek_ge_map_mut"); + let open_options = OpenOptions::default() + .create_new(Some(ARENA_SIZE as u32)) + .read(true) + .write(true); + let map_options = MmapOptions::default(); + iter_all_versions_seek_ge( + SkipList::map_mut(p, Options::new(), open_options, map_options).unwrap(), + ); + }) +} + +#[test] +#[cfg(feature = "memmap")] +fn test_iter_all_versions_seek_ge_map_anon() { + run(|| { + let map_options = MmapOptions::default().len(ARENA_SIZE as u32); + iter_all_versions_seek_ge(SkipList::map_anon(Options::new(), map_options).unwrap()); + }) +} + +#[test] +#[cfg(feature = "memmap")] +fn test_iter_all_versions_seek_ge_map_anon_unify() { + run(|| { + let map_options = MmapOptions::default().len(ARENA_SIZE as u32); + iter_all_versions_seek_ge(SkipList::map_anon(UNIFY_TEST_OPTIONS, map_options).unwrap()); + }) +} + +fn iter_all_versions_seek_lt(l: SkipMap) { + const N: usize = 100; + + for i in (0..N).rev() { + let v = i * 10 + 1000; + l.get_or_insert(MIN_VERSION, &make_int_key(v), &make_value(v), ()) + 
.unwrap(); + } + + let mut it = l.iter_all_versions(MIN_VERSION); + assert!(it.seek_upper_bound(Bound::Excluded(b"")).is_none()); + + let ent = it.seek_upper_bound(Bound::Excluded(b"01000")); + assert!(ent.is_none()); + + let ent = it.seek_upper_bound(Bound::Excluded(b"01001")).unwrap(); + assert_eq!(ent.key(), make_int_key(1000)); + assert_eq!(ent.value().unwrap(), make_value(1000)); + + let ent = it.seek_upper_bound(Bound::Excluded(b"01991")).unwrap(); + assert_eq!(ent.key(), make_int_key(1990)); + assert_eq!(ent.value().unwrap(), make_value(1990)); + + let ent = it.seek_upper_bound(Bound::Excluded(b"99999")).unwrap(); + assert_eq!(ent.key(), make_int_key(1990)); + assert_eq!(ent.value().unwrap(), make_value(1990)); + + l.get_or_insert(MIN_VERSION, &[], &[], ()).unwrap(); + assert!(l.0.lt(MIN_VERSION, &[], false).is_none()); + + let ent = it.seek_upper_bound(Bound::Excluded(b"")); + assert!(ent.is_none()); + + let ent = it.seek_upper_bound(Bound::Excluded(b"\x01")).unwrap(); + assert_eq!(ent.key(), &[]); + assert_eq!(ent.value().unwrap(), &[]); +} + +#[test] +fn test_iter_all_versions_seek_lt() { + run(|| iter_all_versions_seek_lt(SkipList::new(TEST_OPTIONS).unwrap())) +} + +#[test] +fn test_iter_all_versions_seek_lt_unify() { + run(|| iter_all_versions_seek_lt(SkipList::new(UNIFY_TEST_OPTIONS).unwrap())) +} + +#[test] +#[cfg(feature = "memmap")] +#[cfg_attr(miri, ignore)] +fn test_iter_all_versions_seek_lt_map_mut() { + run(|| unsafe { + let dir = tempfile::tempdir().unwrap(); + let p = dir + .path() + .join("test_skipmap_iter_all_versions_seek_lt_map_mut"); + let open_options = OpenOptions::default() + .create_new(Some(ARENA_SIZE as u32)) + .read(true) + .write(true); + let map_options = MmapOptions::default(); + iter_all_versions_seek_lt( + SkipList::map_mut(p, Options::new(), open_options, map_options).unwrap(), + ); + }) +} + +#[test] +#[cfg(feature = "memmap")] +fn test_iter_all_versions_seek_lt_map_anon() { + run(|| { + let map_options = 
MmapOptions::default().len(ARENA_SIZE as u32); + iter_all_versions_seek_lt(SkipList::map_anon(Options::new(), map_options).unwrap()); + }) +} + +#[test] +#[cfg(feature = "memmap")] +fn test_iter_all_versions_seek_lt_map_anon_unify() { + run(|| { + let map_options = MmapOptions::default().len(ARENA_SIZE as u32); + iter_all_versions_seek_lt(SkipList::map_anon(UNIFY_TEST_OPTIONS, map_options).unwrap()); + }) +} + +fn range(l: SkipMap) { + for i in 1..10 { + l.get_or_insert(MIN_VERSION, &make_int_key(i), &make_value(i), ()) + .unwrap(); + } + + let k3 = make_int_key(3); + let k7 = make_int_key(7); + let mut it = l.range(MIN_VERSION, k3.as_slice()..k7.as_slice()).clone(); + assert_eq!(it.bounds(), &(k3.as_slice()..k7.as_slice())); + + for i in 3..=6 { + let k = make_int_key(i); + let ent = it.seek_lower_bound(Bound::Included(&k)).unwrap(); + assert_eq!(ent.key(), make_int_key(i)); + assert_eq!(ent.value(), make_value(i)); + } + + for i in 1..3 { + let k = make_int_key(i); + let ent = it.seek_lower_bound(Bound::Included(&k)).unwrap(); + assert_eq!(ent.key(), make_int_key(3)); + assert_eq!(ent.value(), make_value(3)); + } + + for i in 7..10 { + let k = make_int_key(i); + assert!(it.seek_lower_bound(Bound::Included(&k)).is_none()); + } + + for i in 7..10 { + let k = make_int_key(i); + let ent = it.seek_upper_bound(Bound::Included(&k)).unwrap(); + assert_eq!(ent.key(), make_int_key(6)); + assert_eq!(ent.value(), make_value(6)); + } + + let ent = it + .seek_lower_bound(Bound::Included(&make_int_key(6))) + .unwrap(); + assert_eq!(ent.key(), make_int_key(6)); + assert_eq!(ent.value(), make_value(6)); + + assert!(it.next().is_none()); + + let ent = it + .seek_upper_bound(Bound::Included(&make_int_key(6))) + .unwrap(); + assert_eq!(ent.key(), make_int_key(6)); + assert_eq!(ent.value(), make_value(6)); + + assert!(it.next().is_none()); + + for i in 4..=7 { + let k = make_int_key(i); + let ent = it.seek_upper_bound(Bound::Excluded(&k)).unwrap(); + assert_eq!(ent.key(), 
make_int_key(i - 1)); + assert_eq!(ent.value(), make_value(i - 1)); + } + + for i in 7..10 { + let k = make_int_key(i); + let ent = it.seek_upper_bound(Bound::Excluded(&k)).unwrap(); + assert_eq!(ent.key(), make_int_key(6)); + assert_eq!(ent.value(), make_value(6)); + } + + for i in 1..3 { + let k = make_int_key(i); + let ent = it.seek_lower_bound(Bound::Excluded(&k)).unwrap(); + assert_eq!(ent.key(), make_int_key(3)); + assert_eq!(ent.value(), make_value(3)); + } + + for i in 1..4 { + let k = make_int_key(i); + assert!(it.seek_upper_bound(Bound::Excluded(&k)).is_none()); + } + + let ent = it + .seek_upper_bound(Bound::Excluded(&make_int_key(4))) + .unwrap(); + assert_eq!(ent.key(), make_int_key(3)); + assert_eq!(ent.value(), make_value(3)); + + assert!(it.next_back().is_none()); +} + +#[test] +fn test_range() { + run(|| range(SkipList::new(TEST_OPTIONS).unwrap())) +} + +#[test] +fn test_range_unify() { + run(|| range(SkipList::new(UNIFY_TEST_OPTIONS).unwrap())) +} + +#[test] +#[cfg(feature = "memmap")] +#[cfg_attr(miri, ignore)] +fn test_range_map_mut() { + run(|| unsafe { + let dir = tempfile::tempdir().unwrap(); + let p = dir.path().join("test_skipmap_range_map_mut"); + let open_options = OpenOptions::default() + .create_new(Some(ARENA_SIZE as u32)) + .read(true) + .write(true); + let map_options = MmapOptions::default(); + range(SkipList::map_mut(p, Options::new(), open_options, map_options).unwrap()); + }) +} + +#[test] +#[cfg(feature = "memmap")] +fn test_range_map_anon() { + run(|| { + let map_options = MmapOptions::default().len(ARENA_SIZE as u32); + range(SkipList::map_anon(Options::new(), map_options).unwrap()); + }) +} + +#[test] +#[cfg(feature = "memmap")] +fn test_range_map_anon_unify() { + run(|| { + let map_options = MmapOptions::default().len(ARENA_SIZE as u32); + range(SkipList::map_anon(UNIFY_TEST_OPTIONS, map_options).unwrap()); + }) +} + +fn iter_latest(l: SkipMap) { + const N: usize = 100; + + for i in 0..N { + l.get_or_insert(MIN_VERSION, 
&make_int_key(i), &make_value(i), ()) + .unwrap(); + } + + for i in 50..N { + l.get_or_insert(1, &make_int_key(i), &make_value(i + 1000), ()) + .unwrap(); + } + + for i in 0..50 { + l.get_or_insert(2, &make_int_key(i), &make_value(i + 1000), ()) + .unwrap(); + } + + let mut it = l.iter(4); + let mut num = 0; + for i in 0..N { + let ent = it.next().unwrap(); + assert_eq!(ent.key(), make_int_key(i)); + assert_eq!(ent.value(), make_value(i + 1000)); + + num += 1; + } + assert_eq!(num, N); +} + +#[test] +fn test_iter_latest() { + run(|| iter_latest(SkipList::new(TEST_OPTIONS).unwrap())) +} + +#[test] +fn test_iter_latest_unify() { + run(|| iter_latest(SkipList::new(UNIFY_TEST_OPTIONS).unwrap())) +} + +#[test] +#[cfg(feature = "memmap")] +#[cfg_attr(miri, ignore)] +fn test_iter_latest_map_mut() { + run(|| unsafe { + let dir = tempfile::tempdir().unwrap(); + let p = dir.path().join("test_skipmap_iter_latest_map_mut"); + let open_options = OpenOptions::default() + .create_new(Some(ARENA_SIZE as u32)) + .read(true) + .write(true); + let map_options = MmapOptions::default(); + iter_latest(SkipList::map_mut(p, Options::new(), open_options, map_options).unwrap()); + }) +} + +#[test] +#[cfg(feature = "memmap")] +fn test_iter_latest_map_anon() { + run(|| { + let map_options = MmapOptions::default().len(ARENA_SIZE as u32); + iter_latest(SkipList::map_anon(Options::new(), map_options).unwrap()); + }) +} + +#[test] +#[cfg(feature = "memmap")] +fn test_iter_latest_map_anon_unify() { + run(|| { + let map_options = MmapOptions::default().len(ARENA_SIZE as u32); + iter_latest(SkipList::map_anon(UNIFY_TEST_OPTIONS, map_options).unwrap()); + }) +} + +fn range_latest(l: SkipMap) { + const N: usize = 100; + + for i in 0..N { + l.get_or_insert(MIN_VERSION, &make_int_key(i), &make_value(i), ()) + .unwrap(); + } + + for i in 50..N { + l.get_or_insert(1, &make_int_key(i), &make_value(i + 1000), ()) + .unwrap(); + } + + for i in 0..50 { + l.get_or_insert(2, &make_int_key(i), &make_value(i + 
1000), ()) + .unwrap(); + } + + let mut it = l.range::<[u8], _>(4, ..); + let mut num = 0; + for i in 0..N { + let ent = it.next().unwrap(); + assert_eq!(ent.key(), make_int_key(i)); + assert_eq!(ent.value(), make_value(i + 1000)); + + num += 1; + } + assert_eq!(num, N); +} + +#[test] +fn test_range_latest() { + run(|| range_latest(SkipList::new(TEST_OPTIONS).unwrap())) +} + +#[test] +fn test_range_latest_unify() { + run(|| range_latest(SkipList::new(UNIFY_TEST_OPTIONS).unwrap())) +} + +#[test] +#[cfg(feature = "memmap")] +#[cfg_attr(miri, ignore)] +fn test_range_latest_map_mut() { + run(|| unsafe { + let dir = tempfile::tempdir().unwrap(); + let p = dir.path().join("test_skipmap_range_latest_map_mut"); + let open_options = OpenOptions::default() + .create_new(Some(ARENA_SIZE as u32)) + .read(true) + .write(true); + let map_options = MmapOptions::default(); + range_latest(SkipList::map_mut(p, Options::new(), open_options, map_options).unwrap()); + }) +} + +#[test] +#[cfg(feature = "memmap")] +fn test_range_latest_map_anon() { + run(|| { + let map_options = MmapOptions::default().len(ARENA_SIZE as u32); + range_latest(SkipList::map_anon(Options::new(), map_options).unwrap()); + }) +} + +#[test] +#[cfg(feature = "memmap")] +fn test_range_latest_map_anon_unify() { + run(|| { + let map_options = MmapOptions::default().len(ARENA_SIZE as u32); + range_latest(SkipList::map_anon(UNIFY_TEST_OPTIONS, map_options).unwrap()); + }) +} + +#[test] +#[cfg(feature = "memmap")] +#[cfg_attr(miri, ignore)] +fn test_reopen_mmap() { + run(|| unsafe { + let dir = tempfile::tempdir().unwrap(); + let p = dir.path().join("reopen_skipmap"); + { + let open_options = OpenOptions::default() + .create(Some(ARENA_SIZE as u32)) + .read(true) + .write(true); + let map_options = MmapOptions::default(); + let l = SkipMap::map_mut(&p, Options::new(), open_options, map_options).unwrap(); + for i in 0..1000 { + l.get_or_insert(MIN_VERSION, &key(i), &new_value(i), ()) + .unwrap(); + } + 
l.flush().unwrap(); + } + + let open_options = OpenOptions::default().read(true); + let map_options = MmapOptions::default(); + let l = SkipMap::map(&p, Options::new(), open_options, map_options).unwrap(); + assert_eq!(1000, l.len()); + for i in 0..1000 { + let k = key(i); + let ent = l.get(MIN_VERSION, &k).unwrap(); + assert_eq!(new_value(i), ent.value()); + assert_eq!(ent.version(), 0); + assert_eq!(ent.key(), k); + } + }) +} + +#[test] +#[cfg(feature = "memmap")] +#[cfg_attr(miri, ignore)] +fn test_reopen_mmap2() { + run(|| unsafe { + use rand::seq::SliceRandom; + + let dir = tempfile::tempdir().unwrap(); + let p = dir.path().join("reopen_skipmap2"); + { + let open_options = OpenOptions::default() + .create(Some(ARENA_SIZE as u32)) + .read(true) + .write(true); + let map_options = MmapOptions::default(); + let l = + SkipMap::map_mut_with_comparator(&p, Options::new(), open_options, map_options, Ascend) + .unwrap(); + let mut data = (0..1000).collect::>(); + data.shuffle(&mut rand::thread_rng()); + for i in &data { + let i = *i; + l.get_or_insert(i as u64, &key(i), &new_value(i), ()) + .unwrap(); + } + l.flush_async().unwrap(); + assert_eq!(l.max_version(), 999); + assert_eq!(l.min_version(), 0); + + for i in data { + let k = key(i); + let ent = l.get(i as u64, &k).unwrap(); + assert_eq!(new_value(i), ent.value()); + assert_eq!(ent.version(), i as u64); + assert_eq!(ent.key(), k); + } + } + + let open_options = OpenOptions::default().read(true); + let map_options = MmapOptions::default(); + let l = + SkipMap::map_with_comparator(&p, Options::new(), open_options, map_options, Ascend).unwrap(); + assert_eq!(1000, l.len()); + let mut data = (0..1000).collect::>(); + data.shuffle(&mut rand::thread_rng()); + for i in data { + let k = key(i); + let ent = l.get(i as u64, &k).unwrap(); + assert_eq!(new_value(i), ent.value()); + assert_eq!(ent.version(), i as u64); + assert_eq!(ent.key(), k); + } + assert_eq!(l.max_version(), 999); + assert_eq!(l.min_version(), 0); + }) 
+} + +struct Person { + id: u32, + name: std::string::String, +} + +impl Person { + fn encoded_size(&self) -> usize { + 4 + self.name.len() + } +} + +fn get_or_insert_with_value(l: SkipMap) { + let alice = Person { + id: 1, + name: std::string::String::from("Alice"), + }; + + let encoded_size = alice.encoded_size() as u32; + + let vb = ValueBuilder::new(encoded_size, |val| { + assert_eq!(val.capacity(), encoded_size as usize); + assert!(val.is_empty()); + val.put_u32_le(alice.id).unwrap(); + assert_eq!(val.len(), 4); + assert_eq!(val.remaining(), encoded_size as usize - 4); + assert_eq!(&*val, alice.id.to_le_bytes()); + val[..4].copy_from_slice(&alice.id.to_be_bytes()); + assert_eq!(&*val, alice.id.to_be_bytes()); + val.put_slice(alice.name.as_bytes()).unwrap(); + assert_eq!(val.len(), encoded_size as usize); + let err = val.put_slice(&[1]).unwrap_err(); + assert_eq!( + std::string::ToString::to_string(&err), + "buffer does not have enough space (remaining 0, want 1)" + ); + Ok(()) + }); + + l.get_or_insert_with_value_builder::<()>(1, b"alice", vb, ()) + .unwrap(); +} + +#[test] +fn test_get_or_insert_with_value() { + run(|| { + get_or_insert_with_value(SkipList::new(TEST_OPTIONS).unwrap()); + }) +} + +#[test] +fn test_get_or_insert_with_value_unify() { + run(|| { + get_or_insert_with_value(SkipList::new(UNIFY_TEST_OPTIONS).unwrap()); + }) +} + +#[test] +#[cfg(feature = "memmap")] +#[cfg_attr(miri, ignore)] +fn test_get_or_insert_with_value_map_mut() { + run(|| unsafe { + let dir = tempfile::tempdir().unwrap(); + let p = dir + .path() + .join("test_skipmap_get_or_insert_with_value_map_mut"); + let open_options = OpenOptions::default() + .create_new(Some(ARENA_SIZE as u32)) + .read(true) + .write(true); + let map_options = MmapOptions::default(); + get_or_insert_with_value( + SkipList::map_mut(p, Options::new(), open_options, map_options).unwrap(), + ); + }) +} + +#[test] +#[cfg(feature = "memmap")] +fn test_get_or_insert_with_value_map_anon() { + run(|| { + let 
map_options = MmapOptions::default().len(ARENA_SIZE as u32); + get_or_insert_with_value(SkipList::map_anon(Options::new(), map_options).unwrap()); + }) +} + +#[test] +#[cfg(feature = "memmap")] +fn test_get_or_insert_with_value_map_anon_unify() { + run(|| { + let map_options = MmapOptions::default().len(ARENA_SIZE as u32); + get_or_insert_with_value(SkipList::map_anon(UNIFY_TEST_OPTIONS, map_options).unwrap()); + }) +} + +fn get_or_insert_with(l: SkipMap) { + let alice = Person { + id: 1, + name: std::string::String::from("Alice"), + }; + + let encoded_size = alice.encoded_size() as u32; + + let kb = KeyBuilder::new(5u8.into(), |key: &mut VacantBuffer| { + key.put_slice(b"alice").unwrap(); + Ok(()) + }); + + let vb = ValueBuilder::new(encoded_size, |val: &mut VacantBuffer| { + assert_eq!(val.capacity(), encoded_size as usize); + assert!(val.is_empty()); + val.put_u32_le(alice.id).unwrap(); + assert_eq!(val.len(), 4); + assert_eq!(val.remaining(), encoded_size as usize - 4); + assert_eq!(&*val, alice.id.to_le_bytes()); + val[..4].copy_from_slice(&alice.id.to_be_bytes()); + assert_eq!(&*val, alice.id.to_be_bytes()); + val.put_slice(alice.name.as_bytes()).unwrap(); + assert_eq!(val.len(), encoded_size as usize); + let err = val.put_slice(&[1]).unwrap_err(); + assert_eq!( + std::string::ToString::to_string(&err), + "buffer does not have enough space (remaining 0, want 1)" + ); + Ok(()) + }); + + l.get_or_insert_with_builders::<(), ()>(1, kb, vb, ()) + .unwrap(); +} + +#[test] +fn test_get_or_insert_with() { + run(|| get_or_insert_with(SkipList::new(TEST_OPTIONS).unwrap())) +} + +#[test] +fn test_get_or_insert_with_unify() { + run(|| get_or_insert_with(SkipList::new(UNIFY_TEST_OPTIONS).unwrap())) +} + +#[test] +#[cfg(feature = "memmap")] +#[cfg_attr(miri, ignore)] +fn test_get_or_insert_with_map_mut() { + run(|| unsafe { + let dir = tempfile::tempdir().unwrap(); + let p = dir.path().join("test_skipmap_get_or_insert_with_map_mut"); + let open_options = 
OpenOptions::default() + .create_new(Some(ARENA_SIZE as u32)) + .read(true) + .write(true); + let map_options = MmapOptions::default(); + get_or_insert_with(SkipList::map_mut(p, Options::new(), open_options, map_options).unwrap()); + }) +} + +#[test] +#[cfg(feature = "memmap")] +fn test_get_or_insert_with_map_anon() { + run(|| { + let map_options = MmapOptions::default().len(ARENA_SIZE as u32); + get_or_insert_with(SkipList::map_anon(Options::new(), map_options).unwrap()); + }) +} + +#[test] +#[cfg(feature = "memmap")] +fn test_get_or_insert_with_map_anon_unify() { + run(|| { + let map_options = MmapOptions::default().len(ARENA_SIZE as u32); + get_or_insert_with(SkipList::map_anon(UNIFY_TEST_OPTIONS, map_options).unwrap()); + }) +} + +fn insert_in(l: SkipMap) { + let k = 0u64.to_le_bytes(); + for i in 0..100 { + let v = new_value(i); + let old = l.insert(MIN_VERSION, &k, &v, ()).unwrap(); + if let Some(old) = old { + assert_eq!(old.key(), k); + assert_eq!(old.value(), new_value(i - 1)); + } + } + + let ent = l.get(MIN_VERSION, &k).unwrap(); + assert_eq!(ent.key(), k); + assert_eq!(ent.value(), new_value(99)); +} + +#[test] +fn test_insert_in() { + run(|| { + insert_in(SkipList::new(TEST_OPTIONS).unwrap()); + }) +} + +#[test] +fn test_insert_in_unify() { + run(|| { + insert_in(SkipList::new(UNIFY_TEST_OPTIONS).unwrap()); + }) +} + +#[test] +#[cfg(feature = "memmap")] +#[cfg_attr(miri, ignore)] +fn test_insert_in_map_mut() { + run(|| unsafe { + let dir = tempfile::tempdir().unwrap(); + let p = dir.path().join("test_skipmap_insert_in_map_mut"); + let open_options = OpenOptions::default() + .create_new(Some(ARENA_SIZE as u32)) + .read(true) + .write(true); + let map_options = MmapOptions::default(); + insert_in(SkipList::map_mut(p, Options::new(), open_options, map_options).unwrap()); + }) +} + +#[test] +#[cfg(feature = "memmap")] +fn test_insert_in_map_anon() { + run(|| { + let map_options = MmapOptions::default().len(ARENA_SIZE as u32); + 
insert_in(SkipList::map_anon(Options::new(), map_options).unwrap()); + }) +} + +#[test] +#[cfg(feature = "memmap")] +fn test_insert_in_map_anon_unify() { + run(|| { + let map_options = MmapOptions::default().len(ARENA_SIZE as u32); + insert_in(SkipList::map_anon(UNIFY_TEST_OPTIONS, map_options).unwrap()); + }) +} + +fn insert_with_value(l: SkipMap) { + let alice = Person { + id: 1, + name: std::string::String::from("Alice"), + }; + + let encoded_size = alice.encoded_size() as u32; + + let vb = ValueBuilder::new(encoded_size, |val| { + assert_eq!(val.capacity(), encoded_size as usize); + assert!(val.is_empty()); + val.put_u32_le(alice.id).unwrap(); + assert_eq!(val.len(), 4); + assert_eq!(val.remaining(), encoded_size as usize - 4); + assert_eq!(val, alice.id.to_le_bytes()); + val[..4].copy_from_slice(&alice.id.to_be_bytes()); + assert_eq!(val, alice.id.to_be_bytes()); + val.put_slice(alice.name.as_bytes()).unwrap(); + assert_eq!(val.len(), encoded_size as usize); + let err = val.put_slice(&[1]).unwrap_err(); + assert_eq!( + std::string::ToString::to_string(&err), + "buffer does not have enough space (remaining 0, want 1)" + ); + Ok(()) + }); + + l.insert_with_value_builder::<()>(1, b"alice", vb, ()) + .unwrap(); + + let alice2 = Person { + id: 2, + name: std::string::String::from("Alice"), + }; + + let vb = ValueBuilder::new(encoded_size, |val| { + assert_eq!(val.capacity(), encoded_size as usize); + assert!(val.is_empty()); + val.put_u32_le(alice2.id).unwrap(); + assert_eq!(val.len(), 4); + assert_eq!(val.remaining(), encoded_size as usize - 4); + assert_eq!(&*val, alice2.id.to_le_bytes()); + val[..4].copy_from_slice(&alice2.id.to_be_bytes()); + assert_eq!(&*val, alice2.id.to_be_bytes()); + val.put_slice(alice2.name.as_bytes()).unwrap(); + assert_eq!(val.len(), encoded_size as usize); + let err = val.put_slice(&[1]).unwrap_err(); + assert_eq!( + std::string::ToString::to_string(&err), + "buffer does not have enough space (remaining 0, want 1)" + ); + Ok(()) + }); 
+ + let old = l + .insert_with_value_builder::<()>(1, b"alice", vb, ()) + .unwrap() + .unwrap(); + + assert_eq!(old.key(), b"alice"); + assert!(old.value().starts_with(&alice.id.to_be_bytes())); + + let ent = l.get(1, b"alice").unwrap(); + assert_eq!(ent.key(), b"alice"); + assert!(ent.value().starts_with(&alice2.id.to_be_bytes())); +} + +#[test] +fn test_insert_with_value() { + run(|| insert_with_value(SkipList::new(TEST_OPTIONS).unwrap())); +} + +#[test] +fn test_insert_with_value_unify() { + run(|| insert_with_value(SkipList::new(UNIFY_TEST_OPTIONS).unwrap())); +} + +#[test] +#[cfg(feature = "memmap")] +#[cfg_attr(miri, ignore)] +fn test_insert_with_value_map_mut() { + run(|| unsafe { + let dir = tempfile::tempdir().unwrap(); + let p = dir + .path() + .join("test_skipmap_get_or_insert_with_value_map_mut"); + let open_options = OpenOptions::default() + .create_new(Some(ARENA_SIZE as u32)) + .read(true) + .write(true); + let map_options = MmapOptions::default(); + insert_with_value(SkipList::map_mut(p, Options::new(), open_options, map_options).unwrap()); + }) +} + +#[test] +#[cfg(feature = "memmap")] +fn test_insert_with_value_map_anon() { + run(|| { + let map_options = MmapOptions::default().len(ARENA_SIZE as u32); + insert_with_value(SkipList::map_anon(Options::new(), map_options).unwrap()); + }) +} + +#[test] +#[cfg(feature = "memmap")] +fn test_insert_with_value_map_anon_unify() { + run(|| { + let map_options = MmapOptions::default().len(ARENA_SIZE as u32); + insert_with_value(SkipList::map_anon(UNIFY_TEST_OPTIONS, map_options).unwrap()); + }) +} + +fn insert_with(l: SkipMap) { + let alice = Person { + id: 1, + name: std::string::String::from("Alice"), + }; + + let encoded_size = alice.encoded_size() as u32; + + let kb = KeyBuilder::new(5u8.into(), |key: &mut VacantBuffer| { + key.put_slice(b"alice").unwrap(); + Ok(()) + }); + + let vb = ValueBuilder::new(encoded_size as u32, |val: &mut VacantBuffer| { + assert_eq!(val.capacity(), encoded_size as usize); + 
assert!(val.is_empty()); + val.put_u32_le(alice.id).unwrap(); + assert_eq!(val.len(), 4); + assert_eq!(val.remaining(), encoded_size as usize - 4); + assert_eq!(val, alice.id.to_le_bytes()); + val[..4].copy_from_slice(&alice.id.to_be_bytes()); + assert_eq!(val, alice.id.to_be_bytes()); + val.put_slice(alice.name.as_bytes()).unwrap(); + assert_eq!(val.len(), encoded_size as usize); + let err = val.put_slice(&[1]).unwrap_err(); + assert_eq!( + std::string::ToString::to_string(&err), + "buffer does not have enough space (remaining 0, want 1)" + ); + Ok(()) + }); + + l.insert_with_builders::<(), ()>(1, kb, vb, ()).unwrap(); + + let alice2 = Person { + id: 2, + name: std::string::String::from("Alice"), + }; + + let vb = ValueBuilder::new(encoded_size, |val: &mut VacantBuffer| { + assert_eq!(val.capacity(), encoded_size as usize); + assert!(val.is_empty()); + val.put_u32_le(alice2.id).unwrap(); + assert_eq!(val.len(), 4); + assert_eq!(val.remaining(), encoded_size as usize - 4); + assert_eq!(&*val, alice2.id.to_le_bytes()); + val[..4].copy_from_slice(&alice2.id.to_be_bytes()); + assert_eq!(&*val, alice2.id.to_be_bytes()); + val.put_slice(alice2.name.as_bytes()).unwrap(); + assert_eq!(val.len(), encoded_size as usize); + let err = val.put_slice(&[1]).unwrap_err(); + assert_eq!( + std::string::ToString::to_string(&err), + "buffer does not have enough space (remaining 0, want 1)" + ); + Ok(()) + }); + let old = l + .insert_with_builders::<(), ()>(1, kb, vb, ()) + .unwrap() + .unwrap(); + + assert_eq!(old.key(), b"alice"); + assert!(old.value().starts_with(&alice.id.to_be_bytes())); + + let ent = l.get(1, b"alice").unwrap(); + assert_eq!(ent.key(), b"alice"); + assert!(ent.value().starts_with(&alice2.id.to_be_bytes())); +} + +#[test] +fn test_insert_with() { + run(|| insert_with(SkipList::new(TEST_OPTIONS).unwrap())) +} + +#[test] +fn test_insert_with_unify() { + run(|| insert_with(SkipList::new(UNIFY_TEST_OPTIONS).unwrap())) +} + +#[test] +#[cfg(feature = "memmap")] 
+#[cfg_attr(miri, ignore)] +fn test_insert_with_map_mut() { + run(|| unsafe { + let dir = tempfile::tempdir().unwrap(); + let p = dir.path().join("test_skipmap_insert_with_map_mut"); + let open_options = OpenOptions::default() + .create_new(Some(ARENA_SIZE as u32)) + .read(true) + .write(true); + let map_options = MmapOptions::default(); + insert_with(SkipList::map_mut(p, Options::new(), open_options, map_options).unwrap()); + }) +} + +#[test] +#[cfg(feature = "memmap")] +fn test_insert_with_map_anon() { + run(|| { + let map_options = MmapOptions::default().len(ARENA_SIZE as u32); + insert_with(SkipList::map_anon(Options::new(), map_options).unwrap()); + }) +} + +#[test] +#[cfg(feature = "memmap")] +fn test_insert_with_map_anon_unify() { + run(|| { + let map_options = MmapOptions::default().len(ARENA_SIZE as u32); + insert_with(SkipList::map_anon(UNIFY_TEST_OPTIONS, map_options).unwrap()); + }) +} + +fn remove(l: SkipMap) { + for i in 0..100 { + let v = new_value(i); + l.insert(MIN_VERSION, &key(i), &v, ()).unwrap(); + } + + for i in 0..100 { + let k = key(i); + // no race, remove should succeed + let old = l.remove(MIN_VERSION, &k, ()).unwrap(); + assert!(old.is_none()); + + // key already removed + let old = l.remove(MIN_VERSION, &k, ()).unwrap(); + assert!(old.is_none()); + } + + for i in 0..100 { + let k = key(i); + let ent = l.get(MIN_VERSION, &k); + assert!(ent.is_none()); + } +} + +#[test] +fn test_remove() { + run(|| remove(SkipList::new(TEST_OPTIONS).unwrap())) +} + +#[test] +fn test_remove_unify() { + run(|| remove(SkipList::new(UNIFY_TEST_OPTIONS).unwrap())) +} + +#[test] +#[cfg(feature = "memmap")] +#[cfg_attr(miri, ignore)] +fn test_remove_map_mut() { + run(|| unsafe { + let dir = tempfile::tempdir().unwrap(); + let p = dir.path().join("test_skipmap_remove_map_mut"); + let open_options = OpenOptions::default() + .create_new(Some(ARENA_SIZE as u32)) + .read(true) + .write(true); + let map_options = MmapOptions::default(); + remove(SkipList::map_mut(p, 
Options::new(), open_options, map_options).unwrap()); + }) +} + +#[test] +#[cfg(feature = "memmap")] +fn test_remove_map_anon() { + run(|| { + let map_options = MmapOptions::default().len(ARENA_SIZE as u32); + remove(SkipList::map_anon(Options::new(), map_options).unwrap()); + }) +} + +#[test] +#[cfg(feature = "memmap")] +fn test_remove_map_anon_unify() { + run(|| { + let map_options = MmapOptions::default().len(ARENA_SIZE as u32); + remove(SkipList::map_anon(UNIFY_TEST_OPTIONS, map_options).unwrap()); + }) +} + +fn remove2(l: SkipMap) { + for i in 0..100 { + let v = new_value(i); + l.insert(MIN_VERSION, &key(i), &v, ()).unwrap(); + } + + for i in 0..100 { + let k = key(i); + // not found, remove should succeed + let old = l.remove(1, &k, ()).unwrap(); + assert!(old.is_none()); + + // no-race, remove should succeed + let old = l.remove(MIN_VERSION, &k, ()).unwrap(); + assert!(old.is_none()); + } + + for i in 0..100 { + let k = key(i); + let ent = l.get(MIN_VERSION, &k); + assert!(ent.is_none()); + } +} + +#[test] +fn test_remove2() { + run(|| remove2(SkipList::new(TEST_OPTIONS).unwrap())) +} + +#[test] +fn test_remove2_unify() { + run(|| remove2(SkipList::new(UNIFY_TEST_OPTIONS).unwrap())) +} + +#[test] +#[cfg(feature = "memmap")] +#[cfg_attr(miri, ignore)] +fn test_remove2_map_mut() { + run(|| unsafe { + let dir = tempfile::tempdir().unwrap(); + let p = dir.path().join("test_skipmap_remove2_map_mut"); + let open_options = OpenOptions::default() + .create_new(Some(ARENA_SIZE as u32)) + .read(true) + .write(true); + let map_options = MmapOptions::default(); + remove2(SkipList::map_mut(p, Options::new(), open_options, map_options).unwrap()); + }) +} + +#[test] +#[cfg(feature = "memmap")] +fn test_remove2_map_anon() { + run(|| unsafe { + let map_options = MmapOptions::default().len(ARENA_SIZE as u32); + remove2(SkipList::map_anon(Options::new(), map_options).unwrap()); + }) +} + +#[test] +#[cfg(feature = "memmap")] +fn test_remove2_map_anon_unify() { + run(|| { + let 
map_options = MmapOptions::default().len(ARENA_SIZE as u32); + remove2(SkipList::map_anon(UNIFY_TEST_OPTIONS, map_options).unwrap()); + }) +} diff --git a/src/unsync/tests/map.rs b/src/unsync/tests/map.rs new file mode 100644 index 0000000..8594997 --- /dev/null +++ b/src/unsync/tests/map.rs @@ -0,0 +1,2173 @@ +use super::*; + +type SkipList = crate::unsync::map::SkipMap; + +type SkipMap = crate::unsync::map::SkipMap; + +fn empty_in(l: SkipMap) { + let mut it = l.iter(); + + assert!(it.seek_lower_bound(Bound::Unbounded).is_none()); + assert!(it.seek_upper_bound(Bound::Unbounded).is_none()); + assert!(it.seek_lower_bound(Bound::Included(b"aaa")).is_none()); + assert!(it.seek_upper_bound(Bound::Excluded(b"aaa")).is_none()); + assert!(it.seek_lower_bound(Bound::Excluded(b"aaa")).is_none()); + assert!(it.seek_upper_bound(Bound::Included(b"aaa")).is_none()); + assert!(l.first().is_none()); + assert!(l.last().is_none()); + assert!(l.get(b"aaa").is_none()); + assert!(!l.contains_key(b"aaa")); + assert!(l.allocated() > 0); + assert!(l.capacity() > 0); + assert_eq!(l.remaining(), l.capacity() - l.allocated()); +} + +#[test] +fn test_empty() { + run(|| empty_in(SkipList::new(Options::new()).unwrap())); +} + +#[test] +fn test_empty_unify() { + run(|| empty_in(SkipList::new(UNIFY_TEST_OPTIONS).unwrap())); +} + +#[test] +#[cfg(feature = "memmap")] +#[cfg_attr(miri, ignore)] +fn test_empty_map_mut() { + run(|| unsafe { + let dir = tempfile::tempdir().unwrap(); + let p = dir.path().join("test_skipmap_empty_map_mut"); + let open_options = OpenOptions::default() + .create_new(Some(1000)) + .read(true) + .write(true); + let map_options = MmapOptions::default(); + + let x = SkipList::map_mut(p, Options::new(), open_options, map_options).unwrap(); + empty_in(x); + }) +} + +#[test] +#[cfg(feature = "memmap")] +fn test_empty_map_anon() { + run(|| { + let map_options = MmapOptions::default().len(1000); + empty_in(SkipList::map_anon(Options::new(), map_options).unwrap()); + }) +} + 
+#[test] +#[cfg(feature = "memmap")] +fn test_empty_map_anon_unify() { + run(|| { + let map_options = MmapOptions::default().len(1000); + empty_in(SkipList::map_anon(TEST_OPTIONS, map_options).unwrap()); + }) +} + +fn full_in(l: impl FnOnce(usize) -> SkipMap) { + let l = l(1000); + let mut found_arena_full = false; + + for i in 0..100 { + if let Err(e) = l.get_or_insert(&make_int_key(i), &make_value(i)) { + assert!(matches!( + e, + Error::Arena(ArenaError::InsufficientSpace { .. }) + )); + found_arena_full = true; + break; + } + } + + assert!(found_arena_full); +} + +#[test] +fn test_full() { + run(|| { + full_in(|n| { + SkipList::new( + Options::new() + .with_capacity(n as u32) + .with_freelist(Freelist::None), + ) + .unwrap() + }) + }) +} + +#[test] +fn test_full_unify() { + run(|| { + full_in(|n| { + SkipList::new( + UNIFY_TEST_OPTIONS + .with_capacity(n as u32) + .with_freelist(Freelist::None), + ) + .unwrap() + }) + }) +} + +#[test] +#[cfg(feature = "memmap")] +#[cfg_attr(miri, ignore)] +fn test_full_map_mut() { + run(|| unsafe { + let dir = tempfile::tempdir().unwrap(); + let p = dir.path().join("test_skipmap_full_map_mut"); + + full_in(|n| { + let open_options = OpenOptions::default() + .create_new(Some(n as u32)) + .read(true) + .write(true); + let map_options = MmapOptions::default(); + SkipList::map_mut( + p, + Options::new().with_freelist(Freelist::None), + open_options, + map_options, + ) + .unwrap() + }); + }) +} + +#[test] +#[cfg(feature = "memmap")] +fn test_full_map_anon() { + run(|| { + full_in(|n| { + let map_options = MmapOptions::default().len(n as u32); + SkipList::map_anon(Options::new().with_freelist(Freelist::None), map_options).unwrap() + }); + }) +} + +#[test] +#[cfg(feature = "memmap")] +fn test_full_map_anon_unify() { + run(|| { + full_in(|n| { + let map_options = MmapOptions::default().len(n as u32); + SkipList::map_anon(Options::new().with_freelist(Freelist::None), map_options).unwrap() + }); + }) +} + +fn basic_in(mut l: SkipMap) { + 
// Try adding values. + l.get_or_insert(b"key1", &make_value(1)).unwrap(); + l.get_or_insert(b"key3", &make_value(3)).unwrap(); + l.get_or_insert(b"key2", &make_value(2)).unwrap(); + assert_eq!(l.comparator(), &Ascend); + + { + let mut it = l.iter(); + let ent = it.seek_lower_bound(Bound::Included(b"key1")).unwrap(); + assert_eq!(ent.key(), b"key1"); + assert_eq!(ent.value(), &make_value(1)); + + let ent = it.seek_lower_bound(Bound::Included(b"key2")).unwrap(); + assert_eq!(ent.key(), b"key2"); + assert_eq!(ent.value(), &make_value(2)); + + let ent = it.seek_lower_bound(Bound::Included(b"key3")).unwrap(); + assert_eq!(ent.key(), b"key3"); + assert_eq!(ent.value(), &make_value(3)); + } + + l.get_or_insert("a".as_bytes(), &[]).unwrap(); + l.get_or_insert("a".as_bytes(), &[]).unwrap(); + + { + let mut it = l.iter(); + let ent = it.seek_lower_bound(Bound::Included(b"a")).unwrap(); + assert_eq!(ent.key(), b"a"); + assert_eq!(ent.value(), &[]); + + let ent = it.next().unwrap(); + assert_eq!(ent.key(), b"key1"); + assert_eq!(ent.value(), &make_value(1)); + } + + l.get_or_insert("b".as_bytes(), &[]).unwrap(); + l.get_or_insert("b".as_bytes(), &[]).unwrap(); + + { + let mut it = l.iter(); + let ent = it.seek_lower_bound(Bound::Included(b"b")).unwrap(); + assert_eq!(ent.key(), b"b"); + assert_eq!(ent.value(), &[]); + + let ent = it.next().unwrap(); + assert_eq!(ent.key(), b"key1"); + assert_eq!(ent.value(), &make_value(1)); + + let ent = it.entry().unwrap(); + assert_eq!(ent.key(), b"key1"); + assert_eq!(ent.value(), &make_value(1)); + } + + l.get_or_insert(b"b", &[]).unwrap().unwrap(); + + assert!(l.get_or_insert(b"c", &[]).unwrap().is_none()); + + unsafe { + l.clear().unwrap(); + } + + let l = l.clone(); + { + let mut it = l.iter(); + assert!(it.seek_lower_bound(Bound::Unbounded).is_none()); + assert!(it.seek_upper_bound(Bound::Unbounded).is_none()); + } + assert!(l.is_empty()); + + #[cfg(feature = "memmap")] + l.flush().unwrap(); + + #[cfg(feature = "memmap")] + 
l.flush_async().unwrap(); +} + +#[test] +fn test_basic() { + run(|| basic_in(SkipList::new(TEST_OPTIONS).unwrap())) +} + +#[test] +fn test_basic_unify() { + run(|| basic_in(SkipList::new(UNIFY_TEST_OPTIONS).unwrap())) +} + +#[test] +#[cfg(feature = "memmap")] +#[cfg_attr(miri, ignore)] +fn test_basic_map_mut() { + run(|| unsafe { + let dir = tempfile::tempdir().unwrap(); + let p = dir.path().join("test_skipmap_basic_map_mut"); + let open_options = OpenOptions::default() + .create_new(Some(ARENA_SIZE as u32)) + .read(true) + .write(true); + let map_options = MmapOptions::default(); + basic_in(SkipList::map_mut(p, Options::new(), open_options, map_options).unwrap()); + }) +} + +#[test] +#[cfg(feature = "memmap")] +fn test_basic_map_anon() { + run(|| { + let map_options = MmapOptions::default().len(ARENA_SIZE as u32); + basic_in(SkipList::map_anon(Options::new(), map_options).unwrap()); + }) +} + +#[test] +#[cfg(feature = "memmap")] +fn test_basic_map_anon_unify() { + run(|| { + let map_options = MmapOptions::default().len(ARENA_SIZE as u32); + basic_in(SkipList::map_anon(TEST_OPTIONS, map_options).unwrap()); + }) +} + +fn ordering() { + let l = SkipList::with_comparator(TEST_OPTIONS, Descend).unwrap(); + + l.get_or_insert(b"a1", b"a1").unwrap(); + l.get_or_insert(b"a2", b"a2").unwrap(); + l.get_or_insert(b"a3", b"a3").unwrap(); + + let mut it = l.iter(); + for i in (1..=3).rev() { + let ent = it.next().unwrap(); + assert_eq!(ent.key(), format!("a{i}").as_bytes()); + assert_eq!(ent.value(), format!("a{i}").as_bytes()); + } +} + +#[test] +fn test_ordering() { + run(ordering); +} + +fn get(l: SkipMap) { + l.get_or_insert(b"a", b"a1").unwrap(); + l.get_or_insert(b"a", b"a2").unwrap(); + l.get_or_insert(b"c", b"c1").unwrap(); + l.get_or_insert(b"c", b"c2").unwrap(); + + let ent = l.get(b"a").unwrap(); + assert_eq!(ent.key(), b"a"); + assert_eq!(ent.value(), b"a1"); + + let ent = l.get(b"a").unwrap(); + assert_eq!(ent.key(), b"a"); + assert_eq!(ent.value(), b"a1"); + + let 
ent = l.get(b"a").unwrap(); + assert_eq!(ent.key(), b"a"); + assert_eq!(ent.value(), b"a1"); + + l.insert(b"a", b"a2").unwrap(); + + let ent = l.get(b"a").unwrap(); + assert_eq!(ent.key(), b"a"); + assert_eq!(ent.value(), b"a2"); + + assert!(l.get(b"b").is_none()); + assert!(l.get(b"b").is_none()); + assert!(l.get(b"b").is_none()); + assert!(l.get(b"b").is_none()); + assert!(l.get(b"b").is_none()); + + let ent = l.get(b"c").unwrap(); + assert_eq!(ent.key(), b"c"); + assert_eq!(ent.value(), b"c1"); + + let ent = l.get(b"c").unwrap(); + assert_eq!(ent.key(), b"c"); + assert_eq!(ent.value(), b"c1"); + + assert!(l.get(b"d").is_none()); +} + +#[test] +fn test_get() { + run(|| get(SkipList::new(TEST_OPTIONS).unwrap())); +} + +#[test] +fn test_get_unify() { + run(|| get(SkipList::new(UNIFY_TEST_OPTIONS).unwrap())); +} + +#[test] +#[cfg(feature = "memmap")] +#[cfg_attr(miri, ignore)] +fn test_get_map_mut() { + run(|| unsafe { + let dir = tempfile::tempdir().unwrap(); + let p = dir.path().join("test_skipmap_get_map_mut"); + let open_options = OpenOptions::default() + .create_new(Some(ARENA_SIZE as u32)) + .read(true) + .write(true); + let map_options = MmapOptions::default(); + get(SkipList::map_mut(p, Options::new(), open_options, map_options).unwrap()); + }) +} + +#[test] +#[cfg(feature = "memmap")] +fn test_get_map_anon() { + run(|| { + let map_options = MmapOptions::default().len(ARENA_SIZE as u32); + get(SkipList::map_anon(Options::new(), map_options).unwrap()); + }) +} + +#[test] +#[cfg(feature = "memmap")] +fn test_get_map_anon_unify() { + run(|| { + let map_options = MmapOptions::default().len(ARENA_SIZE as u32); + get(SkipList::map_anon(UNIFY_TEST_OPTIONS, map_options).unwrap()); + }) +} + +fn gt_in(l: SkipMap) { + l.get_or_insert(b"a", b"a1").unwrap(); + l.get_or_insert(b"a", b"a2").unwrap(); + l.get_or_insert(b"c", b"c1").unwrap(); + l.get_or_insert(b"c", b"c2").unwrap(); + l.get_or_insert(b"c", b"c3").unwrap(); + + 
assert!(l.lower_bound(Bound::Excluded(b"a")).is_some()); + assert!(l.lower_bound(Bound::Excluded(b"b")).is_some()); + assert!(l.lower_bound(Bound::Excluded(b"c")).is_none()); + + let ent = l.lower_bound(Bound::Excluded(b"")).unwrap(); + assert_eq!(ent.key(), b"a"); + assert_eq!(ent.value(), b"a1"); + + let ent = l.lower_bound(Bound::Excluded(b"")).unwrap(); + assert_eq!(ent.key(), b"a"); + assert_eq!(ent.value(), b"a1"); + + let ent = l.lower_bound(Bound::Excluded(b"a")).unwrap(); + assert_eq!(ent.key(), b"c"); + assert_eq!(ent.value(), b"c1"); + + let ent = l.lower_bound(Bound::Excluded(b"b")).unwrap(); + assert_eq!(ent.key(), b"c"); + assert_eq!(ent.value(), b"c1"); + + let ent = l.lower_bound(Bound::Excluded(b"b")).unwrap(); + assert_eq!(ent.key(), b"c"); + assert_eq!(ent.value(), b"c1"); + + assert!(l.lower_bound(Bound::Excluded(b"c")).is_none()); +} + +#[test] +fn test_gt() { + run(|| gt_in(SkipList::new(TEST_OPTIONS).unwrap())); +} + +#[test] +fn test_gt_unify() { + run(|| gt_in(SkipList::new(UNIFY_TEST_OPTIONS).unwrap())); +} + +#[test] +#[cfg(feature = "memmap")] +#[cfg_attr(miri, ignore)] +fn test_gt_map_mut() { + run(|| unsafe { + let dir = tempfile::tempdir().unwrap(); + let p = dir.path().join("test_skipmap_gt_map_mut"); + let open_options = OpenOptions::default() + .create_new(Some(ARENA_SIZE as u32)) + .read(true) + .write(true); + let map_options = MmapOptions::default(); + gt_in(SkipList::map_mut(p, Options::new(), open_options, map_options).unwrap()); + }) +} + +#[test] +#[cfg(feature = "memmap")] +fn test_gt_map_anon() { + run(|| { + let map_options = MmapOptions::default().len(ARENA_SIZE as u32); + gt_in(SkipList::map_anon(Options::new(), map_options).unwrap()); + }) +} + +#[test] +#[cfg(feature = "memmap")] +fn test_gt_map_anon_unify() { + run(|| { + let map_options = MmapOptions::default().len(ARENA_SIZE as u32); + gt_in(SkipList::map_anon(UNIFY_TEST_OPTIONS, map_options).unwrap()); + }) +} + +fn ge_in(l: SkipMap) { + l.get_or_insert(b"a", 
b"a1").unwrap(); + l.get_or_insert(b"a", b"a2").unwrap(); + l.get_or_insert(b"c", b"c1").unwrap(); + l.get_or_insert(b"c", b"c2").unwrap(); + + assert!(l.lower_bound(Bound::Included(b"a")).is_some()); + assert!(l.lower_bound(Bound::Included(b"b")).is_some()); + assert!(l.lower_bound(Bound::Included(b"c")).is_some()); + + let ent = l.lower_bound(Bound::Included(b"a")).unwrap(); + assert_eq!(ent.key(), b"a"); + assert_eq!(ent.value(), b"a1"); + + let ent = l.lower_bound(Bound::Included(b"a")).unwrap(); + assert_eq!(ent.key(), b"a"); + assert_eq!(ent.value(), b"a1"); + + l.insert(b"a", b"a2").unwrap(); + + let ent = l.lower_bound(Bound::Included(b"a")).unwrap(); + assert_eq!(ent.key(), b"a"); + assert_eq!(ent.value(), b"a2"); + + let ent = l.lower_bound(Bound::Included(b"b")).unwrap(); + assert_eq!(ent.key(), b"c"); + assert_eq!(ent.value(), b"c1"); + + let ent = l.lower_bound(Bound::Included(b"b")).unwrap(); + assert_eq!(ent.key(), b"c"); + assert_eq!(ent.value(), b"c1"); + + let ent = l.lower_bound(Bound::Included(b"c")).unwrap(); + assert_eq!(ent.key(), b"c"); + assert_eq!(ent.value(), b"c1"); + + let ent = l.lower_bound(Bound::Included(b"c")).unwrap(); + assert_eq!(ent.key(), b"c"); + assert_eq!(ent.value(), b"c1"); + + assert!(l.lower_bound(Bound::Included(b"d")).is_none()); +} + +#[test] +fn test_ge() { + run(|| ge_in(SkipList::new(TEST_OPTIONS).unwrap())); +} + +#[test] +fn test_ge_unify() { + run(|| ge_in(SkipList::new(UNIFY_TEST_OPTIONS).unwrap())); +} + +#[test] +#[cfg(feature = "memmap")] +#[cfg_attr(miri, ignore)] +fn test_ge_map_mut() { + run(|| unsafe { + let dir = tempfile::tempdir().unwrap(); + let p = dir.path().join("test_skipmap_ge_map_mut"); + let open_options = OpenOptions::default() + .create_new(Some(ARENA_SIZE as u32)) + .read(true) + .write(true); + let map_options = MmapOptions::default(); + ge_in(SkipList::map_mut(p, Options::new(), open_options, map_options).unwrap()); + }) +} + +#[test] +#[cfg(feature = "memmap")] +fn test_ge_map_anon() { 
+ run(|| { + let map_options = MmapOptions::default().len(ARENA_SIZE as u32); + ge_in(SkipList::map_anon(Options::new(), map_options).unwrap()); + }) +} + +#[test] +#[cfg(feature = "memmap")] +fn test_ge_map_anon_unify() { + run(|| { + let map_options = MmapOptions::default().len(ARENA_SIZE as u32); + ge_in(SkipList::map_anon(UNIFY_TEST_OPTIONS, map_options).unwrap()); + }) +} + +fn le_in(l: SkipMap) { + l.get_or_insert(b"a", b"a1").unwrap(); + l.get_or_insert(b"a", b"a2").unwrap(); + l.get_or_insert(b"c", b"c1").unwrap(); + l.get_or_insert(b"c", b"c2").unwrap(); + + assert!(l.upper_bound(Bound::Included(b"a")).is_some()); + assert!(l.upper_bound(Bound::Included(b"b")).is_some()); + assert!(l.upper_bound(Bound::Included(b"c")).is_some()); + + let ent = l.upper_bound(Bound::Included(b"a")).unwrap(); + assert_eq!(ent.key(), b"a"); + assert_eq!(ent.value(), b"a1"); + + let ent = l.upper_bound(Bound::Included(b"b")).unwrap(); + assert_eq!(ent.key(), b"a"); + assert_eq!(ent.value(), b"a1"); + + let ent = l.upper_bound(Bound::Included(b"c")).unwrap(); + assert_eq!(ent.key(), b"c"); + assert_eq!(ent.value(), b"c1"); + + let ent = l.upper_bound(Bound::Included(b"d")).unwrap(); + assert_eq!(ent.key(), b"c"); + assert_eq!(ent.value(), b"c1"); +} + +#[test] +fn test_le() { + run(|| le_in(SkipList::new(TEST_OPTIONS).unwrap())); +} + +#[test] +fn test_le_unify() { + run(|| le_in(SkipList::new(UNIFY_TEST_OPTIONS).unwrap())); +} + +#[test] +#[cfg(feature = "memmap")] +#[cfg_attr(miri, ignore)] +fn test_le_map_mut() { + run(|| unsafe { + let dir = tempfile::tempdir().unwrap(); + let p = dir.path().join("test_skipmap_le_map_mut"); + let open_options = OpenOptions::default() + .create_new(Some(ARENA_SIZE as u32)) + .read(true) + .write(true); + let map_options = MmapOptions::default(); + le_in(SkipList::map_mut(p, Options::new(), open_options, map_options).unwrap()); + }) +} + +#[test] +#[cfg(feature = "memmap")] +fn test_le_map_anon() { + run(|| { + let map_options =
MmapOptions::default().len(ARENA_SIZE as u32); + le_in(SkipList::map_anon(Options::new(), map_options).unwrap()); + }) +} + +#[test] +#[cfg(feature = "memmap")] +fn test_le_map_anon_unify() { + run(|| { + let map_options = MmapOptions::default().len(ARENA_SIZE as u32); + le_in(SkipList::map_anon(UNIFY_TEST_OPTIONS, map_options).unwrap()); + }) +} + +fn lt_in(l: SkipMap) { + l.get_or_insert(b"a", b"a1").unwrap(); + l.get_or_insert(b"a", b"a2").unwrap(); + l.get_or_insert(b"c", b"c1").unwrap(); + l.get_or_insert(b"c", b"c2").unwrap(); + + assert!(l.upper_bound(Bound::Excluded(b"a")).is_none()); + assert!(l.upper_bound(Bound::Excluded(b"b")).is_some()); + assert!(l.upper_bound(Bound::Excluded(b"c")).is_some()); + + let ent = l.upper_bound(Bound::Excluded(b"b")).unwrap(); + assert_eq!(ent.key(), b"a"); + assert_eq!(ent.value(), b"a1"); + + let ent = l.upper_bound(Bound::Excluded(b"c")).unwrap(); + assert_eq!(ent.key(), b"a"); + assert_eq!(ent.value(), b"a1"); + + let ent = l.upper_bound(Bound::Excluded(b"d")).unwrap(); + assert_eq!(ent.key(), b"c"); + assert_eq!(ent.value(), b"c1"); +} + +#[test] +fn test_lt() { + run(|| lt_in(SkipList::new(TEST_OPTIONS).unwrap())) +} + +#[test] +fn test_lt_unify() { + run(|| lt_in(SkipList::new(UNIFY_TEST_OPTIONS).unwrap())) +} + +#[test] +#[cfg(feature = "memmap")] +#[cfg_attr(miri, ignore)] +fn test_lt_map_mut() { + run(|| unsafe { + let dir = tempfile::tempdir().unwrap(); + let p = dir.path().join("test_skipmap_lt_map_mut"); + let open_options = OpenOptions::default() + .create_new(Some(ARENA_SIZE as u32)) + .read(true) + .write(true); + let map_options = MmapOptions::default(); + lt_in(SkipList::map_mut(p, Options::new(), open_options, map_options).unwrap()); + }) +} + +#[test] +#[cfg(feature = "memmap")] +fn test_lt_map_anon() { + run(|| { + let map_options = MmapOptions::default().len(ARENA_SIZE as u32); + lt_in(SkipList::map_anon(Options::new(), map_options).unwrap()); + }) +} + +#[test] +#[cfg(feature = "memmap")] +fn
test_lt_map_anon_unify() { + run(|| { + let map_options = MmapOptions::default().len(ARENA_SIZE as u32); + lt_in(SkipList::map_anon(UNIFY_TEST_OPTIONS, map_options).unwrap()); + }) +} + +fn test_basic_large_testcases_in(l: SkipMap) { + let n = 1000; + + for i in 0..n { + l.get_or_insert(&key(i), &new_value(i)).unwrap(); + } + + for i in 0..n { + let k = key(i); + let ent = l.get(&k).unwrap(); + assert_eq!(new_value(i), ent.value()); + + assert_eq!(ent.key(), k); + } + + assert_eq!(n, l.len()); +} + +#[test] +fn test_basic_large_testcases() { + run(|| { + let l = SkipList::new(TEST_OPTIONS).unwrap(); + test_basic_large_testcases_in(l); + }) +} + +#[test] +fn test_basic_large_testcases_unify() { + run(|| { + let l = SkipList::new(UNIFY_TEST_OPTIONS).unwrap(); + test_basic_large_testcases_in(l); + }) +} + +#[test] +#[cfg(feature = "memmap")] +#[cfg_attr(miri, ignore)] +fn test_basic_large_testcases_map_mut() { + run(|| unsafe { + let dir = tempfile::tempdir().unwrap(); + let p = dir + .path() + .join("test_skipmap_basic_large_testcases_map_mut"); + let open_options = OpenOptions::default() + .create_new(Some(ARENA_SIZE as u32)) + .read(true) + .write(true); + let map_options = MmapOptions::default(); + let l = SkipList::map_mut(p, Options::new(), open_options, map_options).unwrap(); + test_basic_large_testcases_in(l); + }) +} + +#[test] +#[cfg(feature = "memmap")] +fn test_basic_large_testcases_map_anon() { + run(|| { + let map_options = MmapOptions::default().len(ARENA_SIZE as u32); + let l = SkipList::map_anon(Options::new(), map_options).unwrap(); + test_basic_large_testcases_in(l); + }) +} + +#[test] +#[cfg(feature = "memmap")] +fn test_basic_large_testcases_map_anon_unify() { + run(|| { + let map_options = MmapOptions::default().len(ARENA_SIZE as u32); + let l = SkipList::map_anon(UNIFY_TEST_OPTIONS, map_options).unwrap(); + test_basic_large_testcases_in(l); + }) +} + +fn iter_all_versions_next(l: SkipMap) { + const N: usize = 100; + + for i in (0..N).rev() { + 
l.get_or_insert(&make_int_key(i), &make_value(i)).unwrap(); + } + + let mut it = l.iter(); + let mut ent = it.seek_lower_bound(Bound::Unbounded).unwrap(); + for i in 0..N { + assert_eq!(ent.key(), make_int_key(i)); + assert_eq!(ent.value(), make_value(i)); + if i != N - 1 { + ent = it.next().unwrap(); + } + } + + assert!(it.next().is_none()); +} + +#[test] +fn test_iter_all_versions_next() { + run(|| iter_all_versions_next(SkipList::new(TEST_OPTIONS).unwrap())); +} + +#[test] +fn test_iter_all_versions_next_unify() { + run(|| iter_all_versions_next(SkipList::new(UNIFY_TEST_OPTIONS).unwrap())); +} + +#[test] +#[cfg(feature = "memmap")] +#[cfg_attr(miri, ignore)] +fn test_iter_all_versions_next_map_mut() { + run(|| unsafe { + let dir = tempfile::tempdir().unwrap(); + let p = dir + .path() + .join("test_skipmap_iter_all_versions_next_map_mut"); + let open_options = OpenOptions::default() + .create_new(Some(ARENA_SIZE as u32)) + .read(true) + .write(true); + let map_options = MmapOptions::default(); + iter_all_versions_next( + SkipList::map_mut(p, Options::new(), open_options, map_options).unwrap(), + ); + }) +} + +#[test] +#[cfg(feature = "memmap")] +fn test_iter_all_versions_next_map_anon() { + run(|| { + let map_options = MmapOptions::default().len(ARENA_SIZE as u32); + iter_all_versions_next(SkipList::map_anon(Options::new(), map_options).unwrap()); + }) +} + +#[test] +#[cfg(feature = "memmap")] +fn test_iter_all_versions_next_map_anon_unify() { + run(|| { + let map_options = MmapOptions::default().len(ARENA_SIZE as u32); + iter_all_versions_next(SkipList::map_anon(UNIFY_TEST_OPTIONS, map_options).unwrap()); + }) +} + +fn range_next(l: SkipMap) { + const N: usize = 100; + + for i in (0..N).rev() { + l.get_or_insert(&make_int_key(i), &make_value(i)).unwrap(); + } + + let upper = make_int_key(50); + let mut it = l.range(..=upper.as_slice()); + let mut ent = it.seek_lower_bound(Bound::Unbounded); + for i in 0..N { + if i <= 50 { + { + let ent = ent.unwrap(); + 
assert_eq!(ent.key(), make_int_key(i)); + assert_eq!(ent.value(), make_value(i)); + } + ent = it.next(); + } else { + assert!(ent.is_none()); + ent = it.next(); + } + } + + assert!(it.next().is_none()); +} + +#[test] +fn test_range_next() { + run(|| range_next(SkipList::new(TEST_OPTIONS).unwrap())); +} + +#[test] +fn test_range_next_unify() { + run(|| range_next(SkipList::new(UNIFY_TEST_OPTIONS).unwrap())); +} + +#[test] +#[cfg(feature = "memmap")] +#[cfg_attr(miri, ignore)] +fn test_range_next_map_mut() { + run(|| unsafe { + let dir = tempfile::tempdir().unwrap(); + let p = dir.path().join("test_skipmap_range_next_map_mut"); + let open_options = OpenOptions::default() + .create_new(Some(ARENA_SIZE as u32)) + .read(true) + .write(true); + let map_options = MmapOptions::default(); + range_next( + SkipList::map_mut(p, Options::new(), open_options, map_options).unwrap(), + ); + }) +} + +#[test] +#[cfg(feature = "memmap")] +fn test_range_next_map_anon() { + run(|| { + let map_options = MmapOptions::default().len(ARENA_SIZE as u32); + range_next(SkipList::map_anon(Options::new(), map_options).unwrap()); + }) +} + +#[test] +#[cfg(feature = "memmap")] +fn test_range_next_map_anon_unify() { + run(|| { + let map_options = MmapOptions::default().len(ARENA_SIZE as u32); + range_next(SkipList::map_anon(UNIFY_TEST_OPTIONS, map_options).unwrap()); + }) +} + +fn iter_all_versions_prev(l: SkipMap) { + const N: usize = 100; + + for i in 0..N { + l.get_or_insert(&make_int_key(i), &make_value(i)).unwrap(); + } + + let mut it = l.iter(); + let mut ent = it.seek_upper_bound(Bound::Unbounded).unwrap(); + for i in (0..N).rev() { + assert_eq!(ent.key(), make_int_key(i)); + assert_eq!(ent.value(), make_value(i)); + if i != 0 { + ent = it.next_back().unwrap(); + } + } + + assert!(it.next_back().is_none()); +} + +#[test] +fn test_iter_all_versions_next_back() { + run(|| iter_all_versions_prev(SkipList::new(TEST_OPTIONS).unwrap())) +} + +#[test]
+#[cfg(feature = "memmap")] +#[cfg_attr(miri, ignore)] +fn test_iter_all_versions_prev_map_mut() { + run(|| unsafe { + let dir = tempfile::tempdir().unwrap(); + let p = dir + .path() + .join("test_skipmap_iter_all_versions_prev_map_mut"); + let open_options = OpenOptions::default() + .create_new(Some(ARENA_SIZE as u32)) + .read(true) + .write(true); + let map_options = MmapOptions::default(); + iter_all_versions_prev( + SkipList::map_mut(p, Options::new(), open_options, map_options).unwrap(), + ); + }) +} + +#[test] +#[cfg(feature = "memmap")] +fn test_iter_all_versions_prev_map_anon() { + run(|| { + let map_options = MmapOptions::default().len(ARENA_SIZE as u32); + iter_all_versions_prev(SkipList::map_anon(Options::new(), map_options).unwrap()); + }) +} + +#[test] +#[cfg(feature = "memmap")] +fn test_iter_all_versions_prev_map_anon_unify() { + run(|| { + let map_options = MmapOptions::default().len(ARENA_SIZE as u32); + iter_all_versions_prev(SkipList::map_anon(UNIFY_TEST_OPTIONS, map_options).unwrap()); + }) +} + +fn range_prev(l: SkipMap) { + const N: usize = 100; + + for i in 0..N { + l.get_or_insert(&make_int_key(i), &make_value(i)).unwrap(); + } + + let lower = make_int_key(50); + let mut it = l.range(lower.as_slice()..); + let mut ent = it.seek_upper_bound(Bound::Unbounded); + for i in (0..N).rev() { + if i >= 50 { + { + let ent = ent.unwrap(); + assert_eq!(ent.key(), make_int_key(i)); + assert_eq!(ent.value(), make_value(i)); + } + ent = it.next_back(); + } else { + assert!(ent.is_none()); + ent = it.next_back(); + } + } + + assert!(it.next_back().is_none()); +} + +#[test] +fn test_range_prev() { + run(|| range_prev(SkipList::new(TEST_OPTIONS).unwrap())); +} + +#[test] +fn test_range_prev_unify() { + run(|| range_prev(SkipList::new(UNIFY_TEST_OPTIONS).unwrap())); +} + +#[test] +#[cfg(feature = "memmap")] +#[cfg_attr(miri, ignore)] +fn test_range_prev_map_mut() { + run(|| unsafe { + let dir = tempfile::tempdir().unwrap(); + let p = 
dir.path().join("test_skipmap_range_prev_map_mut"); + let open_options = OpenOptions::default() + .create_new(Some(ARENA_SIZE as u32)) + .read(true) + .write(true); + let map_options = MmapOptions::default(); + range_prev(SkipList::map_mut(p, Options::new(), open_options, map_options).unwrap()); + }) +} + +#[test] +#[cfg(feature = "memmap")] +fn test_range_prev_map_anon() { + run(|| { + let map_options = MmapOptions::default().len(ARENA_SIZE as u32); + range_prev(SkipList::map_anon(Options::new(), map_options).unwrap()); + }) +} + +#[test] +#[cfg(feature = "memmap")] +fn test_range_prev_map_anon_unify() { + run(|| { + let map_options = MmapOptions::default().len(ARENA_SIZE as u32); + range_prev(SkipList::map_anon(UNIFY_TEST_OPTIONS, map_options).unwrap()); + }) +} + +fn iter_all_versions_seek_ge(l: SkipMap) { + const N: usize = 100; + + for i in (0..N).rev() { + let v = i * 10 + 1000; + l.get_or_insert(&make_int_key(v), &make_value(v)).unwrap(); + } + + let mut it = l.iter(); + let ent = it.seek_lower_bound(Bound::Included(b"")).unwrap(); + assert_eq!(ent.key(), make_int_key(1000)); + assert_eq!(ent.value(), make_value(1000)); + + let ent = it.seek_lower_bound(Bound::Included(b"01000")).unwrap(); + assert_eq!(ent.key(), make_int_key(1000)); + assert_eq!(ent.value(), make_value(1000)); + + let ent = it.seek_lower_bound(Bound::Included(b"01005")).unwrap(); + assert_eq!(ent.key(), make_int_key(1010)); + assert_eq!(ent.value(), make_value(1010)); + + let ent = it.seek_lower_bound(Bound::Included(b"01010")).unwrap(); + assert_eq!(ent.key(), make_int_key(1010)); + assert_eq!(ent.value(), make_value(1010)); + + let ent = it.seek_lower_bound(Bound::Included(b"01020")).unwrap(); + assert_eq!(ent.key(), make_int_key(1020)); + assert_eq!(ent.value(), make_value(1020)); + + let ent = it.seek_lower_bound(Bound::Included(b"01200")).unwrap(); + assert_eq!(ent.key(), make_int_key(1200)); + assert_eq!(ent.value(), make_value(1200)); + + let ent = 
it.seek_lower_bound(Bound::Included(b"01100")).unwrap(); + assert_eq!(ent.key(), make_int_key(1100)); + assert_eq!(ent.value(), make_value(1100)); + + let ent = it.seek_lower_bound(Bound::Included(b"99999")); + assert!(ent.is_none()); + + l.get_or_insert(&[], &[]).unwrap(); + let ent = it.seek_lower_bound(Bound::Included(b"")).unwrap(); + assert_eq!(ent.key(), &[]); + assert_eq!(ent.value(), &[]); + + let ent = it.seek_lower_bound(Bound::Included(b"")).unwrap(); + assert_eq!(ent.key(), &[]); + assert_eq!(ent.value(), &[]); +} + +#[test] +fn test_iter_all_versions_seek_ge() { + run(|| iter_all_versions_seek_ge(SkipList::new(TEST_OPTIONS).unwrap())); +} + +#[test] +fn test_iter_all_versions_seek_ge_unify() { + run(|| iter_all_versions_seek_ge(SkipList::new(UNIFY_TEST_OPTIONS).unwrap())); +} + +#[test] +#[cfg(feature = "memmap")] +#[cfg_attr(miri, ignore)] +fn test_iter_all_versions_seek_ge_map_mut() { + run(|| unsafe { + let dir = tempfile::tempdir().unwrap(); + let p = dir + .path() + .join("test_skipmap_iter_all_versions_seek_ge_map_mut"); + let open_options = OpenOptions::default() + .create_new(Some(ARENA_SIZE as u32)) + .read(true) + .write(true); + let map_options = MmapOptions::default(); + iter_all_versions_seek_ge( + SkipList::map_mut(p, Options::new(), open_options, map_options).unwrap(), + ); + }) +} + +#[test] +#[cfg(feature = "memmap")] +fn test_iter_all_versions_seek_ge_map_anon() { + run(|| { + let map_options = MmapOptions::default().len(ARENA_SIZE as u32); + iter_all_versions_seek_ge(SkipList::map_anon(Options::new(), map_options).unwrap()); + }) +} + +#[test] +#[cfg(feature = "memmap")] +fn test_iter_all_versions_seek_ge_map_anon_unify() { + run(|| { + let map_options = MmapOptions::default().len(ARENA_SIZE as u32); + iter_all_versions_seek_ge(SkipList::map_anon(UNIFY_TEST_OPTIONS, map_options).unwrap()); + }) +} + +fn iter_all_versions_seek_lt(l: SkipMap) { + const N: usize = 100; + + for i in (0..N).rev() { + let v = i * 10 + 1000; + 
l.get_or_insert(&make_int_key(v), &make_value(v)).unwrap(); + } + + let mut it = l.iter(); + assert!(it.seek_upper_bound(Bound::Excluded(b"")).is_none()); + + let ent = it.seek_upper_bound(Bound::Excluded(b"01000")); + assert!(ent.is_none()); + + let ent = it.seek_upper_bound(Bound::Excluded(b"01001")).unwrap(); + assert_eq!(ent.key(), make_int_key(1000)); + assert_eq!(ent.value(), make_value(1000)); + + let ent = it.seek_upper_bound(Bound::Excluded(b"01991")).unwrap(); + assert_eq!(ent.key(), make_int_key(1990)); + assert_eq!(ent.value(), make_value(1990)); + + let ent = it.seek_upper_bound(Bound::Excluded(b"99999")).unwrap(); + assert_eq!(ent.key(), make_int_key(1990)); + assert_eq!(ent.value(), make_value(1990)); + + l.get_or_insert(&[], &[]).unwrap(); + + let ent = it.seek_upper_bound(Bound::Excluded(b"")); + assert!(ent.is_none()); + + let ent = it.seek_upper_bound(Bound::Excluded(b"\x01")).unwrap(); + assert_eq!(ent.key(), &[]); + assert_eq!(ent.value(), &[]); +} + +#[test] +fn test_iter_all_versions_seek_lt() { + run(|| iter_all_versions_seek_lt(SkipList::new(TEST_OPTIONS).unwrap())) +} + +#[test] +fn test_iter_all_versions_seek_lt_unify() { + run(|| iter_all_versions_seek_lt(SkipList::new(UNIFY_TEST_OPTIONS).unwrap())) +} + +#[test] +#[cfg(feature = "memmap")] +#[cfg_attr(miri, ignore)] +fn test_iter_all_versions_seek_lt_map_mut() { + run(|| unsafe { + let dir = tempfile::tempdir().unwrap(); + let p = dir + .path() + .join("test_skipmap_iter_all_versions_seek_lt_map_mut"); + let open_options = OpenOptions::default() + .create_new(Some(ARENA_SIZE as u32)) + .read(true) + .write(true); + let map_options = MmapOptions::default(); + iter_all_versions_seek_lt( + SkipList::map_mut(p, Options::new(), open_options, map_options).unwrap(), + ); + }) +} + +#[test] +#[cfg(feature = "memmap")] +fn test_iter_all_versions_seek_lt_map_anon() { + run(|| { + let map_options = MmapOptions::default().len(ARENA_SIZE as u32); + 
iter_all_versions_seek_lt(SkipList::map_anon(Options::new(), map_options).unwrap()); + }) +} + +#[test] +#[cfg(feature = "memmap")] +fn test_iter_all_versions_seek_lt_map_anon_unify() { + run(|| { + let map_options = MmapOptions::default().len(ARENA_SIZE as u32); + iter_all_versions_seek_lt(SkipList::map_anon(UNIFY_TEST_OPTIONS, map_options).unwrap()); + }) +} + +fn range(l: SkipMap) { + for i in 1..10 { + l.get_or_insert(&make_int_key(i), &make_value(i)).unwrap(); + } + + let k3 = make_int_key(3); + let k7 = make_int_key(7); + let mut it = l.range(k3.as_slice()..k7.as_slice()).clone(); + assert_eq!(it.bounds(), &(k3.as_slice()..k7.as_slice())); + + for i in 3..=6 { + let k = make_int_key(i); + let ent = it.seek_lower_bound(Bound::Included(&k)).unwrap(); + assert_eq!(ent.key(), make_int_key(i)); + assert_eq!(ent.value(), make_value(i)); + } + + for i in 1..3 { + let k = make_int_key(i); + let ent = it.seek_lower_bound(Bound::Included(&k)).unwrap(); + assert_eq!(ent.key(), make_int_key(3)); + assert_eq!(ent.value(), make_value(3)); + } + + for i in 7..10 { + let k = make_int_key(i); + assert!(it.seek_lower_bound(Bound::Included(&k)).is_none()); + } + + for i in 7..10 { + let k = make_int_key(i); + let ent = it.seek_upper_bound(Bound::Included(&k)).unwrap(); + assert_eq!(ent.key(), make_int_key(6)); + assert_eq!(ent.value(), make_value(6)); + } + + let ent = it + .seek_lower_bound(Bound::Included(&make_int_key(6))) + .unwrap(); + assert_eq!(ent.key(), make_int_key(6)); + assert_eq!(ent.value(), make_value(6)); + + assert!(it.next().is_none()); + + let ent = it + .seek_upper_bound(Bound::Included(&make_int_key(6))) + .unwrap(); + assert_eq!(ent.key(), make_int_key(6)); + assert_eq!(ent.value(), make_value(6)); + + assert!(it.next().is_none()); + + for i in 4..=7 { + let k = make_int_key(i); + let ent = it.seek_upper_bound(Bound::Excluded(&k)).unwrap(); + assert_eq!(ent.key(), make_int_key(i - 1)); + assert_eq!(ent.value(), make_value(i - 1)); + } + + for i in 7..10 { 
+ let k = make_int_key(i); + let ent = it.seek_upper_bound(Bound::Excluded(&k)).unwrap(); + assert_eq!(ent.key(), make_int_key(6)); + assert_eq!(ent.value(), make_value(6)); + } + + for i in 1..3 { + let k = make_int_key(i); + let ent = it.seek_lower_bound(Bound::Excluded(&k)).unwrap(); + assert_eq!(ent.key(), make_int_key(3)); + assert_eq!(ent.value(), make_value(3)); + } + + for i in 1..4 { + let k = make_int_key(i); + assert!(it.seek_upper_bound(Bound::Excluded(&k)).is_none()); + } + + let ent = it + .seek_upper_bound(Bound::Excluded(&make_int_key(4))) + .unwrap(); + assert_eq!(ent.key(), make_int_key(3)); + assert_eq!(ent.value(), make_value(3)); + + assert!(it.next_back().is_none()); +} + +#[test] +fn test_range() { + run(|| range(SkipList::new(TEST_OPTIONS).unwrap())) +} + +#[test] +fn test_range_unify() { + run(|| range(SkipList::new(UNIFY_TEST_OPTIONS).unwrap())) +} + +#[test] +#[cfg(feature = "memmap")] +#[cfg_attr(miri, ignore)] +fn test_range_map_mut() { + run(|| unsafe { + let dir = tempfile::tempdir().unwrap(); + let p = dir.path().join("test_skipmap_range_map_mut"); + let open_options = OpenOptions::default() + .create_new(Some(ARENA_SIZE as u32)) + .read(true) + .write(true); + let map_options = MmapOptions::default(); + range(SkipList::map_mut(p, Options::new(), open_options, map_options).unwrap()); + }) +} + +#[test] +#[cfg(feature = "memmap")] +fn test_range_map_anon() { + run(|| { + let map_options = MmapOptions::default().len(ARENA_SIZE as u32); + range(SkipList::map_anon(Options::new(), map_options).unwrap()); + }) +} + +#[test] +#[cfg(feature = "memmap")] +fn test_range_map_anon_unify() { + run(|| { + let map_options = MmapOptions::default().len(ARENA_SIZE as u32); + range(SkipList::map_anon(UNIFY_TEST_OPTIONS, map_options).unwrap()); + }) +} + +fn iter_latest(l: SkipMap) { + const N: usize = 100; + + for i in 0..N { + l.get_or_insert(&make_int_key(i), &make_value(i)).unwrap(); + } + + for i in 50..N { + l.insert(&make_int_key(i), 
&make_value(i + 1000)).unwrap(); + } + + for i in 0..50 { + l.insert(&make_int_key(i), &make_value(i + 1000)).unwrap(); + } + + let mut it = l.iter(); + let mut num = 0; + for i in 0..N { + let ent = it.next().unwrap(); + assert_eq!(ent.key(), make_int_key(i)); + assert_eq!(ent.value(), make_value(i + 1000)); + + num += 1; + } + assert_eq!(num, N); +} + +#[test] +fn test_iter_latest() { + run(|| iter_latest(SkipList::new(TEST_OPTIONS).unwrap())) +} + +#[test] +fn test_iter_latest_unify() { + run(|| iter_latest(SkipList::new(UNIFY_TEST_OPTIONS).unwrap())) +} + +#[test] +#[cfg(feature = "memmap")] +#[cfg_attr(miri, ignore)] +fn test_iter_latest_map_mut() { + run(|| unsafe { + let dir = tempfile::tempdir().unwrap(); + let p = dir.path().join("test_skipmap_iter_latest_map_mut"); + let open_options = OpenOptions::default() + .create_new(Some(ARENA_SIZE as u32)) + .read(true) + .write(true); + let map_options = MmapOptions::default(); + iter_latest(SkipList::map_mut(p, Options::new(), open_options, map_options).unwrap()); + }) +} + +#[test] +#[cfg(feature = "memmap")] +fn test_iter_latest_map_anon() { + run(|| { + let map_options = MmapOptions::default().len(ARENA_SIZE as u32); + iter_latest(SkipList::map_anon(Options::new(), map_options).unwrap()); + }) +} + +#[test] +#[cfg(feature = "memmap")] +fn test_iter_latest_map_anon_unify() { + run(|| { + let map_options = MmapOptions::default().len(ARENA_SIZE as u32); + iter_latest(SkipList::map_anon(UNIFY_TEST_OPTIONS, map_options).unwrap()); + }) +} + +fn range_latest(l: SkipMap) { + const N: usize = 100; + + for i in 0..N { + l.get_or_insert(&make_int_key(i), &make_value(i)).unwrap(); + } + + for i in 50..N { + l.insert(&make_int_key(i), &make_value(i + 1000)).unwrap(); + } + + for i in 0..50 { + l.insert(&make_int_key(i), &make_value(i + 1000)).unwrap(); + } + + let mut it = l.range::<[u8], _>(..); + let mut num = 0; + for i in 0..N { + let ent = it.next().unwrap(); + assert_eq!(ent.key(), make_int_key(i)); + 
assert_eq!(ent.value(), make_value(i + 1000)); + + num += 1; + } + assert_eq!(num, N); +} + +#[test] +fn test_range_latest() { + run(|| range_latest(SkipList::new(TEST_OPTIONS).unwrap())) +} + +#[test] +fn test_range_latest_unify() { + run(|| range_latest(SkipList::new(UNIFY_TEST_OPTIONS).unwrap())) +} + +#[test] +#[cfg(feature = "memmap")] +#[cfg_attr(miri, ignore)] +fn test_range_latest_map_mut() { + run(|| unsafe { + let dir = tempfile::tempdir().unwrap(); + let p = dir.path().join("test_skipmap_range_latest_map_mut"); + let open_options = OpenOptions::default() + .create_new(Some(ARENA_SIZE as u32)) + .read(true) + .write(true); + let map_options = MmapOptions::default(); + range_latest(SkipList::map_mut(p, Options::new(), open_options, map_options).unwrap()); + }) +} + +#[test] +#[cfg(feature = "memmap")] +fn test_range_latest_map_anon() { + run(|| { + let map_options = MmapOptions::default().len(ARENA_SIZE as u32); + range_latest(SkipList::map_anon(Options::new(), map_options).unwrap()); + }) +} + +#[test] +#[cfg(feature = "memmap")] +fn test_range_latest_map_anon_unify() { + run(|| { + let map_options = MmapOptions::default().len(ARENA_SIZE as u32); + range_latest(SkipList::map_anon(UNIFY_TEST_OPTIONS, map_options).unwrap()); + }) +} + +#[test] +#[cfg(feature = "memmap")] +#[cfg_attr(miri, ignore)] +fn test_reopen_mmap() { + run(|| unsafe { + let dir = tempfile::tempdir().unwrap(); + let p = dir.path().join("reopen_skipmap"); + { + let open_options = OpenOptions::default() + .create(Some(ARENA_SIZE as u32)) + .read(true) + .write(true); + let map_options = MmapOptions::default(); + let l = SkipMap::map_mut(&p, Options::new(), open_options, map_options).unwrap(); + for i in 0..1000 { + l.get_or_insert(&key(i), &new_value(i)).unwrap(); + } + l.flush().unwrap(); + } + + let open_options = OpenOptions::default().read(true); + let map_options = MmapOptions::default(); + let l = SkipMap::map(&p, Options::new(), open_options, map_options).unwrap(); + 
assert_eq!(1000, l.len()); + for i in 0..1000 { + let k = key(i); + let ent = l.get(&k).unwrap(); + assert_eq!(new_value(i), ent.value()); + + assert_eq!(ent.key(), k); + } + }) +} + +#[test] +#[cfg(feature = "memmap")] +#[cfg_attr(miri, ignore)] +fn test_reopen_mmap2() { + run(|| unsafe { + use rand::seq::SliceRandom; + + let dir = tempfile::tempdir().unwrap(); + let p = dir.path().join("reopen_skipmap2"); + { + let open_options = OpenOptions::default() + .create(Some(ARENA_SIZE as u32)) + .read(true) + .write(true); + let map_options = MmapOptions::default(); + let l = + SkipMap::map_mut_with_comparator(&p, Options::new(), open_options, map_options, Ascend) + .unwrap(); + let mut data = (0..1000).collect::>(); + data.shuffle(&mut rand::thread_rng()); + for i in &data { + let i = *i; + l.get_or_insert(&key(i), &new_value(i)).unwrap(); + } + l.flush_async().unwrap(); + + for i in data { + let k = key(i); + let ent = l.get(&k).unwrap(); + assert_eq!(new_value(i), ent.value()); + assert_eq!(ent.key(), k); + } + } + + let open_options = OpenOptions::default().read(true); + let map_options = MmapOptions::default(); + let l = + SkipMap::map_with_comparator(&p, Options::new(), open_options, map_options, Ascend).unwrap(); + assert_eq!(1000, l.len()); + let mut data = (0..1000).collect::>(); + data.shuffle(&mut rand::thread_rng()); + for i in data { + let k = key(i); + let ent = l.get(&k).unwrap(); + assert_eq!(new_value(i), ent.value()); + assert_eq!(ent.key(), k); + } + }) +} + +struct Person { + id: u32, + name: std::string::String, +} + +impl Person { + fn encoded_size(&self) -> usize { + 4 + self.name.len() + } +} + +fn get_or_insert_with_value(l: SkipMap) { + let alice = Person { + id: 1, + name: std::string::String::from("Alice"), + }; + + let encoded_size = alice.encoded_size() as u32; + + let vb = ValueBuilder::new(encoded_size, |val| { + assert_eq!(val.capacity(), encoded_size as usize); + assert!(val.is_empty()); + val.put_u32_le(alice.id).unwrap(); + 
assert_eq!(val.len(), 4); + assert_eq!(val.remaining(), encoded_size as usize - 4); + assert_eq!(&*val, alice.id.to_le_bytes()); + val[..4].copy_from_slice(&alice.id.to_be_bytes()); + assert_eq!(&*val, alice.id.to_be_bytes()); + val.put_slice(alice.name.as_bytes()).unwrap(); + assert_eq!(val.len(), encoded_size as usize); + let err = val.put_slice(&[1]).unwrap_err(); + assert_eq!( + std::string::ToString::to_string(&err), + "buffer does not have enough space (remaining 0, want 1)" + ); + Ok(()) + }); + + l.get_or_insert_with_value_builder::<()>(b"alice", vb) + .unwrap(); +} + +#[test] +fn test_get_or_insert_with_value() { + run(|| { + get_or_insert_with_value(SkipList::new(TEST_OPTIONS).unwrap()); + }) +} + +#[test] +fn test_get_or_insert_with_value_unify() { + run(|| { + get_or_insert_with_value(SkipList::new(UNIFY_TEST_OPTIONS).unwrap()); + }) +} + +#[test] +#[cfg(feature = "memmap")] +#[cfg_attr(miri, ignore)] +fn test_get_or_insert_with_value_map_mut() { + run(|| unsafe { + let dir = tempfile::tempdir().unwrap(); + let p = dir + .path() + .join("test_skipmap_get_or_insert_with_value_map_mut"); + let open_options = OpenOptions::default() + .create_new(Some(ARENA_SIZE as u32)) + .read(true) + .write(true); + let map_options = MmapOptions::default(); + get_or_insert_with_value( + SkipList::map_mut(p, Options::new(), open_options, map_options).unwrap(), + ); + }) +} + +#[test] +#[cfg(feature = "memmap")] +fn test_get_or_insert_with_value_map_anon() { + run(|| { + let map_options = MmapOptions::default().len(ARENA_SIZE as u32); + get_or_insert_with_value(SkipList::map_anon(Options::new(), map_options).unwrap()); + }) +} + +#[test] +#[cfg(feature = "memmap")] +fn test_get_or_insert_with_value_map_anon_unify() { + run(|| { + let map_options = MmapOptions::default().len(ARENA_SIZE as u32); + get_or_insert_with_value(SkipList::map_anon(UNIFY_TEST_OPTIONS, map_options).unwrap()); + }) +} + +fn get_or_insert_with(l: SkipMap) { + let alice = Person { + id: 1, + name: 
std::string::String::from("Alice"), + }; + + let encoded_size = alice.encoded_size() as u32; + + let kb = KeyBuilder::new(5u8.into(), |key: &mut VacantBuffer| { + key.put_slice(b"alice").unwrap(); + Ok(()) + }); + + let vb = ValueBuilder::new(encoded_size, |val: &mut VacantBuffer| { + assert_eq!(val.capacity(), encoded_size as usize); + assert!(val.is_empty()); + val.put_u32_le(alice.id).unwrap(); + assert_eq!(val.len(), 4); + assert_eq!(val.remaining(), encoded_size as usize - 4); + assert_eq!(&*val, alice.id.to_le_bytes()); + val[..4].copy_from_slice(&alice.id.to_be_bytes()); + assert_eq!(&*val, alice.id.to_be_bytes()); + val.put_slice(alice.name.as_bytes()).unwrap(); + assert_eq!(val.len(), encoded_size as usize); + let err = val.put_slice(&[1]).unwrap_err(); + assert_eq!( + std::string::ToString::to_string(&err), + "buffer does not have enough space (remaining 0, want 1)" + ); + Ok(()) + }); + + l.get_or_insert_with_builders::<(), ()>(kb, vb).unwrap(); +} + +#[test] +fn test_get_or_insert_with() { + run(|| get_or_insert_with(SkipList::new(TEST_OPTIONS).unwrap())) +} + +#[test] +fn test_get_or_insert_with_unify() { + run(|| get_or_insert_with(SkipList::new(UNIFY_TEST_OPTIONS).unwrap())) +} + +#[test] +#[cfg(feature = "memmap")] +#[cfg_attr(miri, ignore)] +fn test_get_or_insert_with_map_mut() { + run(|| unsafe { + let dir = tempfile::tempdir().unwrap(); + let p = dir.path().join("test_skipmap_get_or_insert_with_map_mut"); + let open_options = OpenOptions::default() + .create_new(Some(ARENA_SIZE as u32)) + .read(true) + .write(true); + let map_options = MmapOptions::default(); + get_or_insert_with(SkipList::map_mut(p, Options::new(), open_options, map_options).unwrap()); + }) +} + +#[test] +#[cfg(feature = "memmap")] +fn test_get_or_insert_with_map_anon() { + run(|| { + let map_options = MmapOptions::default().len(ARENA_SIZE as u32); + get_or_insert_with(SkipList::map_anon(Options::new(), map_options).unwrap()); + }) +} + +#[test] +#[cfg(feature = "memmap")] +fn 
test_get_or_insert_with_map_anon_unify() { + run(|| { + let map_options = MmapOptions::default().len(ARENA_SIZE as u32); + get_or_insert_with(SkipList::map_anon(UNIFY_TEST_OPTIONS, map_options).unwrap()); + }) +} + +fn insert_in(l: SkipMap) { + let k = 0u64.to_le_bytes(); + for i in 0..100 { + let v = new_value(i); + let old = l.insert(&k, &v).unwrap(); + if let Some(old) = old { + assert_eq!(old.key(), k); + assert_eq!(old.value(), new_value(i - 1)); + } + } + + let ent = l.get(&k).unwrap(); + assert_eq!(ent.key(), k); + assert_eq!(ent.value(), new_value(99)); +} + +#[test] +fn test_insert_in() { + run(|| { + insert_in(SkipList::new(TEST_OPTIONS).unwrap()); + }) +} + +#[test] +fn test_insert_in_unify() { + run(|| { + insert_in(SkipList::new(UNIFY_TEST_OPTIONS).unwrap()); + }) +} + +#[test] +#[cfg(feature = "memmap")] +#[cfg_attr(miri, ignore)] +fn test_insert_in_map_mut() { + run(|| unsafe { + let dir = tempfile::tempdir().unwrap(); + let p = dir.path().join("test_skipmap_insert_in_map_mut"); + let open_options = OpenOptions::default() + .create_new(Some(ARENA_SIZE as u32)) + .read(true) + .write(true); + let map_options = MmapOptions::default(); + insert_in(SkipList::map_mut(p, Options::new(), open_options, map_options).unwrap()); + }) +} + +#[test] +#[cfg(feature = "memmap")] +fn test_insert_in_map_anon() { + run(|| { + let map_options = MmapOptions::default().len(ARENA_SIZE as u32); + insert_in(SkipList::map_anon(Options::new(), map_options).unwrap()); + }) +} + +#[test] +#[cfg(feature = "memmap")] +fn test_insert_in_map_anon_unify() { + run(|| { + let map_options = MmapOptions::default().len(ARENA_SIZE as u32); + insert_in(SkipList::map_anon(UNIFY_TEST_OPTIONS, map_options).unwrap()); + }) +} + +fn insert_with_value(l: SkipMap) { + let alice = Person { + id: 1, + name: std::string::String::from("Alice"), + }; + + let encoded_size = alice.encoded_size() as u32; + + let vb = ValueBuilder::new(encoded_size, |val| { + assert_eq!(val.capacity(), encoded_size as 
usize); + assert!(val.is_empty()); + val.put_u32_le(alice.id).unwrap(); + assert_eq!(val.len(), 4); + assert_eq!(val.remaining(), encoded_size as usize - 4); + assert_eq!(val, alice.id.to_le_bytes()); + val[..4].copy_from_slice(&alice.id.to_be_bytes()); + assert_eq!(val, alice.id.to_be_bytes()); + val.put_slice(alice.name.as_bytes()).unwrap(); + assert_eq!(val.len(), encoded_size as usize); + let err = val.put_slice(&[1]).unwrap_err(); + assert_eq!( + std::string::ToString::to_string(&err), + "buffer does not have enough space (remaining 0, want 1)" + ); + Ok(()) + }); + + l.insert_with_value_builder::<()>(b"alice", vb).unwrap(); + + let alice2 = Person { + id: 2, + name: std::string::String::from("Alice"), + }; + + let vb = ValueBuilder::new(encoded_size, |val| { + assert_eq!(val.capacity(), encoded_size as usize); + assert!(val.is_empty()); + val.put_u32_le(alice2.id).unwrap(); + assert_eq!(val.len(), 4); + assert_eq!(val.remaining(), encoded_size as usize - 4); + assert_eq!(&*val, alice2.id.to_le_bytes()); + val[..4].copy_from_slice(&alice2.id.to_be_bytes()); + assert_eq!(&*val, alice2.id.to_be_bytes()); + val.put_slice(alice2.name.as_bytes()).unwrap(); + assert_eq!(val.len(), encoded_size as usize); + let err = val.put_slice(&[1]).unwrap_err(); + assert_eq!( + std::string::ToString::to_string(&err), + "buffer does not have enough space (remaining 0, want 1)" + ); + Ok(()) + }); + + let old = l + .insert_with_value_builder::<()>(b"alice", vb) + .unwrap() + .unwrap(); + + assert_eq!(old.key(), b"alice"); + assert!(old.value().starts_with(&alice.id.to_be_bytes())); + + let ent = l.get(b"alice").unwrap(); + assert_eq!(ent.key(), b"alice"); + assert!(ent.value().starts_with(&alice2.id.to_be_bytes())); +} + +#[test] +fn test_insert_with_value() { + run(|| insert_with_value(SkipList::new(TEST_OPTIONS).unwrap())); +} + +#[test] +fn test_insert_with_value_unify() { + run(|| insert_with_value(SkipList::new(UNIFY_TEST_OPTIONS).unwrap())); +} + +#[test] +#[cfg(feature = 
"memmap")] +#[cfg_attr(miri, ignore)] +fn test_insert_with_value_map_mut() { + run(|| unsafe { + let dir = tempfile::tempdir().unwrap(); + let p = dir + .path() + .join("test_skipmap_get_or_insert_with_value_map_mut"); + let open_options = OpenOptions::default() + .create_new(Some(ARENA_SIZE as u32)) + .read(true) + .write(true); + let map_options = MmapOptions::default(); + insert_with_value(SkipList::map_mut(p, Options::new(), open_options, map_options).unwrap()); + }) +} + +#[test] +#[cfg(feature = "memmap")] +fn test_insert_with_value_map_anon() { + run(|| { + let map_options = MmapOptions::default().len(ARENA_SIZE as u32); + insert_with_value(SkipList::map_anon(Options::new(), map_options).unwrap()); + }) +} + +#[test] +#[cfg(feature = "memmap")] +fn test_insert_with_value_map_anon_unify() { + run(|| { + let map_options = MmapOptions::default().len(ARENA_SIZE as u32); + insert_with_value(SkipList::map_anon(UNIFY_TEST_OPTIONS, map_options).unwrap()); + }) +} + +fn insert_with(l: SkipMap) { + let alice = Person { + id: 1, + name: std::string::String::from("Alice"), + }; + + let encoded_size = alice.encoded_size() as u32; + + let kb = KeyBuilder::new(5u8.into(), |key: &mut VacantBuffer| { + key.put_slice(b"alice").unwrap(); + Ok(()) + }); + + let vb = ValueBuilder::new(encoded_size as u32, |val: &mut VacantBuffer| { + assert_eq!(val.capacity(), encoded_size as usize); + assert!(val.is_empty()); + val.put_u32_le(alice.id).unwrap(); + assert_eq!(val.len(), 4); + assert_eq!(val.remaining(), encoded_size as usize - 4); + assert_eq!(val, alice.id.to_le_bytes()); + val[..4].copy_from_slice(&alice.id.to_be_bytes()); + assert_eq!(val, alice.id.to_be_bytes()); + val.put_slice(alice.name.as_bytes()).unwrap(); + assert_eq!(val.len(), encoded_size as usize); + let err = val.put_slice(&[1]).unwrap_err(); + assert_eq!( + std::string::ToString::to_string(&err), + "buffer does not have enough space (remaining 0, want 1)" + ); + Ok(()) + }); + + l.insert_with_builders::<(), 
()>(kb, vb).unwrap(); + + let alice2 = Person { + id: 2, + name: std::string::String::from("Alice"), + }; + + let vb = ValueBuilder::new(encoded_size, |val: &mut VacantBuffer| { + assert_eq!(val.capacity(), encoded_size as usize); + assert!(val.is_empty()); + val.put_u32_le(alice2.id).unwrap(); + assert_eq!(val.len(), 4); + assert_eq!(val.remaining(), encoded_size as usize - 4); + assert_eq!(&*val, alice2.id.to_le_bytes()); + val[..4].copy_from_slice(&alice2.id.to_be_bytes()); + assert_eq!(&*val, alice2.id.to_be_bytes()); + val.put_slice(alice2.name.as_bytes()).unwrap(); + assert_eq!(val.len(), encoded_size as usize); + let err = val.put_slice(&[1]).unwrap_err(); + assert_eq!( + std::string::ToString::to_string(&err), + "buffer does not have enough space (remaining 0, want 1)" + ); + Ok(()) + }); + let old = l.insert_with_builders::<(), ()>(kb, vb).unwrap().unwrap(); + + assert_eq!(old.key(), b"alice"); + assert!(old.value().starts_with(&alice.id.to_be_bytes())); + + let ent = l.get(b"alice").unwrap(); + assert_eq!(ent.key(), b"alice"); + assert!(ent.value().starts_with(&alice2.id.to_be_bytes())); +} + +#[test] +fn test_insert_with() { + run(|| insert_with(SkipList::new(TEST_OPTIONS).unwrap())) +} + +#[test] +fn test_insert_with_unify() { + run(|| insert_with(SkipList::new(UNIFY_TEST_OPTIONS).unwrap())) +} + +#[test] +#[cfg(feature = "memmap")] +#[cfg_attr(miri, ignore)] +fn test_insert_with_map_mut() { + run(|| unsafe { + let dir = tempfile::tempdir().unwrap(); + let p = dir.path().join("test_skipmap_insert_with_map_mut"); + let open_options = OpenOptions::default() + .create_new(Some(ARENA_SIZE as u32)) + .read(true) + .write(true); + let map_options = MmapOptions::default(); + insert_with(SkipList::map_mut(p, Options::new(), open_options, map_options).unwrap()); + }) +} + +#[test] +#[cfg(feature = "memmap")] +fn test_insert_with_map_anon() { + run(|| { + let map_options = MmapOptions::default().len(ARENA_SIZE as u32); + 
insert_with(SkipList::map_anon(Options::new(), map_options).unwrap()); + }) +} + +#[test] +#[cfg(feature = "memmap")] +fn test_insert_with_map_anon_unify() { + run(|| { + let map_options = MmapOptions::default().len(ARENA_SIZE as u32); + insert_with(SkipList::map_anon(UNIFY_TEST_OPTIONS, map_options).unwrap()); + }) +} + +fn remove(l: SkipMap) { + for i in 0..100 { + let v = new_value(i); + l.insert(&key(i), &v).unwrap(); + } + + for i in 0..100 { + let k = key(i); + // no race, remove should succeed + let old = l.remove(&k).unwrap(); + assert!(old.is_none()); + + // key already removed + let old = l.remove(&k).unwrap(); + assert!(old.is_none()); + } + + for i in 0..100 { + let k = key(i); + let ent = l.get(&k); + assert!(ent.is_none()); + } +} + +#[test] +fn test_remove() { + run(|| remove(SkipList::new(TEST_OPTIONS).unwrap())) +} + +#[test] +fn test_remove_unify() { + run(|| remove(SkipList::new(UNIFY_TEST_OPTIONS).unwrap())) +} + +#[test] +#[cfg(feature = "memmap")] +#[cfg_attr(miri, ignore)] +fn test_remove_map_mut() { + run(|| unsafe { + let dir = tempfile::tempdir().unwrap(); + let p = dir.path().join("test_skipmap_remove_map_mut"); + let open_options = OpenOptions::default() + .create_new(Some(ARENA_SIZE as u32)) + .read(true) + .write(true); + let map_options = MmapOptions::default(); + remove(SkipList::map_mut(p, Options::new(), open_options, map_options).unwrap()); + }) +} + +#[test] +#[cfg(feature = "memmap")] +fn test_remove_map_anon() { + run(|| { + let map_options = MmapOptions::default().len(ARENA_SIZE as u32); + remove(SkipList::map_anon(Options::new(), map_options).unwrap()); + }) +} + +#[test] +#[cfg(feature = "memmap")] +fn test_remove_map_anon_unify() { + run(|| { + let map_options = MmapOptions::default().len(ARENA_SIZE as u32); + remove(SkipList::map_anon(UNIFY_TEST_OPTIONS, map_options).unwrap()); + }) +} + +fn remove2(l: SkipMap) { + for i in 0..100 { + let v = new_value(i); + l.insert(&key(i), &v).unwrap(); + } + + for i in 0..100 { + let k 
= key(i); + // not found, remove should succeed + let old = l.remove(&k).unwrap(); + assert!(old.is_none()); + + // no-race, remove should succeed + let old = l.remove(&k).unwrap(); + assert!(old.is_none()); + } + + for i in 0..100 { + let k = key(i); + let ent = l.get(&k); + assert!(ent.is_none()); + } +} + +#[test] +fn test_remove2() { + run(|| remove2(SkipList::new(TEST_OPTIONS).unwrap())) +} + +#[test] +fn test_remove2_unify() { + run(|| remove2(SkipList::new(UNIFY_TEST_OPTIONS).unwrap())) +} + +#[test] +#[cfg(feature = "memmap")] +#[cfg_attr(miri, ignore)] +fn test_remove2_map_mut() { + run(|| unsafe { + let dir = tempfile::tempdir().unwrap(); + let p = dir.path().join("test_skipmap_remove2_map_mut"); + let open_options = OpenOptions::default() + .create_new(Some(ARENA_SIZE as u32)) + .read(true) + .write(true); + let map_options = MmapOptions::default(); + remove2(SkipList::map_mut(p, Options::new(), open_options, map_options).unwrap()); + }) +} + +#[test] +#[cfg(feature = "memmap")] +fn test_remove2_map_anon() { + run(|| unsafe { + let map_options = MmapOptions::default().len(ARENA_SIZE as u32); + remove2(SkipList::map_anon(Options::new(), map_options).unwrap()); + }) +} + +#[test] +#[cfg(feature = "memmap")] +fn test_remove2_map_anon_unify() { + run(|| { + let map_options = MmapOptions::default().len(ARENA_SIZE as u32); + remove2(SkipList::map_anon(UNIFY_TEST_OPTIONS, map_options).unwrap()); + }) +} diff --git a/src/unsync/tests/trailed.rs b/src/unsync/tests/trailed.rs new file mode 100644 index 0000000..97d2350 --- /dev/null +++ b/src/unsync/tests/trailed.rs @@ -0,0 +1,2195 @@ +use super::*; + +type SkipList = crate::unsync::trailed::SkipMap; + +type SkipMap = crate::unsync::trailed::SkipMap; + +fn trailer() -> u64 { + 123456789 +} + +fn empty_in(l: SkipMap) { + let mut it = l.iter(); + + assert!(it.seek_lower_bound(Bound::Unbounded).is_none()); + assert!(it.seek_upper_bound(Bound::Unbounded).is_none()); + 
assert!(it.seek_lower_bound(Bound::Included(b"aaa")).is_none()); + assert!(it.seek_upper_bound(Bound::Excluded(b"aaa")).is_none()); + assert!(it.seek_lower_bound(Bound::Excluded(b"aaa")).is_none()); + assert!(it.seek_upper_bound(Bound::Included(b"aaa")).is_none()); + assert!(l.first().is_none()); + assert!(l.last().is_none()); + assert!(l.get(b"aaa").is_none()); + assert!(!l.contains_key(b"aaa")); + assert!(l.allocated() > 0); + assert!(l.capacity() > 0); + assert_eq!(l.remaining(), l.capacity() - l.allocated()); +} + +#[test] +fn test_empty() { + run(|| empty_in(SkipList::new(Options::new()).unwrap())); +} + +#[test] +fn test_empty_unify() { + run(|| empty_in(SkipList::new(UNIFY_TEST_OPTIONS).unwrap())); +} + +#[test] +#[cfg(feature = "memmap")] +#[cfg_attr(miri, ignore)] +fn test_empty_map_mut() { + run(|| unsafe { + let dir = tempfile::tempdir().unwrap(); + let p = dir.path().join("test_skipmap_empty_map_mut"); + let open_options = OpenOptions::default() + .create_new(Some(1000)) + .read(true) + .write(true); + let map_options = MmapOptions::default(); + + let x = SkipList::map_mut(p, Options::new(), open_options, map_options).unwrap(); + empty_in(x); + }) +} + +#[test] +#[cfg(feature = "memmap")] +fn test_empty_map_anon() { + run(|| { + let map_options = MmapOptions::default().len(1000); + empty_in(SkipList::map_anon(Options::new(), map_options).unwrap()); + }) +} + +#[test] +#[cfg(feature = "memmap")] +fn test_empty_map_anon_unify() { + run(|| { + let map_options = MmapOptions::default().len(1000); + empty_in(SkipList::map_anon(TEST_OPTIONS, map_options).unwrap()); + }) +} + +fn full_in(l: impl FnOnce(usize) -> SkipMap) { + let l = l(1000); + let mut found_arena_full = false; + + for i in 0..100 { + if let Err(e) = l.get_or_insert(&make_int_key(i), &make_value(i), trailer()) { + assert!(matches!( + e, + Error::Arena(ArenaError::InsufficientSpace { .. 
}) + )); + found_arena_full = true; + break; + } + } + + assert!(found_arena_full); +} + +#[test] +fn test_full() { + run(|| { + full_in(|n| { + SkipList::new( + Options::new() + .with_capacity(n as u32) + .with_freelist(Freelist::None), + ) + .unwrap() + }) + }) +} + +#[test] +fn test_full_unify() { + run(|| { + full_in(|n| { + SkipList::new( + UNIFY_TEST_OPTIONS + .with_capacity(n as u32) + .with_freelist(Freelist::None), + ) + .unwrap() + }) + }) +} + +#[test] +#[cfg(feature = "memmap")] +#[cfg_attr(miri, ignore)] +fn test_full_map_mut() { + run(|| unsafe { + let dir = tempfile::tempdir().unwrap(); + let p = dir.path().join("test_skipmap_full_map_mut"); + + full_in(|n| { + let open_options = OpenOptions::default() + .create_new(Some(n as u32)) + .read(true) + .write(true); + let map_options = MmapOptions::default(); + SkipList::map_mut( + p, + Options::new().with_freelist(Freelist::None), + open_options, + map_options, + ) + .unwrap() + }); + }) +} + +#[test] +#[cfg(feature = "memmap")] +fn test_full_map_anon() { + run(|| { + full_in(|n| { + let map_options = MmapOptions::default().len(n as u32); + SkipList::map_anon(Options::new().with_freelist(Freelist::None), map_options).unwrap() + }); + }) +} + +#[test] +#[cfg(feature = "memmap")] +fn test_full_map_anon_unify() { + run(|| { + full_in(|n| { + let map_options = MmapOptions::default().len(n as u32); + SkipList::map_anon(Options::new().with_freelist(Freelist::None), map_options).unwrap() + }); + }) +} + +fn basic_in(mut l: SkipMap) { + // Try adding values. 
+ l.get_or_insert(b"key1", &make_value(1), trailer()).unwrap(); + l.get_or_insert(b"key3", &make_value(3), trailer()).unwrap(); + l.get_or_insert(b"key2", &make_value(2), trailer()).unwrap(); + assert_eq!(l.comparator(), &Ascend); + + { + let mut it = l.iter(); + let ent = it.seek_lower_bound(Bound::Included(b"key1")).unwrap(); + assert_eq!(ent.key(), b"key1"); + assert_eq!(ent.value(), &make_value(1)); + + let ent = it.seek_lower_bound(Bound::Included(b"key2")).unwrap(); + assert_eq!(ent.key(), b"key2"); + assert_eq!(ent.value(), &make_value(2)); + + let ent = it.seek_lower_bound(Bound::Included(b"key3")).unwrap(); + assert_eq!(ent.key(), b"key3"); + assert_eq!(ent.value(), &make_value(3)); + } + + l.get_or_insert("a".as_bytes(), &[], trailer()).unwrap(); + l.get_or_insert("a".as_bytes(), &[], trailer()).unwrap(); + + { + let mut it = l.iter(); + let ent = it.seek_lower_bound(Bound::Included(b"a")).unwrap(); + assert_eq!(ent.key(), b"a"); + assert_eq!(ent.value(), &[]); + + let ent = it.next().unwrap(); + assert_eq!(ent.key(), b"key1"); + assert_eq!(ent.value(), &make_value(1)); + } + + l.get_or_insert("b".as_bytes(), &[], trailer()).unwrap(); + l.get_or_insert("b".as_bytes(), &[], trailer()).unwrap(); + + { + let mut it = l.iter(); + let ent = it.seek_lower_bound(Bound::Included(b"b")).unwrap(); + assert_eq!(ent.key(), b"b"); + assert_eq!(ent.value(), &[]); + + let ent = it.next().unwrap(); + assert_eq!(ent.key(), b"key1"); + assert_eq!(ent.value(), &make_value(1)); + + let ent = it.entry().unwrap(); + assert_eq!(ent.key(), b"key1"); + assert_eq!(ent.value(), &make_value(1)); + } + + l.get_or_insert(b"b", &[], trailer()).unwrap().unwrap(); + + assert!(l.get_or_insert(b"c", &[], trailer()).unwrap().is_none()); + + unsafe { + l.clear().unwrap(); + } + + let l = l.clone(); + { + let mut it = l.iter(); + assert!(it.seek_lower_bound(Bound::Unbounded).is_none()); + assert!(it.seek_upper_bound(Bound::Unbounded).is_none()); + } + assert!(l.is_empty()); + + #[cfg(feature 
= "memmap")] + l.flush().unwrap(); + + #[cfg(feature = "memmap")] + l.flush_async().unwrap(); +} + +#[test] +fn test_basic() { + run(|| basic_in(SkipList::new(TEST_OPTIONS).unwrap())) +} + +#[test] +fn test_basic_unify() { + run(|| basic_in(SkipList::new(UNIFY_TEST_OPTIONS).unwrap())) +} + +#[test] +#[cfg(feature = "memmap")] +#[cfg_attr(miri, ignore)] +fn test_basic_map_mut() { + run(|| unsafe { + let dir = tempfile::tempdir().unwrap(); + let p = dir.path().join("test_skipmap_basic_map_mut"); + let open_options = OpenOptions::default() + .create_new(Some(ARENA_SIZE as u32)) + .read(true) + .write(true); + let map_options = MmapOptions::default(); + basic_in(SkipList::map_mut(p, Options::new(), open_options, map_options).unwrap()); + }) +} + +#[test] +#[cfg(feature = "memmap")] +fn test_basic_map_anon() { + run(|| { + let map_options = MmapOptions::default().len(ARENA_SIZE as u32); + basic_in(SkipList::map_anon(Options::new(), map_options).unwrap()); + }) +} + +#[test] +#[cfg(feature = "memmap")] +fn test_basic_map_anon_unify() { + run(|| { + let map_options = MmapOptions::default().len(ARENA_SIZE as u32); + basic_in(SkipList::map_anon(TEST_OPTIONS, map_options).unwrap()); + }) +} + +fn ordering() { + let l = SkipList::with_comparator(TEST_OPTIONS, Descend).unwrap(); + + l.get_or_insert(b"a1", b"a1", trailer()).unwrap(); + l.get_or_insert(b"a2", b"a2", trailer()).unwrap(); + l.get_or_insert(b"a3", b"a3", trailer()).unwrap(); + + let mut it = l.iter(); + for i in (1..=3).rev() { + let ent = it.next().unwrap(); + assert_eq!(ent.key(), format!("a{i}").as_bytes()); + assert_eq!(ent.value(), format!("a{i}").as_bytes()); + } +} + +#[test] +fn test_ordering() { + run(ordering); +} + +fn get(l: SkipMap) { + l.get_or_insert(b"a", b"a1", trailer()).unwrap(); + l.get_or_insert(b"a", b"a2", trailer()).unwrap(); + l.get_or_insert(b"c", b"c1", trailer()).unwrap(); + l.get_or_insert(b"c", b"c2", trailer()).unwrap(); + + let ent = l.get(b"a").unwrap(); + assert_eq!(ent.key(), 
b"a"); + assert_eq!(ent.value(), b"a1"); + + let ent = l.get(b"a").unwrap(); + assert_eq!(ent.key(), b"a"); + assert_eq!(ent.value(), b"a1"); + + let ent = l.get(b"a").unwrap(); + assert_eq!(ent.key(), b"a"); + assert_eq!(ent.value(), b"a1"); + + l.insert(b"a", b"a2", trailer()).unwrap(); + + let ent = l.get(b"a").unwrap(); + assert_eq!(ent.key(), b"a"); + assert_eq!(ent.value(), b"a2"); + + assert!(l.get(b"b").is_none()); + assert!(l.get(b"b").is_none()); + assert!(l.get(b"b").is_none()); + assert!(l.get(b"b").is_none()); + assert!(l.get(b"b").is_none()); + + let ent = l.get(b"c").unwrap(); + assert_eq!(ent.key(), b"c"); + assert_eq!(ent.value(), b"c1"); + + let ent = l.get(b"c").unwrap(); + assert_eq!(ent.key(), b"c"); + assert_eq!(ent.value(), b"c1"); + + assert!(l.get(b"d").is_none()); +} + +#[test] +fn test_get() { + run(|| get(SkipList::new(TEST_OPTIONS).unwrap())); +} + +#[test] +fn test_get_unify() { + run(|| get(SkipList::new(UNIFY_TEST_OPTIONS).unwrap())); +} + +#[test] +#[cfg(feature = "memmap")] +#[cfg_attr(miri, ignore)] +fn test_get_map_mut() { + run(|| unsafe { + let dir = tempfile::tempdir().unwrap(); + let p = dir.path().join("test_skipmap_get_map_mut"); + let open_options = OpenOptions::default() + .create_new(Some(ARENA_SIZE as u32)) + .read(true) + .write(true); + let map_options = MmapOptions::default(); + get(SkipList::map_mut(p, Options::new(), open_options, map_options).unwrap()); + }) +} + +#[test] +#[cfg(feature = "memmap")] +fn test_get_map_anon() { + run(|| { + let map_options = MmapOptions::default().len(ARENA_SIZE as u32); + get(SkipList::map_anon(Options::new(), map_options).unwrap()); + }) +} + +#[test] +#[cfg(feature = "memmap")] +fn test_get_map_anon_unify() { + run(|| { + let map_options = MmapOptions::default().len(ARENA_SIZE as u32); + get(SkipList::map_anon(UNIFY_TEST_OPTIONS, map_options).unwrap()); + }) +} + +fn gt_in(l: SkipMap) { + l.get_or_insert(b"a", b"a1", trailer()).unwrap(); + l.get_or_insert(b"a", b"a2", 
trailer()).unwrap(); + l.get_or_insert(b"c", b"c1", trailer()).unwrap(); + l.get_or_insert(b"c", b"c2", trailer()).unwrap(); + l.get_or_insert(b"c", b"c3", trailer()).unwrap(); + + assert!(l.lower_bound(Bound::Excluded(b"a")).is_some()); + assert!(l.lower_bound(Bound::Excluded(b"b")).is_some()); + assert!(l.lower_bound(Bound::Excluded(b"c")).is_none()); + + let ent = l.lower_bound(Bound::Excluded(b"")).unwrap(); + assert_eq!(ent.key(), b"a"); + assert_eq!(ent.value(), b"a1"); + + let ent = l.lower_bound(Bound::Excluded(b"")).unwrap(); + assert_eq!(ent.key(), b"a"); + assert_eq!(ent.value(), b"a1"); + + let ent = l.lower_bound(Bound::Excluded(b"a")).unwrap(); + assert_eq!(ent.key(), b"c"); + assert_eq!(ent.value(), b"c1"); + + let ent = l.lower_bound(Bound::Excluded(b"b")).unwrap(); + assert_eq!(ent.key(), b"c"); + assert_eq!(ent.value(), b"c1"); + + let ent = l.lower_bound(Bound::Excluded(b"b")).unwrap(); + assert_eq!(ent.key(), b"c"); + assert_eq!(ent.value(), b"c1"); + + assert!(l.lower_bound(Bound::Excluded(b"c")).is_none()); +} + +#[test] +fn test_gt() { + run(|| gt_in(SkipList::new(TEST_OPTIONS).unwrap())); +} + +#[test] +fn test_gt_unify() { + run(|| gt_in(SkipList::new(UNIFY_TEST_OPTIONS).unwrap())); +} + +#[test] +#[cfg(feature = "memmap")] +#[cfg_attr(miri, ignore)] +fn test_gt_map_mut() { + run(|| unsafe { + let dir = tempfile::tempdir().unwrap(); + let p = dir.path().join("test_skipmap_gt_map_mut"); + let open_options = OpenOptions::default() + .create_new(Some(ARENA_SIZE as u32)) + .read(true) + .write(true); + let map_options = MmapOptions::default(); + gt_in(SkipList::map_mut(p, Options::new(), open_options, map_options).unwrap()); + }) +} + +#[test] +#[cfg(feature = "memmap")] +fn test_gt_map_anon() { + run(|| { + let map_options = MmapOptions::default().len(ARENA_SIZE as u32); + gt_in(SkipList::map_anon(Options::new(), map_options).unwrap()); + }) +} + +#[test] +#[cfg(feature = "memmap")] +fn test_gt_map_anon_unify() { + run(|| { + let map_options = 
MmapOptions::default().len(ARENA_SIZE as u32); + gt_in(SkipList::map_anon(UNIFY_TEST_OPTIONS, map_options).unwrap()); + }) +} + +fn ge_in(l: SkipMap) { + l.get_or_insert(b"a", b"a1", trailer()).unwrap(); + l.get_or_insert(b"a", b"a2", trailer()).unwrap(); + l.get_or_insert(b"c", b"c1", trailer()).unwrap(); + l.get_or_insert(b"c", b"c2", trailer()).unwrap(); + + assert!(l.lower_bound(Bound::Included(b"a")).is_some()); + assert!(l.lower_bound(Bound::Included(b"b")).is_some()); + assert!(l.lower_bound(Bound::Included(b"c")).is_some()); + + let ent = l.lower_bound(Bound::Included(b"a")).unwrap(); + assert_eq!(ent.key(), b"a"); + assert_eq!(ent.value(), b"a1"); + + let ent = l.lower_bound(Bound::Included(b"a")).unwrap(); + assert_eq!(ent.key(), b"a"); + assert_eq!(ent.value(), b"a1"); + + l.insert(b"a", b"a2", trailer()).unwrap(); + + let ent = l.lower_bound(Bound::Included(b"a")).unwrap(); + assert_eq!(ent.key(), b"a"); + assert_eq!(ent.value(), b"a2"); + + let ent = l.lower_bound(Bound::Included(b"b")).unwrap(); + assert_eq!(ent.key(), b"c"); + assert_eq!(ent.value(), b"c1"); + + let ent = l.lower_bound(Bound::Included(b"b")).unwrap(); + assert_eq!(ent.key(), b"c"); + assert_eq!(ent.value(), b"c1"); + + let ent = l.lower_bound(Bound::Included(b"c")).unwrap(); + assert_eq!(ent.key(), b"c"); + assert_eq!(ent.value(), b"c1"); + + let ent = l.lower_bound(Bound::Included(b"c")).unwrap(); + assert_eq!(ent.key(), b"c"); + assert_eq!(ent.value(), b"c1"); + + assert!(l.lower_bound(Bound::Included(b"d")).is_none()); +} + +#[test] +fn test_ge() { + run(|| ge_in(SkipList::new(TEST_OPTIONS).unwrap())); +} + +#[test] +fn test_ge_unify() { + run(|| ge_in(SkipList::new(UNIFY_TEST_OPTIONS).unwrap())); +} + +#[test] +#[cfg(feature = "memmap")] +#[cfg_attr(miri, ignore)] +fn test_ge_map_mut() { + run(|| unsafe { + let dir = tempfile::tempdir().unwrap(); + let p = dir.path().join("test_skipmap_ge_map_mut"); + let open_options = OpenOptions::default() + .create_new(Some(ARENA_SIZE as 
u32)) + .read(true) + .write(true); + let map_options = MmapOptions::default(); + ge_in(SkipList::map_mut(p, Options::new(), open_options, map_options).unwrap()); + }) +} + +#[test] +#[cfg(feature = "memmap")] +fn test_ge_map_anon() { + run(|| { + let map_options = MmapOptions::default().len(ARENA_SIZE as u32); + ge_in(SkipList::map_anon(Options::new(), map_options).unwrap()); + }) +} + +#[test] +#[cfg(feature = "memmap")] +fn test_ge_map_anon_unify() { + run(|| { + let map_options = MmapOptions::default().len(ARENA_SIZE as u32); + ge_in(SkipList::map_anon(UNIFY_TEST_OPTIONS, map_options).unwrap()); + }) +} + +fn le_in(l: SkipMap) { + l.get_or_insert(b"a", b"a1", trailer()).unwrap(); + l.get_or_insert(b"a", b"a2", trailer()).unwrap(); + l.get_or_insert(b"c", b"c1", trailer()).unwrap(); + l.get_or_insert(b"c", b"c2", trailer()).unwrap(); + + assert!(l.upper_bound(Bound::Included(b"a")).is_some()); + assert!(l.upper_bound(Bound::Included(b"b")).is_some()); + assert!(l.upper_bound(Bound::Included(b"c")).is_some()); + + let ent = l.upper_bound(Bound::Included(b"a")).unwrap(); + assert_eq!(ent.key(), b"a"); + assert_eq!(ent.value(), b"a1"); + + let ent = l.upper_bound(Bound::Included(b"b")).unwrap(); + assert_eq!(ent.key(), b"a"); + assert_eq!(ent.value(), b"a1"); + + let ent = l.upper_bound(Bound::Included(b"c")).unwrap(); + assert_eq!(ent.key(), b"c"); + assert_eq!(ent.value(), b"c1"); + + let ent = l.upper_bound(Bound::Included(b"d")).unwrap(); + assert_eq!(ent.key(), b"c"); + assert_eq!(ent.value(), b"c1"); +} + +#[test] +fn test_le() { + run(|| le_in(SkipList::new(TEST_OPTIONS).unwrap())); +} + +#[test] +fn test_le_unify() { + run(|| le_in(SkipList::new(UNIFY_TEST_OPTIONS).unwrap())); +} + +#[test] +#[cfg(feature = "memmap")] +#[cfg_attr(miri, ignore)] +fn test_le_map_mut() { + run(|| unsafe { + let dir = tempfile::tempdir().unwrap(); + let p = dir.path().join("test_skipmap_le_map_mut"); + let open_options = OpenOptions::default() + .create_new(Some(ARENA_SIZE as 
u32))
+ .read(true)
+ .write(true);
+ let map_options = MmapOptions::default();
+ le_in(SkipList::map_mut(p, Options::new(), open_options, map_options).unwrap()); // fixed: was `gt_in` — copy-paste from the gt tests; this test must exercise `le_in`
+ })
+}
+
+#[test]
+#[cfg(feature = "memmap")]
+fn test_le_map_anon() {
+ run(|| {
+ let map_options = MmapOptions::default().len(ARENA_SIZE as u32);
+ le_in(SkipList::map_anon(Options::new(), map_options).unwrap()); // fixed: was `gt_in`
+ })
+}
+
+#[test]
+#[cfg(feature = "memmap")]
+fn test_le_map_anon_unify() {
+ run(|| {
+ let map_options = MmapOptions::default().len(ARENA_SIZE as u32);
+ le_in(SkipList::map_anon(UNIFY_TEST_OPTIONS, map_options).unwrap()); // fixed: was `gt_in`
+ })
+}
+
+fn lt_in(l: SkipMap) {
+ l.get_or_insert(b"a", b"a1", trailer()).unwrap();
+ l.get_or_insert(b"a", b"a2", trailer()).unwrap();
+ l.get_or_insert(b"c", b"c1", trailer()).unwrap();
+ l.get_or_insert(b"c", b"c2", trailer()).unwrap();
+
+ assert!(l.upper_bound(Bound::Excluded(b"a")).is_none());
+ assert!(l.upper_bound(Bound::Excluded(b"b")).is_some());
+ assert!(l.upper_bound(Bound::Excluded(b"c")).is_some());
+
+ let ent = l.upper_bound(Bound::Excluded(b"b")).unwrap();
+ assert_eq!(ent.key(), b"a");
+ assert_eq!(ent.value(), b"a1");
+
+ let ent = l.upper_bound(Bound::Excluded(b"c")).unwrap();
+ assert_eq!(ent.key(), b"a");
+ assert_eq!(ent.value(), b"a1");
+
+ let ent = l.upper_bound(Bound::Excluded(b"d")).unwrap();
+ assert_eq!(ent.key(), b"c");
+ assert_eq!(ent.value(), b"c1");
+}
+
+#[test]
+fn test_lt() {
+ run(|| lt_in(SkipList::new(TEST_OPTIONS).unwrap()))
+}
+
+#[test]
+fn test_lt_unify() {
+ run(|| lt_in(SkipList::new(UNIFY_TEST_OPTIONS).unwrap()))
+}
+
+#[test]
+#[cfg(feature = "memmap")]
+#[cfg_attr(miri, ignore)]
+fn test_lt_map_mut() {
+ let dir = tempfile::tempdir().unwrap(); // NOTE(review): unlike every sibling test this one is not wrapped in `run(|| …)` — confirm and align; closing brace is outside this hunk chunk
+ let p = dir.path().join("test_skipmap_lt_map_mut");
+ let open_options = OpenOptions::default()
+ .create_new(Some(ARENA_SIZE as u32))
+ .read(true)
+ .write(true);
+ let map_options = MmapOptions::default();
+ lt_in(unsafe { SkipList::map_mut(p, Options::new(), open_options, 
map_options).unwrap() }); +} + +#[test] +#[cfg(feature = "memmap")] + +fn test_lt_map_anon() { + run(|| { + let map_options = MmapOptions::default().len(ARENA_SIZE as u32); + lt_in(SkipList::map_anon(Options::new(), map_options).unwrap()); + }) +} + +#[test] +#[cfg(feature = "memmap")] +fn test_lt_map_anon_unify() { + run(|| { + let map_options = MmapOptions::default().len(ARENA_SIZE as u32); + lt_in(SkipList::map_anon(UNIFY_TEST_OPTIONS, map_options).unwrap()); + }) +} + +fn test_basic_large_testcases_in(l: SkipMap) { + let n = 1000; + + for i in 0..n { + l.get_or_insert(&key(i), &new_value(i), trailer()).unwrap(); + } + + for i in 0..n { + let k = key(i); + let ent = l.get(&k).unwrap(); + assert_eq!(new_value(i), ent.value()); + + assert_eq!(ent.key(), k); + } + + assert_eq!(n, l.len()); +} + +#[test] +fn test_basic_large_testcases() { + run(|| { + let l = SkipList::new(TEST_OPTIONS).unwrap(); + test_basic_large_testcases_in(l); + }) +} + +#[test] +fn test_basic_large_testcases_unify() { + run(|| { + let l = SkipList::new(UNIFY_TEST_OPTIONS).unwrap(); + test_basic_large_testcases_in(l); + }) +} + +#[test] +#[cfg(feature = "memmap")] +#[cfg_attr(miri, ignore)] +fn test_basic_large_testcases_map_mut() { + run(|| unsafe { + let dir = tempfile::tempdir().unwrap(); + let p = dir + .path() + .join("test_skipmap_basic_large_testcases_map_mut"); + let open_options = OpenOptions::default() + .create_new(Some(ARENA_SIZE as u32)) + .read(true) + .write(true); + let map_options = MmapOptions::default(); + let l = SkipList::map_mut(p, Options::new(), open_options, map_options).unwrap(); + test_basic_large_testcases_in(l); + }) +} + +#[test] +#[cfg(feature = "memmap")] +fn test_basic_large_testcases_map_anon() { + run(|| { + let map_options = MmapOptions::default().len(ARENA_SIZE as u32); + let l = SkipList::map_anon(Options::new(), map_options).unwrap(); + test_basic_large_testcases_in(l); + }) +} + +#[test] +#[cfg(feature = "memmap")] +fn 
test_basic_large_testcases_map_anon_unify() { + run(|| { + let map_options = MmapOptions::default().len(ARENA_SIZE as u32); + let l = SkipList::map_anon(UNIFY_TEST_OPTIONS, map_options).unwrap(); + test_basic_large_testcases_in(l); + }) +} + +fn iter_all_versions_next(l: SkipMap) { + const N: usize = 100; + + for i in (0..N).rev() { + l.get_or_insert(&make_int_key(i), &make_value(i), trailer()) + .unwrap(); + } + + let mut it = l.iter(); + let mut ent = it.seek_lower_bound(Bound::Unbounded).unwrap(); + for i in 0..N { + assert_eq!(ent.key(), make_int_key(i)); + assert_eq!(ent.value(), make_value(i)); + if i != N - 1 { + ent = it.next().unwrap(); + } + } + + assert!(it.next().is_none()); +} + +#[test] +fn test_iter_all_versions_next() { + run(|| iter_all_versions_next(SkipList::new(TEST_OPTIONS).unwrap())); +} + +#[test] +fn test_iter_all_versions_next_unify() { + run(|| iter_all_versions_next(SkipList::new(UNIFY_TEST_OPTIONS).unwrap())); +} + +#[test] +#[cfg(feature = "memmap")] +#[cfg_attr(miri, ignore)] +fn test_iter_all_versions_next_map_mut() { + run(|| unsafe { + let dir = tempfile::tempdir().unwrap(); + let p = dir + .path() + .join("test_skipmap_iter_all_versions_next_map_mut"); + let open_options = OpenOptions::default() + .create_new(Some(ARENA_SIZE as u32)) + .read(true) + .write(true); + let map_options = MmapOptions::default(); + iter_all_versions_next( + SkipList::map_mut(p, Options::new(), open_options, map_options).unwrap(), + ); + }) +} + +#[test] +#[cfg(feature = "memmap")] +fn test_iter_all_versions_next_map_anon() { + run(|| { + let map_options = MmapOptions::default().len(ARENA_SIZE as u32); + iter_all_versions_next(SkipList::map_anon(Options::new(), map_options).unwrap()); + }) +} + +#[test] +#[cfg(feature = "memmap")] +fn test_iter_all_versions_next_map_anon_unify() { + run(|| { + let map_options = MmapOptions::default().len(ARENA_SIZE as u32); + iter_all_versions_next(SkipList::map_anon(UNIFY_TEST_OPTIONS, map_options).unwrap()); + }) +} + 
+fn range_next(l: SkipMap) {
+ const N: usize = 100;
+
+ for i in (0..N).rev() {
+ l.get_or_insert(&make_int_key(i), &make_value(i), trailer())
+ .unwrap();
+ }
+
+ let upper = make_int_key(50);
+ let mut it = l.range(..=upper.as_slice());
+ let mut ent = it.seek_lower_bound(Bound::Unbounded);
+ for i in 0..N {
+ if i <= 50 {
+ {
+ let ent = ent.unwrap();
+ assert_eq!(ent.key(), make_int_key(i));
+ assert_eq!(ent.value(), make_value(i));
+ }
+ ent = it.next();
+ } else {
+ assert!(ent.is_none());
+ ent = it.next();
+ }
+ }
+
+ assert!(it.next().is_none());
+}
+
+#[test]
+fn test_range_next() {
+ run(|| range_next(SkipList::new(TEST_OPTIONS).unwrap()));
+}
+
+#[test]
+fn test_range_next_unify() {
+ run(|| range_next(SkipList::new(UNIFY_TEST_OPTIONS).unwrap()));
+}
+
+#[test]
+#[cfg(feature = "memmap")]
+#[cfg_attr(miri, ignore)]
+fn test_range_next_map_mut() {
+ run(|| unsafe {
+ let dir = tempfile::tempdir().unwrap();
+ let p = dir.path().join("test_skipmap_range_next_map_mut");
+ let open_options = OpenOptions::default()
+ .create_new(Some(ARENA_SIZE as u32))
+ .read(true)
+ .write(true);
+ let map_options = MmapOptions::default();
+ range_next( // fixed: was `iter_all_versions_next` — copy-paste; range_next was never run against mmap backends
+ SkipList::map_mut(p, Options::new(), open_options, map_options).unwrap(),
+ );
+ })
+}
+
+#[test]
+#[cfg(feature = "memmap")]
+fn test_range_next_map_anon() {
+ run(|| {
+ let map_options = MmapOptions::default().len(ARENA_SIZE as u32);
+ range_next(SkipList::map_anon(Options::new(), map_options).unwrap()); // fixed: was `iter_all_versions_next`
+ })
+}
+
+#[test]
+#[cfg(feature = "memmap")]
+fn test_range_next_map_anon_unify() {
+ run(|| {
+ let map_options = MmapOptions::default().len(ARENA_SIZE as u32);
+ range_next(SkipList::map_anon(UNIFY_TEST_OPTIONS, map_options).unwrap()); // fixed: was `iter_all_versions_next`
+ })
+}
+
+fn iter_all_versions_prev(l: SkipMap) {
+ const N: usize = 100;
+
+ for i in 0..N {
+ l.get_or_insert(&make_int_key(i), &make_value(i), trailer())
+ .unwrap();
+ }
+
+ let mut it = l.iter();
+ let mut ent = 
it.seek_upper_bound(Bound::Unbounded).unwrap(); + for i in (0..N).rev() { + assert_eq!(ent.key(), make_int_key(i)); + assert_eq!(ent.value(), make_value(i)); + if i != 0 { + ent = it.next_back().unwrap(); + } + } + + assert!(it.next_back().is_none()); +} + +#[test] +fn test_iter_all_versions_next_back() { + run(|| iter_all_versions_prev(SkipList::new(TEST_OPTIONS).unwrap())) +} + +#[test] +#[cfg(feature = "memmap")] +#[cfg_attr(miri, ignore)] +fn test_iter_all_versions_prev_map_mut() { + run(|| unsafe { + let dir = tempfile::tempdir().unwrap(); + let p = dir + .path() + .join("test_skipmap_iter_all_versions_prev_map_mut"); + let open_options = OpenOptions::default() + .create_new(Some(ARENA_SIZE as u32)) + .read(true) + .write(true); + let map_options = MmapOptions::default(); + iter_all_versions_prev( + SkipList::map_mut(p, Options::new(), open_options, map_options).unwrap(), + ); + }) +} + +#[test] +#[cfg(feature = "memmap")] +fn test_iter_all_versions_prev_map_anon() { + run(|| { + let map_options = MmapOptions::default().len(ARENA_SIZE as u32); + iter_all_versions_prev(SkipList::map_anon(Options::new(), map_options).unwrap()); + }) +} + +#[test] +#[cfg(feature = "memmap")] +fn test_iter_all_versions_prev_map_anon_unify() { + run(|| { + let map_options = MmapOptions::default().len(ARENA_SIZE as u32); + iter_all_versions_prev(SkipList::map_anon(UNIFY_TEST_OPTIONS, map_options).unwrap()); + }) +} + +fn range_prev(l: SkipMap) { + const N: usize = 100; + + for i in 0..N { + l.get_or_insert(&make_int_key(i), &make_value(i), trailer()) + .unwrap(); + } + + let lower = make_int_key(50); + let mut it = l.range(lower.as_slice()..); + let mut ent = it.seek_upper_bound(Bound::Unbounded); + for i in (0..N).rev() { + if i >= 50 { + { + let ent = ent.unwrap(); + assert_eq!(ent.key(), make_int_key(i)); + assert_eq!(ent.value(), make_value(i)); + } + ent = it.next_back(); + } else { + assert!(ent.is_none()); + ent = it.next_back(); + } + } + + assert!(it.next_back().is_none()); 
+} + +#[test] +fn test_range_prev() { + run(|| range_prev(SkipList::new(TEST_OPTIONS).unwrap())); +} + +#[test] +fn test_range_prev_unify() { + run(|| range_prev(SkipList::new(UNIFY_TEST_OPTIONS).unwrap())); +} + +#[test] +#[cfg(feature = "memmap")] +#[cfg_attr(miri, ignore)] +fn test_range_prev_map_mut() { + run(|| unsafe { + let dir = tempfile::tempdir().unwrap(); + let p = dir.path().join("test_skipmap_range_prev_map_mut"); + let open_options = OpenOptions::default() + .create_new(Some(ARENA_SIZE as u32)) + .read(true) + .write(true); + let map_options = MmapOptions::default(); + range_prev(SkipList::map_mut(p, Options::new(), open_options, map_options).unwrap()); + }) +} + +#[test] +#[cfg(feature = "memmap")] +fn test_range_prev_map_anon() { + run(|| { + let map_options = MmapOptions::default().len(ARENA_SIZE as u32); + range_prev(SkipList::map_anon(Options::new(), map_options).unwrap()); + }) +} + +#[test] +#[cfg(feature = "memmap")] +fn test_range_prev_map_anon_unify() { + run(|| { + let map_options = MmapOptions::default().len(ARENA_SIZE as u32); + range_prev(SkipList::map_anon(UNIFY_TEST_OPTIONS, map_options).unwrap()); + }) +} + +fn iter_all_versions_seek_ge(l: SkipMap) { + const N: usize = 100; + + for i in (0..N).rev() { + let v = i * 10 + 1000; + l.get_or_insert(&make_int_key(v), &make_value(v), trailer()) + .unwrap(); + } + + let mut it = l.iter(); + let ent = it.seek_lower_bound(Bound::Included(b"")).unwrap(); + assert_eq!(ent.key(), make_int_key(1000)); + assert_eq!(ent.value(), make_value(1000)); + + let ent = it.seek_lower_bound(Bound::Included(b"01000")).unwrap(); + assert_eq!(ent.key(), make_int_key(1000)); + assert_eq!(ent.value(), make_value(1000)); + + let ent = it.seek_lower_bound(Bound::Included(b"01005")).unwrap(); + assert_eq!(ent.key(), make_int_key(1010)); + assert_eq!(ent.value(), make_value(1010)); + + let ent = it.seek_lower_bound(Bound::Included(b"01010")).unwrap(); + assert_eq!(ent.key(), make_int_key(1010)); + 
assert_eq!(ent.value(), make_value(1010)); + + let ent = it.seek_lower_bound(Bound::Included(b"01020")).unwrap(); + assert_eq!(ent.key(), make_int_key(1020)); + assert_eq!(ent.value(), make_value(1020)); + + let ent = it.seek_lower_bound(Bound::Included(b"01200")).unwrap(); + assert_eq!(ent.key(), make_int_key(1200)); + assert_eq!(ent.value(), make_value(1200)); + + let ent = it.seek_lower_bound(Bound::Included(b"01100")).unwrap(); + assert_eq!(ent.key(), make_int_key(1100)); + assert_eq!(ent.value(), make_value(1100)); + + let ent = it.seek_lower_bound(Bound::Included(b"99999")); + assert!(ent.is_none()); + + l.get_or_insert(&[], &[], trailer()).unwrap(); + let ent = it.seek_lower_bound(Bound::Included(b"")).unwrap(); + assert_eq!(ent.key(), &[]); + assert_eq!(ent.value(), &[]); + + let ent = it.seek_lower_bound(Bound::Included(b"")).unwrap(); + assert_eq!(ent.key(), &[]); + assert_eq!(ent.value(), &[]); +} + +#[test] +fn test_iter_all_versions_seek_ge() { + run(|| iter_all_versions_seek_ge(SkipList::new(TEST_OPTIONS).unwrap())); +} + +#[test] +fn test_iter_all_versions_seek_ge_unify() { + run(|| iter_all_versions_seek_ge(SkipList::new(UNIFY_TEST_OPTIONS).unwrap())); +} + +#[test] +#[cfg(feature = "memmap")] +#[cfg_attr(miri, ignore)] +fn test_iter_all_versions_seek_ge_map_mut() { + run(|| unsafe { + let dir = tempfile::tempdir().unwrap(); + let p = dir + .path() + .join("test_skipmap_iter_all_versions_seek_ge_map_mut"); + let open_options = OpenOptions::default() + .create_new(Some(ARENA_SIZE as u32)) + .read(true) + .write(true); + let map_options = MmapOptions::default(); + iter_all_versions_seek_ge( + SkipList::map_mut(p, Options::new(), open_options, map_options).unwrap(), + ); + }) +} + +#[test] +#[cfg(feature = "memmap")] +fn test_iter_all_versions_seek_ge_map_anon() { + run(|| { + let map_options = MmapOptions::default().len(ARENA_SIZE as u32); + iter_all_versions_seek_ge(SkipList::map_anon(Options::new(), map_options).unwrap()); + }) +} + +#[test] 
+#[cfg(feature = "memmap")] +fn test_iter_all_versions_seek_ge_map_anon_unify() { + run(|| { + let map_options = MmapOptions::default().len(ARENA_SIZE as u32); + iter_all_versions_seek_ge(SkipList::map_anon(UNIFY_TEST_OPTIONS, map_options).unwrap()); + }) +} + +fn iter_all_versions_seek_lt(l: SkipMap) { + const N: usize = 100; + + for i in (0..N).rev() { + let v = i * 10 + 1000; + l.get_or_insert(&make_int_key(v), &make_value(v), trailer()) + .unwrap(); + } + + let mut it = l.iter(); + assert!(it.seek_upper_bound(Bound::Excluded(b"")).is_none()); + + let ent = it.seek_upper_bound(Bound::Excluded(b"01000")); + assert!(ent.is_none()); + + let ent = it.seek_upper_bound(Bound::Excluded(b"01001")).unwrap(); + assert_eq!(ent.key(), make_int_key(1000)); + assert_eq!(ent.value(), make_value(1000)); + + let ent = it.seek_upper_bound(Bound::Excluded(b"01991")).unwrap(); + assert_eq!(ent.key(), make_int_key(1990)); + assert_eq!(ent.value(), make_value(1990)); + + let ent = it.seek_upper_bound(Bound::Excluded(b"99999")).unwrap(); + assert_eq!(ent.key(), make_int_key(1990)); + assert_eq!(ent.value(), make_value(1990)); + + l.get_or_insert(&[], &[], trailer()).unwrap(); + + let ent = it.seek_upper_bound(Bound::Excluded(b"")); + assert!(ent.is_none()); + + let ent = it.seek_upper_bound(Bound::Excluded(b"\x01")).unwrap(); + assert_eq!(ent.key(), &[]); + assert_eq!(ent.value(), &[]); +} + +#[test] +fn test_iter_all_versions_seek_lt() { + run(|| iter_all_versions_seek_lt(SkipList::new(TEST_OPTIONS).unwrap())) +} + +#[test] +fn test_iter_all_versions_seek_lt_unify() { + run(|| iter_all_versions_seek_lt(SkipList::new(UNIFY_TEST_OPTIONS).unwrap())) +} + +#[test] +#[cfg(feature = "memmap")] +#[cfg_attr(miri, ignore)] +fn test_iter_all_versions_seek_lt_map_mut() { + run(|| unsafe { + let dir = tempfile::tempdir().unwrap(); + let p = dir + .path() + .join("test_skipmap_iter_all_versions_seek_lt_map_mut"); + let open_options = OpenOptions::default() + .create_new(Some(ARENA_SIZE as u32)) + 
.read(true) + .write(true); + let map_options = MmapOptions::default(); + iter_all_versions_seek_lt( + SkipList::map_mut(p, Options::new(), open_options, map_options).unwrap(), + ); + }) +} + +#[test] +#[cfg(feature = "memmap")] +fn test_iter_all_versions_seek_lt_map_anon() { + run(|| { + let map_options = MmapOptions::default().len(ARENA_SIZE as u32); + iter_all_versions_seek_lt(SkipList::map_anon(Options::new(), map_options).unwrap()); + }) +} + +#[test] +#[cfg(feature = "memmap")] +fn test_iter_all_versions_seek_lt_map_anon_unify() { + run(|| { + let map_options = MmapOptions::default().len(ARENA_SIZE as u32); + iter_all_versions_seek_lt(SkipList::map_anon(UNIFY_TEST_OPTIONS, map_options).unwrap()); + }) +} + +fn range(l: SkipMap) { + for i in 1..10 { + l.get_or_insert(&make_int_key(i), &make_value(i), trailer()) + .unwrap(); + } + + let k3 = make_int_key(3); + let k7 = make_int_key(7); + let mut it = l.range(k3.as_slice()..k7.as_slice()).clone(); + assert_eq!(it.bounds(), &(k3.as_slice()..k7.as_slice())); + + for i in 3..=6 { + let k = make_int_key(i); + let ent = it.seek_lower_bound(Bound::Included(&k)).unwrap(); + assert_eq!(ent.key(), make_int_key(i)); + assert_eq!(ent.value(), make_value(i)); + } + + for i in 1..3 { + let k = make_int_key(i); + let ent = it.seek_lower_bound(Bound::Included(&k)).unwrap(); + assert_eq!(ent.key(), make_int_key(3)); + assert_eq!(ent.value(), make_value(3)); + } + + for i in 7..10 { + let k = make_int_key(i); + assert!(it.seek_lower_bound(Bound::Included(&k)).is_none()); + } + + for i in 7..10 { + let k = make_int_key(i); + let ent = it.seek_upper_bound(Bound::Included(&k)).unwrap(); + assert_eq!(ent.key(), make_int_key(6)); + assert_eq!(ent.value(), make_value(6)); + } + + let ent = it + .seek_lower_bound(Bound::Included(&make_int_key(6))) + .unwrap(); + assert_eq!(ent.key(), make_int_key(6)); + assert_eq!(ent.value(), make_value(6)); + + assert!(it.next().is_none()); + + let ent = it + 
.seek_upper_bound(Bound::Included(&make_int_key(6))) + .unwrap(); + assert_eq!(ent.key(), make_int_key(6)); + assert_eq!(ent.value(), make_value(6)); + + assert!(it.next().is_none()); + + for i in 4..=7 { + let k = make_int_key(i); + let ent = it.seek_upper_bound(Bound::Excluded(&k)).unwrap(); + assert_eq!(ent.key(), make_int_key(i - 1)); + assert_eq!(ent.value(), make_value(i - 1)); + } + + for i in 7..10 { + let k = make_int_key(i); + let ent = it.seek_upper_bound(Bound::Excluded(&k)).unwrap(); + assert_eq!(ent.key(), make_int_key(6)); + assert_eq!(ent.value(), make_value(6)); + } + + for i in 1..3 { + let k = make_int_key(i); + let ent = it.seek_lower_bound(Bound::Excluded(&k)).unwrap(); + assert_eq!(ent.key(), make_int_key(3)); + assert_eq!(ent.value(), make_value(3)); + } + + for i in 1..4 { + let k = make_int_key(i); + assert!(it.seek_upper_bound(Bound::Excluded(&k)).is_none()); + } + + let ent = it + .seek_upper_bound(Bound::Excluded(&make_int_key(4))) + .unwrap(); + assert_eq!(ent.key(), make_int_key(3)); + assert_eq!(ent.value(), make_value(3)); + + assert!(it.next_back().is_none()); +} + +#[test] +fn test_range() { + run(|| range(SkipList::new(TEST_OPTIONS).unwrap())) +} + +#[test] +fn test_range_unify() { + run(|| range(SkipList::new(UNIFY_TEST_OPTIONS).unwrap())) +} + +#[test] +#[cfg(feature = "memmap")] +#[cfg_attr(miri, ignore)] +fn test_range_map_mut() { + run(|| unsafe { + let dir = tempfile::tempdir().unwrap(); + let p = dir.path().join("test_skipmap_range_map_mut"); + let open_options = OpenOptions::default() + .create_new(Some(ARENA_SIZE as u32)) + .read(true) + .write(true); + let map_options = MmapOptions::default(); + range(SkipList::map_mut(p, Options::new(), open_options, map_options).unwrap()); + }) +} + +#[test] +#[cfg(feature = "memmap")] +fn test_range_map_anon() { + run(|| { + let map_options = MmapOptions::default().len(ARENA_SIZE as u32); + range(SkipList::map_anon(Options::new(), map_options).unwrap()); + }) +} + +#[test] 
+#[cfg(feature = "memmap")] +fn test_range_map_anon_unify() { + run(|| { + let map_options = MmapOptions::default().len(ARENA_SIZE as u32); + range(SkipList::map_anon(UNIFY_TEST_OPTIONS, map_options).unwrap()); + }) +} + +fn iter_latest(l: SkipMap) { + const N: usize = 100; + + for i in 0..N { + l.get_or_insert(&make_int_key(i), &make_value(i), trailer()) + .unwrap(); + } + + for i in 50..N { + l.insert(&make_int_key(i), &make_value(i + 1000), trailer()) + .unwrap(); + } + + for i in 0..50 { + l.insert(&make_int_key(i), &make_value(i + 1000), trailer()) + .unwrap(); + } + + let mut it = l.iter(); + let mut num = 0; + for i in 0..N { + let ent = it.next().unwrap(); + assert_eq!(ent.key(), make_int_key(i)); + assert_eq!(ent.value(), make_value(i + 1000)); + + num += 1; + } + assert_eq!(num, N); +} + +#[test] +fn test_iter_latest() { + run(|| iter_latest(SkipList::new(TEST_OPTIONS).unwrap())) +} + +#[test] +fn test_iter_latest_unify() { + run(|| iter_latest(SkipList::new(UNIFY_TEST_OPTIONS).unwrap())) +} + +#[test] +#[cfg(feature = "memmap")] +#[cfg_attr(miri, ignore)] +fn test_iter_latest_map_mut() { + run(|| unsafe { + let dir = tempfile::tempdir().unwrap(); + let p = dir.path().join("test_skipmap_iter_latest_map_mut"); + let open_options = OpenOptions::default() + .create_new(Some(ARENA_SIZE as u32)) + .read(true) + .write(true); + let map_options = MmapOptions::default(); + iter_latest(SkipList::map_mut(p, Options::new(), open_options, map_options).unwrap()); + }) +} + +#[test] +#[cfg(feature = "memmap")] +fn test_iter_latest_map_anon() { + run(|| { + let map_options = MmapOptions::default().len(ARENA_SIZE as u32); + iter_latest(SkipList::map_anon(Options::new(), map_options).unwrap()); + }) +} + +#[test] +#[cfg(feature = "memmap")] +fn test_iter_latest_map_anon_unify() { + run(|| { + let map_options = MmapOptions::default().len(ARENA_SIZE as u32); + iter_latest(SkipList::map_anon(UNIFY_TEST_OPTIONS, map_options).unwrap()); + }) +} + +fn range_latest(l: SkipMap) { 
+ const N: usize = 100; + + for i in 0..N { + l.get_or_insert(&make_int_key(i), &make_value(i), trailer()) + .unwrap(); + } + + for i in 50..N { + l.insert(&make_int_key(i), &make_value(i + 1000), trailer()) + .unwrap(); + } + + for i in 0..50 { + l.insert(&make_int_key(i), &make_value(i + 1000), trailer()) + .unwrap(); + } + + let mut it = l.range::<[u8], _>(..); + let mut num = 0; + for i in 0..N { + let ent = it.next().unwrap(); + assert_eq!(ent.key(), make_int_key(i)); + assert_eq!(ent.value(), make_value(i + 1000)); + + num += 1; + } + assert_eq!(num, N); +} + +#[test] +fn test_range_latest() { + run(|| range_latest(SkipList::new(TEST_OPTIONS).unwrap())) +} + +#[test] +fn test_range_latest_unify() { + run(|| range_latest(SkipList::new(UNIFY_TEST_OPTIONS).unwrap())) +} + +#[test] +#[cfg(feature = "memmap")] +#[cfg_attr(miri, ignore)] +fn test_range_latest_map_mut() { + run(|| unsafe { + let dir = tempfile::tempdir().unwrap(); + let p = dir.path().join("test_skipmap_range_latest_map_mut"); + let open_options = OpenOptions::default() + .create_new(Some(ARENA_SIZE as u32)) + .read(true) + .write(true); + let map_options = MmapOptions::default(); + range_latest(SkipList::map_mut(p, Options::new(), open_options, map_options).unwrap()); + }) +} + +#[test] +#[cfg(feature = "memmap")] +fn test_range_latest_map_anon() { + run(|| { + let map_options = MmapOptions::default().len(ARENA_SIZE as u32); + range_latest(SkipList::map_anon(Options::new(), map_options).unwrap()); + }) +} + +#[test] +#[cfg(feature = "memmap")] +fn test_range_latest_map_anon_unify() { + run(|| { + let map_options = MmapOptions::default().len(ARENA_SIZE as u32); + range_latest(SkipList::map_anon(UNIFY_TEST_OPTIONS, map_options).unwrap()); + }) +} + +#[test] +#[cfg(feature = "memmap")] +#[cfg_attr(miri, ignore)] +fn test_reopen_mmap() { + run(|| unsafe { + let dir = tempfile::tempdir().unwrap(); + let p = dir.path().join("reopen_skipmap"); + { + let open_options = OpenOptions::default() + 
.create(Some(ARENA_SIZE as u32))
+ .read(true)
+ .write(true);
+ let map_options = MmapOptions::default();
+ let l = SkipMap::map_mut(&p, Options::new(), open_options, map_options).unwrap();
+ for i in 0..1000 {
+ l.get_or_insert(&key(i), &new_value(i), trailer()).unwrap();
+ }
+ l.flush().unwrap();
+ }
+
+ let open_options = OpenOptions::default().read(true);
+ let map_options = MmapOptions::default();
+ let l = SkipMap::map(&p, Options::new(), open_options, map_options).unwrap();
+ assert_eq!(1000, l.len());
+ for i in 0..1000 {
+ let k = key(i);
+ let ent = l.get(&k).unwrap();
+ assert_eq!(new_value(i), ent.value());
+
+ assert_eq!(ent.key(), k);
+ }
+ })
+}
+
+#[test]
+#[cfg(feature = "memmap")]
+#[cfg_attr(miri, ignore)]
+fn test_reopen_mmap2() {
+ run(|| unsafe {
+ use rand::seq::SliceRandom;
+
+ let dir = tempfile::tempdir().unwrap();
+ let p = dir.path().join("reopen_skipmap2");
+ {
+ let open_options = OpenOptions::default()
+ .create(Some(ARENA_SIZE as u32))
+ .read(true)
+ .write(true);
+ let map_options = MmapOptions::default();
+ let l =
+ SkipMap::map_mut_with_comparator(&p, Options::new(), open_options, map_options, Ascend)
+ .unwrap();
+ let mut data = (0..1000).collect::<Vec<_>>(); // fixed: turbofish was garbled to `collect::>()` (angle brackets stripped)
+ data.shuffle(&mut rand::thread_rng());
+ for i in &data {
+ let i = *i;
+ l.get_or_insert(&key(i), &new_value(i), trailer()).unwrap();
+ }
+ l.flush_async().unwrap();
+
+ for i in data {
+ let k = key(i);
+ let ent = l.get(&k).unwrap();
+ assert_eq!(new_value(i), ent.value());
+ assert_eq!(ent.key(), k);
+ }
+ }
+
+ let open_options = OpenOptions::default().read(true);
+ let map_options = MmapOptions::default();
+ let l =
+ SkipMap::map_with_comparator(&p, Options::new(), open_options, map_options, Ascend).unwrap();
+ assert_eq!(1000, l.len());
+ let mut data = (0..1000).collect::<Vec<_>>(); // fixed: turbofish was garbled to `collect::>()`
+ data.shuffle(&mut rand::thread_rng());
+ for i in data {
+ let k = key(i);
+ let ent = l.get(&k).unwrap();
+ assert_eq!(new_value(i), ent.value());
+ assert_eq!(ent.key(), k);
+ }
+ })
+}
+
+struct Person { + id: u32, + name: std::string::String, +} + +impl Person { + fn encoded_size(&self) -> usize { + 4 + self.name.len() + } +} + +fn get_or_insert_with_value(l: SkipMap) { + let alice = Person { + id: 1, + name: std::string::String::from("Alice"), + }; + + let encoded_size = alice.encoded_size() as u32; + + let vb = ValueBuilder::new(encoded_size, |val| { + assert_eq!(val.capacity(), encoded_size as usize); + assert!(val.is_empty()); + val.put_u32_le(alice.id).unwrap(); + assert_eq!(val.len(), 4); + assert_eq!(val.remaining(), encoded_size as usize - 4); + assert_eq!(&*val, alice.id.to_le_bytes()); + val[..4].copy_from_slice(&alice.id.to_be_bytes()); + assert_eq!(&*val, alice.id.to_be_bytes()); + val.put_slice(alice.name.as_bytes()).unwrap(); + assert_eq!(val.len(), encoded_size as usize); + let err = val.put_slice(&[1]).unwrap_err(); + assert_eq!( + std::string::ToString::to_string(&err), + "buffer does not have enough space (remaining 0, want 1)" + ); + Ok(()) + }); + + l.get_or_insert_with_value_builder::<()>(b"alice", vb, trailer()) + .unwrap(); +} + +#[test] +fn test_get_or_insert_with_value() { + run(|| { + get_or_insert_with_value(SkipList::new(TEST_OPTIONS).unwrap()); + }) +} + +#[test] +fn test_get_or_insert_with_value_unify() { + run(|| { + get_or_insert_with_value(SkipList::new(UNIFY_TEST_OPTIONS).unwrap()); + }) +} + +#[test] +#[cfg(feature = "memmap")] +#[cfg_attr(miri, ignore)] +fn test_get_or_insert_with_value_map_mut() { + run(|| unsafe { + let dir = tempfile::tempdir().unwrap(); + let p = dir + .path() + .join("test_skipmap_get_or_insert_with_value_map_mut"); + let open_options = OpenOptions::default() + .create_new(Some(ARENA_SIZE as u32)) + .read(true) + .write(true); + let map_options = MmapOptions::default(); + get_or_insert_with_value( + SkipList::map_mut(p, Options::new(), open_options, map_options).unwrap(), + ); + }) +} + +#[test] +#[cfg(feature = "memmap")] +fn test_get_or_insert_with_value_map_anon() { + run(|| { + let 
map_options = MmapOptions::default().len(ARENA_SIZE as u32); + get_or_insert_with_value(SkipList::map_anon(Options::new(), map_options).unwrap()); + }) +} + +#[test] +#[cfg(feature = "memmap")] +fn test_get_or_insert_with_value_map_anon_unify() { + run(|| { + let map_options = MmapOptions::default().len(ARENA_SIZE as u32); + get_or_insert_with_value(SkipList::map_anon(UNIFY_TEST_OPTIONS, map_options).unwrap()); + }) +} + +fn get_or_insert_with(l: SkipMap) { + let alice = Person { + id: 1, + name: std::string::String::from("Alice"), + }; + + let encoded_size = alice.encoded_size() as u32; + + let kb = KeyBuilder::new(5u8.into(), |key: &mut VacantBuffer| { + key.put_slice(b"alice").unwrap(); + Ok(()) + }); + + let vb = ValueBuilder::new(encoded_size, |val: &mut VacantBuffer| { + assert_eq!(val.capacity(), encoded_size as usize); + assert!(val.is_empty()); + val.put_u32_le(alice.id).unwrap(); + assert_eq!(val.len(), 4); + assert_eq!(val.remaining(), encoded_size as usize - 4); + assert_eq!(&*val, alice.id.to_le_bytes()); + val[..4].copy_from_slice(&alice.id.to_be_bytes()); + assert_eq!(&*val, alice.id.to_be_bytes()); + val.put_slice(alice.name.as_bytes()).unwrap(); + assert_eq!(val.len(), encoded_size as usize); + let err = val.put_slice(&[1]).unwrap_err(); + assert_eq!( + std::string::ToString::to_string(&err), + "buffer does not have enough space (remaining 0, want 1)" + ); + Ok(()) + }); + + l.get_or_insert_with_builders::<(), ()>(kb, vb, trailer()) + .unwrap(); +} + +#[test] +fn test_get_or_insert_with() { + run(|| get_or_insert_with(SkipList::new(TEST_OPTIONS).unwrap())) +} + +#[test] +fn test_get_or_insert_with_unify() { + run(|| get_or_insert_with(SkipList::new(UNIFY_TEST_OPTIONS).unwrap())) +} + +#[test] +#[cfg(feature = "memmap")] +#[cfg_attr(miri, ignore)] +fn test_get_or_insert_with_map_mut() { + run(|| unsafe { + let dir = tempfile::tempdir().unwrap(); + let p = dir.path().join("test_skipmap_get_or_insert_with_map_mut"); + let open_options = 
OpenOptions::default() + .create_new(Some(ARENA_SIZE as u32)) + .read(true) + .write(true); + let map_options = MmapOptions::default(); + get_or_insert_with(SkipList::map_mut(p, Options::new(), open_options, map_options).unwrap()); + }) +} + +#[test] +#[cfg(feature = "memmap")] +fn test_get_or_insert_with_map_anon() { + run(|| { + let map_options = MmapOptions::default().len(ARENA_SIZE as u32); + get_or_insert_with(SkipList::map_anon(Options::new(), map_options).unwrap()); + }) +} + +#[test] +#[cfg(feature = "memmap")] +fn test_get_or_insert_with_map_anon_unify() { + run(|| { + let map_options = MmapOptions::default().len(ARENA_SIZE as u32); + get_or_insert_with(SkipList::map_anon(UNIFY_TEST_OPTIONS, map_options).unwrap()); + }) +} + +fn insert_in(l: SkipMap) { + let k = 0u64.to_le_bytes(); + for i in 0..100 { + let v = new_value(i); + let old = l.insert(&k, &v, trailer()).unwrap(); + if let Some(old) = old { + assert_eq!(old.key(), k); + assert_eq!(old.value(), new_value(i - 1)); + } + } + + let ent = l.get(&k).unwrap(); + assert_eq!(ent.key(), k); + assert_eq!(ent.value(), new_value(99)); +} + +#[test] +fn test_insert_in() { + run(|| { + insert_in(SkipList::new(TEST_OPTIONS).unwrap()); + }) +} + +#[test] +fn test_insert_in_unify() { + run(|| { + insert_in(SkipList::new(UNIFY_TEST_OPTIONS).unwrap()); + }) +} + +#[test] +#[cfg(feature = "memmap")] +#[cfg_attr(miri, ignore)] +fn test_insert_in_map_mut() { + run(|| unsafe { + let dir = tempfile::tempdir().unwrap(); + let p = dir.path().join("test_skipmap_insert_in_map_mut"); + let open_options = OpenOptions::default() + .create_new(Some(ARENA_SIZE as u32)) + .read(true) + .write(true); + let map_options = MmapOptions::default(); + insert_in(SkipList::map_mut(p, Options::new(), open_options, map_options).unwrap()); + }) +} + +#[test] +#[cfg(feature = "memmap")] +fn test_insert_in_map_anon() { + run(|| { + let map_options = MmapOptions::default().len(ARENA_SIZE as u32); + insert_in(SkipList::map_anon(Options::new(), 
map_options).unwrap()); + }) +} + +#[test] +#[cfg(feature = "memmap")] +fn test_insert_in_map_anon_unify() { + run(|| { + let map_options = MmapOptions::default().len(ARENA_SIZE as u32); + insert_in(SkipList::map_anon(UNIFY_TEST_OPTIONS, map_options).unwrap()); + }) +} + +fn insert_with_value(l: SkipMap) { + let alice = Person { + id: 1, + name: std::string::String::from("Alice"), + }; + + let encoded_size = alice.encoded_size() as u32; + + let vb = ValueBuilder::new(encoded_size, |val| { + assert_eq!(val.capacity(), encoded_size as usize); + assert!(val.is_empty()); + val.put_u32_le(alice.id).unwrap(); + assert_eq!(val.len(), 4); + assert_eq!(val.remaining(), encoded_size as usize - 4); + assert_eq!(val, alice.id.to_le_bytes()); + val[..4].copy_from_slice(&alice.id.to_be_bytes()); + assert_eq!(val, alice.id.to_be_bytes()); + val.put_slice(alice.name.as_bytes()).unwrap(); + assert_eq!(val.len(), encoded_size as usize); + let err = val.put_slice(&[1]).unwrap_err(); + assert_eq!( + std::string::ToString::to_string(&err), + "buffer does not have enough space (remaining 0, want 1)" + ); + Ok(()) + }); + + l.insert_with_value_builder::<()>(b"alice", vb, trailer()) + .unwrap(); + + let alice2 = Person { + id: 2, + name: std::string::String::from("Alice"), + }; + + let vb = ValueBuilder::new(encoded_size, |val| { + assert_eq!(val.capacity(), encoded_size as usize); + assert!(val.is_empty()); + val.put_u32_le(alice2.id).unwrap(); + assert_eq!(val.len(), 4); + assert_eq!(val.remaining(), encoded_size as usize - 4); + assert_eq!(&*val, alice2.id.to_le_bytes()); + val[..4].copy_from_slice(&alice2.id.to_be_bytes()); + assert_eq!(&*val, alice2.id.to_be_bytes()); + val.put_slice(alice2.name.as_bytes()).unwrap(); + assert_eq!(val.len(), encoded_size as usize); + let err = val.put_slice(&[1]).unwrap_err(); + assert_eq!( + std::string::ToString::to_string(&err), + "buffer does not have enough space (remaining 0, want 1)" + ); + Ok(()) + }); + + let old = l + 
.insert_with_value_builder::<()>(b"alice", vb, trailer()) + .unwrap() + .unwrap(); + + assert_eq!(old.key(), b"alice"); + assert!(old.value().starts_with(&alice.id.to_be_bytes())); + + let ent = l.get(b"alice").unwrap(); + assert_eq!(ent.key(), b"alice"); + assert!(ent.value().starts_with(&alice2.id.to_be_bytes())); +} + +#[test] +fn test_insert_with_value() { + run(|| insert_with_value(SkipList::new(TEST_OPTIONS).unwrap())); +} + +#[test] +fn test_insert_with_value_unify() { + run(|| insert_with_value(SkipList::new(UNIFY_TEST_OPTIONS).unwrap())); +} + +#[test] +#[cfg(feature = "memmap")] +#[cfg_attr(miri, ignore)] +fn test_insert_with_value_map_mut() { + run(|| unsafe { + let dir = tempfile::tempdir().unwrap(); + let p = dir + .path() + .join("test_skipmap_get_or_insert_with_value_map_mut"); + let open_options = OpenOptions::default() + .create_new(Some(ARENA_SIZE as u32)) + .read(true) + .write(true); + let map_options = MmapOptions::default(); + insert_with_value(SkipList::map_mut(p, Options::new(), open_options, map_options).unwrap()); + }) +} + +#[test] +#[cfg(feature = "memmap")] +fn test_insert_with_value_map_anon() { + run(|| { + let map_options = MmapOptions::default().len(ARENA_SIZE as u32); + insert_with_value(SkipList::map_anon(Options::new(), map_options).unwrap()); + }) +} + +#[test] +#[cfg(feature = "memmap")] +fn test_insert_with_value_map_anon_unify() { + run(|| { + let map_options = MmapOptions::default().len(ARENA_SIZE as u32); + insert_with_value(SkipList::map_anon(UNIFY_TEST_OPTIONS, map_options).unwrap()); + }) +} + +fn insert_with(l: SkipMap) { + let alice = Person { + id: 1, + name: std::string::String::from("Alice"), + }; + + let encoded_size = alice.encoded_size() as u32; + + let kb = KeyBuilder::new(5u8.into(), |key: &mut VacantBuffer| { + key.put_slice(b"alice").unwrap(); + Ok(()) + }); + + let vb = ValueBuilder::new(encoded_size as u32, |val: &mut VacantBuffer| { + assert_eq!(val.capacity(), encoded_size as usize); + 
assert!(val.is_empty()); + val.put_u32_le(alice.id).unwrap(); + assert_eq!(val.len(), 4); + assert_eq!(val.remaining(), encoded_size as usize - 4); + assert_eq!(val, alice.id.to_le_bytes()); + val[..4].copy_from_slice(&alice.id.to_be_bytes()); + assert_eq!(val, alice.id.to_be_bytes()); + val.put_slice(alice.name.as_bytes()).unwrap(); + assert_eq!(val.len(), encoded_size as usize); + let err = val.put_slice(&[1]).unwrap_err(); + assert_eq!( + std::string::ToString::to_string(&err), + "buffer does not have enough space (remaining 0, want 1)" + ); + Ok(()) + }); + + l.insert_with_builders::<(), ()>(kb, vb, trailer()).unwrap(); + + let alice2 = Person { + id: 2, + name: std::string::String::from("Alice"), + }; + + let vb = ValueBuilder::new(encoded_size, |val: &mut VacantBuffer| { + assert_eq!(val.capacity(), encoded_size as usize); + assert!(val.is_empty()); + val.put_u32_le(alice2.id).unwrap(); + assert_eq!(val.len(), 4); + assert_eq!(val.remaining(), encoded_size as usize - 4); + assert_eq!(&*val, alice2.id.to_le_bytes()); + val[..4].copy_from_slice(&alice2.id.to_be_bytes()); + assert_eq!(&*val, alice2.id.to_be_bytes()); + val.put_slice(alice2.name.as_bytes()).unwrap(); + assert_eq!(val.len(), encoded_size as usize); + let err = val.put_slice(&[1]).unwrap_err(); + assert_eq!( + std::string::ToString::to_string(&err), + "buffer does not have enough space (remaining 0, want 1)" + ); + Ok(()) + }); + let old = l + .insert_with_builders::<(), ()>(kb, vb, trailer()) + .unwrap() + .unwrap(); + + assert_eq!(old.key(), b"alice"); + assert!(old.value().starts_with(&alice.id.to_be_bytes())); + + let ent = l.get(b"alice").unwrap(); + assert_eq!(ent.key(), b"alice"); + assert!(ent.value().starts_with(&alice2.id.to_be_bytes())); +} + +#[test] +fn test_insert_with() { + run(|| insert_with(SkipList::new(TEST_OPTIONS).unwrap())) +} + +#[test] +fn test_insert_with_unify() { + run(|| insert_with(SkipList::new(UNIFY_TEST_OPTIONS).unwrap())) +} + +#[test] +#[cfg(feature = "memmap")] 
+#[cfg_attr(miri, ignore)] +fn test_insert_with_map_mut() { + run(|| unsafe { + let dir = tempfile::tempdir().unwrap(); + let p = dir.path().join("test_skipmap_insert_with_map_mut"); + let open_options = OpenOptions::default() + .create_new(Some(ARENA_SIZE as u32)) + .read(true) + .write(true); + let map_options = MmapOptions::default(); + insert_with(SkipList::map_mut(p, Options::new(), open_options, map_options).unwrap()); + }) +} + +#[test] +#[cfg(feature = "memmap")] +fn test_insert_with_map_anon() { + run(|| { + let map_options = MmapOptions::default().len(ARENA_SIZE as u32); + insert_with(SkipList::map_anon(Options::new(), map_options).unwrap()); + }) +} + +#[test] +#[cfg(feature = "memmap")] +fn test_insert_with_map_anon_unify() { + run(|| { + let map_options = MmapOptions::default().len(ARENA_SIZE as u32); + insert_with(SkipList::map_anon(UNIFY_TEST_OPTIONS, map_options).unwrap()); + }) +} + +fn remove(l: SkipMap) { + for i in 0..100 { + let v = new_value(i); + l.insert(&key(i), &v, trailer()).unwrap(); + } + + for i in 0..100 { + let k = key(i); + // no race, remove should succeed + let old = l.remove(&k, trailer()).unwrap(); + assert!(old.is_none()); + + // key already removed + let old = l.remove(&k, trailer()).unwrap(); + assert!(old.is_none()); + } + + for i in 0..100 { + let k = key(i); + let ent = l.get(&k); + assert!(ent.is_none()); + } +} + +#[test] +fn test_remove() { + run(|| remove(SkipList::new(TEST_OPTIONS).unwrap())) +} + +#[test] +fn test_remove_unify() { + run(|| remove(SkipList::new(UNIFY_TEST_OPTIONS).unwrap())) +} + +#[test] +#[cfg(feature = "memmap")] +#[cfg_attr(miri, ignore)] +fn test_remove_map_mut() { + run(|| unsafe { + let dir = tempfile::tempdir().unwrap(); + let p = dir.path().join("test_skipmap_remove_map_mut"); + let open_options = OpenOptions::default() + .create_new(Some(ARENA_SIZE as u32)) + .read(true) + .write(true); + let map_options = MmapOptions::default(); + remove(SkipList::map_mut(p, Options::new(), open_options, 
map_options).unwrap()); + }) +} + +#[test] +#[cfg(feature = "memmap")] +fn test_remove_map_anon() { + run(|| { + let map_options = MmapOptions::default().len(ARENA_SIZE as u32); + remove(SkipList::map_anon(Options::new(), map_options).unwrap()); + }) +} + +#[test] +#[cfg(feature = "memmap")] +fn test_remove_map_anon_unify() { + run(|| { + let map_options = MmapOptions::default().len(ARENA_SIZE as u32); + remove(SkipList::map_anon(UNIFY_TEST_OPTIONS, map_options).unwrap()); + }) +} + +fn remove2(l: SkipMap) { + for i in 0..100 { + let v = new_value(i); + l.insert(&key(i), &v, trailer()).unwrap(); + } + + for i in 0..100 { + let k = key(i); + // not found, remove should succeed + let old = l.remove(&k, trailer()).unwrap(); + assert!(old.is_none()); + + // no-race, remove should succeed + let old = l.remove(&k, trailer()).unwrap(); + assert!(old.is_none()); + } + + for i in 0..100 { + let k = key(i); + let ent = l.get(&k); + assert!(ent.is_none()); + } +} + +#[test] +fn test_remove2() { + run(|| remove2(SkipList::new(TEST_OPTIONS).unwrap())) +} + +#[test] +fn test_remove2_unify() { + run(|| remove2(SkipList::new(UNIFY_TEST_OPTIONS).unwrap())) +} + +#[test] +#[cfg(feature = "memmap")] +#[cfg_attr(miri, ignore)] +fn test_remove2_map_mut() { + run(|| unsafe { + let dir = tempfile::tempdir().unwrap(); + let p = dir.path().join("test_skipmap_remove2_map_mut"); + let open_options = OpenOptions::default() + .create_new(Some(ARENA_SIZE as u32)) + .read(true) + .write(true); + let map_options = MmapOptions::default(); + remove2(SkipList::map_mut(p, Options::new(), open_options, map_options).unwrap()); + }) +} + +#[test] +#[cfg(feature = "memmap")] +fn test_remove2_map_anon() { + run(|| unsafe { + let map_options = MmapOptions::default().len(ARENA_SIZE as u32); + remove2(SkipList::map_anon(Options::new(), map_options).unwrap()); + }) +} + +#[test] +#[cfg(feature = "memmap")] +fn test_remove2_map_anon_unify() { + run(|| { + let map_options = MmapOptions::default().len(ARENA_SIZE 
as u32); + remove2(SkipList::map_anon(UNIFY_TEST_OPTIONS, map_options).unwrap()); + }) +} diff --git a/src/unsync/tests/versioned.rs b/src/unsync/tests/versioned.rs new file mode 100644 index 0000000..609e1fa --- /dev/null +++ b/src/unsync/tests/versioned.rs @@ -0,0 +1,2601 @@ +use super::*; + +type SkipList = crate::unsync::versioned::SkipMap; + +type SkipMap = crate::unsync::versioned::SkipMap; + +fn empty_in(l: SkipMap) { + let mut it = l.iter_all_versions(MIN_VERSION); + + assert!(it.seek_lower_bound(Bound::Unbounded).is_none()); + assert!(it.seek_upper_bound(Bound::Unbounded).is_none()); + assert!(it.seek_lower_bound(Bound::Included(b"aaa")).is_none()); + assert!(it.seek_upper_bound(Bound::Excluded(b"aaa")).is_none()); + assert!(it.seek_lower_bound(Bound::Excluded(b"aaa")).is_none()); + assert!(it.seek_upper_bound(Bound::Included(b"aaa")).is_none()); + assert!(l.first(MIN_VERSION).is_none()); + assert!(l.last(MIN_VERSION).is_none()); + assert!(l.get(MIN_VERSION, b"aaa").is_none()); + assert!(!l.contains_key(MIN_VERSION, b"aaa")); + assert!(l.allocated() > 0); + assert!(l.capacity() > 0); + assert_eq!(l.remaining(), l.capacity() - l.allocated()); +} + +#[test] +fn test_empty() { + run(|| empty_in(SkipList::new(Options::new()).unwrap())); +} + +#[test] +fn test_empty_unify() { + run(|| empty_in(SkipList::new(UNIFY_TEST_OPTIONS).unwrap())); +} + +#[test] +#[cfg(feature = "memmap")] +#[cfg_attr(miri, ignore)] +fn test_empty_map_mut() { + run(|| unsafe { + let dir = tempfile::tempdir().unwrap(); + let p = dir.path().join("test_skipmap_empty_map_mut"); + let open_options = OpenOptions::default() + .create_new(Some(1000)) + .read(true) + .write(true); + let map_options = MmapOptions::default(); + + let x = SkipList::map_mut(p, Options::new(), open_options, map_options).unwrap(); + empty_in(x); + }) +} + +#[test] +#[cfg(feature = "memmap")] +fn test_empty_map_anon() { + run(|| { + let map_options = MmapOptions::default().len(1000); + 
empty_in(SkipList::map_anon(Options::new(), map_options).unwrap()); + }) +} + +#[test] +#[cfg(feature = "memmap")] +fn test_empty_map_anon_unify() { + run(|| { + let map_options = MmapOptions::default().len(1000); + empty_in(SkipList::map_anon(TEST_OPTIONS, map_options).unwrap()); + }) +} + +fn full_in(l: impl FnOnce(usize) -> SkipMap) { + let l = l(1000); + let mut found_arena_full = false; + + for i in 0..100 { + if let Err(e) = l.get_or_insert(0, &make_int_key(i), &make_value(i)) { + assert!(matches!( + e, + Error::Arena(ArenaError::InsufficientSpace { .. }) + )); + found_arena_full = true; + break; + } + } + + assert!(found_arena_full); +} + +#[test] +fn test_full() { + run(|| { + full_in(|n| { + SkipList::new( + Options::new() + .with_capacity(n as u32) + .with_freelist(Freelist::None), + ) + .unwrap() + }) + }) +} + +#[test] +fn test_full_unify() { + run(|| { + full_in(|n| { + SkipList::new( + UNIFY_TEST_OPTIONS + .with_capacity(n as u32) + .with_freelist(Freelist::None), + ) + .unwrap() + }) + }) +} + +#[test] +#[cfg(feature = "memmap")] +#[cfg_attr(miri, ignore)] +fn test_full_map_mut() { + run(|| unsafe { + let dir = tempfile::tempdir().unwrap(); + let p = dir.path().join("test_skipmap_full_map_mut"); + + full_in(|n| { + let open_options = OpenOptions::default() + .create_new(Some(n as u32)) + .read(true) + .write(true); + let map_options = MmapOptions::default(); + SkipList::map_mut( + p, + Options::new().with_freelist(Freelist::None), + open_options, + map_options, + ) + .unwrap() + }); + }) +} + +#[test] +#[cfg(feature = "memmap")] +fn test_full_map_anon() { + run(|| { + full_in(|n| { + let map_options = MmapOptions::default().len(n as u32); + SkipList::map_anon(Options::new().with_freelist(Freelist::None), map_options).unwrap() + }); + }) +} + +#[test] +#[cfg(feature = "memmap")] +fn test_full_map_anon_unify() { + run(|| { + full_in(|n| { + let map_options = MmapOptions::default().len(n as u32); + 
SkipList::map_anon(Options::new().with_freelist(Freelist::None), map_options).unwrap() + }); + }) +} + +fn basic_in(mut l: SkipMap) { + // Try adding values. + l.get_or_insert(0, b"key1", &make_value(1)).unwrap(); + l.get_or_insert(0, b"key3", &make_value(3)).unwrap(); + l.get_or_insert(0, b"key2", &make_value(2)).unwrap(); + assert_eq!(l.comparator(), &Ascend); + + { + let mut it = l.iter_all_versions(0); + let ent = it.seek_lower_bound(Bound::Included(b"key1")).unwrap(); + assert_eq!(ent.key(), b"key1"); + assert_eq!(ent.value().unwrap(), &make_value(1)); + assert_eq!(ent.version(), 0); + + let ent = it.seek_lower_bound(Bound::Included(b"key2")).unwrap(); + assert_eq!(ent.key(), b"key2"); + assert_eq!(ent.value().unwrap(), &make_value(2)); + assert_eq!(ent.version(), 0); + + let ent = it.seek_lower_bound(Bound::Included(b"key3")).unwrap(); + assert_eq!(ent.key(), b"key3"); + assert_eq!(ent.value().unwrap(), &make_value(3)); + assert_eq!(ent.version(), 0); + } + + l.get_or_insert(1, "a".as_bytes(), &[]).unwrap(); + l.get_or_insert(2, "a".as_bytes(), &[]).unwrap(); + + { + let mut it = l.iter_all_versions(2); + let ent = it.seek_lower_bound(Bound::Included(b"a")).unwrap(); + assert_eq!(ent.key(), b"a"); + assert_eq!(ent.value().unwrap(), &[]); + assert_eq!(ent.version(), 2); + + let ent = it.next().unwrap(); + assert_eq!(ent.key(), b"a"); + assert_eq!(ent.value().unwrap(), &[]); + assert_eq!(ent.version(), 1); + } + + l.get_or_insert(2, "b".as_bytes(), &[]).unwrap(); + l.get_or_insert(1, "b".as_bytes(), &[]).unwrap(); + + { + let mut it = l.iter_all_versions(2); + let ent = it.seek_lower_bound(Bound::Included(b"b")).unwrap(); + assert_eq!(ent.key(), b"b"); + assert_eq!(ent.value().unwrap(), &[]); + assert_eq!(ent.version(), 2); + + let ent = it.next().unwrap(); + assert_eq!(ent.key(), b"b"); + assert_eq!(ent.value().unwrap(), &[]); + assert_eq!(ent.version(), 1); + + let ent = it.entry().unwrap(); + assert_eq!(ent.key(), b"b"); + assert_eq!(ent.value().unwrap(), 
&[]); + assert_eq!(ent.version(), 1); + } + + l.get_or_insert(2, b"b", &[]).unwrap().unwrap(); + + assert!(l.get_or_insert(2, b"c", &[]).unwrap().is_none()); + + unsafe { + l.clear().unwrap(); + } + + let l = l.clone(); + { + let mut it = l.iter_all_versions(0); + assert!(it.seek_lower_bound(Bound::Unbounded).is_none()); + assert!(it.seek_upper_bound(Bound::Unbounded).is_none()); + } + assert!(l.is_empty()); + + #[cfg(feature = "memmap")] + l.flush().unwrap(); + + #[cfg(feature = "memmap")] + l.flush_async().unwrap(); +} + +#[test] +fn test_basic() { + run(|| basic_in(SkipList::new(TEST_OPTIONS).unwrap())) +} + +#[test] +fn test_basic_unify() { + run(|| basic_in(SkipList::new(UNIFY_TEST_OPTIONS).unwrap())) +} + +#[test] +#[cfg(feature = "memmap")] +#[cfg_attr(miri, ignore)] +fn test_basic_map_mut() { + run(|| unsafe { + let dir = tempfile::tempdir().unwrap(); + let p = dir.path().join("test_skipmap_basic_map_mut"); + let open_options = OpenOptions::default() + .create_new(Some(ARENA_SIZE as u32)) + .read(true) + .write(true); + let map_options = MmapOptions::default(); + basic_in(SkipList::map_mut(p, Options::new(), open_options, map_options).unwrap()); + }) +} + +#[test] +#[cfg(feature = "memmap")] +fn test_basic_map_anon() { + run(|| { + let map_options = MmapOptions::default().len(ARENA_SIZE as u32); + basic_in(SkipList::map_anon(Options::new(), map_options).unwrap()); + }) +} + +#[test] +#[cfg(feature = "memmap")] +fn test_basic_map_anon_unify() { + run(|| { + let map_options = MmapOptions::default().len(ARENA_SIZE as u32); + basic_in(SkipList::map_anon(TEST_OPTIONS, map_options).unwrap()); + }) +} + +fn iter_all_versions_mvcc(l: SkipMap) { + l.get_or_insert(1, b"a", b"a1").unwrap(); + l.get_or_insert(3, b"a", b"a2").unwrap(); + l.get_or_insert(1, b"c", b"c1").unwrap(); + l.get_or_insert(3, b"c", b"c2").unwrap(); + + let mut it = l.iter_all_versions(0); + let mut num = 0; + while it.next().is_some() { + num += 1; + } + assert_eq!(num, 0); + + let mut it = 
l.iter_all_versions(1); + let mut num = 0; + while it.next().is_some() { + num += 1; + } + assert_eq!(num, 2); + + let mut it = l.iter_all_versions(2); + let mut num = 0; + while it.next().is_some() { + num += 1; + } + assert_eq!(num, 2); + + let mut it = l.iter_all_versions(3); + let mut num = 0; + while it.next().is_some() { + num += 1; + } + assert_eq!(num, 4); + + let mut it = l.iter_all_versions(0); + assert!(it.seek_lower_bound(Bound::Unbounded).is_none()); + assert!(it.seek_upper_bound(Bound::Unbounded).is_none()); + + let mut it = l.iter_all_versions(1); + let ent = it.seek_lower_bound(Bound::Unbounded).unwrap(); + assert_eq!(ent.key(), b"a"); + assert_eq!(ent.value().unwrap(), b"a1"); + assert_eq!(ent.version(), 1); + + let ent = it.seek_upper_bound(Bound::Unbounded).unwrap(); + assert_eq!(ent.key(), b"c"); + assert_eq!(ent.value().unwrap(), b"c1"); + assert_eq!(ent.version(), 1); + + let mut it = l.iter_all_versions(2); + let ent = it.seek_lower_bound(Bound::Unbounded).unwrap(); + assert_eq!(ent.key(), b"a"); + assert_eq!(ent.value().unwrap(), b"a1"); + assert_eq!(ent.version(), 1); + + let ent = it.seek_upper_bound(Bound::Unbounded).unwrap(); + assert_eq!(ent.key(), b"c"); + assert_eq!(ent.value().unwrap(), b"c1"); + assert_eq!(ent.version(), 1); + + let mut it = l.iter_all_versions(3); + + let ent = it.seek_upper_bound(Bound::Excluded(b"b")).unwrap(); + assert_eq!(ent.key(), b"a"); + assert_eq!(ent.value().unwrap(), b"a2"); + assert_eq!(ent.version(), 3); + + let ent = it.seek_upper_bound(Bound::Included(b"c")).unwrap(); + assert_eq!(ent.key(), b"c"); + assert_eq!(ent.value().unwrap(), b"c2"); + assert_eq!(ent.version(), 3); + + let ent = it.seek_lower_bound(Bound::Excluded(b"b")).unwrap(); + assert_eq!(ent.key(), b"c"); + assert_eq!(ent.value().unwrap(), b"c2"); + assert_eq!(ent.version(), 3); + + let ent = it.seek_lower_bound(Bound::Included(b"c")).unwrap(); + assert_eq!(ent.key(), b"c"); + assert_eq!(ent.value().unwrap(), b"c2"); + 
assert_eq!(ent.version(), 3); +} + +#[test] +fn test_iter_all_versions_mvcc() { + run(|| iter_all_versions_mvcc(SkipList::new(TEST_OPTIONS).unwrap())); +} + +#[test] +fn test_iter_all_versions_mvcc_unify() { + run(|| iter_all_versions_mvcc(SkipList::new(UNIFY_TEST_OPTIONS).unwrap())); +} + +#[test] +#[cfg(feature = "memmap")] +#[cfg_attr(miri, ignore)] +fn test_iter_all_versions_mvcc_map_mut() { + run(|| unsafe { + let dir = tempfile::tempdir().unwrap(); + let p = dir + .path() + .join("test_skipmap_iter_all_versions_mvcc_map_mut"); + let open_options = OpenOptions::default() + .create_new(Some(ARENA_SIZE as u32)) + .read(true) + .write(true); + let map_options = MmapOptions::default(); + iter_all_versions_mvcc( + SkipList::map_mut(p, Options::new(), open_options, map_options).unwrap(), + ); + }); +} + +#[test] +#[cfg(feature = "memmap")] +fn test_iter_all_versions_mvcc_map_anon() { + run(|| { + let map_options = MmapOptions::default().len(ARENA_SIZE as u32); + iter_all_versions_mvcc(SkipList::map_anon(Options::new(), map_options).unwrap()); + }) +} + +#[test] +#[cfg(feature = "memmap")] +fn test_iter_all_versions_mvcc_map_anon_unify() { + run(|| { + let map_options = MmapOptions::default().len(ARENA_SIZE as u32); + iter_all_versions_mvcc(SkipList::map_anon(UNIFY_TEST_OPTIONS, map_options).unwrap()); + }) +} + +fn ordering() { + let l = SkipList::with_comparator(TEST_OPTIONS, Descend).unwrap(); + + l.get_or_insert(1, b"a1", b"a1").unwrap(); + l.get_or_insert(2, b"a2", b"a2").unwrap(); + l.get_or_insert(3, b"a3", b"a3").unwrap(); + + let mut it = l.iter_all_versions(3); + for i in (1..=3).rev() { + let ent = it.next().unwrap(); + assert_eq!(ent.key(), format!("a{i}").as_bytes()); + assert_eq!(ent.value().unwrap(), format!("a{i}").as_bytes()); + } +} + +#[test] +fn test_ordering() { + run(ordering); +} + +fn get_mvcc(l: SkipMap) { + l.get_or_insert(1, b"a", b"a1").unwrap(); + l.get_or_insert(3, b"a", b"a2").unwrap(); + l.get_or_insert(1, b"c", b"c1").unwrap(); + 
l.get_or_insert(3, b"c", b"c2").unwrap(); + + let ent = l.get(1, b"a").unwrap(); + assert_eq!(ent.key(), b"a"); + assert_eq!(ent.value(), b"a1"); + assert_eq!(ent.version(), 1); + + let ent = l.get(2, b"a").unwrap(); + assert_eq!(ent.key(), b"a"); + assert_eq!(ent.value(), b"a1"); + assert_eq!(ent.version(), 1); + + let ent = l.get(3, b"a").unwrap(); + assert_eq!(ent.key(), b"a"); + assert_eq!(ent.value(), b"a2"); + assert_eq!(ent.version(), 3); + + let ent = l.get(4, b"a").unwrap(); + assert_eq!(ent.key(), b"a"); + assert_eq!(ent.value(), b"a2"); + assert_eq!(ent.version(), 3); + + assert!(l.get(0, b"b").is_none()); + assert!(l.get(1, b"b").is_none()); + assert!(l.get(2, b"b").is_none()); + assert!(l.get(3, b"b").is_none()); + assert!(l.get(4, b"b").is_none()); + + let ent = l.get(1, b"c").unwrap(); + assert_eq!(ent.key(), b"c"); + assert_eq!(ent.value(), b"c1"); + assert_eq!(ent.version(), 1); + + let ent = l.get(2, b"c").unwrap(); + assert_eq!(ent.key(), b"c"); + assert_eq!(ent.value(), b"c1"); + assert_eq!(ent.version(), 1); + + let ent = l.get(3, b"c").unwrap(); + assert_eq!(ent.key(), b"c"); + assert_eq!(ent.value(), b"c2"); + assert_eq!(ent.version(), 3); + + let ent = l.get(4, b"c").unwrap(); + assert_eq!(ent.key(), b"c"); + assert_eq!(ent.value(), b"c2"); + assert_eq!(ent.version(), 3); + + assert!(l.get(5, b"d").is_none()); +} + +#[test] +fn test_get_mvcc() { + run(|| get_mvcc(SkipList::new(TEST_OPTIONS).unwrap())); +} + +#[test] +fn test_get_mvcc_unify() { + run(|| get_mvcc(SkipList::new(UNIFY_TEST_OPTIONS).unwrap())); +} + +#[test] +#[cfg(feature = "memmap")] +#[cfg_attr(miri, ignore)] +fn test_get_mvcc_map_mut() { + run(|| unsafe { + let dir = tempfile::tempdir().unwrap(); + let p = dir.path().join("test_skipmap_get_mvcc_map_mut"); + let open_options = OpenOptions::default() + .create_new(Some(ARENA_SIZE as u32)) + .read(true) + .write(true); + let map_options = MmapOptions::default(); + get_mvcc(SkipList::map_mut(p, Options::new(), open_options, 
map_options).unwrap()); + }) +} + +#[test] +#[cfg(feature = "memmap")] +fn test_get_mvcc_map_anon() { + run(|| { + let map_options = MmapOptions::default().len(ARENA_SIZE as u32); + get_mvcc(SkipList::map_anon(Options::new(), map_options).unwrap()); + }) +} + +#[test] +#[cfg(feature = "memmap")] +fn test_get_mvcc_map_anon_unify() { + run(|| { + let map_options = MmapOptions::default().len(ARENA_SIZE as u32); + get_mvcc(SkipList::map_anon(UNIFY_TEST_OPTIONS, map_options).unwrap()); + }) +} + +fn gt_in(l: SkipMap) { + l.get_or_insert(1, b"a", b"a1").unwrap(); + l.get_or_insert(3, b"a", b"a2").unwrap(); + l.get_or_insert(1, b"c", b"c1").unwrap(); + l.get_or_insert(3, b"c", b"c2").unwrap(); + l.get_or_insert(5, b"c", b"c3").unwrap(); + + assert!(l.lower_bound(0, Bound::Excluded(b"a")).is_none()); + assert!(l.lower_bound(0, Bound::Excluded(b"b")).is_none()); + assert!(l.lower_bound(0, Bound::Excluded(b"c")).is_none()); + + let ent = l.lower_bound(1, Bound::Excluded(b"")).unwrap(); + assert_eq!(ent.key(), b"a"); + assert_eq!(ent.value(), b"a1"); + assert_eq!(ent.version(), 1); + + let ent = l.lower_bound(2, Bound::Excluded(b"")).unwrap(); + assert_eq!(ent.key(), b"a"); + assert_eq!(ent.value(), b"a1"); + assert_eq!(ent.version(), 1); + + let ent = l.lower_bound(3, Bound::Excluded(b"")).unwrap(); + assert_eq!(ent.key(), b"a"); + assert_eq!(ent.value(), b"a2"); + assert_eq!(ent.version(), 3); + + let ent = l.lower_bound(1, Bound::Excluded(b"a")).unwrap(); + assert_eq!(ent.key(), b"c"); + assert_eq!(ent.value(), b"c1"); + assert_eq!(ent.version(), 1); + + let ent = l.lower_bound(2, Bound::Excluded(b"a")).unwrap(); + assert_eq!(ent.key(), b"c"); + assert_eq!(ent.value(), b"c1"); + assert_eq!(ent.version(), 1); + + let ent = l.lower_bound(3, Bound::Excluded(b"a")).unwrap(); + assert_eq!(ent.key(), b"c"); + assert_eq!(ent.value(), b"c2"); + assert_eq!(ent.version(), 3); + + let ent = l.lower_bound(1, Bound::Excluded(b"b")).unwrap(); + assert_eq!(ent.key(), b"c"); + 
assert_eq!(ent.value(), b"c1"); + assert_eq!(ent.version(), 1); + + let ent = l.lower_bound(2, Bound::Excluded(b"b")).unwrap(); + assert_eq!(ent.key(), b"c"); + assert_eq!(ent.value(), b"c1"); + assert_eq!(ent.version(), 1); + + let ent = l.lower_bound(3, Bound::Excluded(b"b")).unwrap(); + assert_eq!(ent.key(), b"c"); + assert_eq!(ent.value(), b"c2"); + assert_eq!(ent.version(), 3); + + let ent = l.lower_bound(4, Bound::Excluded(b"b")).unwrap(); + assert_eq!(ent.key(), b"c"); + assert_eq!(ent.value(), b"c2"); + assert_eq!(ent.version(), 3); + + let ent = l.lower_bound(5, Bound::Excluded(b"b")).unwrap(); + assert_eq!(ent.key(), b"c"); + assert_eq!(ent.value(), b"c3"); + assert_eq!(ent.version(), 5); + + let ent = l.lower_bound(6, Bound::Excluded(b"b")).unwrap(); + assert_eq!(ent.key(), b"c"); + assert_eq!(ent.value(), b"c3"); + assert_eq!(ent.version(), 5); + + assert!(l.lower_bound(1, Bound::Excluded(b"c")).is_none()); + assert!(l.lower_bound(2, Bound::Excluded(b"c")).is_none()); + assert!(l.lower_bound(3, Bound::Excluded(b"c")).is_none()); + assert!(l.lower_bound(4, Bound::Excluded(b"c")).is_none()); + assert!(l.lower_bound(5, Bound::Excluded(b"c")).is_none()); + assert!(l.lower_bound(6, Bound::Excluded(b"c")).is_none()); +} + +#[test] +fn test_gt() { + run(|| gt_in(SkipList::new(TEST_OPTIONS).unwrap())); +} + +#[test] +fn test_gt_unify() { + run(|| gt_in(SkipList::new(UNIFY_TEST_OPTIONS).unwrap())); +} + +#[test] +#[cfg(feature = "memmap")] +#[cfg_attr(miri, ignore)] +fn test_gt_map_mut() { + run(|| unsafe { + let dir = tempfile::tempdir().unwrap(); + let p = dir.path().join("test_skipmap_gt_map_mut"); + let open_options = OpenOptions::default() + .create_new(Some(ARENA_SIZE as u32)) + .read(true) + .write(true); + let map_options = MmapOptions::default(); + gt_in(SkipList::map_mut(p, Options::new(), open_options, map_options).unwrap()); + }) +} + +#[test] +#[cfg(feature = "memmap")] +fn test_gt_map_anon() { + run(|| { + let map_options = 
MmapOptions::default().len(ARENA_SIZE as u32); + gt_in(SkipList::map_anon(Options::new(), map_options).unwrap()); + }) +} + +#[test] +#[cfg(feature = "memmap")] +fn test_gt_map_anon_unify() { + run(|| { + let map_options = MmapOptions::default().len(ARENA_SIZE as u32); + gt_in(SkipList::map_anon(UNIFY_TEST_OPTIONS, map_options).unwrap()); + }) +} + +fn ge_in(l: SkipMap) { + l.get_or_insert(1, b"a", b"a1").unwrap(); + l.get_or_insert(3, b"a", b"a2").unwrap(); + l.get_or_insert(1, b"c", b"c1").unwrap(); + l.get_or_insert(3, b"c", b"c2").unwrap(); + + assert!(l.lower_bound(MIN_VERSION, Bound::Included(b"a")).is_none()); + assert!(l.lower_bound(MIN_VERSION, Bound::Included(b"b")).is_none()); + assert!(l.lower_bound(MIN_VERSION, Bound::Included(b"c")).is_none()); + + let ent = l.lower_bound(1, Bound::Included(b"a")).unwrap(); + assert_eq!(ent.key(), b"a"); + assert_eq!(ent.value(), b"a1"); + assert_eq!(ent.version(), 1); + + let ent = l.lower_bound(2, Bound::Included(b"a")).unwrap(); + assert_eq!(ent.key(), b"a"); + assert_eq!(ent.value(), b"a1"); + assert_eq!(ent.version(), 1); + + let ent = l.lower_bound(3, Bound::Included(b"a")).unwrap(); + assert_eq!(ent.key(), b"a"); + assert_eq!(ent.value(), b"a2"); + assert_eq!(ent.version(), 3); + + let ent = l.lower_bound(4, Bound::Included(b"a")).unwrap(); + assert_eq!(ent.key(), b"a"); + assert_eq!(ent.value(), b"a2"); + assert_eq!(ent.version(), 3); + + let ent = l.lower_bound(1, Bound::Included(b"b")).unwrap(); + assert_eq!(ent.key(), b"c"); + assert_eq!(ent.value(), b"c1"); + assert_eq!(ent.version(), 1); + + let ent = l.lower_bound(2, Bound::Included(b"b")).unwrap(); + assert_eq!(ent.key(), b"c"); + assert_eq!(ent.value(), b"c1"); + assert_eq!(ent.version(), 1); + + let ent = l.lower_bound(3, Bound::Included(b"b")).unwrap(); + assert_eq!(ent.key(), b"c"); + assert_eq!(ent.value(), b"c2"); + assert_eq!(ent.version(), 3); + + let ent = l.lower_bound(4, Bound::Included(b"b")).unwrap(); + assert_eq!(ent.key(), b"c"); + 
assert_eq!(ent.value(), b"c2"); + assert_eq!(ent.version(), 3); + + let ent = l.lower_bound(1, Bound::Included(b"c")).unwrap(); + assert_eq!(ent.key(), b"c"); + assert_eq!(ent.value(), b"c1"); + assert_eq!(ent.version(), 1); + + let ent = l.lower_bound(2, Bound::Included(b"c")).unwrap(); + assert_eq!(ent.key(), b"c"); + assert_eq!(ent.value(), b"c1"); + assert_eq!(ent.version(), 1); + + let ent = l.lower_bound(3, Bound::Included(b"c")).unwrap(); + assert_eq!(ent.key(), b"c"); + assert_eq!(ent.value(), b"c2"); + assert_eq!(ent.version(), 3); + + let ent = l.lower_bound(4, Bound::Included(b"c")).unwrap(); + assert_eq!(ent.key(), b"c"); + assert_eq!(ent.value(), b"c2"); + assert_eq!(ent.version(), 3); + + assert!(l.lower_bound(MIN_VERSION, Bound::Included(b"d")).is_none()); + assert!(l.lower_bound(1, Bound::Included(b"d")).is_none()); + assert!(l.lower_bound(2, Bound::Included(b"d")).is_none()); + assert!(l.lower_bound(3, Bound::Included(b"d")).is_none()); + assert!(l.lower_bound(4, Bound::Included(b"d")).is_none()); +} + +#[test] +fn test_ge() { + run(|| ge_in(SkipList::new(TEST_OPTIONS).unwrap())); +} + +#[test] +fn test_ge_unify() { + run(|| ge_in(SkipList::new(UNIFY_TEST_OPTIONS).unwrap())); +} + +#[test] +#[cfg(feature = "memmap")] +#[cfg_attr(miri, ignore)] +fn test_ge_map_mut() { + run(|| unsafe { + let dir = tempfile::tempdir().unwrap(); + let p = dir.path().join("test_skipmap_ge_map_mut"); + let open_options = OpenOptions::default() + .create_new(Some(ARENA_SIZE as u32)) + .read(true) + .write(true); + let map_options = MmapOptions::default(); + ge_in(SkipList::map_mut(p, Options::new(), open_options, map_options).unwrap()); + }) +} + +#[test] +#[cfg(feature = "memmap")] +fn test_ge_map_anon() { + run(|| { + let map_options = MmapOptions::default().len(ARENA_SIZE as u32); + ge_in(SkipList::map_anon(Options::new(), map_options).unwrap()); + }) +} + +#[test] +#[cfg(feature = "memmap")] +fn test_ge_map_anon_unify() { + run(|| { + let map_options = 
MmapOptions::default().len(ARENA_SIZE as u32); + ge_in(SkipList::map_anon(UNIFY_TEST_OPTIONS, map_options).unwrap()); + }) +} + +fn le_in(l: SkipMap) { + l.get_or_insert(1, b"a", b"a1").unwrap(); + l.get_or_insert(3, b"a", b"a2").unwrap(); + l.get_or_insert(1, b"c", b"c1").unwrap(); + l.get_or_insert(3, b"c", b"c2").unwrap(); + + assert!(l.upper_bound(MIN_VERSION, Bound::Included(b"a")).is_none()); + assert!(l.upper_bound(MIN_VERSION, Bound::Included(b"b")).is_none()); + assert!(l.upper_bound(MIN_VERSION, Bound::Included(b"c")).is_none()); + + let ent = l.upper_bound(1, Bound::Included(b"a")).unwrap(); + assert_eq!(ent.key(), b"a"); + assert_eq!(ent.value(), b"a1"); + assert_eq!(ent.version(), 1); + + let ent = l.upper_bound(2, Bound::Included(b"a")).unwrap(); + assert_eq!(ent.key(), b"a"); + assert_eq!(ent.value(), b"a1"); + assert_eq!(ent.version(), 1); + + let ent = l.upper_bound(3, Bound::Included(b"a")).unwrap(); + assert_eq!(ent.key(), b"a"); + assert_eq!(ent.value(), b"a2"); + assert_eq!(ent.version(), 3); + + let ent = l.upper_bound(4, Bound::Included(b"a")).unwrap(); + assert_eq!(ent.key(), b"a"); + assert_eq!(ent.value(), b"a2"); + assert_eq!(ent.version(), 3); + + let ent = l.upper_bound(1, Bound::Included(b"b")).unwrap(); + assert_eq!(ent.key(), b"a"); + assert_eq!(ent.value(), b"a1"); + assert_eq!(ent.version(), 1); + + let ent = l.upper_bound(2, Bound::Included(b"b")).unwrap(); + assert_eq!(ent.key(), b"a"); + assert_eq!(ent.value(), b"a1"); + assert_eq!(ent.version(), 1); + + let ent = l.upper_bound(3, Bound::Included(b"b")).unwrap(); + assert_eq!(ent.key(), b"a"); + assert_eq!(ent.value(), b"a2"); + assert_eq!(ent.version(), 3); + + let ent = l.upper_bound(4, Bound::Included(b"b")).unwrap(); + assert_eq!(ent.key(), b"a"); + assert_eq!(ent.value(), b"a2"); + assert_eq!(ent.version(), 3); + + let ent = l.upper_bound(1, Bound::Included(b"c")).unwrap(); + assert_eq!(ent.key(), b"c"); + assert_eq!(ent.value(), b"c1"); + assert_eq!(ent.version(), 1); + + 
let ent = l.upper_bound(2, Bound::Included(b"c")).unwrap(); + assert_eq!(ent.key(), b"c"); + assert_eq!(ent.value(), b"c1"); + assert_eq!(ent.version(), 1); + + let ent = l.upper_bound(3, Bound::Included(b"c")).unwrap(); + assert_eq!(ent.key(), b"c"); + assert_eq!(ent.value(), b"c2"); + assert_eq!(ent.version(), 3); + + let ent = l.upper_bound(4, Bound::Included(b"c")).unwrap(); + assert_eq!(ent.key(), b"c"); + assert_eq!(ent.value(), b"c2"); + assert_eq!(ent.version(), 3); + + let ent = l.upper_bound(1, Bound::Included(b"d")).unwrap(); + assert_eq!(ent.key(), b"c"); + assert_eq!(ent.value(), b"c1"); + assert_eq!(ent.version(), 1); + + let ent = l.upper_bound(2, Bound::Included(b"d")).unwrap(); + assert_eq!(ent.key(), b"c"); + assert_eq!(ent.value(), b"c1"); + assert_eq!(ent.version(), 1); + + let ent = l.upper_bound(3, Bound::Included(b"d")).unwrap(); + assert_eq!(ent.key(), b"c"); + assert_eq!(ent.value(), b"c2"); + assert_eq!(ent.version(), 3); + + let ent = l.upper_bound(4, Bound::Included(b"d")).unwrap(); + assert_eq!(ent.key(), b"c"); + assert_eq!(ent.value(), b"c2"); + assert_eq!(ent.version(), 3); +} + +#[test] +fn test_le() { + run(|| le_in(SkipList::new(TEST_OPTIONS).unwrap())); +} + +#[test] +fn test_le_unify() { + run(|| le_in(SkipList::new(UNIFY_TEST_OPTIONS).unwrap())); +} + +#[test] +#[cfg(feature = "memmap")] +#[cfg_attr(miri, ignore)] +fn test_le_map_mut() { + run(|| unsafe { + let dir = tempfile::tempdir().unwrap(); + let p = dir.path().join("test_skipmap_le_map_mut"); + let open_options = OpenOptions::default() + .create_new(Some(ARENA_SIZE as u32)) + .read(true) + .write(true); + let map_options = MmapOptions::default(); + le_in(SkipList::map_mut(p, Options::new(), open_options, map_options).unwrap()); + }) +} + +#[test] +#[cfg(feature = "memmap")] +fn test_le_map_anon() { + run(|| { + let map_options = MmapOptions::default().len(ARENA_SIZE as u32); + le_in(SkipList::map_anon(Options::new(), map_options).unwrap()); + }) +} + +#[test]
+#[cfg(feature = "memmap")] +fn test_le_map_anon_unify() { + run(|| { + let map_options = MmapOptions::default().len(ARENA_SIZE as u32); + le_in(SkipList::map_anon(UNIFY_TEST_OPTIONS, map_options).unwrap()); + }) +} + +fn lt_in(l: SkipMap) { + l.get_or_insert(1, b"a", b"a1").unwrap(); + l.get_or_insert(3, b"a", b"a2").unwrap(); + l.get_or_insert(1, b"c", b"c1").unwrap(); + l.get_or_insert(3, b"c", b"c2").unwrap(); + + assert!(l.upper_bound(MIN_VERSION, Bound::Excluded(b"a")).is_none()); + assert!(l.upper_bound(MIN_VERSION, Bound::Excluded(b"b")).is_none()); + assert!(l.upper_bound(MIN_VERSION, Bound::Excluded(b"c")).is_none()); + assert!(l.upper_bound(1, Bound::Excluded(b"a")).is_none()); + assert!(l.upper_bound(2, Bound::Excluded(b"a")).is_none()); + + let ent = l.upper_bound(1, Bound::Excluded(b"b")).unwrap(); + assert_eq!(ent.key(), b"a"); + assert_eq!(ent.value(), b"a1"); + assert_eq!(ent.version(), 1); + + let ent = l.upper_bound(2, Bound::Excluded(b"b")).unwrap(); + assert_eq!(ent.key(), b"a"); + assert_eq!(ent.value(), b"a1"); + assert_eq!(ent.version(), 1); + + let ent = l.upper_bound(3, Bound::Excluded(b"b")).unwrap(); + assert_eq!(ent.key(), b"a"); + assert_eq!(ent.value(), b"a2"); + assert_eq!(ent.version(), 3); + + let ent = l.upper_bound(4, Bound::Excluded(b"b")).unwrap(); + assert_eq!(ent.key(), b"a"); + assert_eq!(ent.value(), b"a2"); + assert_eq!(ent.version(), 3); + + let ent = l.upper_bound(1, Bound::Excluded(b"c")).unwrap(); + assert_eq!(ent.key(), b"a"); + assert_eq!(ent.value(), b"a1"); + assert_eq!(ent.version(), 1); + + let ent = l.upper_bound(2, Bound::Excluded(b"c")).unwrap(); + assert_eq!(ent.key(), b"a"); + assert_eq!(ent.value(), b"a1"); + assert_eq!(ent.version(), 1); + + let ent = l.upper_bound(3, Bound::Excluded(b"c")).unwrap(); + assert_eq!(ent.key(), b"a"); + assert_eq!(ent.value(), b"a2"); + assert_eq!(ent.version(), 3); + + let ent = l.upper_bound(4, Bound::Excluded(b"c")).unwrap(); + assert_eq!(ent.key(), b"a"); +
assert_eq!(ent.value(), b"a2"); + assert_eq!(ent.version(), 3); + + let ent = l.upper_bound(1, Bound::Excluded(b"d")).unwrap(); + assert_eq!(ent.key(), b"c"); + assert_eq!(ent.value(), b"c1"); + assert_eq!(ent.version(), 1); + + let ent = l.upper_bound(2, Bound::Excluded(b"d")).unwrap(); + assert_eq!(ent.key(), b"c"); + assert_eq!(ent.value(), b"c1"); + assert_eq!(ent.version(), 1); + + let ent = l.upper_bound(3, Bound::Excluded(b"d")).unwrap(); + assert_eq!(ent.key(), b"c"); + assert_eq!(ent.value(), b"c2"); + assert_eq!(ent.version(), 3); + + let ent = l.upper_bound(4, Bound::Excluded(b"d")).unwrap(); + assert_eq!(ent.key(), b"c"); + assert_eq!(ent.value(), b"c2"); + assert_eq!(ent.version(), 3); +} + +#[test] +fn test_lt() { + run(|| lt_in(SkipList::new(TEST_OPTIONS).unwrap())) +} + +#[test] +fn test_lt_unify() { + run(|| lt_in(SkipList::new(UNIFY_TEST_OPTIONS).unwrap())) +} + +#[test] +#[cfg(feature = "memmap")] +#[cfg_attr(miri, ignore)] +fn test_lt_map_mut() { + let dir = tempfile::tempdir().unwrap(); + let p = dir.path().join("test_skipmap_lt_map_mut"); + let open_options = OpenOptions::default() + .create_new(Some(ARENA_SIZE as u32)) + .read(true) + .write(true); + let map_options = MmapOptions::default(); + lt_in(unsafe { SkipList::map_mut(p, Options::new(), open_options, map_options).unwrap() }); +} + +#[test] +#[cfg(feature = "memmap")] + +fn test_lt_map_anon() { + run(|| { + let map_options = MmapOptions::default().len(ARENA_SIZE as u32); + lt_in(SkipList::map_anon(Options::new(), map_options).unwrap()); + }) +} + +#[test] +#[cfg(feature = "memmap")] +fn test_lt_map_anon_unify() { + run(|| { + let map_options = MmapOptions::default().len(ARENA_SIZE as u32); + lt_in(SkipList::map_anon(UNIFY_TEST_OPTIONS, map_options).unwrap()); + }) +} + +fn test_basic_large_testcases_in(l: SkipMap) { + let n = 1000; + + for i in 0..n { + l.get_or_insert(MIN_VERSION, &key(i), &new_value(i)) + .unwrap(); + } + + for i in 0..n { + let k = key(i); + let ent = 
l.get(MIN_VERSION, &k).unwrap(); + assert_eq!(new_value(i), ent.value()); + assert_eq!(ent.version(), 0); + assert_eq!(ent.key(), k); + } + + assert_eq!(n, l.len()); +} + +#[test] +fn test_basic_large_testcases() { + run(|| { + let l = SkipList::new(TEST_OPTIONS).unwrap(); + test_basic_large_testcases_in(l); + }) +} + +#[test] +fn test_basic_large_testcases_unify() { + run(|| { + let l = SkipList::new(UNIFY_TEST_OPTIONS).unwrap(); + test_basic_large_testcases_in(l); + }) +} + +#[test] +#[cfg(feature = "memmap")] +#[cfg_attr(miri, ignore)] +fn test_basic_large_testcases_map_mut() { + run(|| unsafe { + let dir = tempfile::tempdir().unwrap(); + let p = dir + .path() + .join("test_skipmap_basic_large_testcases_map_mut"); + let open_options = OpenOptions::default() + .create_new(Some(ARENA_SIZE as u32)) + .read(true) + .write(true); + let map_options = MmapOptions::default(); + let l = SkipList::map_mut(p, Options::new(), open_options, map_options).unwrap(); + test_basic_large_testcases_in(l); + }) +} + +#[test] +#[cfg(feature = "memmap")] +fn test_basic_large_testcases_map_anon() { + run(|| { + let map_options = MmapOptions::default().len(ARENA_SIZE as u32); + let l = SkipList::map_anon(Options::new(), map_options).unwrap(); + test_basic_large_testcases_in(l); + }) +} + +#[test] +#[cfg(feature = "memmap")] +fn test_basic_large_testcases_map_anon_unify() { + run(|| { + let map_options = MmapOptions::default().len(ARENA_SIZE as u32); + let l = SkipList::map_anon(UNIFY_TEST_OPTIONS, map_options).unwrap(); + test_basic_large_testcases_in(l); + }) +} + +fn iter_all_versions_next(l: SkipMap) { + const N: usize = 100; + + for i in (0..N).rev() { + l.get_or_insert(MIN_VERSION, &make_int_key(i), &make_value(i)) + .unwrap(); + } + + let mut it = l.iter_all_versions(MIN_VERSION); + let mut ent = it.seek_lower_bound(Bound::Unbounded).unwrap(); + for i in 0..N { + assert_eq!(ent.key(), make_int_key(i)); + assert_eq!(ent.value().unwrap(), make_value(i)); + if i != N - 1 { + ent = 
it.next().unwrap(); + } + } + + assert!(it.next().is_none()); +} + +#[test] +fn test_iter_all_versions_next() { + run(|| iter_all_versions_next(SkipList::new(TEST_OPTIONS).unwrap())); +} + +#[test] +fn test_iter_all_versions_next_unify() { + run(|| iter_all_versions_next(SkipList::new(UNIFY_TEST_OPTIONS).unwrap())); +} + +#[test] +#[cfg(feature = "memmap")] +#[cfg_attr(miri, ignore)] +fn test_iter_all_versions_next_map_mut() { + run(|| unsafe { + let dir = tempfile::tempdir().unwrap(); + let p = dir + .path() + .join("test_skipmap_iter_all_versions_next_map_mut"); + let open_options = OpenOptions::default() + .create_new(Some(ARENA_SIZE as u32)) + .read(true) + .write(true); + let map_options = MmapOptions::default(); + iter_all_versions_next( + SkipList::map_mut(p, Options::new(), open_options, map_options).unwrap(), + ); + }) +} + +#[test] +#[cfg(feature = "memmap")] +fn test_iter_all_versions_next_map_anon() { + run(|| { + let map_options = MmapOptions::default().len(ARENA_SIZE as u32); + iter_all_versions_next(SkipList::map_anon(Options::new(), map_options).unwrap()); + }) +} + +#[test] +#[cfg(feature = "memmap")] +fn test_iter_all_versions_next_map_anon_unify() { + run(|| { + let map_options = MmapOptions::default().len(ARENA_SIZE as u32); + iter_all_versions_next(SkipList::map_anon(UNIFY_TEST_OPTIONS, map_options).unwrap()); + }) +} + +fn range_next(l: SkipMap) { + const N: usize = 100; + + for i in (0..N).rev() { + l.get_or_insert(MIN_VERSION, &make_int_key(i), &make_value(i)) + .unwrap(); + } + + let upper = make_int_key(50); + let mut it = l.range(MIN_VERSION, ..=upper.as_slice()); + let mut ent = it.seek_lower_bound(Bound::Unbounded); + for i in 0..N { + if i <= 50 { + { + let ent = ent.unwrap(); + assert_eq!(ent.key(), make_int_key(i)); + assert_eq!(ent.value(), make_value(i)); + } + ent = it.next(); + } else { + assert!(ent.is_none()); + ent = it.next(); + } + } + + assert!(it.next().is_none()); +} + +#[test] +fn test_range_next() { + run(|| 
range_next(SkipList::new(TEST_OPTIONS).unwrap())); +} + +#[test] +fn test_range_next_unify() { + run(|| range_next(SkipList::new(UNIFY_TEST_OPTIONS).unwrap())); +} + +#[test] +#[cfg(feature = "memmap")] +#[cfg_attr(miri, ignore)] +fn test_range_next_map_mut() { + run(|| unsafe { + let dir = tempfile::tempdir().unwrap(); + let p = dir.path().join("test_skipmap_range_next_map_mut"); + let open_options = OpenOptions::default() + .create_new(Some(ARENA_SIZE as u32)) + .read(true) + .write(true); + let map_options = MmapOptions::default(); + range_next( + SkipList::map_mut(p, Options::new(), open_options, map_options).unwrap(), + ); + }) +} + +#[test] +#[cfg(feature = "memmap")] +fn test_range_next_map_anon() { + run(|| { + let map_options = MmapOptions::default().len(ARENA_SIZE as u32); + range_next(SkipList::map_anon(Options::new(), map_options).unwrap()); + }) +} + +#[test] +#[cfg(feature = "memmap")] +fn test_range_next_map_anon_unify() { + run(|| { + let map_options = MmapOptions::default().len(ARENA_SIZE as u32); + range_next(SkipList::map_anon(UNIFY_TEST_OPTIONS, map_options).unwrap()); + }) +} + +fn iter_all_versions_prev(l: SkipMap) { + const N: usize = 100; + + for i in 0..N { + l.get_or_insert(MIN_VERSION, &make_int_key(i), &make_value(i)) + .unwrap(); + } + + let mut it = l.iter_all_versions(MIN_VERSION); + let mut ent = it.seek_upper_bound(Bound::Unbounded).unwrap(); + for i in (0..N).rev() { + assert_eq!(ent.key(), make_int_key(i)); + assert_eq!(ent.value().unwrap(), make_value(i)); + if i != 0 { + ent = it.next_back().unwrap(); + } + } + + assert!(it.next_back().is_none()); +} + +#[test] +fn test_iter_all_versions_next_back() { + run(|| iter_all_versions_prev(SkipList::new(TEST_OPTIONS).unwrap())) +} + +#[test] +#[cfg(feature = "memmap")] +#[cfg_attr(miri, ignore)] +fn test_iter_all_versions_prev_map_mut() { + run(|| unsafe { + let dir = tempfile::tempdir().unwrap(); + let p = dir + .path() +
.join("test_skipmap_iter_all_versions_prev_map_mut"); + let open_options = OpenOptions::default() + .create_new(Some(ARENA_SIZE as u32)) + .read(true) + .write(true); + let map_options = MmapOptions::default(); + iter_all_versions_prev( + SkipList::map_mut(p, Options::new(), open_options, map_options).unwrap(), + ); + }) +} + +#[test] +#[cfg(feature = "memmap")] +fn test_iter_all_versions_prev_map_anon() { + run(|| { + let map_options = MmapOptions::default().len(ARENA_SIZE as u32); + iter_all_versions_prev(SkipList::map_anon(Options::new(), map_options).unwrap()); + }) +} + +#[test] +#[cfg(feature = "memmap")] +fn test_iter_all_versions_prev_map_anon_unify() { + run(|| { + let map_options = MmapOptions::default().len(ARENA_SIZE as u32); + iter_all_versions_prev(SkipList::map_anon(UNIFY_TEST_OPTIONS, map_options).unwrap()); + }) +} + +fn range_prev(l: SkipMap) { + const N: usize = 100; + + for i in 0..N { + l.get_or_insert(MIN_VERSION, &make_int_key(i), &make_value(i)) + .unwrap(); + } + + let lower = make_int_key(50); + let mut it = l.range(MIN_VERSION, lower.as_slice()..); + let mut ent = it.seek_upper_bound(Bound::Unbounded); + for i in (0..N).rev() { + if i >= 50 { + { + let ent = ent.unwrap(); + assert_eq!(ent.key(), make_int_key(i)); + assert_eq!(ent.value(), make_value(i)); + } + ent = it.next_back(); + } else { + assert!(ent.is_none()); + ent = it.next_back(); + } + } + + assert!(it.next_back().is_none()); +} + +#[test] +fn test_range_prev() { + run(|| range_prev(SkipList::new(TEST_OPTIONS).unwrap())); +} + +#[test] +fn test_range_prev_unify() { + run(|| range_prev(SkipList::new(UNIFY_TEST_OPTIONS).unwrap())); +} + +#[test] +#[cfg(feature = "memmap")] +#[cfg_attr(miri, ignore)] +fn test_range_prev_map_mut() { + run(|| unsafe { + let dir = tempfile::tempdir().unwrap(); + let p = dir.path().join("test_skipmap_range_prev_map_mut"); + let open_options = OpenOptions::default() + .create_new(Some(ARENA_SIZE as u32)) + .read(true) + .write(true); + let map_options 
= MmapOptions::default(); + range_prev(SkipList::map_mut(p, Options::new(), open_options, map_options).unwrap()); + }) +} + +#[test] +#[cfg(feature = "memmap")] +fn test_range_prev_map_anon() { + run(|| { + let map_options = MmapOptions::default().len(ARENA_SIZE as u32); + range_prev(SkipList::map_anon(Options::new(), map_options).unwrap()); + }) +} + +#[test] +#[cfg(feature = "memmap")] +fn test_range_prev_map_anon_unify() { + run(|| { + let map_options = MmapOptions::default().len(ARENA_SIZE as u32); + range_prev(SkipList::map_anon(UNIFY_TEST_OPTIONS, map_options).unwrap()); + }) +} + +fn iter_all_versions_seek_ge(l: SkipMap) { + const N: usize = 100; + + for i in (0..N).rev() { + let v = i * 10 + 1000; + l.get_or_insert(MIN_VERSION, &make_int_key(v), &make_value(v)) + .unwrap(); + } + + let mut it = l.iter_all_versions(MIN_VERSION); + let ent = it.seek_lower_bound(Bound::Included(b"")).unwrap(); + assert_eq!(ent.key(), make_int_key(1000)); + assert_eq!(ent.value().unwrap(), make_value(1000)); + + let ent = it.seek_lower_bound(Bound::Included(b"01000")).unwrap(); + assert_eq!(ent.key(), make_int_key(1000)); + assert_eq!(ent.value().unwrap(), make_value(1000)); + + let ent = it.seek_lower_bound(Bound::Included(b"01005")).unwrap(); + assert_eq!(ent.key(), make_int_key(1010)); + assert_eq!(ent.value().unwrap(), make_value(1010)); + + let ent = it.seek_lower_bound(Bound::Included(b"01010")).unwrap(); + assert_eq!(ent.key(), make_int_key(1010)); + assert_eq!(ent.value().unwrap(), make_value(1010)); + + let ent = it.seek_lower_bound(Bound::Included(b"01020")).unwrap(); + assert_eq!(ent.key(), make_int_key(1020)); + assert_eq!(ent.value().unwrap(), make_value(1020)); + + let ent = it.seek_lower_bound(Bound::Included(b"01200")).unwrap(); + assert_eq!(ent.key(), make_int_key(1200)); + assert_eq!(ent.value().unwrap(), make_value(1200)); + + let ent = it.seek_lower_bound(Bound::Included(b"01100")).unwrap(); + assert_eq!(ent.key(), make_int_key(1100)); + 
assert_eq!(ent.value().unwrap(), make_value(1100)); + + let ent = it.seek_lower_bound(Bound::Included(b"99999")); + assert!(ent.is_none()); + + l.get_or_insert(MIN_VERSION, &[], &[]).unwrap(); + let ent = it.seek_lower_bound(Bound::Included(b"")).unwrap(); + assert_eq!(ent.key(), &[]); + assert_eq!(ent.value().unwrap(), &[]); + + let ent = it.seek_lower_bound(Bound::Included(b"")).unwrap(); + assert_eq!(ent.key(), &[]); + assert_eq!(ent.value().unwrap(), &[]); +} + +#[test] +fn test_iter_all_versions_seek_ge() { + run(|| iter_all_versions_seek_ge(SkipList::new(TEST_OPTIONS).unwrap())); +} + +#[test] +fn test_iter_all_versions_seek_ge_unify() { + run(|| iter_all_versions_seek_ge(SkipList::new(UNIFY_TEST_OPTIONS).unwrap())); +} + +#[test] +#[cfg(feature = "memmap")] +#[cfg_attr(miri, ignore)] +fn test_iter_all_versions_seek_ge_map_mut() { + run(|| unsafe { + let dir = tempfile::tempdir().unwrap(); + let p = dir + .path() + .join("test_skipmap_iter_all_versions_seek_ge_map_mut"); + let open_options = OpenOptions::default() + .create_new(Some(ARENA_SIZE as u32)) + .read(true) + .write(true); + let map_options = MmapOptions::default(); + iter_all_versions_seek_ge( + SkipList::map_mut(p, Options::new(), open_options, map_options).unwrap(), + ); + }) +} + +#[test] +#[cfg(feature = "memmap")] +fn test_iter_all_versions_seek_ge_map_anon() { + run(|| { + let map_options = MmapOptions::default().len(ARENA_SIZE as u32); + iter_all_versions_seek_ge(SkipList::map_anon(Options::new(), map_options).unwrap()); + }) +} + +#[test] +#[cfg(feature = "memmap")] +fn test_iter_all_versions_seek_ge_map_anon_unify() { + run(|| { + let map_options = MmapOptions::default().len(ARENA_SIZE as u32); + iter_all_versions_seek_ge(SkipList::map_anon(UNIFY_TEST_OPTIONS, map_options).unwrap()); + }) +} + +fn iter_all_versions_seek_lt(l: SkipMap) { + const N: usize = 100; + + for i in (0..N).rev() { + let v = i * 10 + 1000; + l.get_or_insert(MIN_VERSION, &make_int_key(v), &make_value(v)) + .unwrap(); + 
} + + let mut it = l.iter_all_versions(MIN_VERSION); + assert!(it.seek_upper_bound(Bound::Excluded(b"")).is_none()); + + let ent = it.seek_upper_bound(Bound::Excluded(b"01000")); + assert!(ent.is_none()); + + let ent = it.seek_upper_bound(Bound::Excluded(b"01001")).unwrap(); + assert_eq!(ent.key(), make_int_key(1000)); + assert_eq!(ent.value().unwrap(), make_value(1000)); + + let ent = it.seek_upper_bound(Bound::Excluded(b"01991")).unwrap(); + assert_eq!(ent.key(), make_int_key(1990)); + assert_eq!(ent.value().unwrap(), make_value(1990)); + + let ent = it.seek_upper_bound(Bound::Excluded(b"99999")).unwrap(); + assert_eq!(ent.key(), make_int_key(1990)); + assert_eq!(ent.value().unwrap(), make_value(1990)); + + l.get_or_insert(MIN_VERSION, &[], &[]).unwrap(); + + let ent = it.seek_upper_bound(Bound::Excluded(b"")); + assert!(ent.is_none()); + + let ent = it.seek_upper_bound(Bound::Excluded(b"\x01")).unwrap(); + assert_eq!(ent.key(), &[]); + assert_eq!(ent.value().unwrap(), &[]); +} + +#[test] +fn test_iter_all_versions_seek_lt() { + run(|| iter_all_versions_seek_lt(SkipList::new(TEST_OPTIONS).unwrap())) +} + +#[test] +fn test_iter_all_versions_seek_lt_unify() { + run(|| iter_all_versions_seek_lt(SkipList::new(UNIFY_TEST_OPTIONS).unwrap())) +} + +#[test] +#[cfg(feature = "memmap")] +#[cfg_attr(miri, ignore)] +fn test_iter_all_versions_seek_lt_map_mut() { + run(|| unsafe { + let dir = tempfile::tempdir().unwrap(); + let p = dir + .path() + .join("test_skipmap_iter_all_versions_seek_lt_map_mut"); + let open_options = OpenOptions::default() + .create_new(Some(ARENA_SIZE as u32)) + .read(true) + .write(true); + let map_options = MmapOptions::default(); + iter_all_versions_seek_lt( + SkipList::map_mut(p, Options::new(), open_options, map_options).unwrap(), + ); + }) +} + +#[test] +#[cfg(feature = "memmap")] +fn test_iter_all_versions_seek_lt_map_anon() { + run(|| { + let map_options = MmapOptions::default().len(ARENA_SIZE as u32); + 
iter_all_versions_seek_lt(SkipList::map_anon(Options::new(), map_options).unwrap()); + }) +} + +#[test] +#[cfg(feature = "memmap")] +fn test_iter_all_versions_seek_lt_map_anon_unify() { + run(|| { + let map_options = MmapOptions::default().len(ARENA_SIZE as u32); + iter_all_versions_seek_lt(SkipList::map_anon(UNIFY_TEST_OPTIONS, map_options).unwrap()); + }) +} + +fn range(l: SkipMap) { + for i in 1..10 { + l.get_or_insert(MIN_VERSION, &make_int_key(i), &make_value(i)) + .unwrap(); + } + + let k3 = make_int_key(3); + let k7 = make_int_key(7); + let mut it = l.range(MIN_VERSION, k3.as_slice()..k7.as_slice()).clone(); + assert_eq!(it.bounds(), &(k3.as_slice()..k7.as_slice())); + + for i in 3..=6 { + let k = make_int_key(i); + let ent = it.seek_lower_bound(Bound::Included(&k)).unwrap(); + assert_eq!(ent.key(), make_int_key(i)); + assert_eq!(ent.value(), make_value(i)); + } + + for i in 1..3 { + let k = make_int_key(i); + let ent = it.seek_lower_bound(Bound::Included(&k)).unwrap(); + assert_eq!(ent.key(), make_int_key(3)); + assert_eq!(ent.value(), make_value(3)); + } + + for i in 7..10 { + let k = make_int_key(i); + assert!(it.seek_lower_bound(Bound::Included(&k)).is_none()); + } + + for i in 7..10 { + let k = make_int_key(i); + let ent = it.seek_upper_bound(Bound::Included(&k)).unwrap(); + assert_eq!(ent.key(), make_int_key(6)); + assert_eq!(ent.value(), make_value(6)); + } + + let ent = it + .seek_lower_bound(Bound::Included(&make_int_key(6))) + .unwrap(); + assert_eq!(ent.key(), make_int_key(6)); + assert_eq!(ent.value(), make_value(6)); + + assert!(it.next().is_none()); + + let ent = it + .seek_upper_bound(Bound::Included(&make_int_key(6))) + .unwrap(); + assert_eq!(ent.key(), make_int_key(6)); + assert_eq!(ent.value(), make_value(6)); + + assert!(it.next().is_none()); + + for i in 4..=7 { + let k = make_int_key(i); + let ent = it.seek_upper_bound(Bound::Excluded(&k)).unwrap(); + assert_eq!(ent.key(), make_int_key(i - 1)); + assert_eq!(ent.value(), make_value(i - 
1)); + } + + for i in 7..10 { + let k = make_int_key(i); + let ent = it.seek_upper_bound(Bound::Excluded(&k)).unwrap(); + assert_eq!(ent.key(), make_int_key(6)); + assert_eq!(ent.value(), make_value(6)); + } + + for i in 1..3 { + let k = make_int_key(i); + let ent = it.seek_lower_bound(Bound::Excluded(&k)).unwrap(); + assert_eq!(ent.key(), make_int_key(3)); + assert_eq!(ent.value(), make_value(3)); + } + + for i in 1..4 { + let k = make_int_key(i); + assert!(it.seek_upper_bound(Bound::Excluded(&k)).is_none()); + } + + let ent = it + .seek_upper_bound(Bound::Excluded(&make_int_key(4))) + .unwrap(); + assert_eq!(ent.key(), make_int_key(3)); + assert_eq!(ent.value(), make_value(3)); + + assert!(it.next_back().is_none()); +} + +#[test] +fn test_range() { + run(|| range(SkipList::new(TEST_OPTIONS).unwrap())) +} + +#[test] +fn test_range_unify() { + run(|| range(SkipList::new(UNIFY_TEST_OPTIONS).unwrap())) +} + +#[test] +#[cfg(feature = "memmap")] +#[cfg_attr(miri, ignore)] +fn test_range_map_mut() { + run(|| unsafe { + let dir = tempfile::tempdir().unwrap(); + let p = dir.path().join("test_skipmap_range_map_mut"); + let open_options = OpenOptions::default() + .create_new(Some(ARENA_SIZE as u32)) + .read(true) + .write(true); + let map_options = MmapOptions::default(); + range(SkipList::map_mut(p, Options::new(), open_options, map_options).unwrap()); + }) +} + +#[test] +#[cfg(feature = "memmap")] +fn test_range_map_anon() { + run(|| { + let map_options = MmapOptions::default().len(ARENA_SIZE as u32); + range(SkipList::map_anon(Options::new(), map_options).unwrap()); + }) +} + +#[test] +#[cfg(feature = "memmap")] +fn test_range_map_anon_unify() { + run(|| { + let map_options = MmapOptions::default().len(ARENA_SIZE as u32); + range(SkipList::map_anon(UNIFY_TEST_OPTIONS, map_options).unwrap()); + }) +} + +fn iter_latest(l: SkipMap) { + const N: usize = 100; + + for i in 0..N { + l.get_or_insert(MIN_VERSION, &make_int_key(i), &make_value(i)) + .unwrap(); + } + + for i in 
50..N { + l.get_or_insert(1, &make_int_key(i), &make_value(i + 1000)) + .unwrap(); + } + + for i in 0..50 { + l.get_or_insert(2, &make_int_key(i), &make_value(i + 1000)) + .unwrap(); + } + + let mut it = l.iter(4); + let mut num = 0; + for i in 0..N { + let ent = it.next().unwrap(); + assert_eq!(ent.key(), make_int_key(i)); + assert_eq!(ent.value(), make_value(i + 1000)); + + num += 1; + } + assert_eq!(num, N); +} + +#[test] +fn test_iter_latest() { + run(|| iter_latest(SkipList::new(TEST_OPTIONS).unwrap())) +} + +#[test] +fn test_iter_latest_unify() { + run(|| iter_latest(SkipList::new(UNIFY_TEST_OPTIONS).unwrap())) +} + +#[test] +#[cfg(feature = "memmap")] +#[cfg_attr(miri, ignore)] +fn test_iter_latest_map_mut() { + run(|| unsafe { + let dir = tempfile::tempdir().unwrap(); + let p = dir.path().join("test_skipmap_iter_latest_map_mut"); + let open_options = OpenOptions::default() + .create_new(Some(ARENA_SIZE as u32)) + .read(true) + .write(true); + let map_options = MmapOptions::default(); + iter_latest(SkipList::map_mut(p, Options::new(), open_options, map_options).unwrap()); + }) +} + +#[test] +#[cfg(feature = "memmap")] +fn test_iter_latest_map_anon() { + run(|| { + let map_options = MmapOptions::default().len(ARENA_SIZE as u32); + iter_latest(SkipList::map_anon(Options::new(), map_options).unwrap()); + }) +} + +#[test] +#[cfg(feature = "memmap")] +fn test_iter_latest_map_anon_unify() { + run(|| { + let map_options = MmapOptions::default().len(ARENA_SIZE as u32); + iter_latest(SkipList::map_anon(UNIFY_TEST_OPTIONS, map_options).unwrap()); + }) +} + +fn range_latest(l: SkipMap) { + const N: usize = 100; + + for i in 0..N { + l.get_or_insert(MIN_VERSION, &make_int_key(i), &make_value(i)) + .unwrap(); + } + + for i in 50..N { + l.get_or_insert(1, &make_int_key(i), &make_value(i + 1000)) + .unwrap(); + } + + for i in 0..50 { + l.get_or_insert(2, &make_int_key(i), &make_value(i + 1000)) + .unwrap(); + } + + let mut it = l.range::<[u8], _>(4, ..); + let mut num = 0; 
+ for i in 0..N { + let ent = it.next().unwrap(); + assert_eq!(ent.key(), make_int_key(i)); + assert_eq!(ent.value(), make_value(i + 1000)); + + num += 1; + } + assert_eq!(num, N); +} + +#[test] +fn test_range_latest() { + run(|| range_latest(SkipList::new(TEST_OPTIONS).unwrap())) +} + +#[test] +fn test_range_latest_unify() { + run(|| range_latest(SkipList::new(UNIFY_TEST_OPTIONS).unwrap())) +} + +#[test] +#[cfg(feature = "memmap")] +#[cfg_attr(miri, ignore)] +fn test_range_latest_map_mut() { + run(|| unsafe { + let dir = tempfile::tempdir().unwrap(); + let p = dir.path().join("test_skipmap_range_latest_map_mut"); + let open_options = OpenOptions::default() + .create_new(Some(ARENA_SIZE as u32)) + .read(true) + .write(true); + let map_options = MmapOptions::default(); + range_latest(SkipList::map_mut(p, Options::new(), open_options, map_options).unwrap()); + }) +} + +#[test] +#[cfg(feature = "memmap")] +fn test_range_latest_map_anon() { + run(|| { + let map_options = MmapOptions::default().len(ARENA_SIZE as u32); + range_latest(SkipList::map_anon(Options::new(), map_options).unwrap()); + }) +} + +#[test] +#[cfg(feature = "memmap")] +fn test_range_latest_map_anon_unify() { + run(|| { + let map_options = MmapOptions::default().len(ARENA_SIZE as u32); + range_latest(SkipList::map_anon(UNIFY_TEST_OPTIONS, map_options).unwrap()); + }) +} + +#[test] +#[cfg(feature = "memmap")] +#[cfg_attr(miri, ignore)] +fn test_reopen_mmap() { + run(|| unsafe { + let dir = tempfile::tempdir().unwrap(); + let p = dir.path().join("reopen_skipmap"); + { + let open_options = OpenOptions::default() + .create(Some(ARENA_SIZE as u32)) + .read(true) + .write(true); + let map_options = MmapOptions::default(); + let l = SkipMap::map_mut(&p, Options::new(), open_options, map_options).unwrap(); + for i in 0..1000 { + l.get_or_insert(MIN_VERSION, &key(i), &new_value(i)) + .unwrap(); + } + l.flush().unwrap(); + } + + let open_options = OpenOptions::default().read(true); + let map_options = 
MmapOptions::default(); + let l = SkipMap::map(&p, Options::new(), open_options, map_options).unwrap(); + assert_eq!(1000, l.len()); + for i in 0..1000 { + let k = key(i); + let ent = l.get(MIN_VERSION, &k).unwrap(); + assert_eq!(new_value(i), ent.value()); + assert_eq!(ent.version(), 0); + assert_eq!(ent.key(), k); + } + }) +} + +#[test] +#[cfg(feature = "memmap")] +#[cfg_attr(miri, ignore)] +fn test_reopen_mmap_with_reserved() { + run(|| unsafe { + let dir = tempfile::tempdir().unwrap(); + let p = dir.path().join("reopen_skipmap_with_reserved"); + { + let open_options = OpenOptions::default() + .create(Some(ARENA_SIZE as u32)) + .read(true) + .write(true); + let map_options = MmapOptions::default(); + let l = SkipMap::map_mut( + &p, + Options::new().with_reserved(5), + open_options, + map_options, + ) + .unwrap(); + for i in 0..1000 { + l.get_or_insert(MIN_VERSION, &key(i), &new_value(i)) + .unwrap(); + } + l.flush().unwrap(); + let slice = l.reserved_slice_mut(); + assert_eq!(slice.len(), 5); + for i in 0..5 { + slice[i] = i as u8; + } + } + + let open_options = OpenOptions::default().read(true); + let map_options = MmapOptions::default(); + let l = SkipMap::map( + &p, + Options::new().with_reserved(5), + open_options, + map_options, + ) + .unwrap(); + assert_eq!(1000, l.len()); + for i in 0..1000 { + let k = key(i); + let ent = l.get(MIN_VERSION, &k).unwrap(); + assert_eq!(new_value(i), ent.value()); + assert_eq!(ent.version(), 0); + assert_eq!(ent.key(), k); + } + + let slice = l.reserved_slice(); + assert_eq!(slice.len(), 5); + for i in 0..5 { + assert_eq!(slice[i], i as u8); + } + }) +} + +#[test] +#[cfg(feature = "memmap")] +#[cfg_attr(miri, ignore)] +fn test_reopen_mmap2() { + run(|| unsafe { + use rand::seq::SliceRandom; + + let dir = tempfile::tempdir().unwrap(); + let p = dir.path().join("reopen_skipmap2"); + { + let open_options = OpenOptions::default() + .create(Some(ARENA_SIZE as u32)) + .read(true) + .write(true); + let map_options = 
MmapOptions::default(); + let l = + SkipMap::map_mut_with_comparator(&p, Options::new(), open_options, map_options, Ascend) + .unwrap(); + let mut data = (0..1000).collect::>(); + data.shuffle(&mut rand::thread_rng()); + for i in &data { + let i = *i; + l.get_or_insert(i as u64, &key(i), &new_value(i)).unwrap(); + } + l.flush_async().unwrap(); + assert_eq!(l.max_version(), 999); + assert_eq!(l.min_version(), 0); + + for i in data { + let k = key(i); + let ent = l.get(i as u64, &k).unwrap(); + assert_eq!(new_value(i), ent.value()); + assert_eq!(ent.version(), i as u64); + assert_eq!(ent.key(), k); + } + } + + let open_options = OpenOptions::default().read(true); + let map_options = MmapOptions::default(); + let l = + SkipMap::map_with_comparator(&p, Options::new(), open_options, map_options, Ascend).unwrap(); + assert_eq!(1000, l.len()); + let mut data = (0..1000).collect::>(); + data.shuffle(&mut rand::thread_rng()); + for i in data { + let k = key(i); + let ent = l.get(i as u64, &k).unwrap(); + assert_eq!(new_value(i), ent.value()); + assert_eq!(ent.version(), i as u64); + assert_eq!(ent.key(), k); + } + assert_eq!(l.max_version(), 999); + assert_eq!(l.min_version(), 0); + }) +} + +struct Person { + id: u32, + name: std::string::String, +} + +impl Person { + fn encoded_size(&self) -> usize { + 4 + self.name.len() + } +} + +fn get_or_insert_with_value(l: SkipMap) { + let alice = Person { + id: 1, + name: std::string::String::from("Alice"), + }; + + let encoded_size = alice.encoded_size() as u32; + + let vb = ValueBuilder::new(encoded_size, |val| { + assert_eq!(val.capacity(), encoded_size as usize); + assert!(val.is_empty()); + val.put_u32_le(alice.id).unwrap(); + assert_eq!(val.len(), 4); + assert_eq!(val.remaining(), encoded_size as usize - 4); + assert_eq!(&*val, alice.id.to_le_bytes()); + val[..4].copy_from_slice(&alice.id.to_be_bytes()); + assert_eq!(&*val, alice.id.to_be_bytes()); + val.put_slice(alice.name.as_bytes()).unwrap(); + assert_eq!(val.len(), 
encoded_size as usize); + let err = val.put_slice(&[1]).unwrap_err(); + assert_eq!( + std::string::ToString::to_string(&err), + "buffer does not have enough space (remaining 0, want 1)" + ); + Ok(()) + }); + + l.get_or_insert_with_value_builder::<()>(1, b"alice", vb) + .unwrap(); +} + +#[test] +fn test_get_or_insert_with_value() { + run(|| { + get_or_insert_with_value(SkipList::new(TEST_OPTIONS).unwrap()); + }) +} + +#[test] +fn test_get_or_insert_with_value_unify() { + run(|| { + get_or_insert_with_value(SkipList::new(UNIFY_TEST_OPTIONS).unwrap()); + }) +} + +#[test] +#[cfg(feature = "memmap")] +#[cfg_attr(miri, ignore)] +fn test_get_or_insert_with_value_map_mut() { + run(|| unsafe { + let dir = tempfile::tempdir().unwrap(); + let p = dir + .path() + .join("test_skipmap_get_or_insert_with_value_map_mut"); + let open_options = OpenOptions::default() + .create_new(Some(ARENA_SIZE as u32)) + .read(true) + .write(true); + let map_options = MmapOptions::default(); + get_or_insert_with_value( + SkipList::map_mut(p, Options::new(), open_options, map_options).unwrap(), + ); + }) +} + +#[test] +#[cfg(feature = "memmap")] +fn test_get_or_insert_with_value_map_anon() { + run(|| { + let map_options = MmapOptions::default().len(ARENA_SIZE as u32); + get_or_insert_with_value(SkipList::map_anon(Options::new(), map_options).unwrap()); + }) +} + +#[test] +#[cfg(feature = "memmap")] +fn test_get_or_insert_with_value_map_anon_unify() { + run(|| { + let map_options = MmapOptions::default().len(ARENA_SIZE as u32); + get_or_insert_with_value(SkipList::map_anon(UNIFY_TEST_OPTIONS, map_options).unwrap()); + }) +} + +fn get_or_insert_with(l: SkipMap) { + let alice = Person { + id: 1, + name: std::string::String::from("Alice"), + }; + + let encoded_size = alice.encoded_size() as u32; + + let kb = KeyBuilder::new(5u8.into(), |key: &mut VacantBuffer| { + key.put_slice(b"alice").unwrap(); + Ok(()) + }); + + let vb = ValueBuilder::new(encoded_size, |val: &mut VacantBuffer| { + 
assert_eq!(val.capacity(), encoded_size as usize); + assert!(val.is_empty()); + val.put_u32_le(alice.id).unwrap(); + assert_eq!(val.len(), 4); + assert_eq!(val.remaining(), encoded_size as usize - 4); + assert_eq!(&*val, alice.id.to_le_bytes()); + val[..4].copy_from_slice(&alice.id.to_be_bytes()); + assert_eq!(&*val, alice.id.to_be_bytes()); + val.put_slice(alice.name.as_bytes()).unwrap(); + assert_eq!(val.len(), encoded_size as usize); + let err = val.put_slice(&[1]).unwrap_err(); + assert_eq!( + std::string::ToString::to_string(&err), + "buffer does not have enough space (remaining 0, want 1)" + ); + Ok(()) + }); + + l.get_or_insert_with_builders::<(), ()>(1, kb, vb).unwrap(); +} + +#[test] +fn test_get_or_insert_with() { + run(|| get_or_insert_with(SkipList::new(TEST_OPTIONS).unwrap())) +} + +#[test] +fn test_get_or_insert_with_unify() { + run(|| get_or_insert_with(SkipList::new(UNIFY_TEST_OPTIONS).unwrap())) +} + +#[test] +#[cfg(feature = "memmap")] +#[cfg_attr(miri, ignore)] +fn test_get_or_insert_with_map_mut() { + run(|| unsafe { + let dir = tempfile::tempdir().unwrap(); + let p = dir.path().join("test_skipmap_get_or_insert_with_map_mut"); + let open_options = OpenOptions::default() + .create_new(Some(ARENA_SIZE as u32)) + .read(true) + .write(true); + let map_options = MmapOptions::default(); + get_or_insert_with(SkipList::map_mut(p, Options::new(), open_options, map_options).unwrap()); + }) +} + +#[test] +#[cfg(feature = "memmap")] +fn test_get_or_insert_with_map_anon() { + run(|| { + let map_options = MmapOptions::default().len(ARENA_SIZE as u32); + get_or_insert_with(SkipList::map_anon(Options::new(), map_options).unwrap()); + }) +} + +#[test] +#[cfg(feature = "memmap")] +fn test_get_or_insert_with_map_anon_unify() { + run(|| { + let map_options = MmapOptions::default().len(ARENA_SIZE as u32); + get_or_insert_with(SkipList::map_anon(UNIFY_TEST_OPTIONS, map_options).unwrap()); + }) +} + +fn insert_in(l: SkipMap) { + let k = 0u64.to_le_bytes(); + for i in 
0..100 { + let v = new_value(i); + let old = l.insert(MIN_VERSION, &k, &v).unwrap(); + if let Some(old) = old { + assert_eq!(old.key(), k); + assert_eq!(old.value(), new_value(i - 1)); + } + } + + let ent = l.get(MIN_VERSION, &k).unwrap(); + assert_eq!(ent.key(), k); + assert_eq!(ent.value(), new_value(99)); +} + +#[test] +fn test_insert_in() { + run(|| { + insert_in(SkipList::new(TEST_OPTIONS).unwrap()); + }) +} + +#[test] +fn test_insert_in_unify() { + run(|| { + insert_in(SkipList::new(UNIFY_TEST_OPTIONS).unwrap()); + }) +} + +#[test] +#[cfg(feature = "memmap")] +#[cfg_attr(miri, ignore)] +fn test_insert_in_map_mut() { + run(|| unsafe { + let dir = tempfile::tempdir().unwrap(); + let p = dir.path().join("test_skipmap_insert_in_map_mut"); + let open_options = OpenOptions::default() + .create_new(Some(ARENA_SIZE as u32)) + .read(true) + .write(true); + let map_options = MmapOptions::default(); + insert_in(SkipList::map_mut(p, Options::new(), open_options, map_options).unwrap()); + }) +} + +#[test] +#[cfg(feature = "memmap")] +fn test_insert_in_map_anon() { + run(|| { + let map_options = MmapOptions::default().len(ARENA_SIZE as u32); + insert_in(SkipList::map_anon(Options::new(), map_options).unwrap()); + }) +} + +#[test] +#[cfg(feature = "memmap")] +fn test_insert_in_map_anon_unify() { + run(|| { + let map_options = MmapOptions::default().len(ARENA_SIZE as u32); + insert_in(SkipList::map_anon(UNIFY_TEST_OPTIONS, map_options).unwrap()); + }) +} + +fn insert_with_value(l: SkipMap) { + let alice = Person { + id: 1, + name: std::string::String::from("Alice"), + }; + + let encoded_size = alice.encoded_size() as u32; + + let vb = ValueBuilder::new(encoded_size, |val| { + assert_eq!(val.capacity(), encoded_size as usize); + assert!(val.is_empty()); + val.put_u32_le(alice.id).unwrap(); + assert_eq!(val.len(), 4); + assert_eq!(val.remaining(), encoded_size as usize - 4); + assert_eq!(val, alice.id.to_le_bytes()); + val[..4].copy_from_slice(&alice.id.to_be_bytes()); + 
assert_eq!(val, alice.id.to_be_bytes()); + val.put_slice(alice.name.as_bytes()).unwrap(); + assert_eq!(val.len(), encoded_size as usize); + let err = val.put_slice(&[1]).unwrap_err(); + assert_eq!( + std::string::ToString::to_string(&err), + "buffer does not have enough space (remaining 0, want 1)" + ); + Ok(()) + }); + + l.insert_with_value_builder::<()>(1, b"alice", vb).unwrap(); + + let alice2 = Person { + id: 2, + name: std::string::String::from("Alice"), + }; + + let vb = ValueBuilder::new(encoded_size, |val| { + assert_eq!(val.capacity(), encoded_size as usize); + assert!(val.is_empty()); + val.put_slice(&alice2.id.to_le_bytes()).unwrap(); + assert_eq!(val.len(), 4); + assert_eq!(val.remaining(), encoded_size as usize - 4); + assert_eq!(&*val, alice2.id.to_le_bytes()); + val[..4].copy_from_slice(&alice2.id.to_be_bytes()); + assert_eq!(&*val, alice2.id.to_be_bytes()); + val.put_slice(alice2.name.as_bytes()).unwrap(); + assert_eq!(val.len(), encoded_size as usize); + let err = val.put_slice(&[1]).unwrap_err(); + assert_eq!( + std::string::ToString::to_string(&err), + "buffer does not have enough space (remaining 0, want 1)" + ); + Ok(()) + }); + + let old = l + .insert_with_value_builder::<()>(1, b"alice", vb) + .unwrap() + .unwrap(); + + assert_eq!(old.key(), b"alice"); + assert!(old.value().starts_with(&alice.id.to_be_bytes())); + + let ent = l.get(1, b"alice").unwrap(); + assert_eq!(ent.key(), b"alice"); + assert!(ent.value().starts_with(&alice2.id.to_be_bytes())); +} + +#[test] +fn test_insert_with_value() { + run(|| insert_with_value(SkipList::new(TEST_OPTIONS).unwrap())); +} + +#[test] +fn test_insert_with_value_unify() { + run(|| insert_with_value(SkipList::new(UNIFY_TEST_OPTIONS).unwrap())); +} + +#[test] +#[cfg(feature = "memmap")] +#[cfg_attr(miri, ignore)] +fn test_insert_with_value_map_mut() { + run(|| unsafe { + let dir = tempfile::tempdir().unwrap(); + let p = dir + .path() + .join("test_skipmap_get_or_insert_with_value_map_mut"); + let 
open_options = OpenOptions::default() + .create_new(Some(ARENA_SIZE as u32)) + .read(true) + .write(true); + let map_options = MmapOptions::default(); + insert_with_value(SkipList::map_mut(p, Options::new(), open_options, map_options).unwrap()); + }) +} + +#[test] +#[cfg(feature = "memmap")] +fn test_insert_with_value_map_anon() { + run(|| { + let map_options = MmapOptions::default().len(ARENA_SIZE as u32); + insert_with_value(SkipList::map_anon(Options::new(), map_options).unwrap()); + }) +} + +#[test] +#[cfg(feature = "memmap")] +fn test_insert_with_value_map_anon_unify() { + run(|| { + let map_options = MmapOptions::default().len(ARENA_SIZE as u32); + insert_with_value(SkipList::map_anon(UNIFY_TEST_OPTIONS, map_options).unwrap()); + }) +} + +fn insert_with(l: SkipMap) { + let alice = Person { + id: 1, + name: std::string::String::from("Alice"), + }; + + let encoded_size = alice.encoded_size() as u32; + + let kb = KeyBuilder::new(5u8.into(), |key: &mut VacantBuffer| { + key.put_slice(b"alice").unwrap(); + Ok(()) + }); + + let vb = ValueBuilder::new(encoded_size as u32, |val: &mut VacantBuffer| { + assert_eq!(val.capacity(), encoded_size as usize); + assert!(val.is_empty()); + val.put_u32_le(alice.id).unwrap(); + assert_eq!(val.len(), 4); + assert_eq!(val.remaining(), encoded_size as usize - 4); + assert_eq!(val, alice.id.to_le_bytes()); + val[..4].copy_from_slice(&alice.id.to_be_bytes()); + assert_eq!(val, alice.id.to_be_bytes()); + val.put_slice(alice.name.as_bytes()).unwrap(); + assert_eq!(val.len(), encoded_size as usize); + let err = val.put_slice(&[1]).unwrap_err(); + assert_eq!( + std::string::ToString::to_string(&err), + "buffer does not have enough space (remaining 0, want 1)" + ); + Ok(()) + }); + + l.insert_with_builders::<(), ()>(1, kb, vb).unwrap(); + + let alice2 = Person { + id: 2, + name: std::string::String::from("Alice"), + }; + + let vb = ValueBuilder::new(encoded_size, |val: &mut VacantBuffer| { + assert_eq!(val.capacity(), encoded_size as 
usize); + assert!(val.is_empty()); + val.put_slice(&alice2.id.to_le_bytes()).unwrap(); + assert_eq!(val.len(), 4); + assert_eq!(val.remaining(), encoded_size as usize - 4); + assert_eq!(&*val, alice2.id.to_le_bytes()); + val[..4].copy_from_slice(&alice2.id.to_be_bytes()); + assert_eq!(&*val, alice2.id.to_be_bytes()); + val.put_slice(alice2.name.as_bytes()).unwrap(); + assert_eq!(val.len(), encoded_size as usize); + let err = val.put_slice(&[1]).unwrap_err(); + assert_eq!( + std::string::ToString::to_string(&err), + "buffer does not have enough space (remaining 0, want 1)" + ); + Ok(()) + }); + let old = l + .insert_with_builders::<(), ()>(1, kb, vb) + .unwrap() + .unwrap(); + + assert_eq!(old.key(), b"alice"); + assert!(old.value().starts_with(&alice.id.to_be_bytes())); + + let ent = l.get(1, b"alice").unwrap(); + assert_eq!(ent.key(), b"alice"); + assert!(ent.value().starts_with(&alice2.id.to_be_bytes())); +} + +#[test] +fn test_insert_with() { + run(|| insert_with(SkipList::new(TEST_OPTIONS).unwrap())) +} + +#[test] +fn test_insert_with_unify() { + run(|| insert_with(SkipList::new(UNIFY_TEST_OPTIONS).unwrap())) +} + +#[test] +#[cfg(feature = "memmap")] +#[cfg_attr(miri, ignore)] +fn test_insert_with_map_mut() { + run(|| unsafe { + let dir = tempfile::tempdir().unwrap(); + let p = dir.path().join("test_skipmap_insert_with_map_mut"); + let open_options = OpenOptions::default() + .create_new(Some(ARENA_SIZE as u32)) + .read(true) + .write(true); + let map_options = MmapOptions::default(); + insert_with(SkipList::map_mut(p, Options::new(), open_options, map_options).unwrap()); + }) +} + +#[test] +#[cfg(feature = "memmap")] +fn test_insert_with_map_anon() { + run(|| { + let map_options = MmapOptions::default().len(ARENA_SIZE as u32); + insert_with(SkipList::map_anon(Options::new(), map_options).unwrap()); + }) +} + +#[test] +#[cfg(feature = "memmap")] +fn test_insert_with_map_anon_unify() { + run(|| { + let map_options = MmapOptions::default().len(ARENA_SIZE as u32); 
+ insert_with(SkipList::map_anon(UNIFY_TEST_OPTIONS, map_options).unwrap()); + }) +} + +fn remove(l: SkipMap) { + for i in 0..100 { + let v = new_value(i); + l.insert(MIN_VERSION, &key(i), &v).unwrap(); + } + + for i in 0..100 { + let k = key(i); + // no race, remove should succeed + let old = l.remove(MIN_VERSION, &k).unwrap(); + assert!(old.is_none()); + + // key already removed + let old = l.remove(MIN_VERSION, &k).unwrap(); + assert!(old.is_none()); + } + + for i in 0..100 { + let k = key(i); + let ent = l.get(MIN_VERSION, &k); + assert!(ent.is_none()); + } +} + +#[test] +fn test_remove() { + run(|| remove(SkipList::new(TEST_OPTIONS).unwrap())) +} + +#[test] +fn test_remove_unify() { + run(|| remove(SkipList::new(UNIFY_TEST_OPTIONS).unwrap())) +} + +#[test] +#[cfg(feature = "memmap")] +#[cfg_attr(miri, ignore)] +fn test_remove_map_mut() { + run(|| unsafe { + let dir = tempfile::tempdir().unwrap(); + let p = dir.path().join("test_skipmap_remove_map_mut"); + let open_options = OpenOptions::default() + .create_new(Some(ARENA_SIZE as u32)) + .read(true) + .write(true); + let map_options = MmapOptions::default(); + remove(SkipList::map_mut(p, Options::new(), open_options, map_options).unwrap()); + }) +} + +#[test] +#[cfg(feature = "memmap")] +fn test_remove_map_anon() { + run(|| { + let map_options = MmapOptions::default().len(ARENA_SIZE as u32); + remove(SkipList::map_anon(Options::new(), map_options).unwrap()); + }) +} + +#[test] +#[cfg(feature = "memmap")] +fn test_remove_map_anon_unify() { + run(|| { + let map_options = MmapOptions::default().len(ARENA_SIZE as u32); + remove(SkipList::map_anon(UNIFY_TEST_OPTIONS, map_options).unwrap()); + }) +} + +fn remove2(l: SkipMap) { + for i in 0..100 { + let v = new_value(i); + l.insert(MIN_VERSION, &key(i), &v).unwrap(); + } + + for i in 0..100 { + let k = key(i); + // not found, remove should succeed + let old = l.remove(1, &k).unwrap(); + assert!(old.is_none()); + + // no-race, remove should succeed + let old = 
l.remove(MIN_VERSION, &k).unwrap(); + assert!(old.is_none()); + } + + for i in 0..100 { + let k = key(i); + let ent = l.get(MIN_VERSION, &k); + assert!(ent.is_none()); + } +} + +#[test] +fn test_remove2() { + run(|| remove2(SkipList::new(TEST_OPTIONS).unwrap())) +} + +#[test] +fn test_remove2_unify() { + run(|| remove2(SkipList::new(UNIFY_TEST_OPTIONS).unwrap())) +} + +#[test] +#[cfg(feature = "memmap")] +#[cfg_attr(miri, ignore)] +fn test_remove2_map_mut() { + run(|| unsafe { + let dir = tempfile::tempdir().unwrap(); + let p = dir.path().join("test_skipmap_remove2_map_mut"); + let open_options = OpenOptions::default() + .create_new(Some(ARENA_SIZE as u32)) + .read(true) + .write(true); + let map_options = MmapOptions::default(); + remove2(SkipList::map_mut(p, Options::new(), open_options, map_options).unwrap()); + }) +} + +#[test] +#[cfg(feature = "memmap")] +fn test_remove2_map_anon() { + run(|| unsafe { + let map_options = MmapOptions::default().len(ARENA_SIZE as u32); + remove2(SkipList::map_anon(Options::new(), map_options).unwrap()); + }) +} + +#[test] +#[cfg(feature = "memmap")] +fn test_remove2_map_anon_unify() { + run(|| { + let map_options = MmapOptions::default().len(ARENA_SIZE as u32); + remove2(SkipList::map_anon(UNIFY_TEST_OPTIONS, map_options).unwrap()); + }) +} diff --git a/src/unsync/trailed.rs b/src/unsync/trailed.rs new file mode 100644 index 0000000..f29c7db --- /dev/null +++ b/src/unsync/trailed.rs @@ -0,0 +1,1244 @@ +use core::{borrow::Borrow, marker::PhantomData}; + +use super::*; + +use among::Among; +use base::{EntryRef, Iter}; + +type Allocator = GenericAllocator, Arena>; +type SkipList = base::SkipList, C>; + +node_pointer!(TrailedNode); + +/// A node that supports both trailer. +#[repr(C)] +pub struct TrailedNode { + // A byte slice is 24 bytes. We are trying to save space here. 
+ /// Multiple parts of the value are encoded as a single u64 so that it + /// can be atomically loaded and stored: + /// value offset: u32 (bits 0-31) + /// value size : u32 (bits 32-63) + value: UnsyncValuePointer, + // Immutable. No need to lock to access key. + key_offset: u32, + // Immutable. No need to lock to access key. + key_size_and_height: u32, + trailer: PhantomData, + // ** DO NOT REMOVE BELOW COMMENT** + // The below field will be attached after the node, have to comment out + // this field, because each node will not use the full height, the code will + // not allocate the full size of the tower. + // + // Most nodes do not need to use the full height of the tower, since the + // probability of each successive level decreases exponentially. Because + // these elements are never accessed, they do not need to be allocated. + // Therefore, when a node is allocated in the arena, its memory footprint + // is deliberately truncated to not include unneeded tower elements. + // + // All accesses to elements should use CAS operations, with no need to lock. 
+ // pub(super) tower: [Link; self.opts.max_height], +} + +impl core::fmt::Debug for TrailedNode { + fn fmt(&self, f: &mut core::fmt::Formatter<'_>) -> core::fmt::Result { + let (key_size, height) = decode_key_size_and_height(self.key_size_and_height); + let (value_offset, value_size) = self.value.load(); + f.debug_struct("Node") + .field("value_offset", &value_offset) + .field("value_size", &value_size) + .field("key_offset", &self.key_offset) + .field("key_size", &key_size) + .field("height", &height) + .finish() + } +} + +impl WithTrailer for TrailedNode {} + +impl Node for TrailedNode { + type Link = Link; + + type Trailer = T; + + type ValuePointer = UnsyncValuePointer; + + type Pointer = NodePointer; + + fn full(value_offset: u32, max_height: u8) -> Self { + Self { + value: UnsyncValuePointer::new(value_offset, 0), + key_offset: 0, + key_size_and_height: encode_key_size_and_height(0, max_height), + trailer: PhantomData, + } + } + + #[inline] + fn value_pointer(&self) -> &Self::ValuePointer { + &self.value + } + + #[inline] + fn set_value_pointer(&mut self, offset: u32, size: u32) { + self.value = UnsyncValuePointer::new(offset, size); + } + + #[inline] + fn clear_value( + &self, + arena: &A, + success: Ordering, + failure: Ordering, + ) -> Result<(), (u32, u32)> { + self + .value + .compare_remove(success, failure) + .map(|(_, old_len)| { + if old_len != REMOVE { + arena.increase_discarded(old_len); + } + }) + } + + #[inline] + fn set_key_size_and_height(&mut self, key_size_and_height: u32) { + self.key_size_and_height = key_size_and_height; + } + + #[inline] + fn set_key_offset(&mut self, key_offset: u32) { + self.key_offset = key_offset; + } + + #[inline] + fn version(&self) -> Version { + 0 + } + + #[inline] + fn set_version(&mut self, _: Version) {} + + #[inline] + fn key_size_and_height(&self) -> u32 { + self.key_size_and_height + } + + #[inline] + fn key_offset(&self) -> u32 { + self.key_offset + } +} + +/// A fast, ARENA based `SkipMap` that supports 
trailed structure, forward and backward iteration. +/// +/// If you want to use in concurrent environment, you can use [`sync::trailed::SkipMap`]. +#[repr(transparent)] +pub struct SkipMap(SkipList); + +impl Clone for SkipMap { + #[inline] + fn clone(&self) -> Self { + Self(self.0.clone()) + } +} + +impl SkipMap { + /// Create a new skipmap with default options. + /// + /// **Note:** The capacity stands for how many memory allocated, + /// it does not mean the skiplist can store `cap` entries. + /// + /// + /// + /// **What the difference between this method and [`SkipMap::mmap_anon`]?** + /// + /// 1. This method will use an `AlignedVec` ensures we are working within Rust's memory safety guarantees. + /// Even if we are working with raw pointers with `Box::into_raw`, + /// the backend ARENA will reclaim the ownership of this memory by converting it back to a `Box` + /// when dropping the backend ARENA. Since `AlignedVec` uses heap memory, the data might be more cache-friendly, + /// especially if you're frequently accessing or modifying it. + /// + /// 2. Where as [`SkipMap::mmap_anon`] will use mmap anonymous to require memory from the OS. + /// If you require very large contiguous memory regions, `mmap` might be more suitable because + /// it's more direct in requesting large chunks of memory from the OS. + /// + /// [`SkipMap::mmap_anon`]: #method.mmap_anon + pub fn new(opts: Options) -> Result { + Self::with_comparator(opts, Ascend) + } + + /// Create a new memory map file backed with default options. + /// + /// **Note:** The capacity stands for how many memory mmaped, + /// it does not mean the skipmap can store `cap` entries. 
+ /// + /// # Safety + /// - If trying to reopens a skiplist, then the trailer type must be the same as the previous one + #[cfg(all(feature = "memmap", not(target_family = "wasm")))] + #[cfg_attr(docsrs, doc(cfg(all(feature = "memmap", not(target_family = "wasm")))))] + pub unsafe fn map_mut>( + path: P, + opts: Options, + open_options: OpenOptions, + mmap_options: MmapOptions, + ) -> std::io::Result { + Self::map_mut_with_comparator(path, opts, open_options, mmap_options, Ascend) + } + + /// Open an exist file and mmap it to create skipmap. + /// + /// # Safety + /// - If trying to reopens a skiplist, then the trailer type must be the same as the previous one + #[cfg(all(feature = "memmap", not(target_family = "wasm")))] + #[cfg_attr(docsrs, doc(cfg(all(feature = "memmap", not(target_family = "wasm")))))] + pub unsafe fn map>( + path: P, + opts: Options, + open_options: OpenOptions, + mmap_options: MmapOptions, + ) -> std::io::Result { + Self::map_with_comparator(path, opts, open_options, mmap_options, Ascend) + } + + /// Create a new memory map backed skipmap with default options. + /// + /// **What the difference between this method and [`SkipMap::new`]?** + /// + /// 1. This method will use mmap anonymous to require memory from the OS directly. + /// If you require very large contiguous memory regions, this method might be more suitable because + /// it's more direct in requesting large chunks of memory from the OS. + /// + /// 2. Where as [`SkipMap::new`] will use an `AlignedVec` ensures we are working within Rust's memory safety guarantees. + /// Even if we are working with raw pointers with `Box::into_raw`, + /// the backend ARENA will reclaim the ownership of this memory by converting it back to a `Box` + /// when dropping the backend ARENA. Since `AlignedVec` uses heap memory, the data might be more cache-friendly, + /// especially if you're frequently accessing or modifying it. 
+ /// + /// # Safety + /// - If trying to reopens a skiplist, then the trailer type must be the same as the previous one + /// + /// [`SkipMap::new`]: #method.new + #[cfg(all(feature = "memmap", not(target_family = "wasm")))] + #[cfg_attr(docsrs, doc(cfg(all(feature = "memmap", not(target_family = "wasm")))))] + pub fn map_anon(opts: Options, mmap_options: MmapOptions) -> std::io::Result { + Self::map_anon_with_comparator(opts, mmap_options, Ascend) + } +} + +impl SkipMap { + /// Returns the reserved bytes of the allocator specified in the [`ArenaOptions::with_reserved`]. + #[inline] + pub fn reserved_slice(&self) -> &[u8] { + self.0.arena.reserved_slice() + } + + /// Returns the mutable reserved bytes of the allocator specified in the [`ArenaOptions::with_reserved`]. + /// + /// # Safety + /// - The caller need to make sure there is no data-race + /// + /// # Panics + /// - If in read-only mode, it will panic. + #[inline] + #[allow(clippy::mut_from_ref)] + pub unsafe fn reserved_slice_mut(&self) -> &mut [u8] { + self.0.arena.reserved_slice_mut() + } + + /// Returns the path of the mmap file, only returns `Some` when the ARENA is backed by a mmap file. + #[cfg(all(feature = "memmap", not(target_family = "wasm")))] + #[cfg_attr(docsrs, doc(cfg(all(feature = "memmap", not(target_family = "wasm")))))] + #[inline] + pub fn path(&self) -> Option<&std::rc::Rc> { + self.0.arena.path() + } + + /// Sets remove on drop, only works on mmap with a file backend. + /// + /// Default is `false`. + /// + /// > **WARNING:** Once set to `true`, the backed file will be removed when the allocator is dropped, even though the file is opened in + /// > read-only mode. + #[cfg(all(feature = "memmap", not(target_family = "wasm")))] + #[cfg_attr(docsrs, doc(cfg(all(feature = "memmap", not(target_family = "wasm")))))] + #[inline] + pub fn remove_on_drop(&self, val: bool) { + self.0.remove_on_drop(val); + } + + /// Returns the offset of the data section in the `SkipMap`. 
+ /// + /// By default, `SkipMap` will allocate meta, head node, and tail node in the ARENA, + /// and the data section will be allocated after the tail node. + /// + /// This method will return the offset of the data section in the ARENA. + #[inline] + pub fn data_offset(&self) -> usize { + self.0.data_offset() + } + + /// Returns the magic version number of the [`SkipMap`]. + /// + /// This value can be used to check the compatibility for application using [`SkipMap`]. + #[inline] + pub fn magic_version(&self) -> u16 { + self.0.magic_version() + } + + /// Returns the height of the highest tower within any of the nodes that + /// have ever been allocated as part of this skiplist. + #[inline] + pub fn height(&self) -> u8 { + self.0.height() + } + + /// Returns the number of remaining bytes can be allocated by the arena. + #[inline] + pub fn remaining(&self) -> usize { + self.0.remaining() + } + + /// Returns the number of bytes that have allocated from the arena. + #[inline] + pub fn allocated(&self) -> usize { + self.0.allocated() + } + + /// Returns the capacity of the arena. + #[inline] + pub fn capacity(&self) -> usize { + self.0.capacity() + } + + /// Returns the number of entries in the skipmap. + #[inline] + pub fn len(&self) -> usize { + self.0.len() + } + + /// Returns true if the skipmap is empty. + #[inline] + pub fn is_empty(&self) -> bool { + self.0.is_empty() + } + + /// Gets the number of pointers to this `SkipMap` similar to [`Arc::strong_count`](std::sync::Arc::strong_count). + #[inline] + pub fn refs(&self) -> usize { + self.0.refs() + } + + /// Returns how many bytes are discarded by the ARENA. + #[inline] + pub fn discarded(&self) -> u32 { + self.0.discarded() + } + + /// Returns the comparator used to compare keys. + #[inline] + pub fn comparator(&self) -> &C { + self.0.comparator() + } + + /// Returns a random generated height. + /// + /// This method is useful when you want to check if the underlying allocator can allocate a node. 
+ /// + /// # Example + /// + /// ```rust + /// use skl::{unsync::trailed::SkipMap, Options}; + /// + /// let map = SkipMap::::new(Options::new()).unwrap(); + /// let height = map.random_height(); + /// + /// let needed = SkipMap::::estimated_node_size(height, b"k1".len(), b"k2".len()); + /// ``` + #[inline] + pub fn random_height(&self) -> Height { + self.0.random_height() + } + + /// Returns the estimated size of a node with the given height and key/value sizes. + /// + /// **Note**: The returned size is only an estimate and may not be accurate, which means that the actual size is less than or equal to the returned size. + #[inline] + pub fn estimated_node_size(height: Height, key_size: usize, value_size: usize) -> usize { + SkipList::::estimated_node_size(height, key_size, value_size) + } + + /// Like [`SkipMap::new`], but with a custom [`Comparator`]. + #[inline] + pub fn with_comparator(opts: Options, cmp: C) -> Result { + SkipList::with_comparator(opts, cmp).map(Self) + } + + /// Like [`SkipMap::map_mut`], but with a custom [`Comparator`]. + /// + /// # Safety + /// - If trying to reopens a skiplist, then the trailer type must be the same as the previous one + #[cfg(all(feature = "memmap", not(target_family = "wasm")))] + #[cfg_attr(docsrs, doc(cfg(all(feature = "memmap", not(target_family = "wasm")))))] + #[inline] + pub unsafe fn map_mut_with_comparator>( + path: P, + opts: Options, + open_options: OpenOptions, + mmap_options: MmapOptions, + cmp: C, + ) -> std::io::Result { + SkipList::map_mut_with_comparator(path, opts, open_options, mmap_options, cmp).map(Self) + } + + /// Like [`SkipMap::map_mut`], but with [`Options`], a custom [`Comparator`] and a [`PathBuf`](std::path::PathBuf) builder. 
+ /// + /// # Safety + /// - If trying to reopens a skiplist, then the trailer type must be the same as the previous one + #[cfg(all(feature = "memmap", not(target_family = "wasm")))] + #[cfg_attr(docsrs, doc(cfg(all(feature = "memmap", not(target_family = "wasm")))))] + #[inline] + pub unsafe fn map_mut_with_comparator_and_path_builder( + path_builder: PB, + opts: Options, + open_options: OpenOptions, + mmap_options: MmapOptions, + cmp: C, + ) -> Result> + where + PB: FnOnce() -> Result, + { + SkipList::map_mut_with_comparator_and_path_builder( + path_builder, + opts, + open_options, + mmap_options, + cmp, + ) + .map(Self) + } + + /// Like [`SkipMap::map`], but with a custom [`Comparator`]. + /// + /// # Safety + /// - If trying to reopens a skiplist, then the trailer type must be the same as the previous one + #[cfg(all(feature = "memmap", not(target_family = "wasm")))] + #[cfg_attr(docsrs, doc(cfg(all(feature = "memmap", not(target_family = "wasm")))))] + #[inline] + pub unsafe fn map_with_comparator>( + path: P, + opts: Options, + open_options: OpenOptions, + mmap_options: MmapOptions, + cmp: C, + ) -> std::io::Result { + SkipList::map_with_comparator(path, opts, open_options, mmap_options, cmp).map(Self) + } + + /// Like [`SkipMap::map`], but with a custom [`Comparator`] and a [`PathBuf`](std::path::PathBuf) builder. 
+ /// + /// # Safety + /// - If trying to reopens a skiplist, then the trailer type must be the same as the previous one + #[cfg(all(feature = "memmap", not(target_family = "wasm")))] + #[cfg_attr(docsrs, doc(cfg(all(feature = "memmap", not(target_family = "wasm")))))] + #[inline] + pub unsafe fn map_with_comparator_and_path_builder( + path_builder: PB, + opts: Options, + open_options: OpenOptions, + mmap_options: MmapOptions, + cmp: C, + ) -> Result> + where + PB: FnOnce() -> Result, + { + SkipList::map_with_comparator_and_path_builder( + path_builder, + opts, + open_options, + mmap_options, + cmp, + ) + .map(Self) + } + + /// Like [`SkipMap::map_anon`], but with a custom [`Comparator`]. + #[cfg(all(feature = "memmap", not(target_family = "wasm")))] + #[cfg_attr(docsrs, doc(cfg(all(feature = "memmap", not(target_family = "wasm")))))] + #[inline] + pub fn map_anon_with_comparator( + opts: Options, + mmap_options: MmapOptions, + cmp: C, + ) -> std::io::Result { + SkipList::map_anon_with_comparator(opts, mmap_options, cmp).map(Self) + } + + /// Clear the skiplist to empty and re-initialize. + /// + /// # Safety + /// - The current pointers get from the ARENA cannot be used anymore after calling this method. + /// - This method is not thread-safe. + /// + /// # Example + /// + /// Undefine behavior: + /// + /// ```ignore + /// let map = SkipMap::new(Options::new()).unwrap(); + /// + /// map.insert(b"hello", b"world").unwrap(); + /// + /// let data = map.get(b"hello").unwrap(); + /// + /// map.clear().unwrap(); + /// + /// let w = data[0]; // undefined behavior + /// ``` + pub unsafe fn clear(&mut self) -> Result<(), Error> { + self.0.clear() + } + + /// Flushes outstanding memory map modifications to disk. + /// + /// When this method returns with a non-error result, + /// all outstanding changes to a file-backed memory map are guaranteed to be durably stored. + /// The file's metadata (including last modification timestamp) may not be updated. 
+ #[cfg(all(feature = "memmap", not(target_family = "wasm")))] + #[cfg_attr(docsrs, doc(cfg(all(feature = "memmap", not(target_family = "wasm")))))] + pub fn flush(&self) -> std::io::Result<()> { + self.0.flush() + } + + /// Asynchronously flushes outstanding memory map modifications to disk. + /// + /// This method initiates flushing modified pages to durable storage, but it will not wait for + /// the operation to complete before returning. The file's metadata (including last + /// modification timestamp) may not be updated. + #[cfg(all(feature = "memmap", not(target_family = "wasm")))] + #[cfg_attr(docsrs, doc(cfg(all(feature = "memmap", not(target_family = "wasm")))))] + pub fn flush_async(&self) -> std::io::Result<()> { + self.0.flush_async() + } +} + +impl SkipMap { + /// Upserts a new key-value pair if it does not yet exist, if the key with the given version already exists, it will update the value. + /// Unlike [`get_or_insert`](SkipMap::get_or_insert), this method will update the value if the key with the given version already exists. + /// + /// - Returns `Ok(None)` if the key was successfully inserted. + /// - Returns `Ok(Some(old))` if the key with the given version already exists and the value is successfully updated. + #[inline] + pub fn insert<'a, 'b: 'a>( + &'a self, + key: &'b [u8], + value: &'b [u8], + trailer: T, + ) -> Result>>, Error> { + self + .0 + .insert_at_height(MIN_VERSION, self.random_height(), key, value, trailer) + } + + /// Upserts a new key-value pair at the given height if it does not yet exist, if the key with the given version already exists, it will update the value. + /// Unlike [`get_or_insert_at_height`](SkipMap::get_or_insert_at_height), this method will update the value if the key with the given version already exists. + /// + /// - Returns `Ok(None)` if the key was successfully inserted. + /// - Returns `Ok(Some(old))` if the key with the given version already exists and the value is successfully updated. 
+ /// + /// # Example + /// + /// ```rust + /// use skl::{unsync::trailed::SkipMap, Options}; + /// + /// let map = SkipMap::::new(Options::new()).unwrap(); + /// + /// let height = map.random_height(); + /// map.insert_at_height(height, b"hello", b"world", 10).unwrap(); + /// ``` + pub fn insert_at_height<'a, 'b: 'a>( + &'a self, + height: Height, + key: &'b [u8], + value: &'b [u8], + trailer: T, + ) -> Result>>, Error> { + self + .0 + .insert_at_height(MIN_VERSION, height, key, value, trailer) + } + + /// Upserts a new key if it does not yet exist, if the key with the given version already exists, it will update the value. + /// Unlike [`get_or_insert_with_value_builder`](SkipMap::get_or_insert_with_value_builder), this method will update the value if the key with the given version already exists. + /// + /// This method is useful when you want to insert a key and you know the value size but you do not have the value + /// at this moment. + /// + /// A placeholder will be inserted first, then you will get an [`VacantBuffer`], + /// and you must fill the buffer with bytes later in the closure. + /// + /// - Returns `Ok(None)` if the key was successfully inserted. + /// - Returns `Ok(Some(old))` if the key with the given version already exists and the value is successfully updated. 
+ /// + /// # Example + /// + /// ```rust + /// use skl::{unsync::trailed::SkipMap, ValueBuilder, Options}; + /// + /// struct Person { + /// id: u32, + /// name: String, + /// } + /// + /// impl Person { + /// fn encoded_size(&self) -> usize { + /// 4 + self.name.len() + /// } + /// } + /// + /// + /// let alice = Person { + /// id: 1, + /// name: "Alice".to_string(), + /// }; + /// + /// let encoded_size = alice.encoded_size(); + /// + /// let l = SkipMap::::new(Options::new()).unwrap(); + /// + /// let vb = ValueBuilder::new(encoded_size as u32, |mut val| { + /// val.put_u32_le(alice.id).unwrap(); + /// val.put_slice(alice.name.as_bytes()).unwrap(); + /// Ok(()) + /// }); + /// + /// l.insert_with_value_builder::(b"alice", vb, 10) + /// .unwrap(); + /// ``` + #[inline] + pub fn insert_with_value_builder<'a, 'b: 'a, E>( + &'a self, + key: &'b [u8], + value_builder: ValueBuilder) -> Result<(), E>>, + trailer: T, + ) -> Result>>, Either> { + self.0.insert_at_height_with_value_builder( + MIN_VERSION, + self.random_height(), + key, + value_builder, + trailer, + ) + } + + /// Upserts a new key if it does not yet exist, if the key with the given version already exists, it will update the value. + /// Unlike [`get_or_insert_at_height_with_value_builder`](SkipMap::get_or_insert_at_height_with_value_builder), this method will update the value if the key with the given version already exists. + /// + /// This method is useful when you want to insert a key and you know the value size but you do not have the value + /// at this moment. + /// + /// A placeholder will be inserted first, then you will get an [`VacantBuffer`], + /// and you must fill the buffer with bytes later in the closure. + /// + /// - Returns `Ok(None)` if the key was successfully inserted. + /// - Returns `Ok(Some(old))` if the key with the given version already exists and the value is successfully updated. 
+ /// + /// # Example + /// + /// ```rust + /// use skl::{unsync::trailed::SkipMap, ValueBuilder, Options}; + /// + /// struct Person { + /// id: u32, + /// name: String, + /// } + /// + /// impl Person { + /// fn encoded_size(&self) -> usize { + /// 4 + self.name.len() + /// } + /// } + /// + /// + /// let alice = Person { + /// id: 1, + /// name: "Alice".to_string(), + /// }; + /// + /// let encoded_size = alice.encoded_size(); + /// + /// let l = SkipMap::::new(Options::new()).unwrap(); + /// + /// let vb = ValueBuilder::new(encoded_size as u32, |mut val| { + /// val.put_u32_le(alice.id).unwrap(); + /// val.put_slice(alice.name.as_bytes()).unwrap(); + /// Ok(()) + /// }); + /// + /// let height = l.random_height(); + /// l.insert_at_height_with_value_builder::(height, b"alice", vb, 10) + /// .unwrap(); + /// ``` + pub fn insert_at_height_with_value_builder<'a, 'b: 'a, E>( + &'a self, + height: Height, + key: &'b [u8], + value_builder: ValueBuilder) -> Result<(), E>>, + trailer: T, + ) -> Result>>, Either> { + self + .0 + .insert_at_height_with_value_builder(MIN_VERSION, height, key, value_builder, trailer) + } + + /// Inserts a new key-value pair if it does not yet exist. + /// + /// Unlike [`insert`](SkipMap::insert), this method will not update the value if the key with the given version already exists. + /// + /// - Returns `Ok(None)` if the key was successfully get_or_inserted. + /// - Returns `Ok(Some(_))` if the key with the given version already exists. + #[inline] + pub fn get_or_insert<'a, 'b: 'a>( + &'a self, + key: &'b [u8], + value: &'b [u8], + trailer: T, + ) -> Result>>, Error> { + self + .0 + .get_or_insert_at_height(MIN_VERSION, self.random_height(), key, value, trailer) + } + + /// Inserts a new key-value pair at height if it does not yet exist. + /// + /// Unlike [`insert_at_height`](SkipMap::insert_at_height), this method will not update the value if the key with the given version already exists. 
+ /// + /// - Returns `Ok(None)` if the key was successfully get_or_inserted. + /// - Returns `Ok(Some(_))` if the key with the given version already exists. + pub fn get_or_insert_at_height<'a, 'b: 'a>( + &'a self, + height: Height, + key: &'b [u8], + value: &'b [u8], + trailer: T, + ) -> Result>>, Error> { + self + .0 + .get_or_insert_at_height(MIN_VERSION, height, key, value, trailer) + } + + /// Inserts a new key if it does not yet exist. + /// + /// Unlike [`insert_with_value_builder`](SkipMap::insert_with_value_builder), this method will not update the value if the key with the given version already exists. + /// + /// This method is useful when you want to get_or_insert a key and you know the value size but you do not have the value + /// at this moment. + /// + /// A placeholder will be inserted first, then you will get an [`VacantBuffer`], + /// and you must fill the buffer with bytes later in the closure. + /// + /// - Returns `Ok(None)` if the key was successfully get_or_inserted. + /// - Returns `Ok(Some(_))` if the key with the given version already exists. 
+ /// + /// # Example + /// + /// ```rust + /// use skl::{unsync::trailed::SkipMap, ValueBuilder, Options}; + /// + /// struct Person { + /// id: u32, + /// name: String, + /// } + /// + /// impl Person { + /// fn encoded_size(&self) -> usize { + /// 4 + self.name.len() + /// } + /// } + /// + /// + /// let alice = Person { + /// id: 1, + /// name: "Alice".to_string(), + /// }; + /// + /// let encoded_size = alice.encoded_size(); + /// + /// let l = SkipMap::::new(Options::new()).unwrap(); + /// + /// let vb = ValueBuilder::new(encoded_size as u32, |mut val| { + /// val.put_u32_le(alice.id).unwrap(); + /// val.put_slice(alice.name.as_bytes()).unwrap(); + /// Ok(()) + /// }); + /// l.get_or_insert_with_value_builder::(b"alice", vb, 10) + /// .unwrap(); + /// ``` + #[inline] + pub fn get_or_insert_with_value_builder<'a, 'b: 'a, E>( + &'a self, + key: &'b [u8], + value_builder: ValueBuilder) -> Result<(), E>>, + trailer: T, + ) -> Result>>, Either> { + self.0.get_or_insert_at_height_with_value_builder( + MIN_VERSION, + self.random_height(), + key, + value_builder, + trailer, + ) + } + + /// Inserts a new key if it does not yet exist. + /// + /// Unlike [`insert_at_height_with_value_builder`](SkipMap::insert_at_height_with_value_builder), this method will not update the value if the key with the given version already exists. + /// + /// This method is useful when you want to get_or_insert a key and you know the value size but you do not have the value + /// at this moment. + /// + /// A placeholder will be inserted first, then you will get an [`VacantBuffer`], + /// and you must fill the buffer with bytes later in the closure. + /// + /// - Returns `Ok(None)` if the key was successfully get_or_inserted. + /// - Returns `Ok(Some(_))` if the key with the given version already exists. 
+ /// + /// # Example + /// + /// ```rust + /// use skl::{unsync::trailed::SkipMap, ValueBuilder, Options}; + /// + /// struct Person { + /// id: u32, + /// name: String, + /// } + /// + /// impl Person { + /// fn encoded_size(&self) -> usize { + /// 4 + self.name.len() + /// } + /// } + /// + /// + /// let alice = Person { + /// id: 1, + /// name: "Alice".to_string(), + /// }; + /// + /// let encoded_size = alice.encoded_size(); + /// + /// let l = SkipMap::::new(Options::new()).unwrap(); + /// + /// let vb = ValueBuilder::new(encoded_size as u32, |mut val| { + /// val.put_u32_le(alice.id).unwrap(); + /// val.put_slice(alice.name.as_bytes()).unwrap(); + /// Ok(()) + /// }); + /// + /// let height = l.random_height(); + /// l.get_or_insert_at_height_with_value_builder::(height, b"alice", vb, 10) + /// .unwrap(); + /// ``` + pub fn get_or_insert_at_height_with_value_builder<'a, 'b: 'a, E>( + &'a self, + height: Height, + key: &'b [u8], + value_builder: ValueBuilder) -> Result<(), E>>, + trailer: T, + ) -> Result>>, Either> { + self.0.get_or_insert_at_height_with_value_builder( + MIN_VERSION, + height, + key, + value_builder, + trailer, + ) + } + + /// Upserts a new key if it does not yet exist, if the key with the given version already exists, it will update the value. + /// Unlike [`get_or_insert_with_builders`](SkipMap::get_or_insert_with_builders), this method will update the value if the key with the given version already exists. + /// + /// This method is useful when you want to insert a key and you know the key size and value size but you do not have the key and value + /// at this moment. + /// + /// A placeholder will be inserted first, then you will get an [`VacantBuffer`], + /// and you must fill the buffer with bytes later in the closure. + /// + /// - Returns `Ok(None)` if the key was successfully inserted. + /// - Returns `Ok(Some(old))` if the key with the given version already exists and the value is successfully updated. 
+ /// + /// # Example + /// + /// ```rust + /// use skl::{unsync::trailed::SkipMap, KeyBuilder, ValueBuilder, Options}; + /// + /// struct Person { + /// id: u32, + /// name: String, + /// } + /// + /// impl Person { + /// fn encoded_size(&self) -> usize { + /// 4 + self.name.len() + /// } + /// } + /// + /// + /// let alice = Person { + /// id: 1, + /// name: "Alice".to_string(), + /// }; + /// + /// let encoded_size = alice.encoded_size(); + /// + /// let l = SkipMap::::new(Options::new()).unwrap(); + /// + /// let kb = KeyBuilder::new(5u8.into(), |mut key| { + /// key.put_slice(b"alice").unwrap(); + /// Ok(()) + /// }); + /// + /// let vb = ValueBuilder::new(encoded_size as u32, |mut val| { + /// val.put_u32_le(alice.id).unwrap(); + /// val.put_slice(alice.name.as_bytes()).unwrap(); + /// Ok(()) + /// }); + /// + /// l.insert_with_builders::<(), ()>(kb, vb, 10) + /// .unwrap(); + /// ``` + #[inline] + pub fn insert_with_builders<'a, KE, VE>( + &'a self, + key_builder: KeyBuilder) -> Result<(), KE>>, + value_builder: ValueBuilder) -> Result<(), VE>>, + trailer: T, + ) -> Result>>, Among> { + self.0.insert_at_height_with_builders( + MIN_VERSION, + self.random_height(), + key_builder, + value_builder, + trailer, + ) + } + + /// Upserts a new key if it does not yet exist, if the key with the given version already exists, it will update the value. + /// Unlike [`get_or_insert_with_builders`](SkipMap::get_or_insert_with_builders), this method will update the value if the key with the given version already exists. + /// + /// This method is useful when you want to insert a key and you know the key size and value size but you do not have the key and value + /// at this moment. + /// + /// A placeholder will be inserted first, then you will get an [`VacantBuffer`], + /// and you must fill the buffer with bytes later in the closure. + /// + /// - Returns `Ok(None)` if the key was successfully inserted. 
+ /// - Returns `Ok(Some(old))` if the key with the given version already exists and the value is successfully updated. + /// + /// # Example + /// + /// ```rust + /// use skl::{unsync::trailed::SkipMap, KeyBuilder, ValueBuilder, Options}; + /// + /// struct Person { + /// id: u32, + /// name: String, + /// } + /// + /// impl Person { + /// fn encoded_size(&self) -> usize { + /// 4 + self.name.len() + /// } + /// } + /// + /// + /// let alice = Person { + /// id: 1, + /// name: "Alice".to_string(), + /// }; + /// + /// let encoded_size = alice.encoded_size(); + /// + /// let l = SkipMap::::new(Options::new()).unwrap(); + /// + /// let kb = KeyBuilder::new(5u8.into(), |mut key| { + /// key.put_slice(b"alice").unwrap(); + /// Ok(()) + /// }); + /// + /// let vb = ValueBuilder::new(encoded_size as u32, |mut val| { + /// val.put_u32_le(alice.id).unwrap(); + /// val.put_slice(alice.name.as_bytes()).unwrap(); + /// Ok(()) + /// }); + /// + /// let height = l.random_height(); + /// l.insert_at_height_with_builders::<(), ()>(height, kb, vb, 10) + /// .unwrap(); + /// ``` + pub fn insert_at_height_with_builders<'a, KE, VE>( + &'a self, + height: Height, + key_builder: KeyBuilder) -> Result<(), KE>>, + value_builder: ValueBuilder) -> Result<(), VE>>, + trailer: T, + ) -> Result>>, Among> { + self + .0 + .insert_at_height_with_builders(MIN_VERSION, height, key_builder, value_builder, trailer) + } + + /// Inserts a new key if it does not yet exist. + /// + /// Unlike [`insert_with_builders`](SkipMap::insert_with_builders), this method will not update the value if the key with the given version already exists. + /// + /// This method is useful when you want to get_or_insert a key and you know the value size but you do not have the value + /// at this moment. + /// + /// A placeholder will be inserted first, then you will get an [`VacantBuffer`], + /// and you must fill the buffer with bytes later in the closure. 
+ /// + /// # Example + /// + /// ```rust + /// use skl::{unsync::trailed::SkipMap, KeyBuilder, ValueBuilder, Options}; + /// + /// struct Person { + /// id: u32, + /// name: String, + /// } + /// + /// impl Person { + /// fn encoded_size(&self) -> usize { + /// 4 + self.name.len() + /// } + /// } + /// + /// + /// let alice = Person { + /// id: 1, + /// name: "Alice".to_string(), + /// }; + /// + /// let encoded_size = alice.encoded_size(); + /// + /// let l = SkipMap::::new(Options::new()).unwrap(); + /// + /// let kb = KeyBuilder::new(5u8.into(), |mut key| { + /// key.put_slice(b"alice").unwrap(); + /// Ok(()) + /// }); + /// + /// let vb = ValueBuilder::new(encoded_size as u32, |mut val| { + /// val.put_u32_le(alice.id).unwrap(); + /// val.put_slice(alice.name.as_bytes()).unwrap(); + /// Ok(()) + /// }); + /// + /// l.get_or_insert_with_builders::<(), ()>(kb, vb, 10) + /// .unwrap(); + /// ``` + #[inline] + pub fn get_or_insert_with_builders<'a, KE, VE>( + &'a self, + key_builder: KeyBuilder) -> Result<(), KE>>, + value_builder: ValueBuilder) -> Result<(), VE>>, + trailer: T, + ) -> Result>>, Among> { + self.0.get_or_insert_at_height_with_builders( + MIN_VERSION, + self.random_height(), + key_builder, + value_builder, + trailer, + ) + } + + /// Inserts a new key if it does not yet exist. + /// + /// Unlike [`insert_at_height_with_builders`](SkipMap::insert_at_height_with_builders), this method will not update the value if the key with the given version already exists. + /// + /// This method is useful when you want to get_or_insert a key and you know the value size but you do not have the value + /// at this moment. + /// + /// A placeholder will be inserted first, then you will get an [`VacantBuffer`], + /// and you must fill the buffer with bytes later in the closure. 
+ /// + /// # Example + /// + /// ```rust + /// use skl::{unsync::trailed::SkipMap, KeyBuilder, ValueBuilder, Options}; + /// + /// struct Person { + /// id: u32, + /// name: String, + /// } + /// + /// impl Person { + /// fn encoded_size(&self) -> usize { + /// 4 + self.name.len() + /// } + /// } + /// + /// + /// let alice = Person { + /// id: 1, + /// name: "Alice".to_string(), + /// }; + /// + /// let encoded_size = alice.encoded_size(); + /// + /// let l = SkipMap::::new(Options::new()).unwrap(); + /// + /// let kb = KeyBuilder::new(5u8.into(), |mut key| { + /// key.put_slice(b"alice").unwrap(); + /// Ok(()) + /// }); + /// + /// let vb = ValueBuilder::new(encoded_size as u32, |mut val| { + /// val.put_u32_le(alice.id).unwrap(); + /// val.put_slice(alice.name.as_bytes()).unwrap(); + /// Ok(()) + /// }); + /// + /// let height = l.random_height(); + /// l.get_or_insert_at_height_with_builders::<(), ()>(height, kb, vb, 10) + /// .unwrap(); + /// ``` + pub fn get_or_insert_at_height_with_builders<'a, KE, VE>( + &'a self, + height: Height, + key_builder: KeyBuilder) -> Result<(), KE>>, + value_builder: ValueBuilder) -> Result<(), VE>>, + trailer: T, + ) -> Result>>, Among> { + self.0.get_or_insert_at_height_with_builders( + MIN_VERSION, + height, + key_builder, + value_builder, + trailer, + ) + } + + /// Removes the key-value pair if it exists. + #[inline] + pub fn remove<'a, 'b: 'a>( + &'a self, + key: &'b [u8], + trailer: T, + ) -> Result>>, Error> { + self.0.compare_remove_at_height( + MIN_VERSION, + self.random_height(), + key, + trailer, + Ordering::Relaxed, + Ordering::Relaxed, + ) + } + + /// Removes the key-value pair if it exists. + pub fn remove_at_height<'a, 'b: 'a>( + &'a self, + height: Height, + key: &'b [u8], + trailer: T, + ) -> Result>>, Error> { + self.0.compare_remove_at_height( + MIN_VERSION, + height, + key, + trailer, + Ordering::Relaxed, + Ordering::Relaxed, + ) + } +} + +impl SkipMap { + /// Returns `true` if the key exists in the map. 
+ /// + /// # Example + /// + /// ```rust + /// use skl::{unsync::trailed::SkipMap, Options}; + /// + /// let map = SkipMap::::new(Options::new()).unwrap(); + /// + /// map.insert(b"hello", b"world", 10).unwrap(); + /// + /// map.remove(b"hello", 10).unwrap(); + /// + /// assert!(!map.contains_key(b"hello")); + /// ``` + #[inline] + pub fn contains_key<'a, 'b: 'a>(&'a self, key: &'b [u8]) -> bool { + self.0.contains_key(MIN_VERSION, key) + } + + /// Returns the first entry in the map. + pub fn first(&self) -> Option>> { + self.0.first(MIN_VERSION) + } + + /// Returns the last entry in the map. + pub fn last(&self) -> Option>> { + self.0.last(MIN_VERSION) + } + + /// Returns the value associated with the given key, if it exists. + /// + /// # Example + /// + /// ```rust + /// use skl::{unsync::trailed::SkipMap, Options}; + /// + /// let map = SkipMap::::new(Options::new()).unwrap(); + /// + /// map.insert(b"hello", b"world", 10).unwrap(); + /// + /// let ent = map.get(b"hello").unwrap(); + /// assert_eq!(ent.value(), b"world"); + /// + /// map.remove(b"hello", 10).unwrap(); + /// + /// assert!(map.get(b"hello").is_none()); + /// ``` + pub fn get<'a, 'b: 'a>(&'a self, key: &'b [u8]) -> Option>> { + self.0.get(MIN_VERSION, key) + } + + /// Returns an `EntryRef` pointing to the highest element whose key is below the given bound. + /// If no such element is found then `None` is returned. + pub fn upper_bound<'a, 'b: 'a>( + &'a self, + upper: Bound<&'b [u8]>, + ) -> Option>> { + self.0.upper_bound(MIN_VERSION, upper) + } + + /// Returns an `EntryRef` pointing to the lowest element whose key is above the given bound. + /// If no such element is found then `None` is returned. + pub fn lower_bound<'a, 'b: 'a>( + &'a self, + lower: Bound<&'b [u8]>, + ) -> Option>> { + self.0.lower_bound(MIN_VERSION, lower) + } + + /// Returns a new iterator, this iterator will yield the latest version of all entries in the map less or equal to the given version. 
+ #[inline] + pub fn iter(&self) -> Iter, C> { + self.0.iter(MIN_VERSION) + } + + /// Returns a iterator that within the range, this iterator will yield the latest version of all entries in the range less or equal to the given version. + #[inline] + pub fn range<'a, Q, R>(&'a self, range: R) -> Iter<'a, Allocator, C, Q, R> + where + Q: ?Sized + Borrow<[u8]>, + R: RangeBounds + 'a, + { + self.0.range(MIN_VERSION, range) + } +} diff --git a/src/unsync/versioned.rs b/src/unsync/versioned.rs new file mode 100644 index 0000000..d68566a --- /dev/null +++ b/src/unsync/versioned.rs @@ -0,0 +1,1327 @@ +use core::borrow::Borrow; + +use super::*; + +use among::Among; +use base::{AllVersionsIter, EntryRef, Iter, VersionedEntryRef}; + +type Allocator = GenericAllocator; +type SkipList = base::SkipList; + +node_pointer!(VersionedNode); + +/// A node that supports version. +#[repr(C)] +pub struct VersionedNode { + // A byte slice is 24 bytes. We are trying to save space here. + /// Multiple parts of the value are encoded as a single u64 so that it + /// can be atomically loaded and stored: + /// value offset: u32 (bits 0-31) + /// value size : u32 (bits 32-63) + value: UnsyncValuePointer, + // Immutable. No need to lock to access key. + key_offset: u32, + // Immutable. No need to lock to access key. + key_size_and_height: u32, + version: u64, + // ** DO NOT REMOVE BELOW COMMENT** + // The below field will be attached after the node, have to comment out + // this field, because each node will not use the full height, the code will + // not allocate the full size of the tower. + // + // Most nodes do not need to use the full height of the tower, since the + // probability of each successive level decreases exponentially. Because + // these elements are never accessed, they do not need to be allocated. + // Therefore, when a node is allocated in the arena, its memory footprint + // is deliberately truncated to not include unneeded tower elements. 
+ // + // All accesses to elements should use CAS operations, with no need to lock. + // pub(super) tower: [Link; self.opts.max_height], +} + +impl core::fmt::Debug for VersionedNode { + fn fmt(&self, f: &mut core::fmt::Formatter<'_>) -> core::fmt::Result { + let (key_size, height) = decode_key_size_and_height(self.key_size_and_height); + let (value_offset, value_size) = self.value.load(); + f.debug_struct("Node") + .field("value_offset", &value_offset) + .field("value_size", &value_size) + .field("key_offset", &self.key_offset) + .field("key_size", &key_size) + .field("height", &height) + .finish() + } +} + +impl WithVersion for VersionedNode {} + +impl Node for VersionedNode { + type Link = Link; + + type Trailer = (); + + type ValuePointer = UnsyncValuePointer; + + type Pointer = NodePointer; + + fn full(value_offset: u32, max_height: u8) -> Self { + Self { + value: UnsyncValuePointer::new(value_offset, 0), + key_offset: 0, + key_size_and_height: encode_key_size_and_height(0, max_height), + version: MIN_VERSION, + } + } + + #[inline] + fn value_pointer(&self) -> &Self::ValuePointer { + &self.value + } + + #[inline] + fn set_value_pointer(&mut self, offset: u32, size: u32) { + self.value = UnsyncValuePointer::new(offset, size); + } + + #[inline] + fn clear_value( + &self, + arena: &A, + success: Ordering, + failure: Ordering, + ) -> Result<(), (u32, u32)> { + self + .value + .compare_remove(success, failure) + .map(|(_, old_len)| { + if old_len != REMOVE { + arena.increase_discarded(old_len); + } + }) + } + + #[inline] + fn set_key_size_and_height(&mut self, key_size_and_height: u32) { + self.key_size_and_height = key_size_and_height; + } + + #[inline] + fn set_key_offset(&mut self, key_offset: u32) { + self.key_offset = key_offset; + } + + #[inline] + fn version(&self) -> Version { + self.version + } + + #[inline] + fn set_version(&mut self, version: Version) { + self.version = version; + } + + #[inline] + fn key_size_and_height(&self) -> u32 { + 
self.key_size_and_height + } + + #[inline] + fn key_offset(&self) -> u32 { + self.key_offset + } +} + +/// A fast, ARENA based `SkipMap` that supports multiple versions, forward and backward iteration. +/// +/// If you want to use in concurrent environment, you can use [`sync::versioned::SkipMap`]. +#[repr(transparent)] +pub struct SkipMap(SkipList); + +impl Clone for SkipMap { + #[inline] + fn clone(&self) -> Self { + Self(self.0.clone()) + } +} + +impl SkipMap { + /// Create a new skipmap with default options. + /// + /// **Note:** The capacity stands for how many memory allocated, + /// it does not mean the skiplist can store `cap` entries. + /// + /// + /// + /// **What the difference between this method and [`SkipMap::mmap_anon`]?** + /// + /// 1. This method will use an `AlignedVec` ensures we are working within Rust's memory safety guarantees. + /// Even if we are working with raw pointers with `Box::into_raw`, + /// the backend ARENA will reclaim the ownership of this memory by converting it back to a `Box` + /// when dropping the backend ARENA. Since `AlignedVec` uses heap memory, the data might be more cache-friendly, + /// especially if you're frequently accessing or modifying it. + /// + /// 2. Where as [`SkipMap::mmap_anon`] will use mmap anonymous to require memory from the OS. + /// If you require very large contiguous memory regions, `mmap` might be more suitable because + /// it's more direct in requesting large chunks of memory from the OS. + /// + /// [`SkipMap::mmap_anon`]: #method.mmap_anon + pub fn new(opts: Options) -> Result { + Self::with_comparator(opts, Ascend) + } + + /// Create a new memory map file backed with default options. + /// + /// **Note:** The capacity stands for how many memory mmaped, + /// it does not mean the skipmap can store `cap` entries. 
+ /// + /// # Safety + /// - If trying to reopens a skiplist, then the trailer type must be the same as the previous one + #[cfg(all(feature = "memmap", not(target_family = "wasm")))] + #[cfg_attr(docsrs, doc(cfg(all(feature = "memmap", not(target_family = "wasm")))))] + pub unsafe fn map_mut>( + path: P, + opts: Options, + open_options: OpenOptions, + mmap_options: MmapOptions, + ) -> std::io::Result { + Self::map_mut_with_comparator(path, opts, open_options, mmap_options, Ascend) + } + + /// Open an exist file and mmap it to create skipmap. + /// + /// # Safety + /// - If trying to reopens a skiplist, then the trailer type must be the same as the previous one + #[cfg(all(feature = "memmap", not(target_family = "wasm")))] + #[cfg_attr(docsrs, doc(cfg(all(feature = "memmap", not(target_family = "wasm")))))] + pub unsafe fn map>( + path: P, + opts: Options, + open_options: OpenOptions, + mmap_options: MmapOptions, + ) -> std::io::Result { + Self::map_with_comparator(path, opts, open_options, mmap_options, Ascend) + } + + /// Create a new memory map backed skipmap with default options. + /// + /// **What the difference between this method and [`SkipMap::new`]?** + /// + /// 1. This method will use mmap anonymous to require memory from the OS directly. + /// If you require very large contiguous memory regions, this method might be more suitable because + /// it's more direct in requesting large chunks of memory from the OS. + /// + /// 2. Where as [`SkipMap::new`] will use an `AlignedVec` ensures we are working within Rust's memory safety guarantees. + /// Even if we are working with raw pointers with `Box::into_raw`, + /// the backend ARENA will reclaim the ownership of this memory by converting it back to a `Box` + /// when dropping the backend ARENA. Since `AlignedVec` uses heap memory, the data might be more cache-friendly, + /// especially if you're frequently accessing or modifying it. 
+ /// + /// [`SkipMap::new`]: #method.new + #[cfg(all(feature = "memmap", not(target_family = "wasm")))] + #[cfg_attr(docsrs, doc(cfg(all(feature = "memmap", not(target_family = "wasm")))))] + pub fn map_anon(opts: Options, mmap_options: MmapOptions) -> std::io::Result { + Self::map_anon_with_comparator(opts, mmap_options, Ascend) + } +} + +impl SkipMap { + /// Returns the reserved bytes of the allocator specified in the [`ArenaOptions::with_reserved`]. + #[inline] + pub fn reserved_slice(&self) -> &[u8] { + self.0.arena.reserved_slice() + } + + /// Returns the mutable reserved bytes of the allocator specified in the [`ArenaOptions::with_reserved`]. + /// + /// # Safety + /// - The caller need to make sure there is no data-race + /// + /// # Panics + /// - If in read-only mode, it will panic. + #[inline] + #[allow(clippy::mut_from_ref)] + pub unsafe fn reserved_slice_mut(&self) -> &mut [u8] { + self.0.arena.reserved_slice_mut() + } + + /// Returns the path of the mmap file, only returns `Some` when the ARENA is backed by a mmap file. + #[cfg(all(feature = "memmap", not(target_family = "wasm")))] + #[cfg_attr(docsrs, doc(cfg(all(feature = "memmap", not(target_family = "wasm")))))] + #[inline] + pub fn path(&self) -> Option<&std::rc::Rc> { + self.0.arena.path() + } + + /// Sets remove on drop, only works on mmap with a file backend. + /// + /// Default is `false`. + /// + /// > **WARNING:** Once set to `true`, the backed file will be removed when the allocator is dropped, even though the file is opened in + /// > read-only mode. + #[cfg(all(feature = "memmap", not(target_family = "wasm")))] + #[cfg_attr(docsrs, doc(cfg(all(feature = "memmap", not(target_family = "wasm")))))] + #[inline] + pub fn remove_on_drop(&self, val: bool) { + self.0.remove_on_drop(val); + } + + /// Returns the offset of the data section in the `SkipMap`. 
+ /// + /// By default, `SkipMap` will allocate meta, head node, and tail node in the ARENA, + /// and the data section will be allocated after the tail node. + /// + /// This method will return the offset of the data section in the ARENA. + #[inline] + pub const fn data_offset(&self) -> usize { + self.0.data_offset() + } + + /// Returns the version number of the [`SkipMap`]. + #[inline] + pub fn version(&self) -> u16 { + self.0.magic_version() + } + + /// Returns the magic version number of the [`SkipMap`]. + /// + /// This value can be used to check the compatibility for application using [`SkipMap`]. + #[inline] + pub fn magic_version(&self) -> u16 { + self.0.magic_version() + } + + /// Returns the height of the highest tower within any of the nodes that + /// have ever been allocated as part of this skiplist. + #[inline] + pub fn height(&self) -> u8 { + self.0.height() + } + + /// Returns the number of remaining bytes can be allocated by the arena. + #[inline] + pub fn remaining(&self) -> usize { + self.0.remaining() + } + + /// Returns the number of bytes that have allocated from the arena. + #[inline] + pub fn allocated(&self) -> usize { + self.0.allocated() + } + + /// Returns the capacity of the arena. + #[inline] + pub fn capacity(&self) -> usize { + self.0.capacity() + } + + /// Returns the number of entries in the skipmap. + #[inline] + pub fn len(&self) -> usize { + self.0.len() + } + + /// Returns true if the skipmap is empty. + #[inline] + pub fn is_empty(&self) -> bool { + self.0.is_empty() + } + + /// Gets the number of pointers to this `SkipMap` similar to [`Arc::strong_count`](std::sync::Arc::strong_count). + #[inline] + pub fn refs(&self) -> usize { + self.0.refs() + } + + /// Returns how many bytes are discarded by the ARENA. + #[inline] + pub fn discarded(&self) -> u32 { + self.0.discarded() + } + + /// Returns the maximum version of all entries in the map. 
+ #[inline] + pub fn max_version(&self) -> u64 { + self.0.max_version() + } + + /// Returns the minimum version of all entries in the map. + #[inline] + pub fn min_version(&self) -> u64 { + self.0.min_version() + } + + /// Returns the comparator used to compare keys. + #[inline] + pub const fn comparator(&self) -> &C { + self.0.comparator() + } + + /// Returns a random generated height. + /// + /// This method is useful when you want to check if the underlying allocator can allocate a node. + /// + /// # Example + /// + /// ```rust + /// use skl::{unsync::versioned::SkipMap, Ascend, Options}; + /// + /// let map = SkipMap::new(Options::new()).unwrap(); + /// let height = map.random_height(); + /// + /// let needed = SkipMap::::estimated_node_size(height, b"k1".len(), b"k2".len()); + /// ``` + #[inline] + pub fn random_height(&self) -> Height { + self.0.random_height() + } + + /// Returns the estimated size of a node with the given height and key/value sizes. + /// + /// **Note**: The returned size is only an estimate and may not be accurate, which means that the actual size is less than or equal to the returned size. + #[inline] + pub fn estimated_node_size(height: Height, key_size: usize, value_size: usize) -> usize { + SkipList::::estimated_node_size(height, key_size, value_size) + } + + /// Like [`SkipMap::new`], but with a custom [`Comparator`]. + #[inline] + pub fn with_comparator(opts: Options, cmp: C) -> Result { + SkipList::with_comparator(opts, cmp).map(Self) + } + + /// Like [`SkipMap::map_mut`], but with a custom [`Comparator`]. 
+ /// + /// # Safety + /// - If trying to reopens a skiplist, then the trailer type must be the same as the previous one + #[cfg(all(feature = "memmap", not(target_family = "wasm")))] + #[cfg_attr(docsrs, doc(cfg(all(feature = "memmap", not(target_family = "wasm")))))] + #[inline] + pub unsafe fn map_mut_with_comparator>( + path: P, + opts: Options, + open_options: OpenOptions, + mmap_options: MmapOptions, + cmp: C, + ) -> std::io::Result { + SkipList::map_mut_with_comparator(path, opts, open_options, mmap_options, cmp).map(Self) + } + + /// Like [`SkipMap::map_mut`], but with [`Options`], a custom [`Comparator`] and a [`PathBuf`](std::path::PathBuf) builder. + /// + /// # Safety + /// - If trying to reopens a skiplist, then the trailer type must be the same as the previous one + #[cfg(all(feature = "memmap", not(target_family = "wasm")))] + #[cfg_attr(docsrs, doc(cfg(all(feature = "memmap", not(target_family = "wasm")))))] + #[inline] + pub unsafe fn map_mut_with_comparator_and_path_builder( + path_builder: PB, + opts: Options, + open_options: OpenOptions, + mmap_options: MmapOptions, + cmp: C, + ) -> Result> + where + PB: FnOnce() -> Result, + { + SkipList::map_mut_with_comparator_and_path_builder( + path_builder, + opts, + open_options, + mmap_options, + cmp, + ) + .map(Self) + } + + /// Like [`SkipMap::map`], but with a custom [`Comparator`]. 
+ /// + /// # Safety + /// - If trying to reopens a skiplist, then the trailer type must be the same as the previous one + #[cfg(all(feature = "memmap", not(target_family = "wasm")))] + #[cfg_attr(docsrs, doc(cfg(all(feature = "memmap", not(target_family = "wasm")))))] + #[inline] + pub unsafe fn map_with_comparator>( + path: P, + opts: Options, + open_options: OpenOptions, + mmap_options: MmapOptions, + cmp: C, + ) -> std::io::Result { + SkipList::map_with_comparator(path, opts, open_options, mmap_options, cmp).map(Self) + } + + /// Like [`SkipMap::map`], but with a custom [`Comparator`] and a [`PathBuf`](std::path::PathBuf) builder. + /// + /// # Safety + /// - If trying to reopens a skiplist, then the trailer type must be the same as the previous one + #[cfg(all(feature = "memmap", not(target_family = "wasm")))] + #[cfg_attr(docsrs, doc(cfg(all(feature = "memmap", not(target_family = "wasm")))))] + #[inline] + pub unsafe fn map_with_comparator_and_path_builder( + path_builder: PB, + opts: Options, + open_options: OpenOptions, + mmap_options: MmapOptions, + cmp: C, + ) -> Result> + where + PB: FnOnce() -> Result, + { + SkipList::map_with_comparator_and_path_builder( + path_builder, + opts, + open_options, + mmap_options, + cmp, + ) + .map(Self) + } + + /// Like [`SkipMap::map_anon`], but with a custom [`Comparator`]. + #[cfg(all(feature = "memmap", not(target_family = "wasm")))] + #[cfg_attr(docsrs, doc(cfg(all(feature = "memmap", not(target_family = "wasm")))))] + #[inline] + pub fn map_anon_with_comparator( + opts: Options, + mmap_options: MmapOptions, + cmp: C, + ) -> std::io::Result { + SkipList::map_anon_with_comparator(opts, mmap_options, cmp).map(Self) + } + + /// Clear the skiplist to empty and re-initialize. + /// + /// # Safety + /// - The current pointers get from the ARENA cannot be used anymore after calling this method. + /// - This method is not thread-safe. 
+ /// # Example + /// + /// Undefined behavior: + /// + /// ```ignore + /// let map = SkipMap::new(Options::new()).unwrap(); + /// + /// map.insert(1, b"hello", b"world").unwrap(); + /// + /// let data = map.get(b"hello").unwrap(); + /// + /// map.clear().unwrap(); + /// + /// let w = data[0]; // undefined behavior + /// ``` + pub unsafe fn clear(&mut self) -> Result<(), Error> { + self.0.clear() + } + + /// Flushes outstanding memory map modifications to disk. + /// + /// When this method returns with a non-error result, + /// all outstanding changes to a file-backed memory map are guaranteed to be durably stored. + /// The file's metadata (including last modification timestamp) may not be updated. + #[cfg(all(feature = "memmap", not(target_family = "wasm")))] + #[cfg_attr(docsrs, doc(cfg(all(feature = "memmap", not(target_family = "wasm")))))] + pub fn flush(&self) -> std::io::Result<()> { + self.0.flush() + } + + /// Asynchronously flushes outstanding memory map modifications to disk. + /// + /// This method initiates flushing modified pages to durable storage, but it will not wait for + /// the operation to complete before returning. The file's metadata (including last + /// modification timestamp) may not be updated. + #[cfg(all(feature = "memmap", not(target_family = "wasm")))] + #[cfg_attr(docsrs, doc(cfg(all(feature = "memmap", not(target_family = "wasm")))))] + pub fn flush_async(&self) -> std::io::Result<()> { + self.0.flush_async() + } +} + +impl SkipMap { + /// Returns `true` if the key exists in the map. + /// + /// This method will return `false` if the entry is marked as removed. If you want to check if the key exists even if it is marked as removed, + /// you can use [`contains_key_versioned`](SkipMap::contains_key_versioned). 
+ /// + /// # Example + /// + /// ```rust + /// use skl::{unsync::versioned::SkipMap, Options}; + /// + /// let map = SkipMap::new(Options::new()).unwrap(); + /// + /// map.insert(0, b"hello", b"world").unwrap(); + /// + /// map.remove(1, b"hello").unwrap(); + /// + /// assert!(!map.contains_key(1, b"hello")); + /// assert!(map.contains_key_versioned(1, b"hello")); + /// ``` + #[inline] + pub fn contains_key<'a, 'b: 'a>(&'a self, version: Version, key: &'b [u8]) -> bool { + self.0.contains_key(version, key) + } + + /// Returns `true` if the key exists in the map, even if it is marked as removed. + /// + /// # Example + /// + /// ```rust + /// use skl::{unsync::versioned::SkipMap, Options}; + /// + /// let map = SkipMap::new(Options::new()).unwrap(); + /// + /// map.insert(0, b"hello", b"world").unwrap(); + /// + /// map.remove(1, b"hello").unwrap(); + /// + /// assert!(!map.contains_key(1, b"hello")); + /// assert!(map.contains_key_versioned(1, b"hello")); + /// ``` + #[inline] + pub fn contains_key_versioned<'a, 'b: 'a>(&'a self, version: Version, key: &'b [u8]) -> bool { + self.0.contains_key_versioned(version, key) + } + + /// Returns the first entry in the map. + pub fn first(&self, version: Version) -> Option> { + self.0.first(version) + } + + /// Returns the last entry in the map. + pub fn last(&self, version: Version) -> Option> { + self.0.last(version) + } + + /// Returns the value associated with the given key, if it exists. + /// + /// This method will return `None` if the entry is marked as removed. If you want to get the entry even if it is marked as removed, + /// you can use [`get_versioned`](SkipMap::get_versioned). 
+ /// + /// # Example + /// + /// ```rust + /// use skl::{unsync::versioned::SkipMap, Options}; + /// + /// let map = SkipMap::new(Options::new()).unwrap(); + /// + /// map.insert(0, b"hello", b"world").unwrap(); + /// + /// let ent = map.get(0, b"hello").unwrap(); + /// assert_eq!(ent.value(), b"world"); + /// + /// map.remove(1, b"hello").unwrap(); + /// + /// assert!(map.get(1, b"hello").is_none()); + /// ``` + pub fn get<'a, 'b: 'a>( + &'a self, + version: Version, + key: &'b [u8], + ) -> Option> { + self.0.get(version, key) + } + + /// Returns the value associated with the given key, if it exists. + /// + /// The difference between `get` and `get_versioned` is that `get_versioned` will return the value even if the entry is removed. + /// + /// # Example + /// + /// ```rust + /// use skl::{unsync::versioned::SkipMap, Options}; + /// + /// let map = SkipMap::new(Options::new()).unwrap(); + /// + /// map.insert(0, b"hello", b"world").unwrap(); + /// + /// map.remove(1, b"hello").unwrap(); + /// + /// assert!(map.get(1, b"hello").is_none()); + /// + /// let ent = map.get_versioned(1, b"hello").unwrap(); + /// // value is None because the entry is marked as removed. + /// assert!(ent.value().is_none()); + /// ``` + pub fn get_versioned<'a, 'b: 'a>( + &'a self, + version: Version, + key: &'b [u8], + ) -> Option> { + self.0.get_versioned(version, key) + } + + /// Returns an `EntryRef` pointing to the highest element whose key is below the given bound. + /// If no such element is found then `None` is returned. + pub fn upper_bound<'a, 'b: 'a>( + &'a self, + version: Version, + upper: Bound<&'b [u8]>, + ) -> Option> { + self.0.upper_bound(version, upper) + } + + /// Returns an `EntryRef` pointing to the lowest element whose key is above the given bound. + /// If no such element is found then `None` is returned. 
+ pub fn lower_bound<'a, 'b: 'a>( + &'a self, + version: Version, + lower: Bound<&'b [u8]>, + ) -> Option> { + self.0.lower_bound(version, lower) + } + + /// Returns a new iterator, this iterator will yield the latest version of all entries in the map less or equal to the given version. + #[inline] + pub fn iter(&self, version: Version) -> Iter { + self.0.iter(version) + } + + /// Returns a new iterator, this iterator will yield all versions for all entries in the map less or equal to the given version. + #[inline] + pub fn iter_all_versions(&self, version: Version) -> AllVersionsIter { + self.0.iter_all_versions(version) + } + + /// Returns an iterator within the range; this iterator will yield the latest version of all entries in the range less or equal to the given version. + #[inline] + pub fn range<'a, Q, R>(&'a self, version: Version, range: R) -> Iter<'a, Allocator, C, Q, R> + where + Q: ?Sized + Borrow<[u8]>, + R: RangeBounds + 'a, + { + self.0.range(version, range) + } + + /// Returns an iterator within the range; this iterator will yield all versions for all entries in the range less or equal to the given version. + #[inline] + pub fn range_all_versions<'a, Q, R>( + &'a self, + version: Version, + range: R, + ) -> AllVersionsIter<'a, Allocator, C, Q, R> + where + Q: ?Sized + Borrow<[u8]>, + R: RangeBounds + 'a, + { + self.0.range_all_versions(version, range) + } +} + +impl SkipMap { + /// Upserts a new key-value pair if it does not yet exist, if the key with the given version already exists, it will update the value. + /// Unlike [`get_or_insert`](SkipMap::get_or_insert), this method will update the value if the key with the given version already exists. + /// + /// - Returns `Ok(None)` if the key was successfully inserted. + /// - Returns `Ok(Some(old))` if the key with the given version already exists and the value is successfully updated. 
+ #[inline] + pub fn insert<'a, 'b: 'a>( + &'a self, + version: Version, + key: &'b [u8], + value: &'b [u8], + ) -> Result>, Error> { + self.0.insert(version, key, value, ()) + } + + /// Upserts a new key-value pair at the given height if it does not yet exist, if the key with the given version already exists, it will update the value. + /// Unlike [`get_or_insert_at_height`](SkipMap::get_or_insert_at_height), this method will update the value if the key with the given version already exists. + /// + /// - Returns `Ok(None)` if the key was successfully inserted. + /// - Returns `Ok(Some(old))` if the key with the given version already exists and the value is successfully updated. + /// + /// # Example + /// + /// ```rust + /// use skl::{unsync::versioned::SkipMap, Options}; + /// + /// let map = SkipMap::new(Options::new()).unwrap(); + /// + /// let height = map.random_height(); + /// map.insert_at_height(0, height, b"hello", b"world").unwrap(); + /// ``` + #[inline] + pub fn insert_at_height<'a, 'b: 'a>( + &'a self, + version: Version, + height: Height, + key: &'b [u8], + value: &'b [u8], + ) -> Result>, Error> { + self.0.insert_at_height(version, height, key, value, ()) + } + + /// Upserts a new key if it does not yet exist, if the key with the given version already exists, it will update the value. + /// Unlike [`get_or_insert_with_value_builder`](SkipMap::get_or_insert_with_value_builder), this method will update the value if the key with the given version already exists. + /// + /// This method is useful when you want to insert a key and you know the value size but you do not have the value + /// at this moment. + /// + /// A placeholder will be inserted first, then you will get an [`VacantBuffer`], + /// and you must fill the buffer with bytes later in the closure. + /// + /// - Returns `Ok(None)` if the key was successfully inserted. + /// - Returns `Ok(Some(old))` if the key with the given version already exists and the value is successfully updated. 
+ /// + /// # Example + /// + /// ```rust + /// use skl::{unsync::versioned::SkipMap, ValueBuilder, Options}; + /// + /// struct Person { + /// id: u32, + /// name: String, + /// } + /// + /// impl Person { + /// fn encoded_size(&self) -> usize { + /// 4 + self.name.len() + /// } + /// } + /// + /// + /// let alice = Person { + /// id: 1, + /// name: "Alice".to_string(), + /// }; + /// + /// let encoded_size = alice.encoded_size(); + /// + /// let l = SkipMap::new(Options::new()).unwrap(); + /// + /// let vb = ValueBuilder::new(encoded_size as u32, |mut val| { + /// val.put_u32_le(alice.id).unwrap(); + /// val.put_slice(alice.name.as_bytes()).unwrap(); + /// Ok(()) + /// }); + /// + /// l.insert_with_value_builder::(1, b"alice", vb) + /// .unwrap(); + /// ``` + #[inline] + pub fn insert_with_value_builder<'a, 'b: 'a, E>( + &'a self, + version: Version, + key: &'b [u8], + value_builder: ValueBuilder) -> Result<(), E>>, + ) -> Result>, Either> { + self.0.insert_at_height_with_value_builder( + version, + self.random_height(), + key, + value_builder, + (), + ) + } + + /// Upserts a new key if it does not yet exist, if the key with the given version already exists, it will update the value. + /// Unlike [`get_or_insert_with_value_builder`](SkipMap::get_or_insert_with_value_builder), this method will update the value if the key with the given version already exists. + /// + /// This method is useful when you want to insert a key and you know the value size but you do not have the value + /// at this moment. + /// + /// A placeholder will be inserted first, then you will get an [`VacantBuffer`], + /// and you must fill the buffer with bytes later in the closure. + /// + /// - Returns `Ok(None)` if the key was successfully inserted. + /// - Returns `Ok(Some(old))` if the key with the given version already exists and the value is successfully updated. 
+ /// + /// # Example + /// + /// ```rust + /// use skl::{unsync::versioned::SkipMap, ValueBuilder, Options}; + /// + /// struct Person { + /// id: u32, + /// name: String, + /// } + /// + /// impl Person { + /// fn encoded_size(&self) -> usize { + /// 4 + self.name.len() + /// } + /// } + /// + /// + /// let alice = Person { + /// id: 1, + /// name: "Alice".to_string(), + /// }; + /// + /// let encoded_size = alice.encoded_size(); + /// + /// let l = SkipMap::new(Options::new()).unwrap(); + /// + /// let vb = ValueBuilder::new(encoded_size as u32, |mut val| { + /// val.put_u32_le(alice.id).unwrap(); + /// val.put_slice(alice.name.as_bytes()).unwrap(); + /// Ok(()) + /// }); + /// + /// let height = l.random_height(); + /// l.insert_at_height_with_value_builder::(1, height, b"alice", vb) + /// .unwrap(); + /// ``` + pub fn insert_at_height_with_value_builder<'a, 'b: 'a, E>( + &'a self, + version: Version, + height: Height, + key: &'b [u8], + value_builder: ValueBuilder) -> Result<(), E>>, + ) -> Result>, Either> { + self + .0 + .insert_at_height_with_value_builder(version, height, key, value_builder, ()) + } + + /// Inserts a new key-value pair if it does not yet exist. + /// + /// Unlike [`insert`](SkipMap::insert), this method will not update the value if the key with the given version already exists. + /// + /// - Returns `Ok(None)` if the key was successfully get_or_inserted. + /// - Returns `Ok(Some(_))` if the key with the given version already exists. + #[inline] + pub fn get_or_insert<'a, 'b: 'a>( + &'a self, + version: Version, + key: &'b [u8], + value: &'b [u8], + ) -> Result>, Error> { + self + .0 + .get_or_insert_at_height(version, self.random_height(), key, value, ()) + } + + /// Inserts a new key-value pair at height if it does not yet exist. + /// + /// Unlike [`insert_at_height`](SkipMap::insert_at_height), this method will not update the value if the key with the given version already exists. 
+ /// + /// - Returns `Ok(None)` if the key was successfully get_or_inserted. + /// - Returns `Ok(Some(_))` if the key with the given version already exists. + pub fn get_or_insert_at_height<'a, 'b: 'a>( + &'a self, + version: Version, + height: Height, + key: &'b [u8], + value: &'b [u8], + ) -> Result>, Error> { + self + .0 + .get_or_insert_at_height(version, height, key, value, ()) + } + + /// Inserts a new key if it does not yet exist. + /// + /// Unlike [`insert_with_value_builder`](SkipMap::insert_with_value_builder), this method will not update the value if the key with the given version already exists. + /// + /// This method is useful when you want to get_or_insert a key and you know the value size but you do not have the value + /// at this moment. + /// + /// A placeholder will be inserted first, then you will get an [`VacantBuffer`], + /// and you must fill the buffer with bytes later in the closure. + /// + /// - Returns `Ok(None)` if the key was successfully get_or_inserted. + /// - Returns `Ok(Some(_))` if the key with the given version already exists. 
+ /// + /// # Example + /// + /// ```rust + /// use skl::{unsync::versioned::SkipMap, ValueBuilder, Options}; + /// + /// struct Person { + /// id: u32, + /// name: String, + /// } + /// + /// impl Person { + /// fn encoded_size(&self) -> usize { + /// 4 + self.name.len() + /// } + /// } + /// + /// + /// let alice = Person { + /// id: 1, + /// name: "Alice".to_string(), + /// }; + /// + /// let encoded_size = alice.encoded_size(); + /// + /// let l = SkipMap::new(Options::new()).unwrap(); + /// + /// let vb = ValueBuilder::new(encoded_size as u32, |mut val| { + /// val.put_u32_le(alice.id).unwrap(); + /// val.put_slice(alice.name.as_bytes()).unwrap(); + /// Ok(()) + /// }); + /// l.get_or_insert_with_value_builder::(1, b"alice", vb) + /// .unwrap(); + /// ``` + #[inline] + pub fn get_or_insert_with_value_builder<'a, 'b: 'a, E>( + &'a self, + version: Version, + key: &'b [u8], + value_builder: ValueBuilder) -> Result<(), E>>, + ) -> Result>, Either> { + self.get_or_insert_at_height_with_value_builder( + version, + self.random_height(), + key, + value_builder, + ) + } + + /// Inserts a new key if it does not yet exist. + /// + /// Unlike [`insert_at_height_with_value_builder`](SkipMap::insert_at_height_with_value_builder), this method will not update the value if the key with the given version already exists. + /// + /// This method is useful when you want to get_or_insert a key and you know the value size but you do not have the value + /// at this moment. + /// + /// A placeholder will be inserted first, then you will get an [`VacantBuffer`], + /// and you must fill the buffer with bytes later in the closure. + /// + /// - Returns `Ok(None)` if the key was successfully get_or_inserted. + /// - Returns `Ok(Some(_))` if the key with the given version already exists. 
+ /// + /// # Example + /// + /// ```rust + /// use skl::{unsync::versioned::SkipMap, ValueBuilder, Options}; + /// + /// struct Person { + /// id: u32, + /// name: String, + /// } + /// + /// impl Person { + /// fn encoded_size(&self) -> usize { + /// 4 + self.name.len() + /// } + /// } + /// + /// + /// let alice = Person { + /// id: 1, + /// name: "Alice".to_string(), + /// }; + /// + /// let encoded_size = alice.encoded_size(); + /// + /// let l = SkipMap::new(Options::new()).unwrap(); + /// + /// let vb = ValueBuilder::new(encoded_size as u32, |mut val| { + /// val.put_u32_le(alice.id).unwrap(); + /// val.put_slice(alice.name.as_bytes()).unwrap(); + /// Ok(()) + /// }); + /// + /// let height = l.random_height(); + /// l.get_or_insert_at_height_with_value_builder::(1, height, b"alice", vb) + /// .unwrap(); + /// ``` + #[inline] + pub fn get_or_insert_at_height_with_value_builder<'a, 'b: 'a, E>( + &'a self, + version: Version, + height: Height, + key: &'b [u8], + value_builder: ValueBuilder) -> Result<(), E>>, + ) -> Result>, Either> { + self + .0 + .get_or_insert_at_height_with_value_builder(version, height, key, value_builder, ()) + } + + /// Upserts a new key if it does not yet exist, if the key with the given version already exists, it will update the value. + /// Unlike [`get_or_insert_with_builders`](SkipMap::get_or_insert_with_builders), this method will update the value if the key with the given version already exists. + /// + /// This method is useful when you want to insert a key and you know the key size and value size but you do not have the key and value + /// at this moment. + /// + /// A placeholder will be inserted first, then you will get an [`VacantBuffer`], + /// and you must fill the buffer with bytes later in the closure. + /// + /// - Returns `Ok(None)` if the key was successfully inserted. + /// - Returns `Ok(Some(old))` if the key with the given version already exists and the value is successfully updated. 
+ /// + /// # Example + /// + /// ```rust + /// use skl::{unsync::versioned::SkipMap, KeyBuilder, ValueBuilder, Options}; + /// + /// struct Person { + /// id: u32, + /// name: String, + /// } + /// + /// impl Person { + /// fn encoded_size(&self) -> usize { + /// 4 + self.name.len() + /// } + /// } + /// + /// + /// let alice = Person { + /// id: 1, + /// name: "Alice".to_string(), + /// }; + /// + /// let encoded_size = alice.encoded_size(); + /// + /// let l = SkipMap::new(Options::new()).unwrap(); + /// + /// let kb = KeyBuilder::new(5u8.into(), |mut key| { + /// key.put_slice(b"alice").unwrap(); + /// Ok(()) + /// }); + /// + /// let vb = ValueBuilder::new(encoded_size as u32, |mut val| { + /// val.put_u32_le(alice.id).unwrap(); + /// val.put_slice(alice.name.as_bytes()).unwrap(); + /// Ok(()) + /// }); + /// + /// l.insert_with_builders::<(), ()>(1, kb, vb) + /// .unwrap(); + /// ``` + #[inline] + pub fn insert_with_builders<'a, KE, VE>( + &'a self, + version: Version, + key_builder: KeyBuilder) -> Result<(), KE>>, + value_builder: ValueBuilder) -> Result<(), VE>>, + ) -> Result>, Among> { + self.0.insert_at_height_with_builders( + version, + self.random_height(), + key_builder, + value_builder, + (), + ) + } + + /// Upserts a new key if it does not yet exist, if the key with the given version already exists, it will update the value. + /// Unlike [`get_or_insert_with_builders_and_trailer`](SkipMap::get_or_insert_with_builders_and_trailer), this method will update the value if the key with the given version already exists. + /// + /// This method is useful when you want to insert a key and you know the key size and value size but you do not have the key and value + /// at this moment. + /// + /// A placeholder will be inserted first, then you will get an [`VacantBuffer`], + /// and you must fill the buffer with bytes later in the closure. + /// + /// - Returns `Ok(None)` if the key was successfully inserted. 
+ /// - Returns `Ok(Some(old))` if the key with the given version already exists and the value is successfully updated. + /// + /// # Example + /// + /// ```rust + /// use skl::{unsync::versioned::SkipMap, KeyBuilder, ValueBuilder, Options}; + /// + /// struct Person { + /// id: u32, + /// name: String, + /// } + /// + /// impl Person { + /// fn encoded_size(&self) -> usize { + /// 4 + self.name.len() + /// } + /// } + /// + /// + /// let alice = Person { + /// id: 1, + /// name: "Alice".to_string(), + /// }; + /// + /// let encoded_size = alice.encoded_size(); + /// + /// let l = SkipMap::new(Options::new()).unwrap(); + /// + /// let kb = KeyBuilder::new(5u8.into(), |mut key| { + /// key.put_slice(b"alice").unwrap(); + /// Ok(()) + /// }); + /// + /// let vb = ValueBuilder::new(encoded_size as u32, |mut val| { + /// val.put_u32_le(alice.id).unwrap(); + /// val.put_slice(alice.name.as_bytes()).unwrap(); + /// Ok(()) + /// }); + /// + /// let height = l.random_height(); + /// l.insert_at_height_with_builders::<(), ()>(1, height, kb, vb) + /// .unwrap(); + /// ``` + #[inline] + pub fn insert_at_height_with_builders<'a, KE, VE>( + &'a self, + version: Version, + height: Height, + key_builder: KeyBuilder) -> Result<(), KE>>, + value_builder: ValueBuilder) -> Result<(), VE>>, + ) -> Result>, Among> { + self + .0 + .insert_at_height_with_builders(version, height, key_builder, value_builder, ()) + } + + /// Inserts a new key if it does not yet exist. + /// + /// Unlike [`insert_with_builders`](SkipMap::insert_with_builders), this method will not update the value if the key with the given version already exists. + /// + /// This method is useful when you want to get_or_insert a key and you know the value size but you do not have the value + /// at this moment. + /// + /// A placeholder will be inserted first, then you will get an [`VacantBuffer`], + /// and you must fill the buffer with bytes later in the closure. 
+ /// + /// # Example + /// + /// ```rust + /// use skl::{unsync::versioned::SkipMap, KeyBuilder, ValueBuilder, Options}; + /// + /// struct Person { + /// id: u32, + /// name: String, + /// } + /// + /// impl Person { + /// fn encoded_size(&self) -> usize { + /// 4 + self.name.len() + /// } + /// } + /// + /// + /// let alice = Person { + /// id: 1, + /// name: "Alice".to_string(), + /// }; + /// + /// let encoded_size = alice.encoded_size(); + /// + /// let l = SkipMap::new(Options::new()).unwrap(); + /// + /// let kb = KeyBuilder::new(5u8.into(), |mut key| { + /// key.put_slice(b"alice").unwrap(); + /// Ok(()) + /// }); + /// + /// let vb = ValueBuilder::new(encoded_size as u32, |mut val| { + /// val.put_u32_le(alice.id).unwrap(); + /// val.put_slice(alice.name.as_bytes()).unwrap(); + /// Ok(()) + /// }); + /// + /// l.get_or_insert_with_builders::<(), ()>(1, kb, vb) + /// .unwrap(); + /// ``` + #[inline] + pub fn get_or_insert_with_builders<'a, KE, VE>( + &'a self, + version: Version, + key_builder: KeyBuilder) -> Result<(), KE>>, + value_builder: ValueBuilder) -> Result<(), VE>>, + ) -> Result>, Among> { + self.0.get_or_insert_at_height_with_builders( + version, + self.random_height(), + key_builder, + value_builder, + (), + ) + } + + /// Inserts a new key if it does not yet exist. + /// + /// Unlike [`insert_at_height_with_builders`](SkipMap::insert_at_height_with_builders), this method will not update the value if the key with the given version already exists. + /// + /// This method is useful when you want to get_or_insert a key and you know the value size but you do not have the value + /// at this moment. + /// + /// A placeholder will be inserted first, then you will get an [`VacantBuffer`], + /// and you must fill the buffer with bytes later in the closure. 
+ /// + /// # Example + /// + /// ```rust + /// use skl::{unsync::versioned::SkipMap, KeyBuilder, ValueBuilder, Options}; + /// + /// struct Person { + /// id: u32, + /// name: String, + /// } + /// + /// impl Person { + /// fn encoded_size(&self) -> usize { + /// 4 + self.name.len() + /// } + /// } + /// + /// + /// let alice = Person { + /// id: 1, + /// name: "Alice".to_string(), + /// }; + /// + /// let encoded_size = alice.encoded_size(); + /// + /// let l = SkipMap::new(Options::new()).unwrap(); + /// + /// let kb = KeyBuilder::new(5u8.into(), |mut key| { + /// key.put_slice(b"alice").unwrap(); + /// Ok(()) + /// }); + /// + /// let vb = ValueBuilder::new(encoded_size as u32, |mut val| { + /// val.put_u32_le(alice.id).unwrap(); + /// val.put_slice(alice.name.as_bytes()).unwrap(); + /// Ok(()) + /// }); + /// + /// let height = l.random_height(); + /// l.get_or_insert_at_height_with_builders::<(), ()>(1, height, kb, vb) + /// .unwrap(); + /// ``` + pub fn get_or_insert_at_height_with_builders<'a, KE, VE>( + &'a self, + version: Version, + height: Height, + key_builder: KeyBuilder) -> Result<(), KE>>, + value_builder: ValueBuilder) -> Result<(), VE>>, + ) -> Result>, Among> { + self + .0 + .get_or_insert_at_height_with_builders(version, height, key_builder, value_builder, ()) + } + + /// Removes the key-value pair if it exists. + #[inline] + pub fn remove<'a, 'b: 'a>( + &'a self, + version: Version, + key: &'b [u8], + ) -> Result>, Error> { + self.remove_at_height(version, self.random_height(), key) + } + + /// Removes the key-value pair if it exists. + pub fn remove_at_height<'a, 'b: 'a>( + &'a self, + version: Version, + height: Height, + key: &'b [u8], + ) -> Result>, Error> { + self.0.compare_remove_at_height( + version, + height, + key, + (), + Ordering::Relaxed, + Ordering::Relaxed, + ) + } +}