made FilePointer generic
This commit is contained in:
parent
05f61e177e
commit
b6003fab95
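This makes the on-disk pointer type generic: the old `FilePointer(U64)` newtype is renamed to `RawFilePointer`, and `FilePointer<T>` becomes a typed wrapper around it carrying `PhantomData` of the pointee, so `Db::read`/`write`/`modify` can derive the access size and result type from the pointer itself. `Db`, `Snapshot`, `Reader`, `Header` and `TransactionHandle` gain an `R` parameter for the root type, and the general-purpose free list now uses a typed `FreeListBlock` head. A minimal, simplified sketch of the pattern follows (plain `u64` offsets and a hypothetical `byte_range` helper stand in for the crate's zerocopy-backed types; the real definitions are in the src/lib.rs hunks below):

```rust
use std::marker::PhantomData;
use std::mem::size_of;

// Untyped byte offset into the file (the real crate wraps a little-endian U64).
#[derive(Clone, Copy, PartialEq, Eq, Debug)]
struct RawFilePointer(u64);

// Typed pointer: same bytes on disk, pointee type exists only at compile time.
#[derive(Clone, Copy)]
struct FilePointer<T> {
    inner: RawFilePointer,
    _phantom: PhantomData<*const T>,
}

impl<T> FilePointer<T> {
    fn new(inner: RawFilePointer) -> Self {
        Self { inner, _phantom: PhantomData }
    }

    // The pointee type fixes how many bytes a read or write covers,
    // so callers no longer pass an explicit length.
    fn byte_range(self) -> (u64, u64) {
        (self.inner.0, size_of::<T>() as u64)
    }
}

fn main() {
    let p: FilePointer<[u8; 16]> = FilePointer::new(RawFilePointer(4096));
    assert_eq!(p.byte_range(), (4096, 16));
    println!("{:?} spans {} bytes", p.inner, p.byte_range().1);
}
```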
@@ -7,7 +7,7 @@ edition = "2021"
 
 [dependencies]
 memmap = "0.7.0"
-zerocopy = "0.6.1"
+zerocopy = "0.7.0-alpha.5"
 
 [dev-dependencies]
 rand = "0.8.5"
263 src/allocator.rs
@@ -1,8 +1,8 @@
 use std::mem::size_of;
 
-use zerocopy::{AsBytes, FromBytes, Unaligned};
+use zerocopy::{AsBytes, FromBytes, FromZeroes, Unaligned};
 
-use crate::{Db, FilePointer, FileRange, PagePointer, PAGE_SIZE, U16, U32, U64};
+use crate::{Db, FilePointer, FileRange, PagePointer, RawFilePointer, PAGE_SIZE, U16, U32, U64};
 
 #[derive(Clone, Copy, PartialEq, Eq, Debug)]
 enum SlabKind {
@@ -15,7 +15,7 @@ impl SlabKind {
     fn for_size(size: u32) -> Self {
         if size == 1 {
             Self::SingleBytes
-        } else if size < size_of::<FilePointer>() as u32 {
+        } else if size < size_of::<RawFilePointer>() as u32 {
             Self::RelativeFreeList
         } else if (size as u64) <= PAGE_SIZE / 2 {
             // TODO
@@ -28,84 +28,87 @@ impl SlabKind {
         }
     }
 }
 
 // Best bitmap sizes in bytes for a 4096 Byte slab
 // const BITMAP_SIZE: [u32; 8] = [456, 241, 164, 125, 100, 84, 72, 63];
 
-#[derive(Clone, Copy, FromBytes, AsBytes, Unaligned)]
-#[repr(transparent)]
-pub struct FreeList {
-    head: FilePointer,
-}
-
-impl FreeList {
-    pub fn empty() -> Self {
-        Self {
-            head: FilePointer::null(),
-        }
-    }
-}
-
-#[derive(Clone, Copy, FromBytes, AsBytes, Unaligned)]
+#[derive(Clone, Copy, FromBytes, FromZeroes, AsBytes, Unaligned)]
 #[repr(C)]
 pub struct AllocatorState {
-    pub general: FilePointer,
+    pub general: RawFilePointer,
     pub slabs: SlabListPointer,
 }
 
-#[derive(Clone, Copy, FromBytes, AsBytes, Unaligned)]
+#[derive(Clone, Copy, FromBytes, FromZeroes, AsBytes, Unaligned)]
 #[repr(transparent)]
 pub struct GeneralPurposeAllocator {
-    pub head_ptr: FilePointer,
+    pub head_ptr: FilePointer<FreeListBlock>,
 }
 
+#[derive(Clone, Copy, FromBytes, FromZeroes, AsBytes, Unaligned)]
+#[repr(C)]
+struct FreeListBlock {
+    next: FilePointer<FreeListBlock>,
+    size: u8,
+}
+
 impl GeneralPurposeAllocator {
     const SIZE_MASK: u8 = 0b1000_0000;
-    const MIN_ALLOCATION_SIZE: u64 = size_of::<FilePointer>() as u64 + 1;
+    const MIN_ALLOCATION_SIZE: u64 = size_of::<RawFilePointer>() as u64 + 1;
 
-    fn size_ptr(ptr: FilePointer) -> FilePointer {
-        ptr + size_of::<FilePointer>() as u64
+    pub(crate) fn next_ptr(
+        ptr: FilePointer<FreeListBlock>,
+    ) -> FilePointer<FilePointer<FreeListBlock>> {
+        FilePointer::new(ptr.into_raw())
     }
 
-    pub fn size(db: &Db, head: FilePointer) -> u64 {
-        let first_byte: u8 = unsafe { db.read(Self::size_ptr(head)) };
+    fn first_byte_ptr(ptr: FilePointer<FreeListBlock>) -> FilePointer<u8> {
+        FilePointer::new(ptr.into_raw() + size_of::<RawFilePointer>() as u64)
+    }
+
+    fn size_ptr(ptr: FilePointer<FreeListBlock>) -> FilePointer<U64> {
+        FilePointer::new(ptr.into_raw() + size_of::<FreeListBlock>() as u64)
+    }
+
+    pub fn size<R>(db: &Db<R>, head: FilePointer<FreeListBlock>) -> u64 {
+        let first_byte: u8 = unsafe { db.read(Self::first_byte_ptr(head)) };
 
         let size = if first_byte & Self::SIZE_MASK == 0 {
             // small size (can fit in 7bits)
             first_byte as u64
         } else {
             // large size
-            unsafe { db.read::<U64>(Self::size_ptr(head) + 1) }.get()
+            unsafe { db.read::<U64>(Self::size_ptr(head)) }.get()
         };
 
         Self::MIN_ALLOCATION_SIZE + size
     }
 
-    fn set_size(db: &mut Db, head: FilePointer, size: u64) {
+    fn set_size<R>(db: &mut Db<R>, head: FilePointer<FreeListBlock>, size: u64) {
         assert!(size >= Self::MIN_ALLOCATION_SIZE);
         let size = size - Self::MIN_ALLOCATION_SIZE;
         if size <= (u8::MAX & !Self::SIZE_MASK) as u64 {
             // small size (can fit in 7bits)
             debug_assert_eq!(size as u8 & Self::SIZE_MASK, 0);
-            unsafe { db.write(Self::size_ptr(head), size as u8) };
+            unsafe { db.write(Self::first_byte_ptr(head), size as u8) };
         } else {
             unsafe {
-                db.write(Self::size_ptr(head), Self::SIZE_MASK);
-                db.write::<U64>(Self::size_ptr(head) + 1, size.into());
+                db.write(Self::first_byte_ptr(head), Self::SIZE_MASK);
+                db.write::<U64>(Self::size_ptr(head), size.into());
             }
         }
     }
 
-    fn clear(db: &mut Db, ptr: FilePointer) {
+    fn clear<R>(db: &mut Db<R>, ptr: FilePointer<FreeListBlock>) -> RawFilePointer {
         unsafe {
-            db.write(ptr, FilePointer::null());
-            let first_byte: u8 = db.read(Self::size_ptr(ptr));
+            db.write(Self::next_ptr(ptr), FilePointer::null());
+            let first_byte: u8 = db.read(Self::first_byte_ptr(ptr));
 
-            if first_byte & Self::SIZE_MASK == 0 {
-                db.write(Self::size_ptr(ptr), 0u8);
-            } else {
+            // clear first size byte
+            db.write(Self::first_byte_ptr(ptr), 0u8);
+            if first_byte & Self::SIZE_MASK != 0 {
                 // larger block. clear full size field
                 db.write(Self::size_ptr(ptr), U64::from(0));
             }
-        }
+
+            ptr.into_raw()
         }
 
     fn can_allocate_into(needed_size: u64, actual_size: u64) -> bool {
@@ -132,28 +135,23 @@ impl GeneralPurposeAllocator {
         n_pages
     }
 
-    pub fn allocate(self, db: &mut Db, expected_size: u64) -> FileRange {
-        // println!("allocate({expected_size}) {{\x1b[33m");
+    pub fn allocate<R>(self, db: &mut Db<R>, expected_size: u64) -> FileRange {
         // we need space to store the free list entry
         let needed_size = expected_size.max(Self::MIN_ALLOCATION_SIZE);
 
-        // dbg!(expected_size, needed_size);
-
-        // dbg!(needed_size);
-
         let head = self.head_ptr;
 
         // if the first element is replaced update the head pointer
-        let mut prevprev = FilePointer::null();
+        let mut prevprev = FilePointer::<FreeListBlock>::null();
         let mut prev = head;
-        let mut next: FilePointer = unsafe { db.read(head) };
+        let mut next: FilePointer<FreeListBlock> = unsafe { db.read(Self::next_ptr(head)) };
 
         let empty_list = next.is_null();
 
         while !next.is_null() && !Self::can_allocate_into(needed_size, Self::size(db, next)) {
             prevprev = prev;
             prev = next;
-            next = unsafe { db.read(next) };
+            next = unsafe { db.read(next) }.next;
         }
 
         // dbg!(next, Self::size(db, next));
@@ -162,17 +160,17 @@
         let (prev, start, prev_free) = if !empty_list {
             let prevlen = Self::size(db, prev);
 
-            if prev + prevlen == db.end_of_file() {
+            if prev.into_raw() + prevlen == db.end_of_file() {
                 // println!("free block at end of file {prev:?}");
 
                 Self::clear(db, prev);
 
                 (prevprev, prev, prevlen)
             } else {
-                (prev, db.end_of_file(), 0)
+                (prev, FilePointer::new(db.end_of_file()), 0)
             }
         } else {
-            (prev, db.end_of_file(), 0)
+            (prev, FilePointer::new(db.end_of_file()), 0)
         };
 
         // dbg!(prev, start, prev_free);
@@ -192,85 +190,82 @@
             // dbg!(n_pages, page_start);
 
             if prev_free == 0 {
-                assert_eq!(page_start, start);
+                assert_eq!(page_start, start.into_raw());
             }
 
             let free_space = prev_free + PAGE_SIZE * n_pages;
 
             let extra_space = free_space - needed_size;
             if extra_space != 0 {
-                let remainder = start + needed_size;
+                let remainder = FilePointer::<FreeListBlock>::new(start.into_raw() + needed_size);
                 Self::set_size(db, remainder, extra_space);
                 // prev must be the current tail of the free list and the newly allocated space, being at the end of the file
                 // must be the last element of the free list to keep it sorted.
-                unsafe { db.write(prev, remainder) };
+                unsafe { db.write(Self::next_ptr(prev), remainder) };
             } else {
-                unsafe { db.write(prev, FilePointer::null()) };
+                unsafe { db.write(Self::next_ptr(prev), FilePointer::<FreeListBlock>::null()) };
             }
 
             start
         } else {
             let start = next;
 
-            let nextnext = unsafe { db.read::<FilePointer>(start) };
+            let nextnext = unsafe { db.read(Self::next_ptr(start)) };
 
             let extra_space = Self::size(db, start) - needed_size;
 
             // dbg!(prev, nextnext, extra_space);
 
             Self::clear(db, start);
 
             if extra_space != 0 {
-                let remainder = start + needed_size;
+                let remainder = FilePointer::<FreeListBlock>::new(start.into_raw() + needed_size);
 
                 // dbg!(remainder);
 
                 Self::set_size(db, remainder, extra_space);
 
                 unsafe {
-                    db.write(prev, remainder);
-                    db.write(remainder, nextnext);
+                    db.write(Self::next_ptr(prev), remainder);
+                    db.write(Self::next_ptr(remainder), nextnext);
                 }
 
                 // println!("{:x?}", unsafe { db.read::<[u8; 9 + 8]>(remainder) });
             } else {
-                unsafe { db.write(prev, nextnext) };
+                unsafe { db.write(Self::next_ptr(prev), nextnext) };
             }
 
             start
         };
 
-        Self::clear(db, start);
+        let start = Self::clear(db, start);
 
-        let res = start.range(expected_size);
-
-        // println!("\x1b[m}} -> {res:?}");
-
-        res
+        start.range(expected_size)
     }
 
-    pub fn free(self, db: &mut Db, mut range: FileRange) {
+    pub fn free<R>(self, db: &mut Db<R>, range: FileRange) {
         // println!("free({range:?})");
 
         let mut size = range.len().max(Self::MIN_ALLOCATION_SIZE);
+        let mut start = FilePointer::<FreeListBlock>::new(range.start);
 
+        let range = ();
+
         let head = self.head_ptr;
 
         let mut prevprev = FilePointer::null();
         let mut prev = head;
-        let mut next: FilePointer = unsafe { db.read(head) };
+        let mut next = unsafe { db.read(Self::next_ptr(head)) };
 
-        while !next.is_null() && next < range.start {
+        while !next.is_null() && next < start {
             prevprev = prev;
             prev = next;
-            next = unsafe { db.read(next) };
+            next = unsafe { db.read(Self::next_ptr(next)) };
         }
 
-        if range.start + size == next {
+        if start.into_raw() + size == next.into_raw() {
             // we can merge with the next range
 
             let nextlen = Self::size(db, next);
-            let nextnext = unsafe { db.read(next) };
+            let nextnext = unsafe { db.read(Self::next_ptr(next)) };
 
             // println!("merging with next range {:?}", next.range(nextlen));
 
@@ -281,7 +276,7 @@ impl GeneralPurposeAllocator {
         }
 
         // we can't merge with the head pointer
-        if prev != head && prev + Self::size(db, prev) == range.start {
+        if prev != head && prev.into_raw() + Self::size(db, prev) == start.into_raw() {
             // we can merge with the previous range
 
             let prevlen = Self::size(db, prev);
@@ -289,15 +284,15 @@ impl GeneralPurposeAllocator {
 
             Self::clear(db, prev);
 
-            range.start = prev;
+            start = prev;
             prev = prevprev;
             size += prevlen;
         }
 
         unsafe {
-            db.write(prev, range.start);
-            db.write(range.start, next);
-            Self::set_size(db, range.start, size)
+            db.write(Self::next_ptr(prev), start);
+            db.write(Self::next_ptr(start), next);
+            Self::set_size(db, start, size)
         }
     }
 }
@@ -306,7 +301,7 @@ fn div_round_up(a: u64, b: u64) -> u64 {
     (a + b - 1) / b
 }
 
-#[derive(Clone, Copy, FromBytes, AsBytes, Unaligned, Debug)]
+#[derive(Clone, Copy, FromBytes, FromZeroes, AsBytes, Unaligned, Debug)]
 #[repr(C)]
 pub struct SlabListHeader {
     next: SlabListPointer,
@@ -314,17 +309,17 @@ pub struct SlabListHeader {
     size: U32,
 }
 
-#[derive(Clone, Copy, FromBytes, AsBytes, Unaligned, Debug)]
+#[derive(Clone, Copy, FromBytes, FromZeroes, AsBytes, Unaligned, Debug)]
 #[repr(transparent)]
-pub struct SlabListPointer(pub FilePointer);
+pub struct SlabListPointer(pub FilePointer<SlabListHeader>);
 
-pub struct SlabListIterator<'db> {
+pub struct SlabListIterator<'db, R> {
     position: u32,
-    db: &'db Db,
+    db: &'db Db<R>,
     ptr: SlabListPointer,
 }
 
-impl<'db> Iterator for SlabListIterator<'db> {
+impl<'db, R> Iterator for SlabListIterator<'db, R> {
     type Item = SlabPointer;
 
     fn next(&mut self) -> Option<Self::Item> {
@@ -347,29 +342,29 @@ impl SlabListHeader {
 }
 
 impl SlabListPointer {
-    pub fn next(self, db: &Db) -> Option<SlabListPointer> {
+    pub fn next<R>(self, db: &Db<R>) -> Option<SlabListPointer> {
         let ptr: SlabListPointer = self.read_header(db).next;
 
         (!ptr.0.is_null()).then_some(ptr)
     }
 
-    fn read_header(self, db: &Db) -> SlabListHeader {
+    fn read_header<R>(self, db: &Db<R>) -> SlabListHeader {
         unsafe { db.read(self.0) }
     }
 
-    fn modify_header(self, db: &mut Db) -> &mut SlabListHeader {
+    fn modify_header<R>(self, db: &mut Db<R>) -> &mut SlabListHeader {
         unsafe { db.modify(self.0) }
     }
 
-    pub fn set_next(self, db: &mut Db, next: SlabListPointer) {
+    pub fn set_next<R>(self, db: &mut Db<R>, next: SlabListPointer) {
         self.modify_header(db).next = next;
     }
 
-    pub fn set_len(self, db: &mut Db, len: u32) {
+    pub fn set_len<R>(self, db: &mut Db<R>, len: u32) {
         self.modify_header(db).len = U32::from(len);
     }
 
-    pub fn init(self, db: &mut Db, size: u32) {
+    pub fn init<R>(self, db: &mut Db<R>, size: u32) {
         *self.modify_header(db) = SlabListHeader {
             next: SlabListPointer(FilePointer::null()),
             size: size.into(),
@@ -377,23 +372,27 @@ impl SlabListPointer {
         };
     }
 
-    pub fn ptr(self, db: &Db, i: u32) -> Option<FilePointer> {
+    pub fn element_ptr<R>(self, db: &Db<R>, i: u32) -> Option<FilePointer<Slab>> {
         let this = self.read_header(db);
         (i < this.len.get()).then(|| {
-            self.0 + size_of::<SlabListHeader>() as u64 + i as u64 * size_of::<Slab>() as u64
+            FilePointer::new(
+                self.0.into_raw()
+                    + size_of::<SlabListHeader>() as u64
+                    + i as u64 * size_of::<Slab>() as u64,
+            )
         })
     }
 
-    pub fn write(self, db: &mut Db, i: u32, value: Slab) {
-        let ptr = self.ptr(db, i).unwrap();
+    pub fn write_element<R>(self, db: &mut Db<R>, i: u32, value: Slab) {
+        let ptr = self.element_ptr(db, i).unwrap();
         unsafe { db.write(ptr, value) };
     }
 
-    pub fn get(self, db: &Db, i: u32) -> Option<SlabPointer> {
-        self.ptr(db, i).map(SlabPointer)
+    pub fn get<R>(self, db: &Db<R>, i: u32) -> Option<SlabPointer> {
+        self.element_ptr(db, i).map(SlabPointer)
     }
 
-    pub fn iter(self, db: &Db) -> SlabListIterator {
+    pub fn iter<R>(self, db: &Db<R>) -> SlabListIterator<R> {
         SlabListIterator {
             position: 0,
             db,
@@ -401,7 +400,7 @@ impl SlabListPointer {
         }
     }
 
-    pub fn add_slab(self, db: &mut Db, slab_size: u32) -> SlabPointer {
+    pub fn add_slab<R>(self, db: &mut Db<R>, slab_size: u32) -> SlabPointer {
         println!("add_slab({slab_size})");
 
         let this = self.read_header(db);
@@ -413,7 +412,7 @@ impl SlabListPointer {
 
         if len.get() >= capacity {
             if next.0.is_null() {
-                next = SlabListPointer(db.add_pages(1).start());
+                next = SlabListPointer(FilePointer::new(db.add_pages(1).start()));
                 next.init(db, PAGE_SIZE as u32);
                 self.set_next(db, next);
             }
@@ -423,27 +422,27 @@ impl SlabListPointer {
 
         let len = len.get();
         self.set_len(db, len + 1);
-        self.write(
+        self.write_element(
             db,
             len,
             Slab {
-                head: FilePointer::null(),
+                head: RawFilePointer::null(),
                 size: slab_size.into(),
             },
         );
 
-        SlabPointer(self.ptr(db, len).unwrap())
+        SlabPointer(self.element_ptr(db, len).unwrap())
     }
 }
 
-#[derive(Clone, Copy, FromBytes, AsBytes, Unaligned)]
+#[derive(Clone, Copy, FromBytes, FromZeroes, AsBytes, Unaligned)]
 #[repr(C)]
 pub struct Slab {
-    head: FilePointer,
+    head: RawFilePointer,
     size: U32,
 }
 
-#[derive(Clone, Copy, FromBytes, AsBytes, Unaligned)]
+#[derive(Clone, Copy, FromBytes, FromZeroes, AsBytes, Unaligned)]
 #[repr(C)]
 struct RelativeFreeListHeader {
     next_page: PagePointer,
@@ -459,24 +458,24 @@ impl RelativeFreeListHeader {
     }
 }
 
-#[derive(Clone, Copy, FromBytes, AsBytes, Unaligned)]
+#[derive(Clone, Copy, FromBytes, FromZeroes, AsBytes, Unaligned)]
 #[repr(transparent)]
-pub struct SlabPointer(FilePointer);
+pub struct SlabPointer(FilePointer<Slab>);
 
 impl SlabPointer {
-    fn read(&self, db: &Db) -> Slab {
+    fn read<R>(&self, db: &Db<R>) -> Slab {
         unsafe { db.read(self.0) }
     }
 
-    fn modify<'db>(&self, db: &'db mut Db) -> &'db mut Slab {
+    fn modify<'db, R>(&self, db: &'db mut Db<R>) -> &'db mut Slab {
         unsafe { db.modify(self.0) }
     }
 
-    pub fn size(&self, db: &Db) -> u32 {
+    pub fn size<R>(&self, db: &Db<R>) -> u32 {
         self.read(db).size.get()
     }
 
-    pub fn alloc(&self, db: &mut Db) -> FileRange {
+    pub fn alloc<R>(&self, db: &mut Db<R>) -> FileRange {
         let Slab { mut head, size } = self.read(db);
 
         if head.is_null() {
@@ -489,18 +488,22 @@ impl SlabPointer {
             SlabKind::SingleBytes => todo!("single byte slabs"),
             SlabKind::RelativeFreeList => {
                 let (page, offset) = head.page_offset();
+                let start = FilePointer::<RelativeFreeListHeader>::new(page.start());
                 assert_eq!(offset, 0);
 
-                let RelativeFreeListHeader { first, .. } = unsafe { db.read(page.start()) };
+                let RelativeFreeListHeader { first, .. } = unsafe { db.read(start) };
 
                 // the page should never be full if its in the free list
                 assert_ne!(first.get(), 0);
 
-                let ptr = FilePointer::from_page_offset(page, first.get());
+                let ptr = FilePointer::<U16>::new(RawFilePointer::from_page_and_offset(
+                    page,
+                    first.get(),
+                ));
 
                 let next: U16 = unsafe { db.read(ptr) };
 
-                let header = unsafe { db.modify::<RelativeFreeListHeader>(page.start()) };
+                let header = unsafe { db.modify::<RelativeFreeListHeader>(start) };
 
                 header.first = next;
 
@@ -511,10 +514,10 @@ impl SlabPointer {
                     self.modify(db).head = next_page.start();
                 }
 
-                ptr
+                ptr.into_raw()
             }
             SlabKind::AbsoluteFreeList => {
-                let next = unsafe { db.read(head) };
+                let next = unsafe { db.read(FilePointer::<RawFilePointer>::new(head)) };
                 self.set_head(db, next);
                 head
             }
@@ -522,7 +525,7 @@ impl SlabPointer {
         .range(size as u64)
     }
 
-    pub fn free(&self, db: &mut Db, range: FileRange) {
+    pub fn free<R>(&self, db: &mut Db<R>, range: FileRange) {
         let Slab { head, size } = self.read(db);
 
         assert_eq!(range.len(), size.get() as u64);
@@ -533,13 +536,14 @@ impl SlabPointer {
             SlabKind::SingleBytes => todo!("single byte slabs"),
             SlabKind::RelativeFreeList => {
                 let (page, offset) = range.start.page_offset();
+                let start = FilePointer::<RelativeFreeListHeader>::new(page.start());
 
-                let RelativeFreeListHeader { first, .. } = unsafe { db.read(page.start()) };
+                let RelativeFreeListHeader { first, .. } = unsafe { db.read(start) };
 
                 // update next pointer of new element in free list
-                unsafe { db.write(range.start, first) };
+                unsafe { db.write(FilePointer::<U16>::new(range.start), first) };
 
-                let header = unsafe { db.modify::<RelativeFreeListHeader>(page.start()) };
+                let header = unsafe { db.modify::<RelativeFreeListHeader>(start) };
 
                 // point to new element
                 header.first = offset.into();
@@ -556,17 +560,17 @@ impl SlabPointer {
                 }
             }
             SlabKind::AbsoluteFreeList => {
-                unsafe { db.write(range.start, head) };
+                unsafe { db.write(FilePointer::<RawFilePointer>::new(range.start), head) };
                 self.set_head(db, range.start);
             }
         }
     }
 
-    pub fn set_head(&self, db: &mut Db, next: FilePointer) {
+    pub fn set_head<R>(&self, db: &mut Db<R>, next: RawFilePointer) {
         self.modify(db).head = next;
     }
 
-    pub fn allocate_page(&self, db: &mut Db) -> FilePointer {
+    pub fn allocate_page<R>(&self, db: &mut Db<R>) -> RawFilePointer {
         let Slab { head, size } = self.read(db);
 
         println!("allocate_slab_page({size})");
@@ -587,7 +591,7 @@ impl SlabPointer {
 
         unsafe {
             db.write(
-                page.start(),
+                FilePointer::new(page.start()),
                 RelativeFreeListHeader {
                     next_page,
                     first: data_offset.into(),
@@ -600,7 +604,10 @@ impl SlabPointer {
             let next = data_offset + (i * size) as u16;
 
             unsafe {
-                db.write(FilePointer::from_page_offset(page, next), U16::from(offset))
+                db.write(
+                    FilePointer::new(RawFilePointer::from_page_and_offset(page, next)),
+                    U16::from(offset),
+                )
             };
 
             offset = next;
@@ -618,7 +625,7 @@ impl SlabPointer {
         let mut next = head;
         for i in (0..n).rev() {
             let current = page.start() + i * size as u64;
-            unsafe { db.write(current, next) };
+            unsafe { db.write(FilePointer::<RawFilePointer>::new(current), next) };
             next = current;
         }
 
244 src/lib.rs
@@ -3,6 +3,7 @@ use std::{
     collections::{BTreeMap, VecDeque},
     fmt::Debug,
     fs::File,
+    marker::PhantomData,
     mem::size_of,
     ops::Range,
     sync::Arc,
@@ -17,7 +18,7 @@ use allocator::{AllocatorState, GeneralPurposeAllocator, SlabListPointer, SlabPo
 use atomic_arc::AtomicArc;
 use memmap::{Mmap, MmapMut};
 use transaction::TransactionHandle;
-use zerocopy::{AsBytes, FromBytes, LayoutVerified, Unaligned, LE};
+use zerocopy::{AsBytes, FromBytes, FromZeroes, Ref, Unaligned, LE};
 
 const PAGE_SIZE: u64 = 4096;
 
@@ -25,36 +26,101 @@ type U64 = zerocopy::byteorder::U64<LE>;
 type U32 = zerocopy::byteorder::U32<LE>;
 type U16 = zerocopy::byteorder::U16<LE>;
 
-#[derive(Clone, Copy, FromBytes, AsBytes, Unaligned, Hash, PartialEq, Eq)]
+#[derive(Clone, Copy, FromBytes, FromZeroes, AsBytes, Unaligned, Hash)]
 #[repr(transparent)]
-pub struct FilePointer(U64);
+pub struct FilePointer<T> {
+    inner: RawFilePointer,
+    _phantom: PhantomData<*const T>,
+}
 
-impl PartialOrd for FilePointer {
+impl<T> Debug for FilePointer<T> {
+    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+        self.inner.fmt(f)
+    }
+}
+
+impl<T> PartialOrd for FilePointer<T> {
+    fn partial_cmp(&self, other: &Self) -> Option<std::cmp::Ordering> {
+        self.inner.partial_cmp(&other.inner)
+    }
+}
+
+impl<T> Ord for FilePointer<T> {
+    fn cmp(&self, other: &Self) -> std::cmp::Ordering {
+        self.inner.cmp(&other.inner)
+    }
+}
+
+impl<T> PartialEq for FilePointer<T> {
+    fn eq(&self, other: &Self) -> bool {
+        self.inner == other.inner
+    }
+}
+
+impl<T> Eq for FilePointer<T> {}
+
+impl<T> FilePointer<T> {
+    fn from_range(range: FileRange) -> Self {
+        assert_eq!(range.len(), size_of::<T>() as u64);
+        Self::new(range.start)
+    }
+}
+
+impl<T> FilePointer<T> {
+    pub fn new(inner: RawFilePointer) -> Self {
+        Self {
+            inner,
+            _phantom: PhantomData,
+        }
+    }
+
+    pub fn null() -> Self {
+        Self::new(RawFilePointer::null())
+    }
+
+    pub fn is_null(self) -> bool {
+        self.inner.is_null()
+    }
+
+    pub fn range(self) -> FileRange {
+        self.inner.range(size_of::<T>() as u64)
+    }
+
+    pub fn into_raw(self) -> RawFilePointer {
+        self.inner
+    }
+}
+
+#[derive(Clone, Copy, FromBytes, FromZeroes, AsBytes, Unaligned, Hash, PartialEq, Eq)]
+#[repr(transparent)]
+pub struct RawFilePointer(U64);
+
+impl PartialOrd for RawFilePointer {
     fn partial_cmp(&self, other: &Self) -> Option<std::cmp::Ordering> {
         self.0.get().partial_cmp(&other.0.get())
     }
 }
 
-impl Ord for FilePointer {
+impl Ord for RawFilePointer {
     fn cmp(&self, other: &Self) -> std::cmp::Ordering {
         self.0.get().cmp(&other.0.get())
     }
 }
 
-impl Debug for FilePointer {
+impl Debug for RawFilePointer {
     fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
         write!(f, "0x{:x}", self.0.get())
     }
 }
 
-impl FilePointer {
+impl RawFilePointer {
     fn page(self) -> PagePointer {
         PagePointer(u32::try_from(self.0.get() / PAGE_SIZE).unwrap().into())
     }
     fn page_offset(self) -> (PagePointer, u16) {
         (self.page(), (self.0.get() % PAGE_SIZE) as u16)
     }
-    fn from_page_offset(page: PagePointer, offset: u16) -> Self {
+    fn from_page_and_offset(page: PagePointer, offset: u16) -> Self {
         debug_assert!(
             offset < PAGE_SIZE as u16,
             "offset 0x{offset:x} out for page bounds (0..0x{PAGE_SIZE:x})"
@@ -69,13 +135,13 @@ impl FilePointer {
     }
 }
 
-#[derive(Clone, Copy, FromBytes, AsBytes, Unaligned, Debug, PartialEq, Eq)]
+#[derive(Clone, Copy, FromBytes, FromZeroes, AsBytes, Unaligned, Debug, PartialEq, Eq)]
 #[repr(transparent)]
 pub struct PagePointer(U32);
 
 impl PagePointer {
-    fn start(self) -> FilePointer {
-        FilePointer((self.0.get() as u64 * PAGE_SIZE).into())
+    fn start(self) -> RawFilePointer {
+        RawFilePointer((self.0.get() as u64 * PAGE_SIZE).into())
     }
     fn range(self) -> FileRange {
         self.start().range(PAGE_SIZE)
@@ -88,10 +154,10 @@ impl PagePointer {
     }
 }
 
-#[derive(Clone, Copy, FromBytes, AsBytes, Unaligned, PartialEq, Eq)]
+#[derive(Clone, Copy, FromBytes, FromZeroes, AsBytes, Unaligned, PartialEq, Eq)]
 #[repr(C)]
 pub struct FileRange {
-    start: FilePointer,
+    start: RawFilePointer,
     len: U64,
 }
 
@@ -101,7 +167,7 @@ impl Debug for FileRange {
     }
 }
 
-impl std::ops::Add<u64> for FilePointer {
+impl std::ops::Add<u64> for RawFilePointer {
     type Output = Self;
 
     fn add(self, rhs: u64) -> Self::Output {
@@ -109,7 +175,7 @@ impl std::ops::Add<u64> for FilePointer {
     }
 }
 
-impl FilePointer {
+impl RawFilePointer {
     pub fn range(&self, len: u64) -> FileRange {
         FileRange {
             start: *self,
@@ -128,39 +194,41 @@ impl FileRange {
     }
 }
 
-#[derive(Clone, Copy, FromBytes, AsBytes, Unaligned)]
+#[derive(Clone, Copy, FromBytes, FromZeroes, AsBytes, Unaligned)]
 #[repr(C)]
-struct Header {
+struct Header<R> {
     magic: [u8; 16],
-    root: FilePointer,
+    root: FilePointer<R>,
     allocator_state: AllocatorState,
 }
 
-impl Default for Header {
+impl<R> Default for Header<R> {
     fn default() -> Self {
         Self {
             magic: *b"cool db format 1",
             root: FilePointer::null(),
             allocator_state: AllocatorState {
-                general: FilePointer::null(),
-                slabs: SlabListPointer(FilePointer::null() + size_of::<Header>() as u64),
+                general: RawFilePointer::null(),
+                slabs: SlabListPointer(FilePointer::new(
+                    RawFilePointer::null() + size_of::<Header<R>>() as u64,
+                )),
             },
         }
     }
 }
 
-struct Snapshot {
-    root: FilePointer,
+struct Snapshot<R> {
+    root: FilePointer<R>,
     map: Mmap,
 }
 
-impl Snapshot {
-    fn read<T: FromBytes>(&self, at: FilePointer) -> &T {
-        self.read_range(at.range(size_of::<T>() as u64))
+impl<R> Snapshot<R> {
+    fn read<T: FromBytes>(&self, at: FilePointer<T>) -> &T {
+        self.read_range(at.range())
     }
 
     fn read_range<T: FromBytes>(&self, range: FileRange) -> &T {
-        LayoutVerified::<_, T>::new(&self.map[range.as_range()])
+        Ref::<_, T>::new(&self.map[range.as_range()])
             .unwrap()
             .into_ref()
     }
@@ -170,32 +238,31 @@ impl Snapshot {
     }
 }
 
-pub struct Reader {
-    state: Arc<AtomicArc<Snapshot>>,
+pub struct Reader<R> {
+    state: Arc<AtomicArc<Snapshot<R>>>,
 }
 
-pub struct Db {
+pub struct Db<R> {
     file: File,
     map: MmapMut,
     slabs: BTreeMap<u32, SlabPointer>,
-    state: Arc<AtomicArc<Snapshot>>,
-    snapshots: VecDeque<SnapshotAndFreeList>,
+    state: Arc<AtomicArc<Snapshot<R>>>,
+    snapshots: VecDeque<SnapshotAndFreeList<R>>,
+    _phantom: PhantomData<R>,
 }
 
-struct SnapshotAndFreeList {
-    snapshot: Arc<Snapshot>,
+struct SnapshotAndFreeList<R> {
+    snapshot: Arc<Snapshot<R>>,
     to_free: Vec<FileRange>,
 }
 
-impl Db {
-    fn header(&self) -> &Header {
-        unsafe {
-            self.reference_range_unchecked(Self::header_ptr().range(size_of::<Header>() as u64))
-        }
+impl<R> Db<R> {
+    fn header(&self) -> &Header<R> {
+        unsafe { self.reference_range_unchecked(Self::header_ptr().range()) }
     }
 
-    fn header_mut(&mut self) -> &mut Header {
-        unsafe { self.modify_range_unchecked(Self::header_ptr().range(size_of::<Header>() as u64)) }
+    fn header_mut(&mut self) -> &mut Header<R> {
+        unsafe { self.modify_range_unchecked(Self::header_ptr().range()) }
     }
 
     // NOTE: only allowed before any data of `size` has been allocated
@@ -214,7 +281,7 @@ impl Db {
             .unwrap_or_else(|| self.add_slab(size))
     }
 
-    fn transaction(&mut self, f: impl FnOnce(&mut TransactionHandle) -> FilePointer) {
+    fn transaction(&mut self, f: impl FnOnce(&mut TransactionHandle<R>) -> FilePointer<R>) {
         let mut handle = TransactionHandle::new(self);
 
         let root = f(&mut handle);
@@ -248,31 +315,31 @@ impl Db {
         self.snapshots = snapshots;
     }
 
-    fn header_ptr() -> FilePointer {
-        FilePointer(0.into())
+    fn header_ptr() -> FilePointer<Header<R>> {
+        FilePointer::new(RawFilePointer(0.into()))
     }
 
-    fn root_ptr() -> FilePointer {
-        Self::header_ptr() + 16
+    fn root_ptr() -> FilePointer<FilePointer<R>> {
+        FilePointer::new(RawFilePointer((size_of::<Header<R>>() as u64).into()))
     }
 
-    fn allocator_state_ptr() -> FilePointer {
-        Self::root_ptr() + size_of::<FilePointer>() as u64
+    fn allocator_state_ptr() -> RawFilePointer {
+        RawFilePointer((size_of::<Header<R>>() as u64 + size_of::<RawFilePointer>() as u64).into())
     }
 
     fn general_purpose_allocator() -> GeneralPurposeAllocator {
         GeneralPurposeAllocator {
-            head_ptr: Self::allocator_state_ptr(),
+            head_ptr: FilePointer::new(Self::allocator_state_ptr()),
         }
     }
 
-    pub fn create_reader(&self) -> Reader {
+    pub fn create_reader(&self) -> Reader<R> {
         Reader {
             state: self.state.clone(),
         }
     }
 
-    fn update_root(&mut self, new_root: FilePointer) -> Arc<Snapshot> {
+    fn update_root(&mut self, new_root: FilePointer<R>) -> Arc<Snapshot<R>> {
         // TODO: we could write some here + flush here for better consistency
         // e.g. a copy of the new root pointer
 
@@ -285,7 +352,10 @@ impl Db {
         }
 
         self.map
-            .flush_range(Self::root_ptr().0.get() as usize, size_of::<FilePointer>())
+            .flush_range(
+                Self::root_ptr().into_raw().0.get() as usize,
+                size_of::<RawFilePointer>(),
+            )
             .unwrap();
 
         // update data that readers see
@@ -303,34 +373,34 @@ impl Db {
     }
 
     #[track_caller]
-    unsafe fn read<T: FromBytes>(&self, at: FilePointer) -> T {
-        self.read_range(at.range(size_of::<T>() as u64))
+    unsafe fn read<T: FromBytes>(&self, at: FilePointer<T>) -> T {
+        self.read_range(at.range())
     }
 
     #[track_caller]
     unsafe fn read_range<T: FromBytes>(&self, range: FileRange) -> T {
         assert!(!range.start.is_null(), "null pointer dereference");
-        LayoutVerified::<_, T>::new(&self.map[range.as_range()])
+        Ref::<_, T>::new(&self.map[range.as_range()])
             .unwrap()
             .read()
     }
 
     #[track_caller]
-    unsafe fn write<T: AsBytes>(&mut self, at: FilePointer, data: T) {
-        self.write_range(at.range(size_of::<T>() as u64), data)
+    unsafe fn write<T: AsBytes>(&mut self, at: FilePointer<T>, data: T) {
+        self.write_range(at.range(), data)
     }
 
     #[track_caller]
     unsafe fn write_range<T: AsBytes>(&mut self, range: FileRange, data: T) {
         assert!(!range.start.is_null(), "null pointer dereference");
-        LayoutVerified::<_, T>::new(&mut self.map[range.as_range()])
+        Ref::<_, T>::new(&mut self.map[range.as_range()])
             .unwrap()
             .write(data)
     }
 
     #[track_caller]
-    unsafe fn modify<T: FromBytes + AsBytes>(&mut self, at: FilePointer) -> &mut T {
-        self.modify_range(at.range(size_of::<T>() as u64))
+    unsafe fn modify<T: FromBytes + AsBytes>(&mut self, at: FilePointer<T>) -> &mut T {
+        self.modify_range(at.range())
     }
 
     #[track_caller]
@@ -343,14 +413,14 @@ impl Db {
         &mut self,
         range: FileRange,
     ) -> &mut T {
-        LayoutVerified::<_, T>::new(&mut self.map[range.as_range()])
+        Ref::<_, T>::new(&mut self.map[range.as_range()])
             .unwrap()
             .into_mut()
     }
 
     #[track_caller]
-    unsafe fn reference<T: FromBytes>(&self, at: FilePointer) -> &T {
-        self.reference_range(at.range(size_of::<T>() as u64))
+    unsafe fn reference<T: FromBytes>(&self, at: FilePointer<T>) -> &T {
+        self.reference_range(at.range())
     }
 
     #[track_caller]
@@ -360,7 +430,7 @@ impl Db {
     }
 
     unsafe fn reference_range_unchecked<T: FromBytes>(&self, range: FileRange) -> &T {
-        LayoutVerified::<_, T>::new(&self.map[range.as_range()])
+        Ref::<_, T>::new(&self.map[range.as_range()])
             .unwrap()
             .into_ref()
     }
@@ -407,6 +477,7 @@ impl Db {
             map,
             slabs: BTreeMap::new(),
             snapshots: VecDeque::new(),
+            _phantom: PhantomData,
         };
 
         unsafe {
@@ -434,6 +505,7 @@ impl Db {
             map,
             slabs: BTreeMap::new(),
             snapshots: VecDeque::new(),
+            _phantom: PhantomData,
         };
 
         let _ = db.state.swap(Arc::new(Snapshot {
@@ -448,7 +520,9 @@ impl Db {
         let allocator_state = self.header().allocator_state;
         allocator_state.slabs.init(
             self,
-            (PAGE_SIZE - size_of::<Header>() as u64).try_into().unwrap(),
+            (PAGE_SIZE - size_of::<Header<R>>() as u64)
+                .try_into()
+                .unwrap(),
         );
 
         for &size in slabs {
@@ -456,8 +530,8 @@ impl Db {
         }
     }
 
-    fn end_of_file(&self) -> FilePointer {
-        FilePointer::null() + self.file.metadata().unwrap().len()
+    fn end_of_file(&self) -> RawFilePointer {
+        RawFilePointer::null() + self.file.metadata().unwrap().len()
    }
 
     fn get_slab(&self, size: u64) -> Option<SlabPointer> {
@@ -472,7 +546,7 @@ impl Db {
         if let Some(slab) = self.get_slab(size) {
             slab.alloc(self)
         } else {
-            Db::general_purpose_allocator().allocate(self, size)
+            Self::general_purpose_allocator().allocate(self, size)
         }
     }
 
@@ -480,7 +554,7 @@ impl Db {
         if let Some(slab) = self.get_slab(range.len()) {
             slab.free(self, range)
         } else {
-            Db::general_purpose_allocator().free(self, range)
+            Self::general_purpose_allocator().free(self, range)
         }
     }
 }
@@ -499,8 +573,8 @@ mod tests {
     }
 
     fn causes_fragmentation(sequence: &[Operation], print: bool) -> bool {
-        let mut db = Db::create(tempfile::tempfile().unwrap(), &[]);
-        let allocator = Db::general_purpose_allocator();
+        let mut db = Db::<()>::create(tempfile::tempfile().unwrap(), &[]);
+        let allocator = Db::<()>::general_purpose_allocator();
 
         let mut ranges = Vec::new();
 
@@ -522,18 +596,18 @@ mod tests {
         fragmentation(&mut db, print) > 1
     }
 
-    fn fragmentation(db: &mut Db, print: bool) -> usize {
-        let allocator = Db::general_purpose_allocator();
+    fn fragmentation<R>(db: &mut Db<R>, print: bool) -> usize {
+        let allocator = Db::<()>::general_purpose_allocator();
 
-        let mut next = unsafe { db.read::<FilePointer>(allocator.head_ptr) };
+        let mut next = unsafe { db.read(GeneralPurposeAllocator::next_ptr(allocator.head_ptr)) };
 
         let mut n = 0;
         while !next.is_null() {
             let size = GeneralPurposeAllocator::size(db, next);
             if print {
-                println!("\x1b[34m[{n}]\x1b[m {:?}", next.range(size));
+                println!("\x1b[34m[{n}]\x1b[m {:?}", next.into_raw().range(size));
             }
-            next = unsafe { db.read::<FilePointer>(next) };
+            next = unsafe { db.read(GeneralPurposeAllocator::next_ptr(next)) };
 
             n += 1;
         }
@@ -653,7 +727,7 @@ mod tests {
 
     #[test]
     fn it_works() {
-        let mut db = Db::create(tempfile::tempfile().unwrap(), &[4, 7, 16]);
+        let mut db = Db::<()>::create(tempfile::tempfile().unwrap(), &[4, 7, 16]);
 
         let mut ranges = Vec::new();
         for i in 0..10000 {
@@ -678,21 +752,21 @@ mod tests {
 
     #[test]
     fn transactions_work() {
-        #[derive(Clone, Copy, FromBytes, AsBytes, Unaligned)]
+        #[derive(Clone, Copy, FromBytes, FromZeroes, AsBytes, Unaligned)]
         #[repr(C)]
         struct DataHeader {
             generation: U64,
-            list: FilePointer,
+            list: FilePointer<DataList>,
         }
 
-        #[derive(Clone, Copy, FromBytes, AsBytes, Unaligned)]
+        #[derive(Clone, Copy, FromBytes, FromZeroes, AsBytes, Unaligned)]
         #[repr(C)]
         struct DataList {
-            next: FilePointer,
+            next: FilePointer<DataList>,
             data: U64,
         }
 
-        let mut db = Db::create(
+        let mut db = Db::<DataHeader>::create(
             tempfile::tempfile().unwrap(),
             &[size_of::<DataHeader>() as u32, size_of::<DataList>() as u32],
         );
@@ -711,7 +785,7 @@ mod tests {
                 list: FilePointer::null(),
             };
 
-            root.start
+            root
         } else {
             root
         };
@@ -742,10 +816,10 @@ mod tests {
                 element.data = i.into();
 
                 let (root, data) = transaction.modify::<DataHeader>(root);
-                data.list = elem_ptr.start;
+                data.list = elem_ptr;
                 data.generation = (i + 1).into();
 
-                root.start
+                root
             });
 
             snapshots.push(db.create_reader().state.get());
@@ -1,13 +1,13 @@
 use std::mem::size_of;
 
-use zerocopy::{FromBytes, LayoutVerified};
+use zerocopy::{FromBytes, FromZeroes, Ref};
 
-use crate::{FilePointer, FileRange};
+use crate::{FilePointer, FileRange, RawFilePointer};
 
 pub trait ReaderTrait {
     fn read_raw(&self, ptr: FileRange) -> &[u8];
-    fn read<T: FromBytes>(&self, ptr: FilePointer) -> &T {
-        LayoutVerified::<_, T>::new(self.read_raw(ptr.range(size_of::<T>() as u64)))
+    fn read<T: FromBytes>(&self, ptr: FilePointer<T>) -> &T {
+        Ref::<_, T>::new(self.read_raw(ptr.range()))
             .unwrap()
             .into_ref()
     }
 
@@ -2,7 +2,7 @@ use std::{cell::RefCell, collections::HashMap, mem::size_of};
 
 use zerocopy::{AsBytes, FromBytes};
 
-use crate::{mapped::ReaderTrait, Db, FilePointer, FileRange};
+use crate::{mapped::ReaderTrait, Db, FilePointer, FileRange, RawFilePointer};
 
 #[derive(Clone, Copy)]
 struct Replaced {
@@ -10,19 +10,19 @@ struct Replaced {
     to: Option<FileRange>,
 }
 
-pub struct TransactionHandle<'t> {
-    db: &'t mut Db,
-    replaced: HashMap<FilePointer, Replaced>,
-    new: HashMap<FilePointer, FileRange>,
+pub struct TransactionHandle<'t, R> {
+    db: &'t mut Db<R>,
+    replaced: HashMap<RawFilePointer, Replaced>,
+    new: HashMap<RawFilePointer, FileRange>,
 }
 
-impl<'t> ReaderTrait for TransactionHandle<'t> {
+impl<'t, R> ReaderTrait for TransactionHandle<'t, R> {
     fn read_raw(&self, ptr: FileRange) -> &[u8] {
         self.reference_raw(ptr)
     }
 }
-impl<'t> TransactionHandle<'t> {
-    pub fn new(db: &'t mut Db) -> Self {
+impl<'t, R> TransactionHandle<'t, R> {
+    pub fn new(db: &'t mut Db<R>) -> Self {
         Self {
             db,
             replaced: HashMap::new(),
@@ -37,7 +37,15 @@ impl<'t> TransactionHandle<'t> {
         .collect()
     }
 
-    pub fn read_ptr(&self, range: FileRange) -> FileRange {
+    fn read_ptr<T>(&self, src: FilePointer<T>) -> FilePointer<T> {
+        FilePointer::from_range(self.read_ptr_raw(src.range()))
+    }
+
+    unsafe fn write_ptr<T>(&mut self, src: FilePointer<T>) -> FilePointer<T> {
+        FilePointer::from_range(self.write_ptr_raw(src.range()))
+    }
+
+    fn read_ptr_raw(&self, range: FileRange) -> FileRange {
         if let Some(&replaced) = self.replaced.get(&range.start) {
             assert_eq!(replaced.from, range);
             replaced.to.expect("use after free")
@@ -49,7 +57,7 @@ impl<'t> TransactionHandle<'t> {
         }
     }
 
-    pub unsafe fn write_ptr(&mut self, range: FileRange) -> FileRange {
+    unsafe fn write_ptr_raw(&mut self, range: FileRange) -> FileRange {
         let new_range = if let Some(&replaced) = self.replaced.get(&range.start) {
             assert_eq!(replaced.from, range);
             replaced.to
@@ -63,7 +71,7 @@ impl<'t> TransactionHandle<'t> {
         if let Some(range) = new_range {
             range
         } else {
-            let (new, _) = self.allocate_raw(range.len());
+            let (new, _) = self.allocate_range(range.len());
 
             self.db.copy(range, new);
 
@@ -84,26 +92,15 @@ impl<'t> TransactionHandle<'t> {
     }
 
     pub fn reference_raw(&self, range: FileRange) -> &[u8] {
-        let range = self.read_ptr(range);
+        let range = self.read_ptr_raw(range);
         &self.db.map[range.as_range()]
     }
 
     pub unsafe fn modify_raw(&mut self, range: FileRange) -> (FileRange, &mut [u8]) {
-        let range = self.write_ptr(range);
+        let range = self.write_ptr_raw(range);
         (range, &mut self.db.map[range.as_range()])
     }
 
-    pub fn allocate_raw(&mut self, length: u64) -> (FileRange, &mut [u8]) {
-        unsafe {
-            let range = self.allocate_range(length);
-
-            let res = self.new.insert(range.start, range);
-            debug_assert!(res.is_none());
-
-            (range, &mut self.db.map[range.as_range()])
-        }
-    }
-
     pub fn modify_range<T: FromBytes + AsBytes>(
         &mut self,
         range: FileRange,
@@ -114,25 +111,33 @@ impl<'t> TransactionHandle<'t> {
         }
     }
 
-    pub fn modify<T: FromBytes + AsBytes>(&mut self, at: FilePointer) -> (FileRange, &mut T) {
-        self.modify_range(at.range(size_of::<T>() as u64))
+    pub fn modify<T: FromBytes + AsBytes>(
+        &mut self,
+        at: FilePointer<T>,
+    ) -> (FilePointer<T>, &mut T) {
+        let (range, data) = self.modify_range(at.range());
+        (FilePointer::from_range(range), data)
     }
 
-    pub fn allocate_size<T: FromBytes + AsBytes>(&mut self, length: u64) -> (FileRange, &mut T) {
+    pub fn allocate_range(&mut self, length: u64) -> (FileRange, &mut [u8]) {
         unsafe {
-            let (ptr, _) = self.allocate_raw(length);
-            (ptr, self.db.modify_range(ptr))
+            let range = self.db.allocate(length);
+
+            let res = self.new.insert(range.start, range);
+            debug_assert!(res.is_none());
+
+            (range, &mut self.db.map[range.as_range()])
         }
     }
 
-    pub fn allocate<T: FromBytes + AsBytes>(&mut self) -> (FileRange, &mut T) {
+    pub fn allocate<T: FromBytes + AsBytes>(&mut self) -> (FilePointer<T>, &mut T) {
         unsafe {
-            let (ptr, _) = self.allocate_raw(size_of::<T>() as u64);
-            (ptr, self.db.modify_range(ptr))
+            let (range, _) = self.allocate_range(size_of::<T>() as u64);
+            (FilePointer::from_range(range), self.db.modify_range(range))
         }
     }
 
-    pub fn free<T>(&mut self, at: FilePointer) {
+    pub fn free<T>(&mut self, at: RawFilePointer) {
         self.free_range(at.range(size_of::<T>() as u64))
     }
 
@@ -164,11 +169,7 @@ impl<'t> TransactionHandle<'t> {
         }
     }
 
-    fn allocate_range(&mut self, size: u64) -> FileRange {
-        self.db.allocate(size)
-    }
-
-    pub fn root(&self) -> FilePointer {
+    pub fn root(&self) -> FilePointer<R> {
         self.db.header().root
     }
 }