Remove unnecessary PhantomData
This commit is contained in:
parent
82300039fc
commit
b64819f28b
3 changed files with 21 additions and 14 deletions
|
|
@ -2,7 +2,7 @@ use tokio::io::{AsyncReadExt, AsyncWriteExt, AsyncSeekExt, SeekFrom};
|
||||||
use tokio::fs::{File, OpenOptions};
|
use tokio::fs::{File, OpenOptions};
|
||||||
use std::path::Path;
|
use std::path::Path;
|
||||||
use std::marker::PhantomData;
|
use std::marker::PhantomData;
|
||||||
use crate::error::{Error, DecodeErrorKind};
|
use std::collections::{BTreeMap, HashSet};
|
||||||
|
|
||||||
use async_trait::async_trait;
|
use async_trait::async_trait;
|
||||||
|
|
||||||
|
|
@ -10,6 +10,7 @@ use bincode;
|
||||||
use bincode::{Decode, Encode};
|
use bincode::{Decode, Encode};
|
||||||
use crate::binary_coding::{encode, decode};
|
use crate::binary_coding::{encode, decode};
|
||||||
|
|
||||||
|
use crate::error::{Error, DecodeErrorKind};
|
||||||
use crate::entry::{Entry, EntryDetailed};
|
use crate::entry::{Entry, EntryDetailed};
|
||||||
use crate::entry_header::{EntryHeaderWithDataSize, EntryHeader};
|
use crate::entry_header::{EntryHeaderWithDataSize, EntryHeader};
|
||||||
use crate::store_header::StoreHeader;
|
use crate::store_header::StoreHeader;
|
||||||
|
|
@ -23,7 +24,6 @@ pub struct ReadCursor<'a, T> {
|
||||||
header: StoreHeader,
|
header: StoreHeader,
|
||||||
indexes: &'a [Option<Index<T, FilePosition>>],
|
indexes: &'a [Option<Index<T, FilePosition>>],
|
||||||
file: File,
|
file: File,
|
||||||
data_type: PhantomData<T>,
|
|
||||||
|
|
||||||
eof_file_position: FilePosition,
|
eof_file_position: FilePosition,
|
||||||
}
|
}
|
||||||
|
|
@ -32,7 +32,6 @@ pub struct WriteCursor<'a, T> {
|
||||||
header: &'a mut StoreHeader,
|
header: &'a mut StoreHeader,
|
||||||
indexes: &'a mut [Option<Index<T, FilePosition>>],
|
indexes: &'a mut [Option<Index<T, FilePosition>>],
|
||||||
file: File,
|
file: File,
|
||||||
data_type: PhantomData<T>,
|
|
||||||
|
|
||||||
eof_file_position: FilePosition,
|
eof_file_position: FilePosition,
|
||||||
}
|
}
|
||||||
|
|
@ -390,7 +389,6 @@ impl <'cursor, T> ReadCursor<'cursor, T> {
|
||||||
let mut cursor = Self {
|
let mut cursor = Self {
|
||||||
header: store.header.clone(),
|
header: store.header.clone(),
|
||||||
file,
|
file,
|
||||||
data_type: store.data_type,
|
|
||||||
indexes: &store.indexes,
|
indexes: &store.indexes,
|
||||||
|
|
||||||
eof_file_position: 0,
|
eof_file_position: 0,
|
||||||
|
|
@ -425,7 +423,6 @@ impl <'cursor, T> WriteCursor<'cursor, T>
|
||||||
let mut cursor = Self {
|
let mut cursor = Self {
|
||||||
header: &mut store.header,
|
header: &mut store.header,
|
||||||
file,
|
file,
|
||||||
data_type: store.data_type,
|
|
||||||
indexes: &mut store.indexes,
|
indexes: &mut store.indexes,
|
||||||
|
|
||||||
eof_file_position: 0,
|
eof_file_position: 0,
|
||||||
|
|
@ -451,7 +448,6 @@ impl <'cursor, T> WriteCursor<'cursor, T>
|
||||||
let mut cursor = Self {
|
let mut cursor = Self {
|
||||||
header,
|
header,
|
||||||
file,
|
file,
|
||||||
data_type: PhantomData::<T>,
|
|
||||||
indexes,
|
indexes,
|
||||||
|
|
||||||
eof_file_position: 0,
|
eof_file_position: 0,
|
||||||
|
|
@ -476,7 +472,7 @@ impl <'cursor, T> WriteCursor<'cursor, T>
|
||||||
|
|
||||||
// ===Deletion===
|
// ===Deletion===
|
||||||
pub async fn mark_deleted_at(&mut self, file_position: FilePosition) -> Result<()>
|
pub async fn mark_deleted_at(&mut self, file_position: FilePosition) -> Result<()>
|
||||||
where T: Send
|
where T: Send + Decode + Encode
|
||||||
{
|
{
|
||||||
self.seek_to(file_position).await?;
|
self.seek_to(file_position).await?;
|
||||||
let mut entry_header = self.read_entry_header().await?;
|
let mut entry_header = self.read_entry_header().await?;
|
||||||
|
|
@ -495,7 +491,7 @@ impl <'cursor, T> WriteCursor<'cursor, T>
|
||||||
}
|
}
|
||||||
|
|
||||||
async fn find_first_eq_bruteforce_and_delete(&mut self, column: Column, t0: &T) -> Result<Option<EntryDetailed<T>>>
|
async fn find_first_eq_bruteforce_and_delete(&mut self, column: Column, t0: &T) -> Result<Option<EntryDetailed<T>>>
|
||||||
where T: Decode + PartialEq + Send + Sync
|
where T: Decode + Encode + PartialEq + Send + Sync
|
||||||
{
|
{
|
||||||
let maybe_entry = self.find_first_eq_bruteforce(column, t0).await?;
|
let maybe_entry = self.find_first_eq_bruteforce(column, t0).await?;
|
||||||
if let Some(entry) = maybe_entry {
|
if let Some(entry) = maybe_entry {
|
||||||
|
|
@ -508,7 +504,7 @@ impl <'cursor, T> WriteCursor<'cursor, T>
|
||||||
|
|
||||||
// ===Garbage Collection===
|
// ===Garbage Collection===
|
||||||
async fn attempt_garbage_collection_if_necessary(&mut self) -> Result<()>
|
async fn attempt_garbage_collection_if_necessary(&mut self) -> Result<()>
|
||||||
where T: Send
|
where T: Send + Decode + Encode
|
||||||
{
|
{
|
||||||
// TODO: What should be the policy? Counting size of garbage? Counting how many entries are
|
// TODO: What should be the policy? Counting size of garbage? Counting how many entries are
|
||||||
// garbage?
|
// garbage?
|
||||||
|
|
@ -519,11 +515,18 @@ impl <'cursor, T> WriteCursor<'cursor, T>
|
||||||
}
|
}
|
||||||
|
|
||||||
async fn initiate_garbage_collection(&mut self) -> Result<usize>
|
async fn initiate_garbage_collection(&mut self) -> Result<usize>
|
||||||
where T: Send
|
where T: Send + Decode + Encode
|
||||||
{
|
{
|
||||||
// We'll dump all alive entries into a new file.
|
|
||||||
let mut cursor_to_intermediate = self.spawn_cursor_to_intermediate_file().await?;
|
let mut cursor_to_intermediate = self.spawn_cursor_to_intermediate_file().await?;
|
||||||
|
|
||||||
|
let in_memory_index: BTreeMap<T, HashSet<FilePosition>> = BTreeMap::new();
|
||||||
|
|
||||||
|
// We'll dump all alive entries into a new file.
|
||||||
|
while let Some(live_entry) = self.next_alive().await? {
|
||||||
|
let file_position = cursor_to_intermediate.append_entry(&live_entry.forget()).await?;
|
||||||
|
// TODO: Start indexing all of the indexable columns from scratch.
|
||||||
|
}
|
||||||
|
|
||||||
// In it there will be only the alive rows.
|
// In it there will be only the alive rows.
|
||||||
// Afterwards we swap the files, and delete the garbage.
|
// Afterwards we swap the files, and delete the garbage.
|
||||||
todo!()
|
todo!()
|
||||||
|
|
|
||||||
|
|
@ -48,4 +48,11 @@ impl <T>EntryDetailed<T> {
|
||||||
.map_err(|e| Error::DecodeError(DecodeErrorKind::EntryData, e))?;
|
.map_err(|e| Error::DecodeError(DecodeErrorKind::EntryData, e))?;
|
||||||
Ok(EntryDetailed { header, file_position, data })
|
Ok(EntryDetailed { header, file_position, data })
|
||||||
}
|
}
|
||||||
|
|
||||||
|
pub fn forget(self) -> Entry<T> {
|
||||||
|
Entry {
|
||||||
|
header: self.header.into(),
|
||||||
|
data: self.data,
|
||||||
|
}
|
||||||
|
}
|
||||||
}
|
}
|
||||||
|
|
|
||||||
|
|
@ -20,7 +20,6 @@ pub type FilePosition = u64;
|
||||||
#[derive(Debug)]
|
#[derive(Debug)]
|
||||||
pub struct Store<T> {
|
pub struct Store<T> {
|
||||||
pub header: StoreHeader,
|
pub header: StoreHeader,
|
||||||
pub data_type: PhantomData<T>,
|
|
||||||
pub indexes: StoreIndexes<T>,
|
pub indexes: StoreIndexes<T>,
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|
@ -68,7 +67,6 @@ impl <T>Store<T> {
|
||||||
|
|
||||||
let store = Self {
|
let store = Self {
|
||||||
header,
|
header,
|
||||||
data_type: PhantomData::<T>,
|
|
||||||
indexes,
|
indexes,
|
||||||
};
|
};
|
||||||
|
|
||||||
|
|
@ -171,7 +169,6 @@ impl <T>Store<T> {
|
||||||
|
|
||||||
let store = Self {
|
let store = Self {
|
||||||
header,
|
header,
|
||||||
data_type: PhantomData::<T>,
|
|
||||||
indexes
|
indexes
|
||||||
};
|
};
|
||||||
Ok(store)
|
Ok(store)
|
||||||
|
|
|
||||||
Loading…
Add table
Add a link
Reference in a new issue