Remove unnecessary PhantomData

This commit is contained in:
Yuriy Dupyn 2024-02-04 21:09:46 +01:00
parent 82300039fc
commit b64819f28b
3 changed files with 21 additions and 14 deletions

View file

@ -2,7 +2,7 @@ use tokio::io::{AsyncReadExt, AsyncWriteExt, AsyncSeekExt, SeekFrom};
use tokio::fs::{File, OpenOptions};
use std::path::Path;
use std::marker::PhantomData;
use crate::error::{Error, DecodeErrorKind};
use std::collections::{BTreeMap, HashSet};
use async_trait::async_trait;
@ -10,6 +10,7 @@ use bincode;
use bincode::{Decode, Encode};
use crate::binary_coding::{encode, decode};
use crate::error::{Error, DecodeErrorKind};
use crate::entry::{Entry, EntryDetailed};
use crate::entry_header::{EntryHeaderWithDataSize, EntryHeader};
use crate::store_header::StoreHeader;
@ -23,7 +24,6 @@ pub struct ReadCursor<'a, T> {
header: StoreHeader,
indexes: &'a [Option<Index<T, FilePosition>>],
file: File,
data_type: PhantomData<T>,
eof_file_position: FilePosition,
}
@ -32,7 +32,6 @@ pub struct WriteCursor<'a, T> {
header: &'a mut StoreHeader,
indexes: &'a mut [Option<Index<T, FilePosition>>],
file: File,
data_type: PhantomData<T>,
eof_file_position: FilePosition,
}
@ -390,7 +389,6 @@ impl <'cursor, T> ReadCursor<'cursor, T> {
let mut cursor = Self {
header: store.header.clone(),
file,
data_type: store.data_type,
indexes: &store.indexes,
eof_file_position: 0,
@ -425,7 +423,6 @@ impl <'cursor, T> WriteCursor<'cursor, T>
let mut cursor = Self {
header: &mut store.header,
file,
data_type: store.data_type,
indexes: &mut store.indexes,
eof_file_position: 0,
@ -451,7 +448,6 @@ impl <'cursor, T> WriteCursor<'cursor, T>
let mut cursor = Self {
header,
file,
data_type: PhantomData::<T>,
indexes,
eof_file_position: 0,
@ -476,7 +472,7 @@ impl <'cursor, T> WriteCursor<'cursor, T>
// ===Deletion===
pub async fn mark_deleted_at(&mut self, file_position: FilePosition) -> Result<()>
where T: Send
where T: Send + Decode + Encode
{
self.seek_to(file_position).await?;
let mut entry_header = self.read_entry_header().await?;
@ -495,7 +491,7 @@ impl <'cursor, T> WriteCursor<'cursor, T>
}
async fn find_first_eq_bruteforce_and_delete(&mut self, column: Column, t0: &T) -> Result<Option<EntryDetailed<T>>>
where T: Decode + PartialEq + Send + Sync
where T: Decode + Encode + PartialEq + Send + Sync
{
let maybe_entry = self.find_first_eq_bruteforce(column, t0).await?;
if let Some(entry) = maybe_entry {
@ -508,7 +504,7 @@ impl <'cursor, T> WriteCursor<'cursor, T>
// ===Garbage Collection===
async fn attempt_garbage_collection_if_necessary(&mut self) -> Result<()>
where T: Send
where T: Send + Decode + Encode
{
// TODO: What should be the policy? Counting size of garbage? Counting how many entries are
// garbage?
@ -519,11 +515,18 @@ impl <'cursor, T> WriteCursor<'cursor, T>
}
async fn initiate_garbage_collection(&mut self) -> Result<usize>
where T: Send
where T: Send + Decode + Encode
{
// We'll dump all alive entries into a new file.
let mut cursor_to_intermediate = self.spawn_cursor_to_intermediate_file().await?;
let in_memory_index: BTreeMap<T, HashSet<FilePosition>> = BTreeMap::new();
// We'll dump all alive entries into a new file.
while let Some(live_entry) = self.next_alive().await? {
let file_position = cursor_to_intermediate.append_entry(&live_entry.forget()).await?;
// TODO: Start indexing all of the indexable columns from scratch.
}
// In it there will be only the alive rows.
// Afterwards we swap the files, and delete the garbage.
todo!()