feat: index append encoding

This commit is contained in:
Jindřich Moravec 2024-02-04 20:20:20 +01:00
parent 8fd2d4ebf3
commit 6db62c42d7

View file

@ -1,33 +1,22 @@
use std::marker::PhantomData;
use std::path::PathBuf;
use tokio::fs::{File, OpenOptions};
use tokio::io::{AsyncReadExt, AsyncSeekExt, AsyncWriteExt, SeekFrom};
use tokio::io::{AsyncReadExt, AsyncSeekExt, AsyncWriteExt, BufWriter};
use async_trait::async_trait;
use std::collections::{BTreeMap, HashSet};
use std::hash::Hash;
use crate::binary_coding::{decode, decode_sequence, encode, encode_sequence};
use crate::binary_coding::{decode, encode};
use bincode;
use bincode::{Decode, Encode};
use tokio::fs;
use crate::error::{DecodeErrorKind, Error};
use bincode::error::DecodeError;
use std::mem::size_of;
type Result<T> = std::result::Result<T, Error>;
// Implements a persistent self-balancing Binary Search Tree. Nope.
// We need fixed-size nodes. But we want to index Strings which are variable length.
#[derive(Debug)]
pub struct Index<K, V> {
file: File,
data: BTreeMap<K, HashSet<V>>,
key_type: PhantomData<K>,
value_type: PhantomData<V>,
}
#[derive(Debug)]
@ -48,61 +37,102 @@ where
let data = BTreeMap::new();
Ok(Index {
file,
data,
key_type: PhantomData::<K>,
value_type: PhantomData::<V>,
})
Ok(Index { file, data })
}
pub async fn connect(file_name: PathBuf) -> Result<Index<K, V>> {
let mut file: File = OpenOptions::new()
let file: File = OpenOptions::new()
.read(true)
.write(true)
.create(true)
.open(file_name)
.await?;
let mut bytes = vec![];
file.read_to_end(&mut bytes).await?;
let data = Index::decode_tree(&bytes)
.map_err(|e| Error::DecodeError(DecodeErrorKind::CorruptedData, e))?;
Ok(Index {
let mut index = Index {
file,
data,
key_type: PhantomData::<K>,
value_type: PhantomData::<V>,
})
data: BTreeMap::new(),
};
index.load_from_file().await?;
Ok(index)
}
pub async fn insert(&mut self, k: K, v: V) -> Result<()> {
self.append_to_file(&k, &v).await?;
self.data.entry(k).or_insert_with(HashSet::new).insert(v);
Ok(())
}
pub fn insert_desynced(&mut self, k: K, v: V) -> () {
self.data.entry(k).or_insert_with(HashSet::new).insert(v);
}
pub async fn lookup(&self, k: &K) -> Result<Option<HashSet<V>>> {
let hashset = self.data.get(k).unwrap();
Ok(Some(hashset.clone()))
let hashset = self.data.get(k).cloned();
Ok(hashset)
}
pub async fn delete(&mut self, k: K, v: V) -> Result<Option<bool>> {
Ok(Some(
self.data.entry(k).or_insert_with(HashSet::new).remove(&v),
))
pub async fn delete(&mut self, k: K, v: V) -> Result<()> {
self.data.entry(k).and_modify(|values| {
values.remove(&v);
});
self.dump_to_file().await
}
fn encode(&self) -> Result<Vec<u8>> {
pub async fn sync_to_disk(&mut self) -> Result<()> {
self.dump_to_file().await
}
async fn append_to_file(&mut self, key: &K, value: &V) -> Result<()> {
let mut encoded = Vec::new();
encoded.extend(encode(&self.data)?);
Ok(encoded)
encoded.extend(encode(key)?);
encoded.extend(encode(value)?);
self.file.seek(std::io::SeekFrom::End(0)).await?;
self.file.write(&encoded).await?;
Ok(())
}
fn decode_tree(data: &[u8]) -> std::result::Result<BTreeMap<K, HashSet<V>>, DecodeError> {
let data: BTreeMap<K, HashSet<V>> = decode(data)?.0;
Ok(data)
async fn dump_to_file(&mut self) -> Result<()> {
let mut writer = BufWriter::new(&mut self.file);
writer.seek(std::io::SeekFrom::Start(0)).await?;
let mut written: u64 = 0;
let mut encoded = Vec::new();
for (key, value) in &self.data {
for v in value {
encoded.clear();
encoded.extend(encode(key)?);
encoded.extend(encode(v)?);
writer.write(&encoded).await?;
written += encoded.len() as u64;
}
}
writer.flush().await?;
self.file.set_len(written).await?;
Ok(())
}
async fn load_from_file(&mut self) -> Result<()> {
let mut bytes = vec![];
self.file.seek(std::io::SeekFrom::Start(0)).await?;
self.file.read_to_end(&mut bytes).await?;
let mut cursor = 0;
while cursor < bytes.len() {
let (key, len) = decode(&bytes[cursor..])
.map_err(|e| Error::DecodeError(DecodeErrorKind::CorruptedData, e))?;
cursor += len;
let (value, len) = decode(&bytes[cursor..])
.map_err(|e| Error::DecodeError(DecodeErrorKind::CorruptedData, e))?;
cursor += len;
self.insert_desynced(key, value);
}
Ok(())
}
}
@ -112,39 +142,6 @@ mod tests {
#[tokio::test]
async fn encode_decode() {
let mut index: Index<String, u32> = Index {
file: File::from_std(std::fs::File::create("test").unwrap()),
data: BTreeMap::new(),
key_type: PhantomData::<String>,
value_type: PhantomData::<u32>,
};
index.insert("foo".to_string(), 123).await.unwrap();
index.insert("foo".to_string(), 124).await.unwrap();
index.insert("bar".to_string(), 125).await.unwrap();
index.insert("bar".to_string(), 126).await.unwrap();
let lookup = index.lookup(&"foo".to_string()).await.unwrap().unwrap();
assert_eq!(lookup.len(), 2);
assert!(lookup.contains(&123));
assert!(lookup.contains(&124));
println!("lookup {:?}", lookup);
let encoded = index.encode().unwrap();
let decoded = Index::<String, u32>::decode_tree(&encoded).unwrap();
let decoded = Index {
file: File::from_std(std::fs::File::create("test").unwrap()),
data: decoded,
key_type: PhantomData::<String>,
value_type: PhantomData::<u32>,
};
let lookup = decoded.lookup(&"foo".to_string()).await.unwrap().unwrap();
assert_eq!(lookup.len(), 2);
assert!(lookup.contains(&123));
assert!(lookup.contains(&124));
println!("lookup {:?}", lookup);
std::fs::remove_file("test").unwrap();
todo!();
}
}