// monorepo/emseries/src/series.rs

/*
Copyright 2020-2023, Savanni D'Gerinel <savanni@luminescent-dreams.com>
This file is part of the Luminescent Dreams Tools.
Luminescent Dreams Tools is free software: you can redistribute it and/or modify it under the terms of the GNU General Public License as published by the Free Software Foundation, either version 3 of the License, or (at your option) any later version.
Luminescent Dreams Tools is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for more details.
You should have received a copy of the GNU General Public License along with Lumeto. If not, see <https://www.gnu.org/licenses/>.
*/
extern crate serde;
extern crate serde_json;
extern crate uuid;

use serde::de::DeserializeOwned;
use serde::ser::Serialize;
use std::cmp::Ordering;
use std::collections::HashMap;
use std::fs::File;
use std::fs::OpenOptions;
use std::io::{BufRead, BufReader, LineWriter, Write};
use std::iter::Iterator;

use criteria::Criteria;
use types::{EmseriesReadError, EmseriesWriteError, Record, Recordable, UniqueId};

/// An open time series database.
///
/// Any given database can store only one data type, T. The data type must be determined when the
/// database is opened.
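///
/// A minimal usage sketch; `WeightRecord` and the path are illustrative only, and the type
/// would need to implement `Clone`, `Serialize`, `DeserializeOwned`, and `Recordable`:
///
/// ```ignore
/// let mut series: Series<WeightRecord> = Series::open("weights.json")?;
/// let id = series.put(WeightRecord { weight_kg: 77.0 })?;
/// assert!(series.get(&id).is_some());
/// ```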
pub struct Series<T: Clone + Recordable + DeserializeOwned + Serialize> {
    //path: String,
    writer: LineWriter<File>,
    records: HashMap<UniqueId, T>,
}

impl<T> Series<T>
where
    T: Clone + Recordable + DeserializeOwned + Serialize,
{
    /// Open a time series database at the specified path. `path` is the full path and filename
    /// for the database.
    pub fn open(path: &str) -> Result<Series<T>, EmseriesReadError> {
        let f = OpenOptions::new()
            .read(true)
            .append(true)
            .create(true)
            .open(path)
            .map_err(EmseriesReadError::IOError)?;
        let records = Series::load_file(&f)?;
        let writer = LineWriter::new(f);
        Ok(Series {
            //path: String::from(path),
            writer,
            records,
        })
    }

    /// Load a file and return all of the records in it.
    fn load_file(f: &File) -> Result<HashMap<UniqueId, T>, EmseriesReadError> {
        let mut records: HashMap<UniqueId, T> = HashMap::new();
        let reader = BufReader::new(f);
        for line in reader.lines() {
            match line {
                Ok(line_) => {
                    /* Can't create a JSONParseError because I can't actually create the
                     * underlying error.
                     * fail_point!("parse-line", Err(Error::JSONParseError()))
                     */
                    match line_.parse::<Record<T>>() {
                        Ok(record) => match record.data {
                            Some(val) => records.insert(record.id.clone(), val),
                            None => records.remove(&record.id),
                        },
                        Err(err) => return Err(err),
                    };
                }
                Err(err) => return Err(EmseriesReadError::IOError(err)),
            }
        }
        Ok(records)
    }

    /// Put a new record into the database. A unique id will be assigned to the record and
    /// returned.
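    ///
    /// A minimal sketch, assuming `series` is an open `Series<WeightRecord>` for an
    /// illustrative `WeightRecord` type:
    ///
    /// ```ignore
    /// let id = series.put(WeightRecord { weight_kg: 77.0 })?;
    /// ```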
    pub fn put(&mut self, entry: T) -> Result<UniqueId, EmseriesWriteError> {
        let uuid = UniqueId::new();
        self.update(uuid.clone(), entry).map(|_| uuid)
    }

    /// Update an existing record. The `UniqueId` of the record passed into this function must
    /// match the `UniqueId` of a record already in the database.
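    ///
    /// A minimal sketch, assuming `id` was returned by an earlier `put` on an illustrative
    /// `Series<WeightRecord>`:
    ///
    /// ```ignore
    /// series.update(id.clone(), WeightRecord { weight_kg: 76.5 })?;
    /// ```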
    pub fn update(&mut self, uuid: UniqueId, entry: T) -> Result<(), EmseriesWriteError> {
        self.records.insert(uuid.clone(), entry.clone());
        match serde_json::to_string(&Record {
            id: uuid,
            data: Some(entry),
        }) {
            Ok(rec_str) => self
                .writer
                .write_fmt(format_args!("{}\n", rec_str.as_str()))
                .map_err(EmseriesWriteError::IOError),
            Err(err) => Err(EmseriesWriteError::JSONWriteError(err)),
        }
    }

    /// Delete a record from the database.
    ///
    /// Future note: while this deletes a record from the view, it only adds an entry to the
    /// database that indicates `data: null`. If record histories ever become important, the
    /// record and its entire history (including this delete) will still be available.
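    ///
    /// A minimal sketch, assuming `id` refers to a record currently in the series:
    ///
    /// ```ignore
    /// series.delete(&id)?;
    /// assert!(series.get(&id).is_none());
    /// ```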
    pub fn delete(&mut self, uuid: &UniqueId) -> Result<(), EmseriesWriteError> {
        if !self.records.contains_key(uuid) {
            return Ok(());
        }
        self.records.remove(uuid);

        let rec: Record<T> = Record {
            id: uuid.clone(),
            data: None,
        };
        match serde_json::to_string(&rec) {
            Ok(rec_str) => self
                .writer
                .write_fmt(format_args!("{}\n", rec_str.as_str()))
                .map_err(EmseriesWriteError::IOError),
            Err(err) => Err(EmseriesWriteError::JSONWriteError(err)),
        }
    }

    /// Get all of the records in the database.
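    ///
    /// A minimal sketch of iterating over everything in an open series:
    ///
    /// ```ignore
    /// for (_id, record) in series.records() {
    ///     // inspect each record here
    /// }
    /// ```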
    pub fn records<'s>(&'s self) -> impl Iterator<Item = (&'s UniqueId, &'s T)> + 's {
        self.records.iter()
    }

    /* The point of having Search is so that a lot of internal optimizations can happen once the
     * data sets start getting large. */

    /// Perform a search on the records in a database, based on the given criteria.
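    ///
    /// A minimal sketch, assuming `within_last_week()` is a caller-supplied function that
    /// returns a value implementing `Criteria`:
    ///
    /// ```ignore
    /// let recent: Vec<_> = series.search(within_last_week()).collect();
    /// ```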
    pub fn search<'s>(
        &'s self,
        criteria: impl Criteria + 's,
    ) -> impl Iterator<Item = (&'s UniqueId, &'s T)> + 's {
        self.records().filter(move |&tr| criteria.apply(tr.1))
    }

    /// Perform a search and sort the resulting records based on the comparison.
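    ///
    /// A minimal sketch, again assuming a caller-supplied `Criteria` value and an illustrative
    /// `WeightRecord` type with a `weight_kg` field:
    ///
    /// ```ignore
    /// let by_weight = series.search_sorted(within_last_week(), |(_, a), (_, b)| {
    ///     a.weight_kg.partial_cmp(&b.weight_kg).unwrap_or(std::cmp::Ordering::Equal)
    /// });
    /// ```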
    pub fn search_sorted<'s, C, CMP>(&'s self, criteria: C, compare: CMP) -> Vec<(&UniqueId, &T)>
    where
        C: Criteria + 's,
        CMP: FnMut(&(&UniqueId, &T), &(&UniqueId, &T)) -> Ordering,
    {
        let search_iter = self.search(criteria);
        let mut records: Vec<(&UniqueId, &T)> = search_iter.collect();
        records.sort_by(compare);
        records
    }

    /// Get an exact record from the database based on unique id.
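    ///
    /// A minimal sketch, assuming `id` came from an earlier `put`:
    ///
    /// ```ignore
    /// if let Some(record) = series.get(&id) {
    ///     // `record` is a clone of the stored value
    /// }
    /// ```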
    pub fn get(&self, uuid: &UniqueId) -> Option<T> {
        self.records.get(uuid).cloned()
    }

    /*
    pub fn remove(&self, uuid: UniqueId) -> Result<(), EmseriesError> {
        unimplemented!()
    }
    */
}