Compare commits
92 Commits
70191d3687
...
b2063943e9
Author | SHA1 | Date |
---|---|---|
Savanni D'Gerinel | b2063943e9 | |
Savanni D'Gerinel | 9ccad97e87 | |
Savanni D'Gerinel | b0322facf4 | |
Savanni D'Gerinel | 7d53854a98 | |
Savanni D'Gerinel | 1eee8280f6 | |
Savanni D'Gerinel | aa1c28c022 | |
Savanni D'Gerinel | 3ed09bfc32 | |
Savanni D'Gerinel | 7fc1530245 | |
Savanni D'Gerinel | 349f71fc81 | |
Savanni D'Gerinel | 2bf0b3d782 | |
Savanni D'Gerinel | 69e4605d71 | |
Savanni D'Gerinel | 94bd030958 | |
Savanni D'Gerinel | 8685faab8c | |
Savanni D'Gerinel | a1a5fbe048 | |
Savanni D'Gerinel | 0384d789a4 | |
Savanni D'Gerinel | 9007ecdead | |
Savanni D'Gerinel | a4cd2fea29 | |
Savanni D'Gerinel | 4114874156 | |
Savanni D'Gerinel | b756e8ca81 | |
Savanni D'Gerinel | 3cb742d863 | |
Savanni D'Gerinel | 27e1691854 | |
Savanni D'Gerinel | 2d2e82f41a | |
Savanni D'Gerinel | 78c017ede7 | |
Savanni D'Gerinel | cfdceff055 | |
Savanni D'Gerinel | 07b4cb31ce | |
Savanni D'Gerinel | b3f88a49aa | |
Savanni D'Gerinel | 3f1316b3dd | |
Savanni D'Gerinel | ef057eca66 | |
Savanni D'Gerinel | c70e1d943d | |
Savanni D'Gerinel | 7711f68993 | |
Savanni D'Gerinel | f13b3effd6 | |
Savanni D'Gerinel | 4cdd2b6b0f | |
Savanni D'Gerinel | 6c831567eb | |
Savanni D'Gerinel | 0afe0c1b88 | |
Savanni D'Gerinel | e0f3cdb50a | |
Savanni D'Gerinel | efac7e43eb | |
Savanni D'Gerinel | 37c60e4346 | |
Savanni D'Gerinel | 2084061526 | |
Savanni D'Gerinel | 79422b5c7a | |
Savanni D'Gerinel | 3f2feee4dd | |
Savanni D'Gerinel | 5443015868 | |
Savanni D'Gerinel | 49b1865818 | |
Savanni D'Gerinel | 10849687e3 | |
Savanni D'Gerinel | d441e19479 | |
Savanni D'Gerinel | 5496e9ce10 | |
Savanni D'Gerinel | 7b6b7ec011 | |
Savanni D'Gerinel | e657320b28 | |
Savanni D'Gerinel | bdcd7ee18e | |
Savanni D'Gerinel | f9974e79a7 | |
Savanni D'Gerinel | 4200432e1f | |
Savanni D'Gerinel | 525b5389a1 | |
Savanni D'Gerinel | d4a5e0f55d | |
Savanni D'Gerinel | 1d89254413 | |
Savanni D'Gerinel | 2f6be84a43 | |
Savanni D'Gerinel | f7403b43a3 | |
Savanni D'Gerinel | 2e7e159325 | |
Savanni D'Gerinel | 1e11069282 | |
Savanni D'Gerinel | c38d680e57 | |
Savanni D'Gerinel | 9bb32a378c | |
Savanni D'Gerinel | b3bfa84691 | |
Savanni D'Gerinel | f53c7200e6 | |
Savanni D'Gerinel | 491c80b42b | |
Savanni D'Gerinel | 5e4db0032b | |
Savanni D'Gerinel | 4a7d741224 | |
Savanni D'Gerinel | 6aedff8cda | |
Savanni D'Gerinel | 535ea6cd9d | |
Savanni D'Gerinel | da8281636a | |
Savanni D'Gerinel | b448ab7656 | |
Savanni D'Gerinel | 75a90bbdff | |
Savanni D'Gerinel | 94aa67a156 | |
Savanni D'Gerinel | ee5f4646df | |
Savanni D'Gerinel | 561ec70a65 | |
Savanni D'Gerinel | 14f0a74af8 | |
Savanni D'Gerinel | 68b62464f0 | |
Savanni D'Gerinel | da6bf3bfea | |
Savanni D'Gerinel | 3e87e13526 | |
Savanni D'Gerinel | 88938e44c8 | |
Savanni D'Gerinel | 89a1aa7ee5 | |
Savanni D'Gerinel | 22e25256a5 | |
Savanni D'Gerinel | 9787ed3e67 | |
Savanni D'Gerinel | 396f6e3bcf | |
Savanni D'Gerinel | 8521db333b | |
Savanni D'Gerinel | 4a7b23544e | |
Savanni D'Gerinel | a06c9fae25 | |
Savanni D'Gerinel | f05e0a15f1 | |
Savanni D'Gerinel | 634c404ae9 | |
Savanni D'Gerinel | e36657591b | |
Savanni D'Gerinel | 7077724e15 | |
Savanni D'Gerinel | 4816c9f4cf | |
Savanni D'Gerinel | 207d099607 | |
Savanni D'Gerinel | 59061c02ce | |
Savanni D'Gerinel | 3d460e5840 |
|
@ -4,3 +4,8 @@ node_modules
|
||||||
dist
|
dist
|
||||||
result
|
result
|
||||||
*.tgz
|
*.tgz
|
||||||
|
*.tar.gz
|
||||||
|
file-service/*.sqlite
|
||||||
|
file-service/*.sqlite-shm
|
||||||
|
file-service/*.sqlite-wal
|
||||||
|
file-service/var
|
||||||
|
|
File diff suppressed because it is too large
Load Diff
|
@ -7,7 +7,7 @@ members = [
|
||||||
"cyberpunk-splash",
|
"cyberpunk-splash",
|
||||||
"dashboard",
|
"dashboard",
|
||||||
"emseries",
|
"emseries",
|
||||||
"flow",
|
"file-service",
|
||||||
"fluent-ergonomics",
|
"fluent-ergonomics",
|
||||||
"geo-types",
|
"geo-types",
|
||||||
"gm-control-panel",
|
"gm-control-panel",
|
||||||
|
@ -16,6 +16,8 @@ members = [
|
||||||
"kifu/core",
|
"kifu/core",
|
||||||
"kifu/gtk",
|
"kifu/gtk",
|
||||||
"memorycache",
|
"memorycache",
|
||||||
|
"nom-training",
|
||||||
|
"result-extended",
|
||||||
"screenplay",
|
"screenplay",
|
||||||
"sgf",
|
"sgf",
|
||||||
]
|
]
|
||||||
|
|
5
build.sh
5
build.sh
|
@ -10,7 +10,7 @@ RUST_ALL_TARGETS=(
|
||||||
"cyberpunk-splash"
|
"cyberpunk-splash"
|
||||||
"dashboard"
|
"dashboard"
|
||||||
"emseries"
|
"emseries"
|
||||||
"flow"
|
"file-service"
|
||||||
"fluent-ergonomics"
|
"fluent-ergonomics"
|
||||||
"geo-types"
|
"geo-types"
|
||||||
"gm-control-panel"
|
"gm-control-panel"
|
||||||
|
@ -19,6 +19,8 @@ RUST_ALL_TARGETS=(
|
||||||
"kifu-core"
|
"kifu-core"
|
||||||
"kifu-gtk"
|
"kifu-gtk"
|
||||||
"memorycache"
|
"memorycache"
|
||||||
|
"nom-training"
|
||||||
|
"result-extended"
|
||||||
"screenplay"
|
"screenplay"
|
||||||
"sgf"
|
"sgf"
|
||||||
)
|
)
|
||||||
|
@ -37,6 +39,7 @@ build_dist() {
|
||||||
|
|
||||||
for target in $TARGETS; do
|
for target in $TARGETS; do
|
||||||
if [ -f $target/dist.sh ]; then
|
if [ -f $target/dist.sh ]; then
|
||||||
|
build_rust_targets release ${TARGETS[*]}
|
||||||
cd $target && ./dist.sh
|
cd $target && ./dist.sh
|
||||||
fi
|
fi
|
||||||
done
|
done
|
||||||
|
|
|
@ -14,6 +14,9 @@ case $CMD in
|
||||||
build)
|
build)
|
||||||
$CARGO build $MODULE $PARAMS
|
$CARGO build $MODULE $PARAMS
|
||||||
;;
|
;;
|
||||||
|
lint)
|
||||||
|
$CARGO clippy $MODULE $PARAMS -- -Dwarnings
|
||||||
|
;;
|
||||||
test)
|
test)
|
||||||
$CARGO test $MODULE $PARAMS
|
$CARGO test $MODULE $PARAMS
|
||||||
;;
|
;;
|
||||||
|
@ -21,16 +24,18 @@ case $CMD in
|
||||||
$CARGO run $MODULE $PARAMS
|
$CARGO run $MODULE $PARAMS
|
||||||
;;
|
;;
|
||||||
release)
|
release)
|
||||||
|
$CARGO clippy $MODULE $PARAMS -- -Dwarnings
|
||||||
$CARGO build --release $MODULE $PARAMS
|
$CARGO build --release $MODULE $PARAMS
|
||||||
|
$CARGO test --release $MODULE $PARAMS
|
||||||
;;
|
;;
|
||||||
clean)
|
clean)
|
||||||
$CARGO clean $MODULE
|
$CARGO clean $MODULE
|
||||||
;;
|
;;
|
||||||
"")
|
"")
|
||||||
echo "No command specified. Use build | test | run | release | clean"
|
echo "No command specified. Use build | lint | test | run | release | clean"
|
||||||
;;
|
;;
|
||||||
*)
|
*)
|
||||||
echo "$CMD is unknown. Use build | test | run | release | clean"
|
echo "$CMD is unknown. Use build | lint | test | run | release | clean"
|
||||||
;;
|
;;
|
||||||
esac
|
esac
|
||||||
|
|
||||||
|
|
|
@ -3,7 +3,6 @@ name = "changeset"
|
||||||
version = "0.1.0"
|
version = "0.1.0"
|
||||||
edition = "2021"
|
edition = "2021"
|
||||||
license = "GPL-3.0-only"
|
license = "GPL-3.0-only"
|
||||||
license-file = "../COPYING"
|
|
||||||
|
|
||||||
# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html
|
# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html
|
||||||
|
|
||||||
|
|
|
@ -26,7 +26,7 @@ pub enum Change<Key: Eq + Hash, Value> {
|
||||||
NewRecord(Value),
|
NewRecord(Value),
|
||||||
}
|
}
|
||||||
|
|
||||||
#[derive(Clone, Debug)]
|
#[derive(Clone, Debug, Default)]
|
||||||
pub struct Changeset<Key: Clone + Eq + Hash, Value> {
|
pub struct Changeset<Key: Clone + Eq + Hash, Value> {
|
||||||
delete: HashSet<Key>,
|
delete: HashSet<Key>,
|
||||||
update: HashMap<Key, Value>,
|
update: HashMap<Key, Value>,
|
||||||
|
@ -34,14 +34,6 @@ pub struct Changeset<Key: Clone + Eq + Hash, Value> {
|
||||||
}
|
}
|
||||||
|
|
||||||
impl<Key: Clone + Constructable + Eq + Hash, Value> Changeset<Key, Value> {
|
impl<Key: Clone + Constructable + Eq + Hash, Value> Changeset<Key, Value> {
|
||||||
pub fn new() -> Self {
|
|
||||||
Self {
|
|
||||||
delete: HashSet::new(),
|
|
||||||
update: HashMap::new(),
|
|
||||||
new: HashMap::new(),
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
pub fn add(&mut self, r: Value) -> Key {
|
pub fn add(&mut self, r: Value) -> Key {
|
||||||
let k = Key::new();
|
let k = Key::new();
|
||||||
self.new.insert(k.clone(), r);
|
self.new.insert(k.clone(), r);
|
||||||
|
@ -90,7 +82,7 @@ impl<Key: Clone + Eq + Hash, Value> From<Changeset<Key, Value>> for Vec<Change<K
|
||||||
.into_iter()
|
.into_iter()
|
||||||
.map(|(k, v)| Change::UpdateRecord((k, v))),
|
.map(|(k, v)| Change::UpdateRecord((k, v))),
|
||||||
)
|
)
|
||||||
.chain(new.into_iter().map(|(_, v)| Change::NewRecord(v)))
|
.chain(new.into_values().map(|v| Change::NewRecord(v)))
|
||||||
.collect()
|
.collect()
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
@ -100,7 +92,7 @@ mod tests {
|
||||||
use super::*;
|
use super::*;
|
||||||
use uuid::Uuid;
|
use uuid::Uuid;
|
||||||
|
|
||||||
#[derive(Clone, PartialEq, Eq, Hash)]
|
#[derive(Clone, PartialEq, Eq, Hash, Default)]
|
||||||
struct Id(Uuid);
|
struct Id(Uuid);
|
||||||
impl Constructable for Id {
|
impl Constructable for Id {
|
||||||
fn new() -> Self {
|
fn new() -> Self {
|
||||||
|
@ -110,7 +102,7 @@ mod tests {
|
||||||
|
|
||||||
#[test]
|
#[test]
|
||||||
fn it_generates_a_new_record() {
|
fn it_generates_a_new_record() {
|
||||||
let mut set: Changeset<Id, String> = Changeset::new();
|
let mut set: Changeset<Id, String> = Changeset::default();
|
||||||
set.add("efgh".to_string());
|
set.add("efgh".to_string());
|
||||||
let changes = Vec::from(set.clone());
|
let changes = Vec::from(set.clone());
|
||||||
assert_eq!(changes.len(), 1);
|
assert_eq!(changes.len(), 1);
|
||||||
|
@ -125,7 +117,7 @@ mod tests {
|
||||||
|
|
||||||
#[test]
|
#[test]
|
||||||
fn it_generates_a_delete_record() {
|
fn it_generates_a_delete_record() {
|
||||||
let mut set: Changeset<Id, String> = Changeset::new();
|
let mut set: Changeset<Id, String> = Changeset::default();
|
||||||
let id1 = Id::new();
|
let id1 = Id::new();
|
||||||
set.delete(id1.clone());
|
set.delete(id1.clone());
|
||||||
let changes = Vec::from(set.clone());
|
let changes = Vec::from(set.clone());
|
||||||
|
@ -142,7 +134,7 @@ mod tests {
|
||||||
|
|
||||||
#[test]
|
#[test]
|
||||||
fn update_unrelated_records() {
|
fn update_unrelated_records() {
|
||||||
let mut set: Changeset<Id, String> = Changeset::new();
|
let mut set: Changeset<Id, String> = Changeset::default();
|
||||||
let id1 = Id::new();
|
let id1 = Id::new();
|
||||||
let id2 = Id::new();
|
let id2 = Id::new();
|
||||||
set.update(id1.clone(), "abcd".to_owned());
|
set.update(id1.clone(), "abcd".to_owned());
|
||||||
|
@ -155,7 +147,7 @@ mod tests {
|
||||||
|
|
||||||
#[test]
|
#[test]
|
||||||
fn delete_cancels_new() {
|
fn delete_cancels_new() {
|
||||||
let mut set: Changeset<Id, String> = Changeset::new();
|
let mut set: Changeset<Id, String> = Changeset::default();
|
||||||
let key = set.add("efgh".to_string());
|
let key = set.add("efgh".to_string());
|
||||||
set.delete(key);
|
set.delete(key);
|
||||||
let changes = Vec::from(set);
|
let changes = Vec::from(set);
|
||||||
|
@ -164,7 +156,7 @@ mod tests {
|
||||||
|
|
||||||
#[test]
|
#[test]
|
||||||
fn delete_cancels_update() {
|
fn delete_cancels_update() {
|
||||||
let mut set: Changeset<Id, String> = Changeset::new();
|
let mut set: Changeset<Id, String> = Changeset::default();
|
||||||
let id = Id::new();
|
let id = Id::new();
|
||||||
set.update(id.clone(), "efgh".to_owned());
|
set.update(id.clone(), "efgh".to_owned());
|
||||||
set.delete(id.clone());
|
set.delete(id.clone());
|
||||||
|
@ -175,7 +167,7 @@ mod tests {
|
||||||
|
|
||||||
#[test]
|
#[test]
|
||||||
fn update_atop_new_is_new() {
|
fn update_atop_new_is_new() {
|
||||||
let mut set: Changeset<Id, String> = Changeset::new();
|
let mut set: Changeset<Id, String> = Changeset::default();
|
||||||
let key = set.add("efgh".to_owned());
|
let key = set.add("efgh".to_owned());
|
||||||
set.update(key, "wxyz".to_owned());
|
set.update(key, "wxyz".to_owned());
|
||||||
let changes = Vec::from(set);
|
let changes = Vec::from(set);
|
||||||
|
@ -185,7 +177,7 @@ mod tests {
|
||||||
|
|
||||||
#[test]
|
#[test]
|
||||||
fn updates_get_squashed() {
|
fn updates_get_squashed() {
|
||||||
let mut set: Changeset<Id, String> = Changeset::new();
|
let mut set: Changeset<Id, String> = Changeset::default();
|
||||||
let id1 = Id::new();
|
let id1 = Id::new();
|
||||||
let id2 = Id::new();
|
let id2 = Id::new();
|
||||||
set.update(id1.clone(), "efgh".to_owned());
|
set.update(id1.clone(), "efgh".to_owned());
|
||||||
|
|
|
@ -33,12 +33,12 @@ fn main() {
|
||||||
|
|
||||||
let filename = args
|
let filename = args
|
||||||
.next()
|
.next()
|
||||||
.map(|p| PathBuf::from(p))
|
.map(PathBuf::from)
|
||||||
.expect("A filename is required");
|
.expect("A filename is required");
|
||||||
let size = args
|
let size = args
|
||||||
.next()
|
.next()
|
||||||
.and_then(|s| s.parse::<usize>().ok())
|
.and_then(|s| s.parse::<usize>().ok())
|
||||||
.unwrap_or(3);
|
.unwrap_or(3);
|
||||||
let map: hex_map::Map<MapVal> = hex_map::Map::new_hexagonal(size);
|
let map: hex_map::Map<MapVal> = hex_map::Map::new_hexagonal(size);
|
||||||
hex_map::write_file(filename, map);
|
hex_map::write_file(filename, map).expect("to write file");
|
||||||
}
|
}
|
||||||
|
|
|
@ -10,10 +10,9 @@ Luminescent Dreams Tools is distributed in the hope that it will be useful, but
|
||||||
You should have received a copy of the GNU General Public License along with Lumeto. If not, see <https://www.gnu.org/licenses/>.
|
You should have received a copy of the GNU General Public License along with Lumeto. If not, see <https://www.gnu.org/licenses/>.
|
||||||
*/
|
*/
|
||||||
|
|
||||||
/// Ĉi-tiu modulo enhavas la elementojn por kub-koordinato.
|
/// This module contains the elements of cube coordinates.
|
||||||
///
|
///
|
||||||
/// This code is based on https://www.redblobgames.com/grids/hexagons/
|
/// This code is based on https://www.redblobgames.com/grids/hexagons/
|
||||||
use crate::Error;
|
|
||||||
use std::collections::HashSet;
|
use std::collections::HashSet;
|
||||||
|
|
||||||
/// An address within the hex coordinate system
|
/// An address within the hex coordinate system
|
||||||
|
@ -62,7 +61,7 @@ impl AxialAddr {
|
||||||
pub fn is_adjacent(&self, dest: &AxialAddr) -> bool {
|
pub fn is_adjacent(&self, dest: &AxialAddr) -> bool {
|
||||||
dest.adjacencies()
|
dest.adjacencies()
|
||||||
.collect::<Vec<AxialAddr>>()
|
.collect::<Vec<AxialAddr>>()
|
||||||
.contains(&self)
|
.contains(self)
|
||||||
}
|
}
|
||||||
|
|
||||||
/// Measure the distance to a destination
|
/// Measure the distance to a destination
|
||||||
|
@ -79,7 +78,7 @@ impl AxialAddr {
|
||||||
|
|
||||||
positions.push(item);
|
positions.push(item);
|
||||||
|
|
||||||
while positions.len() > 0 {
|
while !positions.is_empty() {
|
||||||
let elem = positions.remove(0);
|
let elem = positions.remove(0);
|
||||||
for adj in elem.adjacencies() {
|
for adj in elem.adjacencies() {
|
||||||
if self.distance(&adj) <= distance && !results.contains(&adj) {
|
if self.distance(&adj) <= distance && !results.contains(&adj) {
|
||||||
|
|
|
@ -14,7 +14,6 @@ use crate::{hex::AxialAddr, Error};
|
||||||
use nom::{
|
use nom::{
|
||||||
bytes::complete::tag,
|
bytes::complete::tag,
|
||||||
character::complete::alphanumeric1,
|
character::complete::alphanumeric1,
|
||||||
error::ParseError,
|
|
||||||
multi::many1,
|
multi::many1,
|
||||||
sequence::{delimited, separated_pair},
|
sequence::{delimited, separated_pair},
|
||||||
Finish, IResult, Parser,
|
Finish, IResult, Parser,
|
||||||
|
@ -81,7 +80,7 @@ pub fn parse_data<'a, A: Default + From<String>>(
|
||||||
}
|
}
|
||||||
|
|
||||||
let cells = data
|
let cells = data
|
||||||
.map(|line| parse_line::<A>(&line).unwrap())
|
.map(|line| parse_line::<A>(line).unwrap())
|
||||||
.collect::<Vec<(AxialAddr, A)>>();
|
.collect::<Vec<(AxialAddr, A)>>();
|
||||||
let cells = cells.into_iter().collect::<HashMap<AxialAddr, A>>();
|
let cells = cells.into_iter().collect::<HashMap<AxialAddr, A>>();
|
||||||
Map { cells }
|
Map { cells }
|
||||||
|
|
|
@ -9,9 +9,9 @@ Lumeto is distributed in the hope that it will be useful, but WITHOUT ANY WARRAN
|
||||||
|
|
||||||
You should have received a copy of the GNU General Public License along with Lumeto. If not, see <https://www.gnu.org/licenses/>.
|
You should have received a copy of the GNU General Public License along with Lumeto. If not, see <https://www.gnu.org/licenses/>.
|
||||||
*/
|
*/
|
||||||
use thiserror;
|
use thiserror::Error;
|
||||||
|
|
||||||
#[derive(Debug, thiserror::Error)]
|
#[derive(Debug, Error)]
|
||||||
pub enum Error {
|
pub enum Error {
|
||||||
#[error("IO error on reading or writing: {0}")]
|
#[error("IO error on reading or writing: {0}")]
|
||||||
IO(std::io::Error),
|
IO(std::io::Error),
|
||||||
|
|
|
@ -2,6 +2,7 @@
|
||||||
name = "cyberpunk-splash"
|
name = "cyberpunk-splash"
|
||||||
version = "0.1.0"
|
version = "0.1.0"
|
||||||
edition = "2021"
|
edition = "2021"
|
||||||
|
license = "GPL-3.0-only"
|
||||||
|
|
||||||
# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html
|
# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html
|
||||||
|
|
||||||
|
|
|
@ -2,8 +2,8 @@ use cairo::{
|
||||||
Context, FontSlant, FontWeight, Format, ImageSurface, LineCap, LinearGradient, Pattern,
|
Context, FontSlant, FontWeight, Format, ImageSurface, LineCap, LinearGradient, Pattern,
|
||||||
TextExtents,
|
TextExtents,
|
||||||
};
|
};
|
||||||
use glib::{GString, Object};
|
use glib::Object;
|
||||||
use gtk::{gdk::Key, prelude::*, subclass::prelude::*, EventControllerKey};
|
use gtk::{prelude::*, subclass::prelude::*, EventControllerKey};
|
||||||
use std::{
|
use std::{
|
||||||
cell::RefCell,
|
cell::RefCell,
|
||||||
rc::Rc,
|
rc::Rc,
|
||||||
|
@ -14,12 +14,6 @@ use std::{
|
||||||
const WIDTH: i32 = 1600;
|
const WIDTH: i32 = 1600;
|
||||||
const HEIGHT: i32 = 600;
|
const HEIGHT: i32 = 600;
|
||||||
|
|
||||||
#[derive(Clone, Copy, Debug)]
|
|
||||||
enum Event {
|
|
||||||
Frames(u8),
|
|
||||||
Time(Duration),
|
|
||||||
}
|
|
||||||
|
|
||||||
#[derive(Clone, Copy, Debug)]
|
#[derive(Clone, Copy, Debug)]
|
||||||
pub enum State {
|
pub enum State {
|
||||||
Running {
|
Running {
|
||||||
|
@ -50,7 +44,7 @@ impl State {
|
||||||
*self = Self::Running {
|
*self = Self::Running {
|
||||||
last_update: Instant::now(),
|
last_update: Instant::now(),
|
||||||
deadline: Instant::now() + *time_remaining,
|
deadline: Instant::now() + *time_remaining,
|
||||||
timeout: timeout.clone(),
|
timeout: *timeout,
|
||||||
};
|
};
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
@ -62,7 +56,7 @@ impl State {
|
||||||
{
|
{
|
||||||
*self = Self::Paused {
|
*self = Self::Paused {
|
||||||
time_remaining: *deadline - Instant::now(),
|
time_remaining: *deadline - Instant::now(),
|
||||||
timeout: timeout.clone(),
|
timeout: *timeout,
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
@ -108,13 +102,13 @@ impl TimeoutAnimation {
|
||||||
fn tick(&mut self, frames_elapsed: u8) {
|
fn tick(&mut self, frames_elapsed: u8) {
|
||||||
let step_size = 1. / (self.duration * 60.);
|
let step_size = 1. / (self.duration * 60.);
|
||||||
if self.ascending {
|
if self.ascending {
|
||||||
self.intensity = self.intensity + step_size * frames_elapsed as f64;
|
self.intensity += step_size * frames_elapsed as f64;
|
||||||
if self.intensity > 1. {
|
if self.intensity > 1. {
|
||||||
self.intensity = 1.0;
|
self.intensity = 1.0;
|
||||||
self.ascending = false;
|
self.ascending = false;
|
||||||
}
|
}
|
||||||
} else {
|
} else {
|
||||||
self.intensity = self.intensity - step_size * frames_elapsed as f64;
|
self.intensity -= step_size * frames_elapsed as f64;
|
||||||
if self.intensity < 0. {
|
if self.intensity < 0. {
|
||||||
self.intensity = 0.0;
|
self.intensity = 0.0;
|
||||||
self.ascending = true;
|
self.ascending = true;
|
||||||
|
@ -148,7 +142,6 @@ impl SplashPrivate {
|
||||||
*self.height.borrow(),
|
*self.height.borrow(),
|
||||||
2.,
|
2.,
|
||||||
8.,
|
8.,
|
||||||
8.,
|
|
||||||
(0.7, 0., 1.),
|
(0.7, 0., 1.),
|
||||||
);
|
);
|
||||||
|
|
||||||
|
@ -333,7 +326,7 @@ impl Splash {
|
||||||
let _ = context.set_source(&*background);
|
let _ = context.set_source(&*background);
|
||||||
let _ = context.paint();
|
let _ = context.paint();
|
||||||
|
|
||||||
let state = s.imp().state.borrow().clone();
|
let state = *s.imp().state.borrow();
|
||||||
|
|
||||||
let time = match state {
|
let time = match state {
|
||||||
State::Running { deadline, .. } => deadline - Instant::now(),
|
State::Running { deadline, .. } => deadline - Instant::now(),
|
||||||
|
@ -359,7 +352,7 @@ impl Splash {
|
||||||
|
|
||||||
let mut saved_extents = s.imp().time_extents.borrow_mut();
|
let mut saved_extents = s.imp().time_extents.borrow_mut();
|
||||||
if saved_extents.is_none() {
|
if saved_extents.is_none() {
|
||||||
*saved_extents = Some(time_extents.clone());
|
*saved_extents = Some(time_extents);
|
||||||
}
|
}
|
||||||
|
|
||||||
let time_baseline_x = center_x - time_extents.width() / 2.;
|
let time_baseline_x = center_x - time_extents.width() / 2.;
|
||||||
|
@ -372,8 +365,8 @@ impl Splash {
|
||||||
time_baseline_y,
|
time_baseline_y,
|
||||||
);
|
);
|
||||||
let (running, timeout_animation) = match state {
|
let (running, timeout_animation) = match state {
|
||||||
State::Running { timeout, .. } => (true, timeout.clone()),
|
State::Running { timeout, .. } => (true, timeout),
|
||||||
State::Paused { timeout, .. } => (false, timeout.clone()),
|
State::Paused { timeout, .. } => (false, timeout),
|
||||||
};
|
};
|
||||||
match timeout_animation {
|
match timeout_animation {
|
||||||
Some(ref animation) => {
|
Some(ref animation) => {
|
||||||
|
@ -395,8 +388,7 @@ impl Splash {
|
||||||
let _ = context.show_text(&time);
|
let _ = context.show_text(&time);
|
||||||
};
|
};
|
||||||
|
|
||||||
match *s.imp().time_extents.borrow() {
|
if let Some(extents) = *s.imp().time_extents.borrow() {
|
||||||
Some(extents) => {
|
|
||||||
context.set_source_rgb(0.7, 0.0, 1.0);
|
context.set_source_rgb(0.7, 0.0, 1.0);
|
||||||
let time_meter = SlashMeter {
|
let time_meter = SlashMeter {
|
||||||
orientation: gtk::Orientation::Horizontal,
|
orientation: gtk::Orientation::Horizontal,
|
||||||
|
@ -407,9 +399,7 @@ impl Splash {
|
||||||
height: 60.,
|
height: 60.,
|
||||||
length: 100.,
|
length: 100.,
|
||||||
};
|
};
|
||||||
time_meter.draw(&context);
|
time_meter.draw(context);
|
||||||
}
|
|
||||||
None => {}
|
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
});
|
});
|
||||||
|
@ -544,7 +534,7 @@ impl SlashMeter {
|
||||||
gtk::Orientation::Horizontal => {
|
gtk::Orientation::Horizontal => {
|
||||||
let angle: f64 = 0.8;
|
let angle: f64 = 0.8;
|
||||||
let run = self.height / angle.tan();
|
let run = self.height / angle.tan();
|
||||||
let width = self.length as f64 / (self.count as f64 * 2.);
|
let width = self.length / (self.count as f64 * 2.);
|
||||||
|
|
||||||
for c in 0..self.count {
|
for c in 0..self.count {
|
||||||
context.set_line_width(1.);
|
context.set_line_width(1.);
|
||||||
|
@ -579,10 +569,6 @@ trait Pen {
|
||||||
struct GlowPen {
|
struct GlowPen {
|
||||||
blur_context: Context,
|
blur_context: Context,
|
||||||
draw_context: Context,
|
draw_context: Context,
|
||||||
|
|
||||||
line_width: f64,
|
|
||||||
blur_line_width: f64,
|
|
||||||
blur_size: f64,
|
|
||||||
}
|
}
|
||||||
|
|
||||||
impl GlowPen {
|
impl GlowPen {
|
||||||
|
@ -591,7 +577,6 @@ impl GlowPen {
|
||||||
height: i32,
|
height: i32,
|
||||||
line_width: f64,
|
line_width: f64,
|
||||||
blur_line_width: f64,
|
blur_line_width: f64,
|
||||||
blur_size: f64,
|
|
||||||
color: (f64, f64, f64),
|
color: (f64, f64, f64),
|
||||||
) -> Self {
|
) -> Self {
|
||||||
let blur_context =
|
let blur_context =
|
||||||
|
@ -611,9 +596,6 @@ impl GlowPen {
|
||||||
Self {
|
Self {
|
||||||
blur_context,
|
blur_context,
|
||||||
draw_context,
|
draw_context,
|
||||||
line_width,
|
|
||||||
blur_line_width,
|
|
||||||
blur_size,
|
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
@ -630,8 +612,10 @@ impl Pen for GlowPen {
|
||||||
}
|
}
|
||||||
|
|
||||||
fn stroke(&self) {
|
fn stroke(&self) {
|
||||||
self.blur_context.stroke();
|
self.blur_context.stroke().expect("to draw the blur line");
|
||||||
self.draw_context.stroke();
|
self.draw_context
|
||||||
|
.stroke()
|
||||||
|
.expect("to draw the regular line");
|
||||||
}
|
}
|
||||||
|
|
||||||
fn finish(self) -> Pattern {
|
fn finish(self) -> Pattern {
|
||||||
|
@ -681,7 +665,7 @@ fn main() {
|
||||||
let countdown = match options.lookup::<String>("countdown") {
|
let countdown = match options.lookup::<String>("countdown") {
|
||||||
Ok(Some(countdown_str)) => {
|
Ok(Some(countdown_str)) => {
|
||||||
let parts = countdown_str.split(':').collect::<Vec<&str>>();
|
let parts = countdown_str.split(':').collect::<Vec<&str>>();
|
||||||
let duration = match parts.len() {
|
match parts.len() {
|
||||||
2 => {
|
2 => {
|
||||||
let minutes = parts[0].parse::<u64>().unwrap();
|
let minutes = parts[0].parse::<u64>().unwrap();
|
||||||
let seconds = parts[1].parse::<u64>().unwrap();
|
let seconds = parts[1].parse::<u64>().unwrap();
|
||||||
|
@ -692,8 +676,7 @@ fn main() {
|
||||||
Duration::from_secs(seconds)
|
Duration::from_secs(seconds)
|
||||||
}
|
}
|
||||||
_ => Duration::from_secs(300),
|
_ => Duration::from_secs(300),
|
||||||
};
|
}
|
||||||
duration
|
|
||||||
}
|
}
|
||||||
_ => Duration::from_secs(300),
|
_ => Duration::from_secs(300),
|
||||||
};
|
};
|
||||||
|
@ -725,7 +708,7 @@ fn main() {
|
||||||
let window = gtk::ApplicationWindow::new(app);
|
let window = gtk::ApplicationWindow::new(app);
|
||||||
window.present();
|
window.present();
|
||||||
|
|
||||||
let splash = Splash::new(title.read().unwrap().clone(), state.read().unwrap().clone());
|
let splash = Splash::new(title.read().unwrap().clone(), *state.read().unwrap());
|
||||||
|
|
||||||
window.set_child(Some(&splash));
|
window.set_child(Some(&splash));
|
||||||
|
|
||||||
|
@ -763,7 +746,7 @@ fn main() {
|
||||||
loop {
|
loop {
|
||||||
std::thread::sleep(Duration::from_millis(1000 / 60));
|
std::thread::sleep(Duration::from_millis(1000 / 60));
|
||||||
state.write().unwrap().run(Instant::now());
|
state.write().unwrap().run(Instant::now());
|
||||||
let _ = gtk_tx.send(state.read().unwrap().clone());
|
let _ = gtk_tx.send(*state.read().unwrap());
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
});
|
});
|
||||||
|
|
|
@ -1,6 +1,6 @@
|
||||||
[package]
|
[package]
|
||||||
name = "dashboard"
|
name = "dashboard"
|
||||||
version = "0.1.0"
|
version = "0.1.1"
|
||||||
edition = "2018"
|
edition = "2018"
|
||||||
|
|
||||||
# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html
|
# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html
|
||||||
|
|
|
@ -1,11 +1,12 @@
|
||||||
#!/usr/bin/env bash
|
#!/usr/bin/env bash
|
||||||
|
|
||||||
set -euo pipefail
|
set -euo pipefail
|
||||||
set -x
|
|
||||||
|
VERSION=`cat Cargo.toml | grep "^version =" | sed -r 's/^version = "(.+)"$/\1/'`
|
||||||
|
|
||||||
mkdir -p dist
|
mkdir -p dist
|
||||||
cp dashboard.desktop dist
|
cp dashboard.desktop dist
|
||||||
cp ../target/release/dashboard dist
|
cp ../target/release/dashboard dist
|
||||||
strip dist/dashboard
|
strip dist/dashboard
|
||||||
tar -cf dashboard.tgz dist/
|
tar -czf dashboard-${VERSION}.tgz dist/
|
||||||
|
|
||||||
|
|
|
@ -40,16 +40,16 @@ impl ApplicationWindow {
|
||||||
.vexpand(true)
|
.vexpand(true)
|
||||||
.build();
|
.build();
|
||||||
|
|
||||||
let date_label = Date::new();
|
let date_label = Date::default();
|
||||||
layout.append(&date_label);
|
layout.append(&date_label);
|
||||||
|
|
||||||
let events = Events::new();
|
let events = Events::default();
|
||||||
layout.append(&events);
|
layout.append(&events);
|
||||||
|
|
||||||
let transit_card = TransitCard::new();
|
let transit_card = TransitCard::default();
|
||||||
layout.append(&transit_card);
|
layout.append(&transit_card);
|
||||||
|
|
||||||
let transit_clock = TransitClock::new();
|
let transit_clock = TransitClock::default();
|
||||||
layout.append(&transit_clock);
|
layout.append(&transit_clock);
|
||||||
|
|
||||||
window.set_content(Some(&layout));
|
window.set_content(Some(&layout));
|
||||||
|
|
|
@ -35,8 +35,8 @@ glib::wrapper! {
|
||||||
pub struct Date(ObjectSubclass<DatePrivate>) @extends gtk::Box, gtk::Widget;
|
pub struct Date(ObjectSubclass<DatePrivate>) @extends gtk::Box, gtk::Widget;
|
||||||
}
|
}
|
||||||
|
|
||||||
impl Date {
|
impl Default for Date {
|
||||||
pub fn new() -> Self {
|
fn default() -> Self {
|
||||||
let s: Self = Object::builder().build();
|
let s: Self = Object::builder().build();
|
||||||
s.set_margin_bottom(8);
|
s.set_margin_bottom(8);
|
||||||
s.set_margin_top(8);
|
s.set_margin_top(8);
|
||||||
|
@ -48,7 +48,9 @@ impl Date {
|
||||||
s.redraw();
|
s.redraw();
|
||||||
s
|
s
|
||||||
}
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
impl Date {
|
||||||
pub fn update_date(&self, date: IFC) {
|
pub fn update_date(&self, date: IFC) {
|
||||||
*self.imp().date.borrow_mut() = date;
|
*self.imp().date.borrow_mut() = date;
|
||||||
self.redraw();
|
self.redraw();
|
||||||
|
|
|
@ -1,13 +1,12 @@
|
||||||
use crate::{
|
use crate::{
|
||||||
components::Date,
|
components::Date,
|
||||||
solstices::{self, YearlyEvents},
|
solstices::{self, YearlyEvents},
|
||||||
soluna_client::SunMoon,
|
|
||||||
};
|
};
|
||||||
use chrono::TimeZone;
|
|
||||||
use glib::Object;
|
use glib::Object;
|
||||||
use gtk::{prelude::*, subclass::prelude::*, IconLookupFlags};
|
use gtk::{prelude::*, subclass::prelude::*};
|
||||||
use ifc::IFC;
|
use ifc::IFC;
|
||||||
|
|
||||||
|
/*
|
||||||
#[derive(PartialEq)]
|
#[derive(PartialEq)]
|
||||||
pub enum UpcomingEvent {
|
pub enum UpcomingEvent {
|
||||||
SpringEquinox,
|
SpringEquinox,
|
||||||
|
@ -15,25 +14,15 @@ pub enum UpcomingEvent {
|
||||||
AutumnEquinox,
|
AutumnEquinox,
|
||||||
WinterSolstice,
|
WinterSolstice,
|
||||||
}
|
}
|
||||||
|
*/
|
||||||
|
|
||||||
|
#[derive(Default)]
|
||||||
pub struct EventsPrivate {
|
pub struct EventsPrivate {
|
||||||
spring_equinox: Date,
|
spring_equinox: Date,
|
||||||
summer_solstice: Date,
|
summer_solstice: Date,
|
||||||
autumn_equinox: Date,
|
autumn_equinox: Date,
|
||||||
winter_solstice: Date,
|
winter_solstice: Date,
|
||||||
next: UpcomingEvent,
|
// next: UpcomingEvent,
|
||||||
}
|
|
||||||
|
|
||||||
impl Default for EventsPrivate {
|
|
||||||
fn default() -> Self {
|
|
||||||
Self {
|
|
||||||
spring_equinox: Date::new(),
|
|
||||||
summer_solstice: Date::new(),
|
|
||||||
autumn_equinox: Date::new(),
|
|
||||||
winter_solstice: Date::new(),
|
|
||||||
next: UpcomingEvent::SpringEquinox,
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
}
|
||||||
|
|
||||||
#[glib::object_subclass]
|
#[glib::object_subclass]
|
||||||
|
@ -51,8 +40,8 @@ glib::wrapper! {
|
||||||
pub struct Events(ObjectSubclass<EventsPrivate>) @extends gtk::Widget, gtk::Box, @implements gtk::Orientable;
|
pub struct Events(ObjectSubclass<EventsPrivate>) @extends gtk::Widget, gtk::Box, @implements gtk::Orientable;
|
||||||
}
|
}
|
||||||
|
|
||||||
impl Events {
|
impl Default for Events {
|
||||||
pub fn new() -> Self {
|
fn default() -> Self {
|
||||||
let s: Self = Object::builder().build();
|
let s: Self = Object::builder().build();
|
||||||
s.set_orientation(gtk::Orientation::Horizontal);
|
s.set_orientation(gtk::Orientation::Horizontal);
|
||||||
s.set_spacing(8);
|
s.set_spacing(8);
|
||||||
|
@ -64,7 +53,9 @@ impl Events {
|
||||||
|
|
||||||
s
|
s
|
||||||
}
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
impl Events {
|
||||||
pub fn set_events(&self, events: YearlyEvents, next_event: solstices::Event) {
|
pub fn set_events(&self, events: YearlyEvents, next_event: solstices::Event) {
|
||||||
self.imp()
|
self.imp()
|
||||||
.spring_equinox
|
.spring_equinox
|
||||||
|
|
|
@ -1,6 +1,5 @@
|
||||||
use crate::soluna_client::SunMoon;
|
|
||||||
use glib::Object;
|
use glib::Object;
|
||||||
use gtk::{prelude::*, subclass::prelude::*, IconLookupFlags};
|
use gtk::{prelude::*, subclass::prelude::*};
|
||||||
|
|
||||||
#[derive(Default)]
|
#[derive(Default)]
|
||||||
pub struct LabelPrivate {
|
pub struct LabelPrivate {
|
||||||
|
|
|
@ -1,6 +1,6 @@
|
||||||
use crate::{components::Label, soluna_client::SunMoon};
|
use crate::{components::Label, soluna_client::SunMoon};
|
||||||
use glib::Object;
|
use glib::Object;
|
||||||
use gtk::{prelude::*, subclass::prelude::*, IconLookupFlags};
|
use gtk::{prelude::*, subclass::prelude::*};
|
||||||
|
|
||||||
pub struct TransitCardPrivate {
|
pub struct TransitCardPrivate {
|
||||||
sunrise: Label,
|
sunrise: Label,
|
||||||
|
@ -35,8 +35,8 @@ glib::wrapper! {
|
||||||
pub struct TransitCard(ObjectSubclass<TransitCardPrivate>) @extends gtk::Grid, gtk::Widget;
|
pub struct TransitCard(ObjectSubclass<TransitCardPrivate>) @extends gtk::Grid, gtk::Widget;
|
||||||
}
|
}
|
||||||
|
|
||||||
impl TransitCard {
|
impl Default for TransitCard {
|
||||||
pub fn new() -> Self {
|
fn default() -> Self {
|
||||||
let s: Self = Object::builder().build();
|
let s: Self = Object::builder().build();
|
||||||
s.add_css_class("card");
|
s.add_css_class("card");
|
||||||
s.set_column_homogeneous(true);
|
s.set_column_homogeneous(true);
|
||||||
|
@ -48,7 +48,9 @@ impl TransitCard {
|
||||||
|
|
||||||
s
|
s
|
||||||
}
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
impl TransitCard {
|
||||||
pub fn update_transit(&self, transit_info: &SunMoon) {
|
pub fn update_transit(&self, transit_info: &SunMoon) {
|
||||||
self.imp()
|
self.imp()
|
||||||
.sunrise
|
.sunrise
|
||||||
|
|
|
@ -7,18 +7,11 @@ use glib::Object;
|
||||||
use gtk::{prelude::*, subclass::prelude::*};
|
use gtk::{prelude::*, subclass::prelude::*};
|
||||||
use std::{cell::RefCell, f64::consts::PI, rc::Rc};
|
use std::{cell::RefCell, f64::consts::PI, rc::Rc};
|
||||||
|
|
||||||
|
#[derive(Default)]
|
||||||
pub struct TransitClockPrivate {
|
pub struct TransitClockPrivate {
|
||||||
info: Rc<RefCell<Option<SunMoon>>>,
|
info: Rc<RefCell<Option<SunMoon>>>,
|
||||||
}
|
}
|
||||||
|
|
||||||
impl Default for TransitClockPrivate {
|
|
||||||
fn default() -> Self {
|
|
||||||
Self {
|
|
||||||
info: Rc::new(RefCell::new(None)),
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
#[glib::object_subclass]
|
#[glib::object_subclass]
|
||||||
impl ObjectSubclass for TransitClockPrivate {
|
impl ObjectSubclass for TransitClockPrivate {
|
||||||
const NAME: &'static str = "TransitClock";
|
const NAME: &'static str = "TransitClock";
|
||||||
|
@ -34,8 +27,8 @@ glib::wrapper! {
|
||||||
pub struct TransitClock(ObjectSubclass<TransitClockPrivate>) @extends gtk::DrawingArea, gtk::Widget;
|
pub struct TransitClock(ObjectSubclass<TransitClockPrivate>) @extends gtk::DrawingArea, gtk::Widget;
|
||||||
}
|
}
|
||||||
|
|
||||||
impl TransitClock {
|
impl Default for TransitClock {
|
||||||
pub fn new() -> Self {
|
fn default() -> Self {
|
||||||
let s: Self = Object::builder().build();
|
let s: Self = Object::builder().build();
|
||||||
s.set_width_request(500);
|
s.set_width_request(500);
|
||||||
s.set_height_request(500);
|
s.set_height_request(500);
|
||||||
|
@ -100,7 +93,9 @@ impl TransitClock {
|
||||||
|
|
||||||
s
|
s
|
||||||
}
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
impl TransitClock {
|
||||||
pub fn update_transit(&self, transit_info: SunMoon) {
|
pub fn update_transit(&self, transit_info: SunMoon) {
|
||||||
*self.imp().info.borrow_mut() = Some(transit_info);
|
*self.imp().info.borrow_mut() = Some(transit_info);
|
||||||
self.queue_draw();
|
self.queue_draw();
|
||||||
|
|
|
@ -90,7 +90,7 @@ pub fn main() {
|
||||||
tx: Arc::new(RwLock::new(None)),
|
tx: Arc::new(RwLock::new(None)),
|
||||||
};
|
};
|
||||||
|
|
||||||
let _ = runtime.spawn({
|
runtime.spawn({
|
||||||
let core = core.clone();
|
let core = core.clone();
|
||||||
async move {
|
async move {
|
||||||
let soluna_client = SolunaClient::new();
|
let soluna_client = SolunaClient::new();
|
||||||
|
@ -110,8 +110,10 @@ pub fn main() {
|
||||||
|
|
||||||
if let Some(ref gtk_tx) = *core.tx.read().unwrap() {
|
if let Some(ref gtk_tx) = *core.tx.read().unwrap() {
|
||||||
let _ = gtk_tx.send(Message::Refresh(state.clone()));
|
let _ = gtk_tx.send(Message::Refresh(state.clone()));
|
||||||
}
|
|
||||||
std::thread::sleep(std::time::Duration::from_secs(60));
|
std::thread::sleep(std::time::Duration::from_secs(60));
|
||||||
|
} else {
|
||||||
|
std::thread::sleep(std::time::Duration::from_secs(1));
|
||||||
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
});
|
});
|
||||||
|
@ -134,8 +136,6 @@ pub fn main() {
|
||||||
Continue(true)
|
Continue(true)
|
||||||
}
|
}
|
||||||
});
|
});
|
||||||
|
|
||||||
std::thread::spawn(move || {});
|
|
||||||
});
|
});
|
||||||
|
|
||||||
let args: Vec<String> = env::args().collect();
|
let args: Vec<String> = env::args().collect();
|
||||||
|
|
|
@ -1,11 +1,10 @@
|
||||||
use chrono;
|
|
||||||
use chrono::prelude::*;
|
use chrono::prelude::*;
|
||||||
use lazy_static::lazy_static;
|
use lazy_static::lazy_static;
|
||||||
use serde_derive::{Deserialize, Serialize};
|
use serde_derive::{Deserialize, Serialize};
|
||||||
use std::collections::HashMap;
|
use std::collections::HashMap;
|
||||||
|
|
||||||
// http://astropixels.com/ephemeris/soleq2001.html
|
// http://astropixels.com/ephemeris/soleq2001.html
|
||||||
const SOLSTICE_TEXT: &'static str = "
|
const SOLSTICE_TEXT: &str = "
|
||||||
2001 Mar 20 13:31 Jun 21 07:38 Sep 22 23:05 Dec 21 19:22
|
2001 Mar 20 13:31 Jun 21 07:38 Sep 22 23:05 Dec 21 19:22
|
||||||
2002 Mar 20 19:16 Jun 21 13:25 Sep 23 04:56 Dec 22 01:15
|
2002 Mar 20 19:16 Jun 21 13:25 Sep 23 04:56 Dec 22 01:15
|
||||||
2003 Mar 21 01:00 Jun 21 19:11 Sep 23 10:47 Dec 22 07:04
|
2003 Mar 21 01:00 Jun 21 19:11 Sep 23 10:47 Dec 22 07:04
|
||||||
|
@ -91,12 +90,14 @@ impl Event {
|
||||||
}
|
}
|
||||||
|
|
||||||
fn parse_time<'a>(
|
fn parse_time<'a>(
|
||||||
jaro: &str,
|
year: &str,
|
||||||
iter: impl Iterator<Item = &'a str>,
|
iter: impl Iterator<Item = &'a str>,
|
||||||
) -> chrono::DateTime<chrono::Utc> {
|
) -> chrono::DateTime<chrono::Utc> {
|
||||||
let partoj = iter.collect::<Vec<&str>>();
|
let parts = iter.collect::<Vec<&str>>();
|
||||||
let p = format!("{} {} {} {}", jaro, partoj[0], partoj[1], partoj[2]);
|
let p = format!("{} {} {} {}", year, parts[0], parts[1], parts[2]);
|
||||||
chrono::Utc.datetime_from_str(&p, "%Y %b %d %H:%M").unwrap()
|
NaiveDateTime::parse_from_str(&p, "%Y %b %d %H:%M")
|
||||||
|
.unwrap()
|
||||||
|
.and_utc()
|
||||||
}
|
}
|
||||||
|
|
||||||
fn parse_line(year: &str, rest: &[&str]) -> YearlyEvents {
|
fn parse_line(year: &str, rest: &[&str]) -> YearlyEvents {
|
||||||
|
@ -118,7 +119,7 @@ fn parse_events() -> Vec<Option<YearlyEvents>> {
|
||||||
.lines()
|
.lines()
|
||||||
.map(|line| {
|
.map(|line| {
|
||||||
match line
|
match line
|
||||||
.split(" ")
|
.split(' ')
|
||||||
.filter(|elem| !elem.is_empty())
|
.filter(|elem| !elem.is_empty())
|
||||||
.collect::<Vec<&str>>()
|
.collect::<Vec<&str>>()
|
||||||
.as_slice()
|
.as_slice()
|
||||||
|
@ -134,7 +135,7 @@ pub struct Solstices(HashMap<i32, YearlyEvents>);
|
||||||
|
|
||||||
impl Solstices {
|
impl Solstices {
|
||||||
pub fn yearly_events(&self, year: i32) -> Option<YearlyEvents> {
|
pub fn yearly_events(&self, year: i32) -> Option<YearlyEvents> {
|
||||||
self.0.get(&year).map(|c| c.clone())
|
self.0.get(&year).copied()
|
||||||
}
|
}
|
||||||
|
|
||||||
pub fn next_event(&self, date: chrono::DateTime<chrono::Utc>) -> Option<Event> {
|
pub fn next_event(&self, date: chrono::DateTime<chrono::Utc>) -> Option<Event> {
|
||||||
|
@ -142,17 +143,17 @@ impl Solstices {
|
||||||
match year_events {
|
match year_events {
|
||||||
Some(year_events) => {
|
Some(year_events) => {
|
||||||
if date <= year_events.spring_equinox {
|
if date <= year_events.spring_equinox {
|
||||||
Some(Event::SpringEquinox(year_events.spring_equinox.clone()))
|
Some(Event::SpringEquinox(year_events.spring_equinox))
|
||||||
} else if date <= year_events.summer_solstice {
|
} else if date <= year_events.summer_solstice {
|
||||||
Some(Event::SummerSolstice(year_events.summer_solstice.clone()))
|
Some(Event::SummerSolstice(year_events.summer_solstice))
|
||||||
} else if date <= year_events.autumn_equinox {
|
} else if date <= year_events.autumn_equinox {
|
||||||
Some(Event::AutumnEquinox(year_events.autumn_equinox.clone()))
|
Some(Event::AutumnEquinox(year_events.autumn_equinox))
|
||||||
} else if date <= year_events.winter_solstice {
|
} else if date <= year_events.winter_solstice {
|
||||||
Some(Event::WinterSolstice(year_events.winter_solstice.clone()))
|
Some(Event::WinterSolstice(year_events.winter_solstice))
|
||||||
} else {
|
} else {
|
||||||
self.0
|
self.0
|
||||||
.get(&(date.year() + 1))
|
.get(&(date.year() + 1))
|
||||||
.map(|_| Event::SpringEquinox(year_events.spring_equinox.clone()))
|
.map(|_| Event::SpringEquinox(year_events.spring_equinox))
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
None => None,
|
None => None,
|
||||||
|
@ -165,7 +166,7 @@ impl From<Vec<Option<YearlyEvents>>> for Solstices {
|
||||||
Solstices(event_list.iter().fold(HashMap::new(), |mut m, record| {
|
Solstices(event_list.iter().fold(HashMap::new(), |mut m, record| {
|
||||||
match record {
|
match record {
|
||||||
Some(record) => {
|
Some(record) => {
|
||||||
m.insert(record.year, record.clone());
|
m.insert(record.year, *record);
|
||||||
}
|
}
|
||||||
None => (),
|
None => (),
|
||||||
}
|
}
|
||||||
|
@ -177,3 +178,24 @@ impl From<Vec<Option<YearlyEvents>>> for Solstices {
|
||||||
lazy_static! {
|
lazy_static! {
|
||||||
pub static ref EVENTS: Solstices = Solstices::from(parse_events());
|
pub static ref EVENTS: Solstices = Solstices::from(parse_events());
|
||||||
}
|
}
|
||||||
|
|
||||||
|
#[cfg(test)]
|
||||||
|
mod test {
|
||||||
|
use chrono::{NaiveDate, NaiveDateTime};
|
||||||
|
|
||||||
|
#[test]
|
||||||
|
fn it_can_parse_a_solstice_time() {
|
||||||
|
let p = "2001 Mar 20 13:31".to_owned();
|
||||||
|
let parsed_date = NaiveDateTime::parse_from_str(&p, "%Y %b %d %H:%M")
|
||||||
|
.unwrap()
|
||||||
|
.and_utc();
|
||||||
|
assert_eq!(
|
||||||
|
parsed_date,
|
||||||
|
NaiveDate::from_ymd_opt(2001, 03, 20)
|
||||||
|
.unwrap()
|
||||||
|
.and_hms_opt(13, 31, 0)
|
||||||
|
.unwrap()
|
||||||
|
.and_utc()
|
||||||
|
);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
|
@ -4,7 +4,6 @@
|
||||||
use chrono::{DateTime, Duration, Local, NaiveTime, Offset, TimeZone, Timelike, Utc};
|
use chrono::{DateTime, Duration, Local, NaiveTime, Offset, TimeZone, Timelike, Utc};
|
||||||
use geo_types::{Latitude, Longitude};
|
use geo_types::{Latitude, Longitude};
|
||||||
use memorycache::MemoryCache;
|
use memorycache::MemoryCache;
|
||||||
use reqwest;
|
|
||||||
use serde::Deserialize;
|
use serde::Deserialize;
|
||||||
|
|
||||||
const ENDPOINT: &str = "https://api.solunar.org/solunar";
|
const ENDPOINT: &str = "https://api.solunar.org/solunar";
|
||||||
|
@ -26,8 +25,8 @@ impl SunMoon {
|
||||||
|
|
||||||
let sunrise = parse_time(val.sunrise).unwrap();
|
let sunrise = parse_time(val.sunrise).unwrap();
|
||||||
let sunset = parse_time(val.sunset).unwrap();
|
let sunset = parse_time(val.sunset).unwrap();
|
||||||
let moonrise = val.moonrise.and_then(|v| parse_time(v));
|
let moonrise = val.moonrise.and_then(parse_time);
|
||||||
let moonset = val.moonset.and_then(|v| parse_time(v));
|
let moonset = val.moonset.and_then(parse_time);
|
||||||
|
|
||||||
Self {
|
Self {
|
||||||
sunrise,
|
sunrise,
|
||||||
|
@ -82,7 +81,7 @@ impl SolunaClient {
|
||||||
pub fn new() -> Self {
|
pub fn new() -> Self {
|
||||||
Self {
|
Self {
|
||||||
client: reqwest::Client::new(),
|
client: reqwest::Client::new(),
|
||||||
memory_cache: MemoryCache::new(),
|
memory_cache: MemoryCache::default(),
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
@ -110,7 +109,7 @@ impl SolunaClient {
|
||||||
.get(reqwest::header::EXPIRES)
|
.get(reqwest::header::EXPIRES)
|
||||||
.and_then(|header| header.to_str().ok())
|
.and_then(|header| header.to_str().ok())
|
||||||
.and_then(|expiration| DateTime::parse_from_rfc2822(expiration).ok())
|
.and_then(|expiration| DateTime::parse_from_rfc2822(expiration).ok())
|
||||||
.map(|dt_local| DateTime::<Utc>::from(dt_local))
|
.map(DateTime::<Utc>::from)
|
||||||
.unwrap_or(
|
.unwrap_or(
|
||||||
Local::now()
|
Local::now()
|
||||||
.with_hour(0)
|
.with_hour(0)
|
||||||
|
|
|
@ -10,7 +10,6 @@ Luminescent Dreams Tools is distributed in the hope that it will be useful, but
|
||||||
You should have received a copy of the GNU General Public License along with Lumeto. If not, see <https://www.gnu.org/licenses/>.
|
You should have received a copy of the GNU General Public License along with Lumeto. If not, see <https://www.gnu.org/licenses/>.
|
||||||
*/
|
*/
|
||||||
|
|
||||||
use date_time_tz::DateTimeTz;
|
|
||||||
use types::{Recordable, Timestamp};
|
use types::{Recordable, Timestamp};
|
||||||
|
|
||||||
/// This trait is used for constructing queries for searching the database.
|
/// This trait is used for constructing queries for searching the database.
|
||||||
|
|
|
@ -33,19 +33,13 @@ use std::{fmt, str::FromStr};
|
||||||
#[derive(Clone, Debug, PartialEq, Eq, PartialOrd, Ord)]
|
#[derive(Clone, Debug, PartialEq, Eq, PartialOrd, Ord)]
|
||||||
pub struct DateTimeTz(pub chrono::DateTime<chrono_tz::Tz>);
|
pub struct DateTimeTz(pub chrono::DateTime<chrono_tz::Tz>);
|
||||||
|
|
||||||
impl DateTimeTz {
|
impl fmt::Display for DateTimeTz {
|
||||||
pub fn map<F>(&self, f: F) -> DateTimeTz
|
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> Result<(), fmt::Error> {
|
||||||
where
|
|
||||||
F: FnOnce(chrono::DateTime<chrono_tz::Tz>) -> chrono::DateTime<chrono_tz::Tz>,
|
|
||||||
{
|
|
||||||
DateTimeTz(f(self.0))
|
|
||||||
}
|
|
||||||
|
|
||||||
pub fn to_string(&self) -> String {
|
|
||||||
if self.0.timezone() == UTC {
|
if self.0.timezone() == UTC {
|
||||||
self.0.to_rfc3339_opts(SecondsFormat::Secs, true)
|
write!(f, "{}", self.0.to_rfc3339_opts(SecondsFormat::Secs, true))
|
||||||
} else {
|
} else {
|
||||||
format!(
|
write!(
|
||||||
|
f,
|
||||||
"{} {}",
|
"{} {}",
|
||||||
self.0
|
self.0
|
||||||
.with_timezone(&chrono_tz::Etc::UTC)
|
.with_timezone(&chrono_tz::Etc::UTC)
|
||||||
|
@ -56,11 +50,20 @@ impl DateTimeTz {
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
impl DateTimeTz {
|
||||||
|
pub fn map<F>(&self, f: F) -> DateTimeTz
|
||||||
|
where
|
||||||
|
F: FnOnce(chrono::DateTime<chrono_tz::Tz>) -> chrono::DateTime<chrono_tz::Tz>,
|
||||||
|
{
|
||||||
|
DateTimeTz(f(self.0))
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
impl std::str::FromStr for DateTimeTz {
|
impl std::str::FromStr for DateTimeTz {
|
||||||
type Err = chrono::ParseError;
|
type Err = chrono::ParseError;
|
||||||
|
|
||||||
fn from_str(s: &str) -> Result<Self, Self::Err> {
|
fn from_str(s: &str) -> Result<Self, Self::Err> {
|
||||||
let v: Vec<&str> = s.split_terminator(" ").collect();
|
let v: Vec<&str> = s.split_terminator(' ').collect();
|
||||||
if v.len() == 2 {
|
if v.len() == 2 {
|
||||||
let tz = v[1].parse::<chrono_tz::Tz>().unwrap();
|
let tz = v[1].parse::<chrono_tz::Tz>().unwrap();
|
||||||
chrono::DateTime::parse_from_rfc3339(v[0]).map(|ts| DateTimeTz(ts.with_timezone(&tz)))
|
chrono::DateTime::parse_from_rfc3339(v[0]).map(|ts| DateTimeTz(ts.with_timezone(&tz)))
|
||||||
|
@ -86,9 +89,9 @@ impl<'de> Visitor<'de> for DateTimeTzVisitor {
|
||||||
}
|
}
|
||||||
|
|
||||||
fn visit_str<E: de::Error>(self, s: &str) -> Result<Self::Value, E> {
|
fn visit_str<E: de::Error>(self, s: &str) -> Result<Self::Value, E> {
|
||||||
DateTimeTz::from_str(s).or(Err(E::custom(format!(
|
DateTimeTz::from_str(s).or(Err(E::custom(
|
||||||
"string is not a parsable datetime representation"
|
"string is not a parsable datetime representation".to_owned(),
|
||||||
))))
|
)))
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
@ -117,28 +120,43 @@ mod test {
|
||||||
|
|
||||||
#[test]
|
#[test]
|
||||||
fn it_creates_timestamp_with_z() {
|
fn it_creates_timestamp_with_z() {
|
||||||
let t = DateTimeTz(UTC.ymd(2019, 5, 15).and_hms(12, 0, 0));
|
let t = DateTimeTz(UTC.with_ymd_and_hms(2019, 5, 15, 12, 0, 0).unwrap());
|
||||||
assert_eq!(t.to_string(), "2019-05-15T12:00:00Z");
|
assert_eq!(t.to_string(), "2019-05-15T12:00:00Z");
|
||||||
}
|
}
|
||||||
|
|
||||||
#[test]
|
#[test]
|
||||||
fn it_parses_utc_rfc3339_z() {
|
fn it_parses_utc_rfc3339_z() {
|
||||||
let t = DateTimeTz::from_str("2019-05-15T12:00:00Z").unwrap();
|
let t = DateTimeTz::from_str("2019-05-15T12:00:00Z").unwrap();
|
||||||
assert_eq!(t, DateTimeTz(UTC.ymd(2019, 5, 15).and_hms(12, 0, 0)));
|
assert_eq!(
|
||||||
|
t,
|
||||||
|
DateTimeTz(UTC.with_ymd_and_hms(2019, 5, 15, 12, 0, 0).unwrap())
|
||||||
|
);
|
||||||
}
|
}
|
||||||
|
|
||||||
#[test]
|
#[test]
|
||||||
fn it_parses_rfc3339_with_offset() {
|
fn it_parses_rfc3339_with_offset() {
|
||||||
let t = DateTimeTz::from_str("2019-05-15T12:00:00-06:00").unwrap();
|
let t = DateTimeTz::from_str("2019-05-15T12:00:00-06:00").unwrap();
|
||||||
assert_eq!(t, DateTimeTz(UTC.ymd(2019, 5, 15).and_hms(18, 0, 0)));
|
assert_eq!(
|
||||||
|
t,
|
||||||
|
DateTimeTz(UTC.with_ymd_and_hms(2019, 5, 15, 18, 0, 0).unwrap())
|
||||||
|
);
|
||||||
}
|
}
|
||||||
|
|
||||||
#[test]
|
#[test]
|
||||||
fn it_parses_rfc3339_with_tz() {
|
fn it_parses_rfc3339_with_tz() {
|
||||||
let t = DateTimeTz::from_str("2019-06-15T19:00:00Z US/Arizona").unwrap();
|
let t = DateTimeTz::from_str("2019-06-15T19:00:00Z US/Arizona").unwrap();
|
||||||
assert_eq!(t, DateTimeTz(UTC.ymd(2019, 6, 15).and_hms(19, 0, 0)));
|
assert_eq!(
|
||||||
assert_eq!(t, DateTimeTz(Arizona.ymd(2019, 6, 15).and_hms(12, 0, 0)));
|
t,
|
||||||
assert_eq!(t, DateTimeTz(Central.ymd(2019, 6, 15).and_hms(14, 0, 0)));
|
DateTimeTz(UTC.with_ymd_and_hms(2019, 6, 15, 19, 0, 0).unwrap())
|
||||||
|
);
|
||||||
|
assert_eq!(
|
||||||
|
t,
|
||||||
|
DateTimeTz(Arizona.with_ymd_and_hms(2019, 6, 15, 12, 0, 0).unwrap())
|
||||||
|
);
|
||||||
|
assert_eq!(
|
||||||
|
t,
|
||||||
|
DateTimeTz(Central.with_ymd_and_hms(2019, 6, 15, 14, 0, 0).unwrap())
|
||||||
|
);
|
||||||
assert_eq!(t.to_string(), "2019-06-15T19:00:00Z US/Arizona");
|
assert_eq!(t.to_string(), "2019-06-15T19:00:00Z US/Arizona");
|
||||||
}
|
}
|
||||||
|
|
||||||
|
@ -172,6 +190,9 @@ mod test {
|
||||||
fn it_json_parses() {
|
fn it_json_parses() {
|
||||||
let t =
|
let t =
|
||||||
serde_json::from_str::<DateTimeTz>("\"2019-06-15T19:00:00Z America/Phoenix\"").unwrap();
|
serde_json::from_str::<DateTimeTz>("\"2019-06-15T19:00:00Z America/Phoenix\"").unwrap();
|
||||||
assert_eq!(t, DateTimeTz(Phoenix.ymd(2019, 6, 15).and_hms(12, 0, 0)));
|
assert_eq!(
|
||||||
|
t,
|
||||||
|
DateTimeTz(Phoenix.with_ymd_and_hms(2019, 6, 15, 12, 0, 0).unwrap())
|
||||||
|
);
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
|
@ -47,7 +47,7 @@ where
|
||||||
.read(true)
|
.read(true)
|
||||||
.append(true)
|
.append(true)
|
||||||
.create(true)
|
.create(true)
|
||||||
.open(&path)
|
.open(path)
|
||||||
.map_err(EmseriesReadError::IOError)?;
|
.map_err(EmseriesReadError::IOError)?;
|
||||||
|
|
||||||
let records = Series::load_file(&f)?;
|
let records = Series::load_file(&f)?;
|
||||||
|
@ -88,8 +88,8 @@ where
|
||||||
/// Put a new record into the database. A unique id will be assigned to the record and
|
/// Put a new record into the database. A unique id will be assigned to the record and
|
||||||
/// returned.
|
/// returned.
|
||||||
pub fn put(&mut self, entry: T) -> Result<UniqueId, EmseriesWriteError> {
|
pub fn put(&mut self, entry: T) -> Result<UniqueId, EmseriesWriteError> {
|
||||||
let uuid = UniqueId::new();
|
let uuid = UniqueId::default();
|
||||||
self.update(uuid.clone(), entry).and_then(|_| Ok(uuid))
|
self.update(uuid.clone(), entry).map(|_| uuid)
|
||||||
}
|
}
|
||||||
|
|
||||||
/// Update an existing record. The `UniqueId` of the record passed into this function must match
|
/// Update an existing record. The `UniqueId` of the record passed into this function must match
|
||||||
|
@ -138,7 +138,7 @@ where
|
||||||
}
|
}
|
||||||
|
|
||||||
/// Get all of the records in the database.
|
/// Get all of the records in the database.
|
||||||
pub fn records<'s>(&'s self) -> impl Iterator<Item = (&'s UniqueId, &'s T)> + 's {
|
pub fn records(&self) -> impl Iterator<Item = (&UniqueId, &T)> {
|
||||||
self.records.iter()
|
self.records.iter()
|
||||||
}
|
}
|
||||||
|
|
||||||
|
@ -166,7 +166,7 @@ where
|
||||||
|
|
||||||
/// Get an exact record from the database based on unique id.
|
/// Get an exact record from the database based on unique id.
|
||||||
pub fn get(&self, uuid: &UniqueId) -> Option<T> {
|
pub fn get(&self, uuid: &UniqueId) -> Option<T> {
|
||||||
self.records.get(uuid).map(|v| v.clone())
|
self.records.get(uuid).cloned()
|
||||||
}
|
}
|
||||||
|
|
||||||
/*
|
/*
|
||||||
|
|
|
@ -55,8 +55,8 @@ impl str::FromStr for Timestamp {
|
||||||
type Err = chrono::ParseError;
|
type Err = chrono::ParseError;
|
||||||
fn from_str(line: &str) -> Result<Self, Self::Err> {
|
fn from_str(line: &str) -> Result<Self, Self::Err> {
|
||||||
DateTimeTz::from_str(line)
|
DateTimeTz::from_str(line)
|
||||||
.map(|dtz| Timestamp::DateTime(dtz))
|
.map(Timestamp::DateTime)
|
||||||
.or(NaiveDate::from_str(line).map(|d| Timestamp::Date(d)))
|
.or(NaiveDate::from_str(line).map(Timestamp::Date))
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
@ -70,8 +70,8 @@ impl Ord for Timestamp {
|
||||||
fn cmp(&self, other: &Timestamp) -> Ordering {
|
fn cmp(&self, other: &Timestamp) -> Ordering {
|
||||||
match (self, other) {
|
match (self, other) {
|
||||||
(Timestamp::DateTime(dt1), Timestamp::DateTime(dt2)) => dt1.cmp(dt2),
|
(Timestamp::DateTime(dt1), Timestamp::DateTime(dt2)) => dt1.cmp(dt2),
|
||||||
(Timestamp::DateTime(dt1), Timestamp::Date(dt2)) => dt1.0.date().naive_utc().cmp(&dt2),
|
(Timestamp::DateTime(dt1), Timestamp::Date(dt2)) => dt1.0.date_naive().cmp(dt2),
|
||||||
(Timestamp::Date(dt1), Timestamp::DateTime(dt2)) => dt1.cmp(&dt2.0.date().naive_utc()),
|
(Timestamp::Date(dt1), Timestamp::DateTime(dt2)) => dt1.cmp(&dt2.0.date_naive()),
|
||||||
(Timestamp::Date(dt1), Timestamp::Date(dt2)) => dt1.cmp(dt2),
|
(Timestamp::Date(dt1), Timestamp::Date(dt2)) => dt1.cmp(dt2),
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
@ -105,11 +105,9 @@ pub trait Recordable {
|
||||||
#[derive(Clone, Debug, Eq, Hash, PartialEq, Deserialize, Serialize)]
|
#[derive(Clone, Debug, Eq, Hash, PartialEq, Deserialize, Serialize)]
|
||||||
pub struct UniqueId(Uuid);
|
pub struct UniqueId(Uuid);
|
||||||
|
|
||||||
impl UniqueId {
|
impl Default for UniqueId {
|
||||||
/// Create a new V4 UUID (this is the most common type in use these days).
|
fn default() -> Self {
|
||||||
pub fn new() -> UniqueId {
|
Self(Uuid::new_v4())
|
||||||
let id = Uuid::new_v4();
|
|
||||||
UniqueId(id)
|
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
@ -120,14 +118,14 @@ impl str::FromStr for UniqueId {
|
||||||
fn from_str(val: &str) -> Result<Self, Self::Err> {
|
fn from_str(val: &str) -> Result<Self, Self::Err> {
|
||||||
Uuid::parse_str(val)
|
Uuid::parse_str(val)
|
||||||
.map(UniqueId)
|
.map(UniqueId)
|
||||||
.map_err(|err| EmseriesReadError::UUIDParseError(err))
|
.map_err(EmseriesReadError::UUIDParseError)
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
impl fmt::Display for UniqueId {
|
impl fmt::Display for UniqueId {
|
||||||
/// Convert to a hyphenated string
|
/// Convert to a hyphenated string
|
||||||
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> Result<(), std::fmt::Error> {
|
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> Result<(), std::fmt::Error> {
|
||||||
write!(f, "{}", self.0.to_hyphenated().to_string())
|
write!(f, "{}", self.0.to_hyphenated())
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
@ -146,7 +144,7 @@ where
|
||||||
type Err = EmseriesReadError;
|
type Err = EmseriesReadError;
|
||||||
|
|
||||||
fn from_str(line: &str) -> Result<Self, Self::Err> {
|
fn from_str(line: &str) -> Result<Self, Self::Err> {
|
||||||
serde_json::from_str(&line).map_err(|err| EmseriesReadError::JSONParseError(err))
|
serde_json::from_str(line).map_err(EmseriesReadError::JSONParseError)
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
@ -184,7 +182,9 @@ mod test {
|
||||||
fn timestamp_parses_datetimetz_without_timezone() {
|
fn timestamp_parses_datetimetz_without_timezone() {
|
||||||
assert_eq!(
|
assert_eq!(
|
||||||
"2003-11-10T06:00:00Z".parse::<Timestamp>().unwrap(),
|
"2003-11-10T06:00:00Z".parse::<Timestamp>().unwrap(),
|
||||||
Timestamp::DateTime(DateTimeTz(UTC.ymd(2003, 11, 10).and_hms(6, 0, 0))),
|
Timestamp::DateTime(DateTimeTz(
|
||||||
|
UTC.with_ymd_and_hms(2003, 11, 10, 6, 0, 0).unwrap()
|
||||||
|
)),
|
||||||
);
|
);
|
||||||
}
|
}
|
||||||
|
|
||||||
|
@ -210,7 +210,9 @@ mod test {
|
||||||
assert_eq!(
|
assert_eq!(
|
||||||
rec.data,
|
rec.data,
|
||||||
Some(WeightRecord {
|
Some(WeightRecord {
|
||||||
date: Timestamp::DateTime(DateTimeTz(UTC.ymd(2003, 11, 10).and_hms(6, 0, 0))),
|
date: Timestamp::DateTime(DateTimeTz(
|
||||||
|
UTC.with_ymd_and_hms(2003, 11, 10, 6, 0, 0).unwrap()
|
||||||
|
)),
|
||||||
weight: Weight(77.79109 * KG),
|
weight: Weight(77.79109 * KG),
|
||||||
})
|
})
|
||||||
);
|
);
|
||||||
|
@ -219,7 +221,9 @@ mod test {
|
||||||
#[test]
|
#[test]
|
||||||
fn serialization_output() {
|
fn serialization_output() {
|
||||||
let rec = WeightRecord {
|
let rec = WeightRecord {
|
||||||
date: Timestamp::DateTime(DateTimeTz(UTC.ymd(2003, 11, 10).and_hms(6, 0, 0))),
|
date: Timestamp::DateTime(DateTimeTz(
|
||||||
|
UTC.with_ymd_and_hms(2003, 11, 10, 6, 0, 0).unwrap(),
|
||||||
|
)),
|
||||||
weight: Weight(77.0 * KG),
|
weight: Weight(77.0 * KG),
|
||||||
};
|
};
|
||||||
assert_eq!(
|
assert_eq!(
|
||||||
|
@ -228,7 +232,12 @@ mod test {
|
||||||
);
|
);
|
||||||
|
|
||||||
let rec2 = WeightRecord {
|
let rec2 = WeightRecord {
|
||||||
date: Timestamp::DateTime(Central.ymd(2003, 11, 10).and_hms(0, 0, 0).into()),
|
date: Timestamp::DateTime(
|
||||||
|
Central
|
||||||
|
.with_ymd_and_hms(2003, 11, 10, 0, 0, 0)
|
||||||
|
.unwrap()
|
||||||
|
.into(),
|
||||||
|
),
|
||||||
weight: Weight(77.0 * KG),
|
weight: Weight(77.0 * KG),
|
||||||
};
|
};
|
||||||
assert_eq!(
|
assert_eq!(
|
||||||
|
@ -239,22 +248,28 @@ mod test {
|
||||||
|
|
||||||
#[test]
|
#[test]
|
||||||
fn two_datetimes_can_be_compared() {
|
fn two_datetimes_can_be_compared() {
|
||||||
let time1 = Timestamp::DateTime(DateTimeTz(UTC.ymd(2003, 11, 10).and_hms(6, 0, 0)));
|
let time1 = Timestamp::DateTime(DateTimeTz(
|
||||||
let time2 = Timestamp::DateTime(DateTimeTz(UTC.ymd(2003, 11, 11).and_hms(6, 0, 0)));
|
UTC.with_ymd_and_hms(2003, 11, 10, 6, 0, 0).unwrap(),
|
||||||
|
));
|
||||||
|
let time2 = Timestamp::DateTime(DateTimeTz(
|
||||||
|
UTC.with_ymd_and_hms(2003, 11, 11, 6, 0, 0).unwrap(),
|
||||||
|
));
|
||||||
assert!(time1 < time2);
|
assert!(time1 < time2);
|
||||||
}
|
}
|
||||||
|
|
||||||
#[test]
|
#[test]
|
||||||
fn two_dates_can_be_compared() {
|
fn two_dates_can_be_compared() {
|
||||||
let time1 = Timestamp::Date(NaiveDate::from_ymd(2003, 11, 10));
|
let time1 = Timestamp::Date(NaiveDate::from_ymd_opt(2003, 11, 10).unwrap());
|
||||||
let time2 = Timestamp::Date(NaiveDate::from_ymd(2003, 11, 11));
|
let time2 = Timestamp::Date(NaiveDate::from_ymd_opt(2003, 11, 11).unwrap());
|
||||||
assert!(time1 < time2);
|
assert!(time1 < time2);
|
||||||
}
|
}
|
||||||
|
|
||||||
#[test]
|
#[test]
|
||||||
fn datetime_and_date_can_be_compared() {
|
fn datetime_and_date_can_be_compared() {
|
||||||
let time1 = Timestamp::DateTime(DateTimeTz(UTC.ymd(2003, 11, 10).and_hms(6, 0, 0)));
|
let time1 = Timestamp::DateTime(DateTimeTz(
|
||||||
let time2 = Timestamp::Date(NaiveDate::from_ymd(2003, 11, 11));
|
UTC.with_ymd_and_hms(2003, 11, 10, 6, 0, 0).unwrap(),
|
||||||
|
));
|
||||||
|
let time2 = Timestamp::Date(NaiveDate::from_ymd_opt(2003, 11, 11).unwrap());
|
||||||
assert!(time1 < time2)
|
assert!(time1 < time2)
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
|
@ -22,7 +22,7 @@ extern crate emseries;
|
||||||
mod test {
|
mod test {
|
||||||
use chrono::prelude::*;
|
use chrono::prelude::*;
|
||||||
use chrono_tz::Etc::UTC;
|
use chrono_tz::Etc::UTC;
|
||||||
use dimensioned::si::{Kilogram, Meter, Second, KG, M, S};
|
use dimensioned::si::{Kilogram, Meter, Second, M, S};
|
||||||
|
|
||||||
use emseries::*;
|
use emseries::*;
|
||||||
|
|
||||||
|
@ -52,31 +52,31 @@ mod test {
|
||||||
fn mk_trips() -> [BikeTrip; 5] {
|
fn mk_trips() -> [BikeTrip; 5] {
|
||||||
[
|
[
|
||||||
BikeTrip {
|
BikeTrip {
|
||||||
datetime: DateTimeTz(UTC.ymd(2011, 10, 29).and_hms(0, 0, 0)),
|
datetime: DateTimeTz(UTC.with_ymd_and_hms(2011, 10, 29, 0, 0, 0).unwrap()),
|
||||||
distance: Distance(58741.055 * M),
|
distance: Distance(58741.055 * M),
|
||||||
duration: Duration(11040.0 * S),
|
duration: Duration(11040.0 * S),
|
||||||
comments: String::from("long time ago"),
|
comments: String::from("long time ago"),
|
||||||
},
|
},
|
||||||
BikeTrip {
|
BikeTrip {
|
||||||
datetime: DateTimeTz(UTC.ymd(2011, 10, 31).and_hms(0, 0, 0)),
|
datetime: DateTimeTz(UTC.with_ymd_and_hms(2011, 10, 31, 0, 0, 0).unwrap()),
|
||||||
distance: Distance(17702.0 * M),
|
distance: Distance(17702.0 * M),
|
||||||
duration: Duration(2880.0 * S),
|
duration: Duration(2880.0 * S),
|
||||||
comments: String::from("day 2"),
|
comments: String::from("day 2"),
|
||||||
},
|
},
|
||||||
BikeTrip {
|
BikeTrip {
|
||||||
datetime: DateTimeTz(UTC.ymd(2011, 11, 02).and_hms(0, 0, 0)),
|
datetime: DateTimeTz(UTC.with_ymd_and_hms(2011, 11, 02, 0, 0, 0).unwrap()),
|
||||||
distance: Distance(41842.945 * M),
|
distance: Distance(41842.945 * M),
|
||||||
duration: Duration(7020.0 * S),
|
duration: Duration(7020.0 * S),
|
||||||
comments: String::from("Do Some Distance!"),
|
comments: String::from("Do Some Distance!"),
|
||||||
},
|
},
|
||||||
BikeTrip {
|
BikeTrip {
|
||||||
datetime: DateTimeTz(UTC.ymd(2011, 11, 04).and_hms(0, 0, 0)),
|
datetime: DateTimeTz(UTC.with_ymd_and_hms(2011, 11, 04, 0, 0, 0).unwrap()),
|
||||||
distance: Distance(34600.895 * M),
|
distance: Distance(34600.895 * M),
|
||||||
duration: Duration(5580.0 * S),
|
duration: Duration(5580.0 * S),
|
||||||
comments: String::from("I did a lot of distance back then"),
|
comments: String::from("I did a lot of distance back then"),
|
||||||
},
|
},
|
||||||
BikeTrip {
|
BikeTrip {
|
||||||
datetime: DateTimeTz(UTC.ymd(2011, 11, 05).and_hms(0, 0, 0)),
|
datetime: DateTimeTz(UTC.with_ymd_and_hms(2011, 11, 05, 0, 0, 0).unwrap()),
|
||||||
distance: Distance(6437.376 * M),
|
distance: Distance(6437.376 * M),
|
||||||
duration: Duration(960.0 * S),
|
duration: Duration(960.0 * S),
|
||||||
comments: String::from("day 5"),
|
comments: String::from("day 5"),
|
||||||
|
@ -122,7 +122,7 @@ mod test {
|
||||||
Some(tr) => {
|
Some(tr) => {
|
||||||
assert_eq!(
|
assert_eq!(
|
||||||
tr.timestamp(),
|
tr.timestamp(),
|
||||||
DateTimeTz(UTC.ymd(2011, 10, 29).and_hms(0, 0, 0)).into()
|
DateTimeTz(UTC.with_ymd_and_hms(2011, 10, 29, 0, 0, 0).unwrap()).into()
|
||||||
);
|
);
|
||||||
assert_eq!(tr.duration, Duration(11040.0 * S));
|
assert_eq!(tr.duration, Duration(11040.0 * S));
|
||||||
assert_eq!(tr.comments, String::from("long time ago"));
|
assert_eq!(tr.comments, String::from("long time ago"));
|
||||||
|
@ -145,7 +145,7 @@ mod test {
|
||||||
|
|
||||||
let v: Vec<(&UniqueId, &BikeTrip)> = ts
|
let v: Vec<(&UniqueId, &BikeTrip)> = ts
|
||||||
.search(exact_time(
|
.search(exact_time(
|
||||||
DateTimeTz(UTC.ymd(2011, 10, 31).and_hms(0, 0, 0)).into(),
|
DateTimeTz(UTC.with_ymd_and_hms(2011, 10, 31, 0, 0, 0).unwrap()).into(),
|
||||||
))
|
))
|
||||||
.collect();
|
.collect();
|
||||||
assert_eq!(v.len(), 1);
|
assert_eq!(v.len(), 1);
|
||||||
|
@ -166,9 +166,9 @@ mod test {
|
||||||
|
|
||||||
let v: Vec<(&UniqueId, &BikeTrip)> = ts.search_sorted(
|
let v: Vec<(&UniqueId, &BikeTrip)> = ts.search_sorted(
|
||||||
time_range(
|
time_range(
|
||||||
DateTimeTz(UTC.ymd(2011, 10, 31).and_hms(0, 0, 0)).into(),
|
DateTimeTz(UTC.with_ymd_and_hms(2011, 10, 31, 0, 0, 0).unwrap()).into(),
|
||||||
true,
|
true,
|
||||||
DateTimeTz(UTC.ymd(2011, 11, 04).and_hms(0, 0, 0)).into(),
|
DateTimeTz(UTC.with_ymd_and_hms(2011, 11, 04, 0, 0, 0).unwrap()).into(),
|
||||||
true,
|
true,
|
||||||
),
|
),
|
||||||
|l, r| l.1.timestamp().cmp(&r.1.timestamp()),
|
|l, r| l.1.timestamp().cmp(&r.1.timestamp()),
|
||||||
|
@ -199,9 +199,9 @@ mod test {
|
||||||
.expect("expect the time series to open correctly");
|
.expect("expect the time series to open correctly");
|
||||||
let v: Vec<(&UniqueId, &BikeTrip)> = ts.search_sorted(
|
let v: Vec<(&UniqueId, &BikeTrip)> = ts.search_sorted(
|
||||||
time_range(
|
time_range(
|
||||||
DateTimeTz(UTC.ymd(2011, 10, 31).and_hms(0, 0, 0)).into(),
|
DateTimeTz(UTC.with_ymd_and_hms(2011, 10, 31, 0, 0, 0).unwrap()).into(),
|
||||||
true,
|
true,
|
||||||
DateTimeTz(UTC.ymd(2011, 11, 04).and_hms(0, 0, 0)).into(),
|
DateTimeTz(UTC.with_ymd_and_hms(2011, 11, 04, 0, 0, 0).unwrap()).into(),
|
||||||
true,
|
true,
|
||||||
),
|
),
|
||||||
|l, r| l.1.timestamp().cmp(&r.1.timestamp()),
|
|l, r| l.1.timestamp().cmp(&r.1.timestamp()),
|
||||||
|
@ -233,9 +233,9 @@ mod test {
|
||||||
.expect("expect the time series to open correctly");
|
.expect("expect the time series to open correctly");
|
||||||
let v: Vec<(&UniqueId, &BikeTrip)> = ts.search_sorted(
|
let v: Vec<(&UniqueId, &BikeTrip)> = ts.search_sorted(
|
||||||
time_range(
|
time_range(
|
||||||
DateTimeTz(UTC.ymd(2011, 10, 31).and_hms(0, 0, 0)).into(),
|
DateTimeTz(UTC.with_ymd_and_hms(2011, 10, 31, 0, 0, 0).unwrap()).into(),
|
||||||
true,
|
true,
|
||||||
DateTimeTz(UTC.ymd(2011, 11, 04).and_hms(0, 0, 0)).into(),
|
DateTimeTz(UTC.with_ymd_and_hms(2011, 11, 04, 0, 0, 0).unwrap()).into(),
|
||||||
true,
|
true,
|
||||||
),
|
),
|
||||||
|l, r| l.1.timestamp().cmp(&r.1.timestamp()),
|
|l, r| l.1.timestamp().cmp(&r.1.timestamp()),
|
||||||
|
@ -252,9 +252,9 @@ mod test {
|
||||||
.expect("expect the time series to open correctly");
|
.expect("expect the time series to open correctly");
|
||||||
let v: Vec<(&UniqueId, &BikeTrip)> = ts.search_sorted(
|
let v: Vec<(&UniqueId, &BikeTrip)> = ts.search_sorted(
|
||||||
time_range(
|
time_range(
|
||||||
DateTimeTz(UTC.ymd(2011, 10, 31).and_hms(0, 0, 0)).into(),
|
DateTimeTz(UTC.with_ymd_and_hms(2011, 10, 31, 0, 0, 0).unwrap()).into(),
|
||||||
true,
|
true,
|
||||||
DateTimeTz(UTC.ymd(2011, 11, 05).and_hms(0, 0, 0)).into(),
|
DateTimeTz(UTC.with_ymd_and_hms(2011, 11, 05, 0, 0, 0).unwrap()).into(),
|
||||||
true,
|
true,
|
||||||
),
|
),
|
||||||
|l, r| l.1.timestamp().cmp(&r.1.timestamp()),
|
|l, r| l.1.timestamp().cmp(&r.1.timestamp()),
|
||||||
|
@ -294,7 +294,7 @@ mod test {
|
||||||
Some(trip) => {
|
Some(trip) => {
|
||||||
assert_eq!(
|
assert_eq!(
|
||||||
trip.datetime,
|
trip.datetime,
|
||||||
DateTimeTz(UTC.ymd(2011, 11, 02).and_hms(0, 0, 0))
|
DateTimeTz(UTC.with_ymd_and_hms(2011, 11, 02, 0, 0, 0).unwrap())
|
||||||
);
|
);
|
||||||
assert_eq!(trip.distance, Distance(50000.0 * M));
|
assert_eq!(trip.distance, Distance(50000.0 * M));
|
||||||
assert_eq!(trip.duration, Duration(7020.0 * S));
|
assert_eq!(trip.duration, Duration(7020.0 * S));
|
||||||
|
@ -335,13 +335,13 @@ mod test {
|
||||||
|
|
||||||
let trips: Vec<(&UniqueId, &BikeTrip)> = ts
|
let trips: Vec<(&UniqueId, &BikeTrip)> = ts
|
||||||
.search(exact_time(
|
.search(exact_time(
|
||||||
DateTimeTz(UTC.ymd(2011, 11, 02).and_hms(0, 0, 0)).into(),
|
DateTimeTz(UTC.with_ymd_and_hms(2011, 11, 02, 0, 0, 0).unwrap()).into(),
|
||||||
))
|
))
|
||||||
.collect();
|
.collect();
|
||||||
assert_eq!(trips.len(), 1);
|
assert_eq!(trips.len(), 1);
|
||||||
assert_eq!(
|
assert_eq!(
|
||||||
trips[0].1.datetime,
|
trips[0].1.datetime,
|
||||||
DateTimeTz(UTC.ymd(2011, 11, 02).and_hms(0, 0, 0))
|
DateTimeTz(UTC.with_ymd_and_hms(2011, 11, 02, 0, 0, 0).unwrap())
|
||||||
);
|
);
|
||||||
assert_eq!(trips[0].1.distance, Distance(50000.0 * M));
|
assert_eq!(trips[0].1.distance, Distance(50000.0 * M));
|
||||||
assert_eq!(trips[0].1.duration, Duration(7020.0 * S));
|
assert_eq!(trips[0].1.duration, Duration(7020.0 * S));
|
||||||
|
@ -361,7 +361,6 @@ mod test {
|
||||||
let trip_id = ts.put(trips[0].clone()).expect("expect a successful put");
|
let trip_id = ts.put(trips[0].clone()).expect("expect a successful put");
|
||||||
ts.put(trips[1].clone()).expect("expect a successful put");
|
ts.put(trips[1].clone()).expect("expect a successful put");
|
||||||
ts.put(trips[2].clone()).expect("expect a successful put");
|
ts.put(trips[2].clone()).expect("expect a successful put");
|
||||||
|
|
||||||
ts.delete(&trip_id).expect("successful delete");
|
ts.delete(&trip_id).expect("successful delete");
|
||||||
|
|
||||||
let recs: Vec<(&UniqueId, &BikeTrip)> = ts.records().collect();
|
let recs: Vec<(&UniqueId, &BikeTrip)> = ts.records().collect();
|
||||||
|
|
|
@ -0,0 +1 @@
|
||||||
|
fixtures
|
|
@ -0,0 +1,2 @@
|
||||||
|
fixtures
|
||||||
|
var
|
|
@ -0,0 +1,51 @@
|
||||||
|
[package]
|
||||||
|
name = "file-service"
|
||||||
|
version = "0.1.1"
|
||||||
|
authors = ["savanni@luminescent-dreams.com"]
|
||||||
|
edition = "2018"
|
||||||
|
|
||||||
|
# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html
|
||||||
|
|
||||||
|
[lib]
|
||||||
|
name = "file_service"
|
||||||
|
path = "src/lib.rs"
|
||||||
|
|
||||||
|
[[bin]]
|
||||||
|
name = "file-service"
|
||||||
|
path = "src/main.rs"
|
||||||
|
|
||||||
|
[[bin]]
|
||||||
|
name = "auth-cli"
|
||||||
|
path = "src/bin/cli.rs"
|
||||||
|
|
||||||
|
[target.auth-cli.dependencies]
|
||||||
|
|
||||||
|
[dependencies]
|
||||||
|
base64ct = { version = "1", features = [ "alloc" ] }
|
||||||
|
build_html = { version = "2" }
|
||||||
|
bytes = { version = "1" }
|
||||||
|
chrono = { version = "0.4", features = ["serde"] }
|
||||||
|
clap = { version = "4", features = [ "derive" ] }
|
||||||
|
cookie = { version = "0.17" }
|
||||||
|
futures-util = { version = "0.3" }
|
||||||
|
hex-string = "0.1.0"
|
||||||
|
http = { version = "0.2" }
|
||||||
|
image = "0.23.5"
|
||||||
|
logger = "*"
|
||||||
|
log = { version = "0.4" }
|
||||||
|
mime = "0.3.16"
|
||||||
|
mime_guess = "2.0.3"
|
||||||
|
pretty_env_logger = { version = "0.5" }
|
||||||
|
serde_json = "*"
|
||||||
|
serde = { version = "1.0", features = ["derive"] }
|
||||||
|
sha2 = "0.10"
|
||||||
|
sqlx = { version = "0.7", features = [ "runtime-tokio", "sqlite" ] }
|
||||||
|
thiserror = "1.0.20"
|
||||||
|
tokio = { version = "1", features = [ "full" ] }
|
||||||
|
uuid = { version = "0.4", features = [ "serde", "v4" ] }
|
||||||
|
warp = { version = "0.3" }
|
||||||
|
|
||||||
|
[dev-dependencies]
|
||||||
|
cool_asserts = { version = "2" }
|
||||||
|
tempdir = { version = "0.3" }
|
||||||
|
|
|
@ -0,0 +1 @@
|
||||||
|
[{"jti":"ac3a46c6-3fa1-4d0a-af12-e7d3fefdc878","aud":"savanni","exp":1621351436,"iss":"savanni","iat":1589729036,"sub":"https://savanni.luminescent-dreams.com/file-service/","perms":["admin"]}]
|
|
@ -0,0 +1,13 @@
|
||||||
|
#!/usr/bin/env bash
|
||||||
|
|
||||||
|
set -euo pipefail
|
||||||
|
|
||||||
|
VERSION=`cat Cargo.toml | grep "^version =" | sed -r 's/^version = "(.+)"$/\1/'`
|
||||||
|
|
||||||
|
mkdir -p dist
|
||||||
|
cp ../target/release/file-service dist
|
||||||
|
cp ../target/release/auth-cli dist
|
||||||
|
strip dist/file-service
|
||||||
|
strip dist/auth-cli
|
||||||
|
tar -czf file-service-${VERSION}.tgz dist/
|
||||||
|
|
Binary file not shown.
After Width: | Height: | Size: 23 KiB |
|
@ -0,0 +1,11 @@
|
||||||
|
CREATE TABLE IF NOT EXISTS users (
|
||||||
|
id INTEGER PRIMARY KEY NOT NULL,
|
||||||
|
username TEXT NOT NULL,
|
||||||
|
token TEXT NOT NULL
|
||||||
|
);
|
||||||
|
|
||||||
|
CREATE TABLE IF NOT EXISTS sessions (
|
||||||
|
token TEXT NOT NULL,
|
||||||
|
user_id INTEGER,
|
||||||
|
FOREIGN KEY(user_id) REFERENCES users(id)
|
||||||
|
);
|
|
@ -0,0 +1,40 @@
|
||||||
|
use clap::{Parser, Subcommand};
|
||||||
|
use file_service::{AuthDB, Username};
|
||||||
|
use std::path::PathBuf;
|
||||||
|
|
||||||
|
#[derive(Subcommand, Debug)]
|
||||||
|
enum Commands {
|
||||||
|
AddUser { username: String },
|
||||||
|
DeleteUser { username: String },
|
||||||
|
ListUsers,
|
||||||
|
}
|
||||||
|
|
||||||
|
#[derive(Parser, Debug)]
|
||||||
|
struct Args {
|
||||||
|
#[command(subcommand)]
|
||||||
|
command: Commands,
|
||||||
|
}
|
||||||
|
|
||||||
|
#[tokio::main]
|
||||||
|
pub async fn main() {
|
||||||
|
let args = Args::parse();
|
||||||
|
let authdb = AuthDB::new(PathBuf::from(&std::env::var("AUTHDB").unwrap()))
|
||||||
|
.await
|
||||||
|
.expect("to be able to open the database");
|
||||||
|
|
||||||
|
match args.command {
|
||||||
|
Commands::AddUser { username } => {
|
||||||
|
match authdb.add_user(Username::from(username.clone())).await {
|
||||||
|
Ok(token) => {
|
||||||
|
println!("User {} created. Auth token: {}", username, *token);
|
||||||
|
}
|
||||||
|
Err(err) => {
|
||||||
|
println!("Could not create user {}", username);
|
||||||
|
println!("\tError: {:?}", err);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
Commands::DeleteUser { .. } => {}
|
||||||
|
Commands::ListUsers => {}
|
||||||
|
}
|
||||||
|
}
|
|
@ -0,0 +1,260 @@
|
||||||
|
use build_html::Html;
|
||||||
|
use bytes::Buf;
|
||||||
|
use file_service::WriteFileError;
|
||||||
|
use futures_util::StreamExt;
|
||||||
|
use http::{Error, StatusCode};
|
||||||
|
use std::collections::HashMap;
|
||||||
|
use std::io::Read;
|
||||||
|
use warp::{filters::multipart::FormData, http::Response, multipart::Part};
|
||||||
|
|
||||||
|
use crate::{pages, App, AuthToken, FileId, FileInfo, ReadFileError, SessionToken};
|
||||||
|
|
||||||
|
const CSS: &str = include_str!("../templates/style.css");
|
||||||
|
|
||||||
|
pub async fn handle_index(
|
||||||
|
app: App,
|
||||||
|
token: Option<SessionToken>,
|
||||||
|
) -> Result<Response<String>, Error> {
|
||||||
|
match token {
|
||||||
|
Some(token) => match app.validate_session(token).await {
|
||||||
|
Ok(_) => render_gallery_page(app).await,
|
||||||
|
Err(err) => render_auth_page(Some(format!("session expired: {:?}", err))),
|
||||||
|
},
|
||||||
|
None => render_auth_page(None),
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
pub async fn handle_css() -> Result<Response<String>, Error> {
|
||||||
|
Response::builder()
|
||||||
|
.header("content-type", "text/css")
|
||||||
|
.status(StatusCode::OK)
|
||||||
|
.body(CSS.to_owned())
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn render_auth_page(message: Option<String>) -> Result<Response<String>, Error> {
|
||||||
|
Response::builder()
|
||||||
|
.status(StatusCode::OK)
|
||||||
|
.body(pages::auth(message).to_html_string())
|
||||||
|
}
|
||||||
|
|
||||||
|
pub async fn render_gallery_page(app: App) -> Result<Response<String>, Error> {
|
||||||
|
match app.list_files().await {
|
||||||
|
Ok(ids) => {
|
||||||
|
let mut files = vec![];
|
||||||
|
for id in ids.into_iter() {
|
||||||
|
let file = app.get_file(&id).await;
|
||||||
|
files.push(file);
|
||||||
|
}
|
||||||
|
Response::builder()
|
||||||
|
.header("content-type", "text/html")
|
||||||
|
.status(StatusCode::OK)
|
||||||
|
.body(pages::gallery(files).to_html_string())
|
||||||
|
}
|
||||||
|
Err(_) => Response::builder()
|
||||||
|
.header("content-type", "text/html")
|
||||||
|
.status(StatusCode::INTERNAL_SERVER_ERROR)
|
||||||
|
.body("".to_owned()),
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
pub async fn thumbnail(
|
||||||
|
app: App,
|
||||||
|
id: String,
|
||||||
|
old_etags: Option<String>,
|
||||||
|
) -> Result<Response<Vec<u8>>, Error> {
|
||||||
|
match app.get_file(&FileId::from(id)).await {
|
||||||
|
Ok(file) => serve_file(file.info.clone(), || file.thumbnail(), old_etags),
|
||||||
|
Err(_err) => Response::builder()
|
||||||
|
.status(StatusCode::NOT_FOUND)
|
||||||
|
.body(vec![]),
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
pub async fn file(
|
||||||
|
app: App,
|
||||||
|
id: String,
|
||||||
|
old_etags: Option<String>,
|
||||||
|
) -> Result<Response<Vec<u8>>, Error> {
|
||||||
|
match app.get_file(&FileId::from(id)).await {
|
||||||
|
Ok(file) => serve_file(file.info.clone(), || file.content(), old_etags),
|
||||||
|
Err(_err) => Response::builder()
|
||||||
|
.status(StatusCode::NOT_FOUND)
|
||||||
|
.body(vec![]),
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
pub async fn handle_auth(
|
||||||
|
app: App,
|
||||||
|
form: HashMap<String, String>,
|
||||||
|
) -> Result<http::Response<String>, Error> {
|
||||||
|
match form.get("token") {
|
||||||
|
Some(token) => match app.authenticate(AuthToken::from(token.clone())).await {
|
||||||
|
Ok(Some(session_token)) => Response::builder()
|
||||||
|
.header("location", "/")
|
||||||
|
.header(
|
||||||
|
"set-cookie",
|
||||||
|
format!(
|
||||||
|
"session={}; Secure; HttpOnly; SameSite=Strict",
|
||||||
|
*session_token
|
||||||
|
),
|
||||||
|
)
|
||||||
|
.status(StatusCode::SEE_OTHER)
|
||||||
|
.body("".to_owned()),
|
||||||
|
Ok(None) => render_auth_page(Some("no user found".to_owned())),
|
||||||
|
Err(_) => render_auth_page(Some("invalid auth token".to_owned())),
|
||||||
|
},
|
||||||
|
None => render_auth_page(Some("no token available".to_owned())),
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
pub async fn handle_upload(
|
||||||
|
app: App,
|
||||||
|
token: SessionToken,
|
||||||
|
form: FormData,
|
||||||
|
) -> Result<http::Response<String>, Error> {
|
||||||
|
match app.validate_session(token).await {
|
||||||
|
Ok(Some(_)) => match process_file_upload(app, form).await {
|
||||||
|
Ok(_) => Response::builder()
|
||||||
|
.header("location", "/")
|
||||||
|
.status(StatusCode::SEE_OTHER)
|
||||||
|
.body("".to_owned()),
|
||||||
|
Err(UploadError::FilenameMissing) => Response::builder()
|
||||||
|
.status(StatusCode::BAD_REQUEST)
|
||||||
|
.body("filename is required for all files".to_owned()),
|
||||||
|
Err(UploadError::WriteFileError(err)) => Response::builder()
|
||||||
|
.status(StatusCode::INTERNAL_SERVER_ERROR)
|
||||||
|
.body(format!("could not write to the file system: {:?}", err)),
|
||||||
|
Err(UploadError::WarpError(err)) => Response::builder()
|
||||||
|
.status(StatusCode::INTERNAL_SERVER_ERROR)
|
||||||
|
.body(format!("error with the app framework: {:?}", err)),
|
||||||
|
},
|
||||||
|
_ => Response::builder()
|
||||||
|
.status(StatusCode::UNAUTHORIZED)
|
||||||
|
.body("".to_owned()),
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
fn serve_file<F>(
|
||||||
|
info: FileInfo,
|
||||||
|
file: F,
|
||||||
|
old_etags: Option<String>,
|
||||||
|
) -> http::Result<http::Response<Vec<u8>>>
|
||||||
|
where
|
||||||
|
F: FnOnce() -> Result<Vec<u8>, ReadFileError>,
|
||||||
|
{
|
||||||
|
match old_etags {
|
||||||
|
Some(old_etags) if old_etags != info.hash => Response::builder()
|
||||||
|
.header("content-type", info.file_type)
|
||||||
|
.status(StatusCode::NOT_MODIFIED)
|
||||||
|
.body(vec![]),
|
||||||
|
_ => match file() {
|
||||||
|
Ok(content) => Response::builder()
|
||||||
|
.header("content-type", info.file_type)
|
||||||
|
.header("etag", info.hash)
|
||||||
|
.status(StatusCode::OK)
|
||||||
|
.body(content),
|
||||||
|
Err(_) => Response::builder()
|
||||||
|
.status(StatusCode::INTERNAL_SERVER_ERROR)
|
||||||
|
.body(vec![]),
|
||||||
|
},
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
async fn collect_multipart(
|
||||||
|
mut stream: warp::filters::multipart::FormData,
|
||||||
|
) -> Result<Vec<(Option<String>, Option<String>, Vec<u8>)>, warp::Error> {
|
||||||
|
let mut content: Vec<(Option<String>, Option<String>, Vec<u8>)> = Vec::new();
|
||||||
|
|
||||||
|
while let Some(part) = stream.next().await {
|
||||||
|
match part {
|
||||||
|
Ok(part) => content.push(collect_content(part).await.unwrap()),
|
||||||
|
Err(err) => return Err(err),
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
Ok(content)
|
||||||
|
}
|
||||||
|
|
||||||
|
async fn collect_content(
|
||||||
|
mut part: Part,
|
||||||
|
) -> Result<(Option<String>, Option<String>, Vec<u8>), String> {
|
||||||
|
let mut content: Vec<u8> = Vec::new();
|
||||||
|
|
||||||
|
while let Some(Ok(data)) = part.data().await {
|
||||||
|
let mut reader = data.reader();
|
||||||
|
reader.read_to_end(&mut content).unwrap();
|
||||||
|
}
|
||||||
|
|
||||||
|
Ok((
|
||||||
|
part.content_type().map(|s| s.to_owned()),
|
||||||
|
part.filename().map(|s| s.to_owned()),
|
||||||
|
content,
|
||||||
|
))
|
||||||
|
}
|
||||||
|
|
||||||
|
/*
|
||||||
|
async fn handle_upload(
|
||||||
|
form: warp::filters::multipart::FormData,
|
||||||
|
app: App,
|
||||||
|
) -> warp::http::Result<warp::http::Response<String>> {
|
||||||
|
let files = collect_multipart(form).await;
|
||||||
|
match files {
|
||||||
|
Ok(files) => {
|
||||||
|
for (_, filename, content) in files {
|
||||||
|
match filename {
|
||||||
|
Some(filename) => {
|
||||||
|
app.add_file(filename, content).unwrap();
|
||||||
|
}
|
||||||
|
None => {
|
||||||
|
return warp::http::Response::builder()
|
||||||
|
.status(StatusCode::BAD_REQUEST)
|
||||||
|
.body("".to_owned())
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
Err(_err) => {
|
||||||
|
return warp::http::Response::builder()
|
||||||
|
.status(StatusCode::BAD_REQUEST)
|
||||||
|
.body("".to_owned())
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// println!("file length: {:?}", files.map(|f| f.len()));
|
||||||
|
warp::http::Response::builder()
|
||||||
|
.header("location", "/")
|
||||||
|
.status(StatusCode::SEE_OTHER)
|
||||||
|
.body("".to_owned())
|
||||||
|
}
|
||||||
|
*/
|
||||||
|
|
||||||
|
enum UploadError {
|
||||||
|
FilenameMissing,
|
||||||
|
WriteFileError(WriteFileError),
|
||||||
|
WarpError(warp::Error),
|
||||||
|
}
|
||||||
|
|
||||||
|
impl From<WriteFileError> for UploadError {
|
||||||
|
fn from(err: WriteFileError) -> Self {
|
||||||
|
Self::WriteFileError(err)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
impl From<warp::Error> for UploadError {
|
||||||
|
fn from(err: warp::Error) -> Self {
|
||||||
|
Self::WarpError(err)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
async fn process_file_upload(app: App, form: FormData) -> Result<(), UploadError> {
|
||||||
|
let files = collect_multipart(form).await?;
|
||||||
|
for (_, filename, content) in files {
|
||||||
|
match filename {
|
||||||
|
Some(filename) => {
|
||||||
|
app.add_file(filename, content).await?;
|
||||||
|
}
|
||||||
|
None => return Err(UploadError::FilenameMissing),
|
||||||
|
}
|
||||||
|
}
|
||||||
|
Ok(())
|
||||||
|
}
|
|
@ -0,0 +1,276 @@
|
||||||
|
use build_html::{self, Html, HtmlContainer};
|
||||||
|
|
||||||
|
#[derive(Clone, Debug, Default)]
|
||||||
|
pub struct Attributes(Vec<(String, String)>);
|
||||||
|
|
||||||
|
/*
|
||||||
|
impl FromIterator<(String, String)> for Attributes {
|
||||||
|
fn from_iter<T>(iter: T) -> Self
|
||||||
|
where
|
||||||
|
T: IntoIterator<Item = (String, String)>,
|
||||||
|
{
|
||||||
|
Attributes(iter.collect::<Vec<(String, String)>>())
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
impl FromIterator<(&str, &str)> for Attributes {
|
||||||
|
fn from_iter<T>(iter: T) -> Self
|
||||||
|
where
|
||||||
|
T: IntoIterator<Item = (&str, &str)>,
|
||||||
|
{
|
||||||
|
unimplemented!()
|
||||||
|
}
|
||||||
|
}
|
||||||
|
*/
|
||||||
|
|
||||||
|
impl ToString for Attributes {
|
||||||
|
fn to_string(&self) -> String {
|
||||||
|
self.0
|
||||||
|
.iter()
|
||||||
|
.map(|(key, value)| format!("{}=\"{}\"", key, value))
|
||||||
|
.collect::<Vec<String>>()
|
||||||
|
.join(" ")
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
#[derive(Clone, Debug)]
|
||||||
|
pub struct Form {
|
||||||
|
path: String,
|
||||||
|
method: String,
|
||||||
|
encoding: Option<String>,
|
||||||
|
elements: String,
|
||||||
|
}
|
||||||
|
|
||||||
|
impl Form {
|
||||||
|
pub fn new() -> Self {
|
||||||
|
Self {
|
||||||
|
path: "/".to_owned(),
|
||||||
|
method: "get".to_owned(),
|
||||||
|
encoding: None,
|
||||||
|
elements: "".to_owned(),
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn with_path(mut self, path: &str) -> Self {
|
||||||
|
self.path = path.to_owned();
|
||||||
|
self
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn with_method(mut self, method: &str) -> Self {
|
||||||
|
self.method = method.to_owned();
|
||||||
|
self
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn with_encoding(mut self, encoding: &str) -> Self {
|
||||||
|
self.encoding = Some(encoding.to_owned());
|
||||||
|
self
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
impl Html for Form {
|
||||||
|
fn to_html_string(&self) -> String {
|
||||||
|
let encoding = match self.encoding {
|
||||||
|
Some(ref encoding) => format!("enctype=\"{encoding}\"", encoding = encoding),
|
||||||
|
None => "".to_owned(),
|
||||||
|
};
|
||||||
|
format!(
|
||||||
|
"<form action=\"{path}\" method=\"{method}\" {encoding}\n{elements}\n</form>\n",
|
||||||
|
path = self.path,
|
||||||
|
method = self.method,
|
||||||
|
encoding = encoding,
|
||||||
|
elements = self.elements.to_html_string(),
|
||||||
|
)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
impl HtmlContainer for Form {
|
||||||
|
fn add_html<H: Html>(&mut self, html: H) {
|
||||||
|
self.elements.push_str(&html.to_html_string());
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
#[derive(Clone, Debug)]
|
||||||
|
pub struct Input {
|
||||||
|
ty: String,
|
||||||
|
name: String,
|
||||||
|
id: Option<String>,
|
||||||
|
value: Option<String>,
|
||||||
|
attributes: Attributes,
|
||||||
|
}
|
||||||
|
|
||||||
|
impl Html for Input {
|
||||||
|
fn to_html_string(&self) -> String {
|
||||||
|
let id = match self.id {
|
||||||
|
Some(ref id) => format!("id=\"{}\"", id),
|
||||||
|
None => "".to_owned(),
|
||||||
|
};
|
||||||
|
let value = match self.value {
|
||||||
|
Some(ref value) => format!("value=\"{}\"", value),
|
||||||
|
None => "".to_owned(),
|
||||||
|
};
|
||||||
|
let attrs = self.attributes.to_string();
|
||||||
|
|
||||||
|
format!(
|
||||||
|
"<input type=\"{ty}\" name=\"{name}\" {id} {value} {attrs} />\n",
|
||||||
|
ty = self.ty,
|
||||||
|
name = self.name,
|
||||||
|
id = id,
|
||||||
|
value = value,
|
||||||
|
attrs = attrs,
|
||||||
|
)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
impl Input {
|
||||||
|
pub fn new(ty: &str, name: &str) -> Self {
|
||||||
|
Self {
|
||||||
|
ty: ty.to_owned(),
|
||||||
|
name: name.to_owned(),
|
||||||
|
id: None,
|
||||||
|
value: None,
|
||||||
|
attributes: Attributes::default(),
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn with_id(mut self, val: &str) -> Self {
|
||||||
|
self.id = Some(val.to_owned());
|
||||||
|
self
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn with_value(mut self, val: &str) -> Self {
|
||||||
|
self.value = Some(val.to_owned());
|
||||||
|
self
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn with_attributes<'a>(
|
||||||
|
mut self,
|
||||||
|
values: impl IntoIterator<Item = (&'a str, &'a str)>,
|
||||||
|
) -> Self {
|
||||||
|
self.attributes = Attributes(
|
||||||
|
values
|
||||||
|
.into_iter()
|
||||||
|
.map(|(a, b)| (a.to_owned(), b.to_owned()))
|
||||||
|
.collect::<Vec<(String, String)>>(),
|
||||||
|
);
|
||||||
|
self
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
#[derive(Clone, Debug)]
|
||||||
|
pub struct Label {
|
||||||
|
target: String,
|
||||||
|
text: String,
|
||||||
|
}
|
||||||
|
|
||||||
|
impl Label {
|
||||||
|
pub fn new(target: &str, text: &str) -> Self {
|
||||||
|
Self {
|
||||||
|
target: target.to_owned(),
|
||||||
|
text: text.to_owned(),
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
impl Html for Label {
|
||||||
|
fn to_html_string(&self) -> String {
|
||||||
|
format!(
|
||||||
|
"<label for=\"{target}\">{text}</label>",
|
||||||
|
target = self.target,
|
||||||
|
text = self.text
|
||||||
|
)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
#[derive(Clone, Debug)]
|
||||||
|
pub struct Button {
|
||||||
|
ty: Option<String>,
|
||||||
|
name: Option<String>,
|
||||||
|
label: String,
|
||||||
|
attributes: Attributes,
|
||||||
|
}
|
||||||
|
|
||||||
|
impl Button {
|
||||||
|
pub fn new(label: &str) -> Self {
|
||||||
|
Self {
|
||||||
|
ty: None,
|
||||||
|
name: None,
|
||||||
|
label: label.to_owned(),
|
||||||
|
attributes: Attributes::default(),
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn with_type(mut self, ty: &str) -> Self {
|
||||||
|
self.ty = Some(ty.to_owned());
|
||||||
|
self
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn with_attributes<'a>(
|
||||||
|
mut self,
|
||||||
|
values: impl IntoIterator<Item = (&'a str, &'a str)>,
|
||||||
|
) -> Self {
|
||||||
|
self.attributes = Attributes(
|
||||||
|
values
|
||||||
|
.into_iter()
|
||||||
|
.map(|(a, b)| (a.to_owned(), b.to_owned()))
|
||||||
|
.collect::<Vec<(String, String)>>(),
|
||||||
|
);
|
||||||
|
self
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
impl Html for Button {
|
||||||
|
fn to_html_string(&self) -> String {
|
||||||
|
let ty = match self.ty {
|
||||||
|
Some(ref ty) => format!("type={}", ty),
|
||||||
|
None => "".to_owned(),
|
||||||
|
};
|
||||||
|
let name = match self.name {
|
||||||
|
Some(ref name) => format!("name={}", name),
|
||||||
|
None => "".to_owned(),
|
||||||
|
};
|
||||||
|
format!(
|
||||||
|
"<button {ty} {name} {attrs}>{label}</button>",
|
||||||
|
name = name,
|
||||||
|
label = self.label,
|
||||||
|
attrs = self.attributes.to_string()
|
||||||
|
)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
#[derive(Clone, Debug)]
|
||||||
|
pub struct Image {
|
||||||
|
path: String,
|
||||||
|
attributes: Attributes,
|
||||||
|
}
|
||||||
|
|
||||||
|
impl Image {
|
||||||
|
pub fn new(path: &str) -> Self {
|
||||||
|
Self {
|
||||||
|
path: path.to_owned(),
|
||||||
|
attributes: Attributes::default(),
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn with_attributes<'a>(
|
||||||
|
mut self,
|
||||||
|
values: impl IntoIterator<Item = (&'a str, &'a str)>,
|
||||||
|
) -> Self {
|
||||||
|
self.attributes = Attributes(
|
||||||
|
values
|
||||||
|
.into_iter()
|
||||||
|
.map(|(a, b)| (a.to_owned(), b.to_owned()))
|
||||||
|
.collect::<Vec<(String, String)>>(),
|
||||||
|
);
|
||||||
|
self
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
impl Html for Image {
|
||||||
|
fn to_html_string(&self) -> String {
|
||||||
|
format!(
|
||||||
|
"<img src={path} {attrs} />",
|
||||||
|
path = self.path,
|
||||||
|
attrs = self.attributes.to_string()
|
||||||
|
)
|
||||||
|
}
|
||||||
|
}
|
|
@ -0,0 +1,6 @@
|
||||||
|
mod store;
|
||||||
|
|
||||||
|
pub use store::{
|
||||||
|
AuthDB, AuthError, AuthToken, FileHandle, FileId, FileInfo, ReadFileError, SessionToken, Store,
|
||||||
|
Username, WriteFileError,
|
||||||
|
};
|
|
@ -0,0 +1,161 @@
|
||||||
|
extern crate log;
|
||||||
|
|
||||||
|
use cookie::Cookie;
|
||||||
|
use handlers::{file, handle_auth, handle_css, handle_upload, thumbnail};
|
||||||
|
use std::{
|
||||||
|
collections::{HashMap, HashSet},
|
||||||
|
convert::Infallible,
|
||||||
|
net::{IpAddr, Ipv4Addr, SocketAddr},
|
||||||
|
path::PathBuf,
|
||||||
|
sync::Arc,
|
||||||
|
};
|
||||||
|
use tokio::sync::RwLock;
|
||||||
|
use warp::{Filter, Rejection};
|
||||||
|
|
||||||
|
mod handlers;
|
||||||
|
mod html;
|
||||||
|
mod pages;
|
||||||
|
|
||||||
|
const MAX_UPLOAD: u64 = 15 * 1024 * 1024;
|
||||||
|
|
||||||
|
pub use file_service::{
|
||||||
|
AuthDB, AuthError, AuthToken, FileHandle, FileId, FileInfo, ReadFileError, SessionToken, Store,
|
||||||
|
Username, WriteFileError,
|
||||||
|
};
|
||||||
|
pub use handlers::handle_index;
|
||||||
|
|
||||||
|
#[derive(Clone)]
|
||||||
|
pub struct App {
|
||||||
|
authdb: Arc<RwLock<AuthDB>>,
|
||||||
|
store: Arc<RwLock<Store>>,
|
||||||
|
}
|
||||||
|
|
||||||
|
impl App {
|
||||||
|
pub fn new(authdb: AuthDB, store: Store) -> Self {
|
||||||
|
Self {
|
||||||
|
authdb: Arc::new(RwLock::new(authdb)),
|
||||||
|
store: Arc::new(RwLock::new(store)),
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
pub async fn authenticate(&self, token: AuthToken) -> Result<Option<SessionToken>, AuthError> {
|
||||||
|
self.authdb.read().await.authenticate(token).await
|
||||||
|
}
|
||||||
|
|
||||||
|
pub async fn validate_session(
|
||||||
|
&self,
|
||||||
|
token: SessionToken,
|
||||||
|
) -> Result<Option<Username>, AuthError> {
|
||||||
|
self.authdb.read().await.validate_session(token).await
|
||||||
|
}
|
||||||
|
|
||||||
|
pub async fn list_files(&self) -> Result<HashSet<FileId>, ReadFileError> {
|
||||||
|
self.store.read().await.list_files()
|
||||||
|
}
|
||||||
|
|
||||||
|
pub async fn get_file(&self, id: &FileId) -> Result<FileHandle, ReadFileError> {
|
||||||
|
self.store.read().await.get_file(id)
|
||||||
|
}
|
||||||
|
|
||||||
|
pub async fn add_file(
|
||||||
|
&self,
|
||||||
|
filename: String,
|
||||||
|
content: Vec<u8>,
|
||||||
|
) -> Result<FileHandle, WriteFileError> {
|
||||||
|
self.store.write().await.add_file(filename, content)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
fn with_app(app: App) -> impl Filter<Extract = (App,), Error = Infallible> + Clone {
|
||||||
|
warp::any().map(move || app.clone())
|
||||||
|
}
|
||||||
|
|
||||||
|
fn parse_cookies(cookie_str: &str) -> Result<HashMap<String, String>, cookie::ParseError> {
|
||||||
|
Cookie::split_parse(cookie_str)
|
||||||
|
.map(|c| c.map(|c| (c.name().to_owned(), c.value().to_owned())))
|
||||||
|
.collect::<Result<HashMap<String, String>, cookie::ParseError>>()
|
||||||
|
}
|
||||||
|
|
||||||
|
fn get_session_token(cookies: HashMap<String, String>) -> Option<SessionToken> {
|
||||||
|
cookies.get("session").cloned().map(SessionToken::from)
|
||||||
|
}
|
||||||
|
|
||||||
|
fn maybe_with_session() -> impl Filter<Extract = (Option<SessionToken>,), Error = Rejection> + Copy
|
||||||
|
{
|
||||||
|
warp::any()
|
||||||
|
.and(warp::header::optional::<String>("cookie"))
|
||||||
|
.map(|cookie_str: Option<String>| match cookie_str {
|
||||||
|
Some(cookie_str) => parse_cookies(&cookie_str).ok().and_then(get_session_token),
|
||||||
|
None => None,
|
||||||
|
})
|
||||||
|
}
|
||||||
|
|
||||||
|
fn with_session() -> impl Filter<Extract = (SessionToken,), Error = Rejection> + Copy {
|
||||||
|
warp::any()
|
||||||
|
.and(warp::header::<String>("cookie"))
|
||||||
|
.and_then(|cookie_str: String| async move {
|
||||||
|
match parse_cookies(&cookie_str).ok().and_then(get_session_token) {
|
||||||
|
Some(session_token) => Ok(session_token),
|
||||||
|
None => Err(warp::reject()),
|
||||||
|
}
|
||||||
|
})
|
||||||
|
}
|
||||||
|
|
||||||
|
#[tokio::main]
pub async fn main() {
    pretty_env_logger::init();

    // Both environment variables are mandatory configuration; a missing one
    // panics immediately at startup rather than failing later mid-request.
    let authdb = AuthDB::new(PathBuf::from(&std::env::var("AUTHDB").unwrap()))
        .await
        .unwrap();
    let store = Store::new(PathBuf::from(&std::env::var("FILE_SHARE_DIR").unwrap()));

    let app = App::new(authdb, store);

    let log = warp::log("file_service");

    // GET / — the index page. The session is optional so unauthenticated
    // visitors can still be served (presumably a login view — handled by
    // handle_index, not visible here).
    let root = warp::path!()
        .and(warp::get())
        .and(with_app(app.clone()))
        .and(maybe_with_session())
        .then(handle_index);

    // GET /css — the stylesheet.
    let styles = warp::path!("css").and(warp::get()).then(handle_css);

    // POST /auth — exchanges a form-posted auth token for a session.
    let auth = warp::path!("auth")
        .and(warp::post())
        .and(with_app(app.clone()))
        .and(warp::filters::body::form())
        .then(handle_auth);

    // POST /upload — multipart upload, capped at MAX_UPLOAD bytes; requires a
    // valid session (with_session rejects otherwise).
    let upload_via_form = warp::path!("upload")
        .and(warp::post())
        .and(with_app(app.clone()))
        .and(with_session())
        .and(warp::multipart::form().max_length(MAX_UPLOAD))
        .then(handle_upload);

    // GET /<id>/tn — thumbnail; forwards the client's if-none-match ETags so
    // the handler can answer 304.
    let thumbnail = warp::path!(String / "tn")
        .and(warp::get())
        .and(warp::header::optional::<String>("if-none-match"))
        .and(with_app(app.clone()))
        .then(move |id, old_etags, app: App| thumbnail(app, id, old_etags));

    // GET /<id> — full-size file, same ETag handling as the thumbnail route.
    let file = warp::path!(String)
        .and(warp::get())
        .and(warp::header::optional::<String>("if-none-match"))
        .and(with_app(app.clone()))
        .then(move |id, old_etags, app: App| file(app, id, old_etags));

    // Route order matters: the catch-all `file` route (bare /<id>) must come
    // last or it would shadow the more specific paths.
    let server = warp::serve(
        root.or(styles)
            .or(auth)
            .or(upload_via_form)
            .or(thumbnail)
            .or(file)
            .with(log),
    );

    server
        .run(SocketAddr::new(IpAddr::V4(Ipv4Addr::new(0, 0, 0, 0)), 8002))
        .await;
}
|
|
@ -0,0 +1,102 @@
|
||||||
|
use crate::html::*;
|
||||||
|
use build_html::{self, Container, ContainerType, Html, HtmlContainer};
|
||||||
|
use file_service::{FileHandle, FileId, ReadFileError};
|
||||||
|
|
||||||
|
pub fn auth(_message: Option<String>) -> build_html::HtmlPage {
|
||||||
|
build_html::HtmlPage::new()
|
||||||
|
.with_title("Authentication")
|
||||||
|
.with_stylesheet("/css")
|
||||||
|
.with_container(
|
||||||
|
Container::new(ContainerType::Div)
|
||||||
|
.with_attributes([("class", "authentication-page")])
|
||||||
|
.with_container(
|
||||||
|
Container::new(ContainerType::Div)
|
||||||
|
.with_attributes([("class", "card authentication-form")])
|
||||||
|
.with_html(
|
||||||
|
Form::new()
|
||||||
|
.with_path("/auth")
|
||||||
|
.with_method("post")
|
||||||
|
.with_container(
|
||||||
|
Container::new(ContainerType::Div)
|
||||||
|
.with_attributes([("class", "authentication-form__label")])
|
||||||
|
.with_html(Label::new("for-token-input", "Authentication")),
|
||||||
|
)
|
||||||
|
.with_container(
|
||||||
|
Container::new(ContainerType::Div)
|
||||||
|
.with_attributes([("class", "authentication-form__input")])
|
||||||
|
.with_html(
|
||||||
|
Input::new("token", "token")
|
||||||
|
.with_id("for-token-input")
|
||||||
|
.with_attributes([("size", "50")]),
|
||||||
|
),
|
||||||
|
),
|
||||||
|
),
|
||||||
|
),
|
||||||
|
)
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn gallery(handles: Vec<Result<FileHandle, ReadFileError>>) -> build_html::HtmlPage {
|
||||||
|
let mut page = build_html::HtmlPage::new()
|
||||||
|
.with_title("Gallery")
|
||||||
|
.with_stylesheet("/css")
|
||||||
|
.with_container(
|
||||||
|
Container::new(ContainerType::Div)
|
||||||
|
.with_attributes([("class", "gallery-page")])
|
||||||
|
.with_header(1, "Gallery")
|
||||||
|
.with_html(upload_form()),
|
||||||
|
);
|
||||||
|
|
||||||
|
let mut gallery = Container::new(ContainerType::Div).with_attributes([("class", "gallery")]);
|
||||||
|
for handle in handles {
|
||||||
|
let container = match handle {
|
||||||
|
Ok(ref handle) => thumbnail(&handle.id).with_html(
|
||||||
|
Form::new()
|
||||||
|
.with_path(&format!("/{}", *handle.id))
|
||||||
|
.with_method("post")
|
||||||
|
.with_html(Input::new("hidden", "_method").with_value("delete"))
|
||||||
|
.with_html(Button::new("Delete")),
|
||||||
|
),
|
||||||
|
|
||||||
|
Err(err) => Container::new(ContainerType::Div)
|
||||||
|
.with_attributes(vec![("class", "file")])
|
||||||
|
.with_paragraph(format!("{:?}", err)),
|
||||||
|
};
|
||||||
|
gallery.add_container(container);
|
||||||
|
}
|
||||||
|
page.add_container(gallery);
|
||||||
|
page
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn upload_form() -> Form {
|
||||||
|
Form::new()
|
||||||
|
.with_path("/upload")
|
||||||
|
.with_method("post")
|
||||||
|
.with_encoding("multipart/form-data")
|
||||||
|
.with_container(
|
||||||
|
Container::new(ContainerType::Div)
|
||||||
|
.with_attributes([("class", "card upload-form")])
|
||||||
|
.with_html(Input::new("file", "file").with_attributes([
|
||||||
|
("id", "for-selector-input"),
|
||||||
|
("placeholder", "select file"),
|
||||||
|
("class", "upload-form__selector"),
|
||||||
|
]))
|
||||||
|
.with_html(
|
||||||
|
Button::new("Upload file")
|
||||||
|
.with_attributes([("class", "upload-form__button")])
|
||||||
|
.with_type("submit"),
|
||||||
|
),
|
||||||
|
)
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn thumbnail(id: &FileId) -> Container {
|
||||||
|
Container::new(ContainerType::Div)
|
||||||
|
.with_attributes(vec![("class", "card thumbnail")])
|
||||||
|
.with_html(
|
||||||
|
Container::new(ContainerType::Div).with_link(
|
||||||
|
format!("/{}", **id),
|
||||||
|
Image::new(&format!("{}/tn", **id))
|
||||||
|
.with_attributes([("class", "thumbnail__image")])
|
||||||
|
.to_html_string(),
|
||||||
|
),
|
||||||
|
)
|
||||||
|
}
|
|
@ -0,0 +1,282 @@
|
||||||
|
use super::{fileinfo::FileInfo, FileId, ReadFileError, WriteFileError};
|
||||||
|
use chrono::prelude::*;
|
||||||
|
use hex_string::HexString;
|
||||||
|
use image::imageops::FilterType;
|
||||||
|
use sha2::{Digest, Sha256};
|
||||||
|
use std::{
|
||||||
|
convert::TryFrom,
|
||||||
|
io::{Read, Write},
|
||||||
|
path::{Path, PathBuf},
|
||||||
|
};
|
||||||
|
use thiserror::Error;
|
||||||
|
use uuid::Uuid;
|
||||||
|
|
||||||
|
/// Errors raised while decomposing a filesystem path into a `PathResolver`.
#[derive(Debug, Error)]
pub enum PathError {
    // The path is missing a parent directory, file stem, or extension.
    #[error("path cannot be derived from input")]
    InvalidPath,
}
|
||||||
|
|
||||||
|
/// Computes the on-disk locations (content, metadata, thumbnail) for one
/// stored file from its root directory, id, and extension.
#[derive(Clone, Debug)]
pub struct PathResolver {
    // Root directory of the file store.
    base: PathBuf,
    // The file's identifier; becomes the file stem of every derived path.
    id: FileId,
    // Extension of the original upload (e.g. "png"), without the dot.
    extension: String,
}
|
||||||
|
|
||||||
|
impl PathResolver {
|
||||||
|
pub fn new(base: &Path, id: FileId, extension: String) -> Self {
|
||||||
|
Self {
|
||||||
|
base: base.to_owned(),
|
||||||
|
id,
|
||||||
|
extension,
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn metadata_path_by_id(base: &Path, id: FileId) -> PathBuf {
|
||||||
|
let mut path = base.to_path_buf();
|
||||||
|
path.push(PathBuf::from(id.clone()));
|
||||||
|
path.set_extension("json");
|
||||||
|
path
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn id(&self) -> FileId {
|
||||||
|
self.id.clone()
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn file_path(&self) -> PathBuf {
|
||||||
|
let mut path = self.base.clone();
|
||||||
|
path.push(PathBuf::from(self.id.clone()));
|
||||||
|
path.set_extension(self.extension.clone());
|
||||||
|
path
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn metadata_path(&self) -> PathBuf {
|
||||||
|
let mut path = self.base.clone();
|
||||||
|
path.push(PathBuf::from(self.id.clone()));
|
||||||
|
path.set_extension("json");
|
||||||
|
path
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn thumbnail_path(&self) -> PathBuf {
|
||||||
|
let mut path = self.base.clone();
|
||||||
|
path.push(PathBuf::from(self.id.clone()));
|
||||||
|
path.set_extension(format!("tn.{}", self.extension));
|
||||||
|
path
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// The String / &str / PathBuf conversions all normalize to the `&Path`
// implementation below, which does the real parsing.
impl TryFrom<String> for PathResolver {
    type Error = PathError;
    fn try_from(s: String) -> Result<Self, Self::Error> {
        PathResolver::try_from(s.as_str())
    }
}

impl TryFrom<&str> for PathResolver {
    type Error = PathError;
    fn try_from(s: &str) -> Result<Self, Self::Error> {
        PathResolver::try_from(Path::new(s))
    }
}

impl TryFrom<PathBuf> for PathResolver {
    type Error = PathError;
    fn try_from(path: PathBuf) -> Result<Self, Self::Error> {
        PathResolver::try_from(path.as_path())
    }
}
|
||||||
|
|
||||||
|
impl TryFrom<&Path> for PathResolver {
|
||||||
|
type Error = PathError;
|
||||||
|
fn try_from(path: &Path) -> Result<Self, Self::Error> {
|
||||||
|
Ok(Self {
|
||||||
|
base: path
|
||||||
|
.parent()
|
||||||
|
.map(|s| s.to_owned())
|
||||||
|
.ok_or(PathError::InvalidPath)?,
|
||||||
|
id: path
|
||||||
|
.file_stem()
|
||||||
|
.and_then(|s| s.to_str().map(FileId::from))
|
||||||
|
.ok_or(PathError::InvalidPath)?,
|
||||||
|
extension: path
|
||||||
|
.extension()
|
||||||
|
.and_then(|s| s.to_str().map(|s| s.to_owned()))
|
||||||
|
.ok_or(PathError::InvalidPath)?,
|
||||||
|
})
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/// One file in the database, complete with the path of the file and information about the
/// thumbnail of the file.
#[derive(Debug)]
pub struct FileHandle {
    // Unique identifier; doubles as the file stem of every on-disk artifact.
    pub id: FileId,
    // Derives the content/metadata/thumbnail paths for this file.
    pub path: PathResolver,
    // Metadata persisted alongside the content as `<id>.json`.
    pub info: FileInfo,
}
|
||||||
|
|
||||||
|
impl FileHandle {
|
||||||
|
/// Create a new entry in the database
|
||||||
|
pub fn new(filename: String, root: PathBuf) -> Result<Self, WriteFileError> {
|
||||||
|
let id = FileId::from(Uuid::new_v4().hyphenated().to_string());
|
||||||
|
|
||||||
|
let extension = PathBuf::from(filename)
|
||||||
|
.extension()
|
||||||
|
.and_then(|s| s.to_str().map(|s| s.to_owned()))
|
||||||
|
.ok_or(WriteFileError::InvalidPath)?;
|
||||||
|
let path = PathResolver {
|
||||||
|
base: root.clone(),
|
||||||
|
id: id.clone(),
|
||||||
|
extension: extension.clone(),
|
||||||
|
};
|
||||||
|
|
||||||
|
let file_type = mime_guess::from_ext(&extension)
|
||||||
|
.first_or_text_plain()
|
||||||
|
.essence_str()
|
||||||
|
.to_owned();
|
||||||
|
|
||||||
|
let info = FileInfo {
|
||||||
|
id: id.clone(),
|
||||||
|
size: 0,
|
||||||
|
created: Utc::now(),
|
||||||
|
file_type,
|
||||||
|
hash: "".to_owned(),
|
||||||
|
extension,
|
||||||
|
};
|
||||||
|
|
||||||
|
let mut md_file = std::fs::File::create(path.metadata_path())?;
|
||||||
|
let _ = md_file.write(&serde_json::to_vec(&info)?)?;
|
||||||
|
|
||||||
|
Ok(Self { id, path, info })
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn load(id: &FileId, root: &Path) -> Result<Self, ReadFileError> {
|
||||||
|
let info = FileInfo::load(PathResolver::metadata_path_by_id(root, id.clone()))?;
|
||||||
|
let resolver = PathResolver::new(root, id.clone(), info.extension.clone());
|
||||||
|
Ok(Self {
|
||||||
|
id: info.id.clone(),
|
||||||
|
path: resolver,
|
||||||
|
info,
|
||||||
|
})
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn set_content(&mut self, content: Vec<u8>) -> Result<(), WriteFileError> {
|
||||||
|
let mut content_file = std::fs::File::create(self.path.file_path())?;
|
||||||
|
let byte_count = content_file.write(&content)?;
|
||||||
|
self.info.size = byte_count;
|
||||||
|
self.info.hash = self.hash_content(&content).as_string();
|
||||||
|
|
||||||
|
let mut md_file = std::fs::File::create(self.path.metadata_path())?;
|
||||||
|
let _ = md_file.write(&serde_json::to_vec(&self.info)?)?;
|
||||||
|
|
||||||
|
self.write_thumbnail()?;
|
||||||
|
|
||||||
|
Ok(())
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn content(&self) -> Result<Vec<u8>, ReadFileError> {
|
||||||
|
load_content(&self.path.file_path())
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn thumbnail(&self) -> Result<Vec<u8>, ReadFileError> {
|
||||||
|
load_content(&self.path.thumbnail_path())
|
||||||
|
}
|
||||||
|
|
||||||
|
fn hash_content(&self, data: &Vec<u8>) -> HexString {
|
||||||
|
HexString::from_bytes(&Sha256::digest(data).to_vec())
|
||||||
|
}
|
||||||
|
|
||||||
|
fn write_thumbnail(&self) -> Result<(), WriteFileError> {
|
||||||
|
let img = image::open(self.path.file_path())?;
|
||||||
|
let tn = img.resize(640, 640, FilterType::Nearest);
|
||||||
|
tn.save(self.path.thumbnail_path())?;
|
||||||
|
Ok(())
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn delete(self) {
|
||||||
|
let _ = std::fs::remove_file(self.path.thumbnail_path());
|
||||||
|
let _ = std::fs::remove_file(self.path.file_path());
|
||||||
|
let _ = std::fs::remove_file(self.path.metadata_path());
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
fn load_content(path: &Path) -> Result<Vec<u8>, ReadFileError> {
|
||||||
|
let mut buf = Vec::new();
|
||||||
|
let mut file = std::fs::File::open(path)?;
|
||||||
|
file.read_to_end(&mut buf)?;
|
||||||
|
Ok(buf)
|
||||||
|
}
|
||||||
|
|
||||||
|
#[cfg(test)]
mod test {
    use super::*;
    use std::{convert::TryFrom, path::PathBuf};
    use tempdir::TempDir;

    // A resolver parsed from a content path must derive all three sibling
    // paths (content, metadata, thumbnail) from the same stem.
    #[test]
    fn paths() {
        let resolver = PathResolver::try_from("path/82420255-d3c8-4d90-a582-f94be588c70c.png")
            .expect("to have a valid path");

        assert_eq!(
            resolver.file_path(),
            PathBuf::from("path/82420255-d3c8-4d90-a582-f94be588c70c.png")
        );
        assert_eq!(
            resolver.metadata_path(),
            PathBuf::from("path/82420255-d3c8-4d90-a582-f94be588c70c.json")
        );
        assert_eq!(
            resolver.thumbnail_path(),
            PathBuf::from("path/82420255-d3c8-4d90-a582-f94be588c70c.tn.png")
        );
    }

    #[test]
    fn it_opens_a_file() {
        let tmp = TempDir::new("var").unwrap();
        FileHandle::new("rawr.png".to_owned(), PathBuf::from(tmp.path())).expect("to succeed");
    }

    #[test]
    fn it_deletes_a_file() {
        let tmp = TempDir::new("var").unwrap();
        let f =
            FileHandle::new("rawr.png".to_owned(), PathBuf::from(tmp.path())).expect("to succeed");
        f.delete();
    }

    // Currently only exercises creation; the commented assertion documents the
    // intended thumbnail check.
    #[test]
    fn it_can_return_a_thumbnail() {
        let tmp = TempDir::new("var").unwrap();
        let _ =
            FileHandle::new("rawr.png".to_owned(), PathBuf::from(tmp.path())).expect("to succeed");
        /*
        assert_eq!(
            f.thumbnail(),
            Thumbnail {
                id: String::from("rawr.png"),
                root: PathBuf::from("var/"),
            },
        );
        */
    }

    #[test]
    fn it_can_return_a_file_stream() {
        let tmp = TempDir::new("var").unwrap();
        let _ =
            FileHandle::new("rawr.png".to_owned(), PathBuf::from(tmp.path())).expect("to succeed");
        // f.stream().expect("to succeed");
    }

    // Loading an id with no metadata on disk must surface FileNotFound.
    #[test]
    fn it_raises_an_error_when_file_not_found() {
        let tmp = TempDir::new("var").unwrap();
        match FileHandle::load(&FileId::from("rawr"), tmp.path()) {
            Err(ReadFileError::FileNotFound(_)) => assert!(true),
            _ => assert!(false),
        }
    }
}
|
|
@ -0,0 +1,69 @@
|
||||||
|
use crate::FileId;
|
||||||
|
|
||||||
|
use super::{ReadFileError, WriteFileError};
|
||||||
|
use chrono::prelude::*;
|
||||||
|
use serde::{Deserialize, Serialize};
|
||||||
|
use std::{
|
||||||
|
io::{Read, Write},
|
||||||
|
path::PathBuf,
|
||||||
|
};
|
||||||
|
|
||||||
|
/// Metadata persisted as the `<id>.json` sidecar next to each stored file.
#[derive(Clone, Debug, Serialize, Deserialize)]
pub struct FileInfo {
    // The file's identifier.
    pub id: FileId,
    // Content size in bytes.
    pub size: usize,
    // Creation timestamp (UTC).
    pub created: DateTime<Utc>,
    // MIME essence string, e.g. "image/png".
    pub file_type: String,
    // Hex-encoded content hash (SHA-256 — set by FileHandle::set_content).
    pub hash: String,
    // Original file extension without the dot.
    pub extension: String,
}
|
||||||
|
|
||||||
|
impl FileInfo {
|
||||||
|
pub fn load(path: PathBuf) -> Result<Self, ReadFileError> {
|
||||||
|
let mut content: Vec<u8> = Vec::new();
|
||||||
|
let mut file =
|
||||||
|
std::fs::File::open(path.clone()).map_err(|_| ReadFileError::FileNotFound(path))?;
|
||||||
|
file.read_to_end(&mut content)?;
|
||||||
|
let js = serde_json::from_slice(&content)?;
|
||||||
|
|
||||||
|
Ok(js)
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn save(&self, path: PathBuf) -> Result<(), WriteFileError> {
|
||||||
|
let ser = serde_json::to_string(self).unwrap();
|
||||||
|
let mut file = std::fs::File::create(path)?;
|
||||||
|
let _ = file.write(ser.as_bytes())?;
|
||||||
|
Ok(())
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
#[cfg(test)]
mod test {
    use super::*;
    use crate::store::FileId;
    use tempdir::TempDir;

    // Round-trip: a record written with save() must read back identically
    // through load().
    #[test]
    fn it_saves_and_loads_metadata() {
        let tmp = TempDir::new("var").unwrap();
        let created = Utc::now();

        let info = FileInfo {
            id: FileId("temp-id".to_owned()),
            size: 23777,
            created,
            file_type: "image/png".to_owned(),
            hash: "abcdefg".to_owned(),
            extension: "png".to_owned(),
        };
        let mut path = tmp.path().to_owned();
        path.push(&PathBuf::from(info.id.clone()));
        info.save(path.clone()).unwrap();

        let info_ = FileInfo::load(path).unwrap();
        assert_eq!(info_.size, 23777);
        assert_eq!(info_.created, info.created);
        assert_eq!(info_.file_type, "image/png");
        assert_eq!(info_.hash, info.hash);
    }
}
|
|
@ -0,0 +1,539 @@
|
||||||
|
use base64ct::{Base64, Encoding};
|
||||||
|
use serde::{Deserialize, Serialize};
|
||||||
|
use sha2::{Digest, Sha256};
|
||||||
|
use sqlx::{
|
||||||
|
sqlite::{SqlitePool, SqliteRow},
|
||||||
|
Row,
|
||||||
|
};
|
||||||
|
use std::{collections::HashSet, ops::Deref, path::PathBuf};
|
||||||
|
use thiserror::Error;
|
||||||
|
use uuid::Uuid;
|
||||||
|
|
||||||
|
mod filehandle;
|
||||||
|
mod fileinfo;
|
||||||
|
|
||||||
|
pub use filehandle::FileHandle;
|
||||||
|
pub use fileinfo::FileInfo;
|
||||||
|
|
||||||
|
/// Everything that can fail while writing a file (content, metadata, or
/// thumbnail) into the store.
#[derive(Debug, Error)]
pub enum WriteFileError {
    #[error("root file path does not exist")]
    RootNotFound,

    #[error("permission denied")]
    PermissionDenied,

    // E.g. a filename with no extension.
    #[error("invalid path")]
    InvalidPath,

    #[error("no metadata available")]
    NoMetadata,

    #[error("file could not be loaded")]
    LoadError(#[from] ReadFileError),

    // Thumbnail generation goes through the image crate.
    #[error("image conversion failed")]
    ImageError(#[from] image::ImageError),

    #[error("JSON error")]
    JSONError(#[from] serde_json::error::Error),

    #[error("IO error")]
    IOError(#[from] std::io::Error),
}
|
||||||
|
|
||||||
|
/// Everything that can fail while reading a file or its metadata from the
/// store.
#[derive(Debug, Error)]
pub enum ReadFileError {
    // Carries the path that could not be opened, for diagnostics.
    #[error("file not found")]
    FileNotFound(PathBuf),

    #[error("path is not a file")]
    NotAFile,

    #[error("permission denied")]
    PermissionDenied,

    #[error("invalid path")]
    InvalidPath,

    #[error("JSON error")]
    JSONError(#[from] serde_json::error::Error),

    #[error("IO error")]
    IOError(#[from] std::io::Error),
}
|
||||||
|
|
||||||
|
#[derive(Debug, Error)]
|
||||||
|
pub enum AuthError {
|
||||||
|
#[error("authentication token is duplicated")]
|
||||||
|
DuplicateAuthToken,
|
||||||
|
|
||||||
|
#[error("session token is duplicated")]
|
||||||
|
DuplicateSessionToken,
|
||||||
|
|
||||||
|
#[error("database failed")]
|
||||||
|
SqlError(sqlx::Error),
|
||||||
|
}
|
||||||
|
|
||||||
|
impl From<sqlx::Error> for AuthError {
|
||||||
|
fn from(err: sqlx::Error) -> AuthError {
|
||||||
|
AuthError::SqlError(err)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Newtype for a user's login name, so it cannot be confused with other
/// strings (tokens, ids) in signatures.
#[derive(Clone, Debug, Serialize, Deserialize, Hash, PartialEq, Eq)]
pub struct Username(String);

impl From<String> for Username {
    fn from(s: String) -> Self {
        Self(s)
    }
}

impl From<&str> for Username {
    fn from(s: &str) -> Self {
        Self(s.to_owned())
    }
}

impl From<Username> for String {
    fn from(s: Username) -> Self {
        Self::from(&s)
    }
}

impl From<&Username> for String {
    fn from(s: &Username) -> Self {
        let Username(s) = s;
        Self::from(s)
    }
}

impl Deref for Username {
    type Target = String;
    fn deref(&self) -> &Self::Target {
        &self.0
    }
}

// Reads the `username` column, so query_as works on any query selecting it.
impl sqlx::FromRow<'_, SqliteRow> for Username {
    fn from_row(row: &SqliteRow) -> sqlx::Result<Self> {
        let name: String = row.try_get("username")?;
        Ok(Username::from(name))
    }
}
|
||||||
|
|
||||||
|
/// Newtype for the long-lived authentication token a user exchanges for a
/// session (see `AuthDB::authenticate`).
#[derive(Clone, Debug, Serialize, Deserialize, Hash, PartialEq, Eq)]
pub struct AuthToken(String);

impl From<String> for AuthToken {
    fn from(s: String) -> Self {
        Self(s)
    }
}

impl From<&str> for AuthToken {
    fn from(s: &str) -> Self {
        Self(s.to_owned())
    }
}

impl From<AuthToken> for PathBuf {
    fn from(s: AuthToken) -> Self {
        Self::from(&s)
    }
}

impl From<&AuthToken> for PathBuf {
    fn from(s: &AuthToken) -> Self {
        let AuthToken(s) = s;
        Self::from(s)
    }
}

impl Deref for AuthToken {
    type Target = String;
    fn deref(&self) -> &Self::Target {
        &self.0
    }
}
|
||||||
|
|
||||||
|
/// Newtype for the per-session token issued after successful authentication
/// and carried in the client's cookie.
#[derive(Clone, Debug, Serialize, Deserialize, Hash, PartialEq, Eq)]
pub struct SessionToken(String);

impl From<String> for SessionToken {
    fn from(s: String) -> Self {
        Self(s)
    }
}

impl From<&str> for SessionToken {
    fn from(s: &str) -> Self {
        Self(s.to_owned())
    }
}

impl From<SessionToken> for PathBuf {
    fn from(s: SessionToken) -> Self {
        Self::from(&s)
    }
}

impl From<&SessionToken> for PathBuf {
    fn from(s: &SessionToken) -> Self {
        let SessionToken(s) = s;
        Self::from(s)
    }
}

impl Deref for SessionToken {
    type Target = String;
    fn deref(&self) -> &Self::Target {
        &self.0
    }
}
|
||||||
|
|
||||||
|
/// Newtype for a stored file's identifier (a UUID string — see
/// `FileHandle::new`); converts to `PathBuf` so it can be pushed onto store
/// paths directly.
#[derive(Clone, Debug, Serialize, Deserialize, Hash, PartialEq, Eq)]
pub struct FileId(String);

impl From<String> for FileId {
    fn from(s: String) -> Self {
        Self(s)
    }
}

impl From<&str> for FileId {
    fn from(s: &str) -> Self {
        Self(s.to_owned())
    }
}

impl From<FileId> for PathBuf {
    fn from(s: FileId) -> Self {
        Self::from(&s)
    }
}

impl From<&FileId> for PathBuf {
    fn from(s: &FileId) -> Self {
        let FileId(s) = s;
        Self::from(s)
    }
}

impl Deref for FileId {
    type Target = String;
    fn deref(&self) -> &Self::Target {
        &self.0
    }
}
|
||||||
|
|
||||||
|
/// Anything that can supply the root directory for file storage.
pub trait FileRoot {
    fn root(&self) -> PathBuf;
}

/// Trivial `FileRoot` backed by a fixed path.
pub struct Context(PathBuf);

impl FileRoot for Context {
    fn root(&self) -> PathBuf {
        self.0.clone()
    }
}
|
||||||
|
|
||||||
|
/// Handle to the user/session sqlite database.
#[derive(Clone)]
pub struct AuthDB {
    // Shared sqlx connection pool; cloning AuthDB shares the same pool.
    pool: SqlitePool,
}
|
||||||
|
|
||||||
|
impl AuthDB {
    /// Connect to the sqlite database at `path` and run the migrations
    /// embedded from ./migrations.
    ///
    /// Panics if `path` is not valid UTF-8 (the `to_str().unwrap()`).
    pub async fn new(path: PathBuf) -> Result<Self, sqlx::Error> {
        let migrator = sqlx::migrate!("./migrations");
        let pool = SqlitePool::connect(&format!("sqlite://{}", path.to_str().unwrap())).await?;
        migrator.run(&pool).await?;
        Ok(Self { pool })
    }

    /// Create a user and return their freshly generated auth token.
    ///
    /// The token is base64(SHA-256(random UUID || username)), so it is
    /// effectively unguessable.
    pub async fn add_user(&self, username: Username) -> Result<AuthToken, AuthError> {
        let mut hasher = Sha256::new();
        hasher.update(Uuid::new_v4().hyphenated().to_string());
        hasher.update(username.to_string());
        let auth_token = Base64::encode_string(&hasher.finalize());

        let _ = sqlx::query("INSERT INTO users (username, token) VALUES ($1, $2)")
            .bind(username.to_string())
            .bind(auth_token.clone())
            .execute(&self.pool)
            .await?;

        Ok(AuthToken::from(auth_token))
    }

    /// All usernames currently in the database.
    pub async fn list_users(&self) -> Result<Vec<Username>, AuthError> {
        let usernames = sqlx::query_as::<_, Username>("SELECT (username) FROM users")
            .fetch_all(&self.pool)
            .await?;

        Ok(usernames)
    }

    /// Exchange an auth token for a new session token.
    ///
    /// Returns `Ok(None)` when no user owns the token; errors with
    /// `DuplicateAuthToken` if the token matches more than one user row.
    pub async fn authenticate(&self, token: AuthToken) -> Result<Option<SessionToken>, AuthError> {
        let results = sqlx::query("SELECT * FROM users WHERE token = $1")
            .bind(token.to_string())
            .fetch_all(&self.pool)
            .await?;

        // Auth tokens must be unique; multiple matches indicate corruption.
        if results.len() > 1 {
            return Err(AuthError::DuplicateAuthToken);
        }

        if results.is_empty() {
            return Ok(None);
        }

        let user_id: i64 = results[0].try_get("id")?;

        // Session token: base64(SHA-256(random UUID || auth token)).
        let mut hasher = Sha256::new();
        hasher.update(Uuid::new_v4().hyphenated().to_string());
        hasher.update(token.to_string());
        let session_token = Base64::encode_string(&hasher.finalize());

        let _ = sqlx::query("INSERT INTO sessions (token, user_id) VALUES ($1, $2)")
            .bind(session_token.clone())
            .bind(user_id)
            .execute(&self.pool)
            .await?;

        Ok(Some(SessionToken::from(session_token)))
    }

    /// Resolve a session token to its owning username.
    ///
    /// Returns `Ok(None)` for an unknown session; errors with
    /// `DuplicateSessionToken` if more than one session row matches.
    pub async fn validate_session(
        &self,
        token: SessionToken,
    ) -> Result<Option<Username>, AuthError> {
        let rows = sqlx::query(
            "SELECT users.username FROM sessions INNER JOIN users ON sessions.user_id = users.id WHERE sessions.token = $1",
        )
        .bind(token.to_string())
        .fetch_all(&self.pool)
        .await?;
        if rows.len() > 1 {
            return Err(AuthError::DuplicateSessionToken);
        }

        if rows.is_empty() {
            return Ok(None);
        }

        let username: String = rows[0].try_get("username")?;
        Ok(Some(Username::from(username)))
    }
}
|
||||||
|
|
||||||
|
/// Filesystem-backed file store; all content, metadata, and thumbnails live
/// directly under `files_root`.
pub struct Store {
    files_root: PathBuf,
}
|
||||||
|
|
||||||
|
impl Store {
|
||||||
|
pub fn new(files_root: PathBuf) -> Self {
|
||||||
|
Self { files_root }
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn list_files(&self) -> Result<HashSet<FileId>, ReadFileError> {
|
||||||
|
let paths = std::fs::read_dir(&self.files_root)?;
|
||||||
|
let info_files = paths
|
||||||
|
.into_iter()
|
||||||
|
.filter_map(|path| {
|
||||||
|
let path_ = path.unwrap().path();
|
||||||
|
if path_.extension().and_then(|s| s.to_str()) == Some("json") {
|
||||||
|
let stem = path_.file_stem().and_then(|s| s.to_str()).unwrap();
|
||||||
|
Some(FileId::from(stem))
|
||||||
|
} else {
|
||||||
|
None
|
||||||
|
}
|
||||||
|
})
|
||||||
|
.collect::<HashSet<FileId>>();
|
||||||
|
Ok(info_files)
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn add_file(
|
||||||
|
&mut self,
|
||||||
|
filename: String,
|
||||||
|
content: Vec<u8>,
|
||||||
|
) -> Result<FileHandle, WriteFileError> {
|
||||||
|
let mut file = FileHandle::new(filename, self.files_root.clone())?;
|
||||||
|
file.set_content(content)?;
|
||||||
|
Ok(file)
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn get_file(&self, id: &FileId) -> Result<FileHandle, ReadFileError> {
|
||||||
|
FileHandle::load(id, &self.files_root)
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn delete_file(&mut self, id: &FileId) -> Result<(), WriteFileError> {
|
||||||
|
let handle = FileHandle::load(id, &self.files_root)?;
|
||||||
|
handle.delete();
|
||||||
|
Ok(())
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn get_metadata(&self, id: &FileId) -> Result<FileInfo, ReadFileError> {
|
||||||
|
let mut path = self.files_root.clone();
|
||||||
|
path.push(PathBuf::from(id));
|
||||||
|
path.set_extension("json");
|
||||||
|
FileInfo::load(path)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
#[cfg(test)]
mod test {
    use super::*;
    use cool_asserts::assert_matches;
    use std::{collections::HashSet, io::Read};
    use tempdir::TempDir;

    // Shared harness: build a store in a temp dir, add fixtures/rawr.png, and
    // pass the store, the new file's id, and the temp dir to the test body.
    fn with_file<F>(test_fn: F)
    where
        F: FnOnce(Store, FileId, TempDir),
    {
        let tmp = TempDir::new("var").unwrap();

        let mut buf = Vec::new();
        let mut file = std::fs::File::open("fixtures/rawr.png").unwrap();
        file.read_to_end(&mut buf).unwrap();

        let mut store = Store::new(PathBuf::from(tmp.path()));
        let file_record = store.add_file("rawr.png".to_owned(), buf).unwrap();

        test_fn(store, file_record.id, tmp);
    }

    #[test]
    fn adds_files() {
        with_file(|store, id, tmp| {
            let file = store.get_file(&id).expect("to retrieve the file");

            assert_eq!(file.content().map(|file| file.len()).unwrap(), 23777);

            // Content, metadata, and thumbnail should all exist on disk.
            assert!(tmp.path().join(&(*id)).with_extension("png").exists());
            assert!(tmp.path().join(&(*id)).with_extension("json").exists());
            assert!(tmp.path().join(&(*id)).with_extension("tn.png").exists());
        });
    }

    #[test]
    fn sets_up_metadata_for_file() {
        with_file(|store, id, tmp| {
            assert!(tmp.path().join(&(*id)).with_extension("png").exists());
            let info = store.get_metadata(&id).expect("to retrieve the metadata");

            assert_matches!(info, FileInfo { size, file_type, hash, extension, .. } => {
                assert_eq!(size, 23777);
                assert_eq!(file_type, "image/png");
                assert_eq!(hash, "b6cd35e113b95d62f53d9cbd27ccefef47d3e324aef01a2db6c0c6d3a43c89ee".to_owned());
                assert_eq!(extension, "png".to_owned());
            });
        });
    }

    /*
    #[test]
    fn sets_up_thumbnail_for_file() {
        with_file(|store, id| {
            let (_, thumbnail) = store.get_thumbnail(&id).expect("to retrieve the thumbnail");
            assert_eq!(thumbnail.content().map(|file| file.len()).unwrap(), 48869);
        });
    }
    */

    #[test]
    fn deletes_associated_files() {
        with_file(|mut store, id, tmp| {
            store.delete_file(&id).expect("file to be deleted");

            // All three artifacts must be gone.
            assert!(!tmp.path().join(&(*id)).with_extension("png").exists());
            assert!(!tmp.path().join(&(*id)).with_extension("json").exists());
            assert!(!tmp.path().join(&(*id)).with_extension("tn.png").exists());
        });
    }

    #[test]
    fn lists_files_in_the_db() {
        with_file(|store, id, _| {
            let resolvers = store.list_files().expect("file listing to succeed");
            let ids = resolvers.into_iter().collect::<HashSet<FileId>>();

            assert_eq!(ids.len(), 1);
            assert!(ids.contains(&id));
        });
    }
}
|
||||||
|
|
||||||
|
#[cfg(test)]
|
||||||
|
mod authdb_test {
|
||||||
|
use super::*;
|
||||||
|
use cool_asserts::assert_matches;
|
||||||
|
|
||||||
|
#[tokio::test]
|
||||||
|
async fn can_create_and_list_users() {
|
||||||
|
let db = AuthDB::new(PathBuf::from(":memory:"))
|
||||||
|
.await
|
||||||
|
.expect("a memory-only database will be created");
|
||||||
|
let _ = db
|
||||||
|
.add_user(Username::from("savanni"))
|
||||||
|
.await
|
||||||
|
.expect("user to be created");
|
||||||
|
assert_matches!(db.list_users().await, Ok(names) => {
|
||||||
|
let names = names.into_iter().collect::<HashSet<Username>>();
|
||||||
|
assert!(names.contains(&Username::from("savanni")));
|
||||||
|
})
|
||||||
|
}
|
||||||
|
|
||||||
|
#[tokio::test]
|
||||||
|
async fn unknown_auth_token_returns_nothing() {
|
||||||
|
let db = AuthDB::new(PathBuf::from(":memory:"))
|
||||||
|
.await
|
||||||
|
.expect("a memory-only database will be created");
|
||||||
|
let _ = db
|
||||||
|
.add_user(Username::from("savanni"))
|
||||||
|
.await
|
||||||
|
.expect("user to be created");
|
||||||
|
|
||||||
|
let token = AuthToken::from("0000000000");
|
||||||
|
|
||||||
|
assert_matches!(db.authenticate(token).await, Ok(None));
|
||||||
|
}
|
||||||
|
|
||||||
|
#[tokio::test]
|
||||||
|
async fn auth_token_becomes_session_token() {
|
||||||
|
let db = AuthDB::new(PathBuf::from(":memory:"))
|
||||||
|
.await
|
||||||
|
.expect("a memory-only database will be created");
|
||||||
|
let token = db
|
||||||
|
.add_user(Username::from("savanni"))
|
||||||
|
.await
|
||||||
|
.expect("user to be created");
|
||||||
|
|
||||||
|
assert_matches!(db.authenticate(token).await, Ok(_));
|
||||||
|
}
|
||||||
|
|
||||||
|
#[tokio::test]
|
||||||
|
async fn can_validate_session_token() {
|
||||||
|
let db = AuthDB::new(PathBuf::from(":memory:"))
|
||||||
|
.await
|
||||||
|
.expect("a memory-only database will be created");
|
||||||
|
let token = db
|
||||||
|
.add_user(Username::from("savanni"))
|
||||||
|
.await
|
||||||
|
.expect("user to be created");
|
||||||
|
let session = db
|
||||||
|
.authenticate(token)
|
||||||
|
.await
|
||||||
|
.expect("token authentication should succeed")
|
||||||
|
.expect("session token should be found");
|
||||||
|
|
||||||
|
assert_matches!(
|
||||||
|
db.validate_session(session).await,
|
||||||
|
Ok(Some(username)) => {
|
||||||
|
assert_eq!(username, Username::from("savanni"));
|
||||||
|
});
|
||||||
|
}
|
||||||
|
}
|
|
@ -0,0 +1,91 @@
|
||||||
|
use super::{ReadFileError, WriteFileError};
|
||||||
|
use image::imageops::FilterType;
|
||||||
|
use std::{
|
||||||
|
fs::remove_file,
|
||||||
|
io::Read,
|
||||||
|
path::{Path, PathBuf},
|
||||||
|
};
|
||||||
|
|
||||||
|
#[derive(Clone, Debug, PartialEq)]
|
||||||
|
pub struct Thumbnail {
|
||||||
|
pub path: PathBuf,
|
||||||
|
}
|
||||||
|
|
||||||
|
impl Thumbnail {
|
||||||
|
pub fn open(
|
||||||
|
origin_path: PathBuf,
|
||||||
|
thumbnail_path: PathBuf,
|
||||||
|
) -> Result<Thumbnail, WriteFileError> {
|
||||||
|
let s = Thumbnail {
|
||||||
|
path: PathBuf::from(thumbnail_path),
|
||||||
|
};
|
||||||
|
|
||||||
|
if !s.path.exists() {
|
||||||
|
let img = image::open(&origin_path)?;
|
||||||
|
let tn = img.resize(640, 640, FilterType::Nearest);
|
||||||
|
tn.save(&s.path)?;
|
||||||
|
}
|
||||||
|
|
||||||
|
Ok(s)
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn load(path: PathBuf) -> Result<Thumbnail, ReadFileError> {
|
||||||
|
let s = Thumbnail { path: path.clone() };
|
||||||
|
|
||||||
|
if !s.path.exists() {
|
||||||
|
return Err(ReadFileError::FileNotFound(path));
|
||||||
|
}
|
||||||
|
|
||||||
|
Ok(s)
|
||||||
|
}
|
||||||
|
|
||||||
|
/*
|
||||||
|
pub fn from_path(path: &Path) -> Result<Thumbnail, ReadFileError> {
|
||||||
|
let id = path
|
||||||
|
.file_name()
|
||||||
|
.map(|s| String::from(s.to_string_lossy()))
|
||||||
|
.ok_or(ReadFileError::NotAnImage(PathBuf::from(path)))?;
|
||||||
|
|
||||||
|
let path = path
|
||||||
|
.parent()
|
||||||
|
.ok_or(ReadFileError::FileNotFound(PathBuf::from(path)))?;
|
||||||
|
|
||||||
|
Thumbnail::open(&id, root)
|
||||||
|
}
|
||||||
|
*/
|
||||||
|
|
||||||
|
/*
|
||||||
|
pub fn stream(&self) -> Result<std::fs::File, ReadFileError> {
|
||||||
|
std::fs::File::open(self.path.clone()).map_err(|err| {
|
||||||
|
if err.kind() == std::io::ErrorKind::NotFound {
|
||||||
|
ReadFileError::FileNotFound
|
||||||
|
} else {
|
||||||
|
ReadFileError::from(err)
|
||||||
|
}
|
||||||
|
})
|
||||||
|
}
|
||||||
|
*/
|
||||||
|
|
||||||
|
/*
|
||||||
|
pub fn delete(self) -> Result<(), WriteFileError> {
|
||||||
|
remove_file(self.path).map_err(WriteFileError::from)
|
||||||
|
}
|
||||||
|
*/
|
||||||
|
}
|
||||||
|
|
||||||
|
#[cfg(test)]
|
||||||
|
mod test {
|
||||||
|
use super::*;
|
||||||
|
use crate::store::utils::FileCleanup;
|
||||||
|
|
||||||
|
#[test]
|
||||||
|
fn it_creates_a_thumbnail_if_one_does_not_exist() {
|
||||||
|
let _ = FileCleanup(PathBuf::from("var/rawr.tn.png"));
|
||||||
|
let _ = Thumbnail::open(
|
||||||
|
PathBuf::from("fixtures/rawr.png"),
|
||||||
|
PathBuf::from("var/rawr.tn.png"),
|
||||||
|
)
|
||||||
|
.expect("thumbnail open must work");
|
||||||
|
assert!(Path::new("var/rawr.tn.png").is_file());
|
||||||
|
}
|
||||||
|
}
|
|
@ -0,0 +1,15 @@
|
||||||
|
<html>
|
||||||
|
|
||||||
|
<head>
|
||||||
|
<title> {{title}} </title>
|
||||||
|
<link href="/css" rel="stylesheet" type="text/css" media="screen" />
|
||||||
|
<script src="/script"></script>
|
||||||
|
</head>
|
||||||
|
|
||||||
|
<body>
|
||||||
|
|
||||||
|
<a href="/file/{{id}}"><img src="/tn/{{id}}" /></a>
|
||||||
|
|
||||||
|
</body>
|
||||||
|
|
||||||
|
</html>
|
|
@ -0,0 +1,54 @@
|
||||||
|
<html>
|
||||||
|
|
||||||
|
<head>
|
||||||
|
<title> Admin list of files </title>
|
||||||
|
<link href="/css" rel="stylesheet" type="text/css" media="screen" />
|
||||||
|
<script src="/script"></script>
|
||||||
|
</head>
|
||||||
|
|
||||||
|
<body>
|
||||||
|
<h1> Admin list of files </h1>
|
||||||
|
|
||||||
|
<div class="uploadform">
|
||||||
|
<form action="/" method="post" enctype="multipart/form-data">
|
||||||
|
<div id="file-selector">
|
||||||
|
<input type="file" name="file" id="file-selector-input" />
|
||||||
|
<label for="file-selector-input" onclick="selectFile('file-selector')">Select a file</label>
|
||||||
|
</div>
|
||||||
|
<input type="submit" value="Upload file" />
|
||||||
|
</form>
|
||||||
|
</div>
|
||||||
|
|
||||||
|
<div class="files">
|
||||||
|
{{#files}}
|
||||||
|
<div class="file">
|
||||||
|
{{#error}}
|
||||||
|
<div>
|
||||||
|
<p> {{error}} </p>
|
||||||
|
</div>
|
||||||
|
{{/error}}
|
||||||
|
|
||||||
|
{{#file}}
|
||||||
|
<div class="thumbnail">
|
||||||
|
<a href="/file/{{id}}"><img src="/tn/{{id}}" /></a>
|
||||||
|
</div>
|
||||||
|
<div>
|
||||||
|
<ul>
|
||||||
|
<li> {{date}} </li>
|
||||||
|
<li> {{type_}} </li>
|
||||||
|
<li> {{size}} </li>
|
||||||
|
</ul>
|
||||||
|
<div>
|
||||||
|
<form action="/{{id}}" method="post">
|
||||||
|
<input type="hidden" name="_method" value="delete" />
|
||||||
|
<input type="submit" value="Delete" />
|
||||||
|
</form>
|
||||||
|
</div>
|
||||||
|
</div>
|
||||||
|
{{/file}}
|
||||||
|
</div>
|
||||||
|
{{/files}}
|
||||||
|
</div>
|
||||||
|
</body>
|
||||||
|
|
||||||
|
</html>
|
|
@ -0,0 +1,10 @@
|
||||||
|
const selectFile = (selectorId) => {
|
||||||
|
console.log("wide arrow functions work: " + selectorId);
|
||||||
|
const input = document.querySelector("#" + selectorId + " input[type='file']")
|
||||||
|
const label = document.querySelector("#" + selectorId + " label")
|
||||||
|
input.addEventListener("change", (e) => {
|
||||||
|
if (input.files.length > 0) {
|
||||||
|
label.innerHTML = input.files[0].name
|
||||||
|
}
|
||||||
|
})
|
||||||
|
}
|
|
@ -0,0 +1,171 @@
|
||||||
|
:root {
|
||||||
|
--main-bg-color: #e5f0fc;
|
||||||
|
--fg-color: #449dfc;
|
||||||
|
|
||||||
|
--space-small: 4px;
|
||||||
|
--space-medium: 8px;
|
||||||
|
--space-large: 12px;
|
||||||
|
|
||||||
|
--hover-low: 4px 4px 4px gray;
|
||||||
|
}
|
||||||
|
|
||||||
|
body {
|
||||||
|
font-family: 'Ariel', sans-serif;
|
||||||
|
background-color: var(--main-bg-color);
|
||||||
|
}
|
||||||
|
|
||||||
|
.card {
|
||||||
|
border: 1px solid black;
|
||||||
|
border-radius: 5px;
|
||||||
|
box-shadow: var(--hover-low);
|
||||||
|
margin: var(--space-large);
|
||||||
|
padding: var(--space-medium);
|
||||||
|
|
||||||
|
}
|
||||||
|
|
||||||
|
.authentication-page {
|
||||||
|
display: flex;
|
||||||
|
flex-direction: column;
|
||||||
|
justify-content: center;
|
||||||
|
align-items: center;
|
||||||
|
height: 200px;
|
||||||
|
}
|
||||||
|
|
||||||
|
.authentication-form {
|
||||||
|
}
|
||||||
|
|
||||||
|
.authentication-form__label {
|
||||||
|
margin: var(--space-small);
|
||||||
|
}
|
||||||
|
|
||||||
|
.authentication-form__input {
|
||||||
|
margin: var(--space-small);
|
||||||
|
}
|
||||||
|
|
||||||
|
.gallery-page {
|
||||||
|
display: flex;
|
||||||
|
flex-direction: column;
|
||||||
|
}
|
||||||
|
|
||||||
|
.upload-form {
|
||||||
|
display: flex;
|
||||||
|
flex-direction: column;
|
||||||
|
}
|
||||||
|
|
||||||
|
.upload-form__selector {
|
||||||
|
margin: var(--space-small);
|
||||||
|
}
|
||||||
|
|
||||||
|
.upload-form__button {
|
||||||
|
margin: var(--space-small);
|
||||||
|
}
|
||||||
|
|
||||||
|
.gallery {
|
||||||
|
display: flex;
|
||||||
|
flex-wrap: wrap;
|
||||||
|
}
|
||||||
|
|
||||||
|
.thumbnail {
|
||||||
|
width: 300px;
|
||||||
|
display: flex;
|
||||||
|
flex-direction: column;
|
||||||
|
justify-content: space-between;
|
||||||
|
}
|
||||||
|
|
||||||
|
.thumbnail__image {
|
||||||
|
max-width: 100%;
|
||||||
|
border: none;
|
||||||
|
}
|
||||||
|
|
||||||
|
/*
|
||||||
|
[type="submit"] {
|
||||||
|
border-radius: 1em;
|
||||||
|
margin: 1em;
|
||||||
|
padding: 1em;
|
||||||
|
}
|
||||||
|
|
||||||
|
.uploadform {
|
||||||
|
}
|
||||||
|
*/
|
||||||
|
|
||||||
|
/*
|
||||||
|
[type="file"] {
|
||||||
|
border: 0;
|
||||||
|
clip: rect(0, 0, 0, 0);
|
||||||
|
height: 1px;
|
||||||
|
overflow: hidden;
|
||||||
|
padding: 0;
|
||||||
|
position: absolute !important;
|
||||||
|
white-space: nowrap;
|
||||||
|
width: 1px;
|
||||||
|
}
|
||||||
|
|
||||||
|
[type="file"] + label {
|
||||||
|
background-color: rgb(0, 86, 112);
|
||||||
|
border-radius: 1em;
|
||||||
|
color: #fff;
|
||||||
|
cursor: pointer;
|
||||||
|
display: inline-block;
|
||||||
|
padding: 1em;
|
||||||
|
margin: 1em;
|
||||||
|
transition: background-color 0.3s;
|
||||||
|
}
|
||||||
|
|
||||||
|
[type="file"]:focus + label,
|
||||||
|
[type="file"] + label:hover {
|
||||||
|
background-color: #67b0ff;
|
||||||
|
}
|
||||||
|
|
||||||
|
[type="file"]:focus + label {
|
||||||
|
outline: 1px dotted #000;
|
||||||
|
outline: -webkit-focus-ring-color auto 5px;
|
||||||
|
}
|
||||||
|
*/
|
||||||
|
|
||||||
|
@media screen and (max-width: 1080px) { /* This is the screen width of a OnePlus 8 */
|
||||||
|
body {
|
||||||
|
font-size: xx-large;
|
||||||
|
}
|
||||||
|
|
||||||
|
.authentication-form {
|
||||||
|
width: 100%;
|
||||||
|
}
|
||||||
|
|
||||||
|
.upload-form__selector {
|
||||||
|
font-size: larger;
|
||||||
|
}
|
||||||
|
|
||||||
|
.upload-form__button {
|
||||||
|
font-size: larger;
|
||||||
|
}
|
||||||
|
|
||||||
|
.thumbnail {
|
||||||
|
width: 100%;
|
||||||
|
}
|
||||||
|
|
||||||
|
/*
|
||||||
|
[type="submit"] {
|
||||||
|
font-size: xx-large;
|
||||||
|
width: 100%;
|
||||||
|
}
|
||||||
|
|
||||||
|
.uploadform {
|
||||||
|
display: flex;
|
||||||
|
}
|
||||||
|
|
||||||
|
[type="file"] + label {
|
||||||
|
width: 100%;
|
||||||
|
}
|
||||||
|
|
||||||
|
.thumbnail {
|
||||||
|
max-width: 100%;
|
||||||
|
margin: 1em;
|
||||||
|
}
|
||||||
|
|
||||||
|
.file {
|
||||||
|
display: flex;
|
||||||
|
flex-direction: column;
|
||||||
|
width: 100%;
|
||||||
|
}
|
||||||
|
*/
|
||||||
|
}
|
|
@ -43,6 +43,7 @@
|
||||||
pkgs.cargo-nextest
|
pkgs.cargo-nextest
|
||||||
pkgs.crate2nix
|
pkgs.crate2nix
|
||||||
pkgs.wasm-pack
|
pkgs.wasm-pack
|
||||||
|
pkgs.sqlx-cli
|
||||||
typeshare.packages."x86_64-linux".default
|
typeshare.packages."x86_64-linux".default
|
||||||
];
|
];
|
||||||
LIBCLANG_PATH="${pkgs.llvmPackages.libclang.lib}/lib";
|
LIBCLANG_PATH="${pkgs.llvmPackages.libclang.lib}/lib";
|
||||||
|
|
|
@ -5,7 +5,6 @@ edition = "2018"
|
||||||
version = "0.2.0"
|
version = "0.2.0"
|
||||||
description = "An ergonomics wrapper around Fluent-RS"
|
description = "An ergonomics wrapper around Fluent-RS"
|
||||||
license = "GPL-3.0-only"
|
license = "GPL-3.0-only"
|
||||||
license-file = "../COPYING"
|
|
||||||
homepage = "https://github.com/luminescent-dreams/fluent-ergonomics"
|
homepage = "https://github.com/luminescent-dreams/fluent-ergonomics"
|
||||||
repository = "https://github.com/luminescent-dreams/fluent-ergonomics"
|
repository = "https://github.com/luminescent-dreams/fluent-ergonomics"
|
||||||
categories = ["internationalization"]
|
categories = ["internationalization"]
|
||||||
|
|
|
@ -171,14 +171,14 @@ impl FluentErgo {
|
||||||
match entry {
|
match entry {
|
||||||
Entry::Occupied(mut e) => {
|
Entry::Occupied(mut e) => {
|
||||||
let bundle = e.get_mut();
|
let bundle = e.get_mut();
|
||||||
bundle.add_resource(res).map_err(|err| Error::from(err))
|
bundle.add_resource(res).map_err(Error::from)
|
||||||
}
|
}
|
||||||
Entry::Vacant(e) => {
|
Entry::Vacant(e) => {
|
||||||
let mut bundle: FluentBundle<
|
let mut bundle: FluentBundle<
|
||||||
FluentResource,
|
FluentResource,
|
||||||
intl_memoizer::concurrent::IntlLangMemoizer,
|
intl_memoizer::concurrent::IntlLangMemoizer,
|
||||||
> = FluentBundle::new_concurrent(vec![lang]);
|
> = FluentBundle::new_concurrent(vec![lang]);
|
||||||
bundle.add_resource(res).map_err(|err| Error::from(err))?;
|
bundle.add_resource(res).map_err(Error::from)?;
|
||||||
e.insert(bundle);
|
e.insert(bundle);
|
||||||
Ok(())
|
Ok(())
|
||||||
}
|
}
|
||||||
|
@ -248,16 +248,10 @@ impl FluentErgo {
|
||||||
///
|
///
|
||||||
pub fn tr(&self, msgid: &str, args: Option<&FluentArgs>) -> Result<String, Error> {
|
pub fn tr(&self, msgid: &str, args: Option<&FluentArgs>) -> Result<String, Error> {
|
||||||
let bundles = self.bundles.read().unwrap();
|
let bundles = self.bundles.read().unwrap();
|
||||||
let result: Option<String> = self
|
let result: Option<String> = self.languages.iter().find_map(|lang| {
|
||||||
.languages
|
|
||||||
.iter()
|
|
||||||
.map(|lang| {
|
|
||||||
let bundle = bundles.get(lang)?;
|
let bundle = bundles.get(lang)?;
|
||||||
self.tr_(bundle, msgid, args)
|
self.tr_(bundle, msgid, args)
|
||||||
})
|
});
|
||||||
.filter(|v| v.is_some())
|
|
||||||
.map(|v| v.unwrap())
|
|
||||||
.next();
|
|
||||||
|
|
||||||
match result {
|
match result {
|
||||||
Some(r) => Ok(r),
|
Some(r) => Ok(r),
|
||||||
|
@ -276,8 +270,8 @@ impl FluentErgo {
|
||||||
let res = match pattern {
|
let res = match pattern {
|
||||||
None => None,
|
None => None,
|
||||||
Some(p) => {
|
Some(p) => {
|
||||||
let res = bundle.format_pattern(&p, args, &mut errors);
|
let res = bundle.format_pattern(p, args, &mut errors);
|
||||||
if errors.len() > 0 {
|
if !errors.is_empty() {
|
||||||
println!("Errors in formatting: {:?}", errors)
|
println!("Errors in formatting: {:?}", errors)
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|
|
@ -5,7 +5,7 @@ pub struct Latitude(f32);
|
||||||
|
|
||||||
impl From<f32> for Latitude {
|
impl From<f32> for Latitude {
|
||||||
fn from(val: f32) -> Self {
|
fn from(val: f32) -> Self {
|
||||||
if val > 90.0 || val < -90.0 {
|
if !(-90.0..=90.0).contains(&val) {
|
||||||
panic!("Latitude is outside of range");
|
panic!("Latitude is outside of range");
|
||||||
}
|
}
|
||||||
Self(val)
|
Self(val)
|
||||||
|
@ -23,7 +23,7 @@ pub struct Longitude(f32);
|
||||||
|
|
||||||
impl From<f32> for Longitude {
|
impl From<f32> for Longitude {
|
||||||
fn from(val: f32) -> Self {
|
fn from(val: f32) -> Self {
|
||||||
if val > 180.0 || val < -180.0 {
|
if !(-180.0..=180.0).contains(&val) {
|
||||||
panic!("Longitude is outside fo range");
|
panic!("Longitude is outside fo range");
|
||||||
}
|
}
|
||||||
Self(val)
|
Self(val)
|
||||||
|
|
|
@ -45,7 +45,7 @@ impl ApplicationWindow {
|
||||||
]
|
]
|
||||||
.into_iter()
|
.into_iter()
|
||||||
.map(|name| {
|
.map(|name| {
|
||||||
let playlist = PlaylistCard::new();
|
let playlist = PlaylistCard::default();
|
||||||
playlist.set_name(name);
|
playlist.set_name(name);
|
||||||
playlist
|
playlist
|
||||||
})
|
})
|
||||||
|
|
|
@ -31,8 +31,8 @@ glib::wrapper! {
|
||||||
pub struct PlaylistCard(ObjectSubclass<PlaylistCardPrivate>) @extends gtk::Box, gtk::Widget, @implements gtk::Orientable;
|
pub struct PlaylistCard(ObjectSubclass<PlaylistCardPrivate>) @extends gtk::Box, gtk::Widget, @implements gtk::Orientable;
|
||||||
}
|
}
|
||||||
|
|
||||||
impl PlaylistCard {
|
impl Default for PlaylistCard {
|
||||||
pub fn new() -> Self {
|
fn default() -> Self {
|
||||||
let s: Self = Object::builder().build();
|
let s: Self = Object::builder().build();
|
||||||
s.set_orientation(gtk::Orientation::Vertical);
|
s.set_orientation(gtk::Orientation::Vertical);
|
||||||
s.add_css_class("playlist-card");
|
s.add_css_class("playlist-card");
|
||||||
|
@ -43,7 +43,9 @@ impl PlaylistCard {
|
||||||
|
|
||||||
s
|
s
|
||||||
}
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
impl PlaylistCard {
|
||||||
pub fn set_name(&self, s: &str) {
|
pub fn set_name(&self, s: &str) {
|
||||||
self.imp().name.set_text(s);
|
self.imp().name.set_text(s);
|
||||||
}
|
}
|
||||||
|
|
|
@ -1,7 +1,5 @@
|
||||||
use gtk::prelude::*;
|
|
||||||
|
|
||||||
fn main() {
|
fn main() {
|
||||||
gtk::init();
|
let _ = gtk::init();
|
||||||
for name in gtk::IconTheme::new().icon_names() {
|
for name in gtk::IconTheme::new().icon_names() {
|
||||||
println!("{}", name);
|
println!("{}", name);
|
||||||
}
|
}
|
||||||
|
|
|
@ -10,10 +10,9 @@ Luminescent Dreams Tools is distributed in the hope that it will be useful, but
|
||||||
You should have received a copy of the GNU General Public License along with Lumeto. If not, see <https://www.gnu.org/licenses/>.
|
You should have received a copy of the GNU General Public License along with Lumeto. If not, see <https://www.gnu.org/licenses/>.
|
||||||
*/
|
*/
|
||||||
|
|
||||||
use cairo::Context;
|
|
||||||
use coordinates::{hex_map::parse_data, AxialAddr};
|
use coordinates::{hex_map::parse_data, AxialAddr};
|
||||||
use gio::resources_lookup_data;
|
use gio::resources_lookup_data;
|
||||||
use glib::{subclass::InitializingObject, Object};
|
use glib::Object;
|
||||||
use gtk::{gio, prelude::*, subclass::prelude::*, Application, DrawingArea};
|
use gtk::{gio, prelude::*, subclass::prelude::*, Application, DrawingArea};
|
||||||
use image::io::Reader as ImageReader;
|
use image::io::Reader as ImageReader;
|
||||||
use std::{cell::RefCell, io::Cursor, rc::Rc};
|
use std::{cell::RefCell, io::Cursor, rc::Rc};
|
||||||
|
@ -23,7 +22,7 @@ mod palette_entry;
|
||||||
mod tile;
|
mod tile;
|
||||||
mod utilities;
|
mod utilities;
|
||||||
|
|
||||||
const APP_ID: &'static str = "com.luminescent-dreams.hex-grid";
|
const APP_ID: &str = "com.luminescent-dreams.hex-grid";
|
||||||
const HEX_RADIUS: f64 = 50.;
|
const HEX_RADIUS: f64 = 50.;
|
||||||
const MAP_RADIUS: usize = 3;
|
const MAP_RADIUS: usize = 3;
|
||||||
const DRAWING_ORIGIN: (f64, f64) = (1024. / 2., 768. / 2.);
|
const DRAWING_ORIGIN: (f64, f64) = (1024. / 2., 768. / 2.);
|
||||||
|
@ -178,14 +177,14 @@ impl ObjectImpl for HexGridWindowPrivate {
|
||||||
let norm_x = x - DRAWING_ORIGIN.0;
|
let norm_x = x - DRAWING_ORIGIN.0;
|
||||||
let norm_y = y - DRAWING_ORIGIN.1;
|
let norm_y = y - DRAWING_ORIGIN.1;
|
||||||
let q = (2. / 3. * norm_x) / HEX_RADIUS;
|
let q = (2. / 3. * norm_x) / HEX_RADIUS;
|
||||||
let r = (-1. / 3. * norm_x + (3. as f64).sqrt() / 3. * norm_y) / HEX_RADIUS;
|
let r = (-1. / 3. * norm_x + (3_f64).sqrt() / 3. * norm_y) / HEX_RADIUS;
|
||||||
|
|
||||||
let (q, r) = axial_round(q, r);
|
let (q, r) = axial_round(q, r);
|
||||||
let coordinate = AxialAddr::new(q, r);
|
let coordinate = AxialAddr::new(q, r);
|
||||||
canvas_address.set_value(&format!("{:.0} {:.0}", x, y));
|
canvas_address.set_value(&format!("{:.0} {:.0}", x, y));
|
||||||
|
|
||||||
if coordinate.distance(&AxialAddr::origin()) > MAP_RADIUS {
|
if coordinate.distance(&AxialAddr::origin()) > MAP_RADIUS {
|
||||||
hex_address.set_value(&format!("-----"));
|
hex_address.set_value("-----");
|
||||||
*c.borrow_mut() = None;
|
*c.borrow_mut() = None;
|
||||||
} else {
|
} else {
|
||||||
hex_address.set_value(&format!("{:.0} {:.0}", coordinate.q(), coordinate.r()));
|
hex_address.set_value(&format!("{:.0} {:.0}", coordinate.q(), coordinate.r()));
|
||||||
|
@ -209,10 +208,10 @@ impl ObjectImpl for HexGridWindowPrivate {
|
||||||
DRAWING_ORIGIN.0 + HEX_RADIUS * (3. / 2. * (coordinate.q() as f64));
|
DRAWING_ORIGIN.0 + HEX_RADIUS * (3. / 2. * (coordinate.q() as f64));
|
||||||
let center_y = DRAWING_ORIGIN.1
|
let center_y = DRAWING_ORIGIN.1
|
||||||
+ HEX_RADIUS
|
+ HEX_RADIUS
|
||||||
* ((3. as f64).sqrt() / 2. * (coordinate.q() as f64)
|
* ((3_f64).sqrt() / 2. * (coordinate.q() as f64)
|
||||||
+ (3. as f64).sqrt() * (coordinate.r() as f64));
|
+ (3_f64).sqrt() * (coordinate.r() as f64));
|
||||||
let translate_x = center_x - HEX_RADIUS;
|
let translate_x = center_x - HEX_RADIUS;
|
||||||
let translate_y = center_y - (3. as f64).sqrt() * HEX_RADIUS / 2.;
|
let translate_y = center_y - (3_f64).sqrt() * HEX_RADIUS / 2.;
|
||||||
|
|
||||||
let tile = match hex_map.get(&coordinate).unwrap() {
|
let tile = match hex_map.get(&coordinate).unwrap() {
|
||||||
tile::Terrain::Mountain => &mountain,
|
tile::Terrain::Mountain => &mountain,
|
||||||
|
@ -249,10 +248,11 @@ impl HexGridWindow {
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
/*
|
||||||
fn draw_hexagon(context: &Context, center_x: f64, center_y: f64, radius: f64) {
|
fn draw_hexagon(context: &Context, center_x: f64, center_y: f64, radius: f64) {
|
||||||
let ul_x = center_x - radius;
|
let ul_x = center_x - radius;
|
||||||
let ul_y = center_y - (3. as f64).sqrt() * radius / 2.;
|
let ul_y = center_y - (3_f64).sqrt() * radius / 2.;
|
||||||
let points: Vec<(f64, f64)> = utilities::hexagon(radius * 2., (3. as f64).sqrt() * radius);
|
let points: Vec<(f64, f64)> = utilities::hexagon(radius * 2., (3_f64).sqrt() * radius);
|
||||||
context.new_path();
|
context.new_path();
|
||||||
context.move_to(ul_x + points[0].0, ul_y + points[0].1);
|
context.move_to(ul_x + points[0].0, ul_y + points[0].1);
|
||||||
context.line_to(ul_x + points[1].0, ul_y + points[1].1);
|
context.line_to(ul_x + points[1].0, ul_y + points[1].1);
|
||||||
|
@ -262,6 +262,7 @@ fn draw_hexagon(context: &Context, center_x: f64, center_y: f64, radius: f64) {
|
||||||
context.line_to(ul_x + points[5].0, ul_y + points[5].1);
|
context.line_to(ul_x + points[5].0, ul_y + points[5].1);
|
||||||
context.close_path();
|
context.close_path();
|
||||||
}
|
}
|
||||||
|
*/
|
||||||
|
|
||||||
fn axial_round(q_f64: f64, r_f64: f64) -> (i32, i32) {
|
fn axial_round(q_f64: f64, r_f64: f64) -> (i32, i32) {
|
||||||
let s_f64 = -q_f64 - r_f64;
|
let s_f64 = -q_f64 - r_f64;
|
||||||
|
|
|
@ -27,20 +27,20 @@ pub fn hexagon(width: f64, height: f64) -> Vec<(f64, f64)> {
|
||||||
(center_x + radius, center_y),
|
(center_x + radius, center_y),
|
||||||
(
|
(
|
||||||
center_x + radius / 2.,
|
center_x + radius / 2.,
|
||||||
center_y + (3. as f64).sqrt() * radius / 2.,
|
center_y + (3_f64).sqrt() * radius / 2.,
|
||||||
),
|
),
|
||||||
(
|
(
|
||||||
center_x - radius / 2.,
|
center_x - radius / 2.,
|
||||||
center_y + (3. as f64).sqrt() * radius / 2.,
|
center_y + (3_f64).sqrt() * radius / 2.,
|
||||||
),
|
),
|
||||||
(center_x - radius, center_y),
|
(center_x - radius, center_y),
|
||||||
(
|
(
|
||||||
center_x - radius / 2.,
|
center_x - radius / 2.,
|
||||||
center_y - (3. as f64).sqrt() * radius / 2.,
|
center_y - (3_f64).sqrt() * radius / 2.,
|
||||||
),
|
),
|
||||||
(
|
(
|
||||||
center_x + radius / 2.,
|
center_x + radius / 2.,
|
||||||
center_y - (3. as f64).sqrt() * radius / 2.,
|
center_y - (3_f64).sqrt() * radius / 2.,
|
||||||
),
|
),
|
||||||
]
|
]
|
||||||
}
|
}
|
||||||
|
|
|
@ -7,7 +7,6 @@ edition = "2018"
|
||||||
keywords = ["date", "time", "calendar"]
|
keywords = ["date", "time", "calendar"]
|
||||||
categories = ["date-and-time"]
|
categories = ["date-and-time"]
|
||||||
license = "GPL-3.0-only"
|
license = "GPL-3.0-only"
|
||||||
license-file = "../COPYING"
|
|
||||||
|
|
||||||
[dependencies]
|
[dependencies]
|
||||||
chrono = { version = "0.4" }
|
chrono = { version = "0.4" }
|
||||||
|
|
|
@ -141,9 +141,7 @@ impl IFC {
|
||||||
}
|
}
|
||||||
|
|
||||||
pub fn ymd(year: i32, month: u8, day: u8) -> Result<Self, Error> {
|
pub fn ymd(year: i32, month: u8, day: u8) -> Result<Self, Error> {
|
||||||
if month < 1 || month > 13 {
|
if !(1..=13).contains(&month) || !(1..=28).contains(&day) {
|
||||||
Err(Error::InvalidDate)
|
|
||||||
} else if day < 1 || day > 28 {
|
|
||||||
Err(Error::InvalidDate)
|
Err(Error::InvalidDate)
|
||||||
} else {
|
} else {
|
||||||
Ok(Self::Day(Day { year, month, day }))
|
Ok(Self::Day(Day { year, month, day }))
|
||||||
|
@ -248,12 +246,12 @@ impl From<chrono::NaiveDate> for IFC {
|
||||||
if is_leap_year(date.year())
|
if is_leap_year(date.year())
|
||||||
&& date > NaiveDate::from_ymd_opt(date.year(), 6, 17).unwrap()
|
&& date > NaiveDate::from_ymd_opt(date.year(), 6, 17).unwrap()
|
||||||
{
|
{
|
||||||
days = days - 1;
|
days -= 1;
|
||||||
}
|
}
|
||||||
let mut month: u8 = (days / 28).try_into().unwrap();
|
let mut month: u8 = (days / 28).try_into().unwrap();
|
||||||
let mut day: u8 = (days % 28).try_into().unwrap();
|
let mut day: u8 = (days % 28).try_into().unwrap();
|
||||||
if day == 0 {
|
if day == 0 {
|
||||||
month = month - 1;
|
month -= 1;
|
||||||
day = 28;
|
day = 28;
|
||||||
}
|
}
|
||||||
Self::Day(Day {
|
Self::Day(Day {
|
||||||
|
|
|
@ -120,7 +120,7 @@ impl CoreApp {
|
||||||
app_state.game = Some(GameState {
|
app_state.game = Some(GameState {
|
||||||
white_player,
|
white_player,
|
||||||
black_player,
|
black_player,
|
||||||
..GameState::new()
|
..GameState::default()
|
||||||
});
|
});
|
||||||
let game_state = app_state.game.as_ref().unwrap();
|
let game_state = app_state.game.as_ref().unwrap();
|
||||||
CoreResponse::PlayingFieldView(playing_field(game_state))
|
CoreResponse::PlayingFieldView(playing_field(game_state))
|
||||||
|
|
|
@ -1,7 +1,7 @@
|
||||||
use crate::{BoardError, Color, Size};
|
use crate::{BoardError, Color, Size};
|
||||||
use std::collections::HashSet;
|
use std::collections::HashSet;
|
||||||
|
|
||||||
#[derive(Clone, Debug)]
|
#[derive(Clone, Debug, Default)]
|
||||||
pub struct Board {
|
pub struct Board {
|
||||||
pub size: Size,
|
pub size: Size,
|
||||||
pub groups: Vec<Group>,
|
pub groups: Vec<Group>,
|
||||||
|
@ -14,7 +14,7 @@ impl std::fmt::Display for Board {
|
||||||
for c in 0..19 {
|
for c in 0..19 {
|
||||||
write!(f, "{:2}", c)?;
|
write!(f, "{:2}", c)?;
|
||||||
}
|
}
|
||||||
writeln!(f, "")?;
|
writeln!(f)?;
|
||||||
|
|
||||||
for row in 0..self.size.height {
|
for row in 0..self.size.height {
|
||||||
write!(f, " {:2}", row)?;
|
write!(f, " {:2}", row)?;
|
||||||
|
@ -25,7 +25,7 @@ impl std::fmt::Display for Board {
|
||||||
Some(Color::White) => write!(f, " O")?,
|
Some(Color::White) => write!(f, " O")?,
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
writeln!(f, "")?;
|
writeln!(f)?;
|
||||||
}
|
}
|
||||||
Ok(())
|
Ok(())
|
||||||
}
|
}
|
||||||
|
@ -38,16 +38,16 @@ impl PartialEq for Board {
|
||||||
}
|
}
|
||||||
|
|
||||||
for group in self.groups.iter() {
|
for group in self.groups.iter() {
|
||||||
if !other.groups.contains(&group) {
|
if !other.groups.contains(group) {
|
||||||
return false;
|
return false;
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
for group in other.groups.iter() {
|
for group in other.groups.iter() {
|
||||||
if !self.groups.contains(&group) {
|
if !self.groups.contains(group) {
|
||||||
return false;
|
return false;
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
return true;
|
true
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
@ -79,7 +79,7 @@ pub struct Coordinate {
|
||||||
|
|
||||||
impl Board {
|
impl Board {
|
||||||
pub fn place_stone(mut self, coordinate: Coordinate, color: Color) -> Result<Self, BoardError> {
|
pub fn place_stone(mut self, coordinate: Coordinate, color: Color) -> Result<Self, BoardError> {
|
||||||
if let Some(_) = self.stone(&coordinate) {
|
if self.stone(&coordinate).is_some() {
|
||||||
return Err(BoardError::InvalidPosition);
|
return Err(BoardError::InvalidPosition);
|
||||||
}
|
}
|
||||||
|
|
||||||
|
@ -92,7 +92,7 @@ impl Board {
|
||||||
acc.union(&set).cloned().collect()
|
acc.union(&set).cloned().collect()
|
||||||
});
|
});
|
||||||
|
|
||||||
friendly_group.insert(coordinate.clone());
|
friendly_group.insert(coordinate);
|
||||||
|
|
||||||
self.groups
|
self.groups
|
||||||
.retain(|g| g.coordinates.is_disjoint(&friendly_group));
|
.retain(|g| g.coordinates.is_disjoint(&friendly_group));
|
||||||
|
@ -138,7 +138,7 @@ impl Board {
|
||||||
let mut grps: Vec<Group> = Vec::new();
|
let mut grps: Vec<Group> = Vec::new();
|
||||||
|
|
||||||
adjacent_spaces.for_each(|coord| match self.group(&coord) {
|
adjacent_spaces.for_each(|coord| match self.group(&coord) {
|
||||||
None => return,
|
None => {}
|
||||||
Some(adj) => {
|
Some(adj) => {
|
||||||
if group.color == adj.color {
|
if group.color == adj.color {
|
||||||
return;
|
return;
|
||||||
|
@ -157,15 +157,14 @@ impl Board {
|
||||||
group
|
group
|
||||||
.coordinates
|
.coordinates
|
||||||
.iter()
|
.iter()
|
||||||
.map(|c| self.adjacencies(c))
|
.flat_map(|c| self.adjacencies(c))
|
||||||
.flatten()
|
|
||||||
.collect::<HashSet<Coordinate>>()
|
.collect::<HashSet<Coordinate>>()
|
||||||
}
|
}
|
||||||
|
|
||||||
pub fn liberties(&self, group: &Group) -> usize {
|
pub fn liberties(&self, group: &Group) -> usize {
|
||||||
self.group_halo(group)
|
self.group_halo(group)
|
||||||
.into_iter()
|
.into_iter()
|
||||||
.filter(|c| self.stone(&c) == None)
|
.filter(|c| self.stone(c).is_none())
|
||||||
.count()
|
.count()
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|
|
@ -1,12 +1,14 @@
|
||||||
use std::{ffi::OsStr, io::Read, os::unix::ffi::OsStrExt, path::PathBuf};
|
use std::{io::Read, path::PathBuf};
|
||||||
|
|
||||||
use sgf::{go, parse_sgf, Game};
|
use sgf::{go, parse_sgf, Game};
|
||||||
use thiserror::Error;
|
use thiserror::Error;
|
||||||
|
|
||||||
#[derive(Error, Debug)]
|
#[derive(Error, Debug)]
|
||||||
pub enum Error {
|
pub enum Error {
|
||||||
|
/*
|
||||||
#[error("Database permission denied")]
|
#[error("Database permission denied")]
|
||||||
PermissionDenied,
|
PermissionDenied,
|
||||||
|
*/
|
||||||
#[error("An IO error occurred: {0}")]
|
#[error("An IO error occurred: {0}")]
|
||||||
IOError(std::io::Error),
|
IOError(std::io::Error),
|
||||||
}
|
}
|
||||||
|
@ -19,7 +21,6 @@ impl From<std::io::Error> for Error {
|
||||||
|
|
||||||
#[derive(Debug)]
|
#[derive(Debug)]
|
||||||
pub struct Database {
|
pub struct Database {
|
||||||
path: PathBuf,
|
|
||||||
games: Vec<go::Game>,
|
games: Vec<go::Game>,
|
||||||
}
|
}
|
||||||
|
|
||||||
|
@ -56,11 +57,7 @@ impl Database {
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
Ok(Database { path, games })
|
Ok(Database { games })
|
||||||
}
|
|
||||||
|
|
||||||
pub fn len(&self) -> usize {
|
|
||||||
self.games.len()
|
|
||||||
}
|
}
|
||||||
|
|
||||||
pub fn all_games(&self) -> impl Iterator<Item = &go::Game> {
|
pub fn all_games(&self) -> impl Iterator<Item = &go::Game> {
|
||||||
|
@ -86,14 +83,13 @@ mod test {
|
||||||
let db =
|
let db =
|
||||||
Database::open_path(PathBuf::from("fixtures/five_games/")).expect("database to open");
|
Database::open_path(PathBuf::from("fixtures/five_games/")).expect("database to open");
|
||||||
assert_eq!(db.all_games().count(), 5);
|
assert_eq!(db.all_games().count(), 5);
|
||||||
for game in db.all_games() {}
|
|
||||||
|
|
||||||
assert_matches!(db.all_games().find(|g| g.info.black_player == Some("Steve".to_owned())),
|
assert_matches!(db.all_games().find(|g| g.info.black_player == Some("Steve".to_owned())),
|
||||||
Some(game) => {
|
Some(game) => {
|
||||||
assert_eq!(game.info.black_player, Some("Steve".to_owned()));
|
assert_eq!(game.info.black_player, Some("Steve".to_owned()));
|
||||||
assert_eq!(game.info.white_player, Some("Savanni".to_owned()));
|
assert_eq!(game.info.white_player, Some("Savanni".to_owned()));
|
||||||
assert_eq!(game.info.date, vec![Date::Date(chrono::NaiveDate::from_ymd_opt(2023, 4, 19).unwrap())]);
|
assert_eq!(game.info.date, vec![Date::Date(chrono::NaiveDate::from_ymd_opt(2023, 4, 19).unwrap())]);
|
||||||
assert_eq!(game.info.komi, Some(6.5));
|
// assert_eq!(game.info.komi, Some(6.5));
|
||||||
}
|
}
|
||||||
);
|
);
|
||||||
}
|
}
|
||||||
|
|
|
@ -1,4 +1,3 @@
|
||||||
#[macro_use]
|
|
||||||
extern crate config_derive;
|
extern crate config_derive;
|
||||||
|
|
||||||
mod api;
|
mod api;
|
||||||
|
@ -10,11 +9,6 @@ pub use api::{
|
||||||
mod board;
|
mod board;
|
||||||
pub use board::*;
|
pub use board::*;
|
||||||
|
|
||||||
/*
|
|
||||||
mod config;
|
|
||||||
pub use config::*;
|
|
||||||
*/
|
|
||||||
|
|
||||||
mod database;
|
mod database;
|
||||||
|
|
||||||
mod types;
|
mod types;
|
||||||
|
|
|
@ -77,21 +77,18 @@ pub struct AppState {
|
||||||
impl AppState {
|
impl AppState {
|
||||||
pub fn new(database_path: DatabasePath) -> Self {
|
pub fn new(database_path: DatabasePath) -> Self {
|
||||||
Self {
|
Self {
|
||||||
game: Some(GameState::new()),
|
game: Some(GameState::default()),
|
||||||
database: Database::open_path(database_path.to_path_buf()).unwrap(),
|
database: Database::open_path(database_path.to_path_buf()).unwrap(),
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
pub fn place_stone(&mut self, req: PlayStoneRequest) {
|
pub fn place_stone(&mut self, req: PlayStoneRequest) {
|
||||||
match self.game {
|
if let Some(ref mut game) = self.game {
|
||||||
Some(ref mut game) => {
|
|
||||||
let _ = game.place_stone(Coordinate {
|
let _ = game.place_stone(Coordinate {
|
||||||
column: req.column,
|
column: req.column,
|
||||||
row: req.row,
|
row: req.row,
|
||||||
});
|
});
|
||||||
}
|
}
|
||||||
None => {}
|
|
||||||
}
|
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
@ -142,9 +139,9 @@ pub struct GameState {
|
||||||
pub black_clock: Duration,
|
pub black_clock: Duration,
|
||||||
}
|
}
|
||||||
|
|
||||||
impl GameState {
|
impl Default for GameState {
|
||||||
pub fn new() -> GameState {
|
fn default() -> Self {
|
||||||
GameState {
|
Self {
|
||||||
board: Board::new(),
|
board: Board::new(),
|
||||||
past_positions: vec![],
|
past_positions: vec![],
|
||||||
conversation: vec![],
|
conversation: vec![],
|
||||||
|
@ -161,7 +158,9 @@ impl GameState {
|
||||||
black_clock: Duration::from_secs(600),
|
black_clock: Duration::from_secs(600),
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
impl GameState {
|
||||||
fn place_stone(&mut self, coordinate: Coordinate) -> Result<(), BoardError> {
|
fn place_stone(&mut self, coordinate: Coordinate) -> Result<(), BoardError> {
|
||||||
let board = self.board.clone();
|
let board = self.board.clone();
|
||||||
let new_board = board.place_stone(coordinate, self.current_player)?;
|
let new_board = board.place_stone(coordinate, self.current_player)?;
|
||||||
|
@ -186,7 +185,7 @@ mod test {
|
||||||
|
|
||||||
#[test]
|
#[test]
|
||||||
fn current_player_changes_after_move() {
|
fn current_player_changes_after_move() {
|
||||||
let mut state = GameState::new();
|
let mut state = GameState::default();
|
||||||
assert_eq!(state.current_player, Color::Black);
|
assert_eq!(state.current_player, Color::Black);
|
||||||
state.place_stone(Coordinate { column: 9, row: 9 }).unwrap();
|
state.place_stone(Coordinate { column: 9, row: 9 }).unwrap();
|
||||||
assert_eq!(state.current_player, Color::White);
|
assert_eq!(state.current_player, Color::White);
|
||||||
|
@ -194,7 +193,7 @@ mod test {
|
||||||
|
|
||||||
#[test]
|
#[test]
|
||||||
fn current_player_remains_the_same_after_self_capture() {
|
fn current_player_remains_the_same_after_self_capture() {
|
||||||
let mut state = GameState::new();
|
let mut state = GameState::default();
|
||||||
state.board = Board::from_coordinates(
|
state.board = Board::from_coordinates(
|
||||||
vec![
|
vec![
|
||||||
(Coordinate { column: 17, row: 0 }, Color::White),
|
(Coordinate { column: 17, row: 0 }, Color::White),
|
||||||
|
@ -215,7 +214,7 @@ mod test {
|
||||||
|
|
||||||
#[test]
|
#[test]
|
||||||
fn ko_rules_are_enforced() {
|
fn ko_rules_are_enforced() {
|
||||||
let mut state = GameState::new();
|
let mut state = GameState::default();
|
||||||
state.board = Board::from_coordinates(
|
state.board = Board::from_coordinates(
|
||||||
vec![
|
vec![
|
||||||
(Coordinate { column: 7, row: 9 }, Color::White),
|
(Coordinate { column: 7, row: 9 }, Color::White),
|
||||||
|
|
|
@ -76,7 +76,7 @@ impl BoardElement {
|
||||||
}
|
}
|
||||||
|
|
||||||
fn addr(&self, column: u8, row: u8) -> usize {
|
fn addr(&self, column: u8, row: u8) -> usize {
|
||||||
((row as usize) * (self.size.width as usize) + (column as usize)) as usize
|
(row as usize) * (self.size.width as usize) + (column as usize)
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|
|
@ -28,5 +28,5 @@ where
|
||||||
let result = f();
|
let result = f();
|
||||||
let end = std::time::Instant::now();
|
let end = std::time::Instant::now();
|
||||||
println!("[Trace: {}] {:?}", trace_name, end - start);
|
println!("[Trace: {}] {:?}", trace_name, end - start);
|
||||||
return result;
|
result
|
||||||
}
|
}
|
||||||
|
|
|
@ -34,8 +34,8 @@ fn handle_response(api: CoreApi, app_window: &AppWindow, message: CoreResponse)
|
||||||
app_window.set_content(&field);
|
app_window.set_content(&field);
|
||||||
*playing_field = Some(field);
|
*playing_field = Some(field);
|
||||||
})
|
})
|
||||||
} else {
|
} else if let Some(field) = playing_field.as_ref() {
|
||||||
playing_field.as_ref().map(|field| field.update_view(view));
|
field.update_view(view)
|
||||||
}
|
}
|
||||||
}),
|
}),
|
||||||
CoreResponse::UpdatedConfigurationView(view) => perftrace("UpdatedConfiguration", || {
|
CoreResponse::UpdatedConfigurationView(view) => perftrace("UpdatedConfiguration", || {
|
||||||
|
@ -56,13 +56,13 @@ fn main() {
|
||||||
);
|
);
|
||||||
|
|
||||||
let config_path = std::env::var("CONFIG")
|
let config_path = std::env::var("CONFIG")
|
||||||
.and_then(|config| Ok(std::path::PathBuf::from(config)))
|
.map(std::path::PathBuf::from)
|
||||||
.or({
|
.or({
|
||||||
std::env::var("HOME").and_then(|base| {
|
std::env::var("HOME").map(|base| {
|
||||||
let mut config_path = std::path::PathBuf::from(base);
|
let mut config_path = std::path::PathBuf::from(base);
|
||||||
config_path.push(".config");
|
config_path.push(".config");
|
||||||
config_path.push("kifu");
|
config_path.push("kifu");
|
||||||
Ok(config_path)
|
config_path
|
||||||
})
|
})
|
||||||
})
|
})
|
||||||
.expect("no config path could be found");
|
.expect("no config path could be found");
|
||||||
|
@ -87,7 +87,7 @@ fn main() {
|
||||||
let (gtk_tx, gtk_rx) =
|
let (gtk_tx, gtk_rx) =
|
||||||
gtk::glib::MainContext::channel::<CoreResponse>(gtk::glib::PRIORITY_DEFAULT);
|
gtk::glib::MainContext::channel::<CoreResponse>(gtk::glib::PRIORITY_DEFAULT);
|
||||||
|
|
||||||
let app_window = AppWindow::new(&app);
|
let app_window = AppWindow::new(app);
|
||||||
|
|
||||||
let api = CoreApi {
|
let api = CoreApi {
|
||||||
gtk_tx,
|
gtk_tx,
|
||||||
|
@ -123,5 +123,5 @@ fn main() {
|
||||||
println!("running the gtk loop");
|
println!("running the gtk loop");
|
||||||
app.run();
|
app.run();
|
||||||
|
|
||||||
let _ = runtime.block_on(async { core_handle.await });
|
let _ = runtime.block_on(core_handle);
|
||||||
}
|
}
|
||||||
|
|
|
@ -89,7 +89,7 @@ impl ObjectImpl for BoardPrivate {
|
||||||
|
|
||||||
match background {
|
match background {
|
||||||
Ok(Some(ref background)) => {
|
Ok(Some(ref background)) => {
|
||||||
context.set_source_pixbuf(&background, 0., 0.);
|
context.set_source_pixbuf(background, 0., 0.);
|
||||||
context.paint().expect("paint should succeed");
|
context.paint().expect("paint should succeed");
|
||||||
}
|
}
|
||||||
Ok(None) | Err(_) => context.set_source_rgb(0.7, 0.7, 0.7),
|
Ok(None) | Err(_) => context.set_source_rgb(0.7, 0.7, 0.7),
|
||||||
|
@ -140,11 +140,8 @@ impl ObjectImpl for BoardPrivate {
|
||||||
|
|
||||||
(0..19).for_each(|col| {
|
(0..19).for_each(|col| {
|
||||||
(0..19).for_each(|row| {
|
(0..19).for_each(|row| {
|
||||||
match board.stone(row, col) {
|
if let IntersectionElement::Filled(stone) = board.stone(row, col) {
|
||||||
IntersectionElement::Filled(stone) => {
|
pen.stone(context, row, col, stone.color, stone.liberties);
|
||||||
pen.stone(&context, row, col, stone.color, stone.liberties);
|
|
||||||
}
|
|
||||||
_ => {}
|
|
||||||
};
|
};
|
||||||
})
|
})
|
||||||
});
|
});
|
||||||
|
@ -152,15 +149,18 @@ impl ObjectImpl for BoardPrivate {
|
||||||
let cursor = cursor_location.borrow();
|
let cursor = cursor_location.borrow();
|
||||||
match *cursor {
|
match *cursor {
|
||||||
None => {}
|
None => {}
|
||||||
Some(ref cursor) => match board.stone(cursor.row, cursor.column) {
|
Some(ref cursor) => {
|
||||||
IntersectionElement::Empty(_) => pen.ghost_stone(
|
if let IntersectionElement::Empty(_) =
|
||||||
|
board.stone(cursor.row, cursor.column)
|
||||||
|
{
|
||||||
|
pen.ghost_stone(
|
||||||
context,
|
context,
|
||||||
cursor.row,
|
cursor.row,
|
||||||
cursor.column,
|
cursor.column,
|
||||||
*current_player.borrow(),
|
*current_player.borrow(),
|
||||||
),
|
)
|
||||||
_ => {}
|
}
|
||||||
},
|
}
|
||||||
}
|
}
|
||||||
let render_end = std::time::Instant::now();
|
let render_end = std::time::Instant::now();
|
||||||
println!("board rendering time: {:?}", render_end - render_start);
|
println!("board rendering time: {:?}", render_end - render_start);
|
||||||
|
@ -208,16 +208,17 @@ impl ObjectImpl for BoardPrivate {
|
||||||
let cursor = cursor.borrow();
|
let cursor = cursor.borrow();
|
||||||
match *cursor {
|
match *cursor {
|
||||||
None => {}
|
None => {}
|
||||||
Some(ref cursor) => match board.stone(cursor.row, cursor.column) {
|
Some(ref cursor) => {
|
||||||
IntersectionElement::Empty(request) => {
|
if let IntersectionElement::Empty(request) =
|
||||||
|
board.stone(cursor.row, cursor.column)
|
||||||
|
{
|
||||||
println!("need to send request: {:?}", request);
|
println!("need to send request: {:?}", request);
|
||||||
api.borrow()
|
api.borrow()
|
||||||
.as_ref()
|
.as_ref()
|
||||||
.expect("API must exist")
|
.expect("API must exist")
|
||||||
.dispatch(request);
|
.dispatch(request);
|
||||||
}
|
}
|
||||||
_ => {}
|
}
|
||||||
},
|
|
||||||
}
|
}
|
||||||
});
|
});
|
||||||
}
|
}
|
||||||
|
|
|
@ -39,7 +39,7 @@ impl Chat {
|
||||||
element.messages.into_iter().for_each(|msg| {
|
element.messages.into_iter().for_each(|msg| {
|
||||||
s.imp().chat_history.append(
|
s.imp().chat_history.append(
|
||||||
>k::Label::builder()
|
>k::Label::builder()
|
||||||
.label(&msg)
|
.label(msg)
|
||||||
.halign(gtk::Align::Start)
|
.halign(gtk::Align::Start)
|
||||||
.build(),
|
.build(),
|
||||||
)
|
)
|
||||||
|
|
|
@ -26,8 +26,8 @@ glib::wrapper! {
|
||||||
pub struct GamePreview(ObjectSubclass<GamePreviewPrivate>) @extends gtk::Box, gtk::Widget, @implements gtk::Orientable;
|
pub struct GamePreview(ObjectSubclass<GamePreviewPrivate>) @extends gtk::Box, gtk::Widget, @implements gtk::Orientable;
|
||||||
}
|
}
|
||||||
|
|
||||||
impl GamePreview {
|
impl Default for GamePreview {
|
||||||
pub fn new() -> GamePreview {
|
fn default() -> Self {
|
||||||
let s: Self = Object::builder().build();
|
let s: Self = Object::builder().build();
|
||||||
s.set_orientation(gtk::Orientation::Horizontal);
|
s.set_orientation(gtk::Orientation::Horizontal);
|
||||||
s.set_homogeneous(true);
|
s.set_homogeneous(true);
|
||||||
|
@ -41,7 +41,9 @@ impl GamePreview {
|
||||||
|
|
||||||
s
|
s
|
||||||
}
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
impl GamePreview {
|
||||||
pub fn set_game(&self, element: GamePreviewElement) {
|
pub fn set_game(&self, element: GamePreviewElement) {
|
||||||
self.imp().black_player.set_text(&element.black_player);
|
self.imp().black_player.set_text(&element.black_player);
|
||||||
self.imp().white_player.set_text(&element.white_player);
|
self.imp().white_player.set_text(&element.white_player);
|
||||||
|
|
|
@ -137,7 +137,7 @@ impl Home {
|
||||||
.build();
|
.build();
|
||||||
s.append(&new_game_button);
|
s.append(&new_game_button);
|
||||||
|
|
||||||
let library = Library::new();
|
let library = Library::default();
|
||||||
let library_view = gtk::ScrolledWindow::builder()
|
let library_view = gtk::ScrolledWindow::builder()
|
||||||
.hscrollbar_policy(gtk::PolicyType::Never)
|
.hscrollbar_policy(gtk::PolicyType::Never)
|
||||||
.min_content_width(360)
|
.min_content_width(360)
|
||||||
|
|
|
@ -1,7 +1,6 @@
|
||||||
use crate::ui::GamePreview;
|
|
||||||
use adw::{prelude::*, subclass::prelude::*};
|
use adw::{prelude::*, subclass::prelude::*};
|
||||||
use glib::Object;
|
use glib::Object;
|
||||||
use gtk::{glib, prelude::*, subclass::prelude::*};
|
use gtk::glib;
|
||||||
use kifu_core::ui::GamePreviewElement;
|
use kifu_core::ui::GamePreviewElement;
|
||||||
use std::{cell::RefCell, rc::Rc};
|
use std::{cell::RefCell, rc::Rc};
|
||||||
|
|
||||||
|
@ -92,7 +91,7 @@ impl Default for LibraryPrivate {
|
||||||
.set_child(Some(
|
.set_child(Some(
|
||||||
>k::Label::builder()
|
>k::Label::builder()
|
||||||
.halign(gtk::Align::Start)
|
.halign(gtk::Align::Start)
|
||||||
.ellipsize(pango::EllipsizeMode::End)
|
.ellipsize(gtk::pango::EllipsizeMode::End)
|
||||||
.build(),
|
.build(),
|
||||||
))
|
))
|
||||||
});
|
});
|
||||||
|
@ -100,10 +99,9 @@ impl Default for LibraryPrivate {
|
||||||
let list_item = list_item.downcast_ref::<gtk::ListItem>().unwrap();
|
let list_item = list_item.downcast_ref::<gtk::ListItem>().unwrap();
|
||||||
let game = list_item.item().and_downcast::<GameObject>().unwrap();
|
let game = list_item.item().and_downcast::<GameObject>().unwrap();
|
||||||
let preview = list_item.child().and_downcast::<gtk::Label>().unwrap();
|
let preview = list_item.child().and_downcast::<gtk::Label>().unwrap();
|
||||||
match game.game() {
|
if let Some(game) = game.game() {
|
||||||
Some(game) => preview.set_text(&bind(game)),
|
preview.set_text(&bind(game))
|
||||||
None => (),
|
}
|
||||||
};
|
|
||||||
});
|
});
|
||||||
factory
|
factory
|
||||||
}
|
}
|
||||||
|
@ -148,18 +146,20 @@ glib::wrapper! {
|
||||||
pub struct Library(ObjectSubclass<LibraryPrivate>) @extends adw::Bin, gtk::Widget;
|
pub struct Library(ObjectSubclass<LibraryPrivate>) @extends adw::Bin, gtk::Widget;
|
||||||
}
|
}
|
||||||
|
|
||||||
impl Library {
|
impl Default for Library {
|
||||||
pub fn new() -> Self {
|
fn default() -> Self {
|
||||||
let s: Self = Object::builder().build();
|
let s: Self = Object::builder().build();
|
||||||
|
|
||||||
s.set_child(Some(&s.imp().list_view));
|
s.set_child(Some(&s.imp().list_view));
|
||||||
s
|
s
|
||||||
}
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
impl Library {
|
||||||
pub fn set_games(&self, games: Vec<GamePreviewElement>) {
|
pub fn set_games(&self, games: Vec<GamePreviewElement>) {
|
||||||
let games = games
|
let games = games
|
||||||
.into_iter()
|
.into_iter()
|
||||||
.map(|g| GameObject::new(g))
|
.map(GameObject::new)
|
||||||
.collect::<Vec<GameObject>>();
|
.collect::<Vec<GameObject>>();
|
||||||
self.imp().model.extend_from_slice(&games);
|
self.imp().model.extend_from_slice(&games);
|
||||||
}
|
}
|
||||||
|
|
|
@ -1,7 +1,7 @@
|
||||||
use adw::prelude::*;
|
use adw::prelude::*;
|
||||||
use gio::resources_lookup_data;
|
use gio::resources_lookup_data;
|
||||||
use glib::IsA;
|
use glib::IsA;
|
||||||
use gtk::{prelude::*, STYLE_PROVIDER_PRIORITY_USER};
|
use gtk::STYLE_PROVIDER_PRIORITY_USER;
|
||||||
|
|
||||||
mod chat;
|
mod chat;
|
||||||
pub use chat::Chat;
|
pub use chat::Chat;
|
||||||
|
|
|
@ -50,11 +50,9 @@ impl PlayingField {
|
||||||
let chat = Chat::new(view.chat);
|
let chat = Chat::new(view.chat);
|
||||||
|
|
||||||
*s.imp().board.borrow_mut() = Some(Board::new(api));
|
*s.imp().board.borrow_mut() = Some(Board::new(api));
|
||||||
s.imp()
|
if let Some(board) = s.imp().board.borrow().as_ref() {
|
||||||
.board
|
s.attach(board, 1, 1, 1, 2)
|
||||||
.borrow()
|
}
|
||||||
.as_ref()
|
|
||||||
.map(|board| s.attach(board, 1, 1, 1, 2));
|
|
||||||
s.attach(&player_card_black, 2, 1, 1, 1);
|
s.attach(&player_card_black, 2, 1, 1, 1);
|
||||||
s.attach(&player_card_white, 3, 1, 1, 1);
|
s.attach(&player_card_white, 3, 1, 1, 1);
|
||||||
s.attach(&chat, 2, 2, 2, 1);
|
s.attach(&chat, 2, 2, 2, 1);
|
||||||
|
@ -63,20 +61,20 @@ impl PlayingField {
|
||||||
*s.imp().player_card_black.borrow_mut() = Some(player_card_black);
|
*s.imp().player_card_black.borrow_mut() = Some(player_card_black);
|
||||||
*s.imp().chat.borrow_mut() = Some(chat);
|
*s.imp().chat.borrow_mut() = Some(chat);
|
||||||
|
|
||||||
s.imp().board.borrow().as_ref().map(|board| {
|
if let Some(board) = s.imp().board.borrow().as_ref() {
|
||||||
board.set_board(view.board);
|
board.set_board(view.board);
|
||||||
board.set_current_player(view.current_player);
|
board.set_current_player(view.current_player);
|
||||||
});
|
};
|
||||||
|
|
||||||
s
|
s
|
||||||
}
|
}
|
||||||
|
|
||||||
pub fn update_view(&self, view: PlayingFieldView) {
|
pub fn update_view(&self, view: PlayingFieldView) {
|
||||||
perftrace("update_view", || {
|
perftrace("update_view", || {
|
||||||
self.imp().board.borrow().as_ref().map(|board| {
|
if let Some(board) = self.imp().board.borrow().as_ref() {
|
||||||
board.set_board(view.board);
|
board.set_board(view.board);
|
||||||
board.set_current_player(view.current_player);
|
board.set_current_player(view.current_player);
|
||||||
});
|
}
|
||||||
})
|
})
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
|
@ -13,11 +13,13 @@ pub struct MemoryCacheRecord<T> {
|
||||||
|
|
||||||
pub struct MemoryCache<T>(Arc<RwLock<HashMap<String, MemoryCacheRecord<T>>>>);
|
pub struct MemoryCache<T>(Arc<RwLock<HashMap<String, MemoryCacheRecord<T>>>>);
|
||||||
|
|
||||||
impl<T: Clone> MemoryCache<T> {
|
impl<T: Clone> Default for MemoryCache<T> {
|
||||||
pub fn new() -> MemoryCache<T> {
|
fn default() -> Self {
|
||||||
MemoryCache(Arc::new(RwLock::new(HashMap::new())))
|
Self(Arc::new(RwLock::new(HashMap::new())))
|
||||||
}
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
impl<T: Clone> MemoryCache<T> {
|
||||||
pub async fn find(&self, key: &str, f: impl Future<Output = (DateTime<Utc>, T)>) -> T {
|
pub async fn find(&self, key: &str, f: impl Future<Output = (DateTime<Utc>, T)>) -> T {
|
||||||
let val = {
|
let val = {
|
||||||
let cache = self.0.read().unwrap();
|
let cache = self.0.read().unwrap();
|
||||||
|
@ -53,7 +55,7 @@ mod tests {
|
||||||
|
|
||||||
#[tokio::test]
|
#[tokio::test]
|
||||||
async fn it_runs_the_requestor_when_the_value_does_not_exist() {
|
async fn it_runs_the_requestor_when_the_value_does_not_exist() {
|
||||||
let cache = MemoryCache::new();
|
let cache = MemoryCache::default();
|
||||||
let value = cache
|
let value = cache
|
||||||
.find("my_key", async { (Utc::now(), Value(15)) })
|
.find("my_key", async { (Utc::now(), Value(15)) })
|
||||||
.await;
|
.await;
|
||||||
|
@ -63,7 +65,7 @@ mod tests {
|
||||||
#[tokio::test]
|
#[tokio::test]
|
||||||
async fn it_runs_the_requestor_when_the_value_is_old() {
|
async fn it_runs_the_requestor_when_the_value_is_old() {
|
||||||
let run = Arc::new(RwLock::new(false));
|
let run = Arc::new(RwLock::new(false));
|
||||||
let cache = MemoryCache::new();
|
let cache = MemoryCache::default();
|
||||||
let _ = cache
|
let _ = cache
|
||||||
.find("my_key", async {
|
.find("my_key", async {
|
||||||
(Utc::now() - Duration::seconds(10), Value(15))
|
(Utc::now() - Duration::seconds(10), Value(15))
|
||||||
|
@ -82,7 +84,7 @@ mod tests {
|
||||||
#[tokio::test]
|
#[tokio::test]
|
||||||
async fn it_returns_the_cached_value_when_the_value_is_new() {
|
async fn it_returns_the_cached_value_when_the_value_is_new() {
|
||||||
let run = Arc::new(RwLock::new(false));
|
let run = Arc::new(RwLock::new(false));
|
||||||
let cache = MemoryCache::new();
|
let cache = MemoryCache::default();
|
||||||
let _ = cache
|
let _ = cache
|
||||||
.find("my_key", async {
|
.find("my_key", async {
|
||||||
(Utc::now() + Duration::seconds(10), Value(15))
|
(Utc::now() + Duration::seconds(10), Value(15))
|
||||||
|
|
|
@ -0,0 +1,10 @@
|
||||||
|
[package]
|
||||||
|
name = "nom-training"
|
||||||
|
version = "0.1.0"
|
||||||
|
edition = "2021"
|
||||||
|
|
||||||
|
# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html
|
||||||
|
|
||||||
|
[dependencies]
|
||||||
|
nom = { version = "7" }
|
||||||
|
cool_asserts = { version = "*" }
|
|
@ -0,0 +1,122 @@
|
||||||
|
// Write two separate parser functions
|
||||||
|
// One function returns `impl Parser<>`
|
||||||
|
// The other function returns `FnMut(I) -> IResult<I, ...`
|
||||||
|
// Test each with the `map` function and the `parse` function
|
||||||
|
use nom::{character::complete::digit1, error::ParseError, IResult, Parser};
|
||||||
|
|
||||||
|
#[derive(Clone, Copy, Debug, PartialEq)]
|
||||||
|
struct Container(i32);
|
||||||
|
|
||||||
|
#[allow(dead_code)]
|
||||||
|
fn parse_container_a<'a, E: ParseError<&'a str>>(
|
||||||
|
mut parser: impl Parser<&'a str, i32, E>,
|
||||||
|
) -> impl FnMut(&'a str) -> IResult<&'a str, Container, E> {
|
||||||
|
move |input| {
|
||||||
|
let (input, value) = parser.parse(input)?;
|
||||||
|
Ok((input, Container(value)))
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/*
|
||||||
|
// This form doesn't work. It is not possible in this case to get the ownership
|
||||||
|
// declarations correct on parser. The reason I would want to do this is for more
|
||||||
|
// concise representation of parse_container_a. It probably fails because map consumes
|
||||||
|
// the parser.
|
||||||
|
fn parse_container_b<'a, E: ParseError<&'a str>, P>(
|
||||||
|
mut parser: P,
|
||||||
|
) -> impl Parser<&'a str, Container, E>
|
||||||
|
where
|
||||||
|
P: Parser<&'a str, i32, E>,
|
||||||
|
{
|
||||||
|
move |input| parser.map(|val| Container(val)).parse(input)
|
||||||
|
}
|
||||||
|
*/
|
||||||
|
|
||||||
|
#[allow(dead_code)]
|
||||||
|
fn parse_container_c<'a, E: ParseError<&'a str>>(
|
||||||
|
parser: impl Parser<&'a str, i32, E>,
|
||||||
|
) -> impl Parser<&'a str, Container, E> {
|
||||||
|
parser.map(Container)
|
||||||
|
}
|
||||||
|
|
||||||
|
/*
|
||||||
|
// This form also doesn't work, for the same reason as parse_container_b doesn't work.
|
||||||
|
fn parse_container_d<'a, E: ParseError<&'a str>>(
|
||||||
|
parser: impl Parser<&'a str, i32, E>,
|
||||||
|
) -> impl FnMut(&'a str) -> IResult<&'a str, Container, E> {
|
||||||
|
|input| parser.map(|val| Container(val)).parse(input)
|
||||||
|
}
|
||||||
|
*/
|
||||||
|
|
||||||
|
// If I really want to do forms b and d, this works. I do the parser combination before
|
||||||
|
// creating the resulting function.
|
||||||
|
#[allow(dead_code)]
|
||||||
|
fn parse_container_e<'a, E: ParseError<&'a str>>(
|
||||||
|
parser: impl Parser<&'a str, i32, E>,
|
||||||
|
) -> impl Parser<&'a str, Container, E> {
|
||||||
|
let mut parser = parser.map(Container);
|
||||||
|
|
||||||
|
move |input| parser.parse(input)
|
||||||
|
}
|
||||||
|
|
||||||
|
#[allow(dead_code)]
|
||||||
|
fn parse_number_a<'a, E: ParseError<&'a str>>() -> impl FnMut(&'a str) -> IResult<&'a str, i32, E> {
|
||||||
|
parse_number
|
||||||
|
}
|
||||||
|
|
||||||
|
#[allow(dead_code)]
|
||||||
|
fn parse_number_b<'a, E: ParseError<&'a str>>() -> impl Parser<&'a str, i32, E> {
|
||||||
|
parse_number
|
||||||
|
}
|
||||||
|
|
||||||
|
#[allow(dead_code)]
|
||||||
|
fn parse_number<'a, E: ParseError<&'a str>>(input: &'a str) -> IResult<&'a str, i32, E> {
|
||||||
|
let (input, val) = digit1(input)?;
|
||||||
|
Ok((input, val.parse::<i32>().unwrap()))
|
||||||
|
}
|
||||||
|
|
||||||
|
#[cfg(test)]
|
||||||
|
mod tests {
|
||||||
|
use super::*;
|
||||||
|
use cool_asserts::assert_matches;
|
||||||
|
|
||||||
|
const DATA: &'static str = "15";
|
||||||
|
|
||||||
|
#[test]
|
||||||
|
fn function() {
|
||||||
|
let resp = parse_number_a::<nom::error::VerboseError<&str>>()
|
||||||
|
.map(|val| Container(val))
|
||||||
|
.parse(DATA);
|
||||||
|
assert_matches!(resp, Ok((_, content)) =>
|
||||||
|
assert_eq!(content, Container(15))
|
||||||
|
);
|
||||||
|
}
|
||||||
|
|
||||||
|
#[test]
|
||||||
|
fn parser() {
|
||||||
|
let resp = parse_number_b::<nom::error::VerboseError<&str>>()
|
||||||
|
.map(|val| Container(val))
|
||||||
|
.parse(DATA);
|
||||||
|
assert_matches!(resp, Ok((_, content)) =>
|
||||||
|
assert_eq!(content, Container(15))
|
||||||
|
);
|
||||||
|
}
|
||||||
|
|
||||||
|
#[test]
|
||||||
|
fn parser_composition_a() {
|
||||||
|
let resp =
|
||||||
|
parse_container_a::<nom::error::VerboseError<&str>>(parse_number_a()).parse(DATA);
|
||||||
|
assert_matches!(resp, Ok((_, content)) =>
|
||||||
|
assert_eq!(content, Container(15))
|
||||||
|
);
|
||||||
|
}
|
||||||
|
|
||||||
|
#[test]
|
||||||
|
fn parser_composition_c() {
|
||||||
|
let resp =
|
||||||
|
parse_container_c::<nom::error::VerboseError<&str>>(parse_number_b()).parse(DATA);
|
||||||
|
assert_matches!(resp, Ok((_, content)) =>
|
||||||
|
assert_eq!(content, Container(15))
|
||||||
|
);
|
||||||
|
}
|
||||||
|
}
|
|
@ -0,0 +1,74 @@
|
||||||
|
# Contributor Covenant Code of Conduct
|
||||||
|
|
||||||
|
## Our Pledge
|
||||||
|
|
||||||
|
In the interest of fostering an open and welcoming environment, we as
|
||||||
|
contributors and maintainers pledge to making participation in our project and
|
||||||
|
our community a harassment-free experience for everyone, regardless of age, body
|
||||||
|
size, disability, ethnicity, gender identity and expression, level of experience,
|
||||||
|
education, socio-economic status, nationality, personal appearance, race,
|
||||||
|
religion, or sexual identity and orientation.
|
||||||
|
|
||||||
|
## Our Standards
|
||||||
|
|
||||||
|
Examples of behavior that contributes to creating a positive environment
|
||||||
|
include:
|
||||||
|
|
||||||
|
* Using welcoming and inclusive language
|
||||||
|
* Being respectful of differing viewpoints and experiences
|
||||||
|
* Gracefully accepting constructive criticism
|
||||||
|
* Focusing on what is best for the community
|
||||||
|
* Showing empathy towards other community members
|
||||||
|
|
||||||
|
Examples of unacceptable behavior by participants include:
|
||||||
|
|
||||||
|
* The use of sexualized language or imagery and unwelcome sexual attention or
|
||||||
|
advances
|
||||||
|
* Trolling, insulting/derogatory comments, and personal or political attacks
|
||||||
|
* Public or private harassment
|
||||||
|
* Publishing others' private information, such as a physical or electronic
|
||||||
|
address, without explicit permission
|
||||||
|
* Other conduct which could reasonably be considered inappropriate in a
|
||||||
|
professional setting
|
||||||
|
|
||||||
|
## Our Responsibilities
|
||||||
|
|
||||||
|
Project maintainers are responsible for clarifying the standards of acceptable
|
||||||
|
behavior and are expected to take appropriate and fair corrective action in
|
||||||
|
response to any instances of unacceptable behavior.
|
||||||
|
|
||||||
|
Project maintainers have the right and responsibility to remove, edit, or
|
||||||
|
reject comments, commits, code, wiki edits, issues, and other contributions
|
||||||
|
that are not aligned to this Code of Conduct, or to ban temporarily or
|
||||||
|
permanently any contributor for other behaviors that they deem inappropriate,
|
||||||
|
threatening, offensive, or harmful.
|
||||||
|
|
||||||
|
## Scope
|
||||||
|
|
||||||
|
This Code of Conduct applies both within project spaces and in public spaces
|
||||||
|
when an individual is representing the project or its community. Examples of
|
||||||
|
representing a project or community include using an official project e-mail
|
||||||
|
address, posting via an official social media account, or acting as an appointed
|
||||||
|
representative at an online or offline event. Representation of a project may be
|
||||||
|
further defined and clarified by project maintainers.
|
||||||
|
|
||||||
|
## Enforcement
|
||||||
|
|
||||||
|
Instances of abusive, harassing, or otherwise unacceptable behavior may be
|
||||||
|
reported by contacting the project team at savanni@luminescent-dreams.com. All
|
||||||
|
complaints will be reviewed and investigated and will result in a response that
|
||||||
|
is deemed necessary and appropriate to the circumstances. The project team is
|
||||||
|
obligated to maintain confidentiality with regard to the reporter of an incident.
|
||||||
|
Further details of specific enforcement policies may be posted separately.
|
||||||
|
|
||||||
|
Project maintainers who do not follow or enforce the Code of Conduct in good
|
||||||
|
faith may face temporary or permanent repercussions as determined by other
|
||||||
|
members of the project's leadership.
|
||||||
|
|
||||||
|
## Attribution
|
||||||
|
|
||||||
|
This Code of Conduct is adapted from the [Contributor Covenant][homepage], version 1.4,
|
||||||
|
available at https://www.contributor-covenant.org/version/1/4/code-of-conduct.html
|
||||||
|
|
||||||
|
[homepage]: https://www.contributor-covenant.org
|
||||||
|
|
|
@ -0,0 +1,4 @@
|
||||||
|
* [Savanni D'Gerinel](http://github.com/savannidgerinel)
|
||||||
|
* [Daria Phoebe Brasea](http://github.com/dariaphoebe)
|
||||||
|
* [Aria Stewart](http://github.com/aredridel)
|
||||||
|
|
|
@ -0,0 +1,446 @@
|
||||||
|
# This file is automatically @generated by Cargo.
|
||||||
|
# It is not intended for manual editing.
|
||||||
|
version = 3
|
||||||
|
|
||||||
|
[[package]]
|
||||||
|
name = "ansi_term"
|
||||||
|
version = "0.11.0"
|
||||||
|
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||||
|
checksum = "ee49baf6cb617b853aa8d93bf420db2383fab46d314482ca2803b40d5fde979b"
|
||||||
|
dependencies = [
|
||||||
|
"winapi",
|
||||||
|
]
|
||||||
|
|
||||||
|
[[package]]
|
||||||
|
name = "atty"
|
||||||
|
version = "0.2.10"
|
||||||
|
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||||
|
checksum = "2fc4a1aa4c24c0718a250f0681885c1af91419d242f29eb8f2ab28502d80dbd1"
|
||||||
|
dependencies = [
|
||||||
|
"libc",
|
||||||
|
"termion",
|
||||||
|
"winapi",
|
||||||
|
]
|
||||||
|
|
||||||
|
[[package]]
|
||||||
|
name = "base64"
|
||||||
|
version = "0.9.2"
|
||||||
|
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||||
|
checksum = "85415d2594767338a74a30c1d370b2f3262ec1b4ed2d7bba5b3faf4de40467d9"
|
||||||
|
dependencies = [
|
||||||
|
"byteorder",
|
||||||
|
"safemem",
|
||||||
|
]
|
||||||
|
|
||||||
|
[[package]]
|
||||||
|
name = "bitflags"
|
||||||
|
version = "1.0.3"
|
||||||
|
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||||
|
checksum = "d0c54bb8f454c567f21197eefcdbf5679d0bd99f2ddbe52e84c77061952e6789"
|
||||||
|
|
||||||
|
[[package]]
|
||||||
|
name = "byteorder"
|
||||||
|
version = "1.2.3"
|
||||||
|
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||||
|
checksum = "74c0b906e9446b0a2e4f760cdb3fa4b2c48cdc6db8766a845c54b6ff063fd2e9"
|
||||||
|
|
||||||
|
[[package]]
|
||||||
|
name = "cc"
|
||||||
|
version = "1.0.31"
|
||||||
|
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||||
|
checksum = "c9ce8bb087aacff865633f0bd5aeaed910fe2fe55b55f4739527f2e023a2e53d"
|
||||||
|
|
||||||
|
[[package]]
|
||||||
|
name = "cfg-if"
|
||||||
|
version = "1.0.0"
|
||||||
|
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||||
|
checksum = "baf1de4339761588bc0619e3cbc0120ee582ebb74b53b4efbf79117bd2da40fd"
|
||||||
|
|
||||||
|
[[package]]
|
||||||
|
name = "chrono"
|
||||||
|
version = "0.4.4"
|
||||||
|
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||||
|
checksum = "6962c635d530328acc53ac6a955e83093fedc91c5809dfac1fa60fa470830a37"
|
||||||
|
dependencies = [
|
||||||
|
"num-integer",
|
||||||
|
"num-traits",
|
||||||
|
"serde",
|
||||||
|
"time",
|
||||||
|
]
|
||||||
|
|
||||||
|
[[package]]
|
||||||
|
name = "clap"
|
||||||
|
version = "2.33.3"
|
||||||
|
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||||
|
checksum = "37e58ac78573c40708d45522f0d80fa2f01cc4f9b4e2bf749807255454312002"
|
||||||
|
dependencies = [
|
||||||
|
"ansi_term",
|
||||||
|
"atty",
|
||||||
|
"bitflags",
|
||||||
|
"strsim",
|
||||||
|
"textwrap",
|
||||||
|
"unicode-width",
|
||||||
|
"vec_map",
|
||||||
|
]
|
||||||
|
|
||||||
|
[[package]]
|
||||||
|
name = "dtoa"
|
||||||
|
version = "0.4.2"
|
||||||
|
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||||
|
checksum = "09c3753c3db574d215cba4ea76018483895d7bff25a31b49ba45db21c48e50ab"
|
||||||
|
|
||||||
|
[[package]]
|
||||||
|
name = "either"
|
||||||
|
version = "1.5.0"
|
||||||
|
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||||
|
checksum = "3be565ca5c557d7f59e7cfcf1844f9e3033650c929c6566f511e8005f205c1d0"
|
||||||
|
|
||||||
|
[[package]]
|
||||||
|
name = "getrandom"
|
||||||
|
version = "0.2.3"
|
||||||
|
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||||
|
checksum = "7fcd999463524c52659517fe2cea98493cfe485d10565e7b0fb07dbba7ad2753"
|
||||||
|
dependencies = [
|
||||||
|
"cfg-if",
|
||||||
|
"libc",
|
||||||
|
"wasi",
|
||||||
|
]
|
||||||
|
|
||||||
|
[[package]]
|
||||||
|
name = "itertools"
|
||||||
|
version = "0.10.1"
|
||||||
|
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||||
|
checksum = "69ddb889f9d0d08a67338271fa9b62996bc788c7796a5c18cf057420aaed5eaf"
|
||||||
|
dependencies = [
|
||||||
|
"either",
|
||||||
|
]
|
||||||
|
|
||||||
|
[[package]]
|
||||||
|
name = "itoa"
|
||||||
|
version = "0.4.1"
|
||||||
|
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||||
|
checksum = "c069bbec61e1ca5a596166e55dfe4773ff745c3d16b700013bcaff9a6df2c682"
|
||||||
|
|
||||||
|
[[package]]
|
||||||
|
name = "jsonwebtoken"
|
||||||
|
version = "5.0.1"
|
||||||
|
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||||
|
checksum = "8d438ea707d465c230305963b67f8357a1d56fcfad9434797d7cb1c46c2e41df"
|
||||||
|
dependencies = [
|
||||||
|
"base64",
|
||||||
|
"chrono",
|
||||||
|
"ring",
|
||||||
|
"serde",
|
||||||
|
"serde_derive",
|
||||||
|
"serde_json",
|
||||||
|
"untrusted",
|
||||||
|
]
|
||||||
|
|
||||||
|
[[package]]
|
||||||
|
name = "lazy_static"
|
||||||
|
version = "1.3.0"
|
||||||
|
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||||
|
checksum = "bc5729f27f159ddd61f4df6228e827e86643d4d3e7c32183cb30a1c08f604a14"
|
||||||
|
|
||||||
|
[[package]]
|
||||||
|
name = "libc"
|
||||||
|
version = "0.2.103"
|
||||||
|
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||||
|
checksum = "dd8f7255a17a627354f321ef0055d63b898c6fb27eff628af4d1b66b7331edf6"
|
||||||
|
|
||||||
|
[[package]]
|
||||||
|
name = "linked-hash-map"
|
||||||
|
version = "0.5.1"
|
||||||
|
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||||
|
checksum = "70fb39025bc7cdd76305867c4eccf2f2dcf6e9a57f5b21a93e1c2d86cd03ec9e"
|
||||||
|
|
||||||
|
[[package]]
|
||||||
|
name = "num-integer"
|
||||||
|
version = "0.1.39"
|
||||||
|
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||||
|
checksum = "e83d528d2677f0518c570baf2b7abdcf0cd2d248860b68507bdcb3e91d4c0cea"
|
||||||
|
dependencies = [
|
||||||
|
"num-traits",
|
||||||
|
]
|
||||||
|
|
||||||
|
[[package]]
|
||||||
|
name = "num-traits"
|
||||||
|
version = "0.2.5"
|
||||||
|
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||||
|
checksum = "630de1ef5cc79d0cdd78b7e33b81f083cbfe90de0f4b2b2f07f905867c70e9fe"
|
||||||
|
|
||||||
|
[[package]]
|
||||||
|
name = "orizentic"
|
||||||
|
version = "1.0.1"
|
||||||
|
dependencies = [
|
||||||
|
"chrono",
|
||||||
|
"clap",
|
||||||
|
"itertools",
|
||||||
|
"jsonwebtoken",
|
||||||
|
"serde",
|
||||||
|
"serde_derive",
|
||||||
|
"serde_json",
|
||||||
|
"thiserror",
|
||||||
|
"uuid",
|
||||||
|
"version_check",
|
||||||
|
"yaml-rust",
|
||||||
|
]
|
||||||
|
|
||||||
|
[[package]]
|
||||||
|
name = "proc-macro2"
|
||||||
|
version = "0.4.6"
|
||||||
|
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||||
|
checksum = "effdb53b25cdad54f8f48843d67398f7ef2e14f12c1b4cb4effc549a6462a4d6"
|
||||||
|
dependencies = [
|
||||||
|
"unicode-xid 0.1.0",
|
||||||
|
]
|
||||||
|
|
||||||
|
[[package]]
|
||||||
|
name = "proc-macro2"
|
||||||
|
version = "1.0.29"
|
||||||
|
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||||
|
checksum = "b9f5105d4fdaab20335ca9565e106a5d9b82b6219b5ba735731124ac6711d23d"
|
||||||
|
dependencies = [
|
||||||
|
"unicode-xid 0.2.2",
|
||||||
|
]
|
||||||
|
|
||||||
|
[[package]]
|
||||||
|
name = "quote"
|
||||||
|
version = "0.6.3"
|
||||||
|
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||||
|
checksum = "e44651a0dc4cdd99f71c83b561e221f714912d11af1a4dff0631f923d53af035"
|
||||||
|
dependencies = [
|
||||||
|
"proc-macro2 0.4.6",
|
||||||
|
]
|
||||||
|
|
||||||
|
[[package]]
|
||||||
|
name = "quote"
|
||||||
|
version = "1.0.9"
|
||||||
|
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||||
|
checksum = "c3d0b9745dc2debf507c8422de05d7226cc1f0644216dfdfead988f9b1ab32a7"
|
||||||
|
dependencies = [
|
||||||
|
"proc-macro2 1.0.29",
|
||||||
|
]
|
||||||
|
|
||||||
|
[[package]]
|
||||||
|
name = "redox_syscall"
|
||||||
|
version = "0.1.40"
|
||||||
|
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||||
|
checksum = "c214e91d3ecf43e9a4e41e578973adeb14b474f2bee858742d127af75a0112b1"
|
||||||
|
|
||||||
|
[[package]]
|
||||||
|
name = "redox_termios"
|
||||||
|
version = "0.1.1"
|
||||||
|
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||||
|
checksum = "7e891cfe48e9100a70a3b6eb652fef28920c117d366339687bd5576160db0f76"
|
||||||
|
dependencies = [
|
||||||
|
"redox_syscall",
|
||||||
|
]
|
||||||
|
|
||||||
|
[[package]]
|
||||||
|
name = "ring"
|
||||||
|
version = "0.13.5"
|
||||||
|
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||||
|
checksum = "2c4db68a2e35f3497146b7e4563df7d4773a2433230c5e4b448328e31740458a"
|
||||||
|
dependencies = [
|
||||||
|
"cc",
|
||||||
|
"lazy_static",
|
||||||
|
"libc",
|
||||||
|
"untrusted",
|
||||||
|
]
|
||||||
|
|
||||||
|
[[package]]
|
||||||
|
name = "safemem"
|
||||||
|
version = "0.2.0"
|
||||||
|
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||||
|
checksum = "e27a8b19b835f7aea908818e871f5cc3a5a186550c30773be987e155e8163d8f"
|
||||||
|
|
||||||
|
[[package]]
|
||||||
|
name = "serde"
|
||||||
|
version = "1.0.69"
|
||||||
|
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||||
|
checksum = "210e5a3b159c566d7527e9b22e44be73f2e0fcc330bb78fef4dbccb56d2e74c8"
|
||||||
|
|
||||||
|
[[package]]
|
||||||
|
name = "serde_derive"
|
||||||
|
version = "1.0.69"
|
||||||
|
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||||
|
checksum = "dd724d68017ae3a7e63600ee4b2fdb3cad2158ffd1821d44aff4580f63e2b593"
|
||||||
|
dependencies = [
|
||||||
|
"proc-macro2 0.4.6",
|
||||||
|
"quote 0.6.3",
|
||||||
|
"syn 0.14.4",
|
||||||
|
]
|
||||||
|
|
||||||
|
[[package]]
|
||||||
|
name = "serde_json"
|
||||||
|
version = "1.0.22"
|
||||||
|
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||||
|
checksum = "84b8035cabe9b35878adec8ac5fe03d5f6bc97ff6edd7ccb96b44c1276ba390e"
|
||||||
|
dependencies = [
|
||||||
|
"dtoa",
|
||||||
|
"itoa",
|
||||||
|
"serde",
|
||||||
|
]
|
||||||
|
|
||||||
|
[[package]]
|
||||||
|
name = "strsim"
|
||||||
|
version = "0.8.0"
|
||||||
|
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||||
|
checksum = "8ea5119cdb4c55b55d432abb513a0429384878c15dde60cc77b1c99de1a95a6a"
|
||||||
|
|
||||||
|
[[package]]
|
||||||
|
name = "syn"
|
||||||
|
version = "0.14.4"
|
||||||
|
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||||
|
checksum = "2beff8ebc3658f07512a413866875adddd20f4fd47b2a4e6c9da65cd281baaea"
|
||||||
|
dependencies = [
|
||||||
|
"proc-macro2 0.4.6",
|
||||||
|
"quote 0.6.3",
|
||||||
|
"unicode-xid 0.1.0",
|
||||||
|
]
|
||||||
|
|
||||||
|
[[package]]
|
||||||
|
name = "syn"
|
||||||
|
version = "1.0.77"
|
||||||
|
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||||
|
checksum = "5239bc68e0fef57495900cfea4e8dc75596d9a319d7e16b1e0a440d24e6fe0a0"
|
||||||
|
dependencies = [
|
||||||
|
"proc-macro2 1.0.29",
|
||||||
|
"quote 1.0.9",
|
||||||
|
"unicode-xid 0.2.2",
|
||||||
|
]
|
||||||
|
|
||||||
|
[[package]]
|
||||||
|
name = "termion"
|
||||||
|
version = "1.5.1"
|
||||||
|
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||||
|
checksum = "689a3bdfaab439fd92bc87df5c4c78417d3cbe537487274e9b0b2dce76e92096"
|
||||||
|
dependencies = [
|
||||||
|
"libc",
|
||||||
|
"redox_syscall",
|
||||||
|
"redox_termios",
|
||||||
|
]
|
||||||
|
|
||||||
|
[[package]]
|
||||||
|
name = "textwrap"
|
||||||
|
version = "0.11.0"
|
||||||
|
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||||
|
checksum = "d326610f408c7a4eb6f51c37c330e496b08506c9457c9d34287ecc38809fb060"
|
||||||
|
dependencies = [
|
||||||
|
"unicode-width",
|
||||||
|
]
|
||||||
|
|
||||||
|
[[package]]
|
||||||
|
name = "thiserror"
|
||||||
|
version = "1.0.29"
|
||||||
|
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||||
|
checksum = "602eca064b2d83369e2b2f34b09c70b605402801927c65c11071ac911d299b88"
|
||||||
|
dependencies = [
|
||||||
|
"thiserror-impl",
|
||||||
|
]
|
||||||
|
|
||||||
|
[[package]]
|
||||||
|
name = "thiserror-impl"
|
||||||
|
version = "1.0.29"
|
||||||
|
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||||
|
checksum = "bad553cc2c78e8de258400763a647e80e6d1b31ee237275d756f6836d204494c"
|
||||||
|
dependencies = [
|
||||||
|
"proc-macro2 1.0.29",
|
||||||
|
"quote 1.0.9",
|
||||||
|
"syn 1.0.77",
|
||||||
|
]
|
||||||
|
|
||||||
|
[[package]]
|
||||||
|
name = "time"
|
||||||
|
version = "0.1.40"
|
||||||
|
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||||
|
checksum = "d825be0eb33fda1a7e68012d51e9c7f451dc1a69391e7fdc197060bb8c56667b"
|
||||||
|
dependencies = [
|
||||||
|
"libc",
|
||||||
|
"redox_syscall",
|
||||||
|
"winapi",
|
||||||
|
]
|
||||||
|
|
||||||
|
[[package]]
|
||||||
|
name = "unicode-width"
|
||||||
|
version = "0.1.5"
|
||||||
|
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||||
|
checksum = "882386231c45df4700b275c7ff55b6f3698780a650026380e72dabe76fa46526"
|
||||||
|
|
||||||
|
[[package]]
|
||||||
|
name = "unicode-xid"
|
||||||
|
version = "0.1.0"
|
||||||
|
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||||
|
checksum = "fc72304796d0818e357ead4e000d19c9c174ab23dc11093ac919054d20a6a7fc"
|
||||||
|
|
||||||
|
[[package]]
|
||||||
|
name = "unicode-xid"
|
||||||
|
version = "0.2.2"
|
||||||
|
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||||
|
checksum = "8ccb82d61f80a663efe1f787a51b16b5a51e3314d6ac365b08639f52387b33f3"
|
||||||
|
|
||||||
|
[[package]]
|
||||||
|
name = "untrusted"
|
||||||
|
version = "0.6.2"
|
||||||
|
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||||
|
checksum = "55cd1f4b4e96b46aeb8d4855db4a7a9bd96eeeb5c6a1ab54593328761642ce2f"
|
||||||
|
|
||||||
|
[[package]]
|
||||||
|
name = "uuid"
|
||||||
|
version = "0.8.2"
|
||||||
|
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||||
|
checksum = "bc5cf98d8186244414c848017f0e2676b3fcb46807f6668a97dfe67359a3c4b7"
|
||||||
|
dependencies = [
|
||||||
|
"getrandom",
|
||||||
|
"serde",
|
||||||
|
]
|
||||||
|
|
||||||
|
[[package]]
|
||||||
|
name = "vec_map"
|
||||||
|
version = "0.8.1"
|
||||||
|
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||||
|
checksum = "05c78687fb1a80548ae3250346c3db86a80a7cdd77bda190189f2d0a0987c81a"
|
||||||
|
|
||||||
|
[[package]]
|
||||||
|
name = "version_check"
|
||||||
|
version = "0.1.5"
|
||||||
|
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||||
|
checksum = "914b1a6776c4c929a602fafd8bc742e06365d4bcbe48c30f9cca5824f70dc9dd"
|
||||||
|
|
||||||
|
[[package]]
|
||||||
|
name = "wasi"
|
||||||
|
version = "0.10.2+wasi-snapshot-preview1"
|
||||||
|
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||||
|
checksum = "fd6fbd9a79829dd1ad0cc20627bf1ed606756a7f77edff7b66b7064f9cb327c6"
|
||||||
|
|
||||||
|
[[package]]
|
||||||
|
name = "winapi"
|
||||||
|
version = "0.3.5"
|
||||||
|
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||||
|
checksum = "773ef9dcc5f24b7d850d0ff101e542ff24c3b090a9768e03ff889fdef41f00fd"
|
||||||
|
dependencies = [
|
||||||
|
"winapi-i686-pc-windows-gnu",
|
||||||
|
"winapi-x86_64-pc-windows-gnu",
|
||||||
|
]
|
||||||
|
|
||||||
|
[[package]]
|
||||||
|
name = "winapi-i686-pc-windows-gnu"
|
||||||
|
version = "0.4.0"
|
||||||
|
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||||
|
checksum = "ac3b87c63620426dd9b991e5ce0329eff545bccbbb34f3be09ff6fb6ab51b7b6"
|
||||||
|
|
||||||
|
[[package]]
|
||||||
|
name = "winapi-x86_64-pc-windows-gnu"
|
||||||
|
version = "0.4.0"
|
||||||
|
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||||
|
checksum = "712e227841d057c1ee1cd2fb22fa7e5a5461ae8e48fa2ca79ec42cfc1931183f"
|
||||||
|
|
||||||
|
[[package]]
|
||||||
|
name = "yaml-rust"
|
||||||
|
version = "0.4.0"
|
||||||
|
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||||
|
checksum = "57ab38ee1a4a266ed033496cf9af1828d8d6e6c1cfa5f643a2809effcae4d628"
|
||||||
|
dependencies = [
|
||||||
|
"linked-hash-map",
|
||||||
|
]
|
|
@ -0,0 +1,39 @@
|
||||||
|
[package]
|
||||||
|
name = "orizentic"
|
||||||
|
version = "1.0.1"
|
||||||
|
authors = ["Savanni D'Gerinel <savanni@luminescent-dreams.com>"]
|
||||||
|
description = "A library for interfacing with a JWT auth token database and a command line tool for managing it."
|
||||||
|
license = "GPL-3.0"
|
||||||
|
documentation = "https://docs.rs/orizentic"
|
||||||
|
homepage = "https://github.com/luminescent-dreams/orizentic"
|
||||||
|
repository = "https://github.com/luminescent-dreams/orizentic"
|
||||||
|
categories = ["authentication", "command-line-utilities"]
|
||||||
|
|
||||||
|
include = [
|
||||||
|
"**/*.rs",
|
||||||
|
"Cargo.toml",
|
||||||
|
"build.rs",
|
||||||
|
]
|
||||||
|
|
||||||
|
[build-dependencies]
|
||||||
|
version_check = "0.1.5"
|
||||||
|
|
||||||
|
[dependencies]
|
||||||
|
chrono = { version = "0.4", features = ["serde"] }
|
||||||
|
clap = "2.33"
|
||||||
|
itertools = "0.10"
|
||||||
|
jsonwebtoken = "5"
|
||||||
|
serde = "1"
|
||||||
|
serde_derive = "1"
|
||||||
|
serde_json = "1"
|
||||||
|
thiserror = "1"
|
||||||
|
uuid = { version = "0.8", features = ["v4", "serde"] }
|
||||||
|
yaml-rust = "0.4"
|
||||||
|
|
||||||
|
[lib]
|
||||||
|
name = "orizentic"
|
||||||
|
path = "src/lib.rs"
|
||||||
|
|
||||||
|
[[bin]]
|
||||||
|
name = "orizentic"
|
||||||
|
path = "src/bin.rs"
|
|
@ -0,0 +1,30 @@
|
||||||
|
Copyright Savanni D'Gerinel (c) 2017 - 2019
|
||||||
|
|
||||||
|
All rights reserved.
|
||||||
|
|
||||||
|
Redistribution and use in source and binary forms, with or without
|
||||||
|
modification, are permitted provided that the following conditions are met:
|
||||||
|
|
||||||
|
* Redistributions of source code must retain the above copyright
|
||||||
|
notice, this list of conditions and the following disclaimer.
|
||||||
|
|
||||||
|
* Redistributions in binary form must reproduce the above
|
||||||
|
copyright notice, this list of conditions and the following
|
||||||
|
disclaimer in the documentation and/or other materials provided
|
||||||
|
with the distribution.
|
||||||
|
|
||||||
|
* Neither the name of Savanni D'Gerinel nor the names of other
|
||||||
|
contributors may be used to endorse or promote products derived
|
||||||
|
from this software without specific prior written permission.
|
||||||
|
|
||||||
|
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
|
||||||
|
"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
|
||||||
|
LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
|
||||||
|
A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
|
||||||
|
OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
|
||||||
|
SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
|
||||||
|
LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
|
||||||
|
DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
|
||||||
|
THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
|
||||||
|
(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
|
||||||
|
OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
|
|
@ -0,0 +1,73 @@
|
||||||
|
# Orizentic
|
||||||
|
|
||||||
|
[![CircleCI](https://circleci.com/gh/luminescent-dreams/orizentic/tree/sol.svg?style=svg)](https://circleci.com/gh/luminescent-dreams/orizentic/tree/sol)
|
||||||
|
|
||||||
|
[Documentation](https://docs.rs/orizentic)
|
||||||
|
|
||||||
|
Orizentic provides a library that streamlines token-based authentication, and a CLI tool for maintaining a database of tokens.
|
||||||
|
|
||||||
|
## Credit
|
||||||
|
|
||||||
|
The name is a contraction of Auth(oriz)ation/Auth(entic)ation, and credit goes to [Daria Phoebe Brashear](https://github.com/dariaphoebe).
|
||||||
|
|
||||||
|
The original idea has been debated online for many years, but the push to make this useful comes from [Aria Stewart](https://github.com/aredridel).
|
||||||
|
|
||||||
|
## Tokens
|
||||||
|
|
||||||
|
Tokens are simple [JWTs](https://jwt.io/). This library simplifies the process by easily generating and checking JWTs that have only an issuer, an optional time-to-live, a resource name, a username, and a list of permissions. A typical resulting JWT would look like this:
|
||||||
|
|
||||||
|
{ iss = Savanni
|
||||||
|
, sub = health
|
||||||
|
, aud = "Savanni Desktop"
|
||||||
|
, exp = null
|
||||||
|
, nbf = null
|
||||||
|
, iat = 1499650083
|
||||||
|
, jti = 9d57a8d8-d11e-43b2-a4d6-7b82ad043994
|
||||||
|
, unregisteredClaims = { perms: [ "read", "write" ] }
|
||||||
|
}
|
||||||
|
|
||||||
|
The `issuer` and `audience` (or username) are almost entirely for human readability. In this instance, I issued a token that was intended to be used on my desktop system.
|
||||||
|
|
||||||
|
The `subject` in this case is synonymous with Resource and is a name for the resource for which access is being granted. Permissions are a simple list of freeform strings. Both of these are flexible within your application and your authorization checks will use them to verify that the token can be used for the specified purpose.
|
||||||
|
|
||||||
|
## CLI Usage
|
||||||
|
|
||||||
|
## Library Usage
|
||||||
|
|
||||||
|
[orizentic - Rust](https://docs.rs/orizentic/1.0.0/orizentic/)
|
||||||
|
|
||||||
|
There are multiple errata for the documentation:
|
||||||
|
|
||||||
|
* There are, in fact, now [two functions](https://docs.rs/orizentic/1.0.0/orizentic/filedb/index.html) for saving and loading a database.
|
||||||
|
* An example for how to use the library is currently here [for loading the database](https://github.com/luminescent-dreams/fitnesstrax/blob/8c9f3f418ff75675874f7a8e3928ad3f7d134eb4/server/src/web.rs#L64) and here [as part of the AuthMiddleware for an Iron server](https://github.com/luminescent-dreams/fitnesstrax/blob/8c9f3f418ff75675874f7a8e3928ad3f7d134eb4/server/src/server.rs#L156). I apologize for not writing this in more detail yet.
|
||||||
|
|
||||||
|
## Language support
|
||||||
|
|
||||||
|
This library and application is only supported for Rust. Haskell and Go support has been discontinued, but can be revived if I discover folks have an interest. The token database is compatible across tools. See readmes in the language directory for usage information.
|
||||||
|
|
||||||
|
Future Haskell, Go, and other language versions of the library will be done through language bindings against the Rust utilities instead of through my previous clean-room re-implementations.
|
||||||
|
|
||||||
|
## Nix installation
|
||||||
|
|
||||||
|
If you have Nix installed on your system, or you run NixOS, create this derivation:
|
||||||
|
|
||||||
|
orizentic.nix:
|
||||||
|
|
||||||
|
```
|
||||||
|
{ fetchFromGitHub }:
|
||||||
|
let src = fetchFromGitHub {
|
||||||
|
owner = "luminescent-dreams";
|
||||||
|
repo = "orizentic";
|
||||||
|
rev = "896140f594fe3c106662ffe2550f289bb68bc0cb";
|
||||||
|
sha256 = "05g7b0jiyy0pv74zf89yikf65vi3jrn1da0maj0k9fxnxb2vv7a4";
|
||||||
|
};
|
||||||
|
in import "${src}/default.nix" {}
|
||||||
|
```
|
||||||
|
|
||||||
|
At this time, you must have nixpkgs-19.03 defined (and preferably pointing to the 19.03 channel). I will parameterize this and update the instructions in the future.
|
||||||
|
|
||||||
|
I import this into my shell.nix `with import ./orizentic.nix { inherit (pkgs) fetchFromGitHub; };`.
|
||||||
|
|
||||||
|
For a complete example, see my [shell.nix](https://github.com/savannidgerinel/nix-shell/blob/sol/shell.nix) file.
|
||||||
|
|
||||||
|
I have not bundled this application for any other distribution, but you should have no trouble building it with just `cargo build --release` with Rust-1.33 and Cargo.
|
|
@ -0,0 +1,20 @@
|
||||||
|
let
|
||||||
|
rust_overlay = import (builtins.fetchTarball "https://github.com/oxalica/rust-overlay/archive/master.tar.gz");
|
||||||
|
pkgs = import <nixpkgs> { overlays = [ rust_overlay ]; };
|
||||||
|
unstable = import <unstable> {};
|
||||||
|
rust = pkgs.rust-bin.stable."1.59.0".default.override {
|
||||||
|
extensions = [ "rust-src" ];
|
||||||
|
};
|
||||||
|
|
||||||
|
in pkgs.mkShell {
|
||||||
|
name = "datasphere";
|
||||||
|
|
||||||
|
nativeBuildInputs = [
|
||||||
|
rust
|
||||||
|
unstable.rust-analyzer
|
||||||
|
];
|
||||||
|
|
||||||
|
shellHook = ''
|
||||||
|
if [ -e ~/.nixpkgs/shellhook.sh ]; then . ~/.nixpkgs/shellhook.sh; fi
|
||||||
|
'';
|
||||||
|
}
|
|
@ -0,0 +1,251 @@
|
||||||
|
extern crate chrono;
|
||||||
|
extern crate clap;
|
||||||
|
extern crate orizentic;
|
||||||
|
|
||||||
|
use chrono::Duration;
|
||||||
|
use clap::{App, Arg, ArgMatches, SubCommand};
|
||||||
|
use std::env;
|
||||||
|
|
||||||
|
use orizentic::*;
|
||||||
|
|
||||||
|
// Errors specific to CLI argument processing.
#[derive(Debug)]
enum OrizenticErr {
    // Wraps the integer-parse failure produced when the --ttl value is not a
    // valid number of seconds.
    ParseError(std::num::ParseIntError),
}
|
||||||
|
|
||||||
|
// ORIZENTIC_DB
|
||||||
|
// ORIZENTIC_SECRET
|
||||||
|
//
|
||||||
|
// list
|
||||||
|
// create
|
||||||
|
// revoke
|
||||||
|
// encode
|
||||||
|
pub fn main() {
|
||||||
|
let db_path = env::var_os("ORIZENTIC_DB").map(|str| {
|
||||||
|
str.into_string()
|
||||||
|
.expect("ORIZENTIC_DB contains invalid Unicode sequences")
|
||||||
|
});
|
||||||
|
let secret = env::var_os("ORIZENTIC_SECRET").map(|str| {
|
||||||
|
Secret(
|
||||||
|
str.into_string()
|
||||||
|
.map(|s| s.into_bytes())
|
||||||
|
.expect("ORIZENTIC_SECRET contains invalid Unicode sequences"),
|
||||||
|
)
|
||||||
|
});
|
||||||
|
|
||||||
|
let matches = App::new("orizentic cli")
|
||||||
|
.subcommand(SubCommand::with_name("list"))
|
||||||
|
.subcommand(
|
||||||
|
SubCommand::with_name("create")
|
||||||
|
.arg(
|
||||||
|
Arg::with_name("issuer")
|
||||||
|
.long("issuer")
|
||||||
|
.takes_value(true)
|
||||||
|
.required(true),
|
||||||
|
)
|
||||||
|
.arg(
|
||||||
|
Arg::with_name("ttl")
|
||||||
|
.long("ttl")
|
||||||
|
.takes_value(true)
|
||||||
|
.required(true),
|
||||||
|
)
|
||||||
|
.arg(
|
||||||
|
Arg::with_name("resource")
|
||||||
|
.long("resource")
|
||||||
|
.takes_value(true)
|
||||||
|
.required(true),
|
||||||
|
)
|
||||||
|
.arg(
|
||||||
|
Arg::with_name("username")
|
||||||
|
.long("username")
|
||||||
|
.takes_value(true)
|
||||||
|
.required(true),
|
||||||
|
)
|
||||||
|
.arg(
|
||||||
|
Arg::with_name("perms")
|
||||||
|
.long("perms")
|
||||||
|
.takes_value(true)
|
||||||
|
.required(true),
|
||||||
|
),
|
||||||
|
)
|
||||||
|
.subcommand(
|
||||||
|
SubCommand::with_name("revoke").arg(
|
||||||
|
Arg::with_name("id")
|
||||||
|
.long("id")
|
||||||
|
.takes_value(true)
|
||||||
|
.required(true),
|
||||||
|
),
|
||||||
|
)
|
||||||
|
.subcommand(
|
||||||
|
SubCommand::with_name("encode").arg(
|
||||||
|
Arg::with_name("id")
|
||||||
|
.long("id")
|
||||||
|
.takes_value(true)
|
||||||
|
.required(true),
|
||||||
|
),
|
||||||
|
)
|
||||||
|
.get_matches();
|
||||||
|
|
||||||
|
match matches.subcommand() {
|
||||||
|
("list", _) => list_tokens(db_path),
|
||||||
|
("create", Some(args)) => create_token(db_path, secret, args),
|
||||||
|
("revoke", Some(args)) => revoke_token(db_path, args),
|
||||||
|
("encode", Some(args)) => encode_token(db_path, secret, args),
|
||||||
|
(cmd, _) => {
|
||||||
|
println!("unknown subcommand: {}", cmd);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
fn list_tokens(db_path: Option<String>) {
|
||||||
|
let db_path_ = db_path.expect("ORIZENTIC_DB is required for this operation");
|
||||||
|
let claimsets = orizentic::filedb::load_claims_from_file(&db_path_);
|
||||||
|
match claimsets {
|
||||||
|
Ok(claimsets_) => {
|
||||||
|
for claimset in claimsets_ {
|
||||||
|
println!("[{}]", claimset.id);
|
||||||
|
println!("Audience: {}", String::from(claimset.audience));
|
||||||
|
match claimset.expiration {
|
||||||
|
Some(expiration) => println!(
|
||||||
|
"Expiration: {}",
|
||||||
|
expiration.format("%Y-%m-%d %H:%M:%S")
|
||||||
|
),
|
||||||
|
None => println!("Expiration: None"),
|
||||||
|
}
|
||||||
|
println!("Issuer: {}", claimset.issuer.0);
|
||||||
|
println!(
|
||||||
|
"Issued At: {}",
|
||||||
|
claimset.issued_at.format("%Y-%m-%d %H:%M:%S")
|
||||||
|
);
|
||||||
|
println!("Resource Name: {}", claimset.resource.0);
|
||||||
|
|
||||||
|
let perm_val: String = itertools::Itertools::intersperse(
|
||||||
|
claimset.permissions.0.clone().into_iter(),
|
||||||
|
String::from(", "),
|
||||||
|
)
|
||||||
|
.collect();
|
||||||
|
println!("Permissions: {}", perm_val);
|
||||||
|
println!("")
|
||||||
|
}
|
||||||
|
}
|
||||||
|
Err(err) => {
|
||||||
|
println!("claimset failed to load: {}", err);
|
||||||
|
std::process::exit(1);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
fn create_token(db_path: Option<String>, secret: Option<Secret>, args: &ArgMatches) {
|
||||||
|
let db_path_ = db_path.expect("ORIZENTIC_DB is required for this operation");
|
||||||
|
let secret_ = secret.expect("ORIZENTIC_SECRET is required for this operation");
|
||||||
|
let issuer = args
|
||||||
|
.value_of("issuer")
|
||||||
|
.map(|x| Issuer(String::from(x)))
|
||||||
|
.expect("--issuer is a required parameter");
|
||||||
|
let ttl: Option<TTL> = args.value_of("ttl").map(|x| {
|
||||||
|
x.parse()
|
||||||
|
.and_then(|d| Ok(TTL(Duration::seconds(d))))
|
||||||
|
.map_err(|err| OrizenticErr::ParseError(err))
|
||||||
|
.expect("Failed to parse TTL")
|
||||||
|
});
|
||||||
|
let resource_name = args
|
||||||
|
.value_of("resource")
|
||||||
|
.map(|x| ResourceName(String::from(x)))
|
||||||
|
.expect("--resource is a required parameter");
|
||||||
|
let username = args
|
||||||
|
.value_of("username")
|
||||||
|
.map(|x| Username::from(x))
|
||||||
|
.expect("--username is a required parameter");
|
||||||
|
let perms: Permissions = args
|
||||||
|
.value_of("perms")
|
||||||
|
.map(|str| Permissions(str.split(',').map(|s| String::from(s)).collect()))
|
||||||
|
.expect("--permissions is a required parameter");
|
||||||
|
|
||||||
|
let claimsets = orizentic::filedb::load_claims_from_file(&db_path_);
|
||||||
|
match claimsets {
|
||||||
|
Err(err) => {
|
||||||
|
println!("claimset failed to load: {}", err);
|
||||||
|
std::process::exit(1);
|
||||||
|
}
|
||||||
|
Ok(claimsets_) => {
|
||||||
|
let new_claimset = ClaimSet::new(issuer, ttl, resource_name, username, perms);
|
||||||
|
let mut ctx = orizentic::OrizenticCtx::new(secret_, claimsets_);
|
||||||
|
ctx.add_claimset(new_claimset.clone());
|
||||||
|
match orizentic::filedb::save_claims_to_file(&ctx.list_claimsets(), &db_path_) {
|
||||||
|
Err(err) => {
|
||||||
|
println!("Failed to write claimset to file: {:?}", err);
|
||||||
|
std::process::exit(1);
|
||||||
|
}
|
||||||
|
Ok(_) => match ctx.encode_claimset(&new_claimset) {
|
||||||
|
Ok(token) => println!("{}", token.text),
|
||||||
|
Err(err) => {
|
||||||
|
println!("token could not be encoded: {:?}", err);
|
||||||
|
std::process::exit(1);
|
||||||
|
}
|
||||||
|
},
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
fn revoke_token(db_path: Option<String>, args: &ArgMatches) {
|
||||||
|
let db_path_ = db_path.expect("ORIZENTIC_DB is required for this operation");
|
||||||
|
let claimsets = orizentic::filedb::load_claims_from_file(&db_path_);
|
||||||
|
|
||||||
|
match claimsets {
|
||||||
|
Err(err) => {
|
||||||
|
println!("claimset failed to load: {}", err);
|
||||||
|
std::process::exit(1);
|
||||||
|
}
|
||||||
|
Ok(claimsets_) => {
|
||||||
|
let id = args
|
||||||
|
.value_of("id")
|
||||||
|
.map(String::from)
|
||||||
|
.expect("--id is a required parameter");
|
||||||
|
let mut ctx =
|
||||||
|
orizentic::OrizenticCtx::new(Secret(String::from("").into_bytes()), claimsets_);
|
||||||
|
ctx.revoke_by_uuid(&id);
|
||||||
|
match orizentic::filedb::save_claims_to_file(&ctx.list_claimsets(), &db_path_) {
|
||||||
|
Err(err) => {
|
||||||
|
println!("Failed to write claimset to file: {:?}", err);
|
||||||
|
std::process::exit(1);
|
||||||
|
}
|
||||||
|
Ok(_) => {}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
fn encode_token(db_path: Option<String>, secret: Option<Secret>, args: &ArgMatches) {
|
||||||
|
let db_path_ = db_path.expect("ORIZENTIC_DB is required for this operation");
|
||||||
|
let secret_ = secret.expect("ORIZENTIC_SECRET is required for this operation");
|
||||||
|
let id = args
|
||||||
|
.value_of("id")
|
||||||
|
.map(String::from)
|
||||||
|
.expect("--id is a required parameter");
|
||||||
|
|
||||||
|
let claimsets = orizentic::filedb::load_claims_from_file(&db_path_);
|
||||||
|
match claimsets {
|
||||||
|
Err(err) => {
|
||||||
|
println!("claimset failed to load: {}", err);
|
||||||
|
std::process::exit(1);
|
||||||
|
}
|
||||||
|
Ok(claimsets_) => {
|
||||||
|
let ctx = orizentic::OrizenticCtx::new(secret_, claimsets_);
|
||||||
|
let claimset = ctx.find_claimset(&id);
|
||||||
|
match claimset {
|
||||||
|
Some(claimset_) => match ctx.encode_claimset(&claimset_) {
|
||||||
|
Ok(token) => println!("{}", token.text),
|
||||||
|
Err(err) => {
|
||||||
|
println!("token could not be encoded: {:?}", err);
|
||||||
|
std::process::exit(1);
|
||||||
|
}
|
||||||
|
},
|
||||||
|
None => {
|
||||||
|
println!("No claimset found");
|
||||||
|
std::process::exit(1);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
|
@ -0,0 +1,303 @@
|
||||||
|
extern crate chrono;
|
||||||
|
extern crate jsonwebtoken as jwt;
|
||||||
|
extern crate serde;
|
||||||
|
extern crate serde_json;
|
||||||
|
extern crate uuid;
|
||||||
|
extern crate yaml_rust;
|
||||||
|
|
||||||
|
use core::chrono::prelude::*;
|
||||||
|
use core::uuid::Uuid;
|
||||||
|
use std::collections::HashMap;
|
||||||
|
use thiserror::Error;
|
||||||
|
|
||||||
|
/// Orizentic Errors
///
/// All fallible operations in this crate report one of these variants.
#[derive(Debug, Error)]
pub enum Error {
    /// An underlying JWT decoding error. May be replaced with Orizentic semantic errors to better
    /// encapsulate the JWT library.
    #[error("JWT failed to decode: {0}")]
    JWTError(jwt::errors::Error),
    /// Token decoded and verified but was not present in the database.
    #[error("Token not recognized")]
    UnknownToken,
}
|
||||||
|
|
||||||
|
/// ResourceName is application-defined and names a resource to which access should be controlled
|
||||||
|
#[derive(Debug, PartialEq, Clone)]
|
||||||
|
pub struct ResourceName(pub String);
|
||||||
|
|
||||||
|
/// Permissions are application-defined descriptions of what can be done with the named resource
|
||||||
|
#[derive(Debug, PartialEq, Clone)]
|
||||||
|
pub struct Permissions(pub Vec<String>);
|
||||||
|
|
||||||
|
/// Issuers are typically informative, but should generally describe who or what created the token
|
||||||
|
#[derive(Debug, PartialEq, Clone)]
|
||||||
|
pub struct Issuer(pub String);
|
||||||
|
|
||||||
|
/// Time to live is the number of seconds until a token expires. This is used for creating tokens
|
||||||
|
/// but tokens store their actual expiration time.
|
||||||
|
#[derive(Debug, PartialEq, Clone)]
|
||||||
|
pub struct TTL(pub chrono::Duration);
|
||||||
|
|
||||||
|
/// Username, or Audience in JWT terms, should describe who or what is supposed to be using this
|
||||||
|
/// token
|
||||||
|
#[derive(Debug, PartialEq, Clone)]
|
||||||
|
pub struct Username(String);
|
||||||
|
|
||||||
|
impl From<Username> for String {
    /// Unwrap the inner string. `u` is consumed by this conversion, so the inner
    /// `String` can be moved out directly — the original cloned it unnecessarily
    /// (clippy: redundant_clone).
    fn from(u: Username) -> String {
        u.0
    }
}
|
||||||
|
|
||||||
|
impl From<&str> for Username {
|
||||||
|
fn from(s: &str) -> Username {
|
||||||
|
Username(s.to_owned())
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Signing key material, as raw bytes. The inner bytes are handed to the JWT
/// encode/decode calls in `OrizenticCtx`; anyone holding this value can mint
/// tokens that validate.
#[derive(Debug, PartialEq, Clone)]
pub struct Secret(pub Vec<u8>);
|
||||||
|
|
||||||
|
/// A ClaimSet represents one set of permissions and claims. It is a standardized way of specifying
/// the owner, issuer, expiration time, relevant resources, and specific permissions on that
/// resource. By itself, this is only an informative data structure and so should never be trusted
/// when passed over the wire. See `VerifiedToken` and `UnverifiedToken`.
///
/// Field-to-JWT-claim mapping is defined by `ClaimSetJS::from_claimset`.
#[derive(Debug, PartialEq, Clone)]
pub struct ClaimSet {
    // Unique identifier; serialized as the JWT `jti` claim.
    pub id: String,
    // Who may use the token; serialized as `aud`.
    pub audience: Username,
    // Absolute expiration time, or None for a token that never expires; serialized as `exp`.
    pub expiration: Option<DateTime<Utc>>,
    // Who created the token; serialized as `iss`.
    pub issuer: Issuer,
    // Creation timestamp; serialized as `iat`.
    pub issued_at: DateTime<Utc>,
    // Resource the token grants access to; serialized as `sub`.
    pub resource: ResourceName,
    // Application-defined operations permitted on the resource; serialized as `perms`.
    pub permissions: Permissions,
}
|
||||||
|
|
||||||
|
impl ClaimSet {
|
||||||
|
/// Create a new `ClaimSet`. This will return a claimset with the expiration time calculated
|
||||||
|
/// from the TTL if the TTL is provided. No expiration will be set if no TTL is provided.
|
||||||
|
pub fn new(
|
||||||
|
issuer: Issuer,
|
||||||
|
ttl: Option<TTL>,
|
||||||
|
resource_name: ResourceName,
|
||||||
|
user_name: Username,
|
||||||
|
perms: Permissions,
|
||||||
|
) -> ClaimSet {
|
||||||
|
let issued_at: DateTime<Utc> = Utc::now().with_nanosecond(0).unwrap();
|
||||||
|
let expiration = match ttl {
|
||||||
|
Some(TTL(ttl_)) => issued_at.checked_add_signed(ttl_),
|
||||||
|
None => None,
|
||||||
|
};
|
||||||
|
ClaimSet {
|
||||||
|
id: String::from(Uuid::new_v4().to_hyphenated().to_string()),
|
||||||
|
audience: user_name,
|
||||||
|
expiration,
|
||||||
|
issuer,
|
||||||
|
issued_at,
|
||||||
|
resource: resource_name,
|
||||||
|
permissions: perms,
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn to_json(&self) -> Result<String, serde_json::Error> {
|
||||||
|
serde_json::to_string(&(ClaimSetJS::from_claimset(self)))
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn from_json(text: &String) -> Result<ClaimSet, serde_json::Error> {
|
||||||
|
serde_json::from_str(&text).map(|x| ClaimSetJS::to_claimset(&x))
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/// ClaimSetJS is an intermediary data structure between JWT serialization and a more usable
/// ClaimSet. Field names follow the JWT claim names so the serialized JSON is a standard
/// JWT payload; timestamps are unix seconds (see `from_claimset`/`to_claimset`).
#[derive(Debug, PartialEq, Clone, Serialize, Deserialize)]
pub struct ClaimSetJS {
    jti: String,        // ClaimSet::id
    aud: String,        // ClaimSet::audience
    exp: Option<i64>,   // ClaimSet::expiration as unix seconds, if any
    iss: String,        // ClaimSet::issuer
    iat: i64,           // ClaimSet::issued_at as unix seconds
    sub: String,        // ClaimSet::resource
    perms: Vec<String>, // ClaimSet::permissions
}
|
||||||
|
|
||||||
|
impl ClaimSetJS {
    /// Convert the in-memory representation into the JWT-claim field layout.
    /// Timestamps are flattened to unix seconds; newtype wrappers are unwrapped.
    pub fn from_claimset(claims: &ClaimSet) -> ClaimSetJS {
        ClaimSetJS {
            jti: claims.id.clone(),
            aud: claims.audience.0.clone(),
            exp: claims.expiration.map(|t| t.timestamp()),
            iss: claims.issuer.0.clone(),
            iat: claims.issued_at.timestamp(),
            sub: claims.resource.0.clone(),
            perms: claims.permissions.0.clone(),
        }
    }

    /// Convert back from the JWT-claim layout, rebuilding `DateTime`s from unix seconds.
    // NOTE(review): `Utc.timestamp` panics on out-of-range seconds; assumed acceptable
    // for timestamps this library itself produced — confirm if tokens can arrive from
    // untrusted sources.
    pub fn to_claimset(&self) -> ClaimSet {
        ClaimSet {
            id: self.jti.clone(),
            audience: Username(self.aud.clone()),
            expiration: self.exp.map(|t| Utc.timestamp(t, 0)),
            issuer: Issuer(self.iss.clone()),
            issued_at: Utc.timestamp(self.iat, 0),
            resource: ResourceName(self.sub.clone()),
            permissions: Permissions(self.perms.clone()),
        }
    }
}
|
||||||
|
|
||||||
|
/// The Orizentic Context encapsulates a set of claims and an associated secret. This provides the
/// overall convenience of easily creating and validating tokens. Generated claimsets are stored
/// here on the theory that, even with validation, only those claims actually stored in the
/// database should be considered valid.
// Tuple fields: .0 is the signing Secret, .1 maps claimset id -> ClaimSet.
pub struct OrizenticCtx(Secret, HashMap<String, ClaimSet>);
|
||||||
|
|
||||||
|
/// An UnverifiedToken is a combination of the JWT serialization and the decoded `ClaimSet`. As this
/// is unverified, this should only be used for information purposes, such as determining what a
/// user can do with a token even when the decoding key is absent.
#[derive(Debug)]
pub struct UnverifiedToken {
    // The original JWT string, unchanged.
    pub text: String,
    // Claims decoded WITHOUT signature verification — informational only.
    pub claims: ClaimSet,
}
|
||||||
|
|
||||||
|
impl UnverifiedToken {
|
||||||
|
/// Decode a JWT text string without verification
|
||||||
|
pub fn decode_text(text: String) -> Result<UnverifiedToken, Error> {
|
||||||
|
let res = jwt::dangerous_unsafe_decode::<ClaimSetJS>(&text);
|
||||||
|
match res {
|
||||||
|
Ok(res_) => Ok(UnverifiedToken {
|
||||||
|
text,
|
||||||
|
claims: res_.claims.to_claimset(),
|
||||||
|
}),
|
||||||
|
Err(err) => Err(Error::JWTError(err)),
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/// An VerifiedToken is a combination of the JWT serialization and the decoded `ClaimSet`. This will
/// only be created by the `validate_function`, and thus will represent a token which has been
/// validated via signature, expiration time, and presence in the database.
#[derive(Debug)]
pub struct VerifiedToken {
    // The original JWT string, unchanged.
    pub text: String,
    // Claims that passed signature, expiration, and database-presence checks.
    pub claims: ClaimSet,
}
|
||||||
|
|
||||||
|
impl VerifiedToken {
    /// Given a `VerifiedToken`, pass the resource name and permissions to a user-defined function. The
    /// function should return true if the caller should be granted access to the resource and false,
    /// otherwise. That result will be passed back to the caller.
    pub fn check_authorizations<F: FnOnce(&ResourceName, &Permissions) -> bool>(
        &self,
        f: F,
    ) -> bool {
        // Purely delegates the authorization decision to the caller's predicate.
        f(&self.claims.resource, &self.claims.permissions)
    }
}
|
||||||
|
|
||||||
|
impl OrizenticCtx {
|
||||||
|
/// Create a new Orizentic Context with an initial set of claims.
|
||||||
|
pub fn new(secret: Secret, claims_lst: Vec<ClaimSet>) -> OrizenticCtx {
|
||||||
|
let mut hm = HashMap::new();
|
||||||
|
for claimset in claims_lst {
|
||||||
|
hm.insert(claimset.id.clone(), claimset);
|
||||||
|
}
|
||||||
|
OrizenticCtx(secret, hm)
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Validate a token by checking its signature, that it is not expired, and that it is still
|
||||||
|
/// present in the database. Return an error if any check fails, but return a `VerifiedToken`
|
||||||
|
/// if it all succeeds.
|
||||||
|
pub fn validate_token(&self, token: &UnverifiedToken) -> Result<VerifiedToken, Error> {
|
||||||
|
let validator = match token.claims.expiration {
|
||||||
|
Some(_) => jwt::Validation::default(),
|
||||||
|
None => jwt::Validation {
|
||||||
|
validate_exp: false,
|
||||||
|
..jwt::Validation::default()
|
||||||
|
},
|
||||||
|
};
|
||||||
|
let res = jwt::decode::<ClaimSetJS>(&token.text, &(self.0).0, &validator);
|
||||||
|
match res {
|
||||||
|
Ok(res_) => {
|
||||||
|
let claims = res_.claims;
|
||||||
|
let in_db = self.1.get(&claims.jti);
|
||||||
|
if in_db.is_some() {
|
||||||
|
Ok(VerifiedToken {
|
||||||
|
text: token.text.clone(),
|
||||||
|
claims: claims.to_claimset(),
|
||||||
|
})
|
||||||
|
} else {
|
||||||
|
Err(Error::UnknownToken)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
Err(err) => Err(Error::JWTError(err)),
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Given a text string, as from a web application's `Authorization` header, decode the string
|
||||||
|
/// and then validate the token.
|
||||||
|
pub fn decode_and_validate_text(&self, text: String) -> Result<VerifiedToken, Error> {
|
||||||
|
// it is necessary to first decode the token because we need the validator to know whether
|
||||||
|
// to attempt to validate the expiration. Without that check, the validator will fail any
|
||||||
|
// expiration set to None.
|
||||||
|
match UnverifiedToken::decode_text(text) {
|
||||||
|
Ok(unverified) => self.validate_token(&unverified),
|
||||||
|
Err(err) => Err(err),
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Add a claimset to the database.
|
||||||
|
pub fn add_claimset(&mut self, claimset: ClaimSet) {
|
||||||
|
self.1.insert(claimset.id.clone(), claimset);
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Remove a claims set from the database so that all additional validation checks fail.
|
||||||
|
pub fn revoke_claimset(&mut self, claim: &ClaimSet) {
|
||||||
|
self.1.remove(&claim.id);
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Revoke a ClaimsSet given its ID, which is set in the `jti` claim of a JWT or the `id` field
|
||||||
|
/// of a `ClaimSet`.
|
||||||
|
pub fn revoke_by_uuid(&mut self, claim_id: &String) {
|
||||||
|
self.1.remove(claim_id);
|
||||||
|
}
|
||||||
|
|
||||||
|
/// *NOT IMPLEMENTED*
|
||||||
|
pub fn replace_claimsets(&mut self, _claims_lst: Vec<ClaimSet>) {
|
||||||
|
unimplemented!()
|
||||||
|
}
|
||||||
|
|
||||||
|
/// List all of the `ClaimSet` IDs in the database.
|
||||||
|
pub fn list_claimsets(&self) -> Vec<&ClaimSet> {
|
||||||
|
self.1.values().map(|item| item).collect()
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Find a `ClaimSet` by ID.
|
||||||
|
pub fn find_claimset(&self, claims_id: &String) -> Option<&ClaimSet> {
|
||||||
|
self.1.get(claims_id)
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Encode and sign a claimset, returning the result as a `VerifiedToken`.
|
||||||
|
pub fn encode_claimset(&self, claims: &ClaimSet) -> Result<VerifiedToken, Error> {
|
||||||
|
let in_db = self.1.get(&claims.id);
|
||||||
|
if in_db.is_some() {
|
||||||
|
let text = jwt::encode(
|
||||||
|
&jwt::Header::default(),
|
||||||
|
&ClaimSetJS::from_claimset(&claims),
|
||||||
|
&(self.0).0,
|
||||||
|
);
|
||||||
|
match text {
|
||||||
|
Ok(text_) => Ok(VerifiedToken {
|
||||||
|
text: text_,
|
||||||
|
claims: claims.clone(),
|
||||||
|
}),
|
||||||
|
Err(err) => Err(Error::JWTError(err)),
|
||||||
|
}
|
||||||
|
} else {
|
||||||
|
Err(Error::UnknownToken)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
|
@ -0,0 +1,37 @@
|
||||||
|
extern crate serde_json;
|
||||||
|
|
||||||
|
use core;
|
||||||
|
|
||||||
|
use std::fs::File;
|
||||||
|
use std::path::Path;
|
||||||
|
use std::io::{Read, Error, Write};
|
||||||
|
|
||||||
|
pub fn save_claims_to_file(claimsets: &Vec<&core::ClaimSet>, path: &String) -> Result<(), Error> {
|
||||||
|
let path = Path::new(path);
|
||||||
|
let mut file = File::create(&path)?;
|
||||||
|
|
||||||
|
let claimsets_js: Vec<core::ClaimSetJS> = claimsets
|
||||||
|
.into_iter()
|
||||||
|
.map(|claims| core::ClaimSetJS::from_claimset(claims))
|
||||||
|
.collect();
|
||||||
|
let claimset_str = serde_json::to_string(&claimsets_js)?;
|
||||||
|
file.write_fmt(format_args!("{}", claimset_str))?;
|
||||||
|
|
||||||
|
Ok(())
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn load_claims_from_file(path: &String) -> Result<Vec<core::ClaimSet>, Error> {
|
||||||
|
let path = Path::new(path);
|
||||||
|
let mut file = File::open(&path)?;
|
||||||
|
let mut text = String::new();
|
||||||
|
|
||||||
|
file.read_to_string(&mut text)?;
|
||||||
|
|
||||||
|
let claimsets_js: Vec<core::ClaimSetJS> = serde_json::from_str(&text)?;
|
||||||
|
let claimsets = claimsets_js
|
||||||
|
.into_iter()
|
||||||
|
.map(|cl_js| core::ClaimSetJS::to_claimset(&cl_js))
|
||||||
|
.collect();
|
||||||
|
|
||||||
|
Ok(claimsets)
|
||||||
|
}
|
|
@ -0,0 +1,30 @@
|
||||||
|
//! The Orizentic token management library
|
||||||
|
//!
|
||||||
|
//! This library provides a high level interface for authentication token management. It wraps
|
||||||
|
//! around the [JWT](https://jwt.io/) standard using the
|
||||||
|
//! [`jsonwebtoken`](https://github.com/Keats/jsonwebtoken) library for serialization and
|
||||||
|
//! validation.
|
||||||
|
//!
|
||||||
|
//! Functionality revolves around the relationship between a [ClaimSet](struct.ClaimSet.html), a
|
||||||
|
//! [VerifiedToken](struct.VerifiedToken.html), and an
|
||||||
|
//! [UnverifiedToken](struct.UnverifiedToken.html). A [ClaimSet](struct.ClaimSet.html) is
|
||||||
|
//! considered informative and stores all of the information about the permissions and resources
|
||||||
|
//! that the token bearer should have access to. [VerifiedToken](struct.VerifiedToken.html) and
|
||||||
|
//! [UnverifiedToken](struct.UnverifiedToken.html) are the result of the process of decoding a
|
||||||
|
//! string JWT, and inherently specify whether the decoding process verified the signature,
|
||||||
|
//! expiration time, and presence in the database.
|
||||||
|
//!
|
||||||
|
//! Claimsets can be persisted to and loaded from a JSON file through the `filedb`
//! module.
|
||||||
|
//!
|
||||||
|
//! No setup is necessary when using this library to decode JWT strings without
//! verification; see `UnverifiedToken::decode_text`.
|
||||||
|
|
||||||
|
#[macro_use]
|
||||||
|
extern crate serde_derive;
|
||||||
|
extern crate thiserror;
|
||||||
|
|
||||||
|
pub use core::*;
|
||||||
|
|
||||||
|
mod core;
|
||||||
|
pub mod filedb;
|
|
@ -0,0 +1,429 @@
|
||||||
|
extern crate chrono;
|
||||||
|
extern crate orizentic;
|
||||||
|
|
||||||
|
use orizentic::filedb::*;
|
||||||
|
use orizentic::*;
|
||||||
|
use std::fs;
|
||||||
|
use std::ops;
|
||||||
|
use std::thread;
|
||||||
|
use std::time;
|
||||||
|
|
||||||
|
// RAII guard that deletes the named file when dropped, so each test cleans up
// its on-disk claims database even on assertion failure.
struct FileCleanup(String);
|
||||||
|
|
||||||
|
impl FileCleanup {
|
||||||
|
fn new(path: &str) -> FileCleanup {
|
||||||
|
FileCleanup(String::from(path))
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
impl ops::Drop for FileCleanup {
|
||||||
|
fn drop(&mut self) {
|
||||||
|
fs::remove_file(&self.0).expect("failed to remove time series file");
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
#[test]
fn can_create_a_new_claimset() {
    let mut ctx = OrizenticCtx::new(Secret("abcdefg".to_string().into_bytes()), Vec::new());
    // Fixture builder: the two claimsets differ only in their resource name.
    let mk_claims = |resource: &str| {
        ClaimSet::new(
            Issuer(String::from("test")),
            Some(TTL(chrono::Duration::seconds(3600))),
            ResourceName(String::from(resource)),
            Username::from("Savanni"),
            Permissions(vec![
                String::from("read"),
                String::from("write"),
                String::from("grant"),
            ]),
        )
    };

    let claims = mk_claims("resource-1");
    ctx.add_claimset(claims.clone());
    assert_eq!(claims.audience, Username::from("Savanni"));
    match claims.expiration {
        Some(ttl) => assert_eq!(ttl - claims.issued_at, chrono::Duration::seconds(3600)),
        None => panic!("ttl should not be None"),
    }
    assert_eq!(claims.issuer, Issuer(String::from("test")));
    assert_eq!(claims.resource, ResourceName(String::from("resource-1")));
    assert_eq!(
        claims.permissions,
        Permissions(vec![
            String::from("read"),
            String::from("write"),
            String::from("grant"),
        ])
    );
    {
        let tok_list = ctx.list_claimsets();
        assert_eq!(tok_list.len(), 1);
        assert!(tok_list.contains(&&claims));
    }

    let claims2 = mk_claims("resource-2");
    ctx.add_claimset(claims2.clone());

    // A fresh claimset gets a fresh UUID.
    assert_ne!(claims2.id, claims.id);
    assert_eq!(claims2.resource, ResourceName(String::from("resource-2")));

    let tok_list = ctx.list_claimsets();
    assert_eq!(tok_list.len(), 2);
    assert!(tok_list.contains(&&claims));
    assert!(tok_list.contains(&&claims2));
}
|
||||||
|
|
||||||
|
#[test]
fn can_retrieve_claim_by_id() {
    let mut ctx = OrizenticCtx::new(Secret("abcdefg".to_string().into_bytes()), Vec::new());
    // Fixture builder: the two claimsets differ only in their resource name.
    let mk_claims = |resource: &str| {
        ClaimSet::new(
            Issuer(String::from("test")),
            Some(TTL(chrono::Duration::seconds(3600))),
            ResourceName(String::from(resource)),
            Username::from("Savanni"),
            Permissions(vec![
                String::from("read"),
                String::from("write"),
                String::from("grant"),
            ]),
        )
    };
    let claims = mk_claims("resource-1");
    let claims2 = mk_claims("resource-2");
    ctx.add_claimset(claims.clone());
    ctx.add_claimset(claims2.clone());

    assert_eq!(ctx.find_claimset(&claims.id), Some(&claims));
    assert_eq!(ctx.find_claimset(&claims2.id), Some(&claims2));

    // Revoking one claimset must not disturb the other.
    ctx.revoke_claimset(&claims);
    assert_eq!(ctx.find_claimset(&claims.id), None);
    assert_eq!(ctx.find_claimset(&claims2.id), Some(&claims2));
}
|
||||||
|
|
||||||
|
#[test]
fn can_revoke_claim_by_id() {
    let mut ctx = OrizenticCtx::new(Secret("abcdefg".to_string().into_bytes()), Vec::new());
    // Fixture builder: the two claimsets differ only in their resource name.
    let mk_claims = |resource: &str| {
        ClaimSet::new(
            Issuer(String::from("test")),
            Some(TTL(chrono::Duration::seconds(3600))),
            ResourceName(String::from(resource)),
            Username::from("Savanni"),
            Permissions(vec![
                String::from("read"),
                String::from("write"),
                String::from("grant"),
            ]),
        )
    };
    let claims = mk_claims("resource-1");
    let claims2 = mk_claims("resource-2");

    ctx.add_claimset(claims.clone());
    ctx.add_claimset(claims2.clone());

    assert_eq!(ctx.find_claimset(&claims.id), Some(&claims));
    assert_eq!(ctx.find_claimset(&claims2.id), Some(&claims2));

    // Revocation by id (the `jti` value) removes exactly that claimset.
    ctx.revoke_by_uuid(&claims.id);
    assert_eq!(ctx.find_claimset(&claims.id), None);
    assert_eq!(ctx.find_claimset(&claims2.id), Some(&claims2));
}
|
||||||
|
|
||||||
|
#[test]
fn can_revoke_a_token() {
    let mut ctx = OrizenticCtx::new(Secret("abcdefg".to_string().into_bytes()), Vec::new());
    // Fixture builder: the two claimsets differ only in their resource name.
    let mk_claims = |resource: &str| {
        ClaimSet::new(
            Issuer(String::from("test")),
            Some(TTL(chrono::Duration::seconds(3600))),
            ResourceName(String::from(resource)),
            Username::from("Savanni"),
            Permissions(vec![
                String::from("read"),
                String::from("write"),
                String::from("grant"),
            ]),
        )
    };
    let claims = mk_claims("resource-1");
    let claims2 = mk_claims("resource-2");
    ctx.add_claimset(claims.clone());
    ctx.add_claimset(claims2.clone());

    // After revocation only the surviving claimset is listed.
    ctx.revoke_claimset(&claims);
    let tok_list = ctx.list_claimsets();
    assert_eq!(tok_list.len(), 1);
    assert!(!tok_list.contains(&&claims));
    assert!(tok_list.contains(&&claims2));
}
|
||||||
|
|
||||||
|
#[test]
fn rejects_tokens_with_an_invalid_secret() {
    // A token signed under ctx1's secret must not validate under ctx2's secret.
    let mut ctx1 = OrizenticCtx::new(Secret("ctx1".to_string().into_bytes()), Vec::new());
    let ctx2 = OrizenticCtx::new(Secret("ctx2".to_string().into_bytes()), Vec::new());
    let claims = ClaimSet::new(
        Issuer(String::from("test")),
        Some(TTL(chrono::Duration::seconds(3600))),
        ResourceName(String::from("resource-1")),
        Username::from("Savanni"),
        Permissions(["read", "write", "grant"].iter().map(|s| s.to_string()).collect()),
    );
    ctx1.add_claimset(claims.clone());
    let encoded_token = ctx1.encode_claimset(&claims).ok().unwrap();
    assert!(ctx2.decode_and_validate_text(encoded_token.text).is_err());
}
|
||||||
|
|
||||||
|
#[test]
fn rejects_tokens_that_are_absent_from_the_database() {
    let mut ctx = OrizenticCtx::new(Secret("ctx".to_string().into_bytes()), Vec::new());
    let claims = ClaimSet::new(
        Issuer(String::from("test")),
        Some(TTL(chrono::Duration::seconds(3600))),
        ResourceName(String::from("resource-1")),
        Username::from("Savanni"),
        Permissions(["read", "write", "grant"].iter().map(|s| s.to_string()).collect()),
    );
    ctx.add_claimset(claims.clone());
    let encoded_token = ctx.encode_claimset(&claims).ok().unwrap();

    // Revoke after encoding: the signature is still valid, yet validation must fail
    // because the claimset is no longer in the database.
    ctx.revoke_claimset(&claims);
    assert!(ctx.decode_and_validate_text(encoded_token.text).is_err());
}
|
||||||
|
|
||||||
|
#[test]
fn validates_present_tokens_with_a_valid_secret() {
    // Happy path: same secret, claimset present in the database.
    let mut ctx = OrizenticCtx::new(Secret("ctx".to_string().into_bytes()), Vec::new());
    let claims = ClaimSet::new(
        Issuer(String::from("test")),
        Some(TTL(chrono::Duration::seconds(3600))),
        ResourceName(String::from("resource-1")),
        Username::from("Savanni"),
        Permissions(["read", "write", "grant"].iter().map(|s| s.to_string()).collect()),
    );
    ctx.add_claimset(claims.clone());
    let encoded_token = ctx.encode_claimset(&claims).ok().unwrap();
    assert!(ctx.decode_and_validate_text(encoded_token.text).is_ok());
}
|
||||||
|
|
||||||
|
#[test]
fn rejects_expired_tokens() {
    let mut ctx = OrizenticCtx::new(Secret("ctx".to_string().into_bytes()), Vec::new());
    // One-second TTL so the token can be forced to expire within the test.
    let claims = ClaimSet::new(
        Issuer(String::from("test")),
        Some(TTL(chrono::Duration::seconds(1))),
        ResourceName(String::from("resource-1")),
        Username::from("Savanni"),
        Permissions(vec![
            String::from("read"),
            String::from("write"),
            String::from("grant"),
        ]),
    );
    ctx.add_claimset(claims.clone());
    // Sleep past the TTL so the token is already expired when validated.
    // NOTE(review): this real-time sleep adds 2s to the suite; a clock injection
    // point in the library would make this testable without sleeping.
    thread::sleep(time::Duration::from_secs(2));
    let encoded_token = ctx.encode_claimset(&claims).ok().unwrap();
    assert!(ctx.decode_and_validate_text(encoded_token.text).is_err());
}
|
||||||
|
|
||||||
|
#[test]
fn accepts_tokens_that_have_no_expiration() {
    // TTL of None means the token never expires and must always validate.
    let mut ctx = OrizenticCtx::new(Secret("ctx".to_string().into_bytes()), Vec::new());
    let claims = ClaimSet::new(
        Issuer(String::from("test")),
        None,
        ResourceName(String::from("resource-1")),
        Username::from("Savanni"),
        Permissions(["read", "write", "grant"].iter().map(|s| s.to_string()).collect()),
    );
    ctx.add_claimset(claims.clone());
    let encoded_token = ctx.encode_claimset(&claims).ok().unwrap();
    assert!(ctx.decode_and_validate_text(encoded_token.text).is_ok());
}
|
||||||
|
|
||||||
|
#[test]
fn authorizes_a_token_with_the_correct_resource_and_permissions() {
    let mut ctx = OrizenticCtx::new(Secret("ctx".to_string().into_bytes()), Vec::new());
    let claims = ClaimSet::new(
        Issuer(String::from("test")),
        None,
        ResourceName(String::from("resource-1")),
        Username::from("Savanni"),
        Permissions(["read", "write", "grant"].iter().map(|s| s.to_string()).collect()),
    );
    ctx.add_claimset(claims.clone());
    let encoded_token = ctx.encode_claimset(&claims).ok().unwrap();
    let token = ctx
        .decode_and_validate_text(encoded_token.text)
        .ok()
        .unwrap();
    // Authorization succeeds when both the resource matches and "grant" is present.
    let authorized = token.check_authorizations(|rn: &ResourceName, perms: &Permissions| {
        *rn == ResourceName(String::from("resource-1")) && perms.0.contains(&String::from("grant"))
    });
    assert!(authorized);
}
|
||||||
|
|
||||||
|
#[test]
fn rejects_a_token_with_the_incorrect_permissions() {
    let mut ctx = OrizenticCtx::new(Secret("ctx".to_string().into_bytes()), Vec::new());
    // No "grant" permission in this fixture.
    let claims = ClaimSet::new(
        Issuer(String::from("test")),
        None,
        ResourceName(String::from("resource-1")),
        Username::from("Savanni"),
        Permissions(["read", "write"].iter().map(|s| s.to_string()).collect()),
    );
    ctx.add_claimset(claims.clone());
    let encoded_token = ctx.encode_claimset(&claims).ok().unwrap();
    let token = ctx
        .decode_and_validate_text(encoded_token.text)
        .ok()
        .unwrap();
    // The predicate requires "grant", which this token lacks.
    let authorized = token.check_authorizations(|rn: &ResourceName, perms: &Permissions| {
        *rn == ResourceName(String::from("resource-1")) && perms.0.contains(&String::from("grant"))
    });
    assert!(!authorized);
}
|
||||||
|
|
||||||
|
#[test]
fn rejects_a_token_with_the_incorrect_resource_name() {
    let mut ctx = OrizenticCtx::new(Secret("ctx".to_string().into_bytes()), Vec::new());
    // Token is for resource-2, but the predicate below demands resource-1.
    let claims = ClaimSet::new(
        Issuer(String::from("test")),
        None,
        ResourceName(String::from("resource-2")),
        Username::from("Savanni"),
        Permissions(["read", "write", "grant"].iter().map(|s| s.to_string()).collect()),
    );
    ctx.add_claimset(claims.clone());
    let encoded_token = ctx.encode_claimset(&claims).ok().unwrap();
    let token = ctx
        .decode_and_validate_text(encoded_token.text)
        .ok()
        .unwrap();
    let authorized = token.check_authorizations(|rn: &ResourceName, perms: &Permissions| {
        *rn == ResourceName(String::from("resource-1")) && perms.0.contains(&String::from("grant"))
    });
    assert!(!authorized);
}
|
||||||
|
|
||||||
|
#[test]
fn claims_serialize_to_json() {
    let claims = ClaimSet::new(
        Issuer(String::from("test")),
        None,
        ResourceName(String::from("resource-2")),
        Username::from("Savanni"),
        Permissions(vec![
            String::from("read"),
            String::from("write"),
            String::from("grant"),
        ]),
    );

    // The serialized form uses the JWT claim name `jti` for the claimset id.
    let expected_jti = format!("\"jti\":\"{}\"", claims.id);

    // Fix: removed a dead commented-out line left over from earlier debugging.
    let claim_str = claims.to_json().expect("to_json threw an error");
    assert!(claim_str.contains(&expected_jti));

    // Round-trip: deserializing the JSON must reproduce the original claimset exactly.
    let claims_ = ClaimSet::from_json(&claim_str).expect("from_json threw an error");
    assert_eq!(claims, claims_);
}
|
||||||
|
|
||||||
|
#[test]
|
||||||
|
fn save_and_load() {
|
||||||
|
let _file_cleanup = FileCleanup::new("var/claims.db");
|
||||||
|
let mut ctx = OrizenticCtx::new(Secret("ctx".to_string().into_bytes()), Vec::new());
|
||||||
|
let claims = ClaimSet::new(
|
||||||
|
Issuer(String::from("test")),
|
||||||
|
None,
|
||||||
|
ResourceName(String::from("resource-2")),
|
||||||
|
Username::from("Savanni"),
|
||||||
|
Permissions(vec![
|
||||||
|
String::from("read"),
|
||||||
|
String::from("write"),
|
||||||
|
String::from("grant"),
|
||||||
|
]),
|
||||||
|
);
|
||||||
|
ctx.add_claimset(claims.clone());
|
||||||
|
|
||||||
|
let claims2 = ClaimSet::new(
|
||||||
|
Issuer(String::from("test")),
|
||||||
|
Some(TTL(chrono::Duration::seconds(3600))),
|
||||||
|
ResourceName(String::from("resource-2")),
|
||||||
|
Username::from("Savanni"),
|
||||||
|
Permissions(vec![
|
||||||
|
String::from("read"),
|
||||||
|
String::from("write"),
|
||||||
|
String::from("grant"),
|
||||||
|
]),
|
||||||
|
);
|
||||||
|
ctx.add_claimset(claims2.clone());
|
||||||
|
|
||||||
|
let res = save_claims_to_file(&ctx.list_claimsets(), &String::from("var/claims.db"));
|
||||||
|
assert!(res.is_ok());
|
||||||
|
|
||||||
|
let claimset = load_claims_from_file(&String::from("var/claims.db"));
|
||||||
|
match claimset {
|
||||||
|
Ok(claimset_) => {
|
||||||
|
assert!(claimset_.contains(&claims));
|
||||||
|
assert!(claimset_.contains(&claims2));
|
||||||
|
}
|
||||||
|
Err(err) => assert!(false, "{}", err),
|
||||||
|
}
|
||||||
|
}
|
|
@ -1,9 +1,8 @@
|
||||||
[package]
|
[package]
|
||||||
name = "flow"
|
name = "result-extended"
|
||||||
version = "0.1.0"
|
version = "0.1.0"
|
||||||
edition = "2021"
|
edition = "2021"
|
||||||
license = "GPL-3.0-only"
|
license = "GPL-3.0-only"
|
||||||
license-file = "../COPYING"
|
|
||||||
|
|
||||||
# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html
|
# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html
|
||||||
|
|
|
@ -33,84 +33,84 @@ use std::{error::Error, fmt};
|
||||||
/// statement.
|
/// statement.
|
||||||
pub trait FatalError: Error {}
|
pub trait FatalError: Error {}
|
||||||
|
|
||||||
/// Flow<A, FE, E> represents a return value that might be a success, might be a fatal error, or
|
/// Result<A, FE, E> represents a return value that might be a success, might be a fatal error, or
|
||||||
/// might be a normal handleable error.
|
/// might be a normal handleable error.
|
||||||
pub enum Flow<A, FE, E> {
|
pub enum Result<A, E, FE> {
|
||||||
/// The operation was successful
|
/// The operation was successful
|
||||||
Ok(A),
|
Ok(A),
|
||||||
|
/// Ordinary errors. These should be handled and the application should recover gracefully.
|
||||||
|
Err(E),
|
||||||
/// The operation encountered a fatal error. These should be bubbled up to a level that can
|
/// The operation encountered a fatal error. These should be bubbled up to a level that can
|
||||||
/// safely shut the application down.
|
/// safely shut the application down.
|
||||||
Fatal(FE),
|
Fatal(FE),
|
||||||
/// Ordinary errors. These should be handled and the application should recover gracefully.
|
|
||||||
Err(E),
|
|
||||||
}
|
}
|
||||||
|
|
||||||
impl<A, FE, E> Flow<A, FE, E> {
|
impl<A, E, FE> Result<A, E, FE> {
|
||||||
/// Apply an infallible function to a successful value.
|
/// Apply an infallible function to a successful value.
|
||||||
pub fn map<B, O>(self, mapper: O) -> Flow<B, FE, E>
|
pub fn map<B, O>(self, mapper: O) -> Result<B, E, FE>
|
||||||
where
|
where
|
||||||
O: FnOnce(A) -> B,
|
O: FnOnce(A) -> B,
|
||||||
{
|
{
|
||||||
match self {
|
match self {
|
||||||
Flow::Ok(val) => Flow::Ok(mapper(val)),
|
Result::Ok(val) => Result::Ok(mapper(val)),
|
||||||
Flow::Fatal(err) => Flow::Fatal(err),
|
Result::Err(err) => Result::Err(err),
|
||||||
Flow::Err(err) => Flow::Err(err),
|
Result::Fatal(err) => Result::Fatal(err),
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
/// Apply a potentially fallible function to a successful value.
|
/// Apply a potentially fallible function to a successful value.
|
||||||
///
|
///
|
||||||
/// Like `Result.and_then`, the mapping function can itself fail.
|
/// Like `Result.and_then`, the mapping function can itself fail.
|
||||||
pub fn and_then<B, O>(self, handler: O) -> Flow<B, FE, E>
|
pub fn and_then<B, O>(self, handler: O) -> Result<B, E, FE>
|
||||||
where
|
where
|
||||||
O: FnOnce(A) -> Flow<B, FE, E>,
|
O: FnOnce(A) -> Result<B, E, FE>,
|
||||||
{
|
{
|
||||||
match self {
|
match self {
|
||||||
Flow::Ok(val) => handler(val),
|
Result::Ok(val) => handler(val),
|
||||||
Flow::Fatal(err) => Flow::Fatal(err),
|
Result::Err(err) => Result::Err(err),
|
||||||
Flow::Err(err) => Flow::Err(err),
|
Result::Fatal(err) => Result::Fatal(err),
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
/// Map a normal error from one type to another. This is useful for converting an error from
|
/// Map a normal error from one type to another. This is useful for converting an error from
|
||||||
/// one type to another, especially in re-throwing an underlying error. `?` syntax does not
|
/// one type to another, especially in re-throwing an underlying error. `?` syntax does not
|
||||||
/// work with `Flow`, so you will likely need to use this a lot.
|
/// work with `Result`, so you will likely need to use this a lot.
|
||||||
pub fn map_err<F, O>(self, mapper: O) -> Flow<A, FE, F>
|
pub fn map_err<F, O>(self, mapper: O) -> Result<A, F, FE>
|
||||||
where
|
where
|
||||||
O: FnOnce(E) -> F,
|
O: FnOnce(E) -> F,
|
||||||
{
|
{
|
||||||
match self {
|
match self {
|
||||||
Flow::Ok(val) => Flow::Ok(val),
|
Result::Ok(val) => Result::Ok(val),
|
||||||
Flow::Fatal(err) => Flow::Fatal(err),
|
Result::Err(err) => Result::Err(mapper(err)),
|
||||||
Flow::Err(err) => Flow::Err(mapper(err)),
|
Result::Fatal(err) => Result::Fatal(err),
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
/// Provide a function to use to recover from (or simply re-throw) an error.
|
/// Provide a function to use to recover from (or simply re-throw) an error.
|
||||||
pub fn or_else<O, F>(self, handler: O) -> Flow<A, FE, F>
|
pub fn or_else<O, F>(self, handler: O) -> Result<A, F, FE>
|
||||||
where
|
where
|
||||||
O: FnOnce(E) -> Flow<A, FE, F>,
|
O: FnOnce(E) -> Result<A, F, FE>,
|
||||||
{
|
{
|
||||||
match self {
|
match self {
|
||||||
Flow::Ok(val) => Flow::Ok(val),
|
Result::Ok(val) => Result::Ok(val),
|
||||||
Flow::Fatal(err) => Flow::Fatal(err),
|
Result::Err(err) => handler(err),
|
||||||
Flow::Err(err) => handler(err),
|
Result::Fatal(err) => Result::Fatal(err),
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
/// Convert from a normal `Result` type to a `Flow` type. The error condition for a `Result` will
|
/// Convert from a normal `Result` type to a `Result` type. The error condition for a `Result` will
|
||||||
/// be treated as `Flow::Err`, never `Flow::Fatal`.
|
/// be treated as `Result::Err`, never `Result::Fatal`.
|
||||||
impl<A, FE, E> From<Result<A, E>> for Flow<A, FE, E> {
|
impl<A, E, FE> From<std::result::Result<A, E>> for Result<A, E, FE> {
|
||||||
fn from(r: Result<A, E>) -> Self {
|
fn from(r: std::result::Result<A, E>) -> Self {
|
||||||
match r {
|
match r {
|
||||||
Ok(val) => Flow::Ok(val),
|
Ok(val) => Result::Ok(val),
|
||||||
Err(err) => Flow::Err(err),
|
Err(err) => Result::Err(err),
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
impl<A, FE, E> fmt::Debug for Flow<A, FE, E>
|
impl<A, E, FE> fmt::Debug for Result<A, E, FE>
|
||||||
where
|
where
|
||||||
A: fmt::Debug,
|
A: fmt::Debug,
|
||||||
FE: fmt::Debug,
|
FE: fmt::Debug,
|
||||||
|
@ -118,14 +118,14 @@ where
|
||||||
{
|
{
|
||||||
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
|
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
|
||||||
match self {
|
match self {
|
||||||
Flow::Ok(val) => f.write_fmt(format_args!("Flow::Ok {:?}", val)),
|
Result::Ok(val) => f.write_fmt(format_args!("Result::Ok {:?}", val)),
|
||||||
Flow::Err(err) => f.write_fmt(format_args!("Flow::Err {:?}", err)),
|
Result::Err(err) => f.write_fmt(format_args!("Result::Err {:?}", err)),
|
||||||
Flow::Fatal(err) => f.write_fmt(format_args!("Flow::Fatal {:?}", err)),
|
Result::Fatal(err) => f.write_fmt(format_args!("Result::Fatal {:?}", err)),
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
impl<A, FE, E> PartialEq for Flow<A, FE, E>
|
impl<A, E, FE> PartialEq for Result<A, E, FE>
|
||||||
where
|
where
|
||||||
A: PartialEq,
|
A: PartialEq,
|
||||||
FE: PartialEq,
|
FE: PartialEq,
|
||||||
|
@ -133,27 +133,27 @@ where
|
||||||
{
|
{
|
||||||
fn eq(&self, rhs: &Self) -> bool {
|
fn eq(&self, rhs: &Self) -> bool {
|
||||||
match (self, rhs) {
|
match (self, rhs) {
|
||||||
(Flow::Ok(val), Flow::Ok(rhs)) => val == rhs,
|
(Result::Ok(val), Result::Ok(rhs)) => val == rhs,
|
||||||
(Flow::Err(_), Flow::Err(_)) => true,
|
(Result::Err(_), Result::Err(_)) => true,
|
||||||
(Flow::Fatal(_), Flow::Fatal(_)) => true,
|
(Result::Fatal(_), Result::Fatal(_)) => true,
|
||||||
_ => false,
|
_ => false,
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
/// Convenience function to create an ok value.
|
/// Convenience function to create an ok value.
|
||||||
pub fn ok<A, FE: FatalError, E: Error>(val: A) -> Flow<A, FE, E> {
|
pub fn ok<A, E: Error, FE: FatalError>(val: A) -> Result<A, E, FE> {
|
||||||
Flow::Ok(val)
|
Result::Ok(val)
|
||||||
}
|
}
|
||||||
|
|
||||||
/// Convenience function to create an error value.
|
/// Convenience function to create an error value.
|
||||||
pub fn error<A, FE: FatalError, E: Error>(err: E) -> Flow<A, FE, E> {
|
pub fn error<A, E: Error, FE: FatalError>(err: E) -> Result<A, E, FE> {
|
||||||
Flow::Err(err)
|
Result::Err(err)
|
||||||
}
|
}
|
||||||
|
|
||||||
/// Convenience function to create a fatal value.
|
/// Convenience function to create a fatal value.
|
||||||
pub fn fatal<A, FE: FatalError, E: Error>(err: FE) -> Flow<A, FE, E> {
|
pub fn fatal<A, E: Error, FE: FatalError>(err: FE) -> Result<A, E, FE> {
|
||||||
Flow::Fatal(err)
|
Result::Fatal(err)
|
||||||
}
|
}
|
||||||
|
|
||||||
/// Return early from the current function if the value is a fatal error.
|
/// Return early from the current function if the value is a fatal error.
|
||||||
|
@ -161,9 +161,9 @@ pub fn fatal<A, FE: FatalError, E: Error>(err: FE) -> Flow<A, FE, E> {
|
||||||
macro_rules! return_fatal {
|
macro_rules! return_fatal {
|
||||||
($x:expr) => {
|
($x:expr) => {
|
||||||
match $x {
|
match $x {
|
||||||
Flow::Fatal(err) => return Flow::Fatal(err),
|
Result::Fatal(err) => return Result::Fatal(err),
|
||||||
Flow::Err(err) => Err(err),
|
Result::Err(err) => Err(err),
|
||||||
Flow::Ok(val) => Ok(val),
|
Result::Ok(val) => Ok(val),
|
||||||
}
|
}
|
||||||
};
|
};
|
||||||
}
|
}
|
||||||
|
@ -173,9 +173,9 @@ macro_rules! return_fatal {
|
||||||
macro_rules! return_error {
|
macro_rules! return_error {
|
||||||
($x:expr) => {
|
($x:expr) => {
|
||||||
match $x {
|
match $x {
|
||||||
Flow::Ok(val) => val,
|
Result::Ok(val) => val,
|
||||||
Flow::Err(err) => return Flow::Err(err),
|
Result::Err(err) => return Result::Err(err),
|
||||||
Flow::Fatal(err) => return Flow::Fatal(err),
|
Result::Fatal(err) => return Result::Fatal(err),
|
||||||
}
|
}
|
||||||
};
|
};
|
||||||
}
|
}
|
||||||
|
@ -210,45 +210,45 @@ mod test {
|
||||||
|
|
||||||
#[test]
|
#[test]
|
||||||
fn it_can_map_things() {
|
fn it_can_map_things() {
|
||||||
let success: Flow<i32, FatalError, Error> = ok(15);
|
let success: Result<i32, Error, FatalError> = ok(15);
|
||||||
assert_eq!(ok(16), success.map(|v| v + 1));
|
assert_eq!(ok(16), success.map(|v| v + 1));
|
||||||
}
|
}
|
||||||
|
|
||||||
#[test]
|
#[test]
|
||||||
fn it_can_chain_success() {
|
fn it_can_chain_success() {
|
||||||
let success: Flow<i32, FatalError, Error> = ok(15);
|
let success: Result<i32, Error, FatalError> = ok(15);
|
||||||
assert_eq!(ok(16), success.and_then(|v| ok(v + 1)));
|
assert_eq!(ok(16), success.and_then(|v| ok(v + 1)));
|
||||||
}
|
}
|
||||||
|
|
||||||
#[test]
|
#[test]
|
||||||
fn it_can_handle_an_error() {
|
fn it_can_handle_an_error() {
|
||||||
let failure: Flow<i32, FatalError, Error> = error(Error::Error);
|
let failure: Result<i32, Error, FatalError> = error(Error::Error);
|
||||||
assert_eq!(
|
assert_eq!(
|
||||||
ok::<i32, FatalError, Error>(16),
|
ok::<i32, Error, FatalError>(16),
|
||||||
failure.or_else(|_| ok(16))
|
failure.or_else(|_| ok(16))
|
||||||
);
|
);
|
||||||
}
|
}
|
||||||
|
|
||||||
#[test]
|
#[test]
|
||||||
fn early_exit_on_fatal() {
|
fn early_exit_on_fatal() {
|
||||||
fn ok_func() -> Flow<i32, FatalError, Error> {
|
fn ok_func() -> Result<i32, Error, FatalError> {
|
||||||
let value = return_fatal!(ok::<i32, FatalError, Error>(15));
|
let value = return_fatal!(ok::<i32, Error, FatalError>(15));
|
||||||
match value {
|
match value {
|
||||||
Ok(_) => ok(14),
|
Ok(_) => ok(14),
|
||||||
Err(err) => error(err),
|
Err(err) => error(err),
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
fn err_func() -> Flow<i32, FatalError, Error> {
|
fn err_func() -> Result<i32, Error, FatalError> {
|
||||||
let value = return_fatal!(error::<i32, FatalError, Error>(Error::Error));
|
let value = return_fatal!(error::<i32, Error, FatalError>(Error::Error));
|
||||||
match value {
|
match value {
|
||||||
Ok(_) => panic!("shouldn't have gotten here"),
|
Ok(_) => panic!("shouldn't have gotten here"),
|
||||||
Err(_) => ok(0),
|
Err(_) => ok(0),
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
fn fatal_func() -> Flow<i32, FatalError, Error> {
|
fn fatal_func() -> Result<i32, Error, FatalError> {
|
||||||
return_fatal!(fatal::<i32, FatalError, Error>(FatalError::FatalError));
|
let _ = return_fatal!(fatal::<i32, Error, FatalError>(FatalError::FatalError));
|
||||||
panic!("failed to bail");
|
panic!("failed to bail");
|
||||||
}
|
}
|
||||||
|
|
||||||
|
@ -259,19 +259,19 @@ mod test {
|
||||||
|
|
||||||
#[test]
|
#[test]
|
||||||
fn it_can_early_exit_on_all_errors() {
|
fn it_can_early_exit_on_all_errors() {
|
||||||
fn ok_func() -> Flow<i32, FatalError, Error> {
|
fn ok_func() -> Result<i32, Error, FatalError> {
|
||||||
let value = return_error!(ok::<i32, FatalError, Error>(15));
|
let value = return_error!(ok::<i32, Error, FatalError>(15));
|
||||||
assert_eq!(value, 15);
|
assert_eq!(value, 15);
|
||||||
ok(14)
|
ok(14)
|
||||||
}
|
}
|
||||||
|
|
||||||
fn err_func() -> Flow<i32, FatalError, Error> {
|
fn err_func() -> Result<i32, Error, FatalError> {
|
||||||
return_error!(error::<i32, FatalError, Error>(Error::Error));
|
return_error!(error::<i32, Error, FatalError>(Error::Error));
|
||||||
panic!("failed to bail");
|
panic!("failed to bail");
|
||||||
}
|
}
|
||||||
|
|
||||||
fn fatal_func() -> Flow<i32, FatalError, Error> {
|
fn fatal_func() -> Result<i32, Error, FatalError> {
|
||||||
return_error!(fatal::<i32, FatalError, Error>(FatalError::FatalError));
|
return_error!(fatal::<i32, Error, FatalError>(FatalError::FatalError));
|
||||||
panic!("failed to bail");
|
panic!("failed to bail");
|
||||||
}
|
}
|
||||||
|
|
|
@ -1,6 +1,6 @@
|
||||||
use chrono::{Datelike, NaiveDate};
|
use chrono::{Datelike, NaiveDate};
|
||||||
use serde::{Deserialize, Serialize};
|
use serde::{Deserialize, Serialize};
|
||||||
use std::num::ParseIntError;
|
use std::{fmt, num::ParseIntError};
|
||||||
use thiserror::Error;
|
use thiserror::Error;
|
||||||
use typeshare::typeshare;
|
use typeshare::typeshare;
|
||||||
|
|
||||||
|
@ -11,9 +11,6 @@ pub enum Error {
|
||||||
|
|
||||||
#[error("Invalid date")]
|
#[error("Invalid date")]
|
||||||
InvalidDate,
|
InvalidDate,
|
||||||
|
|
||||||
#[error("unsupported date format")]
|
|
||||||
Unsupported,
|
|
||||||
}
|
}
|
||||||
|
|
||||||
#[derive(Clone, Debug, PartialEq, PartialOrd, Deserialize, Serialize)]
|
#[derive(Clone, Debug, PartialEq, PartialOrd, Deserialize, Serialize)]
|
||||||
|
@ -24,12 +21,12 @@ pub enum Date {
|
||||||
Date(chrono::NaiveDate),
|
Date(chrono::NaiveDate),
|
||||||
}
|
}
|
||||||
|
|
||||||
impl Date {
|
impl fmt::Display for Date {
|
||||||
pub fn to_string(&self) -> String {
|
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
|
||||||
match self {
|
match self {
|
||||||
Date::Year(y) => format!("{}", y),
|
Date::Year(y) => write!(f, "{}", y),
|
||||||
Date::YearMonth(y, m) => format!("{}-{}", y, m),
|
Date::YearMonth(y, m) => write!(f, "{}-{}", y, m),
|
||||||
Date::Date(date) => format!("{}-{}-{}", date.year(), date.month(), date.day()),
|
Date::Date(date) => write!(f, "{}-{}-{}", date.year(), date.month(), date.day()),
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
@ -71,13 +68,13 @@ impl TryFrom<&str> for Date {
|
||||||
*/
|
*/
|
||||||
|
|
||||||
fn parse_numbers(s: &str) -> Result<Vec<i32>, Error> {
|
fn parse_numbers(s: &str) -> Result<Vec<i32>, Error> {
|
||||||
s.split("-")
|
s.split('-')
|
||||||
.map(|s| s.parse::<i32>().map_err(|err| Error::ParseNumberError(err)))
|
.map(|s| s.parse::<i32>().map_err(Error::ParseNumberError))
|
||||||
.collect::<Result<Vec<i32>, Error>>()
|
.collect::<Result<Vec<i32>, Error>>()
|
||||||
}
|
}
|
||||||
|
|
||||||
pub fn parse_date_field(s: &str) -> Result<Vec<Date>, Error> {
|
pub fn parse_date_field(s: &str) -> Result<Vec<Date>, Error> {
|
||||||
let date_elements = s.split(",");
|
let date_elements = s.split(',');
|
||||||
let mut dates = Vec::new();
|
let mut dates = Vec::new();
|
||||||
|
|
||||||
let mut most_recent: Option<Date> = None;
|
let mut most_recent: Option<Date> = None;
|
||||||
|
@ -96,9 +93,9 @@ pub fn parse_date_field(s: &str) -> Result<Vec<Date>, Error> {
|
||||||
None => Date::Year(*v1),
|
None => Date::Year(*v1),
|
||||||
},
|
},
|
||||||
[v1, v2] => Date::YearMonth(*v1, *v2 as u32),
|
[v1, v2] => Date::YearMonth(*v1, *v2 as u32),
|
||||||
[v1, v2, v3, ..] => Date::Date(
|
[v1, v2, v3, ..] => {
|
||||||
NaiveDate::from_ymd_opt(v1.clone(), v2.clone() as u32, v3.clone() as u32).unwrap(),
|
Date::Date(NaiveDate::from_ymd_opt(*v1, *v2 as u32, *v3 as u32).unwrap())
|
||||||
),
|
}
|
||||||
};
|
};
|
||||||
dates.push(new_date.clone());
|
dates.push(new_date.clone());
|
||||||
most_recent = Some(new_date);
|
most_recent = Some(new_date);
|
||||||
|
|
|
@ -95,6 +95,7 @@ impl Deref for Game {
|
||||||
impl TryFrom<Tree> for Game {
|
impl TryFrom<Tree> for Game {
|
||||||
type Error = Error;
|
type Error = Error;
|
||||||
|
|
||||||
|
#[allow(clippy::field_reassign_with_default)]
|
||||||
fn try_from(tree: Tree) -> Result<Self, Self::Error> {
|
fn try_from(tree: Tree) -> Result<Self, Self::Error> {
|
||||||
let board_size = match tree.root.find_prop("SZ") {
|
let board_size = match tree.root.find_prop("SZ") {
|
||||||
Some(prop) => Size::try_from(prop.values[0].as_str())?,
|
Some(prop) => Size::try_from(prop.values[0].as_str())?,
|
||||||
|
@ -131,7 +132,7 @@ impl TryFrom<Tree> for Game {
|
||||||
.root
|
.root
|
||||||
.find_prop("TM")
|
.find_prop("TM")
|
||||||
.and_then(|prop| prop.values[0].parse::<u64>().ok())
|
.and_then(|prop| prop.values[0].parse::<u64>().ok())
|
||||||
.and_then(|seconds| Some(std::time::Duration::from_secs(seconds)));
|
.map(std::time::Duration::from_secs);
|
||||||
|
|
||||||
info.date = tree
|
info.date = tree
|
||||||
.root
|
.root
|
||||||
|
@ -182,7 +183,7 @@ pub enum Rank {
|
||||||
impl TryFrom<&str> for Rank {
|
impl TryFrom<&str> for Rank {
|
||||||
type Error = String;
|
type Error = String;
|
||||||
fn try_from(r: &str) -> Result<Rank, Self::Error> {
|
fn try_from(r: &str) -> Result<Rank, Self::Error> {
|
||||||
let parts = r.split(" ").map(|s| s.to_owned()).collect::<Vec<String>>();
|
let parts = r.split(' ').map(|s| s.to_owned()).collect::<Vec<String>>();
|
||||||
let cnt = parts[0].parse::<u8>().map_err(|err| format!("{:?}", err))?;
|
let cnt = parts[0].parse::<u8>().map_err(|err| format!("{:?}", err))?;
|
||||||
match parts[1].to_ascii_lowercase().as_str() {
|
match parts[1].to_ascii_lowercase().as_str() {
|
||||||
"kyu" => Ok(Rank::Kyu(cnt)),
|
"kyu" => Ok(Rank::Kyu(cnt)),
|
||||||
|
|
Loading…
Reference in New Issue