Compare commits
1 Commits
7e3ee9a5b7
...
0218783f9a
Author | SHA1 | Date |
---|---|---|
Savanni D'Gerinel | 0218783f9a |
|
@ -3,9 +3,3 @@ target
|
||||||
node_modules
|
node_modules
|
||||||
dist
|
dist
|
||||||
result
|
result
|
||||||
*.tgz
|
|
||||||
*.tar.gz
|
|
||||||
file-service/*.sqlite
|
|
||||||
file-service/*.sqlite-shm
|
|
||||||
file-service/*.sqlite-wal
|
|
||||||
file-service/var
|
|
||||||
|
|
File diff suppressed because it is too large
Load Diff
24
Cargo.toml
24
Cargo.toml
|
@ -1,24 +0,0 @@
|
||||||
[workspace]
|
|
||||||
members = [
|
|
||||||
"changeset",
|
|
||||||
"config",
|
|
||||||
"config-derive",
|
|
||||||
"coordinates",
|
|
||||||
"cyberpunk-splash",
|
|
||||||
"dashboard",
|
|
||||||
"emseries",
|
|
||||||
"file-service",
|
|
||||||
"fluent-ergonomics",
|
|
||||||
"geo-types",
|
|
||||||
"gm-control-panel",
|
|
||||||
"hex-grid",
|
|
||||||
"ifc",
|
|
||||||
"kifu/core",
|
|
||||||
"kifu/gtk",
|
|
||||||
"memorycache",
|
|
||||||
"nom-training",
|
|
||||||
"result-extended",
|
|
||||||
"screenplay",
|
|
||||||
"sgf",
|
|
||||||
"tree",
|
|
||||||
]
|
|
36
Makefile
36
Makefile
|
@ -1,9 +1,39 @@
|
||||||
|
|
||||||
all: test bin
|
changeset-dev:
|
||||||
|
cd changeset && make dev
|
||||||
|
|
||||||
test: kifu-core/test-oneshot sgf/test-oneshot
|
changeset-test:
|
||||||
|
cd changeset && make test
|
||||||
|
|
||||||
bin: kifu-gtk
|
coordinates-dev:
|
||||||
|
cd coordinates && make dev
|
||||||
|
|
||||||
|
coordinates-test:
|
||||||
|
cd coordinates && make test
|
||||||
|
|
||||||
|
emseries-dev:
|
||||||
|
cd emseries && make dev
|
||||||
|
|
||||||
|
emseries-test:
|
||||||
|
cd emseries && make test
|
||||||
|
|
||||||
|
flow-dev:
|
||||||
|
cd flow && make dev
|
||||||
|
|
||||||
|
flow-test:
|
||||||
|
cd flow && make test
|
||||||
|
|
||||||
|
fluent-ergonomics-dev:
|
||||||
|
cd fluent-ergonomics && make dev
|
||||||
|
|
||||||
|
fluent-ergonomics-test:
|
||||||
|
cd fluent-ergonomics && make test
|
||||||
|
|
||||||
|
ifc-dev:
|
||||||
|
cd ifc && make dev
|
||||||
|
|
||||||
|
ifc-test:
|
||||||
|
cd ifc && make test
|
||||||
|
|
||||||
kifu-core/dev:
|
kifu-core/dev:
|
||||||
cd kifu/core && make test
|
cd kifu/core && make test
|
||||||
|
|
72
build.sh
72
build.sh
|
@ -1,72 +0,0 @@
|
||||||
#!/usr/bin/env bash
|
|
||||||
|
|
||||||
set -euo pipefail
|
|
||||||
|
|
||||||
RUST_ALL_TARGETS=(
|
|
||||||
"changeset"
|
|
||||||
"config"
|
|
||||||
"config-derive"
|
|
||||||
"coordinates"
|
|
||||||
"cyberpunk-splash"
|
|
||||||
"dashboard"
|
|
||||||
"emseries"
|
|
||||||
"file-service"
|
|
||||||
"fluent-ergonomics"
|
|
||||||
"geo-types"
|
|
||||||
"gm-control-panel"
|
|
||||||
"hex-grid"
|
|
||||||
"ifc"
|
|
||||||
"kifu-core"
|
|
||||||
"kifu-gtk"
|
|
||||||
"memorycache"
|
|
||||||
"nom-training"
|
|
||||||
"result-extended"
|
|
||||||
"screenplay"
|
|
||||||
"sgf"
|
|
||||||
"tree"
|
|
||||||
)
|
|
||||||
|
|
||||||
build_rust_targets() {
|
|
||||||
local CMD=$1
|
|
||||||
local TARGETS=${@/$CMD}
|
|
||||||
|
|
||||||
for target in $TARGETS; do
|
|
||||||
MODULE=$target CMD=$CMD ./builders/rust.sh
|
|
||||||
done
|
|
||||||
}
|
|
||||||
|
|
||||||
build_dist() {
|
|
||||||
local TARGETS=${@/$CMD}
|
|
||||||
|
|
||||||
for target in $TARGETS; do
|
|
||||||
if [ -f $target/dist.sh ]; then
|
|
||||||
build_rust_targets release ${TARGETS[*]}
|
|
||||||
cd $target && ./dist.sh
|
|
||||||
fi
|
|
||||||
done
|
|
||||||
}
|
|
||||||
|
|
||||||
export CARGO=`which cargo`
|
|
||||||
|
|
||||||
if [ -z "${TARGET-}" ]; then
|
|
||||||
TARGET="all"
|
|
||||||
fi
|
|
||||||
|
|
||||||
if [ -z "${CMD-}" ]; then
|
|
||||||
CMD="test release"
|
|
||||||
fi
|
|
||||||
|
|
||||||
if [ "${CMD}" == "clean" ]; then
|
|
||||||
cargo clean
|
|
||||||
exit 0
|
|
||||||
fi
|
|
||||||
|
|
||||||
for cmd in $CMD; do
|
|
||||||
if [ "${CMD}" == "dist" ]; then
|
|
||||||
build_dist $TARGET
|
|
||||||
elif [ "${TARGET}" == "all" ]; then
|
|
||||||
build_rust_targets $cmd ${RUST_ALL_TARGETS[*]}
|
|
||||||
else
|
|
||||||
build_rust_targets $cmd $TARGET
|
|
||||||
fi
|
|
||||||
done
|
|
|
@ -1,41 +0,0 @@
|
||||||
#!/usr/bin/env bash
|
|
||||||
|
|
||||||
set -euo pipefail
|
|
||||||
|
|
||||||
if [ ! -z "$MODULE" ]; then
|
|
||||||
MODULE="-p $MODULE"
|
|
||||||
fi
|
|
||||||
|
|
||||||
if [ -z "${PARAMS-}" ]; then
|
|
||||||
PARAMS=""
|
|
||||||
fi
|
|
||||||
|
|
||||||
case $CMD in
|
|
||||||
build)
|
|
||||||
$CARGO build $MODULE $PARAMS
|
|
||||||
;;
|
|
||||||
lint)
|
|
||||||
$CARGO clippy $MODULE $PARAMS -- -Dwarnings
|
|
||||||
;;
|
|
||||||
test)
|
|
||||||
$CARGO test $MODULE $PARAMS
|
|
||||||
;;
|
|
||||||
run)
|
|
||||||
$CARGO run $MODULE $PARAMS
|
|
||||||
;;
|
|
||||||
release)
|
|
||||||
$CARGO clippy $MODULE $PARAMS -- -Dwarnings
|
|
||||||
$CARGO build --release $MODULE $PARAMS
|
|
||||||
$CARGO test --release $MODULE $PARAMS
|
|
||||||
;;
|
|
||||||
clean)
|
|
||||||
$CARGO clean $MODULE
|
|
||||||
;;
|
|
||||||
"")
|
|
||||||
echo "No command specified. Use build | lint | test | run | release | clean"
|
|
||||||
;;
|
|
||||||
*)
|
|
||||||
echo "$CMD is unknown. Use build | lint | test | run | release | clean"
|
|
||||||
;;
|
|
||||||
esac
|
|
||||||
|
|
|
@ -3,6 +3,7 @@ name = "changeset"
|
||||||
version = "0.1.0"
|
version = "0.1.0"
|
||||||
edition = "2021"
|
edition = "2021"
|
||||||
license = "GPL-3.0-only"
|
license = "GPL-3.0-only"
|
||||||
|
license-file = "../COPYING"
|
||||||
|
|
||||||
# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html
|
# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html
|
||||||
|
|
||||||
|
|
|
@ -0,0 +1,9 @@
|
||||||
|
|
||||||
|
dev:
|
||||||
|
cargo watch -x build
|
||||||
|
|
||||||
|
test:
|
||||||
|
cargo watch -x test
|
||||||
|
|
||||||
|
test-once:
|
||||||
|
cargo test
|
|
@ -26,7 +26,7 @@ pub enum Change<Key: Eq + Hash, Value> {
|
||||||
NewRecord(Value),
|
NewRecord(Value),
|
||||||
}
|
}
|
||||||
|
|
||||||
#[derive(Clone, Debug, Default)]
|
#[derive(Clone, Debug)]
|
||||||
pub struct Changeset<Key: Clone + Eq + Hash, Value> {
|
pub struct Changeset<Key: Clone + Eq + Hash, Value> {
|
||||||
delete: HashSet<Key>,
|
delete: HashSet<Key>,
|
||||||
update: HashMap<Key, Value>,
|
update: HashMap<Key, Value>,
|
||||||
|
@ -34,6 +34,14 @@ pub struct Changeset<Key: Clone + Eq + Hash, Value> {
|
||||||
}
|
}
|
||||||
|
|
||||||
impl<Key: Clone + Constructable + Eq + Hash, Value> Changeset<Key, Value> {
|
impl<Key: Clone + Constructable + Eq + Hash, Value> Changeset<Key, Value> {
|
||||||
|
pub fn new() -> Self {
|
||||||
|
Self {
|
||||||
|
delete: HashSet::new(),
|
||||||
|
update: HashMap::new(),
|
||||||
|
new: HashMap::new(),
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
pub fn add(&mut self, r: Value) -> Key {
|
pub fn add(&mut self, r: Value) -> Key {
|
||||||
let k = Key::new();
|
let k = Key::new();
|
||||||
self.new.insert(k.clone(), r);
|
self.new.insert(k.clone(), r);
|
||||||
|
@ -82,7 +90,7 @@ impl<Key: Clone + Eq + Hash, Value> From<Changeset<Key, Value>> for Vec<Change<K
|
||||||
.into_iter()
|
.into_iter()
|
||||||
.map(|(k, v)| Change::UpdateRecord((k, v))),
|
.map(|(k, v)| Change::UpdateRecord((k, v))),
|
||||||
)
|
)
|
||||||
.chain(new.into_values().map(|v| Change::NewRecord(v)))
|
.chain(new.into_iter().map(|(_, v)| Change::NewRecord(v)))
|
||||||
.collect()
|
.collect()
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
@ -92,7 +100,7 @@ mod tests {
|
||||||
use super::*;
|
use super::*;
|
||||||
use uuid::Uuid;
|
use uuid::Uuid;
|
||||||
|
|
||||||
#[derive(Clone, PartialEq, Eq, Hash, Default)]
|
#[derive(Clone, PartialEq, Eq, Hash)]
|
||||||
struct Id(Uuid);
|
struct Id(Uuid);
|
||||||
impl Constructable for Id {
|
impl Constructable for Id {
|
||||||
fn new() -> Self {
|
fn new() -> Self {
|
||||||
|
@ -102,7 +110,7 @@ mod tests {
|
||||||
|
|
||||||
#[test]
|
#[test]
|
||||||
fn it_generates_a_new_record() {
|
fn it_generates_a_new_record() {
|
||||||
let mut set: Changeset<Id, String> = Changeset::default();
|
let mut set: Changeset<Id, String> = Changeset::new();
|
||||||
set.add("efgh".to_string());
|
set.add("efgh".to_string());
|
||||||
let changes = Vec::from(set.clone());
|
let changes = Vec::from(set.clone());
|
||||||
assert_eq!(changes.len(), 1);
|
assert_eq!(changes.len(), 1);
|
||||||
|
@ -117,7 +125,7 @@ mod tests {
|
||||||
|
|
||||||
#[test]
|
#[test]
|
||||||
fn it_generates_a_delete_record() {
|
fn it_generates_a_delete_record() {
|
||||||
let mut set: Changeset<Id, String> = Changeset::default();
|
let mut set: Changeset<Id, String> = Changeset::new();
|
||||||
let id1 = Id::new();
|
let id1 = Id::new();
|
||||||
set.delete(id1.clone());
|
set.delete(id1.clone());
|
||||||
let changes = Vec::from(set.clone());
|
let changes = Vec::from(set.clone());
|
||||||
|
@ -134,7 +142,7 @@ mod tests {
|
||||||
|
|
||||||
#[test]
|
#[test]
|
||||||
fn update_unrelated_records() {
|
fn update_unrelated_records() {
|
||||||
let mut set: Changeset<Id, String> = Changeset::default();
|
let mut set: Changeset<Id, String> = Changeset::new();
|
||||||
let id1 = Id::new();
|
let id1 = Id::new();
|
||||||
let id2 = Id::new();
|
let id2 = Id::new();
|
||||||
set.update(id1.clone(), "abcd".to_owned());
|
set.update(id1.clone(), "abcd".to_owned());
|
||||||
|
@ -147,7 +155,7 @@ mod tests {
|
||||||
|
|
||||||
#[test]
|
#[test]
|
||||||
fn delete_cancels_new() {
|
fn delete_cancels_new() {
|
||||||
let mut set: Changeset<Id, String> = Changeset::default();
|
let mut set: Changeset<Id, String> = Changeset::new();
|
||||||
let key = set.add("efgh".to_string());
|
let key = set.add("efgh".to_string());
|
||||||
set.delete(key);
|
set.delete(key);
|
||||||
let changes = Vec::from(set);
|
let changes = Vec::from(set);
|
||||||
|
@ -156,7 +164,7 @@ mod tests {
|
||||||
|
|
||||||
#[test]
|
#[test]
|
||||||
fn delete_cancels_update() {
|
fn delete_cancels_update() {
|
||||||
let mut set: Changeset<Id, String> = Changeset::default();
|
let mut set: Changeset<Id, String> = Changeset::new();
|
||||||
let id = Id::new();
|
let id = Id::new();
|
||||||
set.update(id.clone(), "efgh".to_owned());
|
set.update(id.clone(), "efgh".to_owned());
|
||||||
set.delete(id.clone());
|
set.delete(id.clone());
|
||||||
|
@ -167,7 +175,7 @@ mod tests {
|
||||||
|
|
||||||
#[test]
|
#[test]
|
||||||
fn update_atop_new_is_new() {
|
fn update_atop_new_is_new() {
|
||||||
let mut set: Changeset<Id, String> = Changeset::default();
|
let mut set: Changeset<Id, String> = Changeset::new();
|
||||||
let key = set.add("efgh".to_owned());
|
let key = set.add("efgh".to_owned());
|
||||||
set.update(key, "wxyz".to_owned());
|
set.update(key, "wxyz".to_owned());
|
||||||
let changes = Vec::from(set);
|
let changes = Vec::from(set);
|
||||||
|
@ -177,7 +185,7 @@ mod tests {
|
||||||
|
|
||||||
#[test]
|
#[test]
|
||||||
fn updates_get_squashed() {
|
fn updates_get_squashed() {
|
||||||
let mut set: Changeset<Id, String> = Changeset::default();
|
let mut set: Changeset<Id, String> = Changeset::new();
|
||||||
let id1 = Id::new();
|
let id1 = Id::new();
|
||||||
let id2 = Id::new();
|
let id2 = Id::new();
|
||||||
set.update(id1.clone(), "efgh".to_owned());
|
set.update(id1.clone(), "efgh".to_owned());
|
||||||
|
|
|
@ -1,14 +0,0 @@
|
||||||
[package]
|
|
||||||
name = "config-derive"
|
|
||||||
version = "0.1.0"
|
|
||||||
edition = "2021"
|
|
||||||
|
|
||||||
# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html
|
|
||||||
|
|
||||||
[lib]
|
|
||||||
proc-macro = true
|
|
||||||
|
|
||||||
[dependencies]
|
|
||||||
quote = { version = "1" }
|
|
||||||
syn = { version = "1", features = [ "extra-traits" ] }
|
|
||||||
|
|
|
@ -1,23 +0,0 @@
|
||||||
extern crate proc_macro;
|
|
||||||
|
|
||||||
use proc_macro::TokenStream;
|
|
||||||
use quote::quote;
|
|
||||||
|
|
||||||
use syn::{parse_macro_input, DeriveInput};
|
|
||||||
|
|
||||||
#[proc_macro_derive(ConfigOption)]
|
|
||||||
pub fn derive(input: TokenStream) -> TokenStream {
|
|
||||||
let DeriveInput { ident, .. } = parse_macro_input!(input as DeriveInput);
|
|
||||||
|
|
||||||
let result = quote! {
|
|
||||||
impl From<&Config> for Option<#ident> {
|
|
||||||
fn from(config: &Config) -> Self {
|
|
||||||
match config.values.get(&ConfigName::#ident) {
|
|
||||||
Some(ConfigOption::#ident(val)) => Some(val.clone()),
|
|
||||||
_ => None,
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
};
|
|
||||||
result.into()
|
|
||||||
}
|
|
|
@ -1,16 +0,0 @@
|
||||||
[package]
|
|
||||||
name = "config"
|
|
||||||
version = "0.1.0"
|
|
||||||
edition = "2021"
|
|
||||||
|
|
||||||
|
|
||||||
# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html
|
|
||||||
|
|
||||||
[dependencies]
|
|
||||||
config-derive = { path = "../config-derive" }
|
|
||||||
serde_json = { version = "1" }
|
|
||||||
serde = { version = "1", features = [ "derive" ] }
|
|
||||||
thiserror = { version = "1" }
|
|
||||||
|
|
||||||
[dev-dependencies]
|
|
||||||
cool_asserts = { version = "2" }
|
|
|
@ -1,160 +0,0 @@
|
||||||
/*
|
|
||||||
use std::{
|
|
||||||
collections::HashMap,
|
|
||||||
fs::File,
|
|
||||||
hash::Hash,
|
|
||||||
io::{ErrorKind, Read},
|
|
||||||
path::PathBuf,
|
|
||||||
};
|
|
||||||
*/
|
|
||||||
|
|
||||||
pub use config_derive::ConfigOption;
|
|
||||||
use thiserror::Error;
|
|
||||||
|
|
||||||
#[derive(Debug, Error)]
|
|
||||||
pub enum ConfigReadError {
|
|
||||||
#[error("Cannot read the configuration file: {0}")]
|
|
||||||
CannotRead(std::io::Error),
|
|
||||||
#[error("Cannot open the configuration file for reading: {0}")]
|
|
||||||
CannotOpen(std::io::Error),
|
|
||||||
#[error("Invalid json data found in the configurationfile: {0}")]
|
|
||||||
InvalidJSON(serde_json::Error),
|
|
||||||
}
|
|
||||||
|
|
||||||
#[macro_export]
|
|
||||||
macro_rules! define_config {
|
|
||||||
($($name:ident($struct:ident),)+) => (
|
|
||||||
#[derive(Clone, Debug, Hash, PartialEq, Eq, serde::Serialize, serde::Deserialize)]
|
|
||||||
pub enum ConfigName {
|
|
||||||
$($name),+
|
|
||||||
}
|
|
||||||
|
|
||||||
#[derive(Clone, Debug, serde::Serialize, serde::Deserialize)]
|
|
||||||
#[serde(untagged)]
|
|
||||||
pub enum ConfigOption {
|
|
||||||
$($name($struct)),+
|
|
||||||
}
|
|
||||||
|
|
||||||
#[derive(Clone, Debug)]
|
|
||||||
pub struct Config {
|
|
||||||
values: std::collections::HashMap<ConfigName, ConfigOption>,
|
|
||||||
}
|
|
||||||
|
|
||||||
impl Config {
|
|
||||||
pub fn new() -> Self {
|
|
||||||
Self {
|
|
||||||
values: std::collections::HashMap::new(),
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
pub fn from_path(config_path: std::path::PathBuf) -> Result<Self, $crate::ConfigReadError> {
|
|
||||||
let mut settings = config_path.clone();
|
|
||||||
settings.push("config");
|
|
||||||
|
|
||||||
match std::fs::File::open(settings) {
|
|
||||||
Ok(mut file) => {
|
|
||||||
let mut buf = String::new();
|
|
||||||
std::io::Read::read_to_string(&mut file, &mut buf)
|
|
||||||
.map_err(|err| $crate::ConfigReadError::CannotRead(err))?;
|
|
||||||
let values = serde_json::from_str(buf.as_ref())
|
|
||||||
.map_err(|err| $crate::ConfigReadError::InvalidJSON(err))?;
|
|
||||||
Ok(Self {
|
|
||||||
values,
|
|
||||||
})
|
|
||||||
}
|
|
||||||
Err(io_err) => {
|
|
||||||
match io_err.kind() {
|
|
||||||
std::io::ErrorKind::NotFound => {
|
|
||||||
/* create the path and an empty file */
|
|
||||||
Ok(Self {
|
|
||||||
values: std::collections::HashMap::new(),
|
|
||||||
})
|
|
||||||
}
|
|
||||||
_ => Err($crate::ConfigReadError::CannotOpen(io_err)),
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
pub fn set(&mut self, val: ConfigOption) {
|
|
||||||
let _ = match val {
|
|
||||||
$(ConfigOption::$struct(_) => self.values.insert(ConfigName::$name, val)),+
|
|
||||||
};
|
|
||||||
}
|
|
||||||
|
|
||||||
pub fn get<'a, T>(&'a self) -> Option<T>
|
|
||||||
where
|
|
||||||
Option<T>: From<&'a Self>,
|
|
||||||
{
|
|
||||||
self.into()
|
|
||||||
}
|
|
||||||
}
|
|
||||||
)
|
|
||||||
}
|
|
||||||
|
|
||||||
#[cfg(test)]
|
|
||||||
mod test {
|
|
||||||
use super::*;
|
|
||||||
use cool_asserts::assert_matches;
|
|
||||||
use serde::{Deserialize, Serialize};
|
|
||||||
use std::collections::HashMap;
|
|
||||||
use std::path::PathBuf;
|
|
||||||
|
|
||||||
define_config! {
|
|
||||||
DatabasePath(DatabasePath),
|
|
||||||
Me(Me),
|
|
||||||
}
|
|
||||||
|
|
||||||
#[derive(Clone, Debug, PartialEq, Eq, Serialize, Deserialize, ConfigOption)]
|
|
||||||
pub struct DatabasePath(PathBuf);
|
|
||||||
|
|
||||||
#[derive(Clone, Debug, PartialEq, Eq, Serialize, Deserialize)]
|
|
||||||
enum Rank {
|
|
||||||
Kyu(i8),
|
|
||||||
Dan(i8),
|
|
||||||
}
|
|
||||||
|
|
||||||
#[derive(Clone, Debug, PartialEq, Eq, Serialize, Deserialize, ConfigOption)]
|
|
||||||
pub struct Me {
|
|
||||||
name: String,
|
|
||||||
rank: Option<Rank>,
|
|
||||||
}
|
|
||||||
|
|
||||||
#[test]
|
|
||||||
fn it_can_set_and_get_options() {
|
|
||||||
let mut config: Config = Config::new();
|
|
||||||
config.set(ConfigOption::DatabasePath(DatabasePath(PathBuf::from(
|
|
||||||
"./fixtures/five_games",
|
|
||||||
))));
|
|
||||||
|
|
||||||
assert_eq!(
|
|
||||||
Some(DatabasePath(PathBuf::from("./fixtures/five_games"))),
|
|
||||||
config.get()
|
|
||||||
);
|
|
||||||
}
|
|
||||||
|
|
||||||
#[test]
|
|
||||||
fn it_can_serialize_and_deserialize() {
|
|
||||||
let mut config = Config::new();
|
|
||||||
config.set(ConfigOption::DatabasePath(DatabasePath(PathBuf::from(
|
|
||||||
"fixtures/five_games",
|
|
||||||
))));
|
|
||||||
config.set(ConfigOption::Me(Me {
|
|
||||||
name: "Savanni".to_owned(),
|
|
||||||
rank: Some(Rank::Kyu(10)),
|
|
||||||
}));
|
|
||||||
let s = serde_json::to_string(&config.values).unwrap();
|
|
||||||
println!("{}", s);
|
|
||||||
let values: HashMap<ConfigName, ConfigOption> = serde_json::from_str(s.as_ref()).unwrap();
|
|
||||||
println!("options: {:?}", values);
|
|
||||||
|
|
||||||
assert_matches!(values.get(&ConfigName::DatabasePath),
|
|
||||||
Some(ConfigOption::DatabasePath(ref db_path)) =>
|
|
||||||
assert_eq!(Some(db_path.clone()), config.get())
|
|
||||||
);
|
|
||||||
|
|
||||||
assert_matches!(values.get(&ConfigName::Me), Some(ConfigOption::Me(val)) =>
|
|
||||||
assert_eq!(Some(val.clone()), config.get())
|
|
||||||
);
|
|
||||||
}
|
|
||||||
}
|
|
|
@ -3,6 +3,7 @@ name = "coordinates"
|
||||||
version = "0.1.0"
|
version = "0.1.0"
|
||||||
edition = "2021"
|
edition = "2021"
|
||||||
license = "GPL-3.0-only"
|
license = "GPL-3.0-only"
|
||||||
|
license-file = "../COPYING"
|
||||||
|
|
||||||
# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html
|
# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html
|
||||||
|
|
||||||
|
|
|
@ -0,0 +1,9 @@
|
||||||
|
|
||||||
|
dev:
|
||||||
|
cargo watch -x build
|
||||||
|
|
||||||
|
test:
|
||||||
|
cargo watch -x test
|
||||||
|
|
||||||
|
test-once:
|
||||||
|
cargo test
|
|
@ -33,12 +33,12 @@ fn main() {
|
||||||
|
|
||||||
let filename = args
|
let filename = args
|
||||||
.next()
|
.next()
|
||||||
.map(PathBuf::from)
|
.map(|p| PathBuf::from(p))
|
||||||
.expect("A filename is required");
|
.expect("A filename is required");
|
||||||
let size = args
|
let size = args
|
||||||
.next()
|
.next()
|
||||||
.and_then(|s| s.parse::<usize>().ok())
|
.and_then(|s| s.parse::<usize>().ok())
|
||||||
.unwrap_or(3);
|
.unwrap_or(3);
|
||||||
let map: hex_map::Map<MapVal> = hex_map::Map::new_hexagonal(size);
|
let map: hex_map::Map<MapVal> = hex_map::Map::new_hexagonal(size);
|
||||||
hex_map::write_file(filename, map).expect("to write file");
|
hex_map::write_file(filename, map);
|
||||||
}
|
}
|
||||||
|
|
|
@ -10,9 +10,10 @@ Luminescent Dreams Tools is distributed in the hope that it will be useful, but
|
||||||
You should have received a copy of the GNU General Public License along with Lumeto. If not, see <https://www.gnu.org/licenses/>.
|
You should have received a copy of the GNU General Public License along with Lumeto. If not, see <https://www.gnu.org/licenses/>.
|
||||||
*/
|
*/
|
||||||
|
|
||||||
/// This module contains the elements of cube coordinates.
|
/// Ĉi-tiu modulo enhavas la elementojn por kub-koordinato.
|
||||||
///
|
///
|
||||||
/// This code is based on https://www.redblobgames.com/grids/hexagons/
|
/// This code is based on https://www.redblobgames.com/grids/hexagons/
|
||||||
|
use crate::Error;
|
||||||
use std::collections::HashSet;
|
use std::collections::HashSet;
|
||||||
|
|
||||||
/// An address within the hex coordinate system
|
/// An address within the hex coordinate system
|
||||||
|
@ -61,7 +62,7 @@ impl AxialAddr {
|
||||||
pub fn is_adjacent(&self, dest: &AxialAddr) -> bool {
|
pub fn is_adjacent(&self, dest: &AxialAddr) -> bool {
|
||||||
dest.adjacencies()
|
dest.adjacencies()
|
||||||
.collect::<Vec<AxialAddr>>()
|
.collect::<Vec<AxialAddr>>()
|
||||||
.contains(self)
|
.contains(&self)
|
||||||
}
|
}
|
||||||
|
|
||||||
/// Measure the distance to a destination
|
/// Measure the distance to a destination
|
||||||
|
@ -78,7 +79,7 @@ impl AxialAddr {
|
||||||
|
|
||||||
positions.push(item);
|
positions.push(item);
|
||||||
|
|
||||||
while !positions.is_empty() {
|
while positions.len() > 0 {
|
||||||
let elem = positions.remove(0);
|
let elem = positions.remove(0);
|
||||||
for adj in elem.adjacencies() {
|
for adj in elem.adjacencies() {
|
||||||
if self.distance(&adj) <= distance && !results.contains(&adj) {
|
if self.distance(&adj) <= distance && !results.contains(&adj) {
|
||||||
|
|
|
@ -14,6 +14,7 @@ use crate::{hex::AxialAddr, Error};
|
||||||
use nom::{
|
use nom::{
|
||||||
bytes::complete::tag,
|
bytes::complete::tag,
|
||||||
character::complete::alphanumeric1,
|
character::complete::alphanumeric1,
|
||||||
|
error::ParseError,
|
||||||
multi::many1,
|
multi::many1,
|
||||||
sequence::{delimited, separated_pair},
|
sequence::{delimited, separated_pair},
|
||||||
Finish, IResult, Parser,
|
Finish, IResult, Parser,
|
||||||
|
@ -80,7 +81,7 @@ pub fn parse_data<'a, A: Default + From<String>>(
|
||||||
}
|
}
|
||||||
|
|
||||||
let cells = data
|
let cells = data
|
||||||
.map(|line| parse_line::<A>(line).unwrap())
|
.map(|line| parse_line::<A>(&line).unwrap())
|
||||||
.collect::<Vec<(AxialAddr, A)>>();
|
.collect::<Vec<(AxialAddr, A)>>();
|
||||||
let cells = cells.into_iter().collect::<HashMap<AxialAddr, A>>();
|
let cells = cells.into_iter().collect::<HashMap<AxialAddr, A>>();
|
||||||
Map { cells }
|
Map { cells }
|
||||||
|
|
|
@ -9,9 +9,9 @@ Lumeto is distributed in the hope that it will be useful, but WITHOUT ANY WARRAN
|
||||||
|
|
||||||
You should have received a copy of the GNU General Public License along with Lumeto. If not, see <https://www.gnu.org/licenses/>.
|
You should have received a copy of the GNU General Public License along with Lumeto. If not, see <https://www.gnu.org/licenses/>.
|
||||||
*/
|
*/
|
||||||
use thiserror::Error;
|
use thiserror;
|
||||||
|
|
||||||
#[derive(Debug, Error)]
|
#[derive(Debug, thiserror::Error)]
|
||||||
pub enum Error {
|
pub enum Error {
|
||||||
#[error("IO error on reading or writing: {0}")]
|
#[error("IO error on reading or writing: {0}")]
|
||||||
IO(std::io::Error),
|
IO(std::io::Error),
|
||||||
|
|
File diff suppressed because it is too large
Load Diff
|
@ -2,7 +2,6 @@
|
||||||
name = "cyberpunk-splash"
|
name = "cyberpunk-splash"
|
||||||
version = "0.1.0"
|
version = "0.1.0"
|
||||||
edition = "2021"
|
edition = "2021"
|
||||||
license = "GPL-3.0-only"
|
|
||||||
|
|
||||||
# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html
|
# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html
|
||||||
|
|
||||||
|
|
|
@ -2,8 +2,8 @@ use cairo::{
|
||||||
Context, FontSlant, FontWeight, Format, ImageSurface, LineCap, LinearGradient, Pattern,
|
Context, FontSlant, FontWeight, Format, ImageSurface, LineCap, LinearGradient, Pattern,
|
||||||
TextExtents,
|
TextExtents,
|
||||||
};
|
};
|
||||||
use glib::Object;
|
use glib::{GString, Object};
|
||||||
use gtk::{prelude::*, subclass::prelude::*, EventControllerKey};
|
use gtk::{gdk::Key, prelude::*, subclass::prelude::*, EventControllerKey};
|
||||||
use std::{
|
use std::{
|
||||||
cell::RefCell,
|
cell::RefCell,
|
||||||
rc::Rc,
|
rc::Rc,
|
||||||
|
@ -14,6 +14,12 @@ use std::{
|
||||||
const WIDTH: i32 = 1600;
|
const WIDTH: i32 = 1600;
|
||||||
const HEIGHT: i32 = 600;
|
const HEIGHT: i32 = 600;
|
||||||
|
|
||||||
|
#[derive(Clone, Copy, Debug)]
|
||||||
|
enum Event {
|
||||||
|
Frames(u8),
|
||||||
|
Time(Duration),
|
||||||
|
}
|
||||||
|
|
||||||
#[derive(Clone, Copy, Debug)]
|
#[derive(Clone, Copy, Debug)]
|
||||||
pub enum State {
|
pub enum State {
|
||||||
Running {
|
Running {
|
||||||
|
@ -44,7 +50,7 @@ impl State {
|
||||||
*self = Self::Running {
|
*self = Self::Running {
|
||||||
last_update: Instant::now(),
|
last_update: Instant::now(),
|
||||||
deadline: Instant::now() + *time_remaining,
|
deadline: Instant::now() + *time_remaining,
|
||||||
timeout: *timeout,
|
timeout: timeout.clone(),
|
||||||
};
|
};
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
@ -56,7 +62,7 @@ impl State {
|
||||||
{
|
{
|
||||||
*self = Self::Paused {
|
*self = Self::Paused {
|
||||||
time_remaining: *deadline - Instant::now(),
|
time_remaining: *deadline - Instant::now(),
|
||||||
timeout: *timeout,
|
timeout: timeout.clone(),
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
@ -102,13 +108,13 @@ impl TimeoutAnimation {
|
||||||
fn tick(&mut self, frames_elapsed: u8) {
|
fn tick(&mut self, frames_elapsed: u8) {
|
||||||
let step_size = 1. / (self.duration * 60.);
|
let step_size = 1. / (self.duration * 60.);
|
||||||
if self.ascending {
|
if self.ascending {
|
||||||
self.intensity += step_size * frames_elapsed as f64;
|
self.intensity = self.intensity + step_size * frames_elapsed as f64;
|
||||||
if self.intensity > 1. {
|
if self.intensity > 1. {
|
||||||
self.intensity = 1.0;
|
self.intensity = 1.0;
|
||||||
self.ascending = false;
|
self.ascending = false;
|
||||||
}
|
}
|
||||||
} else {
|
} else {
|
||||||
self.intensity -= step_size * frames_elapsed as f64;
|
self.intensity = self.intensity - step_size * frames_elapsed as f64;
|
||||||
if self.intensity < 0. {
|
if self.intensity < 0. {
|
||||||
self.intensity = 0.0;
|
self.intensity = 0.0;
|
||||||
self.ascending = true;
|
self.ascending = true;
|
||||||
|
@ -137,14 +143,6 @@ impl SplashPrivate {
|
||||||
}
|
}
|
||||||
|
|
||||||
fn redraw_background(&self) {
|
fn redraw_background(&self) {
|
||||||
let pen = GlowPen::new(
|
|
||||||
*self.width.borrow(),
|
|
||||||
*self.height.borrow(),
|
|
||||||
2.,
|
|
||||||
8.,
|
|
||||||
(0.7, 0., 1.),
|
|
||||||
);
|
|
||||||
|
|
||||||
let background =
|
let background =
|
||||||
ImageSurface::create(Format::Rgb24, *self.width.borrow(), *self.height.borrow())
|
ImageSurface::create(Format::Rgb24, *self.width.borrow(), *self.height.borrow())
|
||||||
.unwrap();
|
.unwrap();
|
||||||
|
@ -154,43 +152,6 @@ impl SplashPrivate {
|
||||||
let _ = context.paint();
|
let _ = context.paint();
|
||||||
|
|
||||||
context.select_font_face("Alegreya Sans SC", FontSlant::Normal, FontWeight::Bold);
|
context.select_font_face("Alegreya Sans SC", FontSlant::Normal, FontWeight::Bold);
|
||||||
|
|
||||||
{
|
|
||||||
context.set_source_rgb(0.7, 0., 1.);
|
|
||||||
|
|
||||||
let hashtag = "#CodingTogether";
|
|
||||||
context.set_font_size(64.);
|
|
||||||
let extents = context.text_extents(hashtag).unwrap();
|
|
||||||
|
|
||||||
context.move_to(20., extents.height() + 40.);
|
|
||||||
let _ = context.show_text(hashtag);
|
|
||||||
|
|
||||||
AsymLine {
|
|
||||||
orientation: gtk::Orientation::Horizontal,
|
|
||||||
start_x: 10.,
|
|
||||||
start_y: extents.height() + 10.,
|
|
||||||
start_length: 0.,
|
|
||||||
height: extents.height() / 2.,
|
|
||||||
total_length: extents.width() + extents.height() / 2.,
|
|
||||||
invert: false,
|
|
||||||
}
|
|
||||||
.draw(&pen);
|
|
||||||
pen.stroke();
|
|
||||||
|
|
||||||
AsymLine {
|
|
||||||
orientation: gtk::Orientation::Horizontal,
|
|
||||||
start_x: 20.,
|
|
||||||
start_y: extents.height() + 60.,
|
|
||||||
start_length: extents.width(),
|
|
||||||
height: extents.height() / 2.,
|
|
||||||
total_length: extents.width() + extents.height() / 2.,
|
|
||||||
invert: false,
|
|
||||||
}
|
|
||||||
.draw(&pen);
|
|
||||||
pen.stroke();
|
|
||||||
}
|
|
||||||
|
|
||||||
{
|
|
||||||
context.set_font_size(128.);
|
context.set_font_size(128.);
|
||||||
|
|
||||||
let center_x = *self.width.borrow() as f64 / 2.;
|
let center_x = *self.width.borrow() as f64 / 2.;
|
||||||
|
@ -214,8 +175,11 @@ impl SplashPrivate {
|
||||||
invert: false,
|
invert: false,
|
||||||
};
|
};
|
||||||
|
|
||||||
title_cutout.draw(&pen);
|
context.set_line_cap(LineCap::Round);
|
||||||
pen.stroke();
|
context.set_source_rgb(0.7, 0., 1.);
|
||||||
|
context.set_line_width(2.);
|
||||||
|
title_cutout.draw(&context);
|
||||||
|
let _ = context.stroke();
|
||||||
}
|
}
|
||||||
|
|
||||||
{
|
{
|
||||||
|
@ -234,9 +198,9 @@ impl SplashPrivate {
|
||||||
let _ = context.set_source(gradient);
|
let _ = context.set_source(gradient);
|
||||||
let _ = context.show_text(&self.text.borrow());
|
let _ = context.show_text(&self.text.borrow());
|
||||||
}
|
}
|
||||||
}
|
|
||||||
|
|
||||||
{
|
{
|
||||||
|
context.set_source_rgb(0.7, 0., 1.);
|
||||||
AsymLine {
|
AsymLine {
|
||||||
orientation: gtk::Orientation::Horizontal,
|
orientation: gtk::Orientation::Horizontal,
|
||||||
start_x: 100.,
|
start_x: 100.,
|
||||||
|
@ -246,8 +210,8 @@ impl SplashPrivate {
|
||||||
total_length: 650.,
|
total_length: 650.,
|
||||||
invert: true,
|
invert: true,
|
||||||
}
|
}
|
||||||
.draw(&pen);
|
.draw(&context);
|
||||||
pen.stroke();
|
let _ = context.stroke();
|
||||||
}
|
}
|
||||||
|
|
||||||
{
|
{
|
||||||
|
@ -261,14 +225,10 @@ impl SplashPrivate {
|
||||||
total_length: 650.,
|
total_length: 650.,
|
||||||
invert: false,
|
invert: false,
|
||||||
}
|
}
|
||||||
.draw(&pen);
|
.draw(&context);
|
||||||
pen.stroke();
|
let _ = context.stroke();
|
||||||
}
|
}
|
||||||
|
|
||||||
let tracery = pen.finish();
|
|
||||||
let _ = context.set_source(tracery);
|
|
||||||
let _ = context.paint();
|
|
||||||
|
|
||||||
let background = context.pop_group().unwrap();
|
let background = context.pop_group().unwrap();
|
||||||
|
|
||||||
*self.background.borrow_mut() = background;
|
*self.background.borrow_mut() = background;
|
||||||
|
@ -326,7 +286,7 @@ impl Splash {
|
||||||
let _ = context.set_source(&*background);
|
let _ = context.set_source(&*background);
|
||||||
let _ = context.paint();
|
let _ = context.paint();
|
||||||
|
|
||||||
let state = *s.imp().state.borrow();
|
let state = s.imp().state.borrow().clone();
|
||||||
|
|
||||||
let time = match state {
|
let time = match state {
|
||||||
State::Running { deadline, .. } => deadline - Instant::now(),
|
State::Running { deadline, .. } => deadline - Instant::now(),
|
||||||
|
@ -352,7 +312,7 @@ impl Splash {
|
||||||
|
|
||||||
let mut saved_extents = s.imp().time_extents.borrow_mut();
|
let mut saved_extents = s.imp().time_extents.borrow_mut();
|
||||||
if saved_extents.is_none() {
|
if saved_extents.is_none() {
|
||||||
*saved_extents = Some(time_extents);
|
*saved_extents = Some(time_extents.clone());
|
||||||
}
|
}
|
||||||
|
|
||||||
let time_baseline_x = center_x - time_extents.width() / 2.;
|
let time_baseline_x = center_x - time_extents.width() / 2.;
|
||||||
|
@ -365,8 +325,8 @@ impl Splash {
|
||||||
time_baseline_y,
|
time_baseline_y,
|
||||||
);
|
);
|
||||||
let (running, timeout_animation) = match state {
|
let (running, timeout_animation) = match state {
|
||||||
State::Running { timeout, .. } => (true, timeout),
|
State::Running { timeout, .. } => (true, timeout.clone()),
|
||||||
State::Paused { timeout, .. } => (false, timeout),
|
State::Paused { timeout, .. } => (false, timeout.clone()),
|
||||||
};
|
};
|
||||||
match timeout_animation {
|
match timeout_animation {
|
||||||
Some(ref animation) => {
|
Some(ref animation) => {
|
||||||
|
@ -388,7 +348,8 @@ impl Splash {
|
||||||
let _ = context.show_text(&time);
|
let _ = context.show_text(&time);
|
||||||
};
|
};
|
||||||
|
|
||||||
if let Some(extents) = *s.imp().time_extents.borrow() {
|
match *s.imp().time_extents.borrow() {
|
||||||
|
Some(extents) => {
|
||||||
context.set_source_rgb(0.7, 0.0, 1.0);
|
context.set_source_rgb(0.7, 0.0, 1.0);
|
||||||
let time_meter = SlashMeter {
|
let time_meter = SlashMeter {
|
||||||
orientation: gtk::Orientation::Horizontal,
|
orientation: gtk::Orientation::Horizontal,
|
||||||
|
@ -399,7 +360,9 @@ impl Splash {
|
||||||
height: 60.,
|
height: 60.,
|
||||||
length: 100.,
|
length: 100.,
|
||||||
};
|
};
|
||||||
time_meter.draw(context);
|
time_meter.draw(&context);
|
||||||
|
}
|
||||||
|
None => {}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
});
|
});
|
||||||
|
@ -431,7 +394,7 @@ struct AsymLineCutout {
|
||||||
}
|
}
|
||||||
|
|
||||||
impl AsymLineCutout {
|
impl AsymLineCutout {
|
||||||
fn draw(&self, pen: &impl Pen) {
|
fn draw(&self, context: &Context) {
|
||||||
let dodge = if self.invert {
|
let dodge = if self.invert {
|
||||||
self.height
|
self.height
|
||||||
} else {
|
} else {
|
||||||
|
@ -439,17 +402,17 @@ impl AsymLineCutout {
|
||||||
};
|
};
|
||||||
match self.orientation {
|
match self.orientation {
|
||||||
gtk::Orientation::Horizontal => {
|
gtk::Orientation::Horizontal => {
|
||||||
pen.move_to(self.start_x, self.start_y);
|
context.move_to(self.start_x, self.start_y);
|
||||||
pen.line_to(self.start_x + self.start_length, self.start_y);
|
context.line_to(self.start_x + self.start_length, self.start_y);
|
||||||
pen.line_to(
|
context.line_to(
|
||||||
self.start_x + self.start_length + self.height,
|
self.start_x + self.start_length + self.height,
|
||||||
self.start_y + dodge,
|
self.start_y + dodge,
|
||||||
);
|
);
|
||||||
pen.line_to(
|
context.line_to(
|
||||||
self.start_x + self.start_length + self.height + self.cutout_length,
|
self.start_x + self.start_length + self.height + self.cutout_length,
|
||||||
self.start_y + dodge,
|
self.start_y + dodge,
|
||||||
);
|
);
|
||||||
pen.line_to(
|
context.line_to(
|
||||||
self.start_x
|
self.start_x
|
||||||
+ self.start_length
|
+ self.start_length
|
||||||
+ self.height
|
+ self.height
|
||||||
|
@ -457,20 +420,20 @@ impl AsymLineCutout {
|
||||||
+ (self.height / 2.),
|
+ (self.height / 2.),
|
||||||
self.start_y + dodge / 2.,
|
self.start_y + dodge / 2.,
|
||||||
);
|
);
|
||||||
pen.line_to(self.total_length, self.start_y + dodge / 2.);
|
context.line_to(self.total_length, self.start_y + dodge / 2.);
|
||||||
}
|
}
|
||||||
gtk::Orientation::Vertical => {
|
gtk::Orientation::Vertical => {
|
||||||
pen.move_to(self.start_x, self.start_y);
|
context.move_to(self.start_x, self.start_y);
|
||||||
pen.line_to(self.start_x, self.start_y + self.start_length);
|
context.line_to(self.start_x, self.start_y + self.start_length);
|
||||||
pen.line_to(
|
context.line_to(
|
||||||
self.start_x + dodge,
|
self.start_x + dodge,
|
||||||
self.start_y + self.start_length + self.height,
|
self.start_y + self.start_length + self.height,
|
||||||
);
|
);
|
||||||
pen.line_to(
|
context.line_to(
|
||||||
self.start_x + dodge,
|
self.start_x + dodge,
|
||||||
self.start_y + self.start_length + self.height + self.cutout_length,
|
self.start_y + self.start_length + self.height + self.cutout_length,
|
||||||
);
|
);
|
||||||
pen.line_to(
|
context.line_to(
|
||||||
self.start_x + dodge / 2.,
|
self.start_x + dodge / 2.,
|
||||||
self.start_y
|
self.start_y
|
||||||
+ self.start_length
|
+ self.start_length
|
||||||
|
@ -478,7 +441,7 @@ impl AsymLineCutout {
|
||||||
+ self.cutout_length
|
+ self.cutout_length
|
||||||
+ (self.height / 2.),
|
+ (self.height / 2.),
|
||||||
);
|
);
|
||||||
pen.line_to(self.start_x + dodge / 2., self.total_length);
|
context.line_to(self.start_x + dodge / 2., self.total_length);
|
||||||
}
|
}
|
||||||
_ => panic!("unknown orientation"),
|
_ => panic!("unknown orientation"),
|
||||||
}
|
}
|
||||||
|
@ -496,7 +459,7 @@ struct AsymLine {
|
||||||
}
|
}
|
||||||
|
|
||||||
impl AsymLine {
|
impl AsymLine {
|
||||||
fn draw(&self, pen: &impl Pen) {
|
fn draw(&self, context: &Context) {
|
||||||
let dodge = if self.invert {
|
let dodge = if self.invert {
|
||||||
self.height
|
self.height
|
||||||
} else {
|
} else {
|
||||||
|
@ -504,13 +467,13 @@ impl AsymLine {
|
||||||
};
|
};
|
||||||
match self.orientation {
|
match self.orientation {
|
||||||
gtk::Orientation::Horizontal => {
|
gtk::Orientation::Horizontal => {
|
||||||
pen.move_to(self.start_x, self.start_y);
|
context.move_to(self.start_x, self.start_y);
|
||||||
pen.line_to(self.start_x + self.start_length, self.start_y);
|
context.line_to(self.start_x + self.start_length, self.start_y);
|
||||||
pen.line_to(
|
context.line_to(
|
||||||
self.start_x + self.start_length + self.height,
|
self.start_x + self.start_length + self.height,
|
||||||
self.start_y + dodge,
|
self.start_y + dodge,
|
||||||
);
|
);
|
||||||
pen.line_to(self.start_x + self.total_length, self.start_y + dodge);
|
context.line_to(self.start_x + self.total_length, self.start_y + dodge);
|
||||||
}
|
}
|
||||||
gtk::Orientation::Vertical => {}
|
gtk::Orientation::Vertical => {}
|
||||||
_ => panic!("unknown orientation"),
|
_ => panic!("unknown orientation"),
|
||||||
|
@ -518,6 +481,36 @@ impl AsymLine {
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
struct RoundedRectangle {
|
||||||
|
x: f64,
|
||||||
|
y: f64,
|
||||||
|
width: f64,
|
||||||
|
height: f64,
|
||||||
|
}
|
||||||
|
|
||||||
|
impl RoundedRectangle {
|
||||||
|
fn draw(&self, context: &Context) {
|
||||||
|
context.arc(
|
||||||
|
self.x,
|
||||||
|
self.y - self.height / 2.,
|
||||||
|
self.height / 2.,
|
||||||
|
0.5 * std::f64::consts::PI,
|
||||||
|
1.5 * std::f64::consts::PI,
|
||||||
|
);
|
||||||
|
let _ = context.fill();
|
||||||
|
context.arc(
|
||||||
|
self.x + self.width,
|
||||||
|
self.y - self.height / 2.,
|
||||||
|
self.height / 2.,
|
||||||
|
1.5 * std::f64::consts::PI,
|
||||||
|
0.5 * std::f64::consts::PI,
|
||||||
|
);
|
||||||
|
let _ = context.fill();
|
||||||
|
context.rectangle(self.x, self.y, self.width, -self.height);
|
||||||
|
let _ = context.fill();
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
struct SlashMeter {
|
struct SlashMeter {
|
||||||
orientation: gtk::Orientation,
|
orientation: gtk::Orientation,
|
||||||
start_x: f64,
|
start_x: f64,
|
||||||
|
@ -534,7 +527,7 @@ impl SlashMeter {
|
||||||
gtk::Orientation::Horizontal => {
|
gtk::Orientation::Horizontal => {
|
||||||
let angle: f64 = 0.8;
|
let angle: f64 = 0.8;
|
||||||
let run = self.height / angle.tan();
|
let run = self.height / angle.tan();
|
||||||
let width = self.length / (self.count as f64 * 2.);
|
let width = self.length as f64 / (self.count as f64 * 2.);
|
||||||
|
|
||||||
for c in 0..self.count {
|
for c in 0..self.count {
|
||||||
context.set_line_width(1.);
|
context.set_line_width(1.);
|
||||||
|
@ -558,74 +551,6 @@ impl SlashMeter {
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
trait Pen {
|
|
||||||
fn move_to(&self, x: f64, y: f64);
|
|
||||||
fn line_to(&self, x: f64, y: f64);
|
|
||||||
fn stroke(&self);
|
|
||||||
|
|
||||||
fn finish(self) -> Pattern;
|
|
||||||
}
|
|
||||||
|
|
||||||
struct GlowPen {
|
|
||||||
blur_context: Context,
|
|
||||||
draw_context: Context,
|
|
||||||
}
|
|
||||||
|
|
||||||
impl GlowPen {
|
|
||||||
fn new(
|
|
||||||
width: i32,
|
|
||||||
height: i32,
|
|
||||||
line_width: f64,
|
|
||||||
blur_line_width: f64,
|
|
||||||
color: (f64, f64, f64),
|
|
||||||
) -> Self {
|
|
||||||
let blur_context =
|
|
||||||
Context::new(ImageSurface::create(Format::Rgb24, width, height).unwrap()).unwrap();
|
|
||||||
blur_context.set_line_width(blur_line_width);
|
|
||||||
blur_context.set_source_rgba(color.0, color.1, color.2, 0.5);
|
|
||||||
blur_context.push_group();
|
|
||||||
blur_context.set_line_cap(LineCap::Round);
|
|
||||||
|
|
||||||
let draw_context =
|
|
||||||
Context::new(ImageSurface::create(Format::Rgb24, width, height).unwrap()).unwrap();
|
|
||||||
draw_context.set_line_width(line_width);
|
|
||||||
draw_context.set_source_rgb(color.0, color.1, color.2);
|
|
||||||
draw_context.push_group();
|
|
||||||
draw_context.set_line_cap(LineCap::Round);
|
|
||||||
|
|
||||||
Self {
|
|
||||||
blur_context,
|
|
||||||
draw_context,
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
impl Pen for GlowPen {
|
|
||||||
fn move_to(&self, x: f64, y: f64) {
|
|
||||||
self.blur_context.move_to(x, y);
|
|
||||||
self.draw_context.move_to(x, y);
|
|
||||||
}
|
|
||||||
|
|
||||||
fn line_to(&self, x: f64, y: f64) {
|
|
||||||
self.blur_context.line_to(x, y);
|
|
||||||
self.draw_context.line_to(x, y);
|
|
||||||
}
|
|
||||||
|
|
||||||
fn stroke(&self) {
|
|
||||||
self.blur_context.stroke().expect("to draw the blur line");
|
|
||||||
self.draw_context
|
|
||||||
.stroke()
|
|
||||||
.expect("to draw the regular line");
|
|
||||||
}
|
|
||||||
|
|
||||||
fn finish(self) -> Pattern {
|
|
||||||
let foreground = self.draw_context.pop_group().unwrap();
|
|
||||||
self.blur_context.set_source(foreground).unwrap();
|
|
||||||
self.blur_context.paint().unwrap();
|
|
||||||
self.blur_context.pop_group().unwrap()
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
fn main() {
|
fn main() {
|
||||||
let app = gtk::Application::builder()
|
let app = gtk::Application::builder()
|
||||||
.application_id("com.luminescent-dreams.cyberpunk-splash")
|
.application_id("com.luminescent-dreams.cyberpunk-splash")
|
||||||
|
@ -665,7 +590,7 @@ fn main() {
|
||||||
let countdown = match options.lookup::<String>("countdown") {
|
let countdown = match options.lookup::<String>("countdown") {
|
||||||
Ok(Some(countdown_str)) => {
|
Ok(Some(countdown_str)) => {
|
||||||
let parts = countdown_str.split(':').collect::<Vec<&str>>();
|
let parts = countdown_str.split(':').collect::<Vec<&str>>();
|
||||||
match parts.len() {
|
let duration = match parts.len() {
|
||||||
2 => {
|
2 => {
|
||||||
let minutes = parts[0].parse::<u64>().unwrap();
|
let minutes = parts[0].parse::<u64>().unwrap();
|
||||||
let seconds = parts[1].parse::<u64>().unwrap();
|
let seconds = parts[1].parse::<u64>().unwrap();
|
||||||
|
@ -676,7 +601,8 @@ fn main() {
|
||||||
Duration::from_secs(seconds)
|
Duration::from_secs(seconds)
|
||||||
}
|
}
|
||||||
_ => Duration::from_secs(300),
|
_ => Duration::from_secs(300),
|
||||||
}
|
};
|
||||||
|
duration
|
||||||
}
|
}
|
||||||
_ => Duration::from_secs(300),
|
_ => Duration::from_secs(300),
|
||||||
};
|
};
|
||||||
|
@ -708,7 +634,7 @@ fn main() {
|
||||||
let window = gtk::ApplicationWindow::new(app);
|
let window = gtk::ApplicationWindow::new(app);
|
||||||
window.present();
|
window.present();
|
||||||
|
|
||||||
let splash = Splash::new(title.read().unwrap().clone(), *state.read().unwrap());
|
let splash = Splash::new(title.read().unwrap().clone(), state.read().unwrap().clone());
|
||||||
|
|
||||||
window.set_child(Some(&splash));
|
window.set_child(Some(&splash));
|
||||||
|
|
||||||
|
@ -746,7 +672,7 @@ fn main() {
|
||||||
loop {
|
loop {
|
||||||
std::thread::sleep(Duration::from_millis(1000 / 60));
|
std::thread::sleep(Duration::from_millis(1000 / 60));
|
||||||
state.write().unwrap().run(Instant::now());
|
state.write().unwrap().run(Instant::now());
|
||||||
let _ = gtk_tx.send(*state.read().unwrap());
|
let _ = gtk_tx.send(state.read().unwrap().clone());
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
});
|
});
|
||||||
|
|
File diff suppressed because it is too large
Load Diff
|
@ -1,32 +0,0 @@
|
||||||
[package]
|
|
||||||
name = "dashboard"
|
|
||||||
version = "0.1.1"
|
|
||||||
edition = "2018"
|
|
||||||
|
|
||||||
# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html
|
|
||||||
|
|
||||||
[dependencies]
|
|
||||||
adw = { version = "0.4", package = "libadwaita", features = [ "v1_2" ] }
|
|
||||||
cairo-rs = { version = "0.17" }
|
|
||||||
chrono = { version = "0.4", features = ["serde"] }
|
|
||||||
fluent-ergonomics = { path = "../fluent-ergonomics/" }
|
|
||||||
fluent = { version = "0.16" }
|
|
||||||
futures = { version = "0.3" }
|
|
||||||
geo-types = { path = "../geo-types/" }
|
|
||||||
gio = { version = "0.17" }
|
|
||||||
glib = { version = "0.17" }
|
|
||||||
gdk = { version = "0.6", package = "gdk4" }
|
|
||||||
gtk = { version = "0.6", package = "gtk4" }
|
|
||||||
ifc = { path = "../ifc/" }
|
|
||||||
lazy_static = { version = "1.4" }
|
|
||||||
memorycache = { path = "../memorycache/" }
|
|
||||||
reqwest = { version = "0.11", features = ["json"] }
|
|
||||||
serde_derive = { version = "1" }
|
|
||||||
serde_json = { version = "1" }
|
|
||||||
serde = { version = "1" }
|
|
||||||
tokio = { version = "1", features = ["full"] }
|
|
||||||
unic-langid = { version = "0.9" }
|
|
||||||
|
|
||||||
[build-dependencies]
|
|
||||||
glib-build-tools = "0.16"
|
|
||||||
|
|
|
@ -1,7 +0,0 @@
|
||||||
fn main() {
|
|
||||||
glib_build_tools::compile_resources(
|
|
||||||
"resources",
|
|
||||||
"resources/gresources.xml",
|
|
||||||
"com.luminescent-dreams.dashboard.gresource",
|
|
||||||
);
|
|
||||||
}
|
|
|
@ -1,6 +0,0 @@
|
||||||
[Desktop Entry]
|
|
||||||
Type=Application
|
|
||||||
Version=1.0
|
|
||||||
Name=dashboard
|
|
||||||
Comment=My personal system dashboard
|
|
||||||
Exec=dashboard
|
|
|
@ -1,12 +0,0 @@
|
||||||
#!/usr/bin/env bash
|
|
||||||
|
|
||||||
set -euo pipefail
|
|
||||||
|
|
||||||
VERSION=`cat Cargo.toml | grep "^version =" | sed -r 's/^version = "(.+)"$/\1/'`
|
|
||||||
|
|
||||||
mkdir -p dist
|
|
||||||
cp dashboard.desktop dist
|
|
||||||
cp ../target/release/dashboard dist
|
|
||||||
strip dist/dashboard
|
|
||||||
tar -czf dashboard-${VERSION}.tgz dist/
|
|
||||||
|
|
|
@ -1,7 +0,0 @@
|
||||||
<?xml version="1.0" encoding="UTF-8"?>
|
|
||||||
<gresources>
|
|
||||||
<gresource prefix="/com/luminescent-dreams/dashboard/">
|
|
||||||
<file>style.css</file>
|
|
||||||
</gresource>
|
|
||||||
</gresources>
|
|
||||||
|
|
|
@ -1,9 +0,0 @@
|
||||||
label {
|
|
||||||
font-size: 200%;
|
|
||||||
padding: 4px;
|
|
||||||
}
|
|
||||||
|
|
||||||
.highlight {
|
|
||||||
color: @accent_fg_color;
|
|
||||||
background-color: @accent_bg_color;
|
|
||||||
}
|
|
|
@ -1,74 +0,0 @@
|
||||||
use crate::{
|
|
||||||
components::{Date, Events, TransitCard, TransitClock},
|
|
||||||
types::State,
|
|
||||||
};
|
|
||||||
use adw::prelude::AdwApplicationWindowExt;
|
|
||||||
use gio::resources_lookup_data;
|
|
||||||
use gtk::{prelude::*, STYLE_PROVIDER_PRIORITY_USER};
|
|
||||||
|
|
||||||
#[derive(Clone)]
|
|
||||||
pub struct ApplicationWindow {
|
|
||||||
pub window: adw::ApplicationWindow,
|
|
||||||
pub date_label: Date,
|
|
||||||
pub events: Events,
|
|
||||||
pub transit_card: TransitCard,
|
|
||||||
pub transit_clock: TransitClock,
|
|
||||||
}
|
|
||||||
|
|
||||||
impl ApplicationWindow {
|
|
||||||
pub fn new(app: &adw::Application) -> Self {
|
|
||||||
let window = adw::ApplicationWindow::new(app);
|
|
||||||
|
|
||||||
let stylesheet = String::from_utf8(
|
|
||||||
resources_lookup_data(
|
|
||||||
"/com/luminescent-dreams/dashboard/style.css",
|
|
||||||
gio::ResourceLookupFlags::NONE,
|
|
||||||
)
|
|
||||||
.expect("stylesheet should just be available")
|
|
||||||
.to_vec(),
|
|
||||||
)
|
|
||||||
.expect("to parse stylesheet");
|
|
||||||
|
|
||||||
let provider = gtk::CssProvider::new();
|
|
||||||
provider.load_from_data(&stylesheet);
|
|
||||||
let context = window.style_context();
|
|
||||||
context.add_provider(&provider, STYLE_PROVIDER_PRIORITY_USER);
|
|
||||||
|
|
||||||
let layout = gtk::Box::builder()
|
|
||||||
.orientation(gtk::Orientation::Vertical)
|
|
||||||
.hexpand(true)
|
|
||||||
.vexpand(true)
|
|
||||||
.build();
|
|
||||||
|
|
||||||
let date_label = Date::default();
|
|
||||||
layout.append(&date_label);
|
|
||||||
|
|
||||||
let events = Events::default();
|
|
||||||
layout.append(&events);
|
|
||||||
|
|
||||||
let transit_card = TransitCard::default();
|
|
||||||
layout.append(&transit_card);
|
|
||||||
|
|
||||||
let transit_clock = TransitClock::default();
|
|
||||||
layout.append(&transit_clock);
|
|
||||||
|
|
||||||
window.set_content(Some(&layout));
|
|
||||||
|
|
||||||
Self {
|
|
||||||
window,
|
|
||||||
date_label,
|
|
||||||
events,
|
|
||||||
transit_card,
|
|
||||||
transit_clock,
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
pub fn update_state(&self, state: State) {
|
|
||||||
self.date_label.update_date(state.date);
|
|
||||||
self.events.set_events(state.events, state.next_event);
|
|
||||||
if let Some(transit) = state.transit {
|
|
||||||
self.transit_card.update_transit(&transit);
|
|
||||||
self.transit_clock.update_transit(transit);
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
|
@ -1,69 +0,0 @@
|
||||||
use chrono::Datelike;
|
|
||||||
use glib::Object;
|
|
||||||
use gtk::{prelude::*, subclass::prelude::*};
|
|
||||||
use ifc::IFC;
|
|
||||||
use std::{cell::RefCell, rc::Rc};
|
|
||||||
|
|
||||||
pub struct DatePrivate {
|
|
||||||
date: Rc<RefCell<IFC>>,
|
|
||||||
label: Rc<RefCell<gtk::Label>>,
|
|
||||||
}
|
|
||||||
|
|
||||||
impl Default for DatePrivate {
|
|
||||||
fn default() -> Self {
|
|
||||||
Self {
|
|
||||||
date: Rc::new(RefCell::new(IFC::from(
|
|
||||||
chrono::Local::now().date_naive().with_year(12023).unwrap(),
|
|
||||||
))),
|
|
||||||
label: Rc::new(RefCell::new(gtk::Label::new(None))),
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
#[glib::object_subclass]
|
|
||||||
impl ObjectSubclass for DatePrivate {
|
|
||||||
const NAME: &'static str = "Date";
|
|
||||||
type Type = Date;
|
|
||||||
type ParentType = gtk::Box;
|
|
||||||
}
|
|
||||||
|
|
||||||
impl ObjectImpl for DatePrivate {}
|
|
||||||
impl WidgetImpl for DatePrivate {}
|
|
||||||
impl BoxImpl for DatePrivate {}
|
|
||||||
|
|
||||||
glib::wrapper! {
|
|
||||||
pub struct Date(ObjectSubclass<DatePrivate>) @extends gtk::Box, gtk::Widget;
|
|
||||||
}
|
|
||||||
|
|
||||||
impl Default for Date {
|
|
||||||
fn default() -> Self {
|
|
||||||
let s: Self = Object::builder().build();
|
|
||||||
s.set_margin_bottom(8);
|
|
||||||
s.set_margin_top(8);
|
|
||||||
s.set_margin_start(8);
|
|
||||||
s.set_margin_end(8);
|
|
||||||
|
|
||||||
s.append(&*s.imp().label.borrow());
|
|
||||||
|
|
||||||
s.redraw();
|
|
||||||
s
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
impl Date {
|
|
||||||
pub fn update_date(&self, date: IFC) {
|
|
||||||
*self.imp().date.borrow_mut() = date;
|
|
||||||
self.redraw();
|
|
||||||
}
|
|
||||||
|
|
||||||
fn redraw(&self) {
|
|
||||||
let date = self.imp().date.borrow().clone();
|
|
||||||
self.imp().label.borrow_mut().set_text(&format!(
|
|
||||||
"{:?}, {:?} {}, {}",
|
|
||||||
date.weekday(),
|
|
||||||
date.month(),
|
|
||||||
date.day(),
|
|
||||||
date.year()
|
|
||||||
));
|
|
||||||
}
|
|
||||||
}
|
|
|
@ -1,96 +0,0 @@
|
||||||
use crate::{
|
|
||||||
components::Date,
|
|
||||||
solstices::{self, YearlyEvents},
|
|
||||||
};
|
|
||||||
use glib::Object;
|
|
||||||
use gtk::{prelude::*, subclass::prelude::*};
|
|
||||||
use ifc::IFC;
|
|
||||||
|
|
||||||
/*
|
|
||||||
#[derive(PartialEq)]
|
|
||||||
pub enum UpcomingEvent {
|
|
||||||
SpringEquinox,
|
|
||||||
SummerSolstice,
|
|
||||||
AutumnEquinox,
|
|
||||||
WinterSolstice,
|
|
||||||
}
|
|
||||||
*/
|
|
||||||
|
|
||||||
#[derive(Default)]
|
|
||||||
pub struct EventsPrivate {
|
|
||||||
spring_equinox: Date,
|
|
||||||
summer_solstice: Date,
|
|
||||||
autumn_equinox: Date,
|
|
||||||
winter_solstice: Date,
|
|
||||||
// next: UpcomingEvent,
|
|
||||||
}
|
|
||||||
|
|
||||||
#[glib::object_subclass]
|
|
||||||
impl ObjectSubclass for EventsPrivate {
|
|
||||||
const NAME: &'static str = "Events";
|
|
||||||
type Type = Events;
|
|
||||||
type ParentType = gtk::Box;
|
|
||||||
}
|
|
||||||
|
|
||||||
impl ObjectImpl for EventsPrivate {}
|
|
||||||
impl WidgetImpl for EventsPrivate {}
|
|
||||||
impl BoxImpl for EventsPrivate {}
|
|
||||||
|
|
||||||
glib::wrapper! {
|
|
||||||
pub struct Events(ObjectSubclass<EventsPrivate>) @extends gtk::Widget, gtk::Box, @implements gtk::Orientable;
|
|
||||||
}
|
|
||||||
|
|
||||||
impl Default for Events {
|
|
||||||
fn default() -> Self {
|
|
||||||
let s: Self = Object::builder().build();
|
|
||||||
s.set_orientation(gtk::Orientation::Horizontal);
|
|
||||||
s.set_spacing(8);
|
|
||||||
|
|
||||||
s.append(&s.imp().spring_equinox);
|
|
||||||
s.append(&s.imp().summer_solstice);
|
|
||||||
s.append(&s.imp().autumn_equinox);
|
|
||||||
s.append(&s.imp().winter_solstice);
|
|
||||||
|
|
||||||
s
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
impl Events {
|
|
||||||
pub fn set_events(&self, events: YearlyEvents, next_event: solstices::Event) {
|
|
||||||
self.imp()
|
|
||||||
.spring_equinox
|
|
||||||
.update_date(IFC::from(events.spring_equinox.date_naive()));
|
|
||||||
|
|
||||||
self.imp()
|
|
||||||
.summer_solstice
|
|
||||||
.update_date(IFC::from(events.summer_solstice.date_naive()));
|
|
||||||
|
|
||||||
self.imp()
|
|
||||||
.autumn_equinox
|
|
||||||
.update_date(IFC::from(events.autumn_equinox.date_naive()));
|
|
||||||
|
|
||||||
self.imp()
|
|
||||||
.winter_solstice
|
|
||||||
.update_date(IFC::from(events.winter_solstice.date_naive()));
|
|
||||||
|
|
||||||
self.imp().spring_equinox.remove_css_class("highlight");
|
|
||||||
self.imp().summer_solstice.remove_css_class("highlight");
|
|
||||||
self.imp().autumn_equinox.remove_css_class("highlight");
|
|
||||||
self.imp().winter_solstice.remove_css_class("highlight");
|
|
||||||
|
|
||||||
match next_event {
|
|
||||||
solstices::Event::SpringEquinox(_) => {
|
|
||||||
self.imp().spring_equinox.add_css_class("highlight")
|
|
||||||
}
|
|
||||||
solstices::Event::SummerSolstice(_) => {
|
|
||||||
self.imp().summer_solstice.add_css_class("highlight")
|
|
||||||
}
|
|
||||||
solstices::Event::AutumnEquinox(_) => {
|
|
||||||
self.imp().autumn_equinox.add_css_class("highlight")
|
|
||||||
}
|
|
||||||
solstices::Event::WinterSolstice(_) => {
|
|
||||||
self.imp().winter_solstice.add_css_class("highlight")
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
|
@ -1,57 +0,0 @@
|
||||||
use glib::Object;
|
|
||||||
use gtk::{prelude::*, subclass::prelude::*};
|
|
||||||
|
|
||||||
#[derive(Default)]
|
|
||||||
pub struct LabelPrivate {
|
|
||||||
label: gtk::Label,
|
|
||||||
icon: gtk::Image,
|
|
||||||
}
|
|
||||||
|
|
||||||
#[glib::object_subclass]
|
|
||||||
impl ObjectSubclass for LabelPrivate {
|
|
||||||
const NAME: &'static str = "Label";
|
|
||||||
type Type = Label;
|
|
||||||
type ParentType = gtk::Box;
|
|
||||||
}
|
|
||||||
|
|
||||||
impl ObjectImpl for LabelPrivate {}
|
|
||||||
impl WidgetImpl for LabelPrivate {}
|
|
||||||
impl BoxImpl for LabelPrivate {}
|
|
||||||
|
|
||||||
glib::wrapper! {
|
|
||||||
pub struct Label(ObjectSubclass<LabelPrivate>) @extends gtk::Box, gtk::Widget,
|
|
||||||
@implements gtk::Orientable;
|
|
||||||
}
|
|
||||||
|
|
||||||
impl Label {
|
|
||||||
pub fn new(text: Option<&str>, icon: Option<gio::ThemedIcon>) -> Self {
|
|
||||||
let s: Self = Object::builder().build();
|
|
||||||
s.set_orientation(gtk::Orientation::Horizontal);
|
|
||||||
s.set_spacing(8);
|
|
||||||
s.set_margin_bottom(8);
|
|
||||||
s.set_margin_top(8);
|
|
||||||
s.set_margin_start(8);
|
|
||||||
s.set_margin_end(8);
|
|
||||||
|
|
||||||
s.append(&s.imp().icon);
|
|
||||||
s.append(&s.imp().label);
|
|
||||||
|
|
||||||
if let Some(text) = text {
|
|
||||||
s.set_text(text);
|
|
||||||
}
|
|
||||||
|
|
||||||
if let Some(icon) = icon {
|
|
||||||
s.set_icon(icon);
|
|
||||||
}
|
|
||||||
|
|
||||||
s
|
|
||||||
}
|
|
||||||
|
|
||||||
pub fn set_text(&self, text: &str) {
|
|
||||||
self.imp().label.set_text(text);
|
|
||||||
}
|
|
||||||
|
|
||||||
pub fn set_icon(&self, icon: gio::ThemedIcon) {
|
|
||||||
self.imp().icon.set_from_gicon(&icon);
|
|
||||||
}
|
|
||||||
}
|
|
|
@ -1,14 +0,0 @@
|
||||||
mod date;
|
|
||||||
pub use date::Date;
|
|
||||||
|
|
||||||
mod events;
|
|
||||||
pub use events::Events;
|
|
||||||
|
|
||||||
mod label;
|
|
||||||
pub use label::Label;
|
|
||||||
|
|
||||||
mod transit_card;
|
|
||||||
pub use transit_card::TransitCard;
|
|
||||||
|
|
||||||
mod transit_clock;
|
|
||||||
pub use transit_clock::TransitClock;
|
|
|
@ -1,74 +0,0 @@
|
||||||
use crate::{components::Label, soluna_client::SunMoon};
|
|
||||||
use glib::Object;
|
|
||||||
use gtk::{prelude::*, subclass::prelude::*};
|
|
||||||
|
|
||||||
pub struct TransitCardPrivate {
|
|
||||||
sunrise: Label,
|
|
||||||
sunset: Label,
|
|
||||||
moonrise: Label,
|
|
||||||
moonset: Label,
|
|
||||||
}
|
|
||||||
|
|
||||||
impl Default for TransitCardPrivate {
|
|
||||||
fn default() -> Self {
|
|
||||||
Self {
|
|
||||||
sunrise: Label::new(None, Some(gio::ThemedIcon::new("daytime-sunrise-symbolic"))),
|
|
||||||
sunset: Label::new(None, Some(gio::ThemedIcon::new("daytime-sunset-symbolic"))),
|
|
||||||
moonrise: Label::new(None, Some(gio::ThemedIcon::new("moon-outline-symbolic"))),
|
|
||||||
moonset: Label::new(None, Some(gio::ThemedIcon::new("moon-outline-symbolic"))),
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
#[glib::object_subclass]
|
|
||||||
impl ObjectSubclass for TransitCardPrivate {
|
|
||||||
const NAME: &'static str = "TransitCard";
|
|
||||||
type Type = TransitCard;
|
|
||||||
type ParentType = gtk::Grid;
|
|
||||||
}
|
|
||||||
|
|
||||||
impl ObjectImpl for TransitCardPrivate {}
|
|
||||||
impl WidgetImpl for TransitCardPrivate {}
|
|
||||||
impl GridImpl for TransitCardPrivate {}
|
|
||||||
|
|
||||||
glib::wrapper! {
|
|
||||||
pub struct TransitCard(ObjectSubclass<TransitCardPrivate>) @extends gtk::Grid, gtk::Widget;
|
|
||||||
}
|
|
||||||
|
|
||||||
impl Default for TransitCard {
|
|
||||||
fn default() -> Self {
|
|
||||||
let s: Self = Object::builder().build();
|
|
||||||
s.add_css_class("card");
|
|
||||||
s.set_column_homogeneous(true);
|
|
||||||
|
|
||||||
s.attach(&s.imp().sunrise, 0, 0, 1, 1);
|
|
||||||
s.attach(&s.imp().sunset, 0, 1, 1, 1);
|
|
||||||
s.attach(&s.imp().moonrise, 1, 0, 1, 1);
|
|
||||||
s.attach(&s.imp().moonset, 1, 1, 1, 1);
|
|
||||||
|
|
||||||
s
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
impl TransitCard {
|
|
||||||
pub fn update_transit(&self, transit_info: &SunMoon) {
|
|
||||||
self.imp()
|
|
||||||
.sunrise
|
|
||||||
.set_text(format!("{}", transit_info.sunrise.format("%H:%M")).as_ref());
|
|
||||||
self.imp()
|
|
||||||
.sunset
|
|
||||||
.set_text(format!("{}", transit_info.sunset.format("%H:%M")).as_ref());
|
|
||||||
self.imp().moonrise.set_text(
|
|
||||||
&transit_info
|
|
||||||
.moonrise
|
|
||||||
.map(|time| format!("{}", time.format("%H:%M")))
|
|
||||||
.unwrap_or("".to_owned()),
|
|
||||||
);
|
|
||||||
self.imp().moonset.set_text(
|
|
||||||
&transit_info
|
|
||||||
.moonset
|
|
||||||
.map(|time| format!("{}", time.format("%H:%M")))
|
|
||||||
.unwrap_or("".to_owned()),
|
|
||||||
);
|
|
||||||
}
|
|
||||||
}
|
|
|
@ -1,103 +0,0 @@
|
||||||
use crate::{
|
|
||||||
drawing::{Color, PieChart, Wedge},
|
|
||||||
soluna_client::SunMoon,
|
|
||||||
};
|
|
||||||
use chrono::{Duration, NaiveTime};
|
|
||||||
use glib::Object;
|
|
||||||
use gtk::{prelude::*, subclass::prelude::*};
|
|
||||||
use std::{cell::RefCell, f64::consts::PI, rc::Rc};
|
|
||||||
|
|
||||||
#[derive(Default)]
|
|
||||||
pub struct TransitClockPrivate {
|
|
||||||
info: Rc<RefCell<Option<SunMoon>>>,
|
|
||||||
}
|
|
||||||
|
|
||||||
#[glib::object_subclass]
|
|
||||||
impl ObjectSubclass for TransitClockPrivate {
|
|
||||||
const NAME: &'static str = "TransitClock";
|
|
||||||
type Type = TransitClock;
|
|
||||||
type ParentType = gtk::DrawingArea;
|
|
||||||
}
|
|
||||||
|
|
||||||
impl ObjectImpl for TransitClockPrivate {}
|
|
||||||
impl WidgetImpl for TransitClockPrivate {}
|
|
||||||
impl DrawingAreaImpl for TransitClockPrivate {}
|
|
||||||
|
|
||||||
glib::wrapper! {
|
|
||||||
pub struct TransitClock(ObjectSubclass<TransitClockPrivate>) @extends gtk::DrawingArea, gtk::Widget;
|
|
||||||
}
|
|
||||||
|
|
||||||
impl Default for TransitClock {
|
|
||||||
fn default() -> Self {
|
|
||||||
let s: Self = Object::builder().build();
|
|
||||||
s.set_width_request(500);
|
|
||||||
s.set_height_request(500);
|
|
||||||
|
|
||||||
s.set_draw_func({
|
|
||||||
let s = s.clone();
|
|
||||||
move |_, context, width, height| {
|
|
||||||
let style_context = WidgetExt::style_context(&s);
|
|
||||||
let center_x = width as f64 / 2.;
|
|
||||||
let center_y = height as f64 / 2.;
|
|
||||||
let radius = width.min(height) as f64 / 2. * 0.9;
|
|
||||||
if let Some(ref info) = *s.imp().info.borrow() {
|
|
||||||
let full_day = Duration::days(1).num_seconds() as f64;
|
|
||||||
let sunrise = info.sunrise - NaiveTime::from_hms_opt(0, 0, 0).unwrap();
|
|
||||||
let sunset = info.sunset - NaiveTime::from_hms_opt(0, 0, 0).unwrap();
|
|
||||||
|
|
||||||
let night_color = style_context.lookup_color("dark_5").unwrap();
|
|
||||||
let day_color = style_context.lookup_color("blue_1").unwrap();
|
|
||||||
|
|
||||||
PieChart::new(&style_context)
|
|
||||||
.center(center_x, center_y)
|
|
||||||
.radius(radius)
|
|
||||||
.rotation(-PI / 2.)
|
|
||||||
.wedges(
|
|
||||||
vec![
|
|
||||||
Wedge {
|
|
||||||
start_angle: (PI * 2.) * sunset.num_seconds() as f64 / full_day,
|
|
||||||
end_angle: (PI * 2.) * sunrise.num_seconds() as f64 / full_day,
|
|
||||||
color: Color {
|
|
||||||
r: night_color.red() as f64,
|
|
||||||
g: night_color.green() as f64,
|
|
||||||
b: night_color.blue() as f64,
|
|
||||||
},
|
|
||||||
},
|
|
||||||
Wedge {
|
|
||||||
start_angle: (PI * 2.) * sunrise.num_seconds() as f64
|
|
||||||
/ full_day,
|
|
||||||
end_angle: (PI * 2.) * sunset.num_seconds() as f64 / full_day,
|
|
||||||
color: Color {
|
|
||||||
r: day_color.red() as f64,
|
|
||||||
g: day_color.green() as f64,
|
|
||||||
b: day_color.blue() as f64,
|
|
||||||
},
|
|
||||||
},
|
|
||||||
]
|
|
||||||
.into_iter(),
|
|
||||||
)
|
|
||||||
.draw(context);
|
|
||||||
|
|
||||||
(0..24).for_each(|tick| {
|
|
||||||
context.set_source_rgb(0., 0., 0.);
|
|
||||||
context.translate(center_x, center_y);
|
|
||||||
context.rotate(tick as f64 * (PI / 12.));
|
|
||||||
context.move_to(radius - 5., 0.);
|
|
||||||
context.line_to(radius - 10., 0.);
|
|
||||||
let _ = context.stroke();
|
|
||||||
context.identity_matrix();
|
|
||||||
});
|
|
||||||
}
|
|
||||||
}
|
|
||||||
});
|
|
||||||
|
|
||||||
s
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
impl TransitClock {
|
|
||||||
pub fn update_transit(&self, transit_info: SunMoon) {
|
|
||||||
*self.imp().info.borrow_mut() = Some(transit_info);
|
|
||||||
self.queue_draw();
|
|
||||||
}
|
|
||||||
}
|
|
|
@ -1,2 +0,0 @@
|
||||||
mod pie_chart;
|
|
||||||
pub use pie_chart::{Color, PieChart, Wedge};
|
|
|
@ -1,96 +0,0 @@
|
||||||
use cairo::Context;
|
|
||||||
use gtk::{gdk::RGBA, prelude::*, StyleContext};
|
|
||||||
use std::f64::consts::PI;
|
|
||||||
|
|
||||||
#[derive(Clone, Debug)]
|
|
||||||
pub struct Color {
|
|
||||||
pub r: f64,
|
|
||||||
pub g: f64,
|
|
||||||
pub b: f64,
|
|
||||||
}
|
|
||||||
|
|
||||||
#[derive(Clone, Debug)]
|
|
||||||
pub struct Wedge {
|
|
||||||
pub start_angle: f64,
|
|
||||||
pub end_angle: f64,
|
|
||||||
pub color: Color,
|
|
||||||
}
|
|
||||||
|
|
||||||
pub struct PieChart {
|
|
||||||
rotation: f64,
|
|
||||||
wedges: Vec<Wedge>,
|
|
||||||
center_x: f64,
|
|
||||||
center_y: f64,
|
|
||||||
radius: f64,
|
|
||||||
|
|
||||||
border_color: RGBA,
|
|
||||||
}
|
|
||||||
|
|
||||||
impl PieChart {
|
|
||||||
pub fn new(style_context: &StyleContext) -> Self {
|
|
||||||
Self {
|
|
||||||
rotation: 0.,
|
|
||||||
wedges: vec![],
|
|
||||||
center_x: 0.,
|
|
||||||
center_y: 0.,
|
|
||||||
radius: 0.,
|
|
||||||
border_color: style_context.lookup_color("theme_fg_color").unwrap(),
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
pub fn rotation(mut self, rotation: f64) -> Self {
|
|
||||||
self.rotation = rotation;
|
|
||||||
self
|
|
||||||
}
|
|
||||||
|
|
||||||
pub fn wedges(mut self, wedge: impl Iterator<Item = Wedge>) -> Self {
|
|
||||||
let mut wedges: Vec<Wedge> = wedge.collect();
|
|
||||||
self.wedges.append(&mut wedges);
|
|
||||||
self
|
|
||||||
}
|
|
||||||
|
|
||||||
pub fn center(mut self, center_x: f64, center_y: f64) -> Self {
|
|
||||||
self.center_x = center_x;
|
|
||||||
self.center_y = center_y;
|
|
||||||
self
|
|
||||||
}
|
|
||||||
|
|
||||||
pub fn radius(mut self, radius: f64) -> Self {
|
|
||||||
self.radius = radius;
|
|
||||||
self
|
|
||||||
}
|
|
||||||
|
|
||||||
pub fn draw(self, context: &Context) {
|
|
||||||
context.set_source_rgba(0., 0., 0., 0.);
|
|
||||||
let _ = context.paint();
|
|
||||||
|
|
||||||
context.set_line_width(2.);
|
|
||||||
|
|
||||||
self.wedges.iter().for_each(
|
|
||||||
|Wedge {
|
|
||||||
start_angle,
|
|
||||||
end_angle,
|
|
||||||
color,
|
|
||||||
}| {
|
|
||||||
context.move_to(self.center_x, self.center_y);
|
|
||||||
context.set_source_rgb(color.r, color.g, color.b);
|
|
||||||
context.arc(
|
|
||||||
self.center_x,
|
|
||||||
self.center_y,
|
|
||||||
self.radius,
|
|
||||||
start_angle + self.rotation,
|
|
||||||
end_angle + self.rotation,
|
|
||||||
);
|
|
||||||
let _ = context.fill();
|
|
||||||
},
|
|
||||||
);
|
|
||||||
|
|
||||||
context.set_source_rgb(
|
|
||||||
self.border_color.red() as f64,
|
|
||||||
self.border_color.green() as f64,
|
|
||||||
self.border_color.blue() as f64,
|
|
||||||
);
|
|
||||||
context.arc(self.center_x, self.center_y, self.radius, 0., 2. * PI);
|
|
||||||
let _ = context.stroke();
|
|
||||||
}
|
|
||||||
}
|
|
|
@ -1,144 +0,0 @@
|
||||||
use chrono::{Datelike, Local, Utc};
|
|
||||||
use geo_types::{Latitude, Longitude};
|
|
||||||
use glib::Sender;
|
|
||||||
use gtk::prelude::*;
|
|
||||||
use ifc::IFC;
|
|
||||||
use std::{
|
|
||||||
env,
|
|
||||||
sync::{Arc, RwLock},
|
|
||||||
};
|
|
||||||
|
|
||||||
mod app_window;
|
|
||||||
use app_window::ApplicationWindow;
|
|
||||||
|
|
||||||
mod components;
|
|
||||||
|
|
||||||
mod drawing;
|
|
||||||
|
|
||||||
mod soluna_client;
|
|
||||||
use soluna_client::SolunaClient;
|
|
||||||
|
|
||||||
mod solstices;
|
|
||||||
use solstices::EVENTS;
|
|
||||||
|
|
||||||
mod types;
|
|
||||||
use types::State;
|
|
||||||
|
|
||||||
/*
|
|
||||||
const EO_TEXT: &'static str = "
|
|
||||||
day = {$day ->
|
|
||||||
*[Sunday] Dimanĉo
|
|
||||||
[Monday] Lundo
|
|
||||||
[Tuesday] Mardo
|
|
||||||
[Wednesday] Merkredo
|
|
||||||
[Thursday] Ĵaŭdo
|
|
||||||
[Friday] Vendredo
|
|
||||||
[Saturday] Sabato
|
|
||||||
[LeapDay] Leap Day
|
|
||||||
[YearDay] Year Day
|
|
||||||
}
|
|
||||||
month = {$month ->
|
|
||||||
*[January] Januaro
|
|
||||||
[February] Februaro
|
|
||||||
[March] Marto
|
|
||||||
[April] Aprilo
|
|
||||||
[May] Mayo
|
|
||||||
[June] Junio
|
|
||||||
[Sol] Solo
|
|
||||||
[July] Julio
|
|
||||||
[August] Aŭgusto
|
|
||||||
[September] Septembro
|
|
||||||
[October] Oktobro
|
|
||||||
[November] Novembro
|
|
||||||
[December] Decembro
|
|
||||||
}
|
|
||||||
spring_equinox = Printempa Ekvinokso
|
|
||||||
summer_solstice = Somera Solstico
|
|
||||||
autumn_equinox = Aŭtuna Ekvinokso
|
|
||||||
winter_solstice = Vintra Solstico
|
|
||||||
";
|
|
||||||
*/
|
|
||||||
|
|
||||||
#[derive(Clone, Debug)]
|
|
||||||
pub enum Message {
|
|
||||||
Refresh(State),
|
|
||||||
}
|
|
||||||
|
|
||||||
#[derive(Clone)]
|
|
||||||
pub struct Core {
|
|
||||||
tx: Arc<RwLock<Option<Sender<Message>>>>,
|
|
||||||
}
|
|
||||||
|
|
||||||
pub fn main() {
|
|
||||||
gio::resources_register_include!("com.luminescent-dreams.dashboard.gresource")
|
|
||||||
.expect("Failed to register resources");
|
|
||||||
|
|
||||||
let app = adw::Application::builder()
|
|
||||||
.application_id("com.luminescent-dreams.dashboard")
|
|
||||||
.resource_base_path("/com/luminescent-dreams/dashboard")
|
|
||||||
.build();
|
|
||||||
|
|
||||||
let latitude = Latitude::from(41.78);
|
|
||||||
let longitude = Longitude::from(-71.41);
|
|
||||||
|
|
||||||
let runtime = tokio::runtime::Builder::new_multi_thread()
|
|
||||||
.enable_all()
|
|
||||||
.build()
|
|
||||||
.unwrap();
|
|
||||||
|
|
||||||
let core = Core {
|
|
||||||
tx: Arc::new(RwLock::new(None)),
|
|
||||||
};
|
|
||||||
|
|
||||||
runtime.spawn({
|
|
||||||
let core = core.clone();
|
|
||||||
async move {
|
|
||||||
let soluna_client = SolunaClient::new();
|
|
||||||
|
|
||||||
loop {
|
|
||||||
let transit = soluna_client
|
|
||||||
.request(latitude.clone(), longitude.clone(), Local::now())
|
|
||||||
.await;
|
|
||||||
|
|
||||||
let now = Local::now();
|
|
||||||
let state = State {
|
|
||||||
date: IFC::from(now.date_naive().with_year(12023).unwrap()),
|
|
||||||
next_event: EVENTS.next_event(now.with_timezone(&Utc)).unwrap(),
|
|
||||||
events: EVENTS.yearly_events(now.year()).unwrap(),
|
|
||||||
transit: Some(transit),
|
|
||||||
};
|
|
||||||
|
|
||||||
if let Some(ref gtk_tx) = *core.tx.read().unwrap() {
|
|
||||||
let _ = gtk_tx.send(Message::Refresh(state.clone()));
|
|
||||||
std::thread::sleep(std::time::Duration::from_secs(60));
|
|
||||||
} else {
|
|
||||||
std::thread::sleep(std::time::Duration::from_secs(1));
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
});
|
|
||||||
|
|
||||||
app.connect_activate(move |app| {
|
|
||||||
let (gtk_tx, gtk_rx) =
|
|
||||||
gtk::glib::MainContext::channel::<Message>(gtk::glib::PRIORITY_DEFAULT);
|
|
||||||
|
|
||||||
*core.tx.write().unwrap() = Some(gtk_tx);
|
|
||||||
|
|
||||||
let window = ApplicationWindow::new(app);
|
|
||||||
window.window.present();
|
|
||||||
|
|
||||||
gtk_rx.attach(None, {
|
|
||||||
let window = window.clone();
|
|
||||||
move |msg| {
|
|
||||||
let Message::Refresh(state) = msg;
|
|
||||||
ApplicationWindow::update_state(&window, state);
|
|
||||||
|
|
||||||
Continue(true)
|
|
||||||
}
|
|
||||||
});
|
|
||||||
});
|
|
||||||
|
|
||||||
let args: Vec<String> = env::args().collect();
|
|
||||||
ApplicationExtManual::run_with_args(&app, &args);
|
|
||||||
runtime.shutdown_background();
|
|
||||||
}
|
|
|
@ -1,14 +0,0 @@
|
||||||
use svg::{
|
|
||||||
node::element::{Circle, Image},
|
|
||||||
Document,
|
|
||||||
};
|
|
||||||
|
|
||||||
pub fn moon() -> Document {
|
|
||||||
/*
|
|
||||||
svg(width="100%", height="100%", xmlns="http://www.w3.org/2000/svg") {
|
|
||||||
circle(cx="50", cy="50", r="50", stroke="green", fill="none");
|
|
||||||
}
|
|
||||||
*/
|
|
||||||
let img = Image::new().set("href", "/moon-small.png");
|
|
||||||
Document::new().add(img)
|
|
||||||
}
|
|
|
@ -1,201 +0,0 @@
|
||||||
use chrono::prelude::*;
|
|
||||||
use lazy_static::lazy_static;
|
|
||||||
use serde_derive::{Deserialize, Serialize};
|
|
||||||
use std::collections::HashMap;
|
|
||||||
|
|
||||||
// http://astropixels.com/ephemeris/soleq2001.html
|
|
||||||
const SOLSTICE_TEXT: &str = "
|
|
||||||
2001 Mar 20 13:31 Jun 21 07:38 Sep 22 23:05 Dec 21 19:22
|
|
||||||
2002 Mar 20 19:16 Jun 21 13:25 Sep 23 04:56 Dec 22 01:15
|
|
||||||
2003 Mar 21 01:00 Jun 21 19:11 Sep 23 10:47 Dec 22 07:04
|
|
||||||
2004 Mar 20 06:49 Jun 21 00:57 Sep 22 16:30 Dec 21 12:42
|
|
||||||
2005 Mar 20 12:34 Jun 21 06:46 Sep 22 22:23 Dec 21 18:35
|
|
||||||
2006 Mar 20 18:25 Jun 21 12:26 Sep 23 04:04 Dec 22 00:22
|
|
||||||
2007 Mar 21 00:07 Jun 21 18:06 Sep 23 09:51 Dec 22 06:08
|
|
||||||
2008 Mar 20 05:49 Jun 21 00:00 Sep 22 15:45 Dec 21 12:04
|
|
||||||
2009 Mar 20 11:44 Jun 21 05:45 Sep 22 21:18 Dec 21 17:47
|
|
||||||
2010 Mar 20 17:32 Jun 21 11:28 Sep 23 03:09 Dec 21 23:38
|
|
||||||
|
|
||||||
2011 Mar 20 23:21 Jun 21 17:16 Sep 23 09:05 Dec 22 05:30
|
|
||||||
2012 Mar 20 05:15 Jun 20 23:08 Sep 22 14:49 Dec 21 11:12
|
|
||||||
2013 Mar 20 11:02 Jun 21 05:04 Sep 22 20:44 Dec 21 17:11
|
|
||||||
2014 Mar 20 16:57 Jun 21 10:52 Sep 23 02:30 Dec 21 23:03
|
|
||||||
2015 Mar 20 22:45 Jun 21 16:38 Sep 23 08:20 Dec 22 04:48
|
|
||||||
2016 Mar 20 04:31 Jun 20 22:35 Sep 22 14:21 Dec 21 10:45
|
|
||||||
2017 Mar 20 10:29 Jun 21 04:25 Sep 22 20:02 Dec 21 16:29
|
|
||||||
2018 Mar 20 16:15 Jun 21 10:07 Sep 23 01:54 Dec 21 22:22
|
|
||||||
2019 Mar 20 21:58 Jun 21 15:54 Sep 23 07:50 Dec 22 04:19
|
|
||||||
2020 Mar 20 03:50 Jun 20 21:43 Sep 22 13:31 Dec 21 10:03
|
|
||||||
|
|
||||||
2021 Mar 20 09:37 Jun 21 03:32 Sep 22 19:21 Dec 21 15:59
|
|
||||||
2022 Mar 20 15:33 Jun 21 09:14 Sep 23 01:04 Dec 21 21:48
|
|
||||||
2023 Mar 20 21:25 Jun 21 14:58 Sep 23 06:50 Dec 22 03:28
|
|
||||||
2024 Mar 20 03:07 Jun 20 20:51 Sep 22 12:44 Dec 21 09:20
|
|
||||||
2025 Mar 20 09:02 Jun 21 02:42 Sep 22 18:20 Dec 21 15:03
|
|
||||||
2026 Mar 20 14:46 Jun 21 08:25 Sep 23 00:06 Dec 21 20:50
|
|
||||||
2027 Mar 20 20:25 Jun 21 14:11 Sep 23 06:02 Dec 22 02:43
|
|
||||||
2028 Mar 20 02:17 Jun 20 20:02 Sep 22 11:45 Dec 21 08:20
|
|
||||||
2029 Mar 20 08:01 Jun 21 01:48 Sep 22 17:37 Dec 21 14:14
|
|
||||||
2030 Mar 20 13:51 Jun 21 07:31 Sep 22 23:27 Dec 21 20:09
|
|
||||||
|
|
||||||
2031 Mar 20 19:41 Jun 21 13:17 Sep 23 05:15 Dec 22 01:56
|
|
||||||
2032 Mar 20 01:23 Jun 20 19:09 Sep 22 11:11 Dec 21 07:57
|
|
||||||
2033 Mar 20 07:23 Jun 21 01:01 Sep 22 16:52 Dec 21 13:45
|
|
||||||
2034 Mar 20 13:18 Jun 21 06:45 Sep 22 22:41 Dec 21 19:35
|
|
||||||
2035 Mar 20 19:03 Jun 21 12:33 Sep 23 04:39 Dec 22 01:31
|
|
||||||
2036 Mar 20 01:02 Jun 20 18:31 Sep 22 10:23 Dec 21 07:12
|
|
||||||
2037 Mar 20 06:50 Jun 21 00:22 Sep 22 16:13 Dec 21 13:08
|
|
||||||
2038 Mar 20 12:40 Jun 21 06:09 Sep 22 22:02 Dec 21 19:01
|
|
||||||
2039 Mar 20 18:32 Jun 21 11:58 Sep 23 03:50 Dec 22 00:41
|
|
||||||
2040 Mar 20 00:11 Jun 20 17:46 Sep 22 09:44 Dec 21 06:33
|
|
||||||
|
|
||||||
2041 Mar 20 06:07 Jun 20 23:37 Sep 22 15:27 Dec 21 12:19
|
|
||||||
2042 Mar 20 11:53 Jun 21 05:16 Sep 22 21:11 Dec 21 18:04
|
|
||||||
2043 Mar 20 17:29 Jun 21 10:59 Sep 23 03:07 Dec 22 00:02
|
|
||||||
2044 Mar 19 23:20 Jun 20 16:50 Sep 22 08:47 Dec 21 05:43
|
|
||||||
2045 Mar 20 05:08 Jun 20 22:34 Sep 22 14:33 Dec 21 11:36
|
|
||||||
2046 Mar 20 10:58 Jun 21 04:15 Sep 22 20:22 Dec 21 17:28
|
|
||||||
2047 Mar 20 16:52 Jun 21 10:02 Sep 23 02:07 Dec 21 23:07
|
|
||||||
2048 Mar 19 22:34 Jun 20 15:54 Sep 22 08:01 Dec 21 05:02
|
|
||||||
2049 Mar 20 04:28 Jun 20 21:47 Sep 22 13:42 Dec 21 10:51
|
|
||||||
2050 Mar 20 10:20 Jun 21 03:33 Sep 22 19:29 Dec 21 16:39
|
|
||||||
";
|
|
||||||
|
|
||||||
#[derive(Clone, Copy, Debug, Serialize, Deserialize)]
|
|
||||||
pub struct YearlyEvents {
|
|
||||||
pub year: i32,
|
|
||||||
pub spring_equinox: chrono::DateTime<chrono::Utc>,
|
|
||||||
pub summer_solstice: chrono::DateTime<chrono::Utc>,
|
|
||||||
pub autumn_equinox: chrono::DateTime<chrono::Utc>,
|
|
||||||
pub winter_solstice: chrono::DateTime<chrono::Utc>,
|
|
||||||
}
|
|
||||||
|
|
||||||
#[derive(Clone, Copy, Debug, Serialize, Deserialize)]
|
|
||||||
pub enum Event {
|
|
||||||
SpringEquinox(chrono::DateTime<chrono::Utc>),
|
|
||||||
SummerSolstice(chrono::DateTime<chrono::Utc>),
|
|
||||||
AutumnEquinox(chrono::DateTime<chrono::Utc>),
|
|
||||||
WinterSolstice(chrono::DateTime<chrono::Utc>),
|
|
||||||
}
|
|
||||||
|
|
||||||
impl Event {
|
|
||||||
pub fn date(&self) -> chrono::DateTime<chrono::Utc> {
|
|
||||||
match *self {
|
|
||||||
Event::SpringEquinox(d) => d,
|
|
||||||
Event::SummerSolstice(d) => d,
|
|
||||||
Event::AutumnEquinox(d) => d,
|
|
||||||
Event::WinterSolstice(d) => d,
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
fn parse_time<'a>(
|
|
||||||
year: &str,
|
|
||||||
iter: impl Iterator<Item = &'a str>,
|
|
||||||
) -> chrono::DateTime<chrono::Utc> {
|
|
||||||
let parts = iter.collect::<Vec<&str>>();
|
|
||||||
let p = format!("{} {} {} {}", year, parts[0], parts[1], parts[2]);
|
|
||||||
NaiveDateTime::parse_from_str(&p, "%Y %b %d %H:%M")
|
|
||||||
.unwrap()
|
|
||||||
.and_utc()
|
|
||||||
}
|
|
||||||
|
|
||||||
fn parse_line(year: &str, rest: &[&str]) -> YearlyEvents {
|
|
||||||
let spring = parse_time(year, rest.iter().take(3).cloned());
|
|
||||||
let summer = parse_time(year, rest.iter().skip(3).take(3).cloned());
|
|
||||||
let autumn = parse_time(year, rest.iter().skip(6).take(3).cloned());
|
|
||||||
let winter = parse_time(year, rest.iter().skip(9).take(3).cloned());
|
|
||||||
YearlyEvents {
|
|
||||||
year: year.parse::<i32>().unwrap(),
|
|
||||||
spring_equinox: spring,
|
|
||||||
summer_solstice: summer,
|
|
||||||
autumn_equinox: autumn,
|
|
||||||
winter_solstice: winter,
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
fn parse_events() -> Vec<Option<YearlyEvents>> {
|
|
||||||
SOLSTICE_TEXT
|
|
||||||
.lines()
|
|
||||||
.map(|line| {
|
|
||||||
match line
|
|
||||||
.split(' ')
|
|
||||||
.filter(|elem| !elem.is_empty())
|
|
||||||
.collect::<Vec<&str>>()
|
|
||||||
.as_slice()
|
|
||||||
{
|
|
||||||
[year, rest @ ..] => Some(parse_line(year, rest)),
|
|
||||||
_ => None,
|
|
||||||
}
|
|
||||||
})
|
|
||||||
.collect()
|
|
||||||
}
|
|
||||||
|
|
||||||
pub struct Solstices(HashMap<i32, YearlyEvents>);
|
|
||||||
|
|
||||||
impl Solstices {
|
|
||||||
pub fn yearly_events(&self, year: i32) -> Option<YearlyEvents> {
|
|
||||||
self.0.get(&year).copied()
|
|
||||||
}
|
|
||||||
|
|
||||||
pub fn next_event(&self, date: chrono::DateTime<chrono::Utc>) -> Option<Event> {
|
|
||||||
let year_events = self.0.get(&date.year());
|
|
||||||
match year_events {
|
|
||||||
Some(year_events) => {
|
|
||||||
if date <= year_events.spring_equinox {
|
|
||||||
Some(Event::SpringEquinox(year_events.spring_equinox))
|
|
||||||
} else if date <= year_events.summer_solstice {
|
|
||||||
Some(Event::SummerSolstice(year_events.summer_solstice))
|
|
||||||
} else if date <= year_events.autumn_equinox {
|
|
||||||
Some(Event::AutumnEquinox(year_events.autumn_equinox))
|
|
||||||
} else if date <= year_events.winter_solstice {
|
|
||||||
Some(Event::WinterSolstice(year_events.winter_solstice))
|
|
||||||
} else {
|
|
||||||
self.0
|
|
||||||
.get(&(date.year() + 1))
|
|
||||||
.map(|_| Event::SpringEquinox(year_events.spring_equinox))
|
|
||||||
}
|
|
||||||
}
|
|
||||||
None => None,
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
impl From<Vec<Option<YearlyEvents>>> for Solstices {
|
|
||||||
fn from(event_list: Vec<Option<YearlyEvents>>) -> Self {
|
|
||||||
Solstices(event_list.iter().fold(HashMap::new(), |mut m, record| {
|
|
||||||
match record {
|
|
||||||
Some(record) => {
|
|
||||||
m.insert(record.year, *record);
|
|
||||||
}
|
|
||||||
None => (),
|
|
||||||
}
|
|
||||||
m
|
|
||||||
}))
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
lazy_static! {
|
|
||||||
pub static ref EVENTS: Solstices = Solstices::from(parse_events());
|
|
||||||
}
|
|
||||||
|
|
||||||
#[cfg(test)]
|
|
||||||
mod test {
|
|
||||||
use chrono::{NaiveDate, NaiveDateTime};
|
|
||||||
|
|
||||||
#[test]
|
|
||||||
fn it_can_parse_a_solstice_time() {
|
|
||||||
let p = "2001 Mar 20 13:31".to_owned();
|
|
||||||
let parsed_date = NaiveDateTime::parse_from_str(&p, "%Y %b %d %H:%M")
|
|
||||||
.unwrap()
|
|
||||||
.and_utc();
|
|
||||||
assert_eq!(
|
|
||||||
parsed_date,
|
|
||||||
NaiveDate::from_ymd_opt(2001, 03, 20)
|
|
||||||
.unwrap()
|
|
||||||
.and_hms_opt(13, 31, 0)
|
|
||||||
.unwrap()
|
|
||||||
.and_utc()
|
|
||||||
);
|
|
||||||
}
|
|
||||||
}
|
|
|
@ -1,154 +0,0 @@
|
||||||
// 41.78, -71.41
|
|
||||||
// https://api.solunar.org/solunar/41.78,-71.41,20211029,-4
|
|
||||||
|
|
||||||
use chrono::{DateTime, Duration, Local, NaiveTime, Offset, TimeZone, Timelike, Utc};
|
|
||||||
use geo_types::{Latitude, Longitude};
|
|
||||||
use memorycache::MemoryCache;
|
|
||||||
use serde::Deserialize;
|
|
||||||
|
|
||||||
const ENDPOINT: &str = "https://api.solunar.org/solunar";
|
|
||||||
|
|
||||||
#[derive(Clone, Debug, PartialEq)]
|
|
||||||
pub struct SunMoon {
|
|
||||||
pub sunrise: NaiveTime,
|
|
||||||
pub sunset: NaiveTime,
|
|
||||||
pub moonrise: Option<NaiveTime>,
|
|
||||||
pub moonset: Option<NaiveTime>,
|
|
||||||
pub moon_phase: LunarPhase,
|
|
||||||
}
|
|
||||||
|
|
||||||
impl SunMoon {
|
|
||||||
fn from_js(val: SunMoonJs) -> Self {
|
|
||||||
fn parse_time(val: String) -> Option<NaiveTime> {
|
|
||||||
NaiveTime::parse_from_str(&val, "%H:%M").ok()
|
|
||||||
}
|
|
||||||
|
|
||||||
let sunrise = parse_time(val.sunrise).unwrap();
|
|
||||||
let sunset = parse_time(val.sunset).unwrap();
|
|
||||||
let moonrise = val.moonrise.and_then(parse_time);
|
|
||||||
let moonset = val.moonset.and_then(parse_time);
|
|
||||||
|
|
||||||
Self {
|
|
||||||
sunrise,
|
|
||||||
sunset,
|
|
||||||
moonrise,
|
|
||||||
moonset,
|
|
||||||
moon_phase: val.moon_phase,
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
#[derive(Clone, Debug, Deserialize)]
|
|
||||||
pub(crate) struct SunMoonJs {
|
|
||||||
#[serde(alias = "sunRise")]
|
|
||||||
sunrise: String,
|
|
||||||
#[serde(alias = "sunSet")]
|
|
||||||
sunset: String,
|
|
||||||
#[serde(alias = "moonRise")]
|
|
||||||
moonrise: Option<String>,
|
|
||||||
#[serde(alias = "moonSet")]
|
|
||||||
moonset: Option<String>,
|
|
||||||
#[serde(alias = "moonPhase")]
|
|
||||||
moon_phase: LunarPhase,
|
|
||||||
}
|
|
||||||
|
|
||||||
#[derive(Clone, Debug, Deserialize, PartialEq)]
|
|
||||||
pub enum LunarPhase {
|
|
||||||
#[serde(alias = "New Moon")]
|
|
||||||
NewMoon,
|
|
||||||
#[serde(alias = "Waxing Crescent")]
|
|
||||||
WaxingCrescent,
|
|
||||||
#[serde(alias = "First Quarter")]
|
|
||||||
FirstQuarter,
|
|
||||||
#[serde(alias = "Waxing Gibbous")]
|
|
||||||
WaxingGibbous,
|
|
||||||
#[serde(alias = "Full Moon")]
|
|
||||||
FullMoon,
|
|
||||||
#[serde(alias = "Waning Gibbous")]
|
|
||||||
WaningGibbous,
|
|
||||||
#[serde(alias = "Last Quarter")]
|
|
||||||
LastQuarter,
|
|
||||||
#[serde(alias = "Waning Crescent")]
|
|
||||||
WaningCrescent,
|
|
||||||
}
|
|
||||||
|
|
||||||
pub struct SolunaClient {
|
|
||||||
client: reqwest::Client,
|
|
||||||
memory_cache: MemoryCache<SunMoonJs>,
|
|
||||||
}
|
|
||||||
|
|
||||||
impl SolunaClient {
|
|
||||||
pub fn new() -> Self {
|
|
||||||
Self {
|
|
||||||
client: reqwest::Client::new(),
|
|
||||||
memory_cache: MemoryCache::default(),
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
pub async fn request<Tz: TimeZone>(
|
|
||||||
&self,
|
|
||||||
latitude: Latitude,
|
|
||||||
longitude: Longitude,
|
|
||||||
day: DateTime<Tz>,
|
|
||||||
) -> SunMoon {
|
|
||||||
let date = day.date_naive().format("%Y%m%d");
|
|
||||||
let url = format!(
|
|
||||||
"{}/{},{},{},{}",
|
|
||||||
ENDPOINT,
|
|
||||||
latitude,
|
|
||||||
longitude,
|
|
||||||
date,
|
|
||||||
day.offset().fix().local_minus_utc() / 3600
|
|
||||||
);
|
|
||||||
let js = self
|
|
||||||
.memory_cache
|
|
||||||
.find(&url, async {
|
|
||||||
let response = self.client.get(&url).send().await.unwrap();
|
|
||||||
let expiration = response
|
|
||||||
.headers()
|
|
||||||
.get(reqwest::header::EXPIRES)
|
|
||||||
.and_then(|header| header.to_str().ok())
|
|
||||||
.and_then(|expiration| DateTime::parse_from_rfc2822(expiration).ok())
|
|
||||||
.map(DateTime::<Utc>::from)
|
|
||||||
.unwrap_or(
|
|
||||||
Local::now()
|
|
||||||
.with_hour(0)
|
|
||||||
.and_then(|dt| dt.with_minute(0))
|
|
||||||
.and_then(|dt| dt.with_second(0))
|
|
||||||
.and_then(|dt| dt.with_nanosecond(0))
|
|
||||||
.map(|dt| dt.with_timezone(&Utc))
|
|
||||||
.unwrap()
|
|
||||||
+ Duration::days(1),
|
|
||||||
);
|
|
||||||
let soluna: SunMoonJs = response.json().await.unwrap();
|
|
||||||
(expiration, soluna)
|
|
||||||
})
|
|
||||||
.await;
|
|
||||||
|
|
||||||
SunMoon::from_js(js)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
#[cfg(test)]
|
|
||||||
mod test {
|
|
||||||
use super::*;
|
|
||||||
use serde_json;
|
|
||||||
|
|
||||||
const EXAMPLE: &str = "{\"sunRise\":\"7:15\",\"sunTransit\":\"12:30\",\"sunSet\":\"17:45\",\"moonRise\":null,\"moonTransit\":\"7:30\",\"moonUnder\":\"19:54\",\"moonSet\":\"15:02\",\"moonPhase\":\"Waning Crescent\",\"moonIllumination\":0.35889454647387764,\"sunRiseDec\":7.25,\"sunTransitDec\":12.5,\"sunSetDec\":17.75,\"moonRiseDec\":null,\"moonSetDec\":15.033333333333333,\"moonTransitDec\":7.5,\"moonUnderDec\":19.9,\"minor1Start\":null,\"minor1Stop\":null,\"minor2StartDec\":14.533333333333333,\"minor2Start\":\"14:32\",\"minor2StopDec\":15.533333333333333,\"minor2Stop\":\"15:32\",\"major1StartDec\":6.5,\"major1Start\":\"06:30\",\"major1StopDec\":8.5,\"major1Stop\":\"08:30\",\"major2StartDec\":18.9,\"major2Start\":\"18:54\",\"major2StopDec\":20.9,\"major2Stop\":\"20:54\",\"dayRating\":1,\"hourlyRating\":{\"0\":20,\"1\":20,\"2\":0,\"3\":0,\"4\":0,\"5\":0,\"6\":20,\"7\":40,\"8\":40,\"9\":20,\"10\":0,\"11\":0,\"12\":0,\"13\":0,\"14\":0,\"15\":20,\"16\":20,\"17\":20,\"18\":40,\"19\":20,\"20\":20,\"21\":20,\"22\":0,\"23\":0}}";
|
|
||||||
|
|
||||||
#[test]
|
|
||||||
fn it_parses_a_response() {
|
|
||||||
let sun_moon_js: SunMoonJs = serde_json::from_str(EXAMPLE).unwrap();
|
|
||||||
let sun_moon = SunMoon::from_js(sun_moon_js);
|
|
||||||
assert_eq!(
|
|
||||||
sun_moon,
|
|
||||||
SunMoon {
|
|
||||||
sunrise: NaiveTime::from_hms_opt(7, 15, 0).unwrap(),
|
|
||||||
sunset: NaiveTime::from_hms_opt(17, 45, 0).unwrap(),
|
|
||||||
moonrise: None,
|
|
||||||
moonset: Some(NaiveTime::from_hms_opt(15, 02, 0).unwrap()),
|
|
||||||
moon_phase: LunarPhase::WaningCrescent,
|
|
||||||
}
|
|
||||||
);
|
|
||||||
}
|
|
||||||
}
|
|
|
@ -1,13 +0,0 @@
|
||||||
use crate::{
|
|
||||||
solstices::{Event, YearlyEvents},
|
|
||||||
soluna_client::SunMoon,
|
|
||||||
};
|
|
||||||
use ifc::IFC;
|
|
||||||
|
|
||||||
#[derive(Clone, Debug)]
|
|
||||||
pub struct State {
|
|
||||||
pub date: IFC,
|
|
||||||
pub next_event: Event,
|
|
||||||
pub events: YearlyEvents,
|
|
||||||
pub transit: Option<SunMoon>,
|
|
||||||
}
|
|
|
@ -0,0 +1,9 @@
|
||||||
|
|
||||||
|
dev:
|
||||||
|
cargo watch -x build
|
||||||
|
|
||||||
|
test:
|
||||||
|
cargo watch -x test
|
||||||
|
|
||||||
|
test-once:
|
||||||
|
cargo test
|
|
@ -10,6 +10,7 @@ Luminescent Dreams Tools is distributed in the hope that it will be useful, but
|
||||||
You should have received a copy of the GNU General Public License along with Lumeto. If not, see <https://www.gnu.org/licenses/>.
|
You should have received a copy of the GNU General Public License along with Lumeto. If not, see <https://www.gnu.org/licenses/>.
|
||||||
*/
|
*/
|
||||||
|
|
||||||
|
use date_time_tz::DateTimeTz;
|
||||||
use types::{Recordable, Timestamp};
|
use types::{Recordable, Timestamp};
|
||||||
|
|
||||||
/// This trait is used for constructing queries for searching the database.
|
/// This trait is used for constructing queries for searching the database.
|
||||||
|
|
|
@ -33,13 +33,19 @@ use std::{fmt, str::FromStr};
|
||||||
#[derive(Clone, Debug, PartialEq, Eq, PartialOrd, Ord)]
|
#[derive(Clone, Debug, PartialEq, Eq, PartialOrd, Ord)]
|
||||||
pub struct DateTimeTz(pub chrono::DateTime<chrono_tz::Tz>);
|
pub struct DateTimeTz(pub chrono::DateTime<chrono_tz::Tz>);
|
||||||
|
|
||||||
impl fmt::Display for DateTimeTz {
|
impl DateTimeTz {
|
||||||
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> Result<(), fmt::Error> {
|
pub fn map<F>(&self, f: F) -> DateTimeTz
|
||||||
|
where
|
||||||
|
F: FnOnce(chrono::DateTime<chrono_tz::Tz>) -> chrono::DateTime<chrono_tz::Tz>,
|
||||||
|
{
|
||||||
|
DateTimeTz(f(self.0))
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn to_string(&self) -> String {
|
||||||
if self.0.timezone() == UTC {
|
if self.0.timezone() == UTC {
|
||||||
write!(f, "{}", self.0.to_rfc3339_opts(SecondsFormat::Secs, true))
|
self.0.to_rfc3339_opts(SecondsFormat::Secs, true)
|
||||||
} else {
|
} else {
|
||||||
write!(
|
format!(
|
||||||
f,
|
|
||||||
"{} {}",
|
"{} {}",
|
||||||
self.0
|
self.0
|
||||||
.with_timezone(&chrono_tz::Etc::UTC)
|
.with_timezone(&chrono_tz::Etc::UTC)
|
||||||
|
@ -50,20 +56,11 @@ impl fmt::Display for DateTimeTz {
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
impl DateTimeTz {
|
|
||||||
pub fn map<F>(&self, f: F) -> DateTimeTz
|
|
||||||
where
|
|
||||||
F: FnOnce(chrono::DateTime<chrono_tz::Tz>) -> chrono::DateTime<chrono_tz::Tz>,
|
|
||||||
{
|
|
||||||
DateTimeTz(f(self.0))
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
impl std::str::FromStr for DateTimeTz {
|
impl std::str::FromStr for DateTimeTz {
|
||||||
type Err = chrono::ParseError;
|
type Err = chrono::ParseError;
|
||||||
|
|
||||||
fn from_str(s: &str) -> Result<Self, Self::Err> {
|
fn from_str(s: &str) -> Result<Self, Self::Err> {
|
||||||
let v: Vec<&str> = s.split_terminator(' ').collect();
|
let v: Vec<&str> = s.split_terminator(" ").collect();
|
||||||
if v.len() == 2 {
|
if v.len() == 2 {
|
||||||
let tz = v[1].parse::<chrono_tz::Tz>().unwrap();
|
let tz = v[1].parse::<chrono_tz::Tz>().unwrap();
|
||||||
chrono::DateTime::parse_from_rfc3339(v[0]).map(|ts| DateTimeTz(ts.with_timezone(&tz)))
|
chrono::DateTime::parse_from_rfc3339(v[0]).map(|ts| DateTimeTz(ts.with_timezone(&tz)))
|
||||||
|
@ -89,9 +86,9 @@ impl<'de> Visitor<'de> for DateTimeTzVisitor {
|
||||||
}
|
}
|
||||||
|
|
||||||
fn visit_str<E: de::Error>(self, s: &str) -> Result<Self::Value, E> {
|
fn visit_str<E: de::Error>(self, s: &str) -> Result<Self::Value, E> {
|
||||||
DateTimeTz::from_str(s).or(Err(E::custom(
|
DateTimeTz::from_str(s).or(Err(E::custom(format!(
|
||||||
"string is not a parsable datetime representation".to_owned(),
|
"string is not a parsable datetime representation"
|
||||||
)))
|
))))
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
@ -120,43 +117,28 @@ mod test {
|
||||||
|
|
||||||
#[test]
|
#[test]
|
||||||
fn it_creates_timestamp_with_z() {
|
fn it_creates_timestamp_with_z() {
|
||||||
let t = DateTimeTz(UTC.with_ymd_and_hms(2019, 5, 15, 12, 0, 0).unwrap());
|
let t = DateTimeTz(UTC.ymd(2019, 5, 15).and_hms(12, 0, 0));
|
||||||
assert_eq!(t.to_string(), "2019-05-15T12:00:00Z");
|
assert_eq!(t.to_string(), "2019-05-15T12:00:00Z");
|
||||||
}
|
}
|
||||||
|
|
||||||
#[test]
|
#[test]
|
||||||
fn it_parses_utc_rfc3339_z() {
|
fn it_parses_utc_rfc3339_z() {
|
||||||
let t = DateTimeTz::from_str("2019-05-15T12:00:00Z").unwrap();
|
let t = DateTimeTz::from_str("2019-05-15T12:00:00Z").unwrap();
|
||||||
assert_eq!(
|
assert_eq!(t, DateTimeTz(UTC.ymd(2019, 5, 15).and_hms(12, 0, 0)));
|
||||||
t,
|
|
||||||
DateTimeTz(UTC.with_ymd_and_hms(2019, 5, 15, 12, 0, 0).unwrap())
|
|
||||||
);
|
|
||||||
}
|
}
|
||||||
|
|
||||||
#[test]
|
#[test]
|
||||||
fn it_parses_rfc3339_with_offset() {
|
fn it_parses_rfc3339_with_offset() {
|
||||||
let t = DateTimeTz::from_str("2019-05-15T12:00:00-06:00").unwrap();
|
let t = DateTimeTz::from_str("2019-05-15T12:00:00-06:00").unwrap();
|
||||||
assert_eq!(
|
assert_eq!(t, DateTimeTz(UTC.ymd(2019, 5, 15).and_hms(18, 0, 0)));
|
||||||
t,
|
|
||||||
DateTimeTz(UTC.with_ymd_and_hms(2019, 5, 15, 18, 0, 0).unwrap())
|
|
||||||
);
|
|
||||||
}
|
}
|
||||||
|
|
||||||
#[test]
|
#[test]
|
||||||
fn it_parses_rfc3339_with_tz() {
|
fn it_parses_rfc3339_with_tz() {
|
||||||
let t = DateTimeTz::from_str("2019-06-15T19:00:00Z US/Arizona").unwrap();
|
let t = DateTimeTz::from_str("2019-06-15T19:00:00Z US/Arizona").unwrap();
|
||||||
assert_eq!(
|
assert_eq!(t, DateTimeTz(UTC.ymd(2019, 6, 15).and_hms(19, 0, 0)));
|
||||||
t,
|
assert_eq!(t, DateTimeTz(Arizona.ymd(2019, 6, 15).and_hms(12, 0, 0)));
|
||||||
DateTimeTz(UTC.with_ymd_and_hms(2019, 6, 15, 19, 0, 0).unwrap())
|
assert_eq!(t, DateTimeTz(Central.ymd(2019, 6, 15).and_hms(14, 0, 0)));
|
||||||
);
|
|
||||||
assert_eq!(
|
|
||||||
t,
|
|
||||||
DateTimeTz(Arizona.with_ymd_and_hms(2019, 6, 15, 12, 0, 0).unwrap())
|
|
||||||
);
|
|
||||||
assert_eq!(
|
|
||||||
t,
|
|
||||||
DateTimeTz(Central.with_ymd_and_hms(2019, 6, 15, 14, 0, 0).unwrap())
|
|
||||||
);
|
|
||||||
assert_eq!(t.to_string(), "2019-06-15T19:00:00Z US/Arizona");
|
assert_eq!(t.to_string(), "2019-06-15T19:00:00Z US/Arizona");
|
||||||
}
|
}
|
||||||
|
|
||||||
|
@ -190,9 +172,6 @@ mod test {
|
||||||
fn it_json_parses() {
|
fn it_json_parses() {
|
||||||
let t =
|
let t =
|
||||||
serde_json::from_str::<DateTimeTz>("\"2019-06-15T19:00:00Z America/Phoenix\"").unwrap();
|
serde_json::from_str::<DateTimeTz>("\"2019-06-15T19:00:00Z America/Phoenix\"").unwrap();
|
||||||
assert_eq!(
|
assert_eq!(t, DateTimeTz(Phoenix.ymd(2019, 6, 15).and_hms(12, 0, 0)));
|
||||||
t,
|
|
||||||
DateTimeTz(Phoenix.with_ymd_and_hms(2019, 6, 15, 12, 0, 0).unwrap())
|
|
||||||
);
|
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
|
@ -47,7 +47,7 @@ where
|
||||||
.read(true)
|
.read(true)
|
||||||
.append(true)
|
.append(true)
|
||||||
.create(true)
|
.create(true)
|
||||||
.open(path)
|
.open(&path)
|
||||||
.map_err(EmseriesReadError::IOError)?;
|
.map_err(EmseriesReadError::IOError)?;
|
||||||
|
|
||||||
let records = Series::load_file(&f)?;
|
let records = Series::load_file(&f)?;
|
||||||
|
@ -88,8 +88,8 @@ where
|
||||||
/// Put a new record into the database. A unique id will be assigned to the record and
|
/// Put a new record into the database. A unique id will be assigned to the record and
|
||||||
/// returned.
|
/// returned.
|
||||||
pub fn put(&mut self, entry: T) -> Result<UniqueId, EmseriesWriteError> {
|
pub fn put(&mut self, entry: T) -> Result<UniqueId, EmseriesWriteError> {
|
||||||
let uuid = UniqueId::default();
|
let uuid = UniqueId::new();
|
||||||
self.update(uuid.clone(), entry).map(|_| uuid)
|
self.update(uuid.clone(), entry).and_then(|_| Ok(uuid))
|
||||||
}
|
}
|
||||||
|
|
||||||
/// Update an existing record. The `UniqueId` of the record passed into this function must match
|
/// Update an existing record. The `UniqueId` of the record passed into this function must match
|
||||||
|
@ -138,7 +138,7 @@ where
|
||||||
}
|
}
|
||||||
|
|
||||||
/// Get all of the records in the database.
|
/// Get all of the records in the database.
|
||||||
pub fn records(&self) -> impl Iterator<Item = (&UniqueId, &T)> {
|
pub fn records<'s>(&'s self) -> impl Iterator<Item = (&'s UniqueId, &'s T)> + 's {
|
||||||
self.records.iter()
|
self.records.iter()
|
||||||
}
|
}
|
||||||
|
|
||||||
|
@ -166,7 +166,7 @@ where
|
||||||
|
|
||||||
/// Get an exact record from the database based on unique id.
|
/// Get an exact record from the database based on unique id.
|
||||||
pub fn get(&self, uuid: &UniqueId) -> Option<T> {
|
pub fn get(&self, uuid: &UniqueId) -> Option<T> {
|
||||||
self.records.get(uuid).cloned()
|
self.records.get(uuid).map(|v| v.clone())
|
||||||
}
|
}
|
||||||
|
|
||||||
/*
|
/*
|
||||||
|
|
|
@ -55,8 +55,8 @@ impl str::FromStr for Timestamp {
|
||||||
type Err = chrono::ParseError;
|
type Err = chrono::ParseError;
|
||||||
fn from_str(line: &str) -> Result<Self, Self::Err> {
|
fn from_str(line: &str) -> Result<Self, Self::Err> {
|
||||||
DateTimeTz::from_str(line)
|
DateTimeTz::from_str(line)
|
||||||
.map(Timestamp::DateTime)
|
.map(|dtz| Timestamp::DateTime(dtz))
|
||||||
.or(NaiveDate::from_str(line).map(Timestamp::Date))
|
.or(NaiveDate::from_str(line).map(|d| Timestamp::Date(d)))
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
@ -70,8 +70,8 @@ impl Ord for Timestamp {
|
||||||
fn cmp(&self, other: &Timestamp) -> Ordering {
|
fn cmp(&self, other: &Timestamp) -> Ordering {
|
||||||
match (self, other) {
|
match (self, other) {
|
||||||
(Timestamp::DateTime(dt1), Timestamp::DateTime(dt2)) => dt1.cmp(dt2),
|
(Timestamp::DateTime(dt1), Timestamp::DateTime(dt2)) => dt1.cmp(dt2),
|
||||||
(Timestamp::DateTime(dt1), Timestamp::Date(dt2)) => dt1.0.date_naive().cmp(dt2),
|
(Timestamp::DateTime(dt1), Timestamp::Date(dt2)) => dt1.0.date().naive_utc().cmp(&dt2),
|
||||||
(Timestamp::Date(dt1), Timestamp::DateTime(dt2)) => dt1.cmp(&dt2.0.date_naive()),
|
(Timestamp::Date(dt1), Timestamp::DateTime(dt2)) => dt1.cmp(&dt2.0.date().naive_utc()),
|
||||||
(Timestamp::Date(dt1), Timestamp::Date(dt2)) => dt1.cmp(dt2),
|
(Timestamp::Date(dt1), Timestamp::Date(dt2)) => dt1.cmp(dt2),
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
@ -105,9 +105,11 @@ pub trait Recordable {
|
||||||
#[derive(Clone, Debug, Eq, Hash, PartialEq, Deserialize, Serialize)]
|
#[derive(Clone, Debug, Eq, Hash, PartialEq, Deserialize, Serialize)]
|
||||||
pub struct UniqueId(Uuid);
|
pub struct UniqueId(Uuid);
|
||||||
|
|
||||||
impl Default for UniqueId {
|
impl UniqueId {
|
||||||
fn default() -> Self {
|
/// Create a new V4 UUID (this is the most common type in use these days).
|
||||||
Self(Uuid::new_v4())
|
pub fn new() -> UniqueId {
|
||||||
|
let id = Uuid::new_v4();
|
||||||
|
UniqueId(id)
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
@ -118,14 +120,14 @@ impl str::FromStr for UniqueId {
|
||||||
fn from_str(val: &str) -> Result<Self, Self::Err> {
|
fn from_str(val: &str) -> Result<Self, Self::Err> {
|
||||||
Uuid::parse_str(val)
|
Uuid::parse_str(val)
|
||||||
.map(UniqueId)
|
.map(UniqueId)
|
||||||
.map_err(EmseriesReadError::UUIDParseError)
|
.map_err(|err| EmseriesReadError::UUIDParseError(err))
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
impl fmt::Display for UniqueId {
|
impl fmt::Display for UniqueId {
|
||||||
/// Convert to a hyphenated string
|
/// Convert to a hyphenated string
|
||||||
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> Result<(), std::fmt::Error> {
|
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> Result<(), std::fmt::Error> {
|
||||||
write!(f, "{}", self.0.to_hyphenated())
|
write!(f, "{}", self.0.to_hyphenated().to_string())
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
@ -144,7 +146,7 @@ where
|
||||||
type Err = EmseriesReadError;
|
type Err = EmseriesReadError;
|
||||||
|
|
||||||
fn from_str(line: &str) -> Result<Self, Self::Err> {
|
fn from_str(line: &str) -> Result<Self, Self::Err> {
|
||||||
serde_json::from_str(line).map_err(EmseriesReadError::JSONParseError)
|
serde_json::from_str(&line).map_err(|err| EmseriesReadError::JSONParseError(err))
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
@ -182,9 +184,7 @@ mod test {
|
||||||
fn timestamp_parses_datetimetz_without_timezone() {
|
fn timestamp_parses_datetimetz_without_timezone() {
|
||||||
assert_eq!(
|
assert_eq!(
|
||||||
"2003-11-10T06:00:00Z".parse::<Timestamp>().unwrap(),
|
"2003-11-10T06:00:00Z".parse::<Timestamp>().unwrap(),
|
||||||
Timestamp::DateTime(DateTimeTz(
|
Timestamp::DateTime(DateTimeTz(UTC.ymd(2003, 11, 10).and_hms(6, 0, 0))),
|
||||||
UTC.with_ymd_and_hms(2003, 11, 10, 6, 0, 0).unwrap()
|
|
||||||
)),
|
|
||||||
);
|
);
|
||||||
}
|
}
|
||||||
|
|
||||||
|
@ -210,9 +210,7 @@ mod test {
|
||||||
assert_eq!(
|
assert_eq!(
|
||||||
rec.data,
|
rec.data,
|
||||||
Some(WeightRecord {
|
Some(WeightRecord {
|
||||||
date: Timestamp::DateTime(DateTimeTz(
|
date: Timestamp::DateTime(DateTimeTz(UTC.ymd(2003, 11, 10).and_hms(6, 0, 0))),
|
||||||
UTC.with_ymd_and_hms(2003, 11, 10, 6, 0, 0).unwrap()
|
|
||||||
)),
|
|
||||||
weight: Weight(77.79109 * KG),
|
weight: Weight(77.79109 * KG),
|
||||||
})
|
})
|
||||||
);
|
);
|
||||||
|
@ -221,9 +219,7 @@ mod test {
|
||||||
#[test]
|
#[test]
|
||||||
fn serialization_output() {
|
fn serialization_output() {
|
||||||
let rec = WeightRecord {
|
let rec = WeightRecord {
|
||||||
date: Timestamp::DateTime(DateTimeTz(
|
date: Timestamp::DateTime(DateTimeTz(UTC.ymd(2003, 11, 10).and_hms(6, 0, 0))),
|
||||||
UTC.with_ymd_and_hms(2003, 11, 10, 6, 0, 0).unwrap(),
|
|
||||||
)),
|
|
||||||
weight: Weight(77.0 * KG),
|
weight: Weight(77.0 * KG),
|
||||||
};
|
};
|
||||||
assert_eq!(
|
assert_eq!(
|
||||||
|
@ -232,12 +228,7 @@ mod test {
|
||||||
);
|
);
|
||||||
|
|
||||||
let rec2 = WeightRecord {
|
let rec2 = WeightRecord {
|
||||||
date: Timestamp::DateTime(
|
date: Timestamp::DateTime(Central.ymd(2003, 11, 10).and_hms(0, 0, 0).into()),
|
||||||
Central
|
|
||||||
.with_ymd_and_hms(2003, 11, 10, 0, 0, 0)
|
|
||||||
.unwrap()
|
|
||||||
.into(),
|
|
||||||
),
|
|
||||||
weight: Weight(77.0 * KG),
|
weight: Weight(77.0 * KG),
|
||||||
};
|
};
|
||||||
assert_eq!(
|
assert_eq!(
|
||||||
|
@ -248,28 +239,22 @@ mod test {
|
||||||
|
|
||||||
#[test]
|
#[test]
|
||||||
fn two_datetimes_can_be_compared() {
|
fn two_datetimes_can_be_compared() {
|
||||||
let time1 = Timestamp::DateTime(DateTimeTz(
|
let time1 = Timestamp::DateTime(DateTimeTz(UTC.ymd(2003, 11, 10).and_hms(6, 0, 0)));
|
||||||
UTC.with_ymd_and_hms(2003, 11, 10, 6, 0, 0).unwrap(),
|
let time2 = Timestamp::DateTime(DateTimeTz(UTC.ymd(2003, 11, 11).and_hms(6, 0, 0)));
|
||||||
));
|
|
||||||
let time2 = Timestamp::DateTime(DateTimeTz(
|
|
||||||
UTC.with_ymd_and_hms(2003, 11, 11, 6, 0, 0).unwrap(),
|
|
||||||
));
|
|
||||||
assert!(time1 < time2);
|
assert!(time1 < time2);
|
||||||
}
|
}
|
||||||
|
|
||||||
#[test]
|
#[test]
|
||||||
fn two_dates_can_be_compared() {
|
fn two_dates_can_be_compared() {
|
||||||
let time1 = Timestamp::Date(NaiveDate::from_ymd_opt(2003, 11, 10).unwrap());
|
let time1 = Timestamp::Date(NaiveDate::from_ymd(2003, 11, 10));
|
||||||
let time2 = Timestamp::Date(NaiveDate::from_ymd_opt(2003, 11, 11).unwrap());
|
let time2 = Timestamp::Date(NaiveDate::from_ymd(2003, 11, 11));
|
||||||
assert!(time1 < time2);
|
assert!(time1 < time2);
|
||||||
}
|
}
|
||||||
|
|
||||||
#[test]
|
#[test]
|
||||||
fn datetime_and_date_can_be_compared() {
|
fn datetime_and_date_can_be_compared() {
|
||||||
let time1 = Timestamp::DateTime(DateTimeTz(
|
let time1 = Timestamp::DateTime(DateTimeTz(UTC.ymd(2003, 11, 10).and_hms(6, 0, 0)));
|
||||||
UTC.with_ymd_and_hms(2003, 11, 10, 6, 0, 0).unwrap(),
|
let time2 = Timestamp::Date(NaiveDate::from_ymd(2003, 11, 11));
|
||||||
));
|
|
||||||
let time2 = Timestamp::Date(NaiveDate::from_ymd_opt(2003, 11, 11).unwrap());
|
|
||||||
assert!(time1 < time2)
|
assert!(time1 < time2)
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
|
@ -22,7 +22,7 @@ extern crate emseries;
|
||||||
mod test {
|
mod test {
|
||||||
use chrono::prelude::*;
|
use chrono::prelude::*;
|
||||||
use chrono_tz::Etc::UTC;
|
use chrono_tz::Etc::UTC;
|
||||||
use dimensioned::si::{Kilogram, Meter, Second, M, S};
|
use dimensioned::si::{Kilogram, Meter, Second, KG, M, S};
|
||||||
|
|
||||||
use emseries::*;
|
use emseries::*;
|
||||||
|
|
||||||
|
@ -52,31 +52,31 @@ mod test {
|
||||||
fn mk_trips() -> [BikeTrip; 5] {
|
fn mk_trips() -> [BikeTrip; 5] {
|
||||||
[
|
[
|
||||||
BikeTrip {
|
BikeTrip {
|
||||||
datetime: DateTimeTz(UTC.with_ymd_and_hms(2011, 10, 29, 0, 0, 0).unwrap()),
|
datetime: DateTimeTz(UTC.ymd(2011, 10, 29).and_hms(0, 0, 0)),
|
||||||
distance: Distance(58741.055 * M),
|
distance: Distance(58741.055 * M),
|
||||||
duration: Duration(11040.0 * S),
|
duration: Duration(11040.0 * S),
|
||||||
comments: String::from("long time ago"),
|
comments: String::from("long time ago"),
|
||||||
},
|
},
|
||||||
BikeTrip {
|
BikeTrip {
|
||||||
datetime: DateTimeTz(UTC.with_ymd_and_hms(2011, 10, 31, 0, 0, 0).unwrap()),
|
datetime: DateTimeTz(UTC.ymd(2011, 10, 31).and_hms(0, 0, 0)),
|
||||||
distance: Distance(17702.0 * M),
|
distance: Distance(17702.0 * M),
|
||||||
duration: Duration(2880.0 * S),
|
duration: Duration(2880.0 * S),
|
||||||
comments: String::from("day 2"),
|
comments: String::from("day 2"),
|
||||||
},
|
},
|
||||||
BikeTrip {
|
BikeTrip {
|
||||||
datetime: DateTimeTz(UTC.with_ymd_and_hms(2011, 11, 02, 0, 0, 0).unwrap()),
|
datetime: DateTimeTz(UTC.ymd(2011, 11, 02).and_hms(0, 0, 0)),
|
||||||
distance: Distance(41842.945 * M),
|
distance: Distance(41842.945 * M),
|
||||||
duration: Duration(7020.0 * S),
|
duration: Duration(7020.0 * S),
|
||||||
comments: String::from("Do Some Distance!"),
|
comments: String::from("Do Some Distance!"),
|
||||||
},
|
},
|
||||||
BikeTrip {
|
BikeTrip {
|
||||||
datetime: DateTimeTz(UTC.with_ymd_and_hms(2011, 11, 04, 0, 0, 0).unwrap()),
|
datetime: DateTimeTz(UTC.ymd(2011, 11, 04).and_hms(0, 0, 0)),
|
||||||
distance: Distance(34600.895 * M),
|
distance: Distance(34600.895 * M),
|
||||||
duration: Duration(5580.0 * S),
|
duration: Duration(5580.0 * S),
|
||||||
comments: String::from("I did a lot of distance back then"),
|
comments: String::from("I did a lot of distance back then"),
|
||||||
},
|
},
|
||||||
BikeTrip {
|
BikeTrip {
|
||||||
datetime: DateTimeTz(UTC.with_ymd_and_hms(2011, 11, 05, 0, 0, 0).unwrap()),
|
datetime: DateTimeTz(UTC.ymd(2011, 11, 05).and_hms(0, 0, 0)),
|
||||||
distance: Distance(6437.376 * M),
|
distance: Distance(6437.376 * M),
|
||||||
duration: Duration(960.0 * S),
|
duration: Duration(960.0 * S),
|
||||||
comments: String::from("day 5"),
|
comments: String::from("day 5"),
|
||||||
|
@ -122,7 +122,7 @@ mod test {
|
||||||
Some(tr) => {
|
Some(tr) => {
|
||||||
assert_eq!(
|
assert_eq!(
|
||||||
tr.timestamp(),
|
tr.timestamp(),
|
||||||
DateTimeTz(UTC.with_ymd_and_hms(2011, 10, 29, 0, 0, 0).unwrap()).into()
|
DateTimeTz(UTC.ymd(2011, 10, 29).and_hms(0, 0, 0)).into()
|
||||||
);
|
);
|
||||||
assert_eq!(tr.duration, Duration(11040.0 * S));
|
assert_eq!(tr.duration, Duration(11040.0 * S));
|
||||||
assert_eq!(tr.comments, String::from("long time ago"));
|
assert_eq!(tr.comments, String::from("long time ago"));
|
||||||
|
@ -145,7 +145,7 @@ mod test {
|
||||||
|
|
||||||
let v: Vec<(&UniqueId, &BikeTrip)> = ts
|
let v: Vec<(&UniqueId, &BikeTrip)> = ts
|
||||||
.search(exact_time(
|
.search(exact_time(
|
||||||
DateTimeTz(UTC.with_ymd_and_hms(2011, 10, 31, 0, 0, 0).unwrap()).into(),
|
DateTimeTz(UTC.ymd(2011, 10, 31).and_hms(0, 0, 0)).into(),
|
||||||
))
|
))
|
||||||
.collect();
|
.collect();
|
||||||
assert_eq!(v.len(), 1);
|
assert_eq!(v.len(), 1);
|
||||||
|
@ -166,9 +166,9 @@ mod test {
|
||||||
|
|
||||||
let v: Vec<(&UniqueId, &BikeTrip)> = ts.search_sorted(
|
let v: Vec<(&UniqueId, &BikeTrip)> = ts.search_sorted(
|
||||||
time_range(
|
time_range(
|
||||||
DateTimeTz(UTC.with_ymd_and_hms(2011, 10, 31, 0, 0, 0).unwrap()).into(),
|
DateTimeTz(UTC.ymd(2011, 10, 31).and_hms(0, 0, 0)).into(),
|
||||||
true,
|
true,
|
||||||
DateTimeTz(UTC.with_ymd_and_hms(2011, 11, 04, 0, 0, 0).unwrap()).into(),
|
DateTimeTz(UTC.ymd(2011, 11, 04).and_hms(0, 0, 0)).into(),
|
||||||
true,
|
true,
|
||||||
),
|
),
|
||||||
|l, r| l.1.timestamp().cmp(&r.1.timestamp()),
|
|l, r| l.1.timestamp().cmp(&r.1.timestamp()),
|
||||||
|
@ -199,9 +199,9 @@ mod test {
|
||||||
.expect("expect the time series to open correctly");
|
.expect("expect the time series to open correctly");
|
||||||
let v: Vec<(&UniqueId, &BikeTrip)> = ts.search_sorted(
|
let v: Vec<(&UniqueId, &BikeTrip)> = ts.search_sorted(
|
||||||
time_range(
|
time_range(
|
||||||
DateTimeTz(UTC.with_ymd_and_hms(2011, 10, 31, 0, 0, 0).unwrap()).into(),
|
DateTimeTz(UTC.ymd(2011, 10, 31).and_hms(0, 0, 0)).into(),
|
||||||
true,
|
true,
|
||||||
DateTimeTz(UTC.with_ymd_and_hms(2011, 11, 04, 0, 0, 0).unwrap()).into(),
|
DateTimeTz(UTC.ymd(2011, 11, 04).and_hms(0, 0, 0)).into(),
|
||||||
true,
|
true,
|
||||||
),
|
),
|
||||||
|l, r| l.1.timestamp().cmp(&r.1.timestamp()),
|
|l, r| l.1.timestamp().cmp(&r.1.timestamp()),
|
||||||
|
@ -233,9 +233,9 @@ mod test {
|
||||||
.expect("expect the time series to open correctly");
|
.expect("expect the time series to open correctly");
|
||||||
let v: Vec<(&UniqueId, &BikeTrip)> = ts.search_sorted(
|
let v: Vec<(&UniqueId, &BikeTrip)> = ts.search_sorted(
|
||||||
time_range(
|
time_range(
|
||||||
DateTimeTz(UTC.with_ymd_and_hms(2011, 10, 31, 0, 0, 0).unwrap()).into(),
|
DateTimeTz(UTC.ymd(2011, 10, 31).and_hms(0, 0, 0)).into(),
|
||||||
true,
|
true,
|
||||||
DateTimeTz(UTC.with_ymd_and_hms(2011, 11, 04, 0, 0, 0).unwrap()).into(),
|
DateTimeTz(UTC.ymd(2011, 11, 04).and_hms(0, 0, 0)).into(),
|
||||||
true,
|
true,
|
||||||
),
|
),
|
||||||
|l, r| l.1.timestamp().cmp(&r.1.timestamp()),
|
|l, r| l.1.timestamp().cmp(&r.1.timestamp()),
|
||||||
|
@ -252,9 +252,9 @@ mod test {
|
||||||
.expect("expect the time series to open correctly");
|
.expect("expect the time series to open correctly");
|
||||||
let v: Vec<(&UniqueId, &BikeTrip)> = ts.search_sorted(
|
let v: Vec<(&UniqueId, &BikeTrip)> = ts.search_sorted(
|
||||||
time_range(
|
time_range(
|
||||||
DateTimeTz(UTC.with_ymd_and_hms(2011, 10, 31, 0, 0, 0).unwrap()).into(),
|
DateTimeTz(UTC.ymd(2011, 10, 31).and_hms(0, 0, 0)).into(),
|
||||||
true,
|
true,
|
||||||
DateTimeTz(UTC.with_ymd_and_hms(2011, 11, 05, 0, 0, 0).unwrap()).into(),
|
DateTimeTz(UTC.ymd(2011, 11, 05).and_hms(0, 0, 0)).into(),
|
||||||
true,
|
true,
|
||||||
),
|
),
|
||||||
|l, r| l.1.timestamp().cmp(&r.1.timestamp()),
|
|l, r| l.1.timestamp().cmp(&r.1.timestamp()),
|
||||||
|
@ -294,7 +294,7 @@ mod test {
|
||||||
Some(trip) => {
|
Some(trip) => {
|
||||||
assert_eq!(
|
assert_eq!(
|
||||||
trip.datetime,
|
trip.datetime,
|
||||||
DateTimeTz(UTC.with_ymd_and_hms(2011, 11, 02, 0, 0, 0).unwrap())
|
DateTimeTz(UTC.ymd(2011, 11, 02).and_hms(0, 0, 0))
|
||||||
);
|
);
|
||||||
assert_eq!(trip.distance, Distance(50000.0 * M));
|
assert_eq!(trip.distance, Distance(50000.0 * M));
|
||||||
assert_eq!(trip.duration, Duration(7020.0 * S));
|
assert_eq!(trip.duration, Duration(7020.0 * S));
|
||||||
|
@ -335,13 +335,13 @@ mod test {
|
||||||
|
|
||||||
let trips: Vec<(&UniqueId, &BikeTrip)> = ts
|
let trips: Vec<(&UniqueId, &BikeTrip)> = ts
|
||||||
.search(exact_time(
|
.search(exact_time(
|
||||||
DateTimeTz(UTC.with_ymd_and_hms(2011, 11, 02, 0, 0, 0).unwrap()).into(),
|
DateTimeTz(UTC.ymd(2011, 11, 02).and_hms(0, 0, 0)).into(),
|
||||||
))
|
))
|
||||||
.collect();
|
.collect();
|
||||||
assert_eq!(trips.len(), 1);
|
assert_eq!(trips.len(), 1);
|
||||||
assert_eq!(
|
assert_eq!(
|
||||||
trips[0].1.datetime,
|
trips[0].1.datetime,
|
||||||
DateTimeTz(UTC.with_ymd_and_hms(2011, 11, 02, 0, 0, 0).unwrap())
|
DateTimeTz(UTC.ymd(2011, 11, 02).and_hms(0, 0, 0))
|
||||||
);
|
);
|
||||||
assert_eq!(trips[0].1.distance, Distance(50000.0 * M));
|
assert_eq!(trips[0].1.distance, Distance(50000.0 * M));
|
||||||
assert_eq!(trips[0].1.duration, Duration(7020.0 * S));
|
assert_eq!(trips[0].1.duration, Duration(7020.0 * S));
|
||||||
|
@ -361,6 +361,7 @@ mod test {
|
||||||
let trip_id = ts.put(trips[0].clone()).expect("expect a successful put");
|
let trip_id = ts.put(trips[0].clone()).expect("expect a successful put");
|
||||||
ts.put(trips[1].clone()).expect("expect a successful put");
|
ts.put(trips[1].clone()).expect("expect a successful put");
|
||||||
ts.put(trips[2].clone()).expect("expect a successful put");
|
ts.put(trips[2].clone()).expect("expect a successful put");
|
||||||
|
|
||||||
ts.delete(&trip_id).expect("successful delete");
|
ts.delete(&trip_id).expect("successful delete");
|
||||||
|
|
||||||
let recs: Vec<(&UniqueId, &BikeTrip)> = ts.records().collect();
|
let recs: Vec<(&UniqueId, &BikeTrip)> = ts.records().collect();
|
||||||
|
|
|
@ -1 +0,0 @@
|
||||||
fixtures
|
|
|
@ -1,2 +0,0 @@
|
||||||
fixtures
|
|
||||||
var
|
|
|
@ -1,51 +0,0 @@
|
||||||
[package]
|
|
||||||
name = "file-service"
|
|
||||||
version = "0.2.0"
|
|
||||||
authors = ["savanni@luminescent-dreams.com"]
|
|
||||||
edition = "2018"
|
|
||||||
|
|
||||||
# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html
|
|
||||||
|
|
||||||
[lib]
|
|
||||||
name = "file_service"
|
|
||||||
path = "src/lib.rs"
|
|
||||||
|
|
||||||
[[bin]]
|
|
||||||
name = "file-service"
|
|
||||||
path = "src/main.rs"
|
|
||||||
|
|
||||||
[[bin]]
|
|
||||||
name = "auth-cli"
|
|
||||||
path = "src/bin/cli.rs"
|
|
||||||
|
|
||||||
[target.auth-cli.dependencies]
|
|
||||||
|
|
||||||
[dependencies]
|
|
||||||
base64ct = { version = "1", features = [ "alloc" ] }
|
|
||||||
build_html = { version = "2" }
|
|
||||||
bytes = { version = "1" }
|
|
||||||
chrono = { version = "0.4", features = ["serde"] }
|
|
||||||
clap = { version = "4", features = [ "derive" ] }
|
|
||||||
cookie = { version = "0.17" }
|
|
||||||
futures-util = { version = "0.3" }
|
|
||||||
hex-string = "0.1.0"
|
|
||||||
http = { version = "0.2" }
|
|
||||||
image = "0.23.5"
|
|
||||||
logger = "*"
|
|
||||||
log = { version = "0.4" }
|
|
||||||
mime = "0.3.16"
|
|
||||||
mime_guess = "2.0.3"
|
|
||||||
pretty_env_logger = { version = "0.5" }
|
|
||||||
serde_json = "*"
|
|
||||||
serde = { version = "1.0", features = ["derive"] }
|
|
||||||
sha2 = "0.10"
|
|
||||||
sqlx = { version = "0.7", features = [ "runtime-tokio", "sqlite" ] }
|
|
||||||
thiserror = "1.0.20"
|
|
||||||
tokio = { version = "1", features = [ "full" ] }
|
|
||||||
uuid = { version = "0.4", features = [ "serde", "v4" ] }
|
|
||||||
warp = { version = "0.3" }
|
|
||||||
|
|
||||||
[dev-dependencies]
|
|
||||||
cool_asserts = { version = "2" }
|
|
||||||
tempdir = { version = "0.3" }
|
|
||||||
|
|
|
@ -1 +0,0 @@
|
||||||
[{"jti":"ac3a46c6-3fa1-4d0a-af12-e7d3fefdc878","aud":"savanni","exp":1621351436,"iss":"savanni","iat":1589729036,"sub":"https://savanni.luminescent-dreams.com/file-service/","perms":["admin"]}]
|
|
|
@ -1,13 +0,0 @@
|
||||||
#!/usr/bin/env bash
|
|
||||||
|
|
||||||
set -euo pipefail
|
|
||||||
|
|
||||||
VERSION=`cat Cargo.toml | grep "^version =" | sed -r 's/^version = "(.+)"$/\1/'`
|
|
||||||
|
|
||||||
mkdir -p dist
|
|
||||||
cp ../target/release/file-service dist
|
|
||||||
cp ../target/release/auth-cli dist
|
|
||||||
strip dist/file-service
|
|
||||||
strip dist/auth-cli
|
|
||||||
tar -czf file-service-${VERSION}.tgz dist/
|
|
||||||
|
|
Binary file not shown.
Before Width: | Height: | Size: 23 KiB |
|
@ -1,11 +0,0 @@
|
||||||
CREATE TABLE IF NOT EXISTS users (
|
|
||||||
id INTEGER PRIMARY KEY NOT NULL,
|
|
||||||
username TEXT NOT NULL,
|
|
||||||
token TEXT NOT NULL
|
|
||||||
);
|
|
||||||
|
|
||||||
CREATE TABLE IF NOT EXISTS sessions (
|
|
||||||
token TEXT NOT NULL,
|
|
||||||
user_id INTEGER,
|
|
||||||
FOREIGN KEY(user_id) REFERENCES users(id)
|
|
||||||
);
|
|
|
@ -1,40 +0,0 @@
|
||||||
use clap::{Parser, Subcommand};
|
|
||||||
use file_service::{AuthDB, Username};
|
|
||||||
use std::path::PathBuf;
|
|
||||||
|
|
||||||
#[derive(Subcommand, Debug)]
|
|
||||||
enum Commands {
|
|
||||||
AddUser { username: String },
|
|
||||||
DeleteUser { username: String },
|
|
||||||
ListUsers,
|
|
||||||
}
|
|
||||||
|
|
||||||
#[derive(Parser, Debug)]
|
|
||||||
struct Args {
|
|
||||||
#[command(subcommand)]
|
|
||||||
command: Commands,
|
|
||||||
}
|
|
||||||
|
|
||||||
#[tokio::main]
|
|
||||||
pub async fn main() {
|
|
||||||
let args = Args::parse();
|
|
||||||
let authdb = AuthDB::new(PathBuf::from(&std::env::var("AUTHDB").unwrap()))
|
|
||||||
.await
|
|
||||||
.expect("to be able to open the database");
|
|
||||||
|
|
||||||
match args.command {
|
|
||||||
Commands::AddUser { username } => {
|
|
||||||
match authdb.add_user(Username::from(username.clone())).await {
|
|
||||||
Ok(token) => {
|
|
||||||
println!("User {} created. Auth token: {}", username, *token);
|
|
||||||
}
|
|
||||||
Err(err) => {
|
|
||||||
println!("Could not create user {}", username);
|
|
||||||
println!("\tError: {:?}", err);
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
Commands::DeleteUser { .. } => {}
|
|
||||||
Commands::ListUsers => {}
|
|
||||||
}
|
|
||||||
}
|
|
|
@ -1,279 +0,0 @@
|
||||||
use build_html::Html;
|
|
||||||
use bytes::Buf;
|
|
||||||
use file_service::WriteFileError;
|
|
||||||
use futures_util::StreamExt;
|
|
||||||
use http::{Error, StatusCode};
|
|
||||||
use std::collections::HashMap;
|
|
||||||
use std::io::Read;
|
|
||||||
use warp::{filters::multipart::FormData, http::Response, multipart::Part};
|
|
||||||
|
|
||||||
use crate::{pages, App, AuthToken, FileId, FileInfo, ReadFileError, SessionToken};
|
|
||||||
|
|
||||||
const CSS: &str = include_str!("../templates/style.css");
|
|
||||||
|
|
||||||
pub async fn handle_index(
|
|
||||||
app: App,
|
|
||||||
token: Option<SessionToken>,
|
|
||||||
) -> Result<Response<String>, Error> {
|
|
||||||
match token {
|
|
||||||
Some(token) => match app.validate_session(token).await {
|
|
||||||
Ok(_) => render_gallery_page(app).await,
|
|
||||||
Err(err) => render_auth_page(Some(format!("session expired: {:?}", err))),
|
|
||||||
},
|
|
||||||
None => render_auth_page(None),
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
pub async fn handle_css() -> Result<Response<String>, Error> {
|
|
||||||
Response::builder()
|
|
||||||
.header("content-type", "text/css")
|
|
||||||
.status(StatusCode::OK)
|
|
||||||
.body(CSS.to_owned())
|
|
||||||
}
|
|
||||||
|
|
||||||
pub fn render_auth_page(message: Option<String>) -> Result<Response<String>, Error> {
|
|
||||||
Response::builder()
|
|
||||||
.status(StatusCode::OK)
|
|
||||||
.body(pages::auth(message).to_html_string())
|
|
||||||
}
|
|
||||||
|
|
||||||
pub async fn render_gallery_page(app: App) -> Result<Response<String>, Error> {
|
|
||||||
match app.list_files().await {
|
|
||||||
Ok(ids) => {
|
|
||||||
let mut files = vec![];
|
|
||||||
for id in ids.into_iter() {
|
|
||||||
let file = app.get_file(&id).await;
|
|
||||||
files.push(file);
|
|
||||||
}
|
|
||||||
Response::builder()
|
|
||||||
.header("content-type", "text/html")
|
|
||||||
.status(StatusCode::OK)
|
|
||||||
.body(pages::gallery(files).to_html_string())
|
|
||||||
}
|
|
||||||
Err(_) => Response::builder()
|
|
||||||
.header("content-type", "text/html")
|
|
||||||
.status(StatusCode::INTERNAL_SERVER_ERROR)
|
|
||||||
.body("".to_owned()),
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
pub async fn thumbnail(
|
|
||||||
app: App,
|
|
||||||
id: String,
|
|
||||||
old_etags: Option<String>,
|
|
||||||
) -> Result<Response<Vec<u8>>, Error> {
|
|
||||||
match app.get_file(&FileId::from(id)).await {
|
|
||||||
Ok(file) => serve_file(file.info.clone(), || file.thumbnail(), old_etags),
|
|
||||||
Err(_err) => Response::builder()
|
|
||||||
.status(StatusCode::NOT_FOUND)
|
|
||||||
.body(vec![]),
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
pub async fn file(
|
|
||||||
app: App,
|
|
||||||
id: String,
|
|
||||||
old_etags: Option<String>,
|
|
||||||
) -> Result<Response<Vec<u8>>, Error> {
|
|
||||||
match app.get_file(&FileId::from(id)).await {
|
|
||||||
Ok(file) => serve_file(file.info.clone(), || file.content(), old_etags),
|
|
||||||
Err(_err) => Response::builder()
|
|
||||||
.status(StatusCode::NOT_FOUND)
|
|
||||||
.body(vec![]),
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
pub async fn handle_auth(
|
|
||||||
app: App,
|
|
||||||
form: HashMap<String, String>,
|
|
||||||
) -> Result<http::Response<String>, Error> {
|
|
||||||
match form.get("password") {
|
|
||||||
Some(token) => match app.authenticate(AuthToken::from(token.clone())).await {
|
|
||||||
Ok(Some(session_token)) => Response::builder()
|
|
||||||
.header("location", "/")
|
|
||||||
.header(
|
|
||||||
"set-cookie",
|
|
||||||
format!(
|
|
||||||
"session={}; Secure; HttpOnly; SameSite=Strict",
|
|
||||||
*session_token
|
|
||||||
),
|
|
||||||
)
|
|
||||||
.status(StatusCode::SEE_OTHER)
|
|
||||||
.body("".to_owned()),
|
|
||||||
Ok(None) => render_auth_page(Some("no user found".to_owned())),
|
|
||||||
Err(_) => render_auth_page(Some("invalid auth token".to_owned())),
|
|
||||||
},
|
|
||||||
None => render_auth_page(Some("no token available".to_owned())),
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
pub async fn handle_upload(
|
|
||||||
app: App,
|
|
||||||
token: SessionToken,
|
|
||||||
form: FormData,
|
|
||||||
) -> Result<http::Response<String>, Error> {
|
|
||||||
match app.validate_session(token).await {
|
|
||||||
Ok(Some(_)) => match process_file_upload(app, form).await {
|
|
||||||
Ok(_) => Response::builder()
|
|
||||||
.header("location", "/")
|
|
||||||
.status(StatusCode::SEE_OTHER)
|
|
||||||
.body("".to_owned()),
|
|
||||||
Err(UploadError::FilenameMissing) => Response::builder()
|
|
||||||
.status(StatusCode::BAD_REQUEST)
|
|
||||||
.body("filename is required for all files".to_owned()),
|
|
||||||
Err(UploadError::WriteFileError(err)) => Response::builder()
|
|
||||||
.status(StatusCode::INTERNAL_SERVER_ERROR)
|
|
||||||
.body(format!("could not write to the file system: {:?}", err)),
|
|
||||||
Err(UploadError::WarpError(err)) => Response::builder()
|
|
||||||
.status(StatusCode::INTERNAL_SERVER_ERROR)
|
|
||||||
.body(format!("error with the app framework: {:?}", err)),
|
|
||||||
},
|
|
||||||
_ => Response::builder()
|
|
||||||
.status(StatusCode::UNAUTHORIZED)
|
|
||||||
.body("".to_owned()),
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
pub async fn handle_delete(
|
|
||||||
app: App,
|
|
||||||
token: SessionToken,
|
|
||||||
id: FileId,
|
|
||||||
) -> Result<http::Response<String>, Error> {
|
|
||||||
match app.validate_session(token).await {
|
|
||||||
Ok(Some(_)) => match app.delete_file(id).await {
|
|
||||||
Ok(_) => Response::builder()
|
|
||||||
.header("location", "/")
|
|
||||||
.status(StatusCode::SEE_OTHER)
|
|
||||||
.body("".to_owned()),
|
|
||||||
Err(_) => unimplemented!(),
|
|
||||||
},
|
|
||||||
_ => Response::builder()
|
|
||||||
.status(StatusCode::UNAUTHORIZED)
|
|
||||||
.body("".to_owned()),
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
fn serve_file<F>(
|
|
||||||
info: FileInfo,
|
|
||||||
file: F,
|
|
||||||
old_etags: Option<String>,
|
|
||||||
) -> http::Result<http::Response<Vec<u8>>>
|
|
||||||
where
|
|
||||||
F: FnOnce() -> Result<Vec<u8>, ReadFileError>,
|
|
||||||
{
|
|
||||||
match old_etags {
|
|
||||||
Some(old_etags) if old_etags != info.hash => Response::builder()
|
|
||||||
.header("content-type", info.file_type)
|
|
||||||
.status(StatusCode::NOT_MODIFIED)
|
|
||||||
.body(vec![]),
|
|
||||||
_ => match file() {
|
|
||||||
Ok(content) => Response::builder()
|
|
||||||
.header("content-type", info.file_type)
|
|
||||||
.header("etag", info.hash)
|
|
||||||
.status(StatusCode::OK)
|
|
||||||
.body(content),
|
|
||||||
Err(_) => Response::builder()
|
|
||||||
.status(StatusCode::INTERNAL_SERVER_ERROR)
|
|
||||||
.body(vec![]),
|
|
||||||
},
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
async fn collect_multipart(
|
|
||||||
mut stream: warp::filters::multipart::FormData,
|
|
||||||
) -> Result<Vec<(Option<String>, Option<String>, Vec<u8>)>, warp::Error> {
|
|
||||||
let mut content: Vec<(Option<String>, Option<String>, Vec<u8>)> = Vec::new();
|
|
||||||
|
|
||||||
while let Some(part) = stream.next().await {
|
|
||||||
match part {
|
|
||||||
Ok(part) => content.push(collect_content(part).await.unwrap()),
|
|
||||||
Err(err) => return Err(err),
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
Ok(content)
|
|
||||||
}
|
|
||||||
|
|
||||||
async fn collect_content(
|
|
||||||
mut part: Part,
|
|
||||||
) -> Result<(Option<String>, Option<String>, Vec<u8>), String> {
|
|
||||||
let mut content: Vec<u8> = Vec::new();
|
|
||||||
|
|
||||||
while let Some(Ok(data)) = part.data().await {
|
|
||||||
let mut reader = data.reader();
|
|
||||||
reader.read_to_end(&mut content).unwrap();
|
|
||||||
}
|
|
||||||
|
|
||||||
Ok((
|
|
||||||
part.content_type().map(|s| s.to_owned()),
|
|
||||||
part.filename().map(|s| s.to_owned()),
|
|
||||||
content,
|
|
||||||
))
|
|
||||||
}
|
|
||||||
|
|
||||||
/*
|
|
||||||
async fn handle_upload(
|
|
||||||
form: warp::filters::multipart::FormData,
|
|
||||||
app: App,
|
|
||||||
) -> warp::http::Result<warp::http::Response<String>> {
|
|
||||||
let files = collect_multipart(form).await;
|
|
||||||
match files {
|
|
||||||
Ok(files) => {
|
|
||||||
for (_, filename, content) in files {
|
|
||||||
match filename {
|
|
||||||
Some(filename) => {
|
|
||||||
app.add_file(filename, content).unwrap();
|
|
||||||
}
|
|
||||||
None => {
|
|
||||||
return warp::http::Response::builder()
|
|
||||||
.status(StatusCode::BAD_REQUEST)
|
|
||||||
.body("".to_owned())
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
Err(_err) => {
|
|
||||||
return warp::http::Response::builder()
|
|
||||||
.status(StatusCode::BAD_REQUEST)
|
|
||||||
.body("".to_owned())
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
// println!("file length: {:?}", files.map(|f| f.len()));
|
|
||||||
warp::http::Response::builder()
|
|
||||||
.header("location", "/")
|
|
||||||
.status(StatusCode::SEE_OTHER)
|
|
||||||
.body("".to_owned())
|
|
||||||
}
|
|
||||||
*/
|
|
||||||
|
|
||||||
enum UploadError {
|
|
||||||
FilenameMissing,
|
|
||||||
WriteFileError(WriteFileError),
|
|
||||||
WarpError(warp::Error),
|
|
||||||
}
|
|
||||||
|
|
||||||
impl From<WriteFileError> for UploadError {
|
|
||||||
fn from(err: WriteFileError) -> Self {
|
|
||||||
Self::WriteFileError(err)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
impl From<warp::Error> for UploadError {
|
|
||||||
fn from(err: warp::Error) -> Self {
|
|
||||||
Self::WarpError(err)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
async fn process_file_upload(app: App, form: FormData) -> Result<(), UploadError> {
|
|
||||||
let files = collect_multipart(form).await?;
|
|
||||||
for (_, filename, content) in files {
|
|
||||||
match filename {
|
|
||||||
Some(filename) => {
|
|
||||||
app.add_file(filename, content).await?;
|
|
||||||
}
|
|
||||||
None => return Err(UploadError::FilenameMissing),
|
|
||||||
}
|
|
||||||
}
|
|
||||||
Ok(())
|
|
||||||
}
|
|
|
@ -1,208 +0,0 @@
|
||||||
use build_html::{self, Html, HtmlContainer};
|
|
||||||
|
|
||||||
#[derive(Clone, Debug, Default)]
|
|
||||||
pub struct Attributes(Vec<(String, String)>);
|
|
||||||
|
|
||||||
/*
|
|
||||||
impl FromIterator<(String, String)> for Attributes {
|
|
||||||
fn from_iter<T>(iter: T) -> Self
|
|
||||||
where
|
|
||||||
T: IntoIterator<Item = (String, String)>,
|
|
||||||
{
|
|
||||||
Attributes(iter.collect::<Vec<(String, String)>>())
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
impl FromIterator<(&str, &str)> for Attributes {
|
|
||||||
fn from_iter<T>(iter: T) -> Self
|
|
||||||
where
|
|
||||||
T: IntoIterator<Item = (&str, &str)>,
|
|
||||||
{
|
|
||||||
unimplemented!()
|
|
||||||
}
|
|
||||||
}
|
|
||||||
*/
|
|
||||||
|
|
||||||
impl ToString for Attributes {
|
|
||||||
fn to_string(&self) -> String {
|
|
||||||
self.0
|
|
||||||
.iter()
|
|
||||||
.map(|(key, value)| format!("{}=\"{}\"", key, value))
|
|
||||||
.collect::<Vec<String>>()
|
|
||||||
.join(" ")
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
#[derive(Clone, Debug)]
|
|
||||||
pub struct Form {
|
|
||||||
path: String,
|
|
||||||
method: String,
|
|
||||||
encoding: Option<String>,
|
|
||||||
elements: String,
|
|
||||||
}
|
|
||||||
|
|
||||||
impl Form {
|
|
||||||
pub fn new() -> Self {
|
|
||||||
Self {
|
|
||||||
path: "/".to_owned(),
|
|
||||||
method: "get".to_owned(),
|
|
||||||
encoding: None,
|
|
||||||
elements: "".to_owned(),
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
pub fn with_path(mut self, path: &str) -> Self {
|
|
||||||
self.path = path.to_owned();
|
|
||||||
self
|
|
||||||
}
|
|
||||||
|
|
||||||
pub fn with_method(mut self, method: &str) -> Self {
|
|
||||||
self.method = method.to_owned();
|
|
||||||
self
|
|
||||||
}
|
|
||||||
|
|
||||||
pub fn with_encoding(mut self, encoding: &str) -> Self {
|
|
||||||
self.encoding = Some(encoding.to_owned());
|
|
||||||
self
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
impl Html for Form {
|
|
||||||
fn to_html_string(&self) -> String {
|
|
||||||
let encoding = match self.encoding {
|
|
||||||
Some(ref encoding) => format!("enctype=\"{encoding}\"", encoding = encoding),
|
|
||||||
None => "".to_owned(),
|
|
||||||
};
|
|
||||||
format!(
|
|
||||||
"<form action=\"{path}\" method=\"{method}\" {encoding}>\n{elements}\n</form>\n",
|
|
||||||
path = self.path,
|
|
||||||
method = self.method,
|
|
||||||
encoding = encoding,
|
|
||||||
elements = self.elements.to_html_string(),
|
|
||||||
)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
impl HtmlContainer for Form {
|
|
||||||
fn add_html<H: Html>(&mut self, html: H) {
|
|
||||||
self.elements.push_str(&html.to_html_string());
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
#[derive(Clone, Debug)]
|
|
||||||
pub struct Input {
|
|
||||||
ty: String,
|
|
||||||
name: String,
|
|
||||||
id: Option<String>,
|
|
||||||
value: Option<String>,
|
|
||||||
attributes: Attributes,
|
|
||||||
}
|
|
||||||
|
|
||||||
impl Html for Input {
|
|
||||||
fn to_html_string(&self) -> String {
|
|
||||||
let id = match self.id {
|
|
||||||
Some(ref id) => format!("id=\"{}\"", id),
|
|
||||||
None => "".to_owned(),
|
|
||||||
};
|
|
||||||
let value = match self.value {
|
|
||||||
Some(ref value) => format!("value=\"{}\"", value),
|
|
||||||
None => "".to_owned(),
|
|
||||||
};
|
|
||||||
let attrs = self.attributes.to_string();
|
|
||||||
|
|
||||||
format!(
|
|
||||||
"<input type=\"{ty}\" name=\"{name}\" {id} {value} {attrs} />\n",
|
|
||||||
ty = self.ty,
|
|
||||||
name = self.name,
|
|
||||||
id = id,
|
|
||||||
value = value,
|
|
||||||
attrs = attrs,
|
|
||||||
)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
impl Input {
|
|
||||||
pub fn new(ty: &str, name: &str) -> Self {
|
|
||||||
Self {
|
|
||||||
ty: ty.to_owned(),
|
|
||||||
name: name.to_owned(),
|
|
||||||
id: None,
|
|
||||||
value: None,
|
|
||||||
attributes: Attributes::default(),
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
pub fn with_id(mut self, val: &str) -> Self {
|
|
||||||
self.id = Some(val.to_owned());
|
|
||||||
self
|
|
||||||
}
|
|
||||||
|
|
||||||
pub fn with_attributes<'a>(
|
|
||||||
mut self,
|
|
||||||
values: impl IntoIterator<Item = (&'a str, &'a str)>,
|
|
||||||
) -> Self {
|
|
||||||
self.attributes = Attributes(
|
|
||||||
values
|
|
||||||
.into_iter()
|
|
||||||
.map(|(a, b)| (a.to_owned(), b.to_owned()))
|
|
||||||
.collect::<Vec<(String, String)>>(),
|
|
||||||
);
|
|
||||||
self
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
#[derive(Clone, Debug)]
|
|
||||||
pub struct Button {
|
|
||||||
ty: Option<String>,
|
|
||||||
name: Option<String>,
|
|
||||||
label: String,
|
|
||||||
attributes: Attributes,
|
|
||||||
}
|
|
||||||
|
|
||||||
impl Button {
|
|
||||||
pub fn new(label: &str) -> Self {
|
|
||||||
Self {
|
|
||||||
ty: None,
|
|
||||||
name: None,
|
|
||||||
label: label.to_owned(),
|
|
||||||
attributes: Attributes::default(),
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
pub fn with_type(mut self, ty: &str) -> Self {
|
|
||||||
self.ty = Some(ty.to_owned());
|
|
||||||
self
|
|
||||||
}
|
|
||||||
|
|
||||||
pub fn with_attributes<'a>(
|
|
||||||
mut self,
|
|
||||||
values: impl IntoIterator<Item = (&'a str, &'a str)>,
|
|
||||||
) -> Self {
|
|
||||||
self.attributes = Attributes(
|
|
||||||
values
|
|
||||||
.into_iter()
|
|
||||||
.map(|(a, b)| (a.to_owned(), b.to_owned()))
|
|
||||||
.collect::<Vec<(String, String)>>(),
|
|
||||||
);
|
|
||||||
self
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
impl Html for Button {
|
|
||||||
fn to_html_string(&self) -> String {
|
|
||||||
let ty = match self.ty {
|
|
||||||
Some(ref ty) => format!("type={}", ty),
|
|
||||||
None => "".to_owned(),
|
|
||||||
};
|
|
||||||
let name = match self.name {
|
|
||||||
Some(ref name) => format!("name={}", name),
|
|
||||||
None => "".to_owned(),
|
|
||||||
};
|
|
||||||
format!(
|
|
||||||
"<button {ty} {name} {attrs}>{label}</button>",
|
|
||||||
name = name,
|
|
||||||
label = self.label,
|
|
||||||
attrs = self.attributes.to_string()
|
|
||||||
)
|
|
||||||
}
|
|
||||||
}
|
|
|
@ -1,6 +0,0 @@
|
||||||
mod store;
|
|
||||||
|
|
||||||
pub use store::{
|
|
||||||
AuthDB, AuthError, AuthToken, DeleteFileError, FileHandle, FileId, FileInfo, ReadFileError,
|
|
||||||
SessionToken, Store, Username, WriteFileError,
|
|
||||||
};
|
|
|
@ -1,173 +0,0 @@
|
||||||
extern crate log;
|
|
||||||
|
|
||||||
use cookie::Cookie;
|
|
||||||
use handlers::{file, handle_auth, handle_css, handle_delete, handle_upload, thumbnail};
|
|
||||||
use std::{
|
|
||||||
collections::{HashMap, HashSet},
|
|
||||||
convert::Infallible,
|
|
||||||
net::{IpAddr, Ipv4Addr, SocketAddr},
|
|
||||||
path::PathBuf,
|
|
||||||
sync::Arc,
|
|
||||||
};
|
|
||||||
use tokio::sync::RwLock;
|
|
||||||
use warp::{Filter, Rejection};
|
|
||||||
|
|
||||||
mod handlers;
|
|
||||||
mod html;
|
|
||||||
mod pages;
|
|
||||||
|
|
||||||
const MAX_UPLOAD: u64 = 15 * 1024 * 1024;
|
|
||||||
|
|
||||||
pub use file_service::{
|
|
||||||
AuthDB, AuthError, AuthToken, DeleteFileError, FileHandle, FileId, FileInfo, ReadFileError,
|
|
||||||
SessionToken, Store, Username, WriteFileError,
|
|
||||||
};
|
|
||||||
pub use handlers::handle_index;
|
|
||||||
|
|
||||||
#[derive(Clone)]
|
|
||||||
pub struct App {
|
|
||||||
authdb: Arc<RwLock<AuthDB>>,
|
|
||||||
store: Arc<RwLock<Store>>,
|
|
||||||
}
|
|
||||||
|
|
||||||
impl App {
|
|
||||||
pub fn new(authdb: AuthDB, store: Store) -> Self {
|
|
||||||
Self {
|
|
||||||
authdb: Arc::new(RwLock::new(authdb)),
|
|
||||||
store: Arc::new(RwLock::new(store)),
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
pub async fn authenticate(&self, token: AuthToken) -> Result<Option<SessionToken>, AuthError> {
|
|
||||||
self.authdb.read().await.authenticate(token).await
|
|
||||||
}
|
|
||||||
|
|
||||||
pub async fn validate_session(
|
|
||||||
&self,
|
|
||||||
token: SessionToken,
|
|
||||||
) -> Result<Option<Username>, AuthError> {
|
|
||||||
self.authdb.read().await.validate_session(token).await
|
|
||||||
}
|
|
||||||
|
|
||||||
pub async fn list_files(&self) -> Result<HashSet<FileId>, ReadFileError> {
|
|
||||||
self.store.read().await.list_files()
|
|
||||||
}
|
|
||||||
|
|
||||||
pub async fn get_file(&self, id: &FileId) -> Result<FileHandle, ReadFileError> {
|
|
||||||
self.store.read().await.get_file(id)
|
|
||||||
}
|
|
||||||
|
|
||||||
pub async fn add_file(
|
|
||||||
&self,
|
|
||||||
filename: String,
|
|
||||||
content: Vec<u8>,
|
|
||||||
) -> Result<FileHandle, WriteFileError> {
|
|
||||||
self.store.write().await.add_file(filename, content)
|
|
||||||
}
|
|
||||||
|
|
||||||
pub async fn delete_file(&self, id: FileId) -> Result<(), DeleteFileError> {
|
|
||||||
self.store.write().await.delete_file(&id)?;
|
|
||||||
Ok(())
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
fn with_app(app: App) -> impl Filter<Extract = (App,), Error = Infallible> + Clone {
|
|
||||||
warp::any().map(move || app.clone())
|
|
||||||
}
|
|
||||||
|
|
||||||
fn parse_cookies(cookie_str: &str) -> Result<HashMap<String, String>, cookie::ParseError> {
|
|
||||||
Cookie::split_parse(cookie_str)
|
|
||||||
.map(|c| c.map(|c| (c.name().to_owned(), c.value().to_owned())))
|
|
||||||
.collect::<Result<HashMap<String, String>, cookie::ParseError>>()
|
|
||||||
}
|
|
||||||
|
|
||||||
fn get_session_token(cookies: HashMap<String, String>) -> Option<SessionToken> {
|
|
||||||
cookies.get("session").cloned().map(SessionToken::from)
|
|
||||||
}
|
|
||||||
|
|
||||||
fn maybe_with_session() -> impl Filter<Extract = (Option<SessionToken>,), Error = Rejection> + Copy
|
|
||||||
{
|
|
||||||
warp::any()
|
|
||||||
.and(warp::header::optional::<String>("cookie"))
|
|
||||||
.map(|cookie_str: Option<String>| match cookie_str {
|
|
||||||
Some(cookie_str) => parse_cookies(&cookie_str).ok().and_then(get_session_token),
|
|
||||||
None => None,
|
|
||||||
})
|
|
||||||
}
|
|
||||||
|
|
||||||
fn with_session() -> impl Filter<Extract = (SessionToken,), Error = Rejection> + Copy {
|
|
||||||
warp::any()
|
|
||||||
.and(warp::header::<String>("cookie"))
|
|
||||||
.and_then(|cookie_str: String| async move {
|
|
||||||
match parse_cookies(&cookie_str).ok().and_then(get_session_token) {
|
|
||||||
Some(session_token) => Ok(session_token),
|
|
||||||
None => Err(warp::reject()),
|
|
||||||
}
|
|
||||||
})
|
|
||||||
}
|
|
||||||
|
|
||||||
#[tokio::main]
|
|
||||||
pub async fn main() {
|
|
||||||
pretty_env_logger::init();
|
|
||||||
|
|
||||||
let authdb = AuthDB::new(PathBuf::from(&std::env::var("AUTHDB").unwrap()))
|
|
||||||
.await
|
|
||||||
.unwrap();
|
|
||||||
let store = Store::new(PathBuf::from(&std::env::var("FILE_SHARE_DIR").unwrap()));
|
|
||||||
|
|
||||||
let app = App::new(authdb, store);
|
|
||||||
|
|
||||||
let log = warp::log("file_service");
|
|
||||||
let root = warp::path!()
|
|
||||||
.and(warp::get())
|
|
||||||
.and(with_app(app.clone()))
|
|
||||||
.and(maybe_with_session())
|
|
||||||
.then(handle_index);
|
|
||||||
|
|
||||||
let styles = warp::path!("css").and(warp::get()).then(handle_css);
|
|
||||||
|
|
||||||
let auth = warp::path!("auth")
|
|
||||||
.and(warp::post())
|
|
||||||
.and(with_app(app.clone()))
|
|
||||||
.and(warp::filters::body::form())
|
|
||||||
.then(handle_auth);
|
|
||||||
|
|
||||||
let upload_via_form = warp::path!("upload")
|
|
||||||
.and(warp::post())
|
|
||||||
.and(with_app(app.clone()))
|
|
||||||
.and(with_session())
|
|
||||||
.and(warp::multipart::form().max_length(MAX_UPLOAD))
|
|
||||||
.then(handle_upload);
|
|
||||||
|
|
||||||
let delete_via_form = warp::path!("delete" / String)
|
|
||||||
.and(warp::post())
|
|
||||||
.and(with_app(app.clone()))
|
|
||||||
.and(with_session())
|
|
||||||
.then(|id, app, token| handle_delete(app, token, FileId::from(id)));
|
|
||||||
|
|
||||||
let thumbnail = warp::path!(String / "tn")
|
|
||||||
.and(warp::get())
|
|
||||||
.and(warp::header::optional::<String>("if-none-match"))
|
|
||||||
.and(with_app(app.clone()))
|
|
||||||
.then(move |id, old_etags, app: App| thumbnail(app, id, old_etags));
|
|
||||||
|
|
||||||
let file = warp::path!(String)
|
|
||||||
.and(warp::get())
|
|
||||||
.and(warp::header::optional::<String>("if-none-match"))
|
|
||||||
.and(with_app(app.clone()))
|
|
||||||
.then(move |id, old_etags, app: App| file(app, id, old_etags));
|
|
||||||
|
|
||||||
let server = warp::serve(
|
|
||||||
root.or(styles)
|
|
||||||
.or(auth)
|
|
||||||
.or(upload_via_form)
|
|
||||||
.or(delete_via_form)
|
|
||||||
.or(thumbnail)
|
|
||||||
.or(file)
|
|
||||||
.with(log),
|
|
||||||
);
|
|
||||||
|
|
||||||
server
|
|
||||||
.run(SocketAddr::new(IpAddr::V4(Ipv4Addr::new(0, 0, 0, 0)), 8002))
|
|
||||||
.await;
|
|
||||||
}
|
|
|
@ -1,114 +0,0 @@
|
||||||
use crate::html::*;
|
|
||||||
use build_html::{self, Container, ContainerType, Html, HtmlContainer};
|
|
||||||
use file_service::{FileHandle, FileInfo, ReadFileError};
|
|
||||||
|
|
||||||
pub fn auth(_message: Option<String>) -> build_html::HtmlPage {
|
|
||||||
build_html::HtmlPage::new()
|
|
||||||
.with_title("Sign In")
|
|
||||||
.with_stylesheet("/css")
|
|
||||||
.with_container(
|
|
||||||
Container::new(ContainerType::Div)
|
|
||||||
.with_attributes([("class", "authentication-page")])
|
|
||||||
.with_container(auth_form()),
|
|
||||||
)
|
|
||||||
}
|
|
||||||
|
|
||||||
fn auth_form() -> Container {
|
|
||||||
Container::default()
|
|
||||||
.with_attributes([("class", "card authentication-form")])
|
|
||||||
.with_html(
|
|
||||||
Form::new()
|
|
||||||
.with_path("/auth")
|
|
||||||
.with_method("post")
|
|
||||||
.with_container(
|
|
||||||
Container::new(ContainerType::Div)
|
|
||||||
.with_html(
|
|
||||||
Input::new("password", "password")
|
|
||||||
.with_id("for-token-input")
|
|
||||||
.with_attributes([
|
|
||||||
("size", "50"),
|
|
||||||
("class", "authentication-form__input"),
|
|
||||||
]),
|
|
||||||
)
|
|
||||||
.with_html(
|
|
||||||
Button::new("Sign In")
|
|
||||||
.with_attributes([("class", "authentication-form__button")]),
|
|
||||||
),
|
|
||||||
),
|
|
||||||
)
|
|
||||||
}
|
|
||||||
|
|
||||||
pub fn gallery(handles: Vec<Result<FileHandle, ReadFileError>>) -> build_html::HtmlPage {
|
|
||||||
let mut page = build_html::HtmlPage::new()
|
|
||||||
.with_title("Gallery")
|
|
||||||
.with_stylesheet("/css")
|
|
||||||
.with_container(
|
|
||||||
Container::new(ContainerType::Div)
|
|
||||||
.with_attributes([("class", "gallery-page")])
|
|
||||||
.with_header(1, "Gallery")
|
|
||||||
.with_html(upload_form()),
|
|
||||||
);
|
|
||||||
|
|
||||||
let mut gallery = Container::new(ContainerType::Div).with_attributes([("class", "gallery")]);
|
|
||||||
for handle in handles {
|
|
||||||
let container = match handle {
|
|
||||||
Ok(ref handle) => thumbnail(&handle.info),
|
|
||||||
Err(err) => Container::new(ContainerType::Div)
|
|
||||||
.with_attributes(vec![("class", "file")])
|
|
||||||
.with_paragraph(format!("{:?}", err)),
|
|
||||||
};
|
|
||||||
gallery.add_container(container);
|
|
||||||
}
|
|
||||||
page.add_container(gallery);
|
|
||||||
page
|
|
||||||
}
|
|
||||||
|
|
||||||
pub fn upload_form() -> Form {
|
|
||||||
Form::new()
|
|
||||||
.with_path("/upload")
|
|
||||||
.with_method("post")
|
|
||||||
.with_encoding("multipart/form-data")
|
|
||||||
.with_container(
|
|
||||||
Container::new(ContainerType::Div)
|
|
||||||
.with_attributes([("class", "card upload-form")])
|
|
||||||
.with_html(Input::new("file", "file").with_attributes([
|
|
||||||
("id", "for-selector-input"),
|
|
||||||
("placeholder", "select file"),
|
|
||||||
("class", "upload-form__selector"),
|
|
||||||
]))
|
|
||||||
.with_html(
|
|
||||||
Button::new("Upload file")
|
|
||||||
.with_attributes([("class", "upload-form__button")])
|
|
||||||
.with_type("submit"),
|
|
||||||
),
|
|
||||||
)
|
|
||||||
}
|
|
||||||
|
|
||||||
pub fn thumbnail(info: &FileInfo) -> Container {
|
|
||||||
Container::new(ContainerType::Div)
|
|
||||||
.with_attributes(vec![("class", "card thumbnail")])
|
|
||||||
.with_html(
|
|
||||||
Container::new(ContainerType::Div).with_link(
|
|
||||||
format!("/{}", *info.id),
|
|
||||||
Container::default()
|
|
||||||
.with_attributes([("class", "thumbnail")])
|
|
||||||
.with_image(format!("{}/tn", *info.id), "test data")
|
|
||||||
.to_html_string(),
|
|
||||||
),
|
|
||||||
)
|
|
||||||
.with_html(
|
|
||||||
Container::new(ContainerType::Div)
|
|
||||||
.with_html(
|
|
||||||
Container::new(ContainerType::UnorderedList)
|
|
||||||
.with_attributes(vec![("class", "thumbnail__metadata")])
|
|
||||||
.with_html(info.name.clone())
|
|
||||||
.with_html(format!("{}", info.created.format("%Y-%m-%d"))),
|
|
||||||
)
|
|
||||||
.with_html(
|
|
||||||
Form::new()
|
|
||||||
.with_path(&format!("/delete/{}", *info.id))
|
|
||||||
.with_method("post")
|
|
||||||
.with_html(Button::new("Delete")),
|
|
||||||
),
|
|
||||||
)
|
|
||||||
}
|
|
|
@ -1,299 +0,0 @@
|
||||||
use super::{fileinfo::FileInfo, FileId, ReadFileError, WriteFileError};
|
|
||||||
use chrono::prelude::*;
|
|
||||||
use hex_string::HexString;
|
|
||||||
use image::imageops::FilterType;
|
|
||||||
use sha2::{Digest, Sha256};
|
|
||||||
use std::{
|
|
||||||
convert::TryFrom,
|
|
||||||
io::{Read, Write},
|
|
||||||
path::{Path, PathBuf},
|
|
||||||
};
|
|
||||||
use thiserror::Error;
|
|
||||||
use uuid::Uuid;
|
|
||||||
|
|
||||||
#[derive(Debug, Error)]
|
|
||||||
pub enum PathError {
|
|
||||||
#[error("path cannot be derived from input")]
|
|
||||||
InvalidPath,
|
|
||||||
}
|
|
||||||
|
|
||||||
#[derive(Clone, Debug)]
|
|
||||||
pub struct PathResolver {
|
|
||||||
base: PathBuf,
|
|
||||||
id: FileId,
|
|
||||||
extension: String,
|
|
||||||
}
|
|
||||||
|
|
||||||
impl PathResolver {
|
|
||||||
pub fn new(base: &Path, id: FileId, extension: String) -> Self {
|
|
||||||
Self {
|
|
||||||
base: base.to_owned(),
|
|
||||||
id,
|
|
||||||
extension,
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
pub fn metadata_path_by_id(base: &Path, id: FileId) -> PathBuf {
|
|
||||||
let mut path = base.to_path_buf();
|
|
||||||
path.push(PathBuf::from(id.clone()));
|
|
||||||
path.set_extension("json");
|
|
||||||
path
|
|
||||||
}
|
|
||||||
|
|
||||||
pub fn id(&self) -> FileId {
|
|
||||||
self.id.clone()
|
|
||||||
}
|
|
||||||
|
|
||||||
pub fn file_path(&self) -> PathBuf {
|
|
||||||
let mut path = self.base.clone();
|
|
||||||
path.push(PathBuf::from(self.id.clone()));
|
|
||||||
path.set_extension(self.extension.clone());
|
|
||||||
path
|
|
||||||
}
|
|
||||||
|
|
||||||
pub fn metadata_path(&self) -> PathBuf {
|
|
||||||
let mut path = self.base.clone();
|
|
||||||
path.push(PathBuf::from(self.id.clone()));
|
|
||||||
path.set_extension("json");
|
|
||||||
path
|
|
||||||
}
|
|
||||||
|
|
||||||
pub fn thumbnail_path(&self) -> PathBuf {
|
|
||||||
let mut path = self.base.clone();
|
|
||||||
path.push(PathBuf::from(self.id.clone()));
|
|
||||||
path.set_extension(format!("tn.{}", self.extension));
|
|
||||||
path
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
impl TryFrom<String> for PathResolver {
|
|
||||||
type Error = PathError;
|
|
||||||
fn try_from(s: String) -> Result<Self, Self::Error> {
|
|
||||||
PathResolver::try_from(s.as_str())
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
impl TryFrom<&str> for PathResolver {
|
|
||||||
type Error = PathError;
|
|
||||||
fn try_from(s: &str) -> Result<Self, Self::Error> {
|
|
||||||
PathResolver::try_from(Path::new(s))
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
impl TryFrom<PathBuf> for PathResolver {
|
|
||||||
type Error = PathError;
|
|
||||||
fn try_from(path: PathBuf) -> Result<Self, Self::Error> {
|
|
||||||
PathResolver::try_from(path.as_path())
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
impl TryFrom<&Path> for PathResolver {
|
|
||||||
type Error = PathError;
|
|
||||||
fn try_from(path: &Path) -> Result<Self, Self::Error> {
|
|
||||||
Ok(Self {
|
|
||||||
base: path
|
|
||||||
.parent()
|
|
||||||
.map(|s| s.to_owned())
|
|
||||||
.ok_or(PathError::InvalidPath)?,
|
|
||||||
id: path
|
|
||||||
.file_stem()
|
|
||||||
.and_then(|s| s.to_str().map(FileId::from))
|
|
||||||
.ok_or(PathError::InvalidPath)?,
|
|
||||||
extension: path
|
|
||||||
.extension()
|
|
||||||
.and_then(|s| s.to_str().map(|s| s.to_owned()))
|
|
||||||
.ok_or(PathError::InvalidPath)?,
|
|
||||||
})
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
/// One file in the database, complete with the path of the file and information about the
|
|
||||||
/// thumbnail of the file.
|
|
||||||
#[derive(Debug)]
|
|
||||||
pub struct FileHandle {
|
|
||||||
pub id: FileId,
|
|
||||||
pub path: PathResolver,
|
|
||||||
pub info: FileInfo,
|
|
||||||
}
|
|
||||||
|
|
||||||
impl FileHandle {
|
|
||||||
/// Create a new entry in the database
|
|
||||||
pub fn new(filename: String, root: PathBuf) -> Result<Self, WriteFileError> {
|
|
||||||
let id = FileId::from(Uuid::new_v4().hyphenated().to_string());
|
|
||||||
let path = PathBuf::from(filename);
|
|
||||||
|
|
||||||
let name = path
|
|
||||||
.file_stem()
|
|
||||||
.and_then(|s| s.to_str().map(|s| s.to_owned()))
|
|
||||||
.ok_or(WriteFileError::InvalidPath)?;
|
|
||||||
let extension = path
|
|
||||||
.extension()
|
|
||||||
.and_then(|s| s.to_str().map(|s| s.to_owned()))
|
|
||||||
.ok_or(WriteFileError::InvalidPath)?;
|
|
||||||
let path = PathResolver {
|
|
||||||
base: root.clone(),
|
|
||||||
id: id.clone(),
|
|
||||||
extension: extension.clone(),
|
|
||||||
};
|
|
||||||
|
|
||||||
let file_type = mime_guess::from_ext(&extension)
|
|
||||||
.first_or_text_plain()
|
|
||||||
.essence_str()
|
|
||||||
.to_owned();
|
|
||||||
|
|
||||||
let info = FileInfo {
|
|
||||||
id: id.clone(),
|
|
||||||
name,
|
|
||||||
size: 0,
|
|
||||||
created: Utc::now(),
|
|
||||||
file_type,
|
|
||||||
hash: "".to_owned(),
|
|
||||||
extension,
|
|
||||||
};
|
|
||||||
|
|
||||||
let mut md_file = std::fs::File::create(path.metadata_path())?;
|
|
||||||
let _ = md_file.write(&serde_json::to_vec(&info)?)?;
|
|
||||||
|
|
||||||
Ok(Self { id, path, info })
|
|
||||||
}
|
|
||||||
|
|
||||||
pub fn load(id: &FileId, root: &Path) -> Result<Self, ReadFileError> {
|
|
||||||
let info = FileInfo::load(PathResolver::metadata_path_by_id(root, id.clone()))?;
|
|
||||||
let resolver = PathResolver::new(root, id.clone(), info.extension.clone());
|
|
||||||
Ok(Self {
|
|
||||||
id: info.id.clone(),
|
|
||||||
path: resolver,
|
|
||||||
info,
|
|
||||||
})
|
|
||||||
}
|
|
||||||
|
|
||||||
pub fn set_content(&mut self, content: Vec<u8>) -> Result<(), WriteFileError> {
|
|
||||||
let mut content_file = std::fs::File::create(self.path.file_path())?;
|
|
||||||
let byte_count = content_file.write(&content)?;
|
|
||||||
self.info.size = byte_count;
|
|
||||||
self.info.hash = self.hash_content(&content).as_string();
|
|
||||||
|
|
||||||
let mut md_file = std::fs::File::create(self.path.metadata_path())?;
|
|
||||||
let _ = md_file.write(&serde_json::to_vec(&self.info)?)?;
|
|
||||||
|
|
||||||
self.write_thumbnail()?;
|
|
||||||
|
|
||||||
Ok(())
|
|
||||||
}
|
|
||||||
|
|
||||||
pub fn content(&self) -> Result<Vec<u8>, ReadFileError> {
|
|
||||||
load_content(&self.path.file_path())
|
|
||||||
}
|
|
||||||
|
|
||||||
pub fn thumbnail(&self) -> Result<Vec<u8>, ReadFileError> {
|
|
||||||
load_content(&self.path.thumbnail_path())
|
|
||||||
}
|
|
||||||
|
|
||||||
fn hash_content(&self, data: &Vec<u8>) -> HexString {
|
|
||||||
HexString::from_bytes(&Sha256::digest(data).to_vec())
|
|
||||||
}
|
|
||||||
|
|
||||||
fn write_thumbnail(&self) -> Result<(), WriteFileError> {
|
|
||||||
let img = image::open(self.path.file_path())?;
|
|
||||||
let tn = img.resize(640, 640, FilterType::Nearest);
|
|
||||||
tn.save(self.path.thumbnail_path())?;
|
|
||||||
Ok(())
|
|
||||||
}
|
|
||||||
|
|
||||||
pub fn delete(self) {
|
|
||||||
let _ = std::fs::remove_file(self.path.thumbnail_path());
|
|
||||||
let _ = std::fs::remove_file(self.path.file_path());
|
|
||||||
let _ = std::fs::remove_file(self.path.metadata_path());
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
fn load_content(path: &Path) -> Result<Vec<u8>, ReadFileError> {
|
|
||||||
let mut buf = Vec::new();
|
|
||||||
let mut file = std::fs::File::open(path)?;
|
|
||||||
file.read_to_end(&mut buf)?;
|
|
||||||
Ok(buf)
|
|
||||||
}
|
|
||||||
|
|
||||||
#[cfg(test)]
mod test {
    use super::*;
    use std::{convert::TryFrom, path::PathBuf};
    use tempdir::TempDir;

    #[test]
    fn paths() {
        // A stored path round-trips into content/metadata/thumbnail paths.
        let resolver = PathResolver::try_from("path/82420255-d3c8-4d90-a582-f94be588c70c.png")
            .expect("to have a valid path");

        assert_eq!(
            resolver.file_path(),
            PathBuf::from("path/82420255-d3c8-4d90-a582-f94be588c70c.png")
        );
        assert_eq!(
            resolver.metadata_path(),
            PathBuf::from("path/82420255-d3c8-4d90-a582-f94be588c70c.json")
        );
        assert_eq!(
            resolver.thumbnail_path(),
            PathBuf::from("path/82420255-d3c8-4d90-a582-f94be588c70c.tn.png")
        );
    }

    #[test]
    fn it_creates_file_info() {
        let tmp = TempDir::new("var").unwrap();
        let handle =
            FileHandle::new("rawr.png".to_owned(), PathBuf::from(tmp.path())).expect("to succeed");
        // A freshly created handle has the derived name/type and no content.
        assert_eq!(handle.info.name, "rawr");
        assert_eq!(handle.info.size, 0);
        assert_eq!(handle.info.file_type, "image/png");
        assert_eq!(handle.info.extension, "png");
    }

    #[test]
    fn it_opens_a_file() {
        let tmp = TempDir::new("var").unwrap();
        FileHandle::new("rawr.png".to_owned(), PathBuf::from(tmp.path())).expect("to succeed");
    }

    #[test]
    fn it_deletes_a_file() {
        let tmp = TempDir::new("var").unwrap();
        let handle =
            FileHandle::new("rawr.png".to_owned(), PathBuf::from(tmp.path())).expect("to succeed");
        handle.delete();
    }

    #[test]
    fn it_can_return_a_thumbnail() {
        let tmp = TempDir::new("var").unwrap();
        let _ =
            FileHandle::new("rawr.png".to_owned(), PathBuf::from(tmp.path())).expect("to succeed");
        /*
        assert_eq!(
            f.thumbnail(),
            Thumbnail {
                id: String::from("rawr.png"),
                root: PathBuf::from("var/"),
            },
        );
        */
    }

    #[test]
    fn it_can_return_a_file_stream() {
        let tmp = TempDir::new("var").unwrap();
        let _ =
            FileHandle::new("rawr.png".to_owned(), PathBuf::from(tmp.path())).expect("to succeed");
        // f.stream().expect("to succeed");
    }

    #[test]
    fn it_raises_an_error_when_file_not_found() {
        let tmp = TempDir::new("var").unwrap();
        let result = FileHandle::load(&FileId::from("rawr"), tmp.path());
        assert!(matches!(result, Err(ReadFileError::FileNotFound(_))));
    }
}
|
|
|
@ -1,76 +0,0 @@
|
||||||
use crate::FileId;
|
|
||||||
|
|
||||||
use super::{ReadFileError, WriteFileError};
|
|
||||||
use chrono::prelude::*;
|
|
||||||
use serde::{Deserialize, Serialize};
|
|
||||||
use std::{
|
|
||||||
io::{Read, Write},
|
|
||||||
path::PathBuf,
|
|
||||||
};
|
|
||||||
|
|
||||||
#[derive(Clone, Debug, Serialize, Deserialize)]
|
|
||||||
pub struct FileInfo {
|
|
||||||
pub id: FileId,
|
|
||||||
|
|
||||||
// Early versions of the application didn't support a name field, so it is possible that
|
|
||||||
// metadata won't contain the name. We can just default to an empty string when loading the
|
|
||||||
// metadata, as all future versions will require a filename when the file gets uploaded.
|
|
||||||
#[serde(default)]
|
|
||||||
pub name: String,
|
|
||||||
pub size: usize,
|
|
||||||
pub created: DateTime<Utc>,
|
|
||||||
pub file_type: String,
|
|
||||||
pub hash: String,
|
|
||||||
pub extension: String,
|
|
||||||
}
|
|
||||||
|
|
||||||
impl FileInfo {
|
|
||||||
pub fn load(path: PathBuf) -> Result<Self, ReadFileError> {
|
|
||||||
let mut content: Vec<u8> = Vec::new();
|
|
||||||
let mut file =
|
|
||||||
std::fs::File::open(path.clone()).map_err(|_| ReadFileError::FileNotFound(path))?;
|
|
||||||
file.read_to_end(&mut content)?;
|
|
||||||
let js = serde_json::from_slice(&content)?;
|
|
||||||
|
|
||||||
Ok(js)
|
|
||||||
}
|
|
||||||
|
|
||||||
pub fn save(&self, path: PathBuf) -> Result<(), WriteFileError> {
|
|
||||||
let ser = serde_json::to_string(self).unwrap();
|
|
||||||
let mut file = std::fs::File::create(path)?;
|
|
||||||
let _ = file.write(ser.as_bytes())?;
|
|
||||||
Ok(())
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
#[cfg(test)]
mod test {
    use super::*;
    use crate::store::FileId;
    use tempdir::TempDir;

    #[test]
    fn it_saves_and_loads_metadata() {
        let tmp = TempDir::new("var").unwrap();
        let created = Utc::now();

        // Round-trip a metadata record through save/load and check that
        // the interesting fields survive serialization.
        let info = FileInfo {
            id: FileId("temp-id".to_owned()),
            name: "test-image".to_owned(),
            size: 23777,
            created,
            file_type: "image/png".to_owned(),
            hash: "abcdefg".to_owned(),
            extension: "png".to_owned(),
        };

        let mut path = tmp.path().to_owned();
        path.push(&PathBuf::from(info.id.clone()));
        info.save(path.clone()).unwrap();

        let loaded = FileInfo::load(path).unwrap();
        assert_eq!(loaded.size, 23777);
        assert_eq!(loaded.created, info.created);
        assert_eq!(loaded.file_type, "image/png");
        assert_eq!(loaded.hash, info.hash);
    }
}
|
|
|
@ -1,566 +0,0 @@
|
||||||
use base64ct::{Base64, Encoding};
|
|
||||||
use serde::{Deserialize, Serialize};
|
|
||||||
use sha2::{Digest, Sha256};
|
|
||||||
use sqlx::{
|
|
||||||
sqlite::{SqlitePool, SqliteRow},
|
|
||||||
Row,
|
|
||||||
};
|
|
||||||
use std::{collections::HashSet, ops::Deref, path::PathBuf};
|
|
||||||
use thiserror::Error;
|
|
||||||
use uuid::Uuid;
|
|
||||||
|
|
||||||
mod filehandle;
|
|
||||||
mod fileinfo;
|
|
||||||
|
|
||||||
pub use filehandle::FileHandle;
|
|
||||||
pub use fileinfo::FileInfo;
|
|
||||||
|
|
||||||
#[derive(Debug, Error)]
|
|
||||||
pub enum WriteFileError {
|
|
||||||
#[error("root file path does not exist")]
|
|
||||||
RootNotFound,
|
|
||||||
|
|
||||||
#[error("permission denied")]
|
|
||||||
PermissionDenied,
|
|
||||||
|
|
||||||
#[error("invalid path")]
|
|
||||||
InvalidPath,
|
|
||||||
|
|
||||||
#[error("no metadata available")]
|
|
||||||
NoMetadata,
|
|
||||||
|
|
||||||
#[error("file could not be loaded")]
|
|
||||||
LoadError(#[from] ReadFileError),
|
|
||||||
|
|
||||||
#[error("image conversion failed")]
|
|
||||||
ImageError(#[from] image::ImageError),
|
|
||||||
|
|
||||||
#[error("JSON error")]
|
|
||||||
JSONError(#[from] serde_json::error::Error),
|
|
||||||
|
|
||||||
#[error("IO error")]
|
|
||||||
IOError(#[from] std::io::Error),
|
|
||||||
}
|
|
||||||
|
|
||||||
#[derive(Debug, Error)]
|
|
||||||
pub enum ReadFileError {
|
|
||||||
#[error("file not found")]
|
|
||||||
FileNotFound(PathBuf),
|
|
||||||
|
|
||||||
#[error("path is not a file")]
|
|
||||||
NotAFile,
|
|
||||||
|
|
||||||
#[error("permission denied")]
|
|
||||||
PermissionDenied,
|
|
||||||
|
|
||||||
#[error("JSON error")]
|
|
||||||
JSONError(#[from] serde_json::error::Error),
|
|
||||||
|
|
||||||
#[error("IO error")]
|
|
||||||
IOError(#[from] std::io::Error),
|
|
||||||
}
|
|
||||||
|
|
||||||
#[derive(Debug, Error)]
|
|
||||||
pub enum DeleteFileError {
|
|
||||||
#[error("file not found")]
|
|
||||||
FileNotFound(PathBuf),
|
|
||||||
|
|
||||||
#[error("metadata path is not a file")]
|
|
||||||
NotAFile,
|
|
||||||
|
|
||||||
#[error("cannot read metadata")]
|
|
||||||
PermissionDenied,
|
|
||||||
|
|
||||||
#[error("invalid metadata path")]
|
|
||||||
MetadataParseError(serde_json::error::Error),
|
|
||||||
|
|
||||||
#[error("IO error")]
|
|
||||||
IOError(#[from] std::io::Error),
|
|
||||||
}
|
|
||||||
|
|
||||||
impl From<ReadFileError> for DeleteFileError {
|
|
||||||
fn from(err: ReadFileError) -> Self {
|
|
||||||
match err {
|
|
||||||
ReadFileError::FileNotFound(path) => DeleteFileError::FileNotFound(path),
|
|
||||||
ReadFileError::NotAFile => DeleteFileError::NotAFile,
|
|
||||||
ReadFileError::PermissionDenied => DeleteFileError::PermissionDenied,
|
|
||||||
ReadFileError::JSONError(err) => DeleteFileError::MetadataParseError(err),
|
|
||||||
ReadFileError::IOError(err) => DeleteFileError::IOError(err),
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
#[derive(Debug, Error)]
|
|
||||||
pub enum AuthError {
|
|
||||||
#[error("authentication token is duplicated")]
|
|
||||||
DuplicateAuthToken,
|
|
||||||
|
|
||||||
#[error("session token is duplicated")]
|
|
||||||
DuplicateSessionToken,
|
|
||||||
|
|
||||||
#[error("database failed")]
|
|
||||||
SqlError(sqlx::Error),
|
|
||||||
}
|
|
||||||
|
|
||||||
impl From<sqlx::Error> for AuthError {
|
|
||||||
fn from(err: sqlx::Error) -> AuthError {
|
|
||||||
AuthError::SqlError(err)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
#[derive(Clone, Debug, Serialize, Deserialize, Hash, PartialEq, Eq)]
|
|
||||||
pub struct Username(String);
|
|
||||||
|
|
||||||
impl From<String> for Username {
|
|
||||||
fn from(s: String) -> Self {
|
|
||||||
Self(s)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
impl From<&str> for Username {
|
|
||||||
fn from(s: &str) -> Self {
|
|
||||||
Self(s.to_owned())
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
impl From<Username> for String {
|
|
||||||
fn from(s: Username) -> Self {
|
|
||||||
Self::from(&s)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
impl From<&Username> for String {
|
|
||||||
fn from(s: &Username) -> Self {
|
|
||||||
let Username(s) = s;
|
|
||||||
Self::from(s)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
impl Deref for Username {
|
|
||||||
type Target = String;
|
|
||||||
fn deref(&self) -> &Self::Target {
|
|
||||||
&self.0
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
impl sqlx::FromRow<'_, SqliteRow> for Username {
|
|
||||||
fn from_row(row: &SqliteRow) -> sqlx::Result<Self> {
|
|
||||||
let name: String = row.try_get("username")?;
|
|
||||||
Ok(Username::from(name))
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
#[derive(Clone, Debug, Serialize, Deserialize, Hash, PartialEq, Eq)]
|
|
||||||
pub struct AuthToken(String);
|
|
||||||
|
|
||||||
impl From<String> for AuthToken {
|
|
||||||
fn from(s: String) -> Self {
|
|
||||||
Self(s)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
impl From<&str> for AuthToken {
|
|
||||||
fn from(s: &str) -> Self {
|
|
||||||
Self(s.to_owned())
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
impl From<AuthToken> for PathBuf {
|
|
||||||
fn from(s: AuthToken) -> Self {
|
|
||||||
Self::from(&s)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
impl From<&AuthToken> for PathBuf {
|
|
||||||
fn from(s: &AuthToken) -> Self {
|
|
||||||
let AuthToken(s) = s;
|
|
||||||
Self::from(s)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
impl Deref for AuthToken {
|
|
||||||
type Target = String;
|
|
||||||
fn deref(&self) -> &Self::Target {
|
|
||||||
&self.0
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
#[derive(Clone, Debug, Serialize, Deserialize, Hash, PartialEq, Eq)]
|
|
||||||
pub struct SessionToken(String);
|
|
||||||
|
|
||||||
impl From<String> for SessionToken {
|
|
||||||
fn from(s: String) -> Self {
|
|
||||||
Self(s)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
impl From<&str> for SessionToken {
|
|
||||||
fn from(s: &str) -> Self {
|
|
||||||
Self(s.to_owned())
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
impl From<SessionToken> for PathBuf {
|
|
||||||
fn from(s: SessionToken) -> Self {
|
|
||||||
Self::from(&s)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
impl From<&SessionToken> for PathBuf {
|
|
||||||
fn from(s: &SessionToken) -> Self {
|
|
||||||
let SessionToken(s) = s;
|
|
||||||
Self::from(s)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
impl Deref for SessionToken {
|
|
||||||
type Target = String;
|
|
||||||
fn deref(&self) -> &Self::Target {
|
|
||||||
&self.0
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
#[derive(Clone, Debug, Serialize, Deserialize, Hash, PartialEq, Eq)]
|
|
||||||
pub struct FileId(String);
|
|
||||||
|
|
||||||
impl From<String> for FileId {
|
|
||||||
fn from(s: String) -> Self {
|
|
||||||
Self(s)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
impl From<&str> for FileId {
|
|
||||||
fn from(s: &str) -> Self {
|
|
||||||
Self(s.to_owned())
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
impl From<FileId> for PathBuf {
|
|
||||||
fn from(s: FileId) -> Self {
|
|
||||||
Self::from(&s)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
impl From<&FileId> for PathBuf {
|
|
||||||
fn from(s: &FileId) -> Self {
|
|
||||||
let FileId(s) = s;
|
|
||||||
Self::from(s)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
impl Deref for FileId {
|
|
||||||
type Target = String;
|
|
||||||
fn deref(&self) -> &Self::Target {
|
|
||||||
&self.0
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
pub trait FileRoot {
|
|
||||||
fn root(&self) -> PathBuf;
|
|
||||||
}
|
|
||||||
|
|
||||||
pub struct Context(PathBuf);
|
|
||||||
|
|
||||||
impl FileRoot for Context {
|
|
||||||
fn root(&self) -> PathBuf {
|
|
||||||
self.0.clone()
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
#[derive(Clone)]
|
|
||||||
pub struct AuthDB {
|
|
||||||
pool: SqlitePool,
|
|
||||||
}
|
|
||||||
|
|
||||||
impl AuthDB {
|
|
||||||
pub async fn new(path: PathBuf) -> Result<Self, sqlx::Error> {
|
|
||||||
let migrator = sqlx::migrate!("./migrations");
|
|
||||||
let pool = SqlitePool::connect(&format!("sqlite://{}", path.to_str().unwrap())).await?;
|
|
||||||
migrator.run(&pool).await?;
|
|
||||||
Ok(Self { pool })
|
|
||||||
}
|
|
||||||
|
|
||||||
pub async fn add_user(&self, username: Username) -> Result<AuthToken, AuthError> {
|
|
||||||
let mut hasher = Sha256::new();
|
|
||||||
hasher.update(Uuid::new_v4().hyphenated().to_string());
|
|
||||||
hasher.update(username.to_string());
|
|
||||||
let auth_token = Base64::encode_string(&hasher.finalize());
|
|
||||||
|
|
||||||
let _ = sqlx::query("INSERT INTO users (username, token) VALUES ($1, $2)")
|
|
||||||
.bind(username.to_string())
|
|
||||||
.bind(auth_token.clone())
|
|
||||||
.execute(&self.pool)
|
|
||||||
.await?;
|
|
||||||
|
|
||||||
Ok(AuthToken::from(auth_token))
|
|
||||||
}
|
|
||||||
|
|
||||||
pub async fn list_users(&self) -> Result<Vec<Username>, AuthError> {
|
|
||||||
let usernames = sqlx::query_as::<_, Username>("SELECT (username) FROM users")
|
|
||||||
.fetch_all(&self.pool)
|
|
||||||
.await?;
|
|
||||||
|
|
||||||
Ok(usernames)
|
|
||||||
}
|
|
||||||
|
|
||||||
pub async fn authenticate(&self, token: AuthToken) -> Result<Option<SessionToken>, AuthError> {
|
|
||||||
let results = sqlx::query("SELECT * FROM users WHERE token = $1")
|
|
||||||
.bind(token.to_string())
|
|
||||||
.fetch_all(&self.pool)
|
|
||||||
.await?;
|
|
||||||
|
|
||||||
if results.len() > 1 {
|
|
||||||
return Err(AuthError::DuplicateAuthToken);
|
|
||||||
}
|
|
||||||
|
|
||||||
if results.is_empty() {
|
|
||||||
return Ok(None);
|
|
||||||
}
|
|
||||||
|
|
||||||
let user_id: i64 = results[0].try_get("id")?;
|
|
||||||
|
|
||||||
let mut hasher = Sha256::new();
|
|
||||||
hasher.update(Uuid::new_v4().hyphenated().to_string());
|
|
||||||
hasher.update(token.to_string());
|
|
||||||
let session_token = Base64::encode_string(&hasher.finalize());
|
|
||||||
|
|
||||||
let _ = sqlx::query("INSERT INTO sessions (token, user_id) VALUES ($1, $2)")
|
|
||||||
.bind(session_token.clone())
|
|
||||||
.bind(user_id)
|
|
||||||
.execute(&self.pool)
|
|
||||||
.await?;
|
|
||||||
|
|
||||||
Ok(Some(SessionToken::from(session_token)))
|
|
||||||
}
|
|
||||||
|
|
||||||
pub async fn validate_session(
|
|
||||||
&self,
|
|
||||||
token: SessionToken,
|
|
||||||
) -> Result<Option<Username>, AuthError> {
|
|
||||||
let rows = sqlx::query(
|
|
||||||
"SELECT users.username FROM sessions INNER JOIN users ON sessions.user_id = users.id WHERE sessions.token = $1",
|
|
||||||
)
|
|
||||||
.bind(token.to_string())
|
|
||||||
.fetch_all(&self.pool)
|
|
||||||
.await?;
|
|
||||||
if rows.len() > 1 {
|
|
||||||
return Err(AuthError::DuplicateSessionToken);
|
|
||||||
}
|
|
||||||
|
|
||||||
if rows.is_empty() {
|
|
||||||
return Ok(None);
|
|
||||||
}
|
|
||||||
|
|
||||||
let username: String = rows[0].try_get("username")?;
|
|
||||||
Ok(Some(Username::from(username)))
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
pub struct Store {
|
|
||||||
files_root: PathBuf,
|
|
||||||
}
|
|
||||||
|
|
||||||
impl Store {
|
|
||||||
pub fn new(files_root: PathBuf) -> Self {
|
|
||||||
Self { files_root }
|
|
||||||
}
|
|
||||||
|
|
||||||
pub fn list_files(&self) -> Result<HashSet<FileId>, ReadFileError> {
|
|
||||||
let paths = std::fs::read_dir(&self.files_root)?;
|
|
||||||
let info_files = paths
|
|
||||||
.into_iter()
|
|
||||||
.filter_map(|path| {
|
|
||||||
let path_ = path.unwrap().path();
|
|
||||||
if path_.extension().and_then(|s| s.to_str()) == Some("json") {
|
|
||||||
let stem = path_.file_stem().and_then(|s| s.to_str()).unwrap();
|
|
||||||
Some(FileId::from(stem))
|
|
||||||
} else {
|
|
||||||
None
|
|
||||||
}
|
|
||||||
})
|
|
||||||
.collect::<HashSet<FileId>>();
|
|
||||||
Ok(info_files)
|
|
||||||
}
|
|
||||||
|
|
||||||
pub fn add_file(
|
|
||||||
&mut self,
|
|
||||||
filename: String,
|
|
||||||
content: Vec<u8>,
|
|
||||||
) -> Result<FileHandle, WriteFileError> {
|
|
||||||
let mut file = FileHandle::new(filename, self.files_root.clone())?;
|
|
||||||
file.set_content(content)?;
|
|
||||||
Ok(file)
|
|
||||||
}
|
|
||||||
|
|
||||||
pub fn get_file(&self, id: &FileId) -> Result<FileHandle, ReadFileError> {
|
|
||||||
FileHandle::load(id, &self.files_root)
|
|
||||||
}
|
|
||||||
|
|
||||||
pub fn delete_file(&mut self, id: &FileId) -> Result<(), DeleteFileError> {
|
|
||||||
let handle = FileHandle::load(id, &self.files_root)?;
|
|
||||||
handle.delete();
|
|
||||||
Ok(())
|
|
||||||
}
|
|
||||||
|
|
||||||
pub fn get_metadata(&self, id: &FileId) -> Result<FileInfo, ReadFileError> {
|
|
||||||
let mut path = self.files_root.clone();
|
|
||||||
path.push(PathBuf::from(id));
|
|
||||||
path.set_extension("json");
|
|
||||||
FileInfo::load(path)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
#[cfg(test)]
|
|
||||||
mod test {
|
|
||||||
use super::*;
|
|
||||||
use cool_asserts::assert_matches;
|
|
||||||
use std::{collections::HashSet, io::Read};
|
|
||||||
use tempdir::TempDir;
|
|
||||||
|
|
||||||
fn with_file<F>(test_fn: F)
|
|
||||||
where
|
|
||||||
F: FnOnce(Store, FileId, TempDir),
|
|
||||||
{
|
|
||||||
let tmp = TempDir::new("var").unwrap();
|
|
||||||
|
|
||||||
let mut buf = Vec::new();
|
|
||||||
let mut file = std::fs::File::open("fixtures/rawr.png").unwrap();
|
|
||||||
file.read_to_end(&mut buf).unwrap();
|
|
||||||
|
|
||||||
let mut store = Store::new(PathBuf::from(tmp.path()));
|
|
||||||
let file_record = store.add_file("rawr.png".to_owned(), buf).unwrap();
|
|
||||||
|
|
||||||
test_fn(store, file_record.id, tmp);
|
|
||||||
}
|
|
||||||
|
|
||||||
#[test]
|
|
||||||
fn adds_files() {
|
|
||||||
with_file(|store, id, tmp| {
|
|
||||||
let file = store.get_file(&id).expect("to retrieve the file");
|
|
||||||
|
|
||||||
assert_eq!(file.content().map(|file| file.len()).unwrap(), 23777);
|
|
||||||
|
|
||||||
assert!(tmp.path().join(&(*id)).with_extension("png").exists());
|
|
||||||
assert!(tmp.path().join(&(*id)).with_extension("json").exists());
|
|
||||||
assert!(tmp.path().join(&(*id)).with_extension("tn.png").exists());
|
|
||||||
});
|
|
||||||
}
|
|
||||||
|
|
||||||
#[test]
|
|
||||||
fn sets_up_metadata_for_file() {
|
|
||||||
with_file(|store, id, tmp| {
|
|
||||||
assert!(tmp.path().join(&(*id)).with_extension("png").exists());
|
|
||||||
let info = store.get_metadata(&id).expect("to retrieve the metadata");
|
|
||||||
|
|
||||||
assert_matches!(info, FileInfo { size, file_type, hash, extension, .. } => {
|
|
||||||
assert_eq!(size, 23777);
|
|
||||||
assert_eq!(file_type, "image/png");
|
|
||||||
assert_eq!(hash, "b6cd35e113b95d62f53d9cbd27ccefef47d3e324aef01a2db6c0c6d3a43c89ee".to_owned());
|
|
||||||
assert_eq!(extension, "png".to_owned());
|
|
||||||
});
|
|
||||||
});
|
|
||||||
}
|
|
||||||
|
|
||||||
/*
|
|
||||||
#[test]
|
|
||||||
fn sets_up_thumbnail_for_file() {
|
|
||||||
with_file(|store, id| {
|
|
||||||
let (_, thumbnail) = store.get_thumbnail(&id).expect("to retrieve the thumbnail");
|
|
||||||
assert_eq!(thumbnail.content().map(|file| file.len()).unwrap(), 48869);
|
|
||||||
});
|
|
||||||
}
|
|
||||||
*/
|
|
||||||
|
|
||||||
#[test]
|
|
||||||
fn deletes_associated_files() {
|
|
||||||
with_file(|mut store, id, tmp| {
|
|
||||||
store.delete_file(&id).expect("file to be deleted");
|
|
||||||
|
|
||||||
assert!(!tmp.path().join(&(*id)).with_extension("png").exists());
|
|
||||||
assert!(!tmp.path().join(&(*id)).with_extension("json").exists());
|
|
||||||
assert!(!tmp.path().join(&(*id)).with_extension("tn.png").exists());
|
|
||||||
});
|
|
||||||
}
|
|
||||||
|
|
||||||
#[test]
|
|
||||||
fn lists_files_in_the_db() {
|
|
||||||
with_file(|store, id, _| {
|
|
||||||
let resolvers = store.list_files().expect("file listing to succeed");
|
|
||||||
let ids = resolvers.into_iter().collect::<HashSet<FileId>>();
|
|
||||||
|
|
||||||
assert_eq!(ids.len(), 1);
|
|
||||||
assert!(ids.contains(&id));
|
|
||||||
});
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
#[cfg(test)]
|
|
||||||
mod authdb_test {
|
|
||||||
use super::*;
|
|
||||||
use cool_asserts::assert_matches;
|
|
||||||
|
|
||||||
#[tokio::test]
|
|
||||||
async fn can_create_and_list_users() {
|
|
||||||
let db = AuthDB::new(PathBuf::from(":memory:"))
|
|
||||||
.await
|
|
||||||
.expect("a memory-only database will be created");
|
|
||||||
let _ = db
|
|
||||||
.add_user(Username::from("savanni"))
|
|
||||||
.await
|
|
||||||
.expect("user to be created");
|
|
||||||
assert_matches!(db.list_users().await, Ok(names) => {
|
|
||||||
let names = names.into_iter().collect::<HashSet<Username>>();
|
|
||||||
assert!(names.contains(&Username::from("savanni")));
|
|
||||||
})
|
|
||||||
}
|
|
||||||
|
|
||||||
#[tokio::test]
|
|
||||||
async fn unknown_auth_token_returns_nothing() {
|
|
||||||
let db = AuthDB::new(PathBuf::from(":memory:"))
|
|
||||||
.await
|
|
||||||
.expect("a memory-only database will be created");
|
|
||||||
let _ = db
|
|
||||||
.add_user(Username::from("savanni"))
|
|
||||||
.await
|
|
||||||
.expect("user to be created");
|
|
||||||
|
|
||||||
let token = AuthToken::from("0000000000");
|
|
||||||
|
|
||||||
assert_matches!(db.authenticate(token).await, Ok(None));
|
|
||||||
}
|
|
||||||
|
|
||||||
#[tokio::test]
|
|
||||||
async fn auth_token_becomes_session_token() {
|
|
||||||
let db = AuthDB::new(PathBuf::from(":memory:"))
|
|
||||||
.await
|
|
||||||
.expect("a memory-only database will be created");
|
|
||||||
let token = db
|
|
||||||
.add_user(Username::from("savanni"))
|
|
||||||
.await
|
|
||||||
.expect("user to be created");
|
|
||||||
|
|
||||||
assert_matches!(db.authenticate(token).await, Ok(_));
|
|
||||||
}
|
|
||||||
|
|
||||||
#[tokio::test]
|
|
||||||
async fn can_validate_session_token() {
|
|
||||||
let db = AuthDB::new(PathBuf::from(":memory:"))
|
|
||||||
.await
|
|
||||||
.expect("a memory-only database will be created");
|
|
||||||
let token = db
|
|
||||||
.add_user(Username::from("savanni"))
|
|
||||||
.await
|
|
||||||
.expect("user to be created");
|
|
||||||
let session = db
|
|
||||||
.authenticate(token)
|
|
||||||
.await
|
|
||||||
.expect("token authentication should succeed")
|
|
||||||
.expect("session token should be found");
|
|
||||||
|
|
||||||
assert_matches!(
|
|
||||||
db.validate_session(session).await,
|
|
||||||
Ok(Some(username)) => {
|
|
||||||
assert_eq!(username, Username::from("savanni"));
|
|
||||||
});
|
|
||||||
}
|
|
||||||
}
|
|
|
@ -1,91 +0,0 @@
|
||||||
use super::{ReadFileError, WriteFileError};
|
|
||||||
use image::imageops::FilterType;
|
|
||||||
use std::{
|
|
||||||
fs::remove_file,
|
|
||||||
io::Read,
|
|
||||||
path::{Path, PathBuf},
|
|
||||||
};
|
|
||||||
|
|
||||||
#[derive(Clone, Debug, PartialEq)]
|
|
||||||
pub struct Thumbnail {
|
|
||||||
pub path: PathBuf,
|
|
||||||
}
|
|
||||||
|
|
||||||
impl Thumbnail {
|
|
||||||
pub fn open(
|
|
||||||
origin_path: PathBuf,
|
|
||||||
thumbnail_path: PathBuf,
|
|
||||||
) -> Result<Thumbnail, WriteFileError> {
|
|
||||||
let s = Thumbnail {
|
|
||||||
path: PathBuf::from(thumbnail_path),
|
|
||||||
};
|
|
||||||
|
|
||||||
if !s.path.exists() {
|
|
||||||
let img = image::open(&origin_path)?;
|
|
||||||
let tn = img.resize(640, 640, FilterType::Nearest);
|
|
||||||
tn.save(&s.path)?;
|
|
||||||
}
|
|
||||||
|
|
||||||
Ok(s)
|
|
||||||
}
|
|
||||||
|
|
||||||
pub fn load(path: PathBuf) -> Result<Thumbnail, ReadFileError> {
|
|
||||||
let s = Thumbnail { path: path.clone() };
|
|
||||||
|
|
||||||
if !s.path.exists() {
|
|
||||||
return Err(ReadFileError::FileNotFound(path));
|
|
||||||
}
|
|
||||||
|
|
||||||
Ok(s)
|
|
||||||
}
|
|
||||||
|
|
||||||
/*
|
|
||||||
pub fn from_path(path: &Path) -> Result<Thumbnail, ReadFileError> {
|
|
||||||
let id = path
|
|
||||||
.file_name()
|
|
||||||
.map(|s| String::from(s.to_string_lossy()))
|
|
||||||
.ok_or(ReadFileError::NotAnImage(PathBuf::from(path)))?;
|
|
||||||
|
|
||||||
let path = path
|
|
||||||
.parent()
|
|
||||||
.ok_or(ReadFileError::FileNotFound(PathBuf::from(path)))?;
|
|
||||||
|
|
||||||
Thumbnail::open(&id, root)
|
|
||||||
}
|
|
||||||
*/
|
|
||||||
|
|
||||||
/*
|
|
||||||
pub fn stream(&self) -> Result<std::fs::File, ReadFileError> {
|
|
||||||
std::fs::File::open(self.path.clone()).map_err(|err| {
|
|
||||||
if err.kind() == std::io::ErrorKind::NotFound {
|
|
||||||
ReadFileError::FileNotFound
|
|
||||||
} else {
|
|
||||||
ReadFileError::from(err)
|
|
||||||
}
|
|
||||||
})
|
|
||||||
}
|
|
||||||
*/
|
|
||||||
|
|
||||||
/*
|
|
||||||
pub fn delete(self) -> Result<(), WriteFileError> {
|
|
||||||
remove_file(self.path).map_err(WriteFileError::from)
|
|
||||||
}
|
|
||||||
*/
|
|
||||||
}
|
|
||||||
|
|
||||||
#[cfg(test)]
|
|
||||||
mod test {
|
|
||||||
use super::*;
|
|
||||||
use crate::store::utils::FileCleanup;
|
|
||||||
|
|
||||||
#[test]
|
|
||||||
fn it_creates_a_thumbnail_if_one_does_not_exist() {
|
|
||||||
let _ = FileCleanup(PathBuf::from("var/rawr.tn.png"));
|
|
||||||
let _ = Thumbnail::open(
|
|
||||||
PathBuf::from("fixtures/rawr.png"),
|
|
||||||
PathBuf::from("var/rawr.tn.png"),
|
|
||||||
)
|
|
||||||
.expect("thumbnail open must work");
|
|
||||||
assert!(Path::new("var/rawr.tn.png").is_file());
|
|
||||||
}
|
|
||||||
}
|
|
|
@ -1,15 +0,0 @@
|
||||||
<html>
|
|
||||||
|
|
||||||
<head>
|
|
||||||
<title> {{title}} </title>
|
|
||||||
<link href="/css" rel="stylesheet" type="text/css" media="screen" />
|
|
||||||
<script src="/script"></script>
|
|
||||||
</head>
|
|
||||||
|
|
||||||
<body>
|
|
||||||
|
|
||||||
<a href="/file/{{id}}"><img src="/tn/{{id}}" /></a>
|
|
||||||
|
|
||||||
</body>
|
|
||||||
|
|
||||||
</html>
|
|
|
@ -1,54 +0,0 @@
|
||||||
<html>
|
|
||||||
|
|
||||||
<head>
|
|
||||||
<title> Admin list of files </title>
|
|
||||||
<link href="/css" rel="stylesheet" type="text/css" media="screen" />
|
|
||||||
<script src="/script"></script>
|
|
||||||
</head>
|
|
||||||
|
|
||||||
<body>
|
|
||||||
<h1> Admin list of files </h1>
|
|
||||||
|
|
||||||
<div class="uploadform">
|
|
||||||
<form action="/" method="post" enctype="multipart/form-data">
|
|
||||||
<div id="file-selector">
|
|
||||||
<input type="file" name="file" id="file-selector-input" />
|
|
||||||
<label for="file-selector-input" onclick="selectFile('file-selector')">Select a file</label>
|
|
||||||
</div>
|
|
||||||
<input type="submit" value="Upload file" />
|
|
||||||
</form>
|
|
||||||
</div>
|
|
||||||
|
|
||||||
<div class="files">
|
|
||||||
{{#files}}
|
|
||||||
<div class="file">
|
|
||||||
{{#error}}
|
|
||||||
<div>
|
|
||||||
<p> {{error}} </p>
|
|
||||||
</div>
|
|
||||||
{{/error}}
|
|
||||||
|
|
||||||
{{#file}}
|
|
||||||
<div class="thumbnail">
|
|
||||||
<a href="/file/{{id}}"><img src="/tn/{{id}}" /></a>
|
|
||||||
</div>
|
|
||||||
<div>
|
|
||||||
<ul>
|
|
||||||
<li> {{date}} </li>
|
|
||||||
<li> {{type_}} </li>
|
|
||||||
<li> {{size}} </li>
|
|
||||||
</ul>
|
|
||||||
<div>
|
|
||||||
<form action="/{{id}}" method="post">
|
|
||||||
<input type="hidden" name="_method" value="delete" />
|
|
||||||
<input type="submit" value="Delete" />
|
|
||||||
</form>
|
|
||||||
</div>
|
|
||||||
</div>
|
|
||||||
{{/file}}
|
|
||||||
</div>
|
|
||||||
{{/files}}
|
|
||||||
</div>
|
|
||||||
</body>
|
|
||||||
|
|
||||||
</html>
|
|
|
@ -1,10 +0,0 @@
|
||||||
const selectFile = (selectorId) => {
|
|
||||||
console.log("wide arrow functions work: " + selectorId);
|
|
||||||
const input = document.querySelector("#" + selectorId + " input[type='file']")
|
|
||||||
const label = document.querySelector("#" + selectorId + " label")
|
|
||||||
input.addEventListener("change", (e) => {
|
|
||||||
if (input.files.length > 0) {
|
|
||||||
label.innerHTML = input.files[0].name
|
|
||||||
}
|
|
||||||
})
|
|
||||||
}
|
|
|
@ -1,186 +0,0 @@
|
||||||
:root {
|
|
||||||
--main-bg-color: #e5f0fc;
|
|
||||||
--fg-color: #449dfc;
|
|
||||||
|
|
||||||
--space-small: 4px;
|
|
||||||
--space-medium: 8px;
|
|
||||||
--space-large: 12px;
|
|
||||||
|
|
||||||
--hover-low: 4px 4px 4px gray;
|
|
||||||
}
|
|
||||||
|
|
||||||
body {
|
|
||||||
font-family: 'Ariel', sans-serif;
|
|
||||||
background-color: var(--main-bg-color);
|
|
||||||
}
|
|
||||||
|
|
||||||
.card {
|
|
||||||
border: 1px solid black;
|
|
||||||
border-radius: 5px;
|
|
||||||
box-shadow: var(--hover-low);
|
|
||||||
margin: var(--space-large);
|
|
||||||
padding: var(--space-medium);
|
|
||||||
|
|
||||||
}
|
|
||||||
|
|
||||||
.authentication-page {
|
|
||||||
display: flex;
|
|
||||||
flex-direction: column;
|
|
||||||
justify-content: center;
|
|
||||||
align-items: center;
|
|
||||||
height: 200px;
|
|
||||||
margin: 8px;
|
|
||||||
}
|
|
||||||
|
|
||||||
.authentication-form {
|
|
||||||
}
|
|
||||||
|
|
||||||
.authentication-form__label {
|
|
||||||
margin: var(--space-small);
|
|
||||||
}
|
|
||||||
|
|
||||||
.authentication-form__input {
|
|
||||||
margin: var(--space-small);
|
|
||||||
}
|
|
||||||
|
|
||||||
.gallery-page {
|
|
||||||
display: flex;
|
|
||||||
flex-direction: column;
|
|
||||||
}
|
|
||||||
|
|
||||||
.upload-form {
|
|
||||||
display: flex;
|
|
||||||
flex-direction: column;
|
|
||||||
}
|
|
||||||
|
|
||||||
.upload-form__selector {
|
|
||||||
margin: var(--space-small);
|
|
||||||
}
|
|
||||||
|
|
||||||
.upload-form__button {
|
|
||||||
margin: var(--space-small);
|
|
||||||
}
|
|
||||||
|
|
||||||
.gallery {
|
|
||||||
display: flex;
|
|
||||||
flex-wrap: wrap;
|
|
||||||
}
|
|
||||||
|
|
||||||
.thumbnail {
|
|
||||||
width: 300px;
|
|
||||||
display: flex;
|
|
||||||
flex-direction: column;
|
|
||||||
justify-content: space-between;
|
|
||||||
}
|
|
||||||
|
|
||||||
.thumbnail__image {
|
|
||||||
max-width: 100%;
|
|
||||||
border: none;
|
|
||||||
}
|
|
||||||
|
|
||||||
.thumbnail__metadata {
|
|
||||||
list-style: none;
|
|
||||||
}
|
|
||||||
|
|
||||||
/*
|
|
||||||
[type="submit"] {
|
|
||||||
border-radius: 1em;
|
|
||||||
margin: 1em;
|
|
||||||
padding: 1em;
|
|
||||||
}
|
|
||||||
|
|
||||||
.uploadform {
|
|
||||||
}
|
|
||||||
*/
|
|
||||||
|
|
||||||
/*
|
|
||||||
[type="file"] {
|
|
||||||
border: 0;
|
|
||||||
clip: rect(0, 0, 0, 0);
|
|
||||||
height: 1px;
|
|
||||||
overflow: hidden;
|
|
||||||
padding: 0;
|
|
||||||
position: absolute !important;
|
|
||||||
white-space: nowrap;
|
|
||||||
width: 1px;
|
|
||||||
}
|
|
||||||
|
|
||||||
[type="file"] + label {
|
|
||||||
background-color: rgb(0, 86, 112);
|
|
||||||
border-radius: 1em;
|
|
||||||
color: #fff;
|
|
||||||
cursor: pointer;
|
|
||||||
display: inline-block;
|
|
||||||
padding: 1em;
|
|
||||||
margin: 1em;
|
|
||||||
transition: background-color 0.3s;
|
|
||||||
}
|
|
||||||
|
|
||||||
[type="file"]:focus + label,
|
|
||||||
[type="file"] + label:hover {
|
|
||||||
background-color: #67b0ff;
|
|
||||||
}
|
|
||||||
|
|
||||||
[type="file"]:focus + label {
|
|
||||||
outline: 1px dotted #000;
|
|
||||||
outline: -webkit-focus-ring-color auto 5px;
|
|
||||||
}
|
|
||||||
*/
|
|
||||||
|
|
||||||
@media screen and (max-width: 1080px) { /* This is the screen width of a OnePlus 8 */
|
|
||||||
body {
|
|
||||||
font-size: xx-large;
|
|
||||||
}
|
|
||||||
|
|
||||||
.authentication-form {
|
|
||||||
width: 100%;
|
|
||||||
display: flex;
|
|
||||||
flex-direction: column;
|
|
||||||
}
|
|
||||||
|
|
||||||
.authentication-form__input {
|
|
||||||
font-size: x-large;
|
|
||||||
}
|
|
||||||
|
|
||||||
.authentication-form__button {
|
|
||||||
font-size: x-large;
|
|
||||||
}
|
|
||||||
|
|
||||||
.upload-form__selector {
|
|
||||||
font-size: larger;
|
|
||||||
}
|
|
||||||
|
|
||||||
.upload-form__button {
|
|
||||||
font-size: larger;
|
|
||||||
}
|
|
||||||
|
|
||||||
.thumbnail {
|
|
||||||
width: 100%;
|
|
||||||
}
|
|
||||||
|
|
||||||
/*
|
|
||||||
[type="submit"] {
|
|
||||||
font-size: xx-large;
|
|
||||||
width: 100%;
|
|
||||||
}
|
|
||||||
|
|
||||||
.uploadform {
|
|
||||||
display: flex;
|
|
||||||
}
|
|
||||||
|
|
||||||
[type="file"] + label {
|
|
||||||
width: 100%;
|
|
||||||
}
|
|
||||||
|
|
||||||
.thumbnail {
|
|
||||||
max-width: 100%;
|
|
||||||
margin: 1em;
|
|
||||||
}
|
|
||||||
|
|
||||||
.file {
|
|
||||||
display: flex;
|
|
||||||
flex-direction: column;
|
|
||||||
width: 100%;
|
|
||||||
}
|
|
||||||
*/
|
|
||||||
}
|
|
26
flake.lock
26
flake.lock
|
@ -51,16 +51,16 @@
|
||||||
},
|
},
|
||||||
"nixpkgs": {
|
"nixpkgs": {
|
||||||
"locked": {
|
"locked": {
|
||||||
"lastModified": 1691421349,
|
"lastModified": 1681932375,
|
||||||
"narHash": "sha256-RRJyX0CUrs4uW4gMhd/X4rcDG8PTgaaCQM5rXEJOx6g=",
|
"narHash": "sha256-tSXbYmpnKSSWpzOrs27ie8X3I0yqKA6AuCzCYNtwbCU=",
|
||||||
"owner": "NixOS",
|
"owner": "NixOS",
|
||||||
"repo": "nixpkgs",
|
"repo": "nixpkgs",
|
||||||
"rev": "011567f35433879aae5024fc6ec53f2a0568a6c4",
|
"rev": "3d302c67ab8647327dba84fbdb443cdbf0e82744",
|
||||||
"type": "github"
|
"type": "github"
|
||||||
},
|
},
|
||||||
"original": {
|
"original": {
|
||||||
"id": "nixpkgs",
|
"id": "nixpkgs",
|
||||||
"ref": "nixos-23.05",
|
"ref": "nixos-22.11",
|
||||||
"type": "indirect"
|
"type": "indirect"
|
||||||
}
|
}
|
||||||
},
|
},
|
||||||
|
@ -103,11 +103,11 @@
|
||||||
"rust-overlay": "rust-overlay"
|
"rust-overlay": "rust-overlay"
|
||||||
},
|
},
|
||||||
"locked": {
|
"locked": {
|
||||||
"lastModified": 1682891040,
|
"lastModified": 1655189312,
|
||||||
"narHash": "sha256-hjajsi7lq24uYitUh4o04UJi1g0Qe6ruPL0s5DgPQMY=",
|
"narHash": "sha256-gpJ57OgIebUpO+7F00VltxSEy6dz2x6HeJ5BcRM8rDA=",
|
||||||
"owner": "cargo2nix",
|
"owner": "cargo2nix",
|
||||||
"repo": "cargo2nix",
|
"repo": "cargo2nix",
|
||||||
"rev": "0167b39f198d72acdf009265634504fd6f5ace15",
|
"rev": "c149357cc3d17f2849c73eb7a09d07a307cdcfe8",
|
||||||
"type": "github"
|
"type": "github"
|
||||||
},
|
},
|
||||||
"original": {
|
"original": {
|
||||||
|
@ -170,11 +170,11 @@
|
||||||
"nixpkgs": "nixpkgs_3"
|
"nixpkgs": "nixpkgs_3"
|
||||||
},
|
},
|
||||||
"locked": {
|
"locked": {
|
||||||
"lastModified": 1690502632,
|
"lastModified": 1683230849,
|
||||||
"narHash": "sha256-+k81RrxfphDUD5kekWbQ4xuZIHBEAQf67uivaQ34Afs=",
|
"narHash": "sha256-PjIKxX1xIALyWD8NyDeoIZMMfsS4/w/AweAcYOcsLNs=",
|
||||||
"owner": "1Password",
|
"owner": "1Password",
|
||||||
"repo": "typeshare",
|
"repo": "typeshare",
|
||||||
"rev": "9f74772af53759aee2f53e64478523e53083719e",
|
"rev": "2687f8d86ef38c07819715a2f31a21ffc25504e4",
|
||||||
"type": "github"
|
"type": "github"
|
||||||
},
|
},
|
||||||
"original": {
|
"original": {
|
||||||
|
@ -185,11 +185,11 @@
|
||||||
},
|
},
|
||||||
"unstable": {
|
"unstable": {
|
||||||
"locked": {
|
"locked": {
|
||||||
"lastModified": 1690367991,
|
"lastModified": 1681920287,
|
||||||
"narHash": "sha256-2VwOn1l8y6+cu7zjNE8MgeGJNNz1eat1HwHrINeogFA=",
|
"narHash": "sha256-+/d6XQQfhhXVfqfLROJoqj3TuG38CAeoT6jO1g9r1k0=",
|
||||||
"owner": "NixOS",
|
"owner": "NixOS",
|
||||||
"repo": "nixpkgs",
|
"repo": "nixpkgs",
|
||||||
"rev": "c9cf0708f00fbe553319258e48ca89ff9a413703",
|
"rev": "645bc49f34fa8eff95479f0345ff57e55b53437e",
|
||||||
"type": "github"
|
"type": "github"
|
||||||
},
|
},
|
||||||
"original": {
|
"original": {
|
||||||
|
|
62
flake.nix
62
flake.nix
|
@ -2,7 +2,7 @@
|
||||||
description = "Lumenescent Dreams Tools";
|
description = "Lumenescent Dreams Tools";
|
||||||
|
|
||||||
inputs = {
|
inputs = {
|
||||||
nixpkgs.url = "nixpkgs/nixos-23.05";
|
nixpkgs.url = "nixpkgs/nixos-22.11";
|
||||||
unstable.url = "nixpkgs/nixos-unstable";
|
unstable.url = "nixpkgs/nixos-unstable";
|
||||||
pkgs-cargo2nix.url = "github:cargo2nix/cargo2nix";
|
pkgs-cargo2nix.url = "github:cargo2nix/cargo2nix";
|
||||||
typeshare.url = "github:1Password/typeshare";
|
typeshare.url = "github:1Password/typeshare";
|
||||||
|
@ -19,17 +19,16 @@
|
||||||
pkgs = import nixpkgs { system = "x86_64-linux"; };
|
pkgs = import nixpkgs { system = "x86_64-linux"; };
|
||||||
pkgs-unstable = import unstable { system = "x86_64-linux"; };
|
pkgs-unstable = import unstable { system = "x86_64-linux"; };
|
||||||
cargo2nix = pkgs-cargo2nix.packages."x86_64-linux";
|
cargo2nix = pkgs-cargo2nix.packages."x86_64-linux";
|
||||||
# armPkgs = import nixpkgs {
|
armPkgs = import nixpkgs {
|
||||||
# system = "x86_64-linux";
|
system = "x86_64-linux";
|
||||||
# crossSystem = pkgs.lib.systems.examples.raspberryPi;
|
crossSystem = pkgs.lib.systems.examples.raspberryPi;
|
||||||
# };
|
};
|
||||||
in
|
in
|
||||||
pkgs.mkShell {
|
pkgs.mkShell {
|
||||||
name = "ld-tools-devshell";
|
name = "ld-tools-devshell";
|
||||||
buildInputs = [
|
buildInputs = [
|
||||||
pkgs.cargo-nextest
|
armPkgs.stdenv.cc
|
||||||
pkgs.clang
|
pkgs.clang
|
||||||
pkgs.crate2nix
|
|
||||||
pkgs.entr
|
pkgs.entr
|
||||||
pkgs.glade
|
pkgs.glade
|
||||||
pkgs.glib
|
pkgs.glib
|
||||||
|
@ -39,19 +38,60 @@
|
||||||
pkgs.gst_all_1.gst-plugins-ugly
|
pkgs.gst_all_1.gst-plugins-ugly
|
||||||
pkgs.gst_all_1.gstreamer
|
pkgs.gst_all_1.gstreamer
|
||||||
pkgs.gtk4
|
pkgs.gtk4
|
||||||
pkgs.libadwaita
|
|
||||||
pkgs.nodejs
|
pkgs.nodejs
|
||||||
pkgs.openssl
|
pkgs.openssl
|
||||||
pkgs.pipewire
|
pkgs.pipewire
|
||||||
pkgs.pkg-config
|
pkgs.pkg-config
|
||||||
pkgs.rustup
|
|
||||||
pkgs.sqlite
|
pkgs.sqlite
|
||||||
pkgs.sqlx-cli
|
pkgs.rustup
|
||||||
pkgs.udev
|
pkgs.cargo-nextest
|
||||||
|
pkgs.crate2nix
|
||||||
pkgs.wasm-pack
|
pkgs.wasm-pack
|
||||||
typeshare.packages."x86_64-linux".default
|
typeshare.packages."x86_64-linux".default
|
||||||
];
|
];
|
||||||
LIBCLANG_PATH="${pkgs.llvmPackages.libclang.lib}/lib";
|
LIBCLANG_PATH="${pkgs.llvmPackages.libclang.lib}/lib";
|
||||||
};
|
};
|
||||||
|
packages."x86_64-linux" =
|
||||||
|
let
|
||||||
|
pkgs = import nixpkgs { system = "x86_64-linux"; };
|
||||||
|
standardOverride = attrs: {
|
||||||
|
nativeBuildInputs = [
|
||||||
|
pkgs.pkg-config
|
||||||
|
pkgs.gtk4
|
||||||
|
];
|
||||||
|
verbose = true;
|
||||||
|
};
|
||||||
|
customBuildInfo = pkgs: pkgs.buildRustCrate.override {
|
||||||
|
defaultCrateOverrides = pkgs.defaultCrateOverrides // {
|
||||||
|
cairo-sys-rs = standardOverride;
|
||||||
|
graphene-sys = standardOverride;
|
||||||
|
gobject-sys = standardOverride;
|
||||||
|
pango-sys = standardOverride;
|
||||||
|
gio-sys = standardOverride;
|
||||||
|
gdk-pixbuf-sys = standardOverride;
|
||||||
|
gdk4-sys = standardOverride;
|
||||||
|
gsk4-sys = standardOverride;
|
||||||
|
gtk4-sys = standardOverride;
|
||||||
|
kifu-gtk = attrs: {
|
||||||
|
nativeBuildInputs = [
|
||||||
|
pkgs.glib
|
||||||
|
];
|
||||||
|
};
|
||||||
|
};
|
||||||
|
};
|
||||||
|
in {
|
||||||
|
# gobject-sys = pkgs.buildRustCrate cargo.internal.crates.gobject-sys;
|
||||||
|
kifu-gtk = (import ./kifu/kifu-gtk/Cargo.nix {
|
||||||
|
inherit pkgs;
|
||||||
|
buildRustCrateForPkgs = customBuildInfo;
|
||||||
|
rootFeatures = [ "screenplay" ];
|
||||||
|
release = true;
|
||||||
|
}).rootCrate.build;
|
||||||
|
cyberpunk-splash = (import ./cyberpunk-splash/Cargo.nix {
|
||||||
|
inherit pkgs;
|
||||||
|
buildRustCrateForPkgs = customBuildInfo;
|
||||||
|
release = true;
|
||||||
|
}).rootCrate.build;
|
||||||
|
};
|
||||||
};
|
};
|
||||||
}
|
}
|
||||||
|
|
|
@ -1,8 +1,9 @@
|
||||||
[package]
|
[package]
|
||||||
name = "result-extended"
|
name = "flow"
|
||||||
version = "0.1.0"
|
version = "0.1.0"
|
||||||
edition = "2021"
|
edition = "2021"
|
||||||
license = "GPL-3.0-only"
|
license = "GPL-3.0-only"
|
||||||
|
license-file = "../COPYING"
|
||||||
|
|
||||||
# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html
|
# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html
|
||||||
|
|
|
@ -0,0 +1,5 @@
|
||||||
|
dev:
|
||||||
|
cargo watch -x build
|
||||||
|
|
||||||
|
test:
|
||||||
|
cargo watch -x test
|
|
@ -33,84 +33,84 @@ use std::{error::Error, fmt};
|
||||||
/// statement.
|
/// statement.
|
||||||
pub trait FatalError: Error {}
|
pub trait FatalError: Error {}
|
||||||
|
|
||||||
/// Result<A, FE, E> represents a return value that might be a success, might be a fatal error, or
|
/// Flow<A, FE, E> represents a return value that might be a success, might be a fatal error, or
|
||||||
/// might be a normal handleable error.
|
/// might be a normal handleable error.
|
||||||
pub enum Result<A, E, FE> {
|
pub enum Flow<A, FE, E> {
|
||||||
/// The operation was successful
|
/// The operation was successful
|
||||||
Ok(A),
|
Ok(A),
|
||||||
/// Ordinary errors. These should be handled and the application should recover gracefully.
|
|
||||||
Err(E),
|
|
||||||
/// The operation encountered a fatal error. These should be bubbled up to a level that can
|
/// The operation encountered a fatal error. These should be bubbled up to a level that can
|
||||||
/// safely shut the application down.
|
/// safely shut the application down.
|
||||||
Fatal(FE),
|
Fatal(FE),
|
||||||
|
/// Ordinary errors. These should be handled and the application should recover gracefully.
|
||||||
|
Err(E),
|
||||||
}
|
}
|
||||||
|
|
||||||
impl<A, E, FE> Result<A, E, FE> {
|
impl<A, FE, E> Flow<A, FE, E> {
|
||||||
/// Apply an infallible function to a successful value.
|
/// Apply an infallible function to a successful value.
|
||||||
pub fn map<B, O>(self, mapper: O) -> Result<B, E, FE>
|
pub fn map<B, O>(self, mapper: O) -> Flow<B, FE, E>
|
||||||
where
|
where
|
||||||
O: FnOnce(A) -> B,
|
O: FnOnce(A) -> B,
|
||||||
{
|
{
|
||||||
match self {
|
match self {
|
||||||
Result::Ok(val) => Result::Ok(mapper(val)),
|
Flow::Ok(val) => Flow::Ok(mapper(val)),
|
||||||
Result::Err(err) => Result::Err(err),
|
Flow::Fatal(err) => Flow::Fatal(err),
|
||||||
Result::Fatal(err) => Result::Fatal(err),
|
Flow::Err(err) => Flow::Err(err),
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
/// Apply a potentially fallible function to a successful value.
|
/// Apply a potentially fallible function to a successful value.
|
||||||
///
|
///
|
||||||
/// Like `Result.and_then`, the mapping function can itself fail.
|
/// Like `Result.and_then`, the mapping function can itself fail.
|
||||||
pub fn and_then<B, O>(self, handler: O) -> Result<B, E, FE>
|
pub fn and_then<B, O>(self, handler: O) -> Flow<B, FE, E>
|
||||||
where
|
where
|
||||||
O: FnOnce(A) -> Result<B, E, FE>,
|
O: FnOnce(A) -> Flow<B, FE, E>,
|
||||||
{
|
{
|
||||||
match self {
|
match self {
|
||||||
Result::Ok(val) => handler(val),
|
Flow::Ok(val) => handler(val),
|
||||||
Result::Err(err) => Result::Err(err),
|
Flow::Fatal(err) => Flow::Fatal(err),
|
||||||
Result::Fatal(err) => Result::Fatal(err),
|
Flow::Err(err) => Flow::Err(err),
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
/// Map a normal error from one type to another. This is useful for converting an error from
|
/// Map a normal error from one type to another. This is useful for converting an error from
|
||||||
/// one type to another, especially in re-throwing an underlying error. `?` syntax does not
|
/// one type to another, especially in re-throwing an underlying error. `?` syntax does not
|
||||||
/// work with `Result`, so you will likely need to use this a lot.
|
/// work with `Flow`, so you will likely need to use this a lot.
|
||||||
pub fn map_err<F, O>(self, mapper: O) -> Result<A, F, FE>
|
pub fn map_err<F, O>(self, mapper: O) -> Flow<A, FE, F>
|
||||||
where
|
where
|
||||||
O: FnOnce(E) -> F,
|
O: FnOnce(E) -> F,
|
||||||
{
|
{
|
||||||
match self {
|
match self {
|
||||||
Result::Ok(val) => Result::Ok(val),
|
Flow::Ok(val) => Flow::Ok(val),
|
||||||
Result::Err(err) => Result::Err(mapper(err)),
|
Flow::Fatal(err) => Flow::Fatal(err),
|
||||||
Result::Fatal(err) => Result::Fatal(err),
|
Flow::Err(err) => Flow::Err(mapper(err)),
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
/// Provide a function to use to recover from (or simply re-throw) an error.
|
/// Provide a function to use to recover from (or simply re-throw) an error.
|
||||||
pub fn or_else<O, F>(self, handler: O) -> Result<A, F, FE>
|
pub fn or_else<O, F>(self, handler: O) -> Flow<A, FE, F>
|
||||||
where
|
where
|
||||||
O: FnOnce(E) -> Result<A, F, FE>,
|
O: FnOnce(E) -> Flow<A, FE, F>,
|
||||||
{
|
{
|
||||||
match self {
|
match self {
|
||||||
Result::Ok(val) => Result::Ok(val),
|
Flow::Ok(val) => Flow::Ok(val),
|
||||||
Result::Err(err) => handler(err),
|
Flow::Fatal(err) => Flow::Fatal(err),
|
||||||
Result::Fatal(err) => Result::Fatal(err),
|
Flow::Err(err) => handler(err),
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
/// Convert from a normal `Result` type to a `Result` type. The error condition for a `Result` will
|
/// Convert from a normal `Result` type to a `Flow` type. The error condition for a `Result` will
|
||||||
/// be treated as `Result::Err`, never `Result::Fatal`.
|
/// be treated as `Flow::Err`, never `Flow::Fatal`.
|
||||||
impl<A, E, FE> From<std::result::Result<A, E>> for Result<A, E, FE> {
|
impl<A, FE, E> From<Result<A, E>> for Flow<A, FE, E> {
|
||||||
fn from(r: std::result::Result<A, E>) -> Self {
|
fn from(r: Result<A, E>) -> Self {
|
||||||
match r {
|
match r {
|
||||||
Ok(val) => Result::Ok(val),
|
Ok(val) => Flow::Ok(val),
|
||||||
Err(err) => Result::Err(err),
|
Err(err) => Flow::Err(err),
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
impl<A, E, FE> fmt::Debug for Result<A, E, FE>
|
impl<A, FE, E> fmt::Debug for Flow<A, FE, E>
|
||||||
where
|
where
|
||||||
A: fmt::Debug,
|
A: fmt::Debug,
|
||||||
FE: fmt::Debug,
|
FE: fmt::Debug,
|
||||||
|
@ -118,14 +118,14 @@ where
|
||||||
{
|
{
|
||||||
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
|
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
|
||||||
match self {
|
match self {
|
||||||
Result::Ok(val) => f.write_fmt(format_args!("Result::Ok {:?}", val)),
|
Flow::Ok(val) => f.write_fmt(format_args!("Flow::Ok {:?}", val)),
|
||||||
Result::Err(err) => f.write_fmt(format_args!("Result::Err {:?}", err)),
|
Flow::Err(err) => f.write_fmt(format_args!("Flow::Err {:?}", err)),
|
||||||
Result::Fatal(err) => f.write_fmt(format_args!("Result::Fatal {:?}", err)),
|
Flow::Fatal(err) => f.write_fmt(format_args!("Flow::Fatal {:?}", err)),
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
impl<A, E, FE> PartialEq for Result<A, E, FE>
|
impl<A, FE, E> PartialEq for Flow<A, FE, E>
|
||||||
where
|
where
|
||||||
A: PartialEq,
|
A: PartialEq,
|
||||||
FE: PartialEq,
|
FE: PartialEq,
|
||||||
|
@ -133,27 +133,27 @@ where
|
||||||
{
|
{
|
||||||
fn eq(&self, rhs: &Self) -> bool {
|
fn eq(&self, rhs: &Self) -> bool {
|
||||||
match (self, rhs) {
|
match (self, rhs) {
|
||||||
(Result::Ok(val), Result::Ok(rhs)) => val == rhs,
|
(Flow::Ok(val), Flow::Ok(rhs)) => val == rhs,
|
||||||
(Result::Err(_), Result::Err(_)) => true,
|
(Flow::Err(_), Flow::Err(_)) => true,
|
||||||
(Result::Fatal(_), Result::Fatal(_)) => true,
|
(Flow::Fatal(_), Flow::Fatal(_)) => true,
|
||||||
_ => false,
|
_ => false,
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
/// Convenience function to create an ok value.
|
/// Convenience function to create an ok value.
|
||||||
pub fn ok<A, E: Error, FE: FatalError>(val: A) -> Result<A, E, FE> {
|
pub fn ok<A, FE: FatalError, E: Error>(val: A) -> Flow<A, FE, E> {
|
||||||
Result::Ok(val)
|
Flow::Ok(val)
|
||||||
}
|
}
|
||||||
|
|
||||||
/// Convenience function to create an error value.
|
/// Convenience function to create an error value.
|
||||||
pub fn error<A, E: Error, FE: FatalError>(err: E) -> Result<A, E, FE> {
|
pub fn error<A, FE: FatalError, E: Error>(err: E) -> Flow<A, FE, E> {
|
||||||
Result::Err(err)
|
Flow::Err(err)
|
||||||
}
|
}
|
||||||
|
|
||||||
/// Convenience function to create a fatal value.
|
/// Convenience function to create a fatal value.
|
||||||
pub fn fatal<A, E: Error, FE: FatalError>(err: FE) -> Result<A, E, FE> {
|
pub fn fatal<A, FE: FatalError, E: Error>(err: FE) -> Flow<A, FE, E> {
|
||||||
Result::Fatal(err)
|
Flow::Fatal(err)
|
||||||
}
|
}
|
||||||
|
|
||||||
/// Return early from the current function if the value is a fatal error.
|
/// Return early from the current function if the value is a fatal error.
|
||||||
|
@ -161,9 +161,9 @@ pub fn fatal<A, E: Error, FE: FatalError>(err: FE) -> Result<A, E, FE> {
|
||||||
macro_rules! return_fatal {
|
macro_rules! return_fatal {
|
||||||
($x:expr) => {
|
($x:expr) => {
|
||||||
match $x {
|
match $x {
|
||||||
Result::Fatal(err) => return Result::Fatal(err),
|
Flow::Fatal(err) => return Flow::Fatal(err),
|
||||||
Result::Err(err) => Err(err),
|
Flow::Err(err) => Err(err),
|
||||||
Result::Ok(val) => Ok(val),
|
Flow::Ok(val) => Ok(val),
|
||||||
}
|
}
|
||||||
};
|
};
|
||||||
}
|
}
|
||||||
|
@ -173,9 +173,9 @@ macro_rules! return_fatal {
|
||||||
macro_rules! return_error {
|
macro_rules! return_error {
|
||||||
($x:expr) => {
|
($x:expr) => {
|
||||||
match $x {
|
match $x {
|
||||||
Result::Ok(val) => val,
|
Flow::Ok(val) => val,
|
||||||
Result::Err(err) => return Result::Err(err),
|
Flow::Err(err) => return Flow::Err(err),
|
||||||
Result::Fatal(err) => return Result::Fatal(err),
|
Flow::Fatal(err) => return Flow::Fatal(err),
|
||||||
}
|
}
|
||||||
};
|
};
|
||||||
}
|
}
|
||||||
|
@ -210,45 +210,45 @@ mod test {
|
||||||
|
|
||||||
#[test]
|
#[test]
|
||||||
fn it_can_map_things() {
|
fn it_can_map_things() {
|
||||||
let success: Result<i32, Error, FatalError> = ok(15);
|
let success: Flow<i32, FatalError, Error> = ok(15);
|
||||||
assert_eq!(ok(16), success.map(|v| v + 1));
|
assert_eq!(ok(16), success.map(|v| v + 1));
|
||||||
}
|
}
|
||||||
|
|
||||||
#[test]
|
#[test]
|
||||||
fn it_can_chain_success() {
|
fn it_can_chain_success() {
|
||||||
let success: Result<i32, Error, FatalError> = ok(15);
|
let success: Flow<i32, FatalError, Error> = ok(15);
|
||||||
assert_eq!(ok(16), success.and_then(|v| ok(v + 1)));
|
assert_eq!(ok(16), success.and_then(|v| ok(v + 1)));
|
||||||
}
|
}
|
||||||
|
|
||||||
#[test]
|
#[test]
|
||||||
fn it_can_handle_an_error() {
|
fn it_can_handle_an_error() {
|
||||||
let failure: Result<i32, Error, FatalError> = error(Error::Error);
|
let failure: Flow<i32, FatalError, Error> = error(Error::Error);
|
||||||
assert_eq!(
|
assert_eq!(
|
||||||
ok::<i32, Error, FatalError>(16),
|
ok::<i32, FatalError, Error>(16),
|
||||||
failure.or_else(|_| ok(16))
|
failure.or_else(|_| ok(16))
|
||||||
);
|
);
|
||||||
}
|
}
|
||||||
|
|
||||||
#[test]
|
#[test]
|
||||||
fn early_exit_on_fatal() {
|
fn early_exit_on_fatal() {
|
||||||
fn ok_func() -> Result<i32, Error, FatalError> {
|
fn ok_func() -> Flow<i32, FatalError, Error> {
|
||||||
let value = return_fatal!(ok::<i32, Error, FatalError>(15));
|
let value = return_fatal!(ok::<i32, FatalError, Error>(15));
|
||||||
match value {
|
match value {
|
||||||
Ok(_) => ok(14),
|
Ok(_) => ok(14),
|
||||||
Err(err) => error(err),
|
Err(err) => error(err),
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
fn err_func() -> Result<i32, Error, FatalError> {
|
fn err_func() -> Flow<i32, FatalError, Error> {
|
||||||
let value = return_fatal!(error::<i32, Error, FatalError>(Error::Error));
|
let value = return_fatal!(error::<i32, FatalError, Error>(Error::Error));
|
||||||
match value {
|
match value {
|
||||||
Ok(_) => panic!("shouldn't have gotten here"),
|
Ok(_) => panic!("shouldn't have gotten here"),
|
||||||
Err(_) => ok(0),
|
Err(_) => ok(0),
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
fn fatal_func() -> Result<i32, Error, FatalError> {
|
fn fatal_func() -> Flow<i32, FatalError, Error> {
|
||||||
let _ = return_fatal!(fatal::<i32, Error, FatalError>(FatalError::FatalError));
|
return_fatal!(fatal::<i32, FatalError, Error>(FatalError::FatalError));
|
||||||
panic!("failed to bail");
|
panic!("failed to bail");
|
||||||
}
|
}
|
||||||
|
|
||||||
|
@ -259,19 +259,19 @@ mod test {
|
||||||
|
|
||||||
#[test]
|
#[test]
|
||||||
fn it_can_early_exit_on_all_errors() {
|
fn it_can_early_exit_on_all_errors() {
|
||||||
fn ok_func() -> Result<i32, Error, FatalError> {
|
fn ok_func() -> Flow<i32, FatalError, Error> {
|
||||||
let value = return_error!(ok::<i32, Error, FatalError>(15));
|
let value = return_error!(ok::<i32, FatalError, Error>(15));
|
||||||
assert_eq!(value, 15);
|
assert_eq!(value, 15);
|
||||||
ok(14)
|
ok(14)
|
||||||
}
|
}
|
||||||
|
|
||||||
fn err_func() -> Result<i32, Error, FatalError> {
|
fn err_func() -> Flow<i32, FatalError, Error> {
|
||||||
return_error!(error::<i32, Error, FatalError>(Error::Error));
|
return_error!(error::<i32, FatalError, Error>(Error::Error));
|
||||||
panic!("failed to bail");
|
panic!("failed to bail");
|
||||||
}
|
}
|
||||||
|
|
||||||
fn fatal_func() -> Result<i32, Error, FatalError> {
|
fn fatal_func() -> Flow<i32, FatalError, Error> {
|
||||||
return_error!(fatal::<i32, Error, FatalError>(FatalError::FatalError));
|
return_error!(fatal::<i32, FatalError, Error>(FatalError::FatalError));
|
||||||
panic!("failed to bail");
|
panic!("failed to bail");
|
||||||
}
|
}
|
||||||
|
|
|
@ -5,6 +5,7 @@ edition = "2018"
|
||||||
version = "0.2.0"
|
version = "0.2.0"
|
||||||
description = "An ergonomics wrapper around Fluent-RS"
|
description = "An ergonomics wrapper around Fluent-RS"
|
||||||
license = "GPL-3.0-only"
|
license = "GPL-3.0-only"
|
||||||
|
license-file = "../COPYING"
|
||||||
homepage = "https://github.com/luminescent-dreams/fluent-ergonomics"
|
homepage = "https://github.com/luminescent-dreams/fluent-ergonomics"
|
||||||
repository = "https://github.com/luminescent-dreams/fluent-ergonomics"
|
repository = "https://github.com/luminescent-dreams/fluent-ergonomics"
|
||||||
categories = ["internationalization"]
|
categories = ["internationalization"]
|
||||||
|
@ -18,7 +19,6 @@ include = [
|
||||||
]
|
]
|
||||||
|
|
||||||
[dependencies]
|
[dependencies]
|
||||||
fluent-bundle = "0.15"
|
fluent = "0.16"
|
||||||
unic-langid = "0.9"
|
unic-langid = "0.9"
|
||||||
fluent-syntax = "0.11"
|
fluent-syntax = "0.11"
|
||||||
intl-memoizer = "*"
|
|
||||||
|
|
|
@ -0,0 +1,9 @@
|
||||||
|
|
||||||
|
dev:
|
||||||
|
cargo watch -x build
|
||||||
|
|
||||||
|
test:
|
||||||
|
cargo watch -x test
|
||||||
|
|
||||||
|
test-once:
|
||||||
|
cargo test
|
|
@ -15,7 +15,7 @@ You should have received a copy of the GNU General Public License along with Lum
|
||||||
//! The Fluent class makes it easier to load translation bundles with language fallbacks and to go
|
//! The Fluent class makes it easier to load translation bundles with language fallbacks and to go
|
||||||
//! through the most common steps of translating a message.
|
//! through the most common steps of translating a message.
|
||||||
//!
|
//!
|
||||||
use fluent_bundle::{bundle::FluentBundle, FluentArgs, FluentError, FluentResource};
|
use fluent::{FluentArgs, FluentBundle, FluentError, FluentResource};
|
||||||
use fluent_syntax::parser::ParserError;
|
use fluent_syntax::parser::ParserError;
|
||||||
use std::collections::hash_map::Entry;
|
use std::collections::hash_map::Entry;
|
||||||
use std::collections::HashMap;
|
use std::collections::HashMap;
|
||||||
|
@ -103,14 +103,7 @@ impl From<FromUtf8Error> for Error {
|
||||||
#[derive(Clone, Default)]
|
#[derive(Clone, Default)]
|
||||||
pub struct FluentErgo {
|
pub struct FluentErgo {
|
||||||
languages: Vec<LanguageIdentifier>,
|
languages: Vec<LanguageIdentifier>,
|
||||||
bundles: Arc<
|
bundles: Arc<RwLock<HashMap<LanguageIdentifier, FluentBundle<FluentResource>>>>,
|
||||||
RwLock<
|
|
||||||
HashMap<
|
|
||||||
LanguageIdentifier,
|
|
||||||
FluentBundle<FluentResource, intl_memoizer::concurrent::IntlLangMemoizer>,
|
|
||||||
>,
|
|
||||||
>,
|
|
||||||
>,
|
|
||||||
}
|
}
|
||||||
|
|
||||||
impl fmt::Debug for FluentErgo {
|
impl fmt::Debug for FluentErgo {
|
||||||
|
@ -171,14 +164,11 @@ impl FluentErgo {
|
||||||
match entry {
|
match entry {
|
||||||
Entry::Occupied(mut e) => {
|
Entry::Occupied(mut e) => {
|
||||||
let bundle = e.get_mut();
|
let bundle = e.get_mut();
|
||||||
bundle.add_resource(res).map_err(Error::from)
|
bundle.add_resource(res).map_err(|err| Error::from(err))
|
||||||
}
|
}
|
||||||
Entry::Vacant(e) => {
|
Entry::Vacant(e) => {
|
||||||
let mut bundle: FluentBundle<
|
let mut bundle = FluentBundle::new(vec![lang]);
|
||||||
FluentResource,
|
bundle.add_resource(res).map_err(|err| Error::from(err))?;
|
||||||
intl_memoizer::concurrent::IntlLangMemoizer,
|
|
||||||
> = FluentBundle::new_concurrent(vec![lang]);
|
|
||||||
bundle.add_resource(res).map_err(Error::from)?;
|
|
||||||
e.insert(bundle);
|
e.insert(bundle);
|
||||||
Ok(())
|
Ok(())
|
||||||
}
|
}
|
||||||
|
@ -230,14 +220,14 @@ impl FluentErgo {
|
||||||
/// A typical call with arguments would look like this:
|
/// A typical call with arguments would look like this:
|
||||||
///
|
///
|
||||||
/// ```
|
/// ```
|
||||||
/// use fluent_bundle::{FluentArgs, FluentValue};
|
/// use fluent::{FluentArgs, FluentValue};
|
||||||
///
|
///
|
||||||
/// let eo_id = "eo".parse::<unic_langid::LanguageIdentifier>().unwrap();
|
/// let eo_id = "eo".parse::<unic_langid::LanguageIdentifier>().unwrap();
|
||||||
/// let en_id = "en-US".parse::<unic_langid::LanguageIdentifier>().unwrap();
|
/// let en_id = "en-US".parse::<unic_langid::LanguageIdentifier>().unwrap();
|
||||||
///
|
///
|
||||||
/// let mut fluent = fluent_ergonomics::FluentErgo::new(&[eo_id, en_id]);
|
/// let mut fluent = fluent_ergonomics::FluentErgo::new(&[eo_id, en_id]);
|
||||||
/// let mut args = FluentArgs::new();
|
/// let mut args = FluentArgs::new();
|
||||||
/// args.set("value", FluentValue::from("15"));
|
/// args.insert("value", FluentValue::from("15"));
|
||||||
/// let r = fluent.tr("length-without-label", Some(&args));
|
/// let r = fluent.tr("length-without-label", Some(&args));
|
||||||
/// ```
|
/// ```
|
||||||
///
|
///
|
||||||
|
@ -248,10 +238,16 @@ impl FluentErgo {
|
||||||
///
|
///
|
||||||
pub fn tr(&self, msgid: &str, args: Option<&FluentArgs>) -> Result<String, Error> {
|
pub fn tr(&self, msgid: &str, args: Option<&FluentArgs>) -> Result<String, Error> {
|
||||||
let bundles = self.bundles.read().unwrap();
|
let bundles = self.bundles.read().unwrap();
|
||||||
let result: Option<String> = self.languages.iter().find_map(|lang| {
|
let result: Option<String> = self
|
||||||
|
.languages
|
||||||
|
.iter()
|
||||||
|
.map(|lang| {
|
||||||
let bundle = bundles.get(lang)?;
|
let bundle = bundles.get(lang)?;
|
||||||
self.tr_(bundle, msgid, args)
|
self.tr_(bundle, msgid, args)
|
||||||
});
|
})
|
||||||
|
.filter(|v| v.is_some())
|
||||||
|
.map(|v| v.unwrap())
|
||||||
|
.next();
|
||||||
|
|
||||||
match result {
|
match result {
|
||||||
Some(r) => Ok(r),
|
Some(r) => Ok(r),
|
||||||
|
@ -261,7 +257,7 @@ impl FluentErgo {
|
||||||
|
|
||||||
fn tr_(
|
fn tr_(
|
||||||
&self,
|
&self,
|
||||||
bundle: &FluentBundle<FluentResource, intl_memoizer::concurrent::IntlLangMemoizer>,
|
bundle: &FluentBundle<FluentResource>,
|
||||||
msgid: &str,
|
msgid: &str,
|
||||||
args: Option<&FluentArgs>,
|
args: Option<&FluentArgs>,
|
||||||
) -> Option<String> {
|
) -> Option<String> {
|
||||||
|
@ -270,8 +266,8 @@ impl FluentErgo {
|
||||||
let res = match pattern {
|
let res = match pattern {
|
||||||
None => None,
|
None => None,
|
||||||
Some(p) => {
|
Some(p) => {
|
||||||
let res = bundle.format_pattern(p, args, &mut errors);
|
let res = bundle.format_pattern(&p, args, &mut errors);
|
||||||
if !errors.is_empty() {
|
if errors.len() > 0 {
|
||||||
println!("Errors in formatting: {:?}", errors)
|
println!("Errors in formatting: {:?}", errors)
|
||||||
}
|
}
|
||||||
|
|
||||||
|
@ -291,7 +287,7 @@ impl FluentErgo {
|
||||||
#[cfg(test)]
|
#[cfg(test)]
|
||||||
mod tests {
|
mod tests {
|
||||||
use super::FluentErgo;
|
use super::FluentErgo;
|
||||||
use fluent_bundle::{FluentArgs, FluentValue};
|
use fluent::{FluentArgs, FluentValue};
|
||||||
use unic_langid::LanguageIdentifier;
|
use unic_langid::LanguageIdentifier;
|
||||||
|
|
||||||
const EN_TRANSLATIONS: &'static str = "
|
const EN_TRANSLATIONS: &'static str = "
|
||||||
|
|
|
@ -1,7 +0,0 @@
|
||||||
# This file is automatically @generated by Cargo.
|
|
||||||
# It is not intended for manual editing.
|
|
||||||
version = 3
|
|
||||||
|
|
||||||
[[package]]
|
|
||||||
name = "geo-types"
|
|
||||||
version = "0.1.0"
|
|
|
@ -1,8 +0,0 @@
|
||||||
[package]
|
|
||||||
name = "geo-types"
|
|
||||||
version = "0.1.0"
|
|
||||||
edition = "2018"
|
|
||||||
|
|
||||||
# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html
|
|
||||||
|
|
||||||
[dependencies]
|
|
|
@ -1,37 +0,0 @@
|
||||||
use std::fmt;
|
|
||||||
|
|
||||||
#[derive(Clone, Debug, PartialEq)]
|
|
||||||
pub struct Latitude(f32);
|
|
||||||
|
|
||||||
impl From<f32> for Latitude {
|
|
||||||
fn from(val: f32) -> Self {
|
|
||||||
if !(-90.0..=90.0).contains(&val) {
|
|
||||||
panic!("Latitude is outside of range");
|
|
||||||
}
|
|
||||||
Self(val)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
impl fmt::Display for Latitude {
|
|
||||||
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
|
|
||||||
write!(f, "{}", self.0)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
#[derive(Clone, Debug, PartialEq)]
|
|
||||||
pub struct Longitude(f32);
|
|
||||||
|
|
||||||
impl From<f32> for Longitude {
|
|
||||||
fn from(val: f32) -> Self {
|
|
||||||
if !(-180.0..=180.0).contains(&val) {
|
|
||||||
panic!("Longitude is outside fo range");
|
|
||||||
}
|
|
||||||
Self(val)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
impl fmt::Display for Longitude {
|
|
||||||
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
|
|
||||||
write!(f, "{}", self.0)
|
|
||||||
}
|
|
||||||
}
|
|
|
@ -1,23 +0,0 @@
|
||||||
[package]
|
|
||||||
name = "gm-control-panel"
|
|
||||||
version = "0.1.0"
|
|
||||||
edition = "2021"
|
|
||||||
|
|
||||||
# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html
|
|
||||||
|
|
||||||
[dependencies]
|
|
||||||
adw = { version = "0.4", package = "libadwaita", features = [ "v1_2", "gtk_v4_6" ] }
|
|
||||||
config = { path = "../config" }
|
|
||||||
config-derive = { path = "../config-derive" }
|
|
||||||
futures = { version = "0.3" }
|
|
||||||
gio = { version = "0.17" }
|
|
||||||
glib = { version = "0.17" }
|
|
||||||
gdk = { version = "0.6", package = "gdk4" }
|
|
||||||
gtk = { version = "0.6", package = "gtk4", features = [ "v4_6" ] }
|
|
||||||
serde = { version = "1" }
|
|
||||||
serde_json = { version = "*" }
|
|
||||||
tokio = { version = "1", features = ["full"] }
|
|
||||||
|
|
||||||
[build-dependencies]
|
|
||||||
glib-build-tools = "0.16"
|
|
||||||
|
|
|
@ -1,7 +0,0 @@
|
||||||
fn main() {
|
|
||||||
glib_build_tools::compile_resources(
|
|
||||||
"resources",
|
|
||||||
"resources/gresources.xml",
|
|
||||||
"com.luminescent-dreams.gm-control-panel.gresource",
|
|
||||||
);
|
|
||||||
}
|
|
|
@ -1,6 +0,0 @@
|
||||||
<?xml version="1.0" encoding="UTF-8"?>
|
|
||||||
<gresources>
|
|
||||||
<gresource prefix="/com/luminescent-dreams/gm-control-panel/">
|
|
||||||
<file>style.css</file>
|
|
||||||
</gresource>
|
|
||||||
</gresources>
|
|
|
@ -1,6 +0,0 @@
|
||||||
.playlist-card {
|
|
||||||
margin: 8px;
|
|
||||||
padding: 8px;
|
|
||||||
min-width: 100px;
|
|
||||||
min-height: 100px;
|
|
||||||
}
|
|
|
@ -1,64 +0,0 @@
|
||||||
use crate::PlaylistCard;
|
|
||||||
use adw::prelude::AdwApplicationWindowExt;
|
|
||||||
use gio::resources_lookup_data;
|
|
||||||
use gtk::{prelude::*, STYLE_PROVIDER_PRIORITY_USER};
|
|
||||||
use std::iter::Iterator;
|
|
||||||
|
|
||||||
#[derive(Clone)]
|
|
||||||
pub struct ApplicationWindow {
|
|
||||||
pub window: adw::ApplicationWindow,
|
|
||||||
pub layout: gtk::FlowBox,
|
|
||||||
pub playlists: Vec<PlaylistCard>,
|
|
||||||
}
|
|
||||||
|
|
||||||
impl ApplicationWindow {
|
|
||||||
pub fn new(app: &adw::Application) -> Self {
|
|
||||||
let window = adw::ApplicationWindow::builder()
|
|
||||||
.application(app)
|
|
||||||
.title("GM-control-panel")
|
|
||||||
.width_request(500)
|
|
||||||
.build();
|
|
||||||
|
|
||||||
let stylesheet = String::from_utf8(
|
|
||||||
resources_lookup_data(
|
|
||||||
"/com/luminescent-dreams/gm-control-panel/style.css",
|
|
||||||
gio::ResourceLookupFlags::NONE,
|
|
||||||
)
|
|
||||||
.expect("stylesheet should just be available")
|
|
||||||
.to_vec(),
|
|
||||||
)
|
|
||||||
.expect("to parse stylesheet");
|
|
||||||
|
|
||||||
let provider = gtk::CssProvider::new();
|
|
||||||
provider.load_from_data(&stylesheet);
|
|
||||||
let context = window.style_context();
|
|
||||||
context.add_provider(&provider, STYLE_PROVIDER_PRIORITY_USER);
|
|
||||||
|
|
||||||
let layout = gtk::FlowBox::new();
|
|
||||||
|
|
||||||
let playlists: Vec<PlaylistCard> = vec![
|
|
||||||
"Creepy Cathedral",
|
|
||||||
"Joyful Tavern",
|
|
||||||
"Exploring",
|
|
||||||
"Out on the streets",
|
|
||||||
"The North Abbey",
|
|
||||||
]
|
|
||||||
.into_iter()
|
|
||||||
.map(|name| {
|
|
||||||
let playlist = PlaylistCard::default();
|
|
||||||
playlist.set_name(name);
|
|
||||||
playlist
|
|
||||||
})
|
|
||||||
.collect();
|
|
||||||
|
|
||||||
playlists.iter().for_each(|card| layout.append(card));
|
|
||||||
|
|
||||||
window.set_content(Some(&layout));
|
|
||||||
|
|
||||||
Self {
|
|
||||||
window,
|
|
||||||
layout,
|
|
||||||
playlists,
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
|
@ -1,40 +0,0 @@
|
||||||
use config::define_config;
|
|
||||||
use config_derive::ConfigOption;
|
|
||||||
use serde::{Deserialize, Serialize};
|
|
||||||
use std::path::PathBuf;
|
|
||||||
|
|
||||||
define_config! {
|
|
||||||
Language(Language),
|
|
||||||
MusicPath(MusicPath),
|
|
||||||
PlaylistDatabasePath(PlaylistDatabasePath),
|
|
||||||
}
|
|
||||||
|
|
||||||
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize, ConfigOption)]
|
|
||||||
pub struct Language(String);
|
|
||||||
|
|
||||||
impl std::ops::Deref for Language {
|
|
||||||
type Target = String;
|
|
||||||
fn deref(&self) -> &Self::Target {
|
|
||||||
&self.0
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize, ConfigOption)]
|
|
||||||
pub struct MusicPath(PathBuf);
|
|
||||||
|
|
||||||
impl std::ops::Deref for MusicPath {
|
|
||||||
type Target = PathBuf;
|
|
||||||
fn deref(&self) -> &Self::Target {
|
|
||||||
&self.0
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize, ConfigOption)]
|
|
||||||
pub struct PlaylistDatabasePath(PathBuf);
|
|
||||||
|
|
||||||
impl std::ops::Deref for PlaylistDatabasePath {
|
|
||||||
type Target = PathBuf;
|
|
||||||
fn deref(&self) -> &Self::Target {
|
|
||||||
&self.0
|
|
||||||
}
|
|
||||||
}
|
|
|
@ -1,59 +0,0 @@
|
||||||
use glib::{Continue, Sender};
|
|
||||||
use gtk::prelude::*;
|
|
||||||
use std::{
|
|
||||||
env,
|
|
||||||
sync::{Arc, RwLock},
|
|
||||||
};
|
|
||||||
|
|
||||||
mod app_window;
|
|
||||||
use app_window::ApplicationWindow;
|
|
||||||
|
|
||||||
mod config;
|
|
||||||
|
|
||||||
mod playlist_card;
|
|
||||||
use playlist_card::PlaylistCard;
|
|
||||||
|
|
||||||
mod types;
|
|
||||||
use types::PlaybackState;
|
|
||||||
|
|
||||||
#[derive(Clone, Debug)]
|
|
||||||
pub enum Message {}
|
|
||||||
|
|
||||||
#[derive(Clone)]
|
|
||||||
pub struct Core {
|
|
||||||
tx: Arc<RwLock<Option<Sender<Message>>>>,
|
|
||||||
}
|
|
||||||
|
|
||||||
pub fn main() {
|
|
||||||
gio::resources_register_include!("com.luminescent-dreams.gm-control-panel.gresource")
|
|
||||||
.expect("Failed to register resource");
|
|
||||||
|
|
||||||
let app = adw::Application::builder()
|
|
||||||
.application_id("com.luminescent-dreams.gm-control-panel")
|
|
||||||
.build();
|
|
||||||
|
|
||||||
let runtime = tokio::runtime::Builder::new_multi_thread()
|
|
||||||
.enable_all()
|
|
||||||
.build()
|
|
||||||
.unwrap();
|
|
||||||
|
|
||||||
let core = Core {
|
|
||||||
tx: Arc::new(RwLock::new(None)),
|
|
||||||
};
|
|
||||||
|
|
||||||
app.connect_activate(move |app| {
|
|
||||||
let (gtk_tx, gtk_rx) =
|
|
||||||
gtk::glib::MainContext::channel::<Message>(gtk::glib::PRIORITY_DEFAULT);
|
|
||||||
|
|
||||||
*core.tx.write().unwrap() = Some(gtk_tx);
|
|
||||||
|
|
||||||
let window = ApplicationWindow::new(app);
|
|
||||||
window.window.present();
|
|
||||||
|
|
||||||
gtk_rx.attach(None, move |_msg| Continue(true));
|
|
||||||
});
|
|
||||||
|
|
||||||
let args: Vec<String> = env::args().collect();
|
|
||||||
ApplicationExtManual::run_with_args(&app, &args);
|
|
||||||
runtime.shutdown_background();
|
|
||||||
}
|
|
|
@ -1,56 +0,0 @@
|
||||||
use crate::PlaybackState;
|
|
||||||
use glib::Object;
|
|
||||||
use gtk::{prelude::*, subclass::prelude::*};
|
|
||||||
|
|
||||||
pub struct PlaylistCardPrivate {
|
|
||||||
name: gtk::Label,
|
|
||||||
playing: gtk::Label,
|
|
||||||
}
|
|
||||||
|
|
||||||
impl Default for PlaylistCardPrivate {
|
|
||||||
fn default() -> Self {
|
|
||||||
Self {
|
|
||||||
name: gtk::Label::new(None),
|
|
||||||
playing: gtk::Label::new(Some("Stopped")),
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
#[glib::object_subclass]
|
|
||||||
impl ObjectSubclass for PlaylistCardPrivate {
|
|
||||||
const NAME: &'static str = "PlaylistCard";
|
|
||||||
type Type = PlaylistCard;
|
|
||||||
type ParentType = gtk::Box;
|
|
||||||
}
|
|
||||||
|
|
||||||
impl ObjectImpl for PlaylistCardPrivate {}
|
|
||||||
impl WidgetImpl for PlaylistCardPrivate {}
|
|
||||||
impl BoxImpl for PlaylistCardPrivate {}
|
|
||||||
|
|
||||||
glib::wrapper! {
|
|
||||||
pub struct PlaylistCard(ObjectSubclass<PlaylistCardPrivate>) @extends gtk::Box, gtk::Widget, @implements gtk::Orientable;
|
|
||||||
}
|
|
||||||
|
|
||||||
impl Default for PlaylistCard {
|
|
||||||
fn default() -> Self {
|
|
||||||
let s: Self = Object::builder().build();
|
|
||||||
s.set_orientation(gtk::Orientation::Vertical);
|
|
||||||
s.add_css_class("playlist-card");
|
|
||||||
s.add_css_class("card");
|
|
||||||
|
|
||||||
s.append(&s.imp().name);
|
|
||||||
s.append(&s.imp().playing);
|
|
||||||
|
|
||||||
s
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
impl PlaylistCard {
|
|
||||||
pub fn set_name(&self, s: &str) {
|
|
||||||
self.imp().name.set_text(s);
|
|
||||||
}
|
|
||||||
|
|
||||||
pub fn set_playback(&self, s: PlaybackState) {
|
|
||||||
self.imp().playing.set_text(&format!("{}", s))
|
|
||||||
}
|
|
||||||
}
|
|
|
@ -1,16 +0,0 @@
|
||||||
use std::fmt;
|
|
||||||
|
|
||||||
#[derive(Clone, Debug)]
|
|
||||||
pub enum PlaybackState {
|
|
||||||
Stopped,
|
|
||||||
Playing,
|
|
||||||
}
|
|
||||||
|
|
||||||
impl fmt::Display for PlaybackState {
|
|
||||||
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
|
|
||||||
match self {
|
|
||||||
Self::Stopped => write!(f, "Stopped"),
|
|
||||||
Self::Playing => write!(f, "Playing"),
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
|
@ -3,14 +3,15 @@ name = "hex-grid"
|
||||||
version = "0.1.0"
|
version = "0.1.0"
|
||||||
edition = "2021"
|
edition = "2021"
|
||||||
license = "GPL-3.0-only"
|
license = "GPL-3.0-only"
|
||||||
|
license-file = "../COPYING"
|
||||||
|
|
||||||
# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html
|
# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html
|
||||||
|
|
||||||
[dependencies]
|
[dependencies]
|
||||||
cairo-rs = "0.17"
|
cairo-rs = "0.16"
|
||||||
gio = "0.17"
|
gio = "0.16"
|
||||||
glib = "0.17"
|
glib = "0.16"
|
||||||
gtk = { version = "0.6", package = "gtk4" }
|
gtk = { version = "0.5", package = "gtk4" }
|
||||||
coordinates = { path = "../coordinates" }
|
coordinates = { path = "../coordinates" }
|
||||||
image = { version = "0.24" }
|
image = { version = "0.24" }
|
||||||
|
|
||||||
|
|
|
@ -1,5 +1,7 @@
|
||||||
|
use gtk::prelude::*;
|
||||||
|
|
||||||
fn main() {
|
fn main() {
|
||||||
let _ = gtk::init();
|
gtk::init();
|
||||||
for name in gtk::IconTheme::new().icon_names() {
|
for name in gtk::IconTheme::new().icon_names() {
|
||||||
println!("{}", name);
|
println!("{}", name);
|
||||||
}
|
}
|
||||||
|
|
|
@ -10,9 +10,10 @@ Luminescent Dreams Tools is distributed in the hope that it will be useful, but
|
||||||
You should have received a copy of the GNU General Public License along with Lumeto. If not, see <https://www.gnu.org/licenses/>.
|
You should have received a copy of the GNU General Public License along with Lumeto. If not, see <https://www.gnu.org/licenses/>.
|
||||||
*/
|
*/
|
||||||
|
|
||||||
|
use cairo::Context;
|
||||||
use coordinates::{hex_map::parse_data, AxialAddr};
|
use coordinates::{hex_map::parse_data, AxialAddr};
|
||||||
use gio::resources_lookup_data;
|
use gio::resources_lookup_data;
|
||||||
use glib::Object;
|
use glib::{subclass::InitializingObject, Object};
|
||||||
use gtk::{gio, prelude::*, subclass::prelude::*, Application, DrawingArea};
|
use gtk::{gio, prelude::*, subclass::prelude::*, Application, DrawingArea};
|
||||||
use image::io::Reader as ImageReader;
|
use image::io::Reader as ImageReader;
|
||||||
use std::{cell::RefCell, io::Cursor, rc::Rc};
|
use std::{cell::RefCell, io::Cursor, rc::Rc};
|
||||||
|
@ -22,7 +23,7 @@ mod palette_entry;
|
||||||
mod tile;
|
mod tile;
|
||||||
mod utilities;
|
mod utilities;
|
||||||
|
|
||||||
const APP_ID: &str = "com.luminescent-dreams.hex-grid";
|
const APP_ID: &'static str = "com.luminescent-dreams.hex-grid";
|
||||||
const HEX_RADIUS: f64 = 50.;
|
const HEX_RADIUS: f64 = 50.;
|
||||||
const MAP_RADIUS: usize = 3;
|
const MAP_RADIUS: usize = 3;
|
||||||
const DRAWING_ORIGIN: (f64, f64) = (1024. / 2., 768. / 2.);
|
const DRAWING_ORIGIN: (f64, f64) = (1024. / 2., 768. / 2.);
|
||||||
|
@ -177,14 +178,14 @@ impl ObjectImpl for HexGridWindowPrivate {
|
||||||
let norm_x = x - DRAWING_ORIGIN.0;
|
let norm_x = x - DRAWING_ORIGIN.0;
|
||||||
let norm_y = y - DRAWING_ORIGIN.1;
|
let norm_y = y - DRAWING_ORIGIN.1;
|
||||||
let q = (2. / 3. * norm_x) / HEX_RADIUS;
|
let q = (2. / 3. * norm_x) / HEX_RADIUS;
|
||||||
let r = (-1. / 3. * norm_x + (3_f64).sqrt() / 3. * norm_y) / HEX_RADIUS;
|
let r = (-1. / 3. * norm_x + (3. as f64).sqrt() / 3. * norm_y) / HEX_RADIUS;
|
||||||
|
|
||||||
let (q, r) = axial_round(q, r);
|
let (q, r) = axial_round(q, r);
|
||||||
let coordinate = AxialAddr::new(q, r);
|
let coordinate = AxialAddr::new(q, r);
|
||||||
canvas_address.set_value(&format!("{:.0} {:.0}", x, y));
|
canvas_address.set_value(&format!("{:.0} {:.0}", x, y));
|
||||||
|
|
||||||
if coordinate.distance(&AxialAddr::origin()) > MAP_RADIUS {
|
if coordinate.distance(&AxialAddr::origin()) > MAP_RADIUS {
|
||||||
hex_address.set_value("-----");
|
hex_address.set_value(&format!("-----"));
|
||||||
*c.borrow_mut() = None;
|
*c.borrow_mut() = None;
|
||||||
} else {
|
} else {
|
||||||
hex_address.set_value(&format!("{:.0} {:.0}", coordinate.q(), coordinate.r()));
|
hex_address.set_value(&format!("{:.0} {:.0}", coordinate.q(), coordinate.r()));
|
||||||
|
@ -208,10 +209,10 @@ impl ObjectImpl for HexGridWindowPrivate {
|
||||||
DRAWING_ORIGIN.0 + HEX_RADIUS * (3. / 2. * (coordinate.q() as f64));
|
DRAWING_ORIGIN.0 + HEX_RADIUS * (3. / 2. * (coordinate.q() as f64));
|
||||||
let center_y = DRAWING_ORIGIN.1
|
let center_y = DRAWING_ORIGIN.1
|
||||||
+ HEX_RADIUS
|
+ HEX_RADIUS
|
||||||
* ((3_f64).sqrt() / 2. * (coordinate.q() as f64)
|
* ((3. as f64).sqrt() / 2. * (coordinate.q() as f64)
|
||||||
+ (3_f64).sqrt() * (coordinate.r() as f64));
|
+ (3. as f64).sqrt() * (coordinate.r() as f64));
|
||||||
let translate_x = center_x - HEX_RADIUS;
|
let translate_x = center_x - HEX_RADIUS;
|
||||||
let translate_y = center_y - (3_f64).sqrt() * HEX_RADIUS / 2.;
|
let translate_y = center_y - (3. as f64).sqrt() * HEX_RADIUS / 2.;
|
||||||
|
|
||||||
let tile = match hex_map.get(&coordinate).unwrap() {
|
let tile = match hex_map.get(&coordinate).unwrap() {
|
||||||
tile::Terrain::Mountain => &mountain,
|
tile::Terrain::Mountain => &mountain,
|
||||||
|
@ -227,7 +228,7 @@ impl ObjectImpl for HexGridWindowPrivate {
|
||||||
}
|
}
|
||||||
});
|
});
|
||||||
}
|
}
|
||||||
self.drawing_area.add_controller(motion_controller);
|
self.drawing_area.add_controller(&motion_controller);
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
@ -248,11 +249,10 @@ impl HexGridWindow {
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
/*
|
|
||||||
fn draw_hexagon(context: &Context, center_x: f64, center_y: f64, radius: f64) {
|
fn draw_hexagon(context: &Context, center_x: f64, center_y: f64, radius: f64) {
|
||||||
let ul_x = center_x - radius;
|
let ul_x = center_x - radius;
|
||||||
let ul_y = center_y - (3_f64).sqrt() * radius / 2.;
|
let ul_y = center_y - (3. as f64).sqrt() * radius / 2.;
|
||||||
let points: Vec<(f64, f64)> = utilities::hexagon(radius * 2., (3_f64).sqrt() * radius);
|
let points: Vec<(f64, f64)> = utilities::hexagon(radius * 2., (3. as f64).sqrt() * radius);
|
||||||
context.new_path();
|
context.new_path();
|
||||||
context.move_to(ul_x + points[0].0, ul_y + points[0].1);
|
context.move_to(ul_x + points[0].0, ul_y + points[0].1);
|
||||||
context.line_to(ul_x + points[1].0, ul_y + points[1].1);
|
context.line_to(ul_x + points[1].0, ul_y + points[1].1);
|
||||||
|
@ -262,7 +262,6 @@ fn draw_hexagon(context: &Context, center_x: f64, center_y: f64, radius: f64) {
|
||||||
context.line_to(ul_x + points[5].0, ul_y + points[5].1);
|
context.line_to(ul_x + points[5].0, ul_y + points[5].1);
|
||||||
context.close_path();
|
context.close_path();
|
||||||
}
|
}
|
||||||
*/
|
|
||||||
|
|
||||||
fn axial_round(q_f64: f64, r_f64: f64) -> (i32, i32) {
|
fn axial_round(q_f64: f64, r_f64: f64) -> (i32, i32) {
|
||||||
let s_f64 = -q_f64 - r_f64;
|
let s_f64 = -q_f64 - r_f64;
|
||||||
|
|
|
@ -27,20 +27,20 @@ pub fn hexagon(width: f64, height: f64) -> Vec<(f64, f64)> {
|
||||||
(center_x + radius, center_y),
|
(center_x + radius, center_y),
|
||||||
(
|
(
|
||||||
center_x + radius / 2.,
|
center_x + radius / 2.,
|
||||||
center_y + (3_f64).sqrt() * radius / 2.,
|
center_y + (3. as f64).sqrt() * radius / 2.,
|
||||||
),
|
),
|
||||||
(
|
(
|
||||||
center_x - radius / 2.,
|
center_x - radius / 2.,
|
||||||
center_y + (3_f64).sqrt() * radius / 2.,
|
center_y + (3. as f64).sqrt() * radius / 2.,
|
||||||
),
|
),
|
||||||
(center_x - radius, center_y),
|
(center_x - radius, center_y),
|
||||||
(
|
(
|
||||||
center_x - radius / 2.,
|
center_x - radius / 2.,
|
||||||
center_y - (3_f64).sqrt() * radius / 2.,
|
center_y - (3. as f64).sqrt() * radius / 2.,
|
||||||
),
|
),
|
||||||
(
|
(
|
||||||
center_x + radius / 2.,
|
center_x + radius / 2.,
|
||||||
center_y - (3_f64).sqrt() * radius / 2.,
|
center_y - (3. as f64).sqrt() * radius / 2.,
|
||||||
),
|
),
|
||||||
]
|
]
|
||||||
}
|
}
|
||||||
|
|
File diff suppressed because it is too large
Load Diff
|
@ -1,5 +1,5 @@
|
||||||
[package]
|
[package]
|
||||||
name = "ifc"
|
name = "international-fixed-calendar"
|
||||||
description = "chrono-compatible-ish date objects for the International Fixed Calendar"
|
description = "chrono-compatible-ish date objects for the International Fixed Calendar"
|
||||||
version = "0.1.0"
|
version = "0.1.0"
|
||||||
authors = ["Savanni D'Gerinel <savanni@luminescent-dreams.com>"]
|
authors = ["Savanni D'Gerinel <savanni@luminescent-dreams.com>"]
|
||||||
|
@ -7,8 +7,21 @@ edition = "2018"
|
||||||
keywords = ["date", "time", "calendar"]
|
keywords = ["date", "time", "calendar"]
|
||||||
categories = ["date-and-time"]
|
categories = ["date-and-time"]
|
||||||
license = "GPL-3.0-only"
|
license = "GPL-3.0-only"
|
||||||
|
license-file = "../COPYING"
|
||||||
|
|
||||||
[dependencies]
|
[dependencies]
|
||||||
chrono = { version = "0.4" }
|
chrono = "0.4"
|
||||||
|
chrono-tz = "0.6"
|
||||||
|
iron = "0.6.1"
|
||||||
|
mustache = "0.9.0"
|
||||||
|
params = "*"
|
||||||
|
router = "*"
|
||||||
serde = { version = "1.0", features = ["derive"] }
|
serde = { version = "1.0", features = ["derive"] }
|
||||||
thiserror = { version = "1" }
|
|
||||||
|
[[bin]]
|
||||||
|
name = "ifc-today"
|
||||||
|
path = "src/today.rs"
|
||||||
|
|
||||||
|
[[bin]]
|
||||||
|
name = "ifc-web"
|
||||||
|
path = "src/web.rs"
|
||||||
|
|
|
@ -0,0 +1,9 @@
|
||||||
|
|
||||||
|
dev:
|
||||||
|
cargo watch -x build
|
||||||
|
|
||||||
|
test:
|
||||||
|
cargo watch -x test
|
||||||
|
|
||||||
|
test-once:
|
||||||
|
cargo test
|
Some files were not shown because too many files have changed in this diff Show More
Loading…
Reference in New Issue