Compare commits
13 Commits
5baaf9fb8a ... 83a4839b1d
Author | SHA1 | Date |
---|---|---|
Savanni D'Gerinel | 83a4839b1d | |
Savanni D'Gerinel | 0e0d67a9ac | |
Savanni D'Gerinel | e5fb605816 | |
Savanni D'Gerinel | f9db002464 | |
Savanni D'Gerinel | 0ac9bb74a6 | |
Savanni D'Gerinel | f034dfcb8b | |
Savanni D'Gerinel | 7abb33c4fe | |
Savanni D'Gerinel | 581979fc54 | |
Savanni D'Gerinel | bf93625225 | |
Savanni D'Gerinel | 778da0b651 | |
Savanni D'Gerinel | 8b53114d0d | |
Savanni D'Gerinel | 42e931d780 | |
Savanni D'Gerinel | 532210db03 | |
@@ -5,7 +5,7 @@ dist
result
*.tgz
*.tar.gz
file-service/*.sqlite
file-service/*.sqlite-shm
file-service/*.sqlite-wal
*.sqlite
*.sqlite-shm
*.sqlite-wal
file-service/var

@ -25,14 +25,15 @@ checksum = "aae1277d39aeec15cb388266ecc24b11c80469deae6067e17a1a7aa9e5c1f234"
|
|||
|
||||
[[package]]
|
||||
name = "ahash"
|
||||
version = "0.8.3"
|
||||
version = "0.8.6"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "2c99f64d1e06488f620f932677e24bc6e2897582980441ae90a671415bd7ec2f"
|
||||
checksum = "91429305e9f0a25f6205c5b8e0d2db09e0708a7a6df0f42212bb56c32c8ac97a"
|
||||
dependencies = [
|
||||
"cfg-if",
|
||||
"getrandom",
|
||||
"once_cell",
|
||||
"version_check 0.9.4",
|
||||
"zerocopy",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
|
@ -134,6 +135,21 @@ dependencies = [
|
|||
"num-traits",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "authdb"
|
||||
version = "0.1.0"
|
||||
dependencies = [
|
||||
"base64ct",
|
||||
"clap",
|
||||
"cool_asserts",
|
||||
"serde 1.0.188",
|
||||
"sha2",
|
||||
"sqlx",
|
||||
"thiserror",
|
||||
"tokio",
|
||||
"uuid 0.4.0",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "autocfg"
|
||||
version = "0.1.8"
|
||||
|
@ -562,9 +578,9 @@ dependencies = [
|
|||
|
||||
[[package]]
|
||||
name = "crc-catalog"
|
||||
version = "2.2.0"
|
||||
version = "2.4.0"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "9cace84e55f07e7301bae1c519df89cdad8cc3cd868413d3fdbdeca9ff3db484"
|
||||
checksum = "19d374276b40fb8bbdee95aef7c7fa6b5316ec764510eb64b8dd0e2ed0d7e7f5"
|
||||
|
||||
[[package]]
|
||||
name = "crc32fast"
|
||||
|
@ -748,6 +764,7 @@ checksum = "a0b0a86c5d31c93238ff4b694fa31f3acdf67440770dc314c57d90e433914397"
|
|||
dependencies = [
|
||||
"generic-array 0.14.7",
|
||||
"num-traits",
|
||||
"serde 1.0.188",
|
||||
"typenum",
|
||||
]
|
||||
|
||||
|
@ -922,6 +939,7 @@ dependencies = [
|
|||
name = "file-service"
|
||||
version = "0.2.0"
|
||||
dependencies = [
|
||||
"authdb",
|
||||
"base64ct",
|
||||
"build_html",
|
||||
"bytes",
|
||||
|
@ -931,7 +949,7 @@ dependencies = [
|
|||
"cool_asserts",
|
||||
"futures-util",
|
||||
"hex-string",
|
||||
"http",
|
||||
"http 0.2.9",
|
||||
"image 0.23.14",
|
||||
"log 0.4.20",
|
||||
"logger",
|
||||
|
@ -941,7 +959,6 @@ dependencies = [
|
|||
"serde 1.0.188",
|
||||
"serde_json",
|
||||
"sha2",
|
||||
"sqlx",
|
||||
"tempdir",
|
||||
"thiserror",
|
||||
"tokio",
|
||||
|
@ -959,10 +976,6 @@ checksum = "8fcfdc7a0362c9f4444381a9e697c79d435fe65b52a37466fc2c1184cee9edc6"
|
|||
name = "fitnesstrax"
|
||||
version = "0.1.0"
|
||||
dependencies = [
|
||||
"chrono",
|
||||
"chrono-tz",
|
||||
"dimensioned 0.8.0",
|
||||
"emseries",
|
||||
"gio",
|
||||
"glib",
|
||||
"glib-build-tools 0.18.0",
|
||||
|
@ -1115,6 +1128,18 @@ dependencies = [
|
|||
"syn 2.0.37",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "ft-core"
|
||||
version = "0.1.0"
|
||||
dependencies = [
|
||||
"chrono",
|
||||
"chrono-tz",
|
||||
"dimensioned 0.8.0",
|
||||
"emseries",
|
||||
"serde 1.0.188",
|
||||
"tempfile",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "fuchsia-cprng"
|
||||
version = "0.1.1"
|
||||
|
@ -1608,7 +1633,7 @@ dependencies = [
|
|||
"futures-core",
|
||||
"futures-sink",
|
||||
"futures-util",
|
||||
"http",
|
||||
"http 0.2.9",
|
||||
"indexmap 1.9.3",
|
||||
"slab",
|
||||
"tokio",
|
||||
|
@ -1659,7 +1684,7 @@ dependencies = [
|
|||
"base64 0.21.4",
|
||||
"bytes",
|
||||
"headers-core",
|
||||
"http",
|
||||
"http 0.2.9",
|
||||
"httpdate",
|
||||
"mime 0.3.17",
|
||||
"sha1",
|
||||
|
@ -1671,7 +1696,7 @@ version = "0.2.0"
|
|||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "e7f66481bfee273957b1f20485a4ff3362987f85b2c236580d81b4eb7a326429"
|
||||
dependencies = [
|
||||
"http",
|
||||
"http 0.2.9",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
|
@ -1752,6 +1777,17 @@ dependencies = [
|
|||
"itoa",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "http"
|
||||
version = "1.0.0"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "b32afd38673a8016f7c9ae69e5af41a58f81b1d31689040f2f1959594ce194ea"
|
||||
dependencies = [
|
||||
"bytes",
|
||||
"fnv",
|
||||
"itoa",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "http-body"
|
||||
version = "0.4.5"
|
||||
|
@ -1759,7 +1795,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
|
|||
checksum = "d5f38f16d184e36f2408a55281cd658ecbd3ca05cce6d6510a176eca393e26d1"
|
||||
dependencies = [
|
||||
"bytes",
|
||||
"http",
|
||||
"http 0.2.9",
|
||||
"pin-project-lite",
|
||||
]
|
||||
|
||||
|
@ -1811,7 +1847,7 @@ dependencies = [
|
|||
"futures-core",
|
||||
"futures-util",
|
||||
"h2",
|
||||
"http",
|
||||
"http 0.2.9",
|
||||
"http-body",
|
||||
"httparse",
|
||||
"httpdate",
|
||||
|
@ -2348,7 +2384,7 @@ dependencies = [
|
|||
"bytes",
|
||||
"encoding_rs",
|
||||
"futures-util",
|
||||
"http",
|
||||
"http 0.2.9",
|
||||
"httparse",
|
||||
"log 0.4.20",
|
||||
"memchr",
|
||||
|
@ -3215,7 +3251,7 @@ dependencies = [
|
|||
"futures-core",
|
||||
"futures-util",
|
||||
"h2",
|
||||
"http",
|
||||
"http 0.2.9",
|
||||
"http-body",
|
||||
"hyper 0.14.27",
|
||||
"hyper-tls",
|
||||
|
@ -3320,16 +3356,14 @@ dependencies = [
|
|||
|
||||
[[package]]
|
||||
name = "rsa"
|
||||
version = "0.9.2"
|
||||
version = "0.9.3"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "6ab43bb47d23c1a631b4b680199a45255dce26fa9ab2fa902581f624ff13e6a8"
|
||||
checksum = "86ef35bf3e7fe15a53c4ab08a998e42271eab13eb0db224126bc7bc4c4bad96d"
|
||||
dependencies = [
|
||||
"byteorder",
|
||||
"const-oid",
|
||||
"digest",
|
||||
"num-bigint-dig",
|
||||
"num-integer",
|
||||
"num-iter",
|
||||
"num-traits",
|
||||
"pkcs1",
|
||||
"pkcs8",
|
||||
|
@ -3604,9 +3638,9 @@ dependencies = [
|
|||
|
||||
[[package]]
|
||||
name = "signature"
|
||||
version = "2.1.0"
|
||||
version = "2.2.0"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "5e1788eed21689f9cf370582dfc467ef36ed9c707f073528ddafa8d83e3b8500"
|
||||
checksum = "77549399552de45a898a580c1b41d445bf730df867cc44e6c0233bbc4b8329de"
|
||||
dependencies = [
|
||||
"digest",
|
||||
"rand_core 0.6.4",
|
||||
|
@ -4020,18 +4054,18 @@ dependencies = [
|
|||
|
||||
[[package]]
|
||||
name = "thiserror"
|
||||
version = "1.0.49"
|
||||
version = "1.0.50"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "1177e8c6d7ede7afde3585fd2513e611227efd6481bd78d2e82ba1ce16557ed4"
|
||||
checksum = "f9a7210f5c9a7156bb50aa36aed4c95afb51df0df00713949448cf9e97d382d2"
|
||||
dependencies = [
|
||||
"thiserror-impl",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "thiserror-impl"
|
||||
version = "1.0.49"
|
||||
version = "1.0.50"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "10712f02019e9288794769fba95cd6847df9874d49d871d062172f9dd41bc4cc"
|
||||
checksum = "266b2e40bc00e5a6c09c3584011e08b06f123c00362c92b975ba9843aaaa14b8"
|
||||
dependencies = [
|
||||
"proc-macro2",
|
||||
"quote",
|
||||
|
@ -4266,9 +4300,9 @@ dependencies = [
|
|||
|
||||
[[package]]
|
||||
name = "tracing-attributes"
|
||||
version = "0.1.26"
|
||||
version = "0.1.27"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "5f4f31f56159e98206da9efd823404b79b6ef3143b4a7ab76e67b1751b25a4ab"
|
||||
checksum = "34704c8d6ebcbc939824180af020566b01a7c01f80641264eba0999f6c2b6be7"
|
||||
dependencies = [
|
||||
"proc-macro2",
|
||||
"quote",
|
||||
|
@ -4309,7 +4343,7 @@ dependencies = [
|
|||
"byteorder",
|
||||
"bytes",
|
||||
"data-encoding",
|
||||
"http",
|
||||
"http 0.2.9",
|
||||
"httparse",
|
||||
"log 0.4.20",
|
||||
"rand 0.8.5",
|
||||
|
@ -4554,6 +4588,18 @@ version = "0.9.4"
|
|||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "49874b5167b65d7193b8aba1567f5c7d93d001cafc34600cee003eda787e483f"
|
||||
|
||||
[[package]]
|
||||
name = "visions"
|
||||
version = "0.1.0"
|
||||
dependencies = [
|
||||
"authdb",
|
||||
"http 1.0.0",
|
||||
"serde 1.0.188",
|
||||
"serde_json",
|
||||
"tokio",
|
||||
"warp",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "void"
|
||||
version = "1.0.2"
|
||||
|
@ -4597,7 +4643,7 @@ dependencies = [
|
|||
"futures-channel",
|
||||
"futures-util",
|
||||
"headers",
|
||||
"http",
|
||||
"http 0.2.9",
|
||||
"hyper 0.14.27",
|
||||
"log 0.4.20",
|
||||
"mime 0.3.17",
|
||||
|
@ -4844,10 +4890,30 @@ dependencies = [
|
|||
]
|
||||
|
||||
[[package]]
|
||||
name = "zeroize"
|
||||
version = "1.6.0"
|
||||
name = "zerocopy"
|
||||
version = "0.7.26"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "2a0956f1ba7c7909bfb66c2e9e4124ab6f6482560f6628b5aaeba39207c9aad9"
|
||||
checksum = "e97e415490559a91254a2979b4829267a57d2fcd741a98eee8b722fb57289aa0"
|
||||
dependencies = [
|
||||
"zerocopy-derive",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "zerocopy-derive"
|
||||
version = "0.7.26"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "dd7e48ccf166952882ca8bd778a43502c64f33bf94c12ebe2a7f08e5a0f6689f"
|
||||
dependencies = [
|
||||
"proc-macro2",
|
||||
"quote",
|
||||
"syn 2.0.37",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "zeroize"
|
||||
version = "1.7.0"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "525b4ec142c6b68a2d10f01f7bbf6755599ca3f81ea53b8431b7dd348f5fdb2d"
|
||||
|
||||
[[package]]
|
||||
name = "zune-inflate"
|
||||
|
|
|
@@ -1,6 +1,7 @@
[workspace]
resolver = "2"
members = [
"authdb",
"changeset",
"config",
"config-derive",
@@ -9,7 +10,8 @@ members = [
"dashboard",
"emseries",
"file-service",
"fitnesstrax",
"fitnesstrax/core",
"fitnesstrax/app",
"fluent-ergonomics",
"geo-types",
"gm-control-panel",
@@ -24,4 +26,5 @@ members = [
"screenplay",
"sgf",
"tree",
"visions/server",
]

@@ -0,0 +1,27 @@
[package]
name = "authdb"
version = "0.1.0"
edition = "2021"

# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html

[lib]
name = "authdb"
path = "src/lib.rs"

[[bin]]
name = "auth-cli"
path = "src/bin/cli.rs"

[dependencies]
base64ct = { version = "1", features = [ "alloc" ] }
clap = { version = "4", features = [ "derive" ] }
serde = { version = "1.0", features = ["derive"] }
sha2 = { version = "0.10" }
sqlx = { version = "0.7", features = [ "runtime-tokio", "sqlite" ] }
thiserror = { version = "1" }
tokio = { version = "1", features = [ "full" ] }
uuid = { version = "0.4", features = [ "serde", "v4" ] }

[dev-dependencies]
cool_asserts = "*"

@@ -1,5 +1,5 @@
use authdb::{AuthDB, Username};
use clap::{Parser, Subcommand};
use file_service::{AuthDB, Username};
use std::path::PathBuf;

#[derive(Subcommand, Debug)]

@ -0,0 +1,302 @@
|
|||
use base64ct::{Base64, Encoding};
|
||||
use serde::{Deserialize, Serialize};
|
||||
use sha2::{Digest, Sha256};
|
||||
use sqlx::{
|
||||
sqlite::{SqlitePool, SqliteRow},
|
||||
Row,
|
||||
};
|
||||
use std::ops::Deref;
|
||||
use std::path::PathBuf;
|
||||
use thiserror::Error;
|
||||
use uuid::Uuid;
|
||||
|
||||
#[derive(Debug, Error)]
|
||||
pub enum AuthError {
|
||||
#[error("authentication token is duplicated")]
|
||||
DuplicateAuthToken,
|
||||
|
||||
#[error("session token is duplicated")]
|
||||
DuplicateSessionToken,
|
||||
|
||||
#[error("database failed")]
|
||||
SqlError(sqlx::Error),
|
||||
}
|
||||
|
||||
impl From<sqlx::Error> for AuthError {
|
||||
fn from(err: sqlx::Error) -> AuthError {
|
||||
AuthError::SqlError(err)
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Clone, Debug, Serialize, Deserialize, Hash, PartialEq, Eq)]
|
||||
pub struct Username(String);
|
||||
|
||||
impl From<String> for Username {
|
||||
fn from(s: String) -> Self {
|
||||
Self(s)
|
||||
}
|
||||
}
|
||||
|
||||
impl From<&str> for Username {
|
||||
fn from(s: &str) -> Self {
|
||||
Self(s.to_owned())
|
||||
}
|
||||
}
|
||||
|
||||
impl From<Username> for String {
|
||||
fn from(s: Username) -> Self {
|
||||
Self::from(&s)
|
||||
}
|
||||
}
|
||||
|
||||
impl From<&Username> for String {
|
||||
fn from(s: &Username) -> Self {
|
||||
let Username(s) = s;
|
||||
Self::from(s)
|
||||
}
|
||||
}
|
||||
|
||||
impl Deref for Username {
|
||||
type Target = String;
|
||||
fn deref(&self) -> &Self::Target {
|
||||
&self.0
|
||||
}
|
||||
}
|
||||
|
||||
impl sqlx::FromRow<'_, SqliteRow> for Username {
|
||||
fn from_row(row: &SqliteRow) -> sqlx::Result<Self> {
|
||||
let name: String = row.try_get("username")?;
|
||||
Ok(Username::from(name))
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Clone, Debug, Serialize, Deserialize, Hash, PartialEq, Eq)]
|
||||
pub struct AuthToken(String);
|
||||
|
||||
impl From<String> for AuthToken {
|
||||
fn from(s: String) -> Self {
|
||||
Self(s)
|
||||
}
|
||||
}
|
||||
|
||||
impl From<&str> for AuthToken {
|
||||
fn from(s: &str) -> Self {
|
||||
Self(s.to_owned())
|
||||
}
|
||||
}
|
||||
|
||||
impl From<AuthToken> for PathBuf {
|
||||
fn from(s: AuthToken) -> Self {
|
||||
Self::from(&s)
|
||||
}
|
||||
}
|
||||
|
||||
impl From<&AuthToken> for PathBuf {
|
||||
fn from(s: &AuthToken) -> Self {
|
||||
let AuthToken(s) = s;
|
||||
Self::from(s)
|
||||
}
|
||||
}
|
||||
|
||||
impl Deref for AuthToken {
|
||||
type Target = String;
|
||||
fn deref(&self) -> &Self::Target {
|
||||
&self.0
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Clone, Debug, Serialize, Deserialize, Hash, PartialEq, Eq)]
|
||||
pub struct SessionToken(String);
|
||||
|
||||
impl From<String> for SessionToken {
|
||||
fn from(s: String) -> Self {
|
||||
Self(s)
|
||||
}
|
||||
}
|
||||
|
||||
impl From<&str> for SessionToken {
|
||||
fn from(s: &str) -> Self {
|
||||
Self(s.to_owned())
|
||||
}
|
||||
}
|
||||
|
||||
impl From<SessionToken> for PathBuf {
|
||||
fn from(s: SessionToken) -> Self {
|
||||
Self::from(&s)
|
||||
}
|
||||
}
|
||||
|
||||
impl From<&SessionToken> for PathBuf {
|
||||
fn from(s: &SessionToken) -> Self {
|
||||
let SessionToken(s) = s;
|
||||
Self::from(s)
|
||||
}
|
||||
}
|
||||
|
||||
impl Deref for SessionToken {
|
||||
type Target = String;
|
||||
fn deref(&self) -> &Self::Target {
|
||||
&self.0
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Clone)]
|
||||
pub struct AuthDB {
|
||||
pool: SqlitePool,
|
||||
}
|
||||
|
||||
impl AuthDB {
|
||||
pub async fn new(path: PathBuf) -> Result<Self, sqlx::Error> {
|
||||
let migrator = sqlx::migrate!("./migrations");
|
||||
let pool = SqlitePool::connect(&format!("sqlite://{}", path.to_str().unwrap())).await?;
|
||||
migrator.run(&pool).await?;
|
||||
Ok(Self { pool })
|
||||
}
|
||||
|
||||
pub async fn add_user(&self, username: Username) -> Result<AuthToken, AuthError> {
|
||||
let mut hasher = Sha256::new();
|
||||
hasher.update(Uuid::new_v4().hyphenated().to_string());
|
||||
hasher.update(username.to_string());
|
||||
let auth_token = Base64::encode_string(&hasher.finalize());
|
||||
|
||||
let _ = sqlx::query("INSERT INTO users (username, token) VALUES ($1, $2)")
|
||||
.bind(username.to_string())
|
||||
.bind(auth_token.clone())
|
||||
.execute(&self.pool)
|
||||
.await?;
|
||||
|
||||
Ok(AuthToken::from(auth_token))
|
||||
}
|
||||
|
||||
pub async fn list_users(&self) -> Result<Vec<Username>, AuthError> {
|
||||
let usernames = sqlx::query_as::<_, Username>("SELECT (username) FROM users")
|
||||
.fetch_all(&self.pool)
|
||||
.await?;
|
||||
|
||||
Ok(usernames)
|
||||
}
|
||||
|
||||
pub async fn authenticate(&self, token: AuthToken) -> Result<Option<SessionToken>, AuthError> {
|
||||
let results = sqlx::query("SELECT * FROM users WHERE token = $1")
|
||||
.bind(token.to_string())
|
||||
.fetch_all(&self.pool)
|
||||
.await?;
|
||||
|
||||
if results.len() > 1 {
|
||||
return Err(AuthError::DuplicateAuthToken);
|
||||
}
|
||||
|
||||
if results.is_empty() {
|
||||
return Ok(None);
|
||||
}
|
||||
|
||||
let user_id: i64 = results[0].try_get("id")?;
|
||||
|
||||
let mut hasher = Sha256::new();
|
||||
hasher.update(Uuid::new_v4().hyphenated().to_string());
|
||||
hasher.update(token.to_string());
|
||||
let session_token = Base64::encode_string(&hasher.finalize());
|
||||
|
||||
let _ = sqlx::query("INSERT INTO sessions (token, user_id) VALUES ($1, $2)")
|
||||
.bind(session_token.clone())
|
||||
.bind(user_id)
|
||||
.execute(&self.pool)
|
||||
.await?;
|
||||
|
||||
Ok(Some(SessionToken::from(session_token)))
|
||||
}
|
||||
|
||||
pub async fn validate_session(
|
||||
&self,
|
||||
token: SessionToken,
|
||||
) -> Result<Option<Username>, AuthError> {
|
||||
let rows = sqlx::query(
|
||||
"SELECT users.username FROM sessions INNER JOIN users ON sessions.user_id = users.id WHERE sessions.token = $1",
|
||||
)
|
||||
.bind(token.to_string())
|
||||
.fetch_all(&self.pool)
|
||||
.await?;
|
||||
if rows.len() > 1 {
|
||||
return Err(AuthError::DuplicateSessionToken);
|
||||
}
|
||||
|
||||
if rows.is_empty() {
|
||||
return Ok(None);
|
||||
}
|
||||
|
||||
let username: String = rows[0].try_get("username")?;
|
||||
Ok(Some(Username::from(username)))
|
||||
}
|
||||
}
|
||||
|
||||
#[cfg(test)]
|
||||
mod tests {
|
||||
use super::*;
|
||||
use cool_asserts::assert_matches;
|
||||
use std::collections::HashSet;
|
||||
|
||||
#[tokio::test]
|
||||
async fn can_create_and_list_users() {
|
||||
let db = AuthDB::new(PathBuf::from(":memory:"))
|
||||
.await
|
||||
.expect("a memory-only database will be created");
|
||||
let _ = db
|
||||
.add_user(Username::from("savanni"))
|
||||
.await
|
||||
.expect("user to be created");
|
||||
assert_matches!(db.list_users().await, Ok(names) => {
|
||||
let names = names.into_iter().collect::<HashSet<Username>>();
|
||||
assert!(names.contains(&Username::from("savanni")));
|
||||
})
|
||||
}
|
||||
|
||||
#[tokio::test]
|
||||
async fn unknown_auth_token_returns_nothing() {
|
||||
let db = AuthDB::new(PathBuf::from(":memory:"))
|
||||
.await
|
||||
.expect("a memory-only database will be created");
|
||||
let _ = db
|
||||
.add_user(Username::from("savanni"))
|
||||
.await
|
||||
.expect("user to be created");
|
||||
|
||||
let token = AuthToken::from("0000000000");
|
||||
|
||||
assert_matches!(db.authenticate(token).await, Ok(None));
|
||||
}
|
||||
|
||||
#[tokio::test]
|
||||
async fn auth_token_becomes_session_token() {
|
||||
let db = AuthDB::new(PathBuf::from(":memory:"))
|
||||
.await
|
||||
.expect("a memory-only database will be created");
|
||||
let token = db
|
||||
.add_user(Username::from("savanni"))
|
||||
.await
|
||||
.expect("user to be created");
|
||||
|
||||
assert_matches!(db.authenticate(token).await, Ok(_));
|
||||
}
|
||||
|
||||
#[tokio::test]
|
||||
async fn can_validate_session_token() {
|
||||
let db = AuthDB::new(PathBuf::from(":memory:"))
|
||||
.await
|
||||
.expect("a memory-only database will be created");
|
||||
let token = db
|
||||
.add_user(Username::from("savanni"))
|
||||
.await
|
||||
.expect("user to be created");
|
||||
let session = db
|
||||
.authenticate(token)
|
||||
.await
|
||||
.expect("token authentication should succeed")
|
||||
.expect("session token should be found");
|
||||
|
||||
assert_matches!(
|
||||
db.validate_session(session).await,
|
||||
Ok(Some(username)) => {
|
||||
assert_eq!(username, Username::from("savanni"));
|
||||
});
|
||||
}
|
||||
}
|
|
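The new authdb crate above centralizes the credential flow: add_user issues an AuthToken, authenticate exchanges it for a SessionToken, and validate_session resolves that session back to a Username. A minimal usage sketch, assuming a SQLite database file on disk (the path here is only a placeholder, not part of this change):

use authdb::{AuthDB, Username};
use std::path::PathBuf;

#[tokio::main]
async fn main() -> Result<(), Box<dyn std::error::Error>> {
    // Opening the database also runs the bundled migrations.
    let db = AuthDB::new(PathBuf::from("./auth_db.sqlite")).await?;

    // Issue an auth token for a new user.
    let auth_token = db.add_user(Username::from("savanni")).await?;

    // Later, exchange the auth token for a session token ...
    if let Some(session) = db.authenticate(auth_token).await? {
        // ... and map the session back to a user on each request.
        let user = db.validate_session(session).await?;
        println!("session belongs to {:?}", user);
    }
    Ok(())
}
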
@@ -42,7 +42,7 @@ where
{
/// Open a time series database at the specified path. `path` is the full path and filename for
/// the database.
pub fn open(path: &str) -> Result<Series<T>, EmseriesReadError> {
pub fn open<P: AsRef<std::path::Path>>(path: P) -> Result<Series<T>, EmseriesReadError> {
let f = OpenOptions::new()
.read(true)
.append(true)
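Since Series::open is now generic over AsRef<Path>, callers no longer have to stringify paths before opening a series; the to_string_lossy call sites removed in the test hunks below are the motivating example. A small sketch of the new call shape (BikeTrip is the record type used in these tests, and the expect message mirrors them):

use emseries::Series;
use std::path::Path;

// Any AsRef<Path> value (&str, String, PathBuf, TempPath, ...) is accepted directly.
fn open_series<P: AsRef<Path>>(path: P) -> Series<BikeTrip> {
    // Previously this required Series::open(&path.as_ref().to_string_lossy()).
    Series::open(path).expect("the time series should open correctly")
}
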
@ -99,8 +99,8 @@ mod test {
|
|||
{
|
||||
let tmp_file = tempfile::NamedTempFile::new().expect("temporary path created");
|
||||
let tmp_path = tmp_file.into_temp_path();
|
||||
let ts: Series<BikeTrip> = Series::open(&tmp_path.to_string_lossy())
|
||||
.expect("the time series should open correctly");
|
||||
let ts: Series<BikeTrip> =
|
||||
Series::open(&tmp_path).expect("the time series should open correctly");
|
||||
test(ts);
|
||||
}
|
||||
|
||||
|
@ -136,8 +136,8 @@ mod test {
|
|||
pub fn can_search_for_an_entry_with_exact_time() {
|
||||
run_test(|path| {
|
||||
let trips = mk_trips();
|
||||
let mut ts: Series<BikeTrip> = Series::open(&path.to_string_lossy())
|
||||
.expect("expect the time series to open correctly");
|
||||
let mut ts: Series<BikeTrip> =
|
||||
Series::open(&path).expect("expect the time series to open correctly");
|
||||
|
||||
for trip in &trips[0..=4] {
|
||||
ts.put(trip.clone()).expect("expect a successful put");
|
||||
|
@ -157,8 +157,8 @@ mod test {
|
|||
pub fn can_get_entries_in_time_range() {
|
||||
run_test(|path| {
|
||||
let trips = mk_trips();
|
||||
let mut ts: Series<BikeTrip> = Series::open(&path.to_string_lossy())
|
||||
.expect("expect the time series to open correctly");
|
||||
let mut ts: Series<BikeTrip> =
|
||||
Series::open(&path).expect("expect the time series to open correctly");
|
||||
|
||||
for trip in &trips[0..=4] {
|
||||
ts.put(trip.clone()).expect("expect a successful put");
|
||||
|
@ -186,8 +186,8 @@ mod test {
|
|||
let trips = mk_trips();
|
||||
|
||||
{
|
||||
let mut ts: Series<BikeTrip> = Series::open(&path.to_string_lossy())
|
||||
.expect("expect the time series to open correctly");
|
||||
let mut ts: Series<BikeTrip> =
|
||||
Series::open(&path).expect("expect the time series to open correctly");
|
||||
|
||||
for trip in &trips[0..=4] {
|
||||
ts.put(trip.clone()).expect("expect a successful put");
|
||||
|
@ -195,8 +195,8 @@ mod test {
|
|||
}
|
||||
|
||||
{
|
||||
let ts: Series<BikeTrip> = Series::open(&path.to_string_lossy())
|
||||
.expect("expect the time series to open correctly");
|
||||
let ts: Series<BikeTrip> =
|
||||
Series::open(&path).expect("expect the time series to open correctly");
|
||||
let v: Vec<(&UniqueId, &BikeTrip)> = ts.search_sorted(
|
||||
time_range(
|
||||
DateTimeTz(UTC.with_ymd_and_hms(2011, 10, 31, 0, 0, 0).unwrap()).into(),
|
||||
|
@ -220,8 +220,8 @@ mod test {
|
|||
let trips = mk_trips();
|
||||
|
||||
{
|
||||
let mut ts: Series<BikeTrip> = Series::open(&path.to_string_lossy())
|
||||
.expect("expect the time series to open correctly");
|
||||
let mut ts: Series<BikeTrip> =
|
||||
Series::open(&path).expect("expect the time series to open correctly");
|
||||
|
||||
for trip in &trips[0..=2] {
|
||||
ts.put(trip.clone()).expect("expect a successful put");
|
||||
|
@ -229,8 +229,8 @@ mod test {
|
|||
}
|
||||
|
||||
{
|
||||
let mut ts: Series<BikeTrip> = Series::open(&path.to_string_lossy())
|
||||
.expect("expect the time series to open correctly");
|
||||
let mut ts: Series<BikeTrip> =
|
||||
Series::open(&path).expect("expect the time series to open correctly");
|
||||
let v: Vec<(&UniqueId, &BikeTrip)> = ts.search_sorted(
|
||||
time_range(
|
||||
DateTimeTz(UTC.with_ymd_and_hms(2011, 10, 31, 0, 0, 0).unwrap()).into(),
|
||||
|
@ -248,8 +248,8 @@ mod test {
|
|||
}
|
||||
|
||||
{
|
||||
let ts: Series<BikeTrip> = Series::open(&path.to_string_lossy())
|
||||
.expect("expect the time series to open correctly");
|
||||
let ts: Series<BikeTrip> =
|
||||
Series::open(&path).expect("expect the time series to open correctly");
|
||||
let v: Vec<(&UniqueId, &BikeTrip)> = ts.search_sorted(
|
||||
time_range(
|
||||
DateTimeTz(UTC.with_ymd_and_hms(2011, 10, 31, 0, 0, 0).unwrap()).into(),
|
||||
|
@ -273,8 +273,8 @@ mod test {
|
|||
run_test(|path| {
|
||||
let trips = mk_trips();
|
||||
|
||||
let mut ts: Series<BikeTrip> = Series::open(&path.to_string_lossy())
|
||||
.expect("expect the time series to open correctly");
|
||||
let mut ts: Series<BikeTrip> =
|
||||
Series::open(&path).expect("expect the time series to open correctly");
|
||||
|
||||
ts.put(trips[0].clone()).expect("expect a successful put");
|
||||
ts.put(trips[1].clone()).expect("expect a successful put");
|
||||
|
@ -310,8 +310,8 @@ mod test {
|
|||
let trips = mk_trips();
|
||||
|
||||
{
|
||||
let mut ts: Series<BikeTrip> = Series::open(&path.to_string_lossy())
|
||||
.expect("expect the time series to open correctly");
|
||||
let mut ts: Series<BikeTrip> =
|
||||
Series::open(&path).expect("expect the time series to open correctly");
|
||||
|
||||
ts.put(trips[0].clone()).expect("expect a successful put");
|
||||
ts.put(trips[1].clone()).expect("expect a successful put");
|
||||
|
@ -327,8 +327,8 @@ mod test {
|
|||
}
|
||||
|
||||
{
|
||||
let ts: Series<BikeTrip> = Series::open(&path.to_string_lossy())
|
||||
.expect("expect the time series to open correctly");
|
||||
let ts: Series<BikeTrip> =
|
||||
Series::open(&path).expect("expect the time series to open correctly");
|
||||
|
||||
let trips: Vec<(&UniqueId, &BikeTrip)> = ts.records().collect();
|
||||
assert_eq!(trips.len(), 3);
|
||||
|
@ -356,8 +356,8 @@ mod test {
|
|||
let trips = mk_trips();
|
||||
|
||||
{
|
||||
let mut ts: Series<BikeTrip> = Series::open(&path.to_string_lossy())
|
||||
.expect("expect the time series to open correctly");
|
||||
let mut ts: Series<BikeTrip> =
|
||||
Series::open(&path).expect("expect the time series to open correctly");
|
||||
let trip_id = ts.put(trips[0].clone()).expect("expect a successful put");
|
||||
ts.put(trips[1].clone()).expect("expect a successful put");
|
||||
ts.put(trips[2].clone()).expect("expect a successful put");
|
||||
|
@ -368,8 +368,8 @@ mod test {
|
|||
}
|
||||
|
||||
{
|
||||
let ts: Series<BikeTrip> = Series::open(&path.to_string_lossy())
|
||||
.expect("expect the time series to open correctly");
|
||||
let ts: Series<BikeTrip> =
|
||||
Series::open(&path).expect("expect the time series to open correctly");
|
||||
let recs: Vec<(&UniqueId, &BikeTrip)> = ts.records().collect();
|
||||
assert_eq!(recs.len(), 2);
|
||||
}
|
||||
|
|
|
@@ -14,13 +14,10 @@ path = "src/lib.rs"
name = "file-service"
path = "src/main.rs"

[[bin]]
name = "auth-cli"
path = "src/bin/cli.rs"

[target.auth-cli.dependencies]

[dependencies]
authdb = { path = "../authdb/" }
base64ct = { version = "1", features = [ "alloc" ] }
build_html = { version = "2" }
bytes = { version = "1" }
@@ -38,9 +35,8 @@ mime_guess = "2.0.3"
pretty_env_logger = { version = "0.5" }
serde_json = "*"
serde = { version = "1.0", features = ["derive"] }
sha2 = "0.10"
sqlx = { version = "0.7", features = [ "runtime-tokio", "sqlite" ] }
thiserror = "1.0.20"
sha2 = { version = "0.10" }
thiserror = { version = "1" }
tokio = { version = "1", features = [ "full" ] }
uuid = { version = "0.4", features = [ "serde", "v4" ] }
warp = { version = "0.3" }

@@ -1,6 +1,5 @@
mod store;

pub use store::{
AuthDB, AuthError, AuthToken, DeleteFileError, FileHandle, FileId, FileInfo, ReadFileError,
SessionToken, Store, Username, WriteFileError,
DeleteFileError, FileHandle, FileId, FileInfo, ReadFileError, Store, WriteFileError,
};

@@ -18,9 +18,10 @@ mod pages;

const MAX_UPLOAD: u64 = 15 * 1024 * 1024;

pub use file_service::{
AuthDB, AuthError, AuthToken, DeleteFileError, FileHandle, FileId, FileInfo, ReadFileError,
SessionToken, Store, Username, WriteFileError,
use authdb::{AuthDB, AuthError, AuthToken, SessionToken, Username};

use file_service::{
DeleteFileError, FileHandle, FileId, FileInfo, ReadFileError, Store, WriteFileError,
};
pub use handlers::handle_index;

@@ -1,13 +1,6 @@
use base64ct::{Base64, Encoding};
use serde::{Deserialize, Serialize};
use sha2::{Digest, Sha256};
use sqlx::{
sqlite::{SqlitePool, SqliteRow},
Row,
};
use std::{collections::HashSet, ops::Deref, path::PathBuf};
use thiserror::Error;
use uuid::Uuid;

mod filehandle;
mod fileinfo;
@ -90,136 +83,6 @@ impl From<ReadFileError> for DeleteFileError {
|
|||
}
|
||||
}
|
||||
|
||||
#[derive(Debug, Error)]
|
||||
pub enum AuthError {
|
||||
#[error("authentication token is duplicated")]
|
||||
DuplicateAuthToken,
|
||||
|
||||
#[error("session token is duplicated")]
|
||||
DuplicateSessionToken,
|
||||
|
||||
#[error("database failed")]
|
||||
SqlError(sqlx::Error),
|
||||
}
|
||||
|
||||
impl From<sqlx::Error> for AuthError {
|
||||
fn from(err: sqlx::Error) -> AuthError {
|
||||
AuthError::SqlError(err)
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Clone, Debug, Serialize, Deserialize, Hash, PartialEq, Eq)]
|
||||
pub struct Username(String);
|
||||
|
||||
impl From<String> for Username {
|
||||
fn from(s: String) -> Self {
|
||||
Self(s)
|
||||
}
|
||||
}
|
||||
|
||||
impl From<&str> for Username {
|
||||
fn from(s: &str) -> Self {
|
||||
Self(s.to_owned())
|
||||
}
|
||||
}
|
||||
|
||||
impl From<Username> for String {
|
||||
fn from(s: Username) -> Self {
|
||||
Self::from(&s)
|
||||
}
|
||||
}
|
||||
|
||||
impl From<&Username> for String {
|
||||
fn from(s: &Username) -> Self {
|
||||
let Username(s) = s;
|
||||
Self::from(s)
|
||||
}
|
||||
}
|
||||
|
||||
impl Deref for Username {
|
||||
type Target = String;
|
||||
fn deref(&self) -> &Self::Target {
|
||||
&self.0
|
||||
}
|
||||
}
|
||||
|
||||
impl sqlx::FromRow<'_, SqliteRow> for Username {
|
||||
fn from_row(row: &SqliteRow) -> sqlx::Result<Self> {
|
||||
let name: String = row.try_get("username")?;
|
||||
Ok(Username::from(name))
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Clone, Debug, Serialize, Deserialize, Hash, PartialEq, Eq)]
|
||||
pub struct AuthToken(String);
|
||||
|
||||
impl From<String> for AuthToken {
|
||||
fn from(s: String) -> Self {
|
||||
Self(s)
|
||||
}
|
||||
}
|
||||
|
||||
impl From<&str> for AuthToken {
|
||||
fn from(s: &str) -> Self {
|
||||
Self(s.to_owned())
|
||||
}
|
||||
}
|
||||
|
||||
impl From<AuthToken> for PathBuf {
|
||||
fn from(s: AuthToken) -> Self {
|
||||
Self::from(&s)
|
||||
}
|
||||
}
|
||||
|
||||
impl From<&AuthToken> for PathBuf {
|
||||
fn from(s: &AuthToken) -> Self {
|
||||
let AuthToken(s) = s;
|
||||
Self::from(s)
|
||||
}
|
||||
}
|
||||
|
||||
impl Deref for AuthToken {
|
||||
type Target = String;
|
||||
fn deref(&self) -> &Self::Target {
|
||||
&self.0
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Clone, Debug, Serialize, Deserialize, Hash, PartialEq, Eq)]
|
||||
pub struct SessionToken(String);
|
||||
|
||||
impl From<String> for SessionToken {
|
||||
fn from(s: String) -> Self {
|
||||
Self(s)
|
||||
}
|
||||
}
|
||||
|
||||
impl From<&str> for SessionToken {
|
||||
fn from(s: &str) -> Self {
|
||||
Self(s.to_owned())
|
||||
}
|
||||
}
|
||||
|
||||
impl From<SessionToken> for PathBuf {
|
||||
fn from(s: SessionToken) -> Self {
|
||||
Self::from(&s)
|
||||
}
|
||||
}
|
||||
|
||||
impl From<&SessionToken> for PathBuf {
|
||||
fn from(s: &SessionToken) -> Self {
|
||||
let SessionToken(s) = s;
|
||||
Self::from(s)
|
||||
}
|
||||
}
|
||||
|
||||
impl Deref for SessionToken {
|
||||
type Target = String;
|
||||
fn deref(&self) -> &Self::Target {
|
||||
&self.0
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Clone, Debug, Serialize, Deserialize, Hash, PartialEq, Eq)]
|
||||
pub struct FileId(String);
|
||||
|
||||
|
@ -267,95 +130,6 @@ impl FileRoot for Context {
|
|||
}
|
||||
}
|
||||
|
||||
#[derive(Clone)]
|
||||
pub struct AuthDB {
|
||||
pool: SqlitePool,
|
||||
}
|
||||
|
||||
impl AuthDB {
|
||||
pub async fn new(path: PathBuf) -> Result<Self, sqlx::Error> {
|
||||
let migrator = sqlx::migrate!("./migrations");
|
||||
let pool = SqlitePool::connect(&format!("sqlite://{}", path.to_str().unwrap())).await?;
|
||||
migrator.run(&pool).await?;
|
||||
Ok(Self { pool })
|
||||
}
|
||||
|
||||
pub async fn add_user(&self, username: Username) -> Result<AuthToken, AuthError> {
|
||||
let mut hasher = Sha256::new();
|
||||
hasher.update(Uuid::new_v4().hyphenated().to_string());
|
||||
hasher.update(username.to_string());
|
||||
let auth_token = Base64::encode_string(&hasher.finalize());
|
||||
|
||||
let _ = sqlx::query("INSERT INTO users (username, token) VALUES ($1, $2)")
|
||||
.bind(username.to_string())
|
||||
.bind(auth_token.clone())
|
||||
.execute(&self.pool)
|
||||
.await?;
|
||||
|
||||
Ok(AuthToken::from(auth_token))
|
||||
}
|
||||
|
||||
pub async fn list_users(&self) -> Result<Vec<Username>, AuthError> {
|
||||
let usernames = sqlx::query_as::<_, Username>("SELECT (username) FROM users")
|
||||
.fetch_all(&self.pool)
|
||||
.await?;
|
||||
|
||||
Ok(usernames)
|
||||
}
|
||||
|
||||
pub async fn authenticate(&self, token: AuthToken) -> Result<Option<SessionToken>, AuthError> {
|
||||
let results = sqlx::query("SELECT * FROM users WHERE token = $1")
|
||||
.bind(token.to_string())
|
||||
.fetch_all(&self.pool)
|
||||
.await?;
|
||||
|
||||
if results.len() > 1 {
|
||||
return Err(AuthError::DuplicateAuthToken);
|
||||
}
|
||||
|
||||
if results.is_empty() {
|
||||
return Ok(None);
|
||||
}
|
||||
|
||||
let user_id: i64 = results[0].try_get("id")?;
|
||||
|
||||
let mut hasher = Sha256::new();
|
||||
hasher.update(Uuid::new_v4().hyphenated().to_string());
|
||||
hasher.update(token.to_string());
|
||||
let session_token = Base64::encode_string(&hasher.finalize());
|
||||
|
||||
let _ = sqlx::query("INSERT INTO sessions (token, user_id) VALUES ($1, $2)")
|
||||
.bind(session_token.clone())
|
||||
.bind(user_id)
|
||||
.execute(&self.pool)
|
||||
.await?;
|
||||
|
||||
Ok(Some(SessionToken::from(session_token)))
|
||||
}
|
||||
|
||||
pub async fn validate_session(
|
||||
&self,
|
||||
token: SessionToken,
|
||||
) -> Result<Option<Username>, AuthError> {
|
||||
let rows = sqlx::query(
|
||||
"SELECT users.username FROM sessions INNER JOIN users ON sessions.user_id = users.id WHERE sessions.token = $1",
|
||||
)
|
||||
.bind(token.to_string())
|
||||
.fetch_all(&self.pool)
|
||||
.await?;
|
||||
if rows.len() > 1 {
|
||||
return Err(AuthError::DuplicateSessionToken);
|
||||
}
|
||||
|
||||
if rows.is_empty() {
|
||||
return Ok(None);
|
||||
}
|
||||
|
||||
let username: String = rows[0].try_get("username")?;
|
||||
Ok(Some(Username::from(username)))
|
||||
}
|
||||
}
|
||||
|
||||
pub struct Store {
|
||||
files_root: PathBuf,
|
||||
}
|
||||
|
@ -493,74 +267,3 @@ mod test {
|
|||
});
|
||||
}
|
||||
}
|
||||
|
||||
#[cfg(test)]
|
||||
mod authdb_test {
|
||||
use super::*;
|
||||
use cool_asserts::assert_matches;
|
||||
|
||||
#[tokio::test]
|
||||
async fn can_create_and_list_users() {
|
||||
let db = AuthDB::new(PathBuf::from(":memory:"))
|
||||
.await
|
||||
.expect("a memory-only database will be created");
|
||||
let _ = db
|
||||
.add_user(Username::from("savanni"))
|
||||
.await
|
||||
.expect("user to be created");
|
||||
assert_matches!(db.list_users().await, Ok(names) => {
|
||||
let names = names.into_iter().collect::<HashSet<Username>>();
|
||||
assert!(names.contains(&Username::from("savanni")));
|
||||
})
|
||||
}
|
||||
|
||||
#[tokio::test]
|
||||
async fn unknown_auth_token_returns_nothing() {
|
||||
let db = AuthDB::new(PathBuf::from(":memory:"))
|
||||
.await
|
||||
.expect("a memory-only database will be created");
|
||||
let _ = db
|
||||
.add_user(Username::from("savanni"))
|
||||
.await
|
||||
.expect("user to be created");
|
||||
|
||||
let token = AuthToken::from("0000000000");
|
||||
|
||||
assert_matches!(db.authenticate(token).await, Ok(None));
|
||||
}
|
||||
|
||||
#[tokio::test]
|
||||
async fn auth_token_becomes_session_token() {
|
||||
let db = AuthDB::new(PathBuf::from(":memory:"))
|
||||
.await
|
||||
.expect("a memory-only database will be created");
|
||||
let token = db
|
||||
.add_user(Username::from("savanni"))
|
||||
.await
|
||||
.expect("user to be created");
|
||||
|
||||
assert_matches!(db.authenticate(token).await, Ok(_));
|
||||
}
|
||||
|
||||
#[tokio::test]
|
||||
async fn can_validate_session_token() {
|
||||
let db = AuthDB::new(PathBuf::from(":memory:"))
|
||||
.await
|
||||
.expect("a memory-only database will be created");
|
||||
let token = db
|
||||
.add_user(Username::from("savanni"))
|
||||
.await
|
||||
.expect("user to be created");
|
||||
let session = db
|
||||
.authenticate(token)
|
||||
.await
|
||||
.expect("token authentication should succeed")
|
||||
.expect("session token should be found");
|
||||
|
||||
assert_matches!(
|
||||
db.validate_session(session).await,
|
||||
Ok(Some(username)) => {
|
||||
assert_eq!(username, Username::from("savanni"));
|
||||
});
|
||||
}
|
||||
}
|
||||
|
|
|
@@ -1,30 +0,0 @@
[package]
name = "fitnesstrax"
version = "0.1.0"
edition = "2021"

# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html

[lib]
name = "fitnesstrax"
path = "src/lib.rs"

[[bin]]
name = "fitnesstrax"
path = "src/main.rs"

[dependencies]
adw = { version = "0.5", package = "libadwaita", features = [ "v1_2" ] }
chrono = { version = "0.4" }
chrono-tz = { version = "0.8" }
dimensioned = { version = "0.8" }
emseries = { path = "../emseries" }
gio = { version = "0.18" }
glib = { version = "0.18" }
gtk = { version = "0.7", package = "gtk4", features = [ "v4_8" ] }
tokio = { version = "1.34", features = [ "full" ] }

[build-dependencies]
glib-build-tools = "0.18"

@@ -0,0 +1,17 @@
[package]
name = "fitnesstrax"
version = "0.1.0"
edition = "2021"

# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html

[dependencies]
adw = { version = "0.5", package = "libadwaita", features = [ "v1_2" ] }
gio = { version = "0.18" }
glib = { version = "0.18" }
gtk = { version = "0.7", package = "gtk4", features = [ "v4_8" ] }
tokio = { version = "1.34", features = [ "full" ] }

[build-dependencies]
glib-build-tools = "0.18"

@@ -0,0 +1,18 @@
[package]
name = "ft-core"
version = "0.1.0"
edition = "2021"

# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html

[dependencies]
chrono = { version = "0.4" }
chrono-tz = { version = "0.8" }
dimensioned = { version = "0.8", features = [ "serde" ] }
emseries = { path = "../../emseries" }
serde = { version = "1", features = [ "derive" ] }

[dev-dependencies]
tempfile = "*"

@ -1,6 +1,7 @@
|
|||
use chrono::NaiveDate;
|
||||
use dimensioned::si;
|
||||
use emseries::DateTimeTz;
|
||||
use emseries::{DateTimeTz, Recordable, Timestamp};
|
||||
use serde::{Deserialize, Serialize};
|
||||
|
||||
/// SetRep represents workouts like pushups or situps, which involve doing a "set" of a number of
|
||||
/// actions, resting, and then doing another set.
|
||||
|
@ -15,6 +16,7 @@ pub struct SetRep {
|
|||
}
|
||||
|
||||
/// The number of steps one takes in a single day.
|
||||
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
|
||||
pub struct Steps {
|
||||
date: NaiveDate,
|
||||
count: u32,
|
||||
|
@ -25,6 +27,7 @@ pub struct Steps {
|
|||
/// record a single 30-km workout if I go on a long-distanec ride. Or I might record multiple 5km
|
||||
/// workouts if I am out running errands. Distance and Duration are both optional because different
|
||||
/// people have different priorities and may choose to measure different things.
|
||||
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
|
||||
pub struct TimeDistance {
|
||||
/// The precise time (and the relevant timezone) of the workout. One of the edge cases that I
|
||||
/// account for is that a ride which occurred at 11pm in one timezone would then count as 1am
|
||||
|
@ -43,12 +46,14 @@ pub struct TimeDistance {
|
|||
|
||||
/// A singular daily weight measurement. Weight changes slowly enough that it seems unlikely to
|
||||
/// need to track more than a single weight in a day.
|
||||
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
|
||||
pub struct Weight {
|
||||
date: NaiveDate,
|
||||
weight: si::Kilogram<f64>,
|
||||
}
|
||||
|
||||
/// The unified data structure for all records that are part of the app.
|
||||
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
|
||||
pub enum TraxRecord {
|
||||
BikeRide(TimeDistance),
|
||||
Pushups,
|
||||
|
@@ -61,3 +66,47 @@ pub enum TraxRecord {
Walk(TimeDistance),
Weight(Weight),
}

impl Recordable for TraxRecord {
fn timestamp(&self) -> Timestamp {
match self {
TraxRecord::BikeRide(rec) => Timestamp::DateTime(rec.datetime.clone()),
TraxRecord::Pushups => unimplemented!(),
TraxRecord::Row(rec) => Timestamp::DateTime(rec.datetime.clone()),
TraxRecord::Run(rec) => Timestamp::DateTime(rec.datetime.clone()),
TraxRecord::Situps => unimplemented!(),
TraxRecord::Squats => unimplemented!(),
TraxRecord::Steps(rec) => Timestamp::Date(rec.date),
TraxRecord::Swim(rec) => Timestamp::DateTime(rec.datetime.clone()),
TraxRecord::Walk(rec) => Timestamp::DateTime(rec.datetime.clone()),
TraxRecord::Weight(rec) => Timestamp::Date(rec.date),
}
}

fn tags(&self) -> Vec<String> {
vec![]
}
}

#[cfg(test)]
mod test {
use super::*;
use emseries::Series;

#[test]
fn can_record_records() {
let file = tempfile::NamedTempFile::new().expect("a temporary file");
let path = file.into_temp_path();
let mut series: Series<TraxRecord> = Series::open(&path).unwrap();

let record = TraxRecord::Steps(Steps {
date: NaiveDate::from_ymd_opt(2023, 1, 1).unwrap(),
count: 1000,
});

let id = series.put(record.clone()).unwrap();

let record_ = series.get(&id).unwrap();
assert_eq!(record_, record);
}
}

@@ -0,0 +1,14 @@
[package]
name = "visions"
version = "0.1.0"
edition = "2021"

# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html

[dependencies]
authdb = { path = "../../authdb/" }
http = { version = "1" }
serde_json = { version = "*" }
serde = { version = "1" }
tokio = { version = "1", features = [ "full" ] }
warp = { version = "0.3" }

@@ -0,0 +1,24 @@
use authdb::{AuthDB, AuthToken};
use http::{response::Response, status::StatusCode, Error};

pub async fn handle_auth(
auth_ctx: &AuthDB,
auth_token: AuthToken,
) -> Result<http::Response<String>, Error> {
match auth_ctx.authenticate(auth_token).await {
Ok(Some(session)) => match serde_json::to_string(&session) {
Ok(session_token) => Response::builder()
.status(StatusCode::OK)
.body(session_token),
Err(_) => Response::builder()
.status(StatusCode::INTERNAL_SERVER_ERROR)
.body("".to_owned()),
},
Ok(None) => Response::builder()
.status(StatusCode::UNAUTHORIZED)
.body("".to_owned()),
Err(_) => Response::builder()
.status(StatusCode::INTERNAL_SERVER_ERROR)
.body("".to_owned()),
}
}

@@ -0,0 +1,105 @@
use authdb::{AuthDB, AuthError, AuthToken, SessionToken, Username};
use std::{
convert::Infallible,
net::{IpAddr, Ipv4Addr, SocketAddr},
path::PathBuf,
sync::Arc,
};
use warp::{
header,
http::StatusCode,
reply::{Json, Reply},
Filter,
};

mod handlers;
use handlers::handle_auth;

#[derive(Debug)]
struct Unauthorized;
impl warp::reject::Reject for Unauthorized {}

#[derive(Debug)]
struct AuthDBError(AuthError);
impl warp::reject::Reject for AuthDBError {}

fn with_session(
auth_ctx: Arc<AuthDB>,
) -> impl Filter<Extract = (Username,), Error = warp::Rejection> + Clone {
header("authentication").and_then({
move |value: String| {
let auth_ctx = auth_ctx.clone();
async move {
match auth_ctx.validate_session(SessionToken::from(value)).await {
Ok(Some(username)) => Ok(username),
Ok(None) => Err(warp::reject::custom(Unauthorized)),
Err(err) => Err(warp::reject::custom(AuthDBError(err))),
}
}
}
})
}

fn route_echo_unauthenticated() -> impl Filter<Extract = (Json,), Error = warp::Rejection> + Clone {
warp::path!("api" / "v1" / "echo" / String).map(|param: String| {
println!("param: {}", param);
warp::reply::json(&vec!["unauthenticated", param.as_str()])
})
}

fn route_authenticate(
auth_ctx: Arc<AuthDB>,
) -> impl Filter<Extract = (Json,), Error = warp::Rejection> + Clone {
let auth_ctx = auth_ctx.clone();
warp::path!("api" / "v1" / "auth")
.and(warp::post())
.and(warp::body::json())
.map(move |param: AuthToken| {
let res = handle_auth(&auth_ctx, param.clone());
warp::reply::json(&param)
})
}

fn route_echo_authenticated(
auth_ctx: Arc<AuthDB>,
) -> impl Filter<Extract = (Json,), Error = warp::Rejection> + Clone {
warp::path!("api" / "v1" / "echo" / String)
.and(with_session(auth_ctx.clone()))
.map(move |param: String, username: Username| {
println!("param: {:?}", username);
println!("param: {}", param);
warp::reply::json(&vec!["authenticated", username.as_str(), param.as_str()])
})
}

async fn handle_rejection(err: warp::Rejection) -> Result<impl Reply, Infallible> {
if let Some(Unauthorized) = err.find() {
Ok(warp::reply::with_status(
"".to_owned(),
StatusCode::UNAUTHORIZED,
))
} else {
Ok(warp::reply::with_status(
"".to_owned(),
StatusCode::INTERNAL_SERVER_ERROR,
))
}
}

#[tokio::main]
pub async fn main() {
let auth_db = AuthDB::new(PathBuf::from("./auth_db.sqlite"))
.await
.expect("AuthDB should initialize");
let auth_ctx: Arc<AuthDB> = Arc::new(auth_db);

let filter = route_echo_authenticated(auth_ctx.clone())
.or(route_authenticate(auth_ctx.clone()))
.or(route_echo_unauthenticated())
.recover(handle_rejection);

let server = warp::serve(filter);
server
.run(SocketAddr::new(IpAddr::V4(Ipv4Addr::new(0, 0, 0, 0)), 8001))
.await;
}

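Taken together, these routes give the server a POST /api/v1/auth endpoint plus authenticated and unauthenticated variants of /api/v1/echo, with the session token carried in an "authentication" header. A hypothetical client sketch against a local instance on port 8001 (reqwest is used purely for illustration and is not a dependency of visions):

use reqwest::Client;

async fn echo_demo(session_token: &str) -> Result<(), reqwest::Error> {
    let client = Client::new();

    // No `authentication` header: with_session rejects, and the request
    // falls through to the unauthenticated echo route.
    let open = client
        .get("http://localhost:8001/api/v1/echo/hello")
        .send()
        .await?
        .text()
        .await?;

    // With the header, with_session validates the token against AuthDB and
    // the authenticated echo route answers, including the username.
    let authed = client
        .get("http://localhost:8001/api/v1/echo/hello")
        .header("authentication", session_token)
        .send()
        .await?
        .text()
        .await?;

    println!("{open}\n{authed}");
    Ok(())
}
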
@@ -0,0 +1,10 @@

release:
NODE_ENV=production npm run build

dev:
npm run build

server:
npx http-server ./dist

(File diff suppressed because it is too large.)

@@ -0,0 +1,24 @@
{
"name": "ui",
"version": "1.0.0",
"description": "",
"main": "webpack.config.js",
"scripts": {
"test": "echo \"Error: no test specified\" && exit 1",
"build": "webpack"
},
"author": "",
"license": "GPL-3.0-or-later",
"dependencies": {
"react": "^18.2.0",
"react-dom": "^18.2.0"
},
"devDependencies": {
"@types/react": "^18.2.8",
"@types/react-dom": "^18.2.4",
"copy-webpack-plugin": "^11.0.0",
"ts-loader": "^9.4.3",
"webpack": "^5.85.0",
"webpack-cli": "^5.1.3"
}
}

@@ -0,0 +1,6 @@
<!doctype html>
<html>
<body>
<div id="root"></div>
</body>
</html>

@@ -0,0 +1,11 @@
import React from "react";
import ReactDOM from "react-dom";

const App = () => <div>App</div>;

ReactDOM.render(
<React.StrictMode>
<App />
</React.StrictMode>,
document.getElementById("root")
);

@@ -0,0 +1,24 @@
const CopyWebpackPlugin = require('copy-webpack-plugin');

module.exports = {
mode: "development",
entry: {
"main": "./src/main.tsx"
},
module: {
rules: [
{ test: /\.tsx?$/, use: "ts-loader", exclude: /node_modules/ }
]
},
plugins: [
new CopyWebpackPlugin({
patterns: [
{ from: "src/index.html" },
{ from: "src/visions.css" },
]
})
],
resolve: {
extensions: ['.ts', '.tsx'],
}
}