Compare commits

1 commit

1450ec5ae6 Set up an android project 2023-03-25 23:37:06 -04:00
518 changed files with 5141 additions and 88235 deletions

.envrc (1 line changed)

@@ -1,2 +1 @@
mkdir .direnv
use flake

@@ -1,31 +0,0 @@
name: Monorepo build
run-name: ${{ gitea.actor }} is testing out Gitea Actions
on: [push]
jobs:
  # Explore-Gitea-Actions:
  #   runs-on: native
  #   steps:
  #     - run: echo "The job was automatically triggered by a ${{ gitea.event_name }} event."
  #     - run: echo "This job is now running on ${{ runner.os }} server hosted by Gitea!"
  #     - run: echo "The name of your branch is ${{ gitea.ref }} and your repository is ${{ gitea.repository }}."
  #     - name: Check out repository code
  #       uses: actions/checkout@v4
  #     - run: echo "The ${{ gitea.repository }} repository has been cloned to the runner."
  #     - run: echo "The workflow is now ready to test your code on the runner."
  #     - name: List files in the repository
  #       run: |
  #         ls ${{ gitea.workspace }}
  #     - run: echo "This job's status is ${{ job.status }}."
  build-flake:
    runs-on: nixos
    defaults.run.working-directory: ${{ gitea.workspace }}
    steps:
      - name: Checkout repository code
        uses: actions/checkout@v4
      - name: Build the apps
        run: /run/current-system/sw/bin/nix --extra-experimental-features "nix-command flakes" build .#all
      - name: Check the end of the build
        run: ls ${{ gitea.workspace }}/result/bin

.gitignore (vendored, 7 lines changed)

@@ -2,10 +2,3 @@ target
.direnv/
node_modules
dist
result
*.tgz
*.tar.gz
*.sqlite
*.sqlite-shm
*.sqlite-wal
file-service/var

@@ -1,3 +0,0 @@
{
"rust-analyzer.showUnlinkedFileNotification": false
}

Cargo.lock (generated, 2902 lines changed)

File diff suppressed because it is too large

Cargo.nix (10986 lines changed)

File diff suppressed because it is too large

@@ -1,74 +0,0 @@
[workspace]
resolver = "2"
members = [
# "authdb",
# "bike-lights/core",
# "bike-lights/simulator",
# "changeset",
# "config",
# "config-derive",
# "coordinates",
# "cyber-slides",
"cyberpunk",
"cyberpunk-splash",
# "dashboard",
# "emseries",
# "file-service",
"fitnesstrax/app",
# "fitnesstrax/core",
# "fluent-ergonomics",
# "geo-types",
# "gm-control-panel",
# "gm-dash/server",
# "hex-grid",
# "icon-test",
"l10n-db",
# "memorycache",
# "nom-training",
# "otg/core",
# "otg/gtk",
# "pico-st7789",
# "result-extended",
# "screenplay",
# "sgf",
# "timezone-testing",
# "tree",
"visions/server",
# "visions/types",
"visions/ui",
# "bike-lights/bike",
]
[workspace.dependencies]
adw = { version = "0.5", package = "libadwaita", features = [ "v1_4" ] }
async-channel = { version = "2.1" }
async-std = { version = "1.13" }
async-trait = { version = "0.1" }
axum = { version = "0.8", features = ["macros"] }
cairo-rs = { version = "0.18" }
chrono = { version = "0.4" }
chrono-tz = { version = "0.8" }
dimensioned = { version = "0.8", features = [ "serde" ] }
gdk = { version = "0.7", package = "gdk4" }
gio = { version = "0.18" }
glib = { version = "0.18" }
gloo-console = { version = "0.3.0" }
gloo-net = { version = "0.6.0" }
gtk = { version = "0.7", package = "gtk4", features = [ "v4_10" ] }
serde = { version = "1.0", features = ["derive", "serde_derive"] }
serde-wasm-bindgen = { version = "0.6.5" }
serde_json = { version = "1.0.138" }
thiserror = { version = "2.0" }
tokio = { version = "1.43", features = ["full", "rt"] }
tower-http = { version = "0.6", features = ["cors"] }
uuid = { version = "1.13", features = ["v4"] }
wasm-bindgen = { version = "0.2.100" }
wasm-bindgen-futures = { version = "0.4.50" }
web-sys = { version = "0.3.77" }
yew = { git = "https://github.com/yewstack/yew/", features = ["csr"] }
# cairo-rs = { version = "0.18" }
# gio = { version = "0.18" }
# glib = { version = "0.18" }
# gtk = { version = "0.7", package = "gtk4" }

Makefile (new file, 37 lines added)

@@ -0,0 +1,37 @@
changeset-dev:
	cd changeset && make dev
changeset-test:
	cd changeset && make test
coordinates-dev:
	cd coordinates && make dev
coordinates-test:
	cd coordinates && make test
emseries-dev:
	cd emseries && make dev
emseries-test:
	cd emseries && make test
flow-dev:
	cd flow && make dev
flow-test:
	cd flow && make test
fluent-ergonomics-dev:
	cd fluent-ergonomics && make dev
fluent-ergonomics-test:
	cd fluent-ergonomics && make test
ifc-dev:
	cd ifc && make dev
ifc-test:
	cd ifc && make test

@@ -1,20 +0,0 @@
version: '3'

tasks:
  build:
    cmds:
      - cargo build --release
  update:
    cmds:
      - task build
      - crate2nix generate
      - nix build
  lint:
    cmds:
      - cargo clippy
  test:
    cmds:
      - cargo test

@@ -1,27 +0,0 @@
[package]
name = "authdb"
version = "0.1.0"
edition = "2021"
# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html
[lib]
name = "authdb"
path = "src/lib.rs"
[[bin]]
name = "auth-cli"
path = "src/bin/cli.rs"
[dependencies]
base64ct = { version = "1", features = [ "alloc" ] }
clap = { version = "4", features = [ "derive" ] }
serde = { version = "1.0", features = ["derive"] }
sha2 = { version = "0.10" }
sqlx = { version = "0.8", features = [ "runtime-tokio", "sqlite" ] }
thiserror = { version = "1" }
tokio = { version = "1", features = [ "full" ] }
uuid = { version = "0.4", features = [ "serde", "v4" ] }
[dev-dependencies]
cool_asserts = "*"

@@ -1,11 +0,0 @@
CREATE TABLE IF NOT EXISTS users (
id INTEGER PRIMARY KEY NOT NULL,
username TEXT NOT NULL,
token TEXT NOT NULL
);
CREATE TABLE IF NOT EXISTS sessions (
token TEXT NOT NULL,
user_id INTEGER,
FOREIGN KEY(user_id) REFERENCES users(id)
);
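-- Note: neither users.token nor sessions.token is declared UNIQUE here; the
-- application layer (AuthDB::authenticate / AuthDB::validate_session further
-- down in this diff) instead treats more than one matching row as a
-- DuplicateAuthToken / DuplicateSessionToken error.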

@@ -1,40 +0,0 @@
use authdb::{AuthDB, Username};
use clap::{Parser, Subcommand};
use std::path::PathBuf;
#[derive(Subcommand, Debug)]
enum Commands {
AddUser { username: String },
DeleteUser { username: String },
ListUsers,
}
#[derive(Parser, Debug)]
struct Args {
#[command(subcommand)]
command: Commands,
}
#[tokio::main]
pub async fn main() {
let args = Args::parse();
let authdb = AuthDB::new(PathBuf::from(&std::env::var("AUTHDB").unwrap()))
.await
.expect("to be able to open the database");
match args.command {
Commands::AddUser { username } => {
match authdb.add_user(Username::from(username.clone())).await {
Ok(token) => {
println!("User {} created. Auth token: {}", username, *token);
}
Err(err) => {
println!("Could not create user {}", username);
println!("\tError: {:?}", err);
}
}
}
Commands::DeleteUser { .. } => {}
Commands::ListUsers => {}
}
}

@@ -1,302 +0,0 @@
use base64ct::{Base64, Encoding};
use serde::{Deserialize, Serialize};
use sha2::{Digest, Sha256};
use sqlx::{
sqlite::{SqlitePool, SqliteRow},
Row,
};
use std::ops::Deref;
use std::path::PathBuf;
use thiserror::Error;
use uuid::Uuid;
#[derive(Debug, Error)]
pub enum AuthError {
#[error("authentication token is duplicated")]
DuplicateAuthToken,
#[error("session token is duplicated")]
DuplicateSessionToken,
#[error("database failed")]
SqlError(sqlx::Error),
}
impl From<sqlx::Error> for AuthError {
fn from(err: sqlx::Error) -> AuthError {
AuthError::SqlError(err)
}
}
#[derive(Clone, Debug, Serialize, Deserialize, Hash, PartialEq, Eq)]
pub struct Username(String);
impl From<String> for Username {
fn from(s: String) -> Self {
Self(s)
}
}
impl From<&str> for Username {
fn from(s: &str) -> Self {
Self(s.to_owned())
}
}
impl From<Username> for String {
fn from(s: Username) -> Self {
Self::from(&s)
}
}
impl From<&Username> for String {
fn from(s: &Username) -> Self {
let Username(s) = s;
Self::from(s)
}
}
impl Deref for Username {
type Target = String;
fn deref(&self) -> &Self::Target {
&self.0
}
}
impl sqlx::FromRow<'_, SqliteRow> for Username {
fn from_row(row: &SqliteRow) -> sqlx::Result<Self> {
let name: String = row.try_get("username")?;
Ok(Username::from(name))
}
}
#[derive(Clone, Debug, Serialize, Deserialize, Hash, PartialEq, Eq)]
pub struct AuthToken(String);
impl From<String> for AuthToken {
fn from(s: String) -> Self {
Self(s)
}
}
impl From<&str> for AuthToken {
fn from(s: &str) -> Self {
Self(s.to_owned())
}
}
impl From<AuthToken> for PathBuf {
fn from(s: AuthToken) -> Self {
Self::from(&s)
}
}
impl From<&AuthToken> for PathBuf {
fn from(s: &AuthToken) -> Self {
let AuthToken(s) = s;
Self::from(s)
}
}
impl Deref for AuthToken {
type Target = String;
fn deref(&self) -> &Self::Target {
&self.0
}
}
#[derive(Clone, Debug, Serialize, Deserialize, Hash, PartialEq, Eq)]
pub struct SessionToken(String);
impl From<String> for SessionToken {
fn from(s: String) -> Self {
Self(s)
}
}
impl From<&str> for SessionToken {
fn from(s: &str) -> Self {
Self(s.to_owned())
}
}
impl From<SessionToken> for PathBuf {
fn from(s: SessionToken) -> Self {
Self::from(&s)
}
}
impl From<&SessionToken> for PathBuf {
fn from(s: &SessionToken) -> Self {
let SessionToken(s) = s;
Self::from(s)
}
}
impl Deref for SessionToken {
type Target = String;
fn deref(&self) -> &Self::Target {
&self.0
}
}
#[derive(Clone)]
pub struct AuthDB {
pool: SqlitePool,
}
impl AuthDB {
pub async fn new(path: PathBuf) -> Result<Self, sqlx::Error> {
let migrator = sqlx::migrate!("./migrations");
let pool = SqlitePool::connect(&format!("sqlite://{}", path.to_str().unwrap())).await?;
migrator.run(&pool).await?;
Ok(Self { pool })
}
pub async fn add_user(&self, username: Username) -> Result<AuthToken, AuthError> {
let mut hasher = Sha256::new();
hasher.update(Uuid::new_v4().hyphenated().to_string());
hasher.update(username.to_string());
let auth_token = Base64::encode_string(&hasher.finalize());
let _ = sqlx::query("INSERT INTO users (username, token) VALUES ($1, $2)")
.bind(username.to_string())
.bind(auth_token.clone())
.execute(&self.pool)
.await?;
Ok(AuthToken::from(auth_token))
}
pub async fn list_users(&self) -> Result<Vec<Username>, AuthError> {
let usernames = sqlx::query_as::<_, Username>("SELECT (username) FROM users")
.fetch_all(&self.pool)
.await?;
Ok(usernames)
}
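// Token flow: add_user issues a long-lived AuthToken; authenticate exchanges
// that AuthToken for a fresh SessionToken and records it in the sessions
// table; validate_session maps a SessionToken back to the owning Username.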
pub async fn authenticate(&self, token: AuthToken) -> Result<Option<SessionToken>, AuthError> {
let results = sqlx::query("SELECT * FROM users WHERE token = $1")
.bind(token.to_string())
.fetch_all(&self.pool)
.await?;
if results.len() > 1 {
return Err(AuthError::DuplicateAuthToken);
}
if results.is_empty() {
return Ok(None);
}
let user_id: i64 = results[0].try_get("id")?;
let mut hasher = Sha256::new();
hasher.update(Uuid::new_v4().hyphenated().to_string());
hasher.update(token.to_string());
let session_token = Base64::encode_string(&hasher.finalize());
let _ = sqlx::query("INSERT INTO sessions (token, user_id) VALUES ($1, $2)")
.bind(session_token.clone())
.bind(user_id)
.execute(&self.pool)
.await?;
Ok(Some(SessionToken::from(session_token)))
}
pub async fn validate_session(
&self,
token: SessionToken,
) -> Result<Option<Username>, AuthError> {
let rows = sqlx::query(
"SELECT users.username FROM sessions INNER JOIN users ON sessions.user_id = users.id WHERE sessions.token = $1",
)
.bind(token.to_string())
.fetch_all(&self.pool)
.await?;
if rows.len() > 1 {
return Err(AuthError::DuplicateSessionToken);
}
if rows.is_empty() {
return Ok(None);
}
let username: String = rows[0].try_get("username")?;
Ok(Some(Username::from(username)))
}
}
#[cfg(test)]
mod tests {
use super::*;
use cool_asserts::assert_matches;
use std::collections::HashSet;
#[tokio::test]
async fn can_create_and_list_users() {
let db = AuthDB::new(PathBuf::from(":memory:"))
.await
.expect("a memory-only database will be created");
let _ = db
.add_user(Username::from("savanni"))
.await
.expect("user to be created");
assert_matches!(db.list_users().await, Ok(names) => {
let names = names.into_iter().collect::<HashSet<Username>>();
assert!(names.contains(&Username::from("savanni")));
})
}
#[tokio::test]
async fn unknown_auth_token_returns_nothing() {
let db = AuthDB::new(PathBuf::from(":memory:"))
.await
.expect("a memory-only database will be created");
let _ = db
.add_user(Username::from("savanni"))
.await
.expect("user to be created");
let token = AuthToken::from("0000000000");
assert_matches!(db.authenticate(token).await, Ok(None));
}
#[tokio::test]
async fn auth_token_becomes_session_token() {
let db = AuthDB::new(PathBuf::from(":memory:"))
.await
.expect("a memory-only database will be created");
let token = db
.add_user(Username::from("savanni"))
.await
.expect("user to be created");
assert_matches!(db.authenticate(token).await, Ok(_));
}
#[tokio::test]
async fn can_validate_session_token() {
let db = AuthDB::new(PathBuf::from(":memory:"))
.await
.expect("a memory-only database will be created");
let token = db
.add_user(Username::from("savanni"))
.await
.expect("user to be created");
let session = db
.authenticate(token)
.await
.expect("token authentication should succeed")
.expect("session token should be found");
assert_matches!(
db.validate_session(session).await,
Ok(Some(username)) => {
assert_eq!(username, Username::from("savanni"));
});
}
}

@@ -1,12 +0,0 @@
[build]
target = "thumbv6m-none-eabi"
[target.thumbv6m-none-eabi]
rustflags = [
"-C", "link-arg=--nmagic",
"-C", "link-arg=-Tlink.x",
"-C", "inline-threshold=5",
"-C", "no-vectorize-loops",
]
runner = "elf2uf2-rs -d"

@@ -1,18 +0,0 @@
[package]
name = "bike"
version = "0.1.0"
edition = "2021"
# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html
[dependencies]
az = { version = "1" }
cortex-m-rt = { version = "0.7.3" }
cortex-m = { version = "0.7.7" }
embedded-alloc = { version = "0.5.1" }
embedded-hal = { version = "0.2.7" }
fixed = { version = "1" }
fugit = { version = "0.3.7" }
lights-core = { path = "../core" }
panic-halt = { version = "0.2.0" }
rp-pico = { version = "0.8.0" }

@@ -1,244 +0,0 @@
#![no_main]
#![no_std]
extern crate alloc;
use alloc::boxed::Box;
use az::*;
use core::cell::RefCell;
use cortex_m::delay::Delay;
use embedded_alloc::Heap;
use embedded_hal::{blocking::spi::Write, digital::v2::InputPin, digital::v2::OutputPin};
use fixed::types::I16F16;
use fugit::RateExtU32;
use lights_core::{App, BodyPattern, DashboardPattern, Event, Instant, FPS, UI};
use panic_halt as _;
use rp_pico::{
entry,
hal::{
clocks::init_clocks_and_plls,
gpio::{FunctionSio, Pin, PinId, PullDown, PullUp, SioInput, SioOutput},
pac::{CorePeripherals, Peripherals},
spi::{Enabled, Spi, SpiDevice, ValidSpiPinout},
watchdog::Watchdog,
Clock, Sio,
},
Pins,
};
#[global_allocator]
static HEAP: Heap = Heap::empty();
const LIGHT_SCALE: I16F16 = I16F16::lit("256.0");
const DASHBOARD_BRIGHTESS: u8 = 1;
const BODY_BRIGHTNESS: u8 = 8;
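// DebouncedButton wraps an active-low input: is_low() ignores the pin until
// the debounce deadline has passed, and set_debounce() pushes that deadline
// 250 time units (roughly 250 ms, since the main loop advances `time` by
// 1000 / FPS each tick) past the moment a press was handled.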
struct DebouncedButton<P: PinId> {
debounce: Instant,
pin: Pin<P, FunctionSio<SioInput>, PullUp>,
}
impl<P: PinId> DebouncedButton<P> {
fn new(pin: Pin<P, FunctionSio<SioInput>, PullUp>) -> Self {
Self {
debounce: Instant((0 as u32).into()),
pin,
}
}
fn is_low(&self, time: Instant) -> bool {
if time <= self.debounce {
return false;
}
self.pin.is_low().unwrap_or(false)
}
fn set_debounce(&mut self, time: Instant) {
self.debounce = time + Instant((250 as u32).into());
}
}
struct BikeUI<
D: SpiDevice,
P: ValidSpiPinout<D>,
LeftId: PinId,
RightId: PinId,
PreviousId: PinId,
NextId: PinId,
BrakeId: PinId,
> {
spi: RefCell<Spi<Enabled, D, P, 8>>,
left_blinker_button: DebouncedButton<LeftId>,
right_blinker_button: DebouncedButton<RightId>,
previous_animation_button: DebouncedButton<PreviousId>,
next_animation_button: DebouncedButton<NextId>,
brake_sensor: Pin<BrakeId, FunctionSio<SioInput>, PullUp>,
brake_enabled: bool,
}
impl<
D: SpiDevice,
P: ValidSpiPinout<D>,
LeftId: PinId,
RightId: PinId,
PreviousId: PinId,
NextId: PinId,
BrakeId: PinId,
> BikeUI<D, P, LeftId, RightId, PreviousId, NextId, BrakeId>
{
fn new(
spi: Spi<Enabled, D, P, 8>,
left_blinker_button: Pin<LeftId, FunctionSio<SioInput>, PullUp>,
right_blinker_button: Pin<RightId, FunctionSio<SioInput>, PullUp>,
previous_animation_button: Pin<PreviousId, FunctionSio<SioInput>, PullUp>,
next_animation_button: Pin<NextId, FunctionSio<SioInput>, PullUp>,
brake_sensor: Pin<BrakeId, FunctionSio<SioInput>, PullUp>,
) -> Self {
Self {
spi: RefCell::new(spi),
left_blinker_button: DebouncedButton::new(left_blinker_button),
right_blinker_button: DebouncedButton::new(right_blinker_button),
previous_animation_button: DebouncedButton::new(previous_animation_button),
next_animation_button: DebouncedButton::new(next_animation_button),
brake_sensor,
brake_enabled: false,
}
}
}
impl<
D: SpiDevice,
P: ValidSpiPinout<D>,
LeftId: PinId,
RightId: PinId,
PreviousId: PinId,
NextId: PinId,
BrakeId: PinId,
> UI for BikeUI<D, P, LeftId, RightId, PreviousId, NextId, BrakeId>
{
fn check_event(&mut self, current_time: Instant) -> Option<Event> {
/*
if self.brake_sensor.is_high().unwrap_or(true) && !self.brake_enabled {
self.brake_enabled = true;
Some(Event::Brake)
} else if self.brake_sensor.is_low().unwrap_or(false) && self.brake_enabled {
self.brake_enabled = false;
Some(Event::BrakeRelease)
} else if self.left_blinker_button.is_low(current_time) {
*/
if self.left_blinker_button.is_low(current_time) {
self.left_blinker_button.set_debounce(current_time);
Some(Event::LeftBlinker)
} else if self.right_blinker_button.is_low(current_time) {
self.right_blinker_button.set_debounce(current_time);
Some(Event::RightBlinker)
} else if self.previous_animation_button.is_low(current_time) {
self.previous_animation_button.set_debounce(current_time);
Some(Event::PreviousPattern)
} else if self.next_animation_button.is_low(current_time) {
self.next_animation_button.set_debounce(current_time);
Some(Event::NextPattern)
} else {
None
}
}
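// update_lights streams one frame over SPI. The byte layout appears to be
// APA102/SK9822-style framing (an inference from the layout, not stated in
// the source): a zeroed start frame, one 0xE0 | brightness byte plus three
// colour bytes per LED (3 dashboard LEDs, then 60 body LEDs), and an
// all-0xFF end frame at bytes 256..260.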
fn update_lights(&self, dashboard_lights: DashboardPattern, body_lights: BodyPattern) {
let mut lights: [u8; 260] = [0; 260];
lights[256] = 0xff;
lights[257] = 0xff;
lights[258] = 0xff;
lights[259] = 0xff;
for (idx, rgb) in dashboard_lights.iter().enumerate() {
lights[(idx + 1) * 4 + 0] = 0xe0 + DASHBOARD_BRIGHTESS;
lights[(idx + 1) * 4 + 1] = (I16F16::from(rgb.r) * LIGHT_SCALE).saturating_as();
lights[(idx + 1) * 4 + 2] = (I16F16::from(rgb.b) * LIGHT_SCALE).saturating_as();
lights[(idx + 1) * 4 + 3] = (I16F16::from(rgb.g) * LIGHT_SCALE).saturating_as();
}
for (idx, rgb) in body_lights.iter().enumerate() {
lights[(idx + 4) * 4 + 0] = 0xe0 + BODY_BRIGHTNESS;
lights[(idx + 4) * 4 + 1] = (I16F16::from(rgb.b) * LIGHT_SCALE).saturating_as();
lights[(idx + 4) * 4 + 2] = (I16F16::from(rgb.g) * LIGHT_SCALE).saturating_as();
lights[(idx + 4) * 4 + 3] = (I16F16::from(rgb.r) * LIGHT_SCALE).saturating_as();
}
let mut spi = self.spi.borrow_mut();
spi.write(lights.as_slice());
}
}
#[entry]
fn main() -> ! {
{
use core::mem::MaybeUninit;
const HEAP_SIZE: usize = 8096;
static mut HEAP_MEM: [MaybeUninit<u8>; HEAP_SIZE] = [MaybeUninit::uninit(); HEAP_SIZE];
unsafe { HEAP.init(HEAP_MEM.as_ptr() as usize, HEAP_SIZE) }
}
let mut pac = Peripherals::take().unwrap();
let core = CorePeripherals::take().unwrap();
let sio = Sio::new(pac.SIO);
let mut watchdog = Watchdog::new(pac.WATCHDOG);
let pins = Pins::new(
pac.IO_BANK0,
pac.PADS_BANK0,
sio.gpio_bank0,
&mut pac.RESETS,
);
let clocks = init_clocks_and_plls(
12_000_000u32,
pac.XOSC,
pac.CLOCKS,
pac.PLL_SYS,
pac.PLL_USB,
&mut pac.RESETS,
&mut watchdog,
)
.ok()
.unwrap();
let mut delay = Delay::new(core.SYST, clocks.system_clock.freq().to_Hz());
let mut spi_clk = pins.gpio10.into_function();
let mut spi_sdo = pins.gpio11.into_function();
let spi = Spi::<_, _, _, 8>::new(pac.SPI1, (spi_sdo, spi_clk));
let mut spi = spi.init(
&mut pac.RESETS,
clocks.peripheral_clock.freq(),
1_u32.MHz(),
embedded_hal::spi::MODE_1,
);
let left_blinker_button = pins.gpio16.into_pull_up_input();
let right_blinker_button = pins.gpio17.into_pull_up_input();
let previous_animation_button = pins.gpio27.into_pull_up_input();
let next_animation_button = pins.gpio26.into_pull_up_input();
let brake_sensor = pins.gpio18.into_pull_up_input();
let mut led_pin = pins.led.into_push_pull_output();
let ui = BikeUI::new(
spi,
left_blinker_button,
right_blinker_button,
previous_animation_button,
next_animation_button,
brake_sensor,
);
let mut app = App::new(Box::new(ui));
led_pin.set_high();
let mut time = Instant::default();
let delay_ms = 1000 / (FPS as u32);
loop {
app.tick(time);
delay.delay_ms(delay_ms);
time = time + Instant(delay_ms.into());
}
}

@@ -1,158 +0,0 @@
$fn = 50;
threshold = 0.1;
half_threshold = threshold / 2;
bevel = 0.5;
wire_radius = 1;
wall_thickness = 2;
cutout_threshold = 1;
battery_length = 71;
battery_width = 18.75;
cell_holder_length = battery_length + wall_thickness * 2;
cell_holder_width = battery_width + wall_thickness * 2;
cell_holder_height = battery_width + wall_thickness;
battery_contact_thickness = .6;
// battery_contact_thickness = 1;
battery_contact_width = 11;
battery_contact_length = 12.8;
battery_contact_spring_height = 10.5;
battery_contact_flange_height = 1.9;
converter_width = 11.25;
converter_length = 22.25;
converter_height = 5;
include <./common.scad>;
// box(20, 10, 10);
// color("blue", 0.5) cube([10, 20, 10], center = true);
module cell_cradle(width, height) {
difference() {
translate([0, 0, -height / 2]) cube([width,
wall_thickness,
height],
center = true);
color("red", 1) translate([0, 0, 0])
rotate([90, 0, 0])
cylinder(h = wall_thickness + cutout_threshold,
r = width / 2,
center = true);
}
}
module cell_box() {
union() {
channel(cell_holder_length, cell_holder_width, cell_holder_height);
translate([0, -battery_length / 6, wall_thickness]) cell_cradle(cell_holder_width, cell_holder_height / 2);
translate([0, battery_length / 6, wall_thickness]) cell_cradle(cell_holder_width, cell_holder_height / 2);
}
}
module contact_box() {
contact_thickness = battery_contact_flange_height * .75;
cutout_width = battery_contact_width * .8;
// box_thickness = contact_thickness_ + wall_thickness * 2;
// box_height = width + wall_thickness;
difference() {
box(wall_thickness * 2 + contact_thickness, cell_holder_width, cell_holder_height);
translate([0, contact_thickness, wall_thickness * 2])
cube([battery_contact_width,
wall_thickness * 2,
battery_contact_length + threshold],
center = true);
color("red", 1) translate([0,
-(wall_thickness + contact_thickness + threshold) / 2,
cell_holder_height / 2])
cube([5, wall_thickness + threshold * 2, cell_holder_height], center = true);
translate([0,
-(wall_thickness + contact_thickness + threshold) / 2 - wire_radius,
0])
rotate([0, 90, 0])
cylinder(h = cell_holder_width, r = wire_radius, center = true);
color("green", 1) translate([-cell_holder_width / 2, 0, cell_holder_height / 2])
rotate([0, 90, 0])
cylinder(h = 5, r = contact_thickness / 2, center = true);
color("green", 1) translate([cell_holder_width / 2, 0, cell_holder_height / 2])
rotate([0, 90, 0])
cylinder(h = 5, r = contact_thickness / 2, center = true);
}
}
module battery_slot() {
difference() {
union() {
translate([0, -cell_holder_length / 2, 0]) contact_box();
translate([0, wall_thickness, 0]) cell_box();
translate([0, cell_holder_length / 2 + wall_thickness * 2, 0])
rotate([0, 0, 180])
contact_box();
}
translate([cell_holder_width / 2, 1, 0]) rotate([90, 0, 0]) cylinder(h = cell_holder_length + wall_thickness * 4 + battery_contact_flange_height * 2, r = wire_radius, center = true);
translate([-cell_holder_width / 2, 1, 0]) rotate([90, 0, 0]) cylinder(h = cell_holder_length + wall_thickness * 4 + battery_contact_flange_height * 2, r = wire_radius, center = true);
}
}
module converter_box() {
box_length = wall_thickness * 2 + converter_height;
box_width = cell_holder_width * 2 - wall_thickness;
difference() {
box(box_length, box_width, cell_holder_height);
translate([cell_holder_width - wire_radius, 0, 0])
rotate([90, 0, 0])
cylinder(h = box_length, r = wire_radius, center = true);
translate([cell_holder_width - wire_radius * 2, 0, 0])
rotate([0, 90, 0])
cylinder(h = wall_thickness + threshold, r = wire_radius, center = true);
translate([-cell_holder_width + wire_radius, 0, 0])
rotate([90, 0, 0])
cylinder(h = box_length, r = wire_radius, center = true);
translate([-cell_holder_width + wire_radius * 2, 0, 0])
rotate([0, 90, 0])
cylinder(h = wall_thickness + threshold, r = wire_radius, center = true);
translate([0, -box_length / 2, 0])
rotate([0, 90, 0])
cylinder(h = cell_holder_width * 2 + wall_thickness, r = wire_radius, center = true);
translate([-cell_holder_width * .75, (-box_length + wall_thickness) / 2, 0])
rotate([90, 0, 0])
cylinder(h = wall_thickness * 2, r = wire_radius, center = true);
translate([cell_holder_width * .75, (-box_length + wall_thickness) / 2, 0])
rotate([90, 0, 0])
cylinder(h = wall_thickness * 2, r = wire_radius, center = true);
color("red", 1) translate([-box_width / 4, -(converter_height + wall_thickness) / 2, cell_holder_height / 2])
cube([5, wall_thickness + threshold * 2, cell_holder_height], center = true);
color("red", 1) translate([box_width / 4, -(converter_height + wall_thickness) / 2, cell_holder_height / 2])
cube([5, wall_thickness + threshold * 2, cell_holder_height], center = true);
}
}
module battery_case() {
union() {
translate([-cell_holder_width / 2, 0, 0]) battery_slot();
translate([cell_holder_width / 2 - wall_thickness, 0, 0]) battery_slot();
translate([-wall_thickness / 2,
cell_holder_length / 2 + wall_thickness * 2 + battery_contact_flange_height + wall_thickness * 2 + wall_thickness / 2,
0])
converter_box();
}
}
battery_case();

@@ -1,174 +0,0 @@
width = 65;
length = 75;
height = 16;
wall_thickness = 2;
guide_thickness = 1;
power_width = 21;
output_width = 37.5;
half_wall_thickness = wall_thickness / 2;
standoff_thickness = 10;
hole_diameter = 3;
// The radius of a nut in mm. However, based on my measurements, I'm not actually sure I have this right. The short height of a nut is 7.86mm. Derive from there.
nut_radius = 8.5 * cos(30) / 2;
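// One way to derive it, assuming the measured 7.86 mm "short height" is the
// across-flats width s of the nut: the center-to-corner radius used by
// hexagon() is r = s / (2 * cos(30)), i.e. roughly 0.577 * s, which gives
// r of about 4.54 mm for s = 7.86 mm.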
nut_height = 2.69; // mm
screw_radius = 2;
handlebar_radius = 15;
clasp_thickness = 4;
clasp_width = 35;
circular_face_count = 48;
module hexagon(r, h) {
pi = 3.1415926;
polyhedron(
points=[
[r, 0, 0],
[r * cos(60), r * sin(60), 0],
[r * cos(120), r * sin(120), 0],
[r * cos(180), r * sin(180), 0],
[r * cos(240), r * sin(240), 0],
[r * cos(300), r * sin(300), 0],
[r, 0, h],
[r * cos(60), r * sin(60), h],
[r * cos(120), r * sin(120), h],
[r * cos(180), r * sin(180), h],
[r * cos(240), r * sin(240), h],
[r * cos(300), r * sin(300), h],
],
faces=[
[0, 1, 2, 3, 4, 5],
[11, 10, 9, 8, 7, 6],
[6, 7, 1, 0],
[7, 8, 2, 1],
[8, 9, 3, 2],
[9, 10, 4, 3],
[10, 11, 5, 4],
[11, 6, 0, 5],
]
);
}
// Nut holders are blocks that have a hole drilled through them and a hexagonal-shaped cavity. The idea is to seat a hex nut in the cavity so that a screw can be threaded into it through the hole.
module nut_holder() {
difference() {
translate([-4.5, -4.5, -2]) cube([9, 9, 4]);
union() {
translate([0, 0, -1]) hexagon(nut_radius, 2);
cylinder(h = 6, r = screw_radius, center = true, $fn = 24);
}
}
}
module screw_hole() {
union() {
translate([0, 0, 4]) cylinder(h = 2.1, r = screw_radius * 2, center = true, $fn = 24);
cylinder(h = 6, r = screw_radius, center = true, $fn = 24);
}
}
module base() {
cube([width, length, wall_thickness]);
}
module face() {
union() {
cube([width, length, wall_thickness / 2]);
translate([wall_thickness, wall_thickness, wall_thickness / 2]) cube([width-wall_thickness*2, length-wall_thickness*2, wall_thickness / 2]);
translate([4.5 + wall_thickness, 4.5 + wall_thickness, 4]) nut_holder();
translate([width - 4.5 - wall_thickness, 4.5 + wall_thickness, 4]) nut_holder();
translate([width - 4.5 - wall_thickness, length - 4.5 - wall_thickness, 4]) nut_holder();
translate([4.5 + wall_thickness, length - 4.5 - wall_thickness, 4]) nut_holder();
}
}
module wall(length) {
cube([length, height, wall_thickness]);
}
module power_wall() {
difference() {
wall(65);
translate([9, 2, -.5]) cube([power_width, height, wall_thickness + 1]);
}
}
module output_wall() {
difference() {
wall(65);
translate([9, 2, -.5]) cube([output_width, height, wall_thickness + 1]);
}
}
// Use hexagons as cutouts into which I can install a hex nut. This isn't quite right yet, but close.
// hexagon(nut_radius, 1);
// cube([standoff_thickness, standoff_thickness, 2]);
/*
difference() {
union() {
base();
rotate([90, 0, 90]) wall(75);
// translate([width - wall_thickness, 0, 0]) rotate([90, 0, 90]) wall(length);
// rotate([90, 0, 0]) power_wall();
// translate([0, length, 0]) rotate([90, 0, 0]) output_wall();
// translate([wall_thickness,
// wall_thickness,
// wall_thickness]) standoff();
// translate([width - wall_thickness - standoff_thickness,
// wall_thickness,
// wall_thickness]) standoff();
// translate([wall_thickness,
// length - wall_thickness - standoff_thickness,
// wall_thickness]) standoff();
// translate([width - wall_thickness - standoff_thickness,
// length - wall_thickness - standoff_thickness,
// wall_thickness]) standoff();
}
// translate([-half_wall_thickness, -wall_thickness - half_wall_thickness, height - half_wall_thickness]) cube([wall_thickness, length + wall_thickness * 2, wall_thickness]);
// translate([width - half_wall_thickness, -wall_thickness - half_wall_thickness, height - half_wall_thickness]) cube([wall_thickness, length + wall_thickness * 2, wall_thickness]);
// translate([-half_wall_thickness, -half_wall_thickness, height - half_wall_thickness]) rotate([0, 0, 270]) cube([wall_thickness, width + wall_thickness * 2, wall_thickness]);
// translate([-half_wall_thickness, length + half_wall_thickness, height - half_wall_thickness]) rotate([0, 0, 270]) cube([wall_thickness, width + wall_thickness * 2, wall_thickness]);
}
*/
module box() {
difference() {
union() {
cube([width, length, wall_thickness * 2]);
translate([0, 0, wall_thickness]) rotate([90, 0, 90]) wall(length);
translate([width - wall_thickness, 0, wall_thickness]) rotate([90, 0, 90]) wall(length);
translate([0, wall_thickness, wall_thickness]) rotate([90, 0, 0]) wall(width);
translate([0, length, wall_thickness]) rotate([90, 0, 0]) wall(width);
}
translate([4.5 + wall_thickness, 4.5 + wall_thickness, 4]) rotate([180, 0, 0]) screw_hole();
translate([width - 4.5 - wall_thickness, 4.5 + wall_thickness, 4]) rotate([180, 0, 0]) screw_hole();
translate([width - 4.5 - wall_thickness, length - 4.5 - wall_thickness, 4]) rotate([180, 0, 0]) screw_hole();
translate([4.5 + wall_thickness, length - 4.5 - wall_thickness, 4]) rotate([180, 0, 0]) screw_hole();
}
}
module top_clasp() {
difference() {
union() {
cylinder(h = clasp_width, r = handlebar_radius + clasp_thickness, center = true, $fn = circular_face_count);
translate([0, 0, -clasp_width / 2]) cylinder(h = 1, r = handlebar_radius + clasp_thickness + 1, center = true, $fn = circular_face_count);
translate([0, 0, -clasp_width / 2 + 4]) cylinder(h = 1, r = handlebar_radius + clasp_thickness + 1, center = true, $fn = circular_face_count);
translate([0, 0, clasp_width / 2]) cylinder(h = 1, r = handlebar_radius + clasp_thickness + 1, center = true, $fn = circular_face_count);
translate([0, 0, clasp_width / 2 - 4]) cylinder(h = 1, r = handlebar_radius + clasp_thickness + 1, center = true, $fn = circular_face_count);
translate([-handlebar_radius-5, -10, -clasp_width / 2 + 6]) cube([6, 20, clasp_width - 12]);
}
translate([-0.5, 0, 0]) cylinder(h = clasp_width+2, r = handlebar_radius + 1, center = true, $fn = circular_face_count);
translate([-0.5, -handlebar_radius - 10, -clasp_width / 2 - 1]) cube([handlebar_radius + 10, handlebar_radius * 2 + 20, clasp_width + 2]);
}
}
module body() {
union() {
box();
translate([width / 2, length / 2, -5 - handlebar_radius]) rotate([0, 90, 90]) top_clasp();
}
}
body();
translate([width + 10, 0, 0]) face();

@@ -1,21 +0,0 @@
handlebar_radius = 15;
clasp_thickness = 4;
circular_face_count = 48;
clasp_width = 35;
module top_clasp() {
difference() {
union() {
cylinder(h = clasp_width, r = handlebar_radius + clasp_thickness, center = true, $fn = circular_face_count);
translate([0, 0, -clasp_width / 2]) cylinder(h = 1, r = handlebar_radius + clasp_thickness + 1, center = true, $fn = circular_face_count);
translate([0, 0, -clasp_width / 2 + 4]) cylinder(h = 1, r = handlebar_radius + clasp_thickness + 1, center = true, $fn = circular_face_count);
translate([0, 0, clasp_width / 2]) cylinder(h = 1, r = handlebar_radius + clasp_thickness + 1, center = true, $fn = circular_face_count);
translate([0, 0, clasp_width / 2 - 4]) cylinder(h = 1, r = handlebar_radius + clasp_thickness + 1, center = true, $fn = circular_face_count);
translate([-handlebar_radius-5, -10, -clasp_width / 2 + 6]) cube([6, 20, clasp_width - 12]);
}
translate([-0.5, 0, 0]) cylinder(h = clasp_width+2, r = handlebar_radius + 1, center = true, $fn = circular_face_count);
translate([-0.5, -handlebar_radius - 10, -clasp_width / 2 - 1]) cube([handlebar_radius + 10, handlebar_radius * 2 + 20, clasp_width + 2]);
}
}
top_clasp();

@@ -1,92 +0,0 @@
module hexagon(r, h) {
cylinder(r = r, h = h, center = 2, $fn = 6);
}
module pill(length, bevel) {
hull() {
translate([0, 0, (-length / 2) + bevel]) sphere(r = bevel);
translate([0, 0, (length / 2) - bevel]) sphere(r = bevel);
}
}
module rounded_cube(dimensions, bevel = 0) {
x = dimensions[0];
y = dimensions[1];
z = dimensions[2];
if (bevel > 0) {
hull() {
translate([-x / 2 + bevel, -y / 2 + bevel, -z / 2 + bevel]) sphere(r = bevel);
translate([ x / 2 - bevel, -y / 2 + bevel, -z / 2 + bevel]) sphere(r = bevel);
translate([ x / 2 - bevel, y / 2 - bevel, -z / 2 + bevel]) sphere(r = bevel);
translate([-x / 2 + bevel, y / 2 - bevel, -z / 2 + bevel]) sphere(r = bevel);
translate([-x / 2 + bevel, -y / 2 + bevel, z / 2 - bevel]) sphere(r = bevel);
translate([ x / 2 - bevel, -y / 2 + bevel, z / 2 - bevel]) sphere(r = bevel);
translate([ x / 2 - bevel, y / 2 - bevel, z / 2 - bevel]) sphere(r = bevel);
translate([-x / 2 + bevel, y / 2 - bevel, z / 2 - bevel]) sphere(r = bevel);
}
} else {
cube(dimensions, center = true);
}
}
module box_face(dimensions, bevel = 0) {
x = dimensions[0];
y = dimensions[1];
z = dimensions[2];
if (bevel > 0) {
translate([0, 0, z / 2])
hull() {
pill(z, bevel);
translate([x, 0, 0])
pill(z, bevel);
translate([x, y, 0])
pill(z, bevel);
translate([0, y, 0])
pill(z, bevel);
}
} else {
cube(dimensions);
}
}
module channel(length, width, height, bevel) {
union() {
box_face([length, width, wall_thickness], bevel);
translate([0, wall_thickness - bevel, bevel])
rotate([90, 0, 0])
box_face([length, height, wall_thickness], bevel);
translate([0, width + bevel, bevel])
rotate([90, 0, 0])
box_face([length, height, wall_thickness], bevel);
}
}
module box(length, width, height, bevel = 0) {
union() {
channel(length, width, height, bevel);
translate([-bevel, 0, bevel])
rotate([90, 0, 0])
rotate([0, 90, 0])
box_face([width, height, wall_thickness], bevel);
translate([length - wall_thickness + bevel, 0, bevel])
rotate([90, 0, 0])
rotate([0, 90, 0])
box_face([width, height, wall_thickness], bevel);
}
}
module box_side_slider(length, width, height) {
difference() {
box_face([width - wall_thickness * 2 + 4, height, wall_thickness], bevel);
translate([-1, -1, 1]) cube([4-threshold, height+2, 4-threshold]);
color("red") translate([width - wall_thickness * 2 + 1, -1, 1]) cube([4-threshold, height+2, 4-threshold]);
}
}

@@ -1,210 +0,0 @@
$fn = 50;
threshold = 0.1;
board_length = 92;
board_width = 72;
board_height = 21.5;
wall_thickness = 4;
bevel = 0.5;
hinge_radius = 2.5;
case_width = board_width + wall_thickness * 2;
case_length = board_length + wall_thickness * 2;
case_height = board_height + wall_thickness;
handlebar_radius = 15;
clasp_thickness = 4;
circular_face_count = 48;
clasp_width = 35;
include <./common.scad>;
module top_clasp() {
difference() {
union() {
cylinder(h = clasp_width, r = handlebar_radius + clasp_thickness, center = true, $fn = circular_face_count);
translate([0, 0, -clasp_width / 2]) cylinder(h = 1, r = handlebar_radius + clasp_thickness + 1, center = true, $fn = circular_face_count);
translate([0, 0, -clasp_width / 2 + 4]) cylinder(h = 1, r = handlebar_radius + clasp_thickness + 1, center = true, $fn = circular_face_count);
translate([0, 0, clasp_width / 2]) cylinder(h = 1, r = handlebar_radius + clasp_thickness + 1, center = true, $fn = circular_face_count);
translate([0, 0, clasp_width / 2 - 4]) cylinder(h = 1, r = handlebar_radius + clasp_thickness + 1, center = true, $fn = circular_face_count);
translate([-handlebar_radius-5, -10, -clasp_width / 2 + 6]) cube([6, 20, clasp_width - 12]);
}
translate([-0.5, 0, 0]) cylinder(h = clasp_width+2, r = handlebar_radius + 1, center = true, $fn = circular_face_count);
translate([-0.5, -handlebar_radius - 10, -clasp_width / 2 - 1]) cube([handlebar_radius + 10, handlebar_radius * 2 + 20, clasp_width + 2]);
}
}
module hinge(length) {
difference() {
union() {
cube([hinge_radius * 2, length, hinge_radius], center = true);
translate([0, 0, -1.5]) rotate([90, 0, 0]) cylinder(h = length, r = hinge_radius, center = true);
}
translate([0, threshold / 2, -1.5]) rotate([90, 0, 0]) cylinder(h = length + threshold * 2, r = 1, center = true);
}
}
module base_case(length, width, height, bevel = 0) {
difference() {
union() {
channel(length + wall_thickness / 2, width, height, bevel);
translate([-bevel, 0, bevel])
rotate([90, 0, 0])
rotate([0, 90, 0])
box_face([width, height, wall_thickness], bevel);
// These are the sleds at the bottom of the case that should hold the lower of the two boards down
color("blue") translate([0, wall_thickness - 2, wall_thickness + 4]) cube([length - 8, 4, wall_thickness / 2]);
color("blue") translate([wall_thickness - 2, wall_thickness - 4, wall_thickness + 4]) cube([4, width, wall_thickness / 2]);
color("blue") translate([length - 25, width - wall_thickness * 3 / 2, wall_thickness + 6]) cube([16, wall_thickness, wall_thickness / 2]);
}
// This makes an indent at the bottom to accommodate solder joins
translate([wall_thickness + 2, wall_thickness + 2, wall_thickness / 2]) cube([length, width - wall_thickness * 2 - 4, wall_thickness / 2 + threshold]);
// This creates a cutout that lets the power plug slide in better.
translate([wall_thickness, width - wall_thickness, wall_thickness]) cube([length, 2, 6]);
// These two put in the slots that should allow the fourth wall to be slotted into place.
color("red") translate([length - 1, wall_thickness - 2, 4]) cube([2, 2, height]);
color("red") translate([length - 1, width - wall_thickness, 4]) cube([2, 2, height]);
}
}
module main_case() {
hinge_length = board_length / 4;
hinge_y_offset = board_width + wall_thickness + hinge_radius;
hinge_z_offset = board_height;
difference() {
union() {
base_case(case_length,
case_width,
case_height,
bevel);
translate([-bevel, 0, bevel])
rotate([90, 0, 0])
rotate([0, 90, 0])
box_face([case_width, case_height, wall_thickness], bevel);
translate([0, -hinge_radius - bevel + threshold, hinge_z_offset + bevel])
rotate([90, 0, 0])
rotate([0, 90, 0])
hinge(case_length / 4);
translate([case_length - hinge_length, -hinge_radius - bevel + threshold, hinge_z_offset + bevel])
rotate([90, 0, 0])
rotate([0, 90, 0])
hinge(case_length / 4);
translate([43, case_width, wall_thickness + 8])
rotate([90, 0, 0])
rotate([0, 180, 0])
linear_extrude(1)
text("lights", size = 3);
translate([67, case_width, wall_thickness + 8])
rotate([90, 0, 0])
rotate([0, 180, 0])
linear_extrude(1)
text("left", size = 3);
translate([55, case_width, wall_thickness + 8])
rotate([90, 0, 0])
rotate([0, 180, 0])
linear_extrude(1)
text("right", size = 3);
// translate([case_length / 2, case_width / 2, -20]) rotate([0, 90, 0]) top_clasp();
}
translate([case_length / 2, case_width / 2, -threshold]) hexagon(4.5, 6);
# translate([8.5 + wall_thickness, case_width - wall_thickness - threshold, wall_thickness])
# cube([60, wall_thickness * 2, 7]);
}
}
module lamp() {
union() {
translate([0, 0, -0.5]) cube([12.9 + threshold, 8, 4], center = true);
translate([0, 0, .88]) cube([5 + threshold, 5 + threshold, 1.56], center = true);
/*
translate([0, 0, -1.56]) cube([12.9, 7.6, wall_thickness], center = true);
*/
}
}
module button() {
union() {
cube([3.5 + threshold, 6.1 + threshold, 4 + threshold], center = true);
translate([0, 0, -0.5]) cube([1.2, 7, 3 + threshold], center = true);
}
}
module lid() {
lid_width = case_width + hinge_radius * 2 + wall_thickness;
hinge_length = case_length / 4;
union() {
difference() {
rounded_cube([case_length,
lid_width,
wall_thickness],
bevel);
translate([0, lid_width / 5, 0.4]) lamp();
translate([-15, lid_width / 5, 0.4]) lamp();
translate([15, lid_width / 5, 0.4]) lamp();
translate([-30, lid_width / 5, 0]) button();
translate([30, lid_width / 5, 0]) button();
translate([0, lid_width / 5, -2]) cube([20, 7, 3], center = true);
color("black") translate([-2, lid_width / 5 - 5, -2]) rotate([0, 0, 90]) rotate([0, 90, 0]) cylinder(h=5, r = 1, center = true, $fn = circular_face_count);
color("black") translate([-17, lid_width / 5 - 5, -2]) rotate([0, 0, 90]) rotate([0, 90, 0]) cylinder(h=5, r = 1, center = true, $fn = circular_face_count);
color("black") translate([13, lid_width / 5 - 5, -2]) rotate([0, 0, 90]) rotate([0, 90, 0]) cylinder(h=5, r = 1, center = true, $fn = circular_face_count);
color("black") translate([-30, lid_width / 5 - 5, -2]) rotate([0, 0, 90]) rotate([0, 90, 0]) cylinder(h=5, r = 1, center = true, $fn = circular_face_count);
color("black") translate([30, lid_width / 5 - 5, -2]) rotate([0, 0, 90]) rotate([0, 90, 0]) cylinder(h=5, r = 1, center = true, $fn = circular_face_count);
color("black") translate([0, 10, -2]) rotate([0, 90, 0]) cylinder(h = 62, r = 1, center = true, $fn = circular_face_count);
color("red") translate([-33, 21, -2]) rotate([0, 90, 0]) cylinder(h = 5, r = 1, center = true, $fn = circular_face_count);
color("red") translate([-35, 13, -2]) rotate([0, 0, 90]) rotate([0, 90, 0]) cylinder(h = 18, r = 1, center = true, $fn = circular_face_count);
color("red") translate([33, 21, -2]) rotate([0, 90, 0]) cylinder(h = 5, r = 1, center = true, $fn = circular_face_count);
color("red") translate([35, 13, -2]) rotate([0, 0, 90]) rotate([0, 90, 0]) cylinder(h = 18, r = 1, center = true, $fn = circular_face_count);
color("red") translate([0, 5, -2]) rotate([0, 90, 0]) cylinder(h = 70, r = 1, center = true, $fn = circular_face_count);
}
translate([case_length / 2 - hinge_length / 2, lid_width / 2 - wall_thickness / 2 - 0.5, -wall_thickness / 2]) rotate([0, 0, 90]) hinge(hinge_length);
translate([-case_length / 2 + hinge_length / 2, lid_width / 2 - wall_thickness / 2 - 0.5, -wall_thickness / 2]) rotate([0, 0, 90]) hinge(hinge_length);
translate([0, -lid_width / 2 + bevel, -3]) rounded_cube([20, wall_thickness / 2, 10], bevel);
color("blue") translate([-9, -lid_width / 2 + 1.5, -6]) rotate([90, 0, 0]) rotate([0, 90, 0]) linear_extrude(18) circle(1, $fn = 3);
color("blue") translate([-9, -lid_width / 2 + 1.5, -7]) rotate([90, 0, 0]) rotate([0, 90, 0]) linear_extrude(18) circle(1, $fn = 3);
}
}
module box_side() {
box_side_slider(case_length, case_width, case_height);
}
module case_base() {
difference() {
rounded_cube([case_length, case_width, wall_thickness + 2], bevel = 0.5);
translate([wall_thickness, 0, 2]) rounded_cube([case_length + threshold, board_width + threshold, 2 + threshold]);
// These give a screw-hole in the center which will allow the clamp to be attached
translate([0, 0, -1]) hexagon(4.5, 2);
translate([0, 0, -wall_thickness / 2]) cylinder(r = 2, h = wall_thickness + threshold, center = true);
// and now a bit of an indentation to help the clip remain in place
translate([0, 0, -4.5]) cube([clasp_width + threshold, clasp_width + threshold, wall_thickness], center = true);
// here are some grooves along the edges that can be used to piece parts together
translate([wall_thickness / 2, case_width / 2 - wall_thickness / 2, wall_thickness / 2])
cube([board_length + wall_thickness, wall_thickness / 2, wall_thickness / 2 + threshold], center = true);
translate([wall_thickness / 2, -case_width / 2 + wall_thickness / 2, wall_thickness / 2])
cube([board_length + wall_thickness, wall_thickness / 2, wall_thickness / 2 + threshold], center = true);
}
}

@@ -1,11 +0,0 @@
include <./control_panel.scad>
/*
difference() {
color("blue") rounded_cube([5, 5, 5], bevel = 0.5);
translate([0, 0, 1]) rounded_cube([4, 4, 4]);
};
*/
case_base();

@@ -1,6 +0,0 @@
include <./control_panel.scad>
lid();
// lamp();

@@ -1,4 +0,0 @@
include <./control_panel.scad>
box_side();

@@ -1,10 +0,0 @@
[package]
name = "lights-core"
version = "0.1.0"
edition = "2021"
# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html
[dependencies]
az = { version = "1" }
fixed = { version = "1" }

@@ -1,472 +0,0 @@
#![no_std]
extern crate alloc;
use alloc::boxed::Box;
use az::*;
use core::{
clone::Clone,
cmp::PartialEq,
default::Default,
ops::{Add, Sub},
option::Option,
};
use fixed::types::{I48F16, I8F8, U128F0, U16F0};
mod patterns;
pub use patterns::*;
mod types;
pub use types::{BodyPattern, DashboardPattern, RGB};
fn calculate_frames(starting_time: U128F0, now: U128F0) -> U16F0 {
let frames_128 = (now - starting_time) / U128F0::from(FPS);
(frames_128 % U128F0::from(U16F0::MAX)).cast()
}
fn calculate_slope(start: I8F8, end: I8F8, frames: U16F0) -> I8F8 {
let slope_i16f16 = (I48F16::from(end) - I48F16::from(start)) / I48F16::from(frames);
slope_i16f16.saturating_as()
}
fn linear_ease(value: I8F8, frames: U16F0, slope: I8F8) -> I8F8 {
let value_i16f16 = I48F16::from(value) + I48F16::from(frames) * I48F16::from(slope);
value_i16f16.saturating_as()
}
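// Together, calculate_slope and linear_ease implement a fixed-point linear
// interpolation: value(frame) = start + frame * (end - start) / total_frames,
// with the division done once per channel in calculate_slope so each tick
// only needs a saturating multiply-add.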
#[derive(Clone, Copy, PartialOrd, Ord, PartialEq, Eq)]
pub struct Instant(pub U128F0);
impl Default for Instant {
fn default() -> Self {
Self(U128F0::from(0_u8))
}
}
impl Add for Instant {
type Output = Self;
fn add(self, r: Self) -> Self::Output {
Self(self.0 + r.0)
}
}
impl Sub for Instant {
type Output = Self;
fn sub(self, r: Self) -> Self::Output {
Self(self.0 - r.0)
}
}
pub const FPS: u8 = 30;
pub trait UI {
fn check_event(&mut self, current_time: Instant) -> Option<Event>;
fn update_lights(&self, dashboard_lights: DashboardPattern, body_lights: BodyPattern);
}
pub trait Animation {
fn tick(&mut self, time: Instant) -> (DashboardPattern, BodyPattern);
}
/*
pub struct DefaultAnimation {}
impl Animation for DefaultAnimation {
fn tick(&mut self, _: Instant) -> (DashboardPattern, BodyPattern) {
(WATER_DASHBOARD, WATER_BODY)
}
}
*/
pub struct Fade {
starting_dashboard: DashboardPattern,
starting_lights: BodyPattern,
start_time: Instant,
dashboard_slope: [RGB<I8F8>; 3],
body_slope: [RGB<I8F8>; 60],
frames: U16F0,
}
impl Fade {
fn new(
dashboard: DashboardPattern,
lights: BodyPattern,
ending_dashboard: DashboardPattern,
ending_lights: BodyPattern,
frames: U16F0,
time: Instant,
) -> Self {
let mut dashboard_slope = [Default::default(); 3];
let mut body_slope = [Default::default(); 60];
for i in 0..3 {
let slope = RGB {
r: calculate_slope(dashboard[i].r, ending_dashboard[i].r, frames),
g: calculate_slope(dashboard[i].g, ending_dashboard[i].g, frames),
b: calculate_slope(dashboard[i].b, ending_dashboard[i].b, frames),
};
dashboard_slope[i] = slope;
}
for i in 0..60 {
let slope = RGB {
r: calculate_slope(lights[i].r, ending_lights[i].r, frames),
g: calculate_slope(lights[i].g, ending_lights[i].g, frames),
b: calculate_slope(lights[i].b, ending_lights[i].b, frames),
};
body_slope[i] = slope;
}
Self {
starting_dashboard: dashboard,
starting_lights: lights,
start_time: time,
dashboard_slope,
body_slope,
frames,
}
}
}
impl Animation for Fade {
fn tick(&mut self, time: Instant) -> (DashboardPattern, BodyPattern) {
let mut frames = calculate_frames(self.start_time.0, time.0);
if frames > self.frames {
frames = self.frames
}
let mut dashboard_pattern: DashboardPattern = OFF_DASHBOARD;
let mut body_pattern: BodyPattern = OFF_BODY;
for i in 0..3 {
dashboard_pattern[i].r = linear_ease(
self.starting_dashboard[i].r,
frames,
self.dashboard_slope[i].r,
);
dashboard_pattern[i].g = linear_ease(
self.starting_dashboard[i].g,
frames,
self.dashboard_slope[i].g,
);
dashboard_pattern[i].b = linear_ease(
self.starting_dashboard[i].b,
frames,
self.dashboard_slope[i].b,
);
}
for i in 0..60 {
body_pattern[i].r =
linear_ease(self.starting_lights[i].r, frames, self.body_slope[i].r);
body_pattern[i].g =
linear_ease(self.starting_lights[i].g, frames, self.body_slope[i].g);
body_pattern[i].b =
linear_ease(self.starting_lights[i].b, frames, self.body_slope[i].b);
}
(dashboard_pattern, body_pattern)
}
}
#[derive(Debug)]
pub enum FadeDirection {
Transition,
FadeIn,
FadeOut,
}
pub enum BlinkerDirection {
Left,
Right,
}
pub struct Blinker {
transition: Fade,
fade_in: Fade,
fade_out: Fade,
direction: FadeDirection,
start_time: Instant,
frames: U16F0,
}
impl Blinker {
fn new(
starting_dashboard: DashboardPattern,
starting_body: BodyPattern,
direction: BlinkerDirection,
time: Instant,
) -> Self {
let mut ending_dashboard = OFF_DASHBOARD;
match direction {
BlinkerDirection::Left => {
ending_dashboard[0].r = LEFT_BLINKER_DASHBOARD[0].r;
ending_dashboard[0].g = LEFT_BLINKER_DASHBOARD[0].g;
ending_dashboard[0].b = LEFT_BLINKER_DASHBOARD[0].b;
}
BlinkerDirection::Right => {
ending_dashboard[2].r = RIGHT_BLINKER_DASHBOARD[2].r;
ending_dashboard[2].g = RIGHT_BLINKER_DASHBOARD[2].g;
ending_dashboard[2].b = RIGHT_BLINKER_DASHBOARD[2].b;
}
}
let mut ending_body = OFF_BODY;
match direction {
BlinkerDirection::Left => {
for i in 0..30 {
ending_body[i].r = LEFT_BLINKER_BODY[i].r;
ending_body[i].g = LEFT_BLINKER_BODY[i].g;
ending_body[i].b = LEFT_BLINKER_BODY[i].b;
}
}
BlinkerDirection::Right => {
for i in 30..60 {
ending_body[i].r = RIGHT_BLINKER_BODY[i].r;
ending_body[i].g = RIGHT_BLINKER_BODY[i].g;
ending_body[i].b = RIGHT_BLINKER_BODY[i].b;
}
}
}
Blinker {
transition: Fade::new(
starting_dashboard,
starting_body,
ending_dashboard,
ending_body,
BLINKER_FRAMES,
time,
),
fade_in: Fade::new(
OFF_DASHBOARD,
OFF_BODY,
ending_dashboard,
ending_body,
BLINKER_FRAMES,
time,
),
fade_out: Fade::new(
ending_dashboard,
ending_body,
OFF_DASHBOARD,
OFF_BODY,
BLINKER_FRAMES,
time,
),
direction: FadeDirection::Transition,
start_time: time,
frames: BLINKER_FRAMES,
}
}
}
impl Animation for Blinker {
fn tick(&mut self, time: Instant) -> (DashboardPattern, BodyPattern) {
let frames = calculate_frames(self.start_time.0, time.0);
if frames > self.frames {
match self.direction {
FadeDirection::Transition => {
self.direction = FadeDirection::FadeOut;
self.fade_out.start_time = time;
}
FadeDirection::FadeIn => {
self.direction = FadeDirection::FadeOut;
self.fade_out.start_time = time;
}
FadeDirection::FadeOut => {
self.direction = FadeDirection::FadeIn;
self.fade_in.start_time = time;
}
}
self.start_time = time;
}
match self.direction {
FadeDirection::Transition => self.transition.tick(time),
FadeDirection::FadeIn => self.fade_in.tick(time),
FadeDirection::FadeOut => self.fade_out.tick(time),
}
}
}
#[derive(Clone, Debug)]
pub enum Event {
Brake,
BrakeRelease,
LeftBlinker,
NextPattern,
PreviousPattern,
RightBlinker,
}
#[derive(Clone, Copy, Debug, PartialEq)]
pub enum Pattern {
Water,
GayPride,
TransPride,
}
impl Pattern {
fn previous(&self) -> Pattern {
match self {
Pattern::Water => Pattern::TransPride,
Pattern::GayPride => Pattern::Water,
Pattern::TransPride => Pattern::GayPride,
}
}
fn next(&self) -> Pattern {
match self {
Pattern::Water => Pattern::GayPride,
Pattern::GayPride => Pattern::TransPride,
Pattern::TransPride => Pattern::Water,
}
}
fn dashboard(&self) -> DashboardPattern {
match self {
Pattern::Water => WATER_DASHBOARD,
Pattern::GayPride => PRIDE_DASHBOARD,
Pattern::TransPride => TRANS_PRIDE_DASHBOARD,
}
}
fn body(&self) -> BodyPattern {
match self {
Pattern::Water => WATER_BODY,
Pattern::GayPride => PRIDE_BODY,
Pattern::TransPride => TRANS_PRIDE_BODY,
}
}
}
#[derive(Clone, Debug, PartialEq)]
pub enum State {
Pattern(Pattern),
Brake,
LeftBlinker,
RightBlinker,
BrakeLeftBlinker,
BrakeRightBlinker,
}
pub struct App {
ui: Box<dyn UI>,
state: State,
home_pattern: Pattern,
current_animation: Box<dyn Animation>,
dashboard_lights: DashboardPattern,
lights: BodyPattern,
}
impl App {
pub fn new(ui: Box<dyn UI>) -> Self {
let pattern = Pattern::Water;
Self {
ui,
state: State::Pattern(pattern),
home_pattern: pattern,
current_animation: Box::new(Fade::new(
OFF_DASHBOARD,
OFF_BODY,
pattern.dashboard(),
pattern.body(),
DEFAULT_FRAMES,
Instant(0_u32.into()),
)),
dashboard_lights: OFF_DASHBOARD,
lights: OFF_BODY,
}
}
fn update_animation(&mut self, time: Instant) {
match self.state {
State::Pattern(ref pattern) => {
self.current_animation = Box::new(Fade::new(
self.dashboard_lights,
self.lights,
pattern.dashboard(),
pattern.body(),
DEFAULT_FRAMES,
time,
))
}
State::Brake => {
self.current_animation = Box::new(Fade::new(
self.dashboard_lights,
self.lights,
BRAKES_DASHBOARD,
BRAKES_BODY,
BRAKES_FRAMES,
time,
));
}
State::LeftBlinker => {
self.current_animation = Box::new(Blinker::new(
self.dashboard_lights,
self.lights,
BlinkerDirection::Left,
time,
));
}
State::RightBlinker => {
self.current_animation = Box::new(Blinker::new(
self.dashboard_lights,
self.lights,
BlinkerDirection::Right,
time,
));
}
State::BrakeLeftBlinker => (),
State::BrakeRightBlinker => (),
}
}
fn update_state(&mut self, event: Event) {
match event {
Event::Brake => {
if self.state == State::Brake {
self.state = State::Pattern(self.home_pattern);
} else {
self.state = State::Brake;
}
}
Event::BrakeRelease => self.state = State::Pattern(self.home_pattern),
Event::LeftBlinker => match self.state {
State::Brake => self.state = State::BrakeLeftBlinker,
State::BrakeLeftBlinker => self.state = State::Brake,
State::LeftBlinker => self.state = State::Pattern(self.home_pattern),
_ => self.state = State::LeftBlinker,
},
Event::NextPattern => if let State::Pattern(ref pattern) = self.state {
self.home_pattern = pattern.next();
self.state = State::Pattern(self.home_pattern);
},
Event::PreviousPattern => if let State::Pattern(ref pattern) = self.state {
self.home_pattern = pattern.previous();
self.state = State::Pattern(self.home_pattern);
},
Event::RightBlinker => match self.state {
State::Brake => self.state = State::BrakeRightBlinker,
State::BrakeRightBlinker => self.state = State::Brake,
State::RightBlinker => self.state = State::Pattern(self.home_pattern),
_ => self.state = State::RightBlinker,
},
}
}
pub fn tick(&mut self, time: Instant) {
if let Some(event) = self.ui.check_event(time) {
self.update_state(event);
self.update_animation(time);
};
let (dashboard, lights) = self.current_animation.tick(time);
self.dashboard_lights = dashboard;
self.lights = lights;
self.ui.update_lights(dashboard, lights);
}
}

@@ -1,400 +0,0 @@
use crate::{BodyPattern, DashboardPattern, RGB};
use fixed::types::{I8F8, U16F0};
pub const RGB_OFF: RGB<I8F8> = RGB {
r: I8F8::lit("0"),
g: I8F8::lit("0"),
b: I8F8::lit("0"),
};
pub const RGB_WHITE: RGB<I8F8> = RGB {
r: I8F8::lit("1"),
g: I8F8::lit("1"),
b: I8F8::lit("1"),
};
pub const BRAKES_RED: RGB<I8F8> = RGB {
r: I8F8::lit("1"),
g: I8F8::lit("0"),
b: I8F8::lit("0"),
};
pub const BLINKER_AMBER: RGB<I8F8> = RGB {
r: I8F8::lit("1"),
g: I8F8::lit("0.15"),
b: I8F8::lit("0"),
};
pub const PRIDE_RED: RGB<I8F8> = RGB {
r: I8F8::lit("0.95"),
g: I8F8::lit("0.00"),
b: I8F8::lit("0.00"),
};
pub const PRIDE_ORANGE: RGB<I8F8> = RGB {
r: I8F8::lit("1.0"),
g: I8F8::lit("0.25"),
b: I8F8::lit("0"),
};
pub const PRIDE_YELLOW: RGB<I8F8> = RGB {
r: I8F8::lit("1.0"),
g: I8F8::lit("0.85"),
b: I8F8::lit("0"),
};
pub const PRIDE_GREEN: RGB<I8F8> = RGB {
r: I8F8::lit("0"),
g: I8F8::lit("0.95"),
b: I8F8::lit("0.05"),
};
pub const PRIDE_INDIGO: RGB<I8F8> = RGB {
r: I8F8::lit("0.04"),
g: I8F8::lit("0.15"),
b: I8F8::lit("0.55"),
};
pub const PRIDE_VIOLET: RGB<I8F8> = RGB {
r: I8F8::lit("0.75"),
g: I8F8::lit("0.0"),
b: I8F8::lit("0.80"),
};
pub const TRANS_BLUE: RGB<I8F8> = RGB {
r: I8F8::lit("0.06"),
g: I8F8::lit("0.41"),
b: I8F8::lit("0.98"),
};
pub const TRANS_PINK: RGB<I8F8> = RGB {
r: I8F8::lit("0.96"),
g: I8F8::lit("0.16"),
b: I8F8::lit("0.32"),
};
pub const WATER_1: RGB<I8F8> = RGB {
r: I8F8::lit("0.0"),
g: I8F8::lit("0.0"),
b: I8F8::lit("0.75"),
};
pub const WATER_2: RGB<I8F8> = RGB {
r: I8F8::lit("0.8"),
g: I8F8::lit("0.8"),
b: I8F8::lit("0.8"),
};
pub const WATER_3: RGB<I8F8> = RGB {
r: I8F8::lit("0.00"),
g: I8F8::lit("0.75"),
b: I8F8::lit("0.75"),
};
pub const OFF_DASHBOARD: DashboardPattern = [RGB_OFF; 3];
pub const OFF_BODY: BodyPattern = [RGB_OFF; 60];
pub const DEFAULT_FRAMES: U16F0 = U16F0::lit("30");
pub const WATER_DASHBOARD: DashboardPattern = [WATER_1, WATER_2, WATER_3];
pub const WATER_BODY: BodyPattern = [
WATER_1,
WATER_1,
WATER_1,
WATER_1,
WATER_1,
WATER_1,
WATER_1,
WATER_1,
WATER_1,
WATER_1,
WATER_2,
WATER_2,
WATER_2,
WATER_2,
WATER_2,
WATER_2,
WATER_2,
WATER_2,
WATER_2,
WATER_2,
WATER_3,
WATER_3,
WATER_3,
WATER_3,
WATER_3,
WATER_3,
WATER_3,
WATER_3,
WATER_3,
WATER_3,
WATER_3,
WATER_3,
WATER_3,
WATER_3,
WATER_3,
WATER_3,
WATER_3,
WATER_3,
WATER_3,
WATER_3,
WATER_2,
WATER_2,
WATER_2,
WATER_2,
WATER_2,
WATER_2,
WATER_2,
WATER_2,
WATER_2,
WATER_2,
WATER_1,
WATER_1,
WATER_1,
WATER_1,
WATER_1,
WATER_1,
WATER_1,
WATER_1,
WATER_1,
WATER_1,
];
pub const PRIDE_DASHBOARD: DashboardPattern = [PRIDE_RED, PRIDE_GREEN, PRIDE_INDIGO];
pub const PRIDE_BODY: BodyPattern = [
// Left Side
// Red
PRIDE_RED,
PRIDE_RED,
PRIDE_RED,
PRIDE_RED,
PRIDE_RED,
// Orange
PRIDE_ORANGE,
PRIDE_ORANGE,
PRIDE_ORANGE,
PRIDE_ORANGE,
PRIDE_ORANGE,
// Yellow
PRIDE_YELLOW,
PRIDE_YELLOW,
PRIDE_YELLOW,
PRIDE_YELLOW,
PRIDE_YELLOW,
// Green
PRIDE_GREEN,
PRIDE_GREEN,
PRIDE_GREEN,
PRIDE_GREEN,
PRIDE_GREEN,
// Indigo
PRIDE_INDIGO,
PRIDE_INDIGO,
PRIDE_INDIGO,
PRIDE_INDIGO,
PRIDE_INDIGO,
// Violet
PRIDE_VIOLET,
PRIDE_VIOLET,
PRIDE_VIOLET,
PRIDE_VIOLET,
PRIDE_VIOLET,
// Right Side
// Violet
PRIDE_VIOLET,
PRIDE_VIOLET,
PRIDE_VIOLET,
PRIDE_VIOLET,
PRIDE_VIOLET,
// Indigo
PRIDE_INDIGO,
PRIDE_INDIGO,
PRIDE_INDIGO,
PRIDE_INDIGO,
PRIDE_INDIGO,
// Green
PRIDE_GREEN,
PRIDE_GREEN,
PRIDE_GREEN,
PRIDE_GREEN,
PRIDE_GREEN,
// Yellow
PRIDE_YELLOW,
PRIDE_YELLOW,
PRIDE_YELLOW,
PRIDE_YELLOW,
PRIDE_YELLOW,
// Orange
PRIDE_ORANGE,
PRIDE_ORANGE,
PRIDE_ORANGE,
PRIDE_ORANGE,
PRIDE_ORANGE,
// Red
PRIDE_RED,
PRIDE_RED,
PRIDE_RED,
PRIDE_RED,
PRIDE_RED,
];
pub const TRANS_PRIDE_DASHBOARD: DashboardPattern = [TRANS_BLUE, RGB_WHITE, TRANS_PINK];
pub const TRANS_PRIDE_BODY: BodyPattern = [
// Left Side
TRANS_BLUE, TRANS_BLUE, TRANS_BLUE, TRANS_BLUE, TRANS_BLUE, TRANS_BLUE, TRANS_PINK, TRANS_PINK,
TRANS_PINK, TRANS_PINK, TRANS_PINK, TRANS_PINK, RGB_WHITE, RGB_WHITE, RGB_WHITE, RGB_WHITE,
RGB_WHITE, RGB_WHITE, TRANS_PINK, TRANS_PINK, TRANS_PINK, TRANS_PINK, TRANS_PINK, TRANS_PINK,
TRANS_BLUE, TRANS_BLUE, TRANS_BLUE, TRANS_BLUE, TRANS_BLUE, TRANS_BLUE,
// Right side
TRANS_BLUE, TRANS_BLUE, TRANS_BLUE, TRANS_BLUE, TRANS_BLUE, TRANS_BLUE, TRANS_PINK, TRANS_PINK,
TRANS_PINK, TRANS_PINK, TRANS_PINK, TRANS_PINK, RGB_WHITE, RGB_WHITE, RGB_WHITE, RGB_WHITE,
RGB_WHITE, RGB_WHITE, TRANS_PINK, TRANS_PINK, TRANS_PINK, TRANS_PINK, TRANS_PINK, TRANS_PINK,
TRANS_BLUE, TRANS_BLUE, TRANS_BLUE, TRANS_BLUE, TRANS_BLUE, TRANS_BLUE,
];
pub const BRAKES_FRAMES: U16F0 = U16F0::lit("15");
pub const BRAKES_DASHBOARD: DashboardPattern = [BRAKES_RED; 3];
pub const BRAKES_BODY: BodyPattern = [BRAKES_RED; 60];
pub const BLINKER_FRAMES: U16F0 = U16F0::lit("10");
pub const LEFT_BLINKER_DASHBOARD: DashboardPattern = [BLINKER_AMBER, RGB_OFF, RGB_OFF];
pub const LEFT_BLINKER_BODY: BodyPattern = [
BLINKER_AMBER,
BLINKER_AMBER,
BLINKER_AMBER,
BLINKER_AMBER,
BLINKER_AMBER,
BLINKER_AMBER,
BLINKER_AMBER,
BLINKER_AMBER,
BLINKER_AMBER,
BLINKER_AMBER,
BLINKER_AMBER,
BLINKER_AMBER,
BLINKER_AMBER,
BLINKER_AMBER,
BLINKER_AMBER,
BLINKER_AMBER,
BLINKER_AMBER,
BLINKER_AMBER,
BLINKER_AMBER,
BLINKER_AMBER,
BLINKER_AMBER,
BLINKER_AMBER,
BLINKER_AMBER,
BLINKER_AMBER,
BLINKER_AMBER,
BLINKER_AMBER,
BLINKER_AMBER,
BLINKER_AMBER,
BLINKER_AMBER,
BLINKER_AMBER,
RGB_OFF,
RGB_OFF,
RGB_OFF,
RGB_OFF,
RGB_OFF,
RGB_OFF,
RGB_OFF,
RGB_OFF,
RGB_OFF,
RGB_OFF,
RGB_OFF,
RGB_OFF,
RGB_OFF,
RGB_OFF,
RGB_OFF,
RGB_OFF,
RGB_OFF,
RGB_OFF,
RGB_OFF,
RGB_OFF,
RGB_OFF,
RGB_OFF,
RGB_OFF,
RGB_OFF,
RGB_OFF,
RGB_OFF,
RGB_OFF,
RGB_OFF,
RGB_OFF,
RGB_OFF,
];
pub const RIGHT_BLINKER_DASHBOARD: DashboardPattern = [RGB_OFF, RGB_OFF, BLINKER_AMBER];
pub const RIGHT_BLINKER_BODY: BodyPattern = [
RGB_OFF,
RGB_OFF,
RGB_OFF,
RGB_OFF,
RGB_OFF,
RGB_OFF,
RGB_OFF,
RGB_OFF,
RGB_OFF,
RGB_OFF,
RGB_OFF,
RGB_OFF,
RGB_OFF,
RGB_OFF,
RGB_OFF,
RGB_OFF,
RGB_OFF,
RGB_OFF,
RGB_OFF,
RGB_OFF,
RGB_OFF,
RGB_OFF,
RGB_OFF,
RGB_OFF,
RGB_OFF,
RGB_OFF,
RGB_OFF,
RGB_OFF,
RGB_OFF,
RGB_OFF,
BLINKER_AMBER,
BLINKER_AMBER,
BLINKER_AMBER,
BLINKER_AMBER,
BLINKER_AMBER,
BLINKER_AMBER,
BLINKER_AMBER,
BLINKER_AMBER,
BLINKER_AMBER,
BLINKER_AMBER,
BLINKER_AMBER,
BLINKER_AMBER,
BLINKER_AMBER,
BLINKER_AMBER,
BLINKER_AMBER,
BLINKER_AMBER,
BLINKER_AMBER,
BLINKER_AMBER,
BLINKER_AMBER,
BLINKER_AMBER,
BLINKER_AMBER,
BLINKER_AMBER,
BLINKER_AMBER,
BLINKER_AMBER,
BLINKER_AMBER,
BLINKER_AMBER,
BLINKER_AMBER,
BLINKER_AMBER,
BLINKER_AMBER,
BLINKER_AMBER,
];
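// Sketch (not part of the original file): defining one more palette in the same style,
// to show how a new home pattern would be declared alongside the constants above.
pub const EMBER: RGB<I8F8> = RGB {
    r: I8F8::lit("0.9"),
    g: I8F8::lit("0.35"),
    b: I8F8::lit("0"),
};
pub const EMBER_DASHBOARD: DashboardPattern = [EMBER; 3];
pub const EMBER_BODY: BodyPattern = [EMBER; 60];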

View File

@ -1,17 +0,0 @@
use core::default::Default;
use fixed::types::I8F8;
#[derive(Clone, Copy, Default, Debug)]
pub struct RGB<T> {
pub r: T,
pub g: T,
pub b: T,
}
const DASHBOARD_LIGHT_COUNT: usize = 3;
pub type DashboardPattern = [RGB<I8F8>; DASHBOARD_LIGHT_COUNT];
const BODY_LIGHT_COUNT: usize = 60;
pub type BodyPattern = [RGB<I8F8>; BODY_LIGHT_COUNT];
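// Sketch (not part of the original file): a small helper in the spirit of these types.
// It dims a colour by a fixed-point factor, which is useful when fading a pattern in or out.
pub fn dim(color: RGB<I8F8>, factor: I8F8) -> RGB<I8F8> {
    RGB {
        r: color.r * factor,
        g: color.g * factor,
        b: color.b * factor,
    }
}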

View File

@ -1,17 +0,0 @@
[package]
name = "simulator"
version = "0.1.0"
edition = "2021"
# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html
[dependencies]
adw = { version = "0.5", package = "libadwaita", features = [ "v1_2" ] }
async-std = "1.13.0"
cairo-rs = { version = "0.18" }
fixed = { version = "1" }
gio = { version = "0.18" }
glib = { version = "0.18" }
gtk = { version = "0.7", package = "gtk4", features = [ "v4_8" ] }
lights-core = { path = "../core" }
pango = { version = "*" }

View File

@ -1,315 +0,0 @@
use adw::prelude::*;
use async_std::channel::Sender;
use fixed::types::{I8F8, U128F0};
use glib::Object;
use gtk::subclass::prelude::*;
use lights_core::{
App, BodyPattern, DashboardPattern, Event, Instant, FPS, OFF_BODY, OFF_DASHBOARD, RGB, UI,
};
use std::{
cell::RefCell,
env,
rc::Rc,
sync::mpsc::{Receiver, TryRecvError},
};
const WIDTH: i32 = 640;
const HEIGHT: i32 = 480;
pub struct Update {
dashboard: DashboardPattern,
lights: BodyPattern,
}
pub struct DashboardLightsPrivate {
lights: Rc<RefCell<DashboardPattern>>,
}
#[glib::object_subclass]
impl ObjectSubclass for DashboardLightsPrivate {
const NAME: &'static str = "DashboardLights";
type Type = DashboardLights;
type ParentType = gtk::DrawingArea;
fn new() -> Self {
Self {
lights: Rc::new(RefCell::new(OFF_DASHBOARD)),
}
}
}
impl ObjectImpl for DashboardLightsPrivate {}
impl WidgetImpl for DashboardLightsPrivate {}
impl DrawingAreaImpl for DashboardLightsPrivate {}
glib::wrapper! {
pub struct DashboardLights(ObjectSubclass<DashboardLightsPrivate>) @extends gtk::DrawingArea, gtk::Widget;
}
impl Default for DashboardLights {
fn default() -> Self {
Self::new()
}
}
impl DashboardLights {
pub fn new() -> Self {
let s: Self = Object::builder().build();
s.set_width_request(WIDTH);
s.set_height_request(100);
s.set_draw_func({
let s = s.clone();
move |_, context, width, _| {
let start = width as f64 / 2. - 150.;
let lights = s.imp().lights.borrow();
for i in 0..3 {
context.set_source_rgb(
lights[i].r.into(),
lights[i].g.into(),
lights[i].b.into(),
);
context.rectangle(start + 100. * i as f64, 10., 80., 80.);
let _ = context.fill();
}
}
});
s
}
pub fn set_lights(&self, lights: DashboardPattern) {
*self.imp().lights.borrow_mut() = lights;
self.queue_draw();
}
}
pub struct BikeLightsPrivate {
lights: Rc<RefCell<BodyPattern>>,
}
#[glib::object_subclass]
impl ObjectSubclass for BikeLightsPrivate {
const NAME: &'static str = "BikeLights";
type Type = BikeLights;
type ParentType = gtk::DrawingArea;
fn new() -> Self {
Self {
lights: Rc::new(RefCell::new(OFF_BODY)),
}
}
}
impl ObjectImpl for BikeLightsPrivate {}
impl WidgetImpl for BikeLightsPrivate {}
impl DrawingAreaImpl for BikeLightsPrivate {}
glib::wrapper! {
pub struct BikeLights(ObjectSubclass<BikeLightsPrivate>) @extends gtk::DrawingArea, gtk::Widget;
}
impl Default for BikeLights {
fn default() -> Self {
Self::new()
}
}
impl BikeLights {
pub fn new() -> Self {
let s: Self = Object::builder().build();
s.set_width_request(WIDTH);
s.set_height_request(640);
let center = WIDTH as f64 / 2.;
s.set_draw_func({
let s = s.clone();
move |_, context, _, _| {
let lights = s.imp().lights.borrow();
for i in 0..30 {
context.set_source_rgb(
lights[i].r.into(),
lights[i].g.into(),
lights[i].b.into(),
);
context.rectangle(center - 45., 5. + 20. * i as f64, 15., 15.);
let _ = context.fill();
}
for i in 0..30 {
context.set_source_rgb(
lights[i + 30].r.into(),
lights[i + 30].g.into(),
lights[i + 30].b.into(),
);
context.rectangle(center + 15., 5. + 20. * (30. - (i + 1) as f64), 15., 15.);
let _ = context.fill();
}
}
});
s
}
pub fn set_lights(&self, lights: [RGB<I8F8>; 60]) {
*self.imp().lights.borrow_mut() = lights;
self.queue_draw();
}
}
struct GTKUI {
tx: Sender<Update>,
rx: Receiver<Event>,
}
impl UI for GTKUI {
fn check_event(&mut self, _: Instant) -> Option<Event> {
match self.rx.try_recv() {
Ok(event) => Some(event),
Err(TryRecvError::Empty) => None,
Err(TryRecvError::Disconnected) => None,
}
}
fn update_lights(&self, dashboard_lights: DashboardPattern, lights: BodyPattern) {
let tx = self.tx.clone();
glib::spawn_future(async move {
let _ = tx
.send(Update {
dashboard: dashboard_lights,
lights,
})
.await;
});
}
}
fn main() {
let adw_app = adw::Application::builder()
.application_id("com.luminescent-dreams.bike-light-simulator")
.build();
adw_app.connect_activate(move |adw_app| {
let (update_tx, update_rx) = async_std::channel::unbounded();
let (event_tx, event_rx) = std::sync::mpsc::channel();
std::thread::spawn(move || {
let mut bike_app = App::new(Box::new(GTKUI {
tx: update_tx,
rx: event_rx,
}));
loop {
bike_app.tick(Instant(U128F0::from(
std::time::SystemTime::now()
.duration_since(std::time::UNIX_EPOCH)
.unwrap()
.as_millis(),
)));
std::thread::sleep(std::time::Duration::from_millis(1000 / (FPS as u64)));
}
});
let window = adw::ApplicationWindow::builder()
.application(adw_app)
.default_width(WIDTH)
.default_height(HEIGHT)
.build();
let layout = gtk::Box::builder()
.orientation(gtk::Orientation::Vertical)
.build();
let controls = gtk::Box::builder()
.orientation(gtk::Orientation::Horizontal)
.build();
let dashboard_lights = DashboardLights::new();
let bike_lights = BikeLights::new();
let left_button = gtk::Button::builder().label("L").build();
let brake_button = gtk::Button::builder().label("Brakes").build();
let right_button = gtk::Button::builder().label("R").build();
left_button.connect_clicked({
let event_tx = event_tx.clone();
move |_| {
let _ = event_tx.send(Event::LeftBlinker);
}
});
brake_button.connect_clicked({
let event_tx = event_tx.clone();
move |_| {
let _ = event_tx.send(Event::Brake);
}
});
right_button.connect_clicked({
let event_tx = event_tx.clone();
move |_| {
let _ = event_tx.send(Event::RightBlinker);
}
});
controls.append(&left_button);
controls.append(&brake_button);
controls.append(&right_button);
layout.append(&controls);
let pattern_controls = gtk::Box::builder()
.orientation(gtk::Orientation::Horizontal)
.build();
let previous_pattern = gtk::Button::builder().label("Previous").build();
let next_pattern = gtk::Button::builder().label("Next").build();
previous_pattern.connect_clicked({
let event_tx = event_tx.clone();
move |_| {
let _ = event_tx.send(Event::PreviousPattern);
}
});
next_pattern.connect_clicked({
let event_tx = event_tx.clone();
move |_| {
let _ = event_tx.send(Event::NextPattern);
}
});
pattern_controls.append(&previous_pattern);
pattern_controls.append(&next_pattern);
layout.append(&pattern_controls);
layout.append(&dashboard_lights);
layout.append(&bike_lights);
glib::spawn_future_local({
let dashboard_lights = dashboard_lights.clone();
let bike_lights = bike_lights.clone();
async move {
while let Ok(Update { dashboard, lights }) = update_rx.recv().await {
dashboard_lights.set_lights(dashboard);
bike_lights.set_lights(lights);
}
}
});
/*
update_rx.attach(None, {
let dashboard_lights = dashboard_lights.clone();
let bike_lights = bike_lights.clone();
move |Update { dashboard, lights }| {
dashboard_lights.set_lights(dashboard);
bike_lights.set_lights(lights);
glib::ControlFlow::Continue
}
});
*/
window.set_content(Some(&layout));
window.present();
});
let args: Vec<String> = env::args().collect();
ApplicationExtManual::run_with_args(&adw_app, &args);
}

View File

@ -3,6 +3,7 @@ name = "changeset"
 version = "0.1.0"
 edition = "2021"
 license = "GPL-3.0-only"
+license-file = "../COPYING"
 # See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html

9
changeset/Makefile Normal file
View File

@ -0,0 +1,9 @@
dev:
cargo watch -x build
test:
cargo watch -x test
test-once:
cargo test

View File

@ -26,7 +26,7 @@ pub enum Change<Key: Eq + Hash, Value> {
     NewRecord(Value),
 }
-#[derive(Clone, Debug, Default)]
+#[derive(Clone, Debug)]
 pub struct Changeset<Key: Clone + Eq + Hash, Value> {
     delete: HashSet<Key>,
     update: HashMap<Key, Value>,
@ -34,6 +34,14 @@ pub struct Changeset<Key: Clone + Eq + Hash, Value> {
 }
 impl<Key: Clone + Constructable + Eq + Hash, Value> Changeset<Key, Value> {
+    pub fn new() -> Self {
+        Self {
+            delete: HashSet::new(),
+            update: HashMap::new(),
+            new: HashMap::new(),
+        }
+    }
     pub fn add(&mut self, r: Value) -> Key {
         let k = Key::new();
         self.new.insert(k.clone(), r);
@ -82,7 +90,7 @@ impl<Key: Clone + Eq + Hash, Value> From<Changeset<Key, Value>> for Vec<Change<K
             .into_iter()
             .map(|(k, v)| Change::UpdateRecord((k, v))),
         )
-        .chain(new.into_values().map(|v| Change::NewRecord(v)))
+        .chain(new.into_iter().map(|(_, v)| Change::NewRecord(v)))
         .collect()
     }
 }
@ -92,7 +100,7 @@ mod tests {
     use super::*;
     use uuid::Uuid;
-    #[derive(Clone, PartialEq, Eq, Hash, Default)]
+    #[derive(Clone, PartialEq, Eq, Hash)]
     struct Id(Uuid);
     impl Constructable for Id {
         fn new() -> Self {
@ -102,7 +110,7 @@ mod tests {
     #[test]
     fn it_generates_a_new_record() {
-        let mut set: Changeset<Id, String> = Changeset::default();
+        let mut set: Changeset<Id, String> = Changeset::new();
         set.add("efgh".to_string());
         let changes = Vec::from(set.clone());
         assert_eq!(changes.len(), 1);
@ -117,7 +125,7 @@ mod tests {
     #[test]
     fn it_generates_a_delete_record() {
-        let mut set: Changeset<Id, String> = Changeset::default();
+        let mut set: Changeset<Id, String> = Changeset::new();
         let id1 = Id::new();
         set.delete(id1.clone());
         let changes = Vec::from(set.clone());
@ -134,7 +142,7 @@ mod tests {
     #[test]
     fn update_unrelated_records() {
-        let mut set: Changeset<Id, String> = Changeset::default();
+        let mut set: Changeset<Id, String> = Changeset::new();
         let id1 = Id::new();
         let id2 = Id::new();
         set.update(id1.clone(), "abcd".to_owned());
@ -147,7 +155,7 @@ mod tests {
     #[test]
     fn delete_cancels_new() {
-        let mut set: Changeset<Id, String> = Changeset::default();
+        let mut set: Changeset<Id, String> = Changeset::new();
         let key = set.add("efgh".to_string());
         set.delete(key);
         let changes = Vec::from(set);
@ -156,7 +164,7 @@ mod tests {
     #[test]
     fn delete_cancels_update() {
-        let mut set: Changeset<Id, String> = Changeset::default();
+        let mut set: Changeset<Id, String> = Changeset::new();
         let id = Id::new();
         set.update(id.clone(), "efgh".to_owned());
         set.delete(id.clone());
@ -167,7 +175,7 @@ mod tests {
     #[test]
     fn update_atop_new_is_new() {
-        let mut set: Changeset<Id, String> = Changeset::default();
+        let mut set: Changeset<Id, String> = Changeset::new();
         let key = set.add("efgh".to_owned());
         set.update(key, "wxyz".to_owned());
         let changes = Vec::from(set);
@ -177,7 +185,7 @@ mod tests {
     #[test]
     fn updates_get_squashed() {
-        let mut set: Changeset<Id, String> = Changeset::default();
+        let mut set: Changeset<Id, String> = Changeset::new();
         let id1 = Id::new();
         let id2 = Id::new();
         set.update(id1.clone(), "efgh".to_owned());
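// Sketch (not part of the diff above): the Changeset API as the tests exercise it.
// Edits accumulate against keys and are then collapsed into a list of Change values.
// `Id` is the Uuid newtype from the test module.
let mut set: Changeset<Id, String> = Changeset::new();
let key = set.add("first draft".to_owned());
set.update(key.clone(), "second draft".to_owned());
let changes: Vec<Change<Id, String>> = Vec::from(set);
// Per update_atop_new_is_new above, the update is folded into the pending NewRecord.
assert_eq!(changes.len(), 1);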

View File

@ -1,14 +0,0 @@
[package]
name = "config-derive"
version = "0.1.0"
edition = "2021"
# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html
[lib]
proc-macro = true
[dependencies]
quote = { version = "1" }
syn = { version = "1", features = [ "extra-traits" ] }

View File

@ -1,23 +0,0 @@
extern crate proc_macro;
use proc_macro::TokenStream;
use quote::quote;
use syn::{parse_macro_input, DeriveInput};
#[proc_macro_derive(ConfigOption)]
pub fn derive(input: TokenStream) -> TokenStream {
let DeriveInput { ident, .. } = parse_macro_input!(input as DeriveInput);
let result = quote! {
impl From<&Config> for Option<#ident> {
fn from(config: &Config) -> Self {
match config.values.get(&ConfigName::#ident) {
Some(ConfigOption::#ident(val)) => Some(val.clone()),
_ => None,
}
}
}
};
result.into()
}
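// Sketch (not part of the original file): roughly what this derive expands to for a
// concrete type such as `DatabasePath`, assuming the Config/ConfigName/ConfigOption
// items generated by the config crate's define_config! macro elsewhere in this tree:
//
// impl From<&Config> for Option<DatabasePath> {
//     fn from(config: &Config) -> Self {
//         match config.values.get(&ConfigName::DatabasePath) {
//             Some(ConfigOption::DatabasePath(val)) => Some(val.clone()),
//             _ => None,
//         }
//     }
// }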

View File

@ -1,16 +0,0 @@
[package]
name = "config"
version = "0.1.0"
edition = "2021"
# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html
[dependencies]
config-derive = { path = "../config-derive" }
serde_json = { version = "1" }
serde = { version = "1", features = [ "derive" ] }
thiserror = { version = "1" }
[dev-dependencies]
cool_asserts = { version = "2" }

View File

@ -1,166 +0,0 @@
/*
use std::{
collections::HashMap,
fs::File,
hash::Hash,
io::{ErrorKind, Read},
path::PathBuf,
};
*/
pub use config_derive::ConfigOption;
use thiserror::Error;
#[derive(Debug, Error)]
pub enum ConfigReadError {
#[error("Cannot read the configuration file: {0}")]
CannotRead(std::io::Error),
#[error("Cannot open the configuration file for reading: {0}")]
CannotOpen(std::io::Error),
#[error("Invalid json data found in the configurationfile: {0}")]
InvalidJSON(serde_json::Error),
}
#[macro_export]
macro_rules! define_config {
($($name:ident($struct:ident),)+) => (
#[derive(Clone, Debug, Hash, PartialEq, Eq, serde::Serialize, serde::Deserialize)]
pub enum ConfigName {
$($name),+
}
#[derive(Clone, Debug, serde::Serialize, serde::Deserialize)]
#[serde(untagged)]
pub enum ConfigOption {
$($name($struct)),+
}
#[derive(Clone, Debug, serde::Serialize, serde::Deserialize)]
pub struct Config {
values: std::collections::HashMap<ConfigName, ConfigOption>,
}
impl Default for Config {
fn default() -> Self {
Self::new()
}
}
impl Config {
pub fn new() -> Self {
Self {
values: std::collections::HashMap::new(),
}
}
pub fn from_path(config_path: std::path::PathBuf) -> Result<Self, $crate::ConfigReadError> {
let mut settings = config_path.clone();
settings.push("config");
match std::fs::File::open(settings) {
Ok(mut file) => {
let mut buf = String::new();
std::io::Read::read_to_string(&mut file, &mut buf)
.map_err(|err| $crate::ConfigReadError::CannotRead(err))?;
let values = serde_json::from_str(buf.as_ref())
.map_err(|err| $crate::ConfigReadError::InvalidJSON(err))?;
Ok(Self {
values,
})
}
Err(io_err) => {
match io_err.kind() {
std::io::ErrorKind::NotFound => {
/* create the path and an empty file */
Ok(Self {
values: std::collections::HashMap::new(),
})
}
_ => Err($crate::ConfigReadError::CannotOpen(io_err)),
}
}
}
}
pub fn set(&mut self, val: ConfigOption) {
let _ = match val {
$(ConfigOption::$struct(_) => self.values.insert(ConfigName::$name, val)),+
};
}
pub fn get<'a, T>(&'a self) -> Option<T>
where
Option<T>: From<&'a Self>,
{
self.into()
}
}
)
}
#[cfg(test)]
mod test {
use super::*;
use cool_asserts::assert_matches;
use serde::{Deserialize, Serialize};
use std::collections::HashMap;
use std::path::PathBuf;
define_config! {
DatabasePath(DatabasePath),
Me(Me),
}
#[derive(Clone, Debug, PartialEq, Eq, Serialize, Deserialize, ConfigOption)]
pub struct DatabasePath(PathBuf);
#[derive(Clone, Debug, PartialEq, Eq, Serialize, Deserialize)]
enum Rank {
Kyu(i8),
Dan(i8),
}
#[derive(Clone, Debug, PartialEq, Eq, Serialize, Deserialize, ConfigOption)]
pub struct Me {
name: String,
rank: Option<Rank>,
}
#[test]
fn it_can_set_and_get_options() {
let mut config: Config = Config::new();
config.set(ConfigOption::DatabasePath(DatabasePath(PathBuf::from(
"./fixtures/five_games",
))));
assert_eq!(
Some(DatabasePath(PathBuf::from("./fixtures/five_games"))),
config.get()
);
}
#[test]
fn it_can_serialize_and_deserialize() {
let mut config = Config::new();
config.set(ConfigOption::DatabasePath(DatabasePath(PathBuf::from(
"fixtures/five_games",
))));
config.set(ConfigOption::Me(Me {
name: "Savanni".to_owned(),
rank: Some(Rank::Kyu(10)),
}));
let s = serde_json::to_string(&config.values).unwrap();
println!("{}", s);
let values: HashMap<ConfigName, ConfigOption> = serde_json::from_str(s.as_ref()).unwrap();
println!("options: {:?}", values);
assert_matches!(values.get(&ConfigName::DatabasePath),
Some(ConfigOption::DatabasePath(ref db_path)) =>
assert_eq!(Some(db_path.clone()), config.get())
);
assert_matches!(values.get(&ConfigName::Me), Some(ConfigOption::Me(val)) =>
assert_eq!(Some(val.clone()), config.get())
);
}
}

View File

@ -3,6 +3,7 @@ name = "coordinates"
 version = "0.1.0"
 edition = "2021"
 license = "GPL-3.0-only"
+license-file = "../COPYING"
 # See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html

9
coordinates/Makefile Normal file
View File

@ -0,0 +1,9 @@
dev:
cargo watch -x build
test:
cargo watch -x test
test-once:
cargo test

View File

@ -33,12 +33,12 @@ fn main() {
     let filename = args
         .next()
-        .map(PathBuf::from)
+        .map(|p| PathBuf::from(p))
         .expect("A filename is required");
     let size = args
         .next()
         .and_then(|s| s.parse::<usize>().ok())
         .unwrap_or(3);
     let map: hex_map::Map<MapVal> = hex_map::Map::new_hexagonal(size);
-    hex_map::write_file(filename, map).expect("to write file");
+    hex_map::write_file(filename, map);
 }

View File

@ -10,9 +10,10 @@ Luminescent Dreams Tools is distributed in the hope that it will be useful, but
 You should have received a copy of the GNU General Public License along with Lumeto. If not, see <https://www.gnu.org/licenses/>.
 */
-/// This module contains the elements of cube coordinates.
+/// Ĉi-tiu modulo enhavas la elementojn por kub-koordinato.
 ///
 /// This code is based on https://www.redblobgames.com/grids/hexagons/
+use crate::Error;
 use std::collections::HashSet;
 /// An address within the hex coordinate system
@ -61,7 +62,7 @@ impl AxialAddr {
     pub fn is_adjacent(&self, dest: &AxialAddr) -> bool {
         dest.adjacencies()
             .collect::<Vec<AxialAddr>>()
-            .contains(self)
+            .contains(&self)
     }
     /// Measure the distance to a destination
@ -78,7 +79,7 @@ impl AxialAddr {
         positions.push(item);
-        while !positions.is_empty() {
+        while positions.len() > 0 {
             let elem = positions.remove(0);
             for adj in elem.adjacencies() {
                 if self.distance(&adj) <= distance && !results.contains(&adj) {
@ -145,7 +146,7 @@ mod tests {
         let coord2 = &lst1[idx];
         assert!(coord2.is_adjacent(&coord1));
-        assert!(coord1.is_adjacent(coord2));
+        assert!(coord1.is_adjacent(&coord2));
     }
     #[test]
@ -166,10 +167,10 @@ mod tests {
         let hexaddr = AxialAddr::new(q, r);
         let en_distancaj_hexaddr: Vec<AxialAddr> = hexaddr.addresses(distance).collect();
-        let expected_cnt = (0..distance+1).map(|v| v * 6).fold(1, |acc, val| acc + val);
+        let expected_cnt = ((0..distance+1).map(|v| v * 6).fold(1, |acc, val| acc + val)) as usize;
         assert_eq!(en_distancaj_hexaddr.len(), expected_cnt);
         for c in en_distancaj_hexaddr {
-            assert!(c.distance(&hexaddr) <= distance);
+            assert!(c.distance(&hexaddr) <= distance as usize);
         }
     }
 }

View File

@ -14,6 +14,7 @@ use crate::{hex::AxialAddr, Error};
 use nom::{
     bytes::complete::tag,
     character::complete::alphanumeric1,
+    error::ParseError,
     multi::many1,
     sequence::{delimited, separated_pair},
     Finish, IResult, Parser,
@ -80,7 +81,7 @@ pub fn parse_data<'a, A: Default + From<String>>(
     }
     let cells = data
-        .map(|line| parse_line::<A>(line).unwrap())
+        .map(|line| parse_line::<A>(&line).unwrap())
         .collect::<Vec<(AxialAddr, A)>>();
     let cells = cells.into_iter().collect::<HashMap<AxialAddr, A>>();
     Map { cells }

View File

@ -9,9 +9,9 @@ Lumeto is distributed in the hope that it will be useful, but WITHOUT ANY WARRAN
 You should have received a copy of the GNU General Public License along with Lumeto. If not, see <https://www.gnu.org/licenses/>.
 */
-use thiserror::Error;
+use thiserror;
-#[derive(Debug, Error)]
+#[derive(Debug, thiserror::Error)]
 pub enum Error {
     #[error("IO error on reading or writing: {0}")]
     IO(std::io::Error),

View File

@ -1,4 +0,0 @@
{
"git+https://github.com/yewstack/yew/#yew-macro@0.21.0": "1g47mpyzd2mib73cjrbmcivrp7kr16f6hbrmpaap56kbc518khwf",
"git+https://github.com/yewstack/yew/#yew@0.21.0": "1g47mpyzd2mib73cjrbmcivrp7kr16f6hbrmpaap56kbc518khwf"
}

View File

@ -1,16 +0,0 @@
[package]
name = "cyber-slides"
version = "0.1.0"
edition = "2021"
# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html
[dependencies]
async-std = "1.13.0"
cairo-rs = "0.18"
cyberpunk = { path = "../cyberpunk" }
gio = "0.18"
glib = "0.18"
gtk = { version = "0.7", package = "gtk4" }
serde = { version = "1.0.210", features = ["derive"] }
serde_yml = "0.0.12"

View File

@ -1,410 +0,0 @@
use std::{
cell::RefCell,
collections::HashMap,
fs::File,
io::Read,
ops::Index,
path::Path,
rc::Rc,
sync::{Arc, RwLock},
time::{Duration, Instant},
};
use cairo::{Context, Rectangle};
use cyberpunk::{AsymLine, AsymLineCutout, GlowPen, Pen, Text};
use glib::Object;
use gtk::{
glib::{self},
prelude::*,
subclass::prelude::*,
EventControllerKey,
};
use serde::{Deserialize, Serialize};
const FPS: u64 = 60;
const PURPLE: (f64, f64, f64) = (0.7, 0., 1.);
#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, Eq, Hash)]
#[serde(rename_all = "lowercase")]
enum Position {
Top,
Middle,
Bottom,
}
#[derive(Serialize, Deserialize, Debug, Clone)]
struct Step {
text: String,
position: Position,
transition: Duration,
}
#[derive(Serialize, Deserialize, Debug, Clone)]
#[derive(Default)]
struct Script(Vec<Step>);
impl Script {
fn from_file(path: &Path) -> Result<Script, serde_yml::Error> {
let mut buf: Vec<u8> = Vec::new();
let mut f = File::open(path).unwrap();
f.read_to_end(&mut buf).unwrap();
let script = serde_yml::from_slice(&buf)?;
Ok(Self(script))
}
fn iter(&self) -> impl Iterator<Item = &'_ Step> {
self.0.iter()
}
fn len(&self) -> usize {
self.0.len()
}
}
impl Index<usize> for Script {
type Output = Step;
fn index(&self, index: usize) -> &Self::Output {
&self.0[index]
}
}
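// Sketch (not part of the original file): the YAML that Script::from_file expects, a
// top-level list of steps with `transition` in serde's default Duration encoding
// (secs/nanos) and `position` in lowercase. Example document:
//
//   - text: "Coding together"
//     position: top
//     transition:
//       secs: 2
//       nanos: 0
//   - text: "One commit at a time"
//     position: middle
//     transition:
//       secs: 1
//       nanos: 500000000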
struct Fade {
text: String,
position: Position,
duration: Duration,
start_time: Instant,
}
trait Animation {
fn position(&self) -> Position;
fn tick(&self, now: Instant, context: &Context, width: f64);
}
impl Animation for Fade {
fn position(&self) -> Position {
self.position.clone()
}
fn tick(&self, now: Instant, context: &Context, width: f64) {
let total_frames = self.duration.as_secs() * FPS;
let alpha_rate: f64 = 1. / total_frames as f64;
let frames = (now - self.start_time).as_secs_f64() * FPS as f64;
let alpha = alpha_rate * frames;
let text_display = Text::new(self.text.clone(), context, 64., width);
context.move_to(0., text_display.extents().height());
context.set_source_rgba(PURPLE.0, PURPLE.1, PURPLE.2, alpha);
text_display.draw();
}
}
struct CrossFade {
old_text: String,
new_text: String,
position: Position,
duration: Duration,
start_time: Instant,
}
impl Animation for CrossFade {
fn position(&self) -> Position {
self.position.clone()
}
fn tick(&self, now: Instant, context: &Context, width: f64) {
let total_frames = self.duration.as_secs() * FPS;
let alpha_rate: f64 = 1. / total_frames as f64;
let frames = (now - self.start_time).as_secs_f64() * FPS as f64;
let alpha = alpha_rate * frames;
let text_display = Text::new(self.old_text.clone(), context, 64., width);
context.move_to(0., text_display.extents().height());
context.set_source_rgba(PURPLE.0, PURPLE.1, PURPLE.2, 1. - alpha);
text_display.draw();
let text_display = Text::new(self.new_text.clone(), context, 64., width);
context.move_to(0., text_display.extents().height());
context.set_source_rgba(PURPLE.0, PURPLE.1, PURPLE.2, alpha);
text_display.draw();
}
}
#[derive(Debug)]
pub struct CyberScreenState {
script: Script,
idx: Option<usize>,
top: Option<Step>,
middle: Option<Step>,
bottom: Option<Step>,
}
impl Default for CyberScreenState {
fn default() -> Self {
Self {
script: Script(vec![]),
idx: None,
top: None,
middle: None,
bottom: None,
}
}
}
impl CyberScreenState {
fn new(script: Script) -> CyberScreenState {
CyberScreenState { script, ..Default::default() }
}
fn next_page(&mut self) -> Box<dyn Animation> {
let idx = match self.idx {
None => 0,
Some(idx) => {
if idx + 1 < self.script.len() {
idx + 1
} else {
idx
}
}
};
self.idx = Some(idx);
let step = self.script[idx].clone();
let (old, new) = match step.position {
Position::Top => {
let old = self.top.replace(step.clone());
(old, step)
}
Position::Middle => {
let old = self.middle.replace(step.clone());
(old, step)
}
Position::Bottom => {
let old = self.bottom.replace(step.clone());
(old, step)
}
};
match old {
Some(old) => Box::new(CrossFade {
old_text: old.text.clone(),
new_text: new.text.clone(),
position: new.position,
duration: new.transition,
start_time: Instant::now(),
}),
None => Box::new(Fade {
text: new.text.clone(),
position: new.position,
duration: new.transition,
start_time: Instant::now(),
}),
}
}
}
#[derive(Default)]
pub struct CyberScreenPrivate {
state: Rc<RefCell<CyberScreenState>>,
// For crossfading to work, I have to detect that there is an old animation in a position, and
// replace it with the new one.
animations: Rc<RefCell<HashMap<Position, Box<dyn Animation>>>>,
}
#[glib::object_subclass]
impl ObjectSubclass for CyberScreenPrivate {
const NAME: &'static str = "CyberScreen";
type Type = CyberScreen;
type ParentType = gtk::DrawingArea;
}
impl ObjectImpl for CyberScreenPrivate {}
impl WidgetImpl for CyberScreenPrivate {}
impl DrawingAreaImpl for CyberScreenPrivate {}
impl CyberScreenPrivate {
fn set_script(&self, script: Script) {
*self.state.borrow_mut() = CyberScreenState::new(script);
}
fn next_page(&self) {
let transition = self.state.borrow_mut().next_page();
self.animations
.borrow_mut()
.insert(transition.position(), transition);
}
}
glib::wrapper! {
pub struct CyberScreen(ObjectSubclass<CyberScreenPrivate>) @extends gtk::DrawingArea, gtk::Widget;
}
impl CyberScreen {
fn new(script: Script) -> Self {
let s: Self = Object::builder().build();
s.imp().set_script(script);
s.set_draw_func({
let s = s.clone();
move |_, context, width, height| {
let now = Instant::now();
context.set_source_rgb(0., 0., 0.);
let _ = context.paint();
let pen = GlowPen::new(width, height, 2., 8., (0.7, 0., 1.));
AsymLineCutout {
orientation: gtk::Orientation::Horizontal,
start_x: 25.,
start_y: height as f64 / 7.,
start_length: width as f64 / 3.,
cutout_length: width as f64 / 3. - 100.,
height: 50.,
end_length: width as f64 / 3. - 50.,
invert: false,
}
.draw(&pen);
pen.stroke();
AsymLine {
orientation: gtk::Orientation::Horizontal,
start_x: width as f64 / 4.,
start_y: height as f64 * 6. / 7.,
start_length: width as f64 * 2. / 3. - 25.,
height: 50.,
end_length: 0.,
invert: false,
}
.draw(&pen);
pen.stroke();
let tracery = pen.finish();
let _ = context.set_source(tracery);
let _ = context.paint();
let animations = s.imp().animations.borrow_mut();
let lr_margin = 50.;
let max_width = width as f64 - lr_margin * 2.;
let region_height = height as f64 / 5.;
if let Some(animation) = animations.get(&Position::Top) {
let y = height as f64 * 1. / 5.;
let surface = context
.target()
.create_for_rectangle(Rectangle::new(20., y, max_width, region_height))
.unwrap();
let ctx = Context::new(&surface).unwrap();
animation.tick(now, &ctx, max_width);
}
if let Some(animation) = animations.get(&Position::Middle) {
let y = height as f64 * 2. / 5.;
let surface = context
.target()
.create_for_rectangle(Rectangle::new(20., y, max_width, region_height))
.unwrap();
let ctx = Context::new(&surface).unwrap();
animation.tick(now, &ctx, max_width);
}
if let Some(animation) = animations.get(&Position::Bottom) {
let y = height as f64 * 3. / 5.;
let surface = context
.target()
.create_for_rectangle(Rectangle::new(20., y, max_width, region_height))
.unwrap();
let ctx = Context::new(&surface).unwrap();
animation.tick(now, &ctx, max_width);
}
}
});
s
}
fn next_page(&self) {
self.imp().next_page();
self.queue_draw();
}
}
fn main() {
let script = Arc::new(RwLock::new(Script::default()));
let app = gtk::Application::builder()
.application_id("com.luminescent-dreams.cyberpunk-slideshow")
.build();
app.add_main_option(
"script",
glib::char::Char::from(b's'),
glib::OptionFlags::IN_MAIN,
glib::OptionArg::String,
"",
None,
);
app.connect_handle_local_options({
let script = script.clone();
move |_, options| {
if let Some(script_path) = options.lookup::<String>("script").unwrap() {
let mut script = script.write().unwrap();
*script = Script::from_file(Path::new(&script_path)).unwrap();
-1
} else {
1
}
}
});
app.connect_activate(move |app| {
let window = gtk::ApplicationWindow::new(app);
let screen = CyberScreen::new(script.read().unwrap().clone());
let events = EventControllerKey::new();
events.connect_key_released({
let app = app.clone();
let window = window.clone();
let screen = screen.clone();
move |_, key, _, _| {
let name = key
.name()
.map(|s| s.as_str().to_owned())
.unwrap_or("".to_owned());
match name.as_ref() {
"Right" => screen.next_page(),
"q" => app.quit(),
"Escape" => window.unfullscreen(),
_ => {}
}
}
});
window.add_controller(events);
window.set_child(Some(&screen));
window.set_width_request(800);
window.set_height_request(600);
window.present();
window.connect_maximized_notify(|window| {
window.fullscreen();
});
let _ = glib::spawn_future_local({
let screen = screen.clone();
async move {
loop {
screen.queue_draw();
async_std::task::sleep(Duration::from_millis(1000 / FPS)).await;
}
}
});
});
app.run();
}

View File

@ -1,15 +0,0 @@
[package]
name = "cyberpunk-splash"
version = "0.1.0"
edition = "2021"
license = "GPL-3.0-only"
# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html
[dependencies]
async-std = { workspace = true }
cairo-rs = { workspace = true }
cyberpunk = { path = "../cyberpunk" }
gio = { workspace = true }
glib = { workspace = true }
gtk = { workspace = true }

View File

@ -1,556 +0,0 @@
use cairo::{
Context, FontSlant, FontWeight, Format, ImageSurface, LinearGradient, Pattern,
TextExtents,
};
use cyberpunk::{AsymLine, AsymLineCutout, GlowPen, Pen, SlashMeter};
use glib::Object;
use gtk::{prelude::*, subclass::prelude::*, EventControllerKey};
use std::{
cell::RefCell,
rc::Rc,
sync::{Arc, RwLock},
time::{Duration, Instant},
};
const WIDTH: i32 = 1600;
const HEIGHT: i32 = 600;
#[derive(Clone, Copy, Debug)]
pub enum State {
Running {
last_update: Instant,
deadline: Instant,
timeout: Option<TimeoutAnimation>,
},
Paused {
time_remaining: Duration,
timeout: Option<TimeoutAnimation>,
},
}
impl State {
fn new(countdown: Duration) -> Self {
Self::Paused {
time_remaining: countdown,
timeout: None,
}
}
fn start(&mut self) {
if let Self::Paused {
time_remaining,
timeout,
} = self
{
*self = Self::Running {
last_update: Instant::now(),
deadline: Instant::now() + *time_remaining,
timeout: *timeout,
};
}
}
fn pause(&mut self) {
if let Self::Running {
deadline, timeout, ..
} = self
{
*self = Self::Paused {
time_remaining: deadline.saturating_duration_since(Instant::now()),
timeout: *timeout,
}
}
}
fn start_pause(&mut self) {
match self {
Self::Running { .. } => self.pause(),
Self::Paused { .. } => self.start(),
}
}
fn run(&mut self, now: Instant) {
if let Self::Running {
last_update,
deadline,
timeout,
} = self
{
*last_update = now;
if let Some(ref mut timeout) = timeout {
// TODO: figure out the actual number of frames
timeout.tick(1);
}
if *last_update > *deadline && timeout.is_none() {
*timeout = Some(TimeoutAnimation {
intensity: 1.,
duration: 1.,
ascending: false,
});
}
}
}
}
#[derive(Clone, Copy, Debug)]
pub struct TimeoutAnimation {
intensity: f64,
duration: f64,
ascending: bool,
}
impl TimeoutAnimation {
fn tick(&mut self, frames_elapsed: u8) {
let step_size = 1. / (self.duration * 60.);
if self.ascending {
self.intensity += step_size * frames_elapsed as f64;
if self.intensity > 1. {
self.intensity = 1.0;
self.ascending = false;
}
} else {
self.intensity -= step_size * frames_elapsed as f64;
if self.intensity < 0. {
self.intensity = 0.0;
self.ascending = true;
}
}
}
}
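// Sketch (not part of the original file): the easing above in isolation. With
// duration = 1.0 and one tick per frame at 60 FPS, the intensity ramps from 1 down to 0
// over roughly a second, then reverses and climbs back up, which produces the pulsing
// timeout effect. Run inside this module, since the fields are private.
fn demo_timeout_animation() {
    let mut anim = TimeoutAnimation {
        intensity: 1.,
        duration: 1.,
        ascending: false,
    };
    for frame in 0..120 {
        anim.tick(1);
        println!("frame {:3}: intensity {:.2}", frame, anim.intensity);
    }
}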
pub struct SplashPrivate {
text: Rc<RefCell<String>>,
background: Rc<RefCell<Pattern>>,
time_extents: Rc<RefCell<Option<TextExtents>>>,
width: Rc<RefCell<i32>>,
height: Rc<RefCell<i32>>,
state: Rc<RefCell<State>>,
}
impl SplashPrivate {
fn set_text(&self, text: String) {
*self.text.borrow_mut() = text;
}
fn set_state(&self, state: State) {
*self.state.borrow_mut() = state;
}
fn redraw_background(&self) {
let pen = GlowPen::new(
*self.width.borrow(),
*self.height.borrow(),
2.,
8.,
(0.7, 0., 1.),
);
let background =
ImageSurface::create(Format::Rgb24, *self.width.borrow(), *self.height.borrow())
.unwrap();
let context = Context::new(background).unwrap();
context.push_group();
context.set_source_rgb(0., 0., 0.);
let _ = context.paint();
context.select_font_face("Alegreya Sans SC", FontSlant::Normal, FontWeight::Bold);
{
context.set_source_rgb(0.7, 0., 1.);
let hashtag = "#CodingTogether";
context.set_font_size(64.);
let extents = context.text_extents(hashtag).unwrap();
context.move_to(20., extents.height() + 40.);
let _ = context.show_text(hashtag);
AsymLine {
orientation: gtk::Orientation::Horizontal,
start_x: 10.,
start_y: extents.height() + 10.,
start_length: 0.,
height: extents.height() / 2.,
end_length: 0.,
invert: false,
}
.draw(&pen);
pen.stroke();
AsymLine {
orientation: gtk::Orientation::Horizontal,
start_x: 20.,
start_y: extents.height() + 60.,
start_length: extents.width(),
height: extents.height() / 2.,
end_length: 0.,
invert: false,
}
.draw(&pen);
pen.stroke();
}
{
context.set_font_size(128.);
let center_x = *self.width.borrow() as f64 / 2.;
let center_y = *self.height.borrow() as f64 / 2.;
let title_extents = context.text_extents(&self.text.borrow()).unwrap();
let title_width = title_extents.width();
let title_height = title_extents.height();
{
let start_length = center_x - title_width / 2. - title_height - 20.;
let title_cutout = AsymLineCutout {
orientation: gtk::Orientation::Horizontal,
start_x: 20.,
start_y: center_y - 20. - title_height / 2.,
start_length,
end_length: *self.width.borrow() as f64 - 120. - start_length,
cutout_length: title_width,
height: title_height,
invert: false,
};
title_cutout.draw(&pen);
pen.stroke();
}
{
let title_baseline_x = center_x - title_width / 2.;
let title_baseline_y = center_y - 20.;
let gradient = LinearGradient::new(
title_baseline_x,
title_baseline_y - title_height,
title_baseline_x,
title_baseline_y,
);
gradient.add_color_stop_rgb(0.2, 0.7, 0.0, 1.0);
gradient.add_color_stop_rgb(0.8, 0.2, 0.0, 1.0);
context.move_to(title_baseline_x, title_baseline_y);
let _ = context.set_source(gradient);
let _ = context.show_text(&self.text.borrow());
}
}
{
AsymLine {
orientation: gtk::Orientation::Horizontal,
start_x: 100.,
start_y: *self.height.borrow() as f64 / 2. + 100.,
start_length: 400.,
height: 50.,
end_length: 0.,
invert: true,
}
.draw(&pen);
pen.stroke();
}
{
context.set_source_rgb(0.7, 0., 1.);
AsymLine {
orientation: gtk::Orientation::Horizontal,
start_x: *self.width.borrow() as f64 / 2. + 100.,
start_y: *self.height.borrow() as f64 / 2. + 200.,
start_length: 600.,
height: 50.,
end_length: 0.,
invert: false,
}
.draw(&pen);
pen.stroke();
}
let tracery = pen.finish();
let _ = context.set_source(tracery);
let _ = context.paint();
let background = context.pop_group().unwrap();
*self.background.borrow_mut() = background;
}
}
#[glib::object_subclass]
impl ObjectSubclass for SplashPrivate {
const NAME: &'static str = "Splash";
type Type = Splash;
type ParentType = gtk::DrawingArea;
fn new() -> SplashPrivate {
// Set up a default plain black background
let background = ImageSurface::create(Format::Rgb24, WIDTH, HEIGHT).unwrap();
let context = Context::new(background).unwrap();
context.push_group();
context.set_source_rgb(0., 0., 0.);
let _ = context.paint();
let background = context.pop_group().unwrap();
SplashPrivate {
text: Rc::new(RefCell::new(String::from(""))),
background: Rc::new(RefCell::new(background)),
time_extents: Rc::new(RefCell::new(None)),
width: Rc::new(RefCell::new(WIDTH)),
height: Rc::new(RefCell::new(HEIGHT)),
state: Rc::new(RefCell::new(State::new(Duration::ZERO))),
}
}
}
impl ObjectImpl for SplashPrivate {}
impl WidgetImpl for SplashPrivate {}
impl DrawingAreaImpl for SplashPrivate {}
glib::wrapper! {
pub struct Splash(ObjectSubclass<SplashPrivate>) @extends gtk::DrawingArea, gtk::Widget;
}
impl Splash {
pub fn new(text: String, state: State) -> Self {
let s: Self = Object::builder().build();
s.set_width_request(WIDTH);
s.set_height_request(HEIGHT);
s.imp().set_text(text);
s.imp().set_state(state);
s.imp().redraw_background();
s.set_draw_func({
let s = s.clone();
move |_, context, width, height| {
let background = s.imp().background.borrow();
let _ = context.set_source(&*background);
let _ = context.paint();
let state = *s.imp().state.borrow();
let time = match state {
State::Running { deadline, .. } => deadline.saturating_duration_since(Instant::now()),
State::Paused { time_remaining, .. } => time_remaining,
};
let minutes = time.as_secs() / 60;
let seconds = time.as_secs() % 60;
let center_x = width as f64 / 2.;
let center_y = height as f64 / 2.;
{
context.select_font_face(
"Alegreya Sans SC",
FontSlant::Normal,
FontWeight::Bold,
);
context.set_font_size(128.);
let time = format!("{:02}' {:02}\"", minutes, seconds);
let time_extents = context.text_extents(&time).unwrap();
let mut saved_extents = s.imp().time_extents.borrow_mut();
if saved_extents.is_none() {
*saved_extents = Some(time_extents);
}
let time_baseline_x = center_x - time_extents.width() / 2.;
let time_baseline_y = center_y + 100.;
let gradient = LinearGradient::new(
time_baseline_x,
time_baseline_y - time_extents.height(),
time_baseline_x,
time_baseline_y,
);
let (running, timeout_animation) = match state {
State::Running { timeout, .. } => (true, timeout),
State::Paused { timeout, .. } => (false, timeout),
};
match timeout_animation {
Some(ref animation) => {
gradient.add_color_stop_rgba(0.2, 0.2, 0.0, 1.0, animation.intensity);
gradient.add_color_stop_rgba(0.8, 0.7, 0.0, 1.0, animation.intensity);
let _ = context.set_source(gradient);
}
None => {
if running {
gradient.add_color_stop_rgb(0.2, 0.2, 0.0, 1.0);
gradient.add_color_stop_rgb(0.8, 0.7, 0.0, 1.0);
let _ = context.set_source(gradient);
} else {
context.set_source_rgb(0.3, 0.3, 0.3);
}
}
}
context.move_to(time_baseline_x, time_baseline_y);
let _ = context.show_text(&time);
};
if let Some(extents) = *s.imp().time_extents.borrow() {
context.set_source_rgb(0.7, 0.0, 1.0);
let time_meter = SlashMeter {
orientation: gtk::Orientation::Horizontal,
start_x: center_x + extents.width() / 2. + 50.,
start_y: center_y + 100.,
count: 5,
fill_count: minutes as u8,
height: 60.,
length: 100.,
};
time_meter.draw(context);
}
}
});
s.connect_resize(|s, width, height| {
*s.imp().width.borrow_mut() = width;
*s.imp().height.borrow_mut() = height;
s.imp().redraw_background();
});
s
}
pub fn set_state(&self, state: State) {
self.imp().set_state(state);
self.queue_draw();
}
}
fn main() {
let app = gtk::Application::builder()
.application_id("com.luminescent-dreams.cyberpunk-splash")
.flags(gio::ApplicationFlags::HANDLES_OPEN)
.build();
app.add_main_option(
"title",
glib::char::Char::from(b't'),
glib::OptionFlags::IN_MAIN,
glib::OptionArg::String,
"",
None,
);
app.add_main_option(
"countdown",
glib::char::Char::from(b'c'),
glib::OptionFlags::IN_MAIN,
glib::OptionArg::String,
"",
None,
);
let state = Arc::new(RwLock::new(State::new(Duration::from_secs(5 * 60))));
let title = Arc::new(RwLock::new("".to_owned()));
app.connect_command_line(|_, _args| {
println!("connect_command_line");
1
});
app.connect_handle_local_options({
let title = title.clone();
let state = state.clone();
move |_, options| {
println!("connect_handle_local_options");
*title.write().unwrap() = options.lookup::<String>("title").unwrap().unwrap();
let countdown = match options.lookup::<String>("countdown") {
Ok(Some(countdown_str)) => {
let parts = countdown_str.split(':').collect::<Vec<&str>>();
match parts.len() {
2 => {
let minutes = parts[0].parse::<u64>().unwrap();
let seconds = parts[1].parse::<u64>().unwrap();
Duration::from_secs(minutes * 60 + seconds)
}
1 => {
let seconds = parts[0].parse::<u64>().unwrap();
Duration::from_secs(seconds)
}
_ => Duration::from_secs(300),
}
}
_ => Duration::from_secs(300),
};
match *state.write().unwrap() {
State::Running {
ref mut deadline, ..
} => *deadline = Instant::now() + countdown,
State::Paused {
ref mut time_remaining,
..
} => *time_remaining = countdown,
}
-1
}
});
app.connect_open(move |app, files, args| {
println!("called open");
println!("files: {}", files.len());
println!("args: {}", args);
app.activate();
});
app.connect_activate(move |app| {
let (gtk_tx, gtk_rx) = async_std::channel::unbounded();
let window = gtk::ApplicationWindow::new(app);
window.present();
let splash = Splash::new(title.read().unwrap().clone(), *state.read().unwrap());
window.set_child(Some(&splash));
window.connect_maximized_notify(|window| {
window.fullscreen();
});
let keyboard_events = EventControllerKey::new();
keyboard_events.connect_key_released({
let window = window.clone();
let state = state.clone();
move |_, key, _, _| {
let name = key
.name()
.map(|s| s.as_str().to_owned())
.unwrap_or("".to_owned());
match name.as_ref() {
"Escape" => window.unfullscreen(),
"space" => state.write().unwrap().start_pause(),
_ => {}
}
}
});
window.add_controller(keyboard_events);
glib::spawn_future_local({
let splash = splash.clone();
async move {
while let Ok(state) = gtk_rx.recv().await {
println!("received state");
splash.set_state(state);
}
}
});
glib::spawn_future_local({
let state = state.clone();
async move {
state.write().unwrap().start();
loop {
async_std::task::sleep(Duration::from_millis(1000 / 60)).await;
state.write().unwrap().run(Instant::now());
println!("state: {:?}", state.read().unwrap());
let _ = gtk_tx.send(*state.read().unwrap()).await;
}
}
});
});
app.run();
}

View File

@ -1,12 +0,0 @@
[package]
name = "cyberpunk"
version = "0.1.0"
edition = "2021"
# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html
[dependencies]
cairo-rs = { workspace = true }
gio = { workspace = true }
glib = { workspace = true }
gtk = { workspace = true }

View File

@ -1,301 +0,0 @@
use cairo::{
Context, FontSlant, FontWeight, Format, ImageSurface, LineCap, Pattern,
TextExtents,
};
pub struct AsymLineCutout {
pub orientation: gtk::Orientation,
pub start_x: f64,
pub start_y: f64,
pub start_length: f64,
pub cutout_length: f64,
pub end_length: f64,
pub height: f64,
pub invert: bool,
}
impl AsymLineCutout {
pub fn draw(&self, pen: &impl Pen) {
let dodge = if self.invert {
self.height
} else {
-self.height
};
match self.orientation {
gtk::Orientation::Horizontal => {
pen.move_to(self.start_x, self.start_y);
pen.line_to(self.start_x + self.start_length, self.start_y);
pen.line_to(
self.start_x + self.start_length + self.height,
self.start_y + dodge,
);
pen.line_to(
self.start_x + self.start_length + self.height + self.cutout_length,
self.start_y + dodge,
);
pen.line_to(
self.start_x
+ self.start_length
+ self.height
+ self.cutout_length
+ (self.height / 2.),
self.start_y + dodge / 2.,
);
pen.line_to(
self.start_x
+ self.start_length
+ self.height
+ self.cutout_length
+ (self.height / 2.)
+ self.end_length,
self.start_y + dodge / 2.,
);
}
gtk::Orientation::Vertical => {
pen.move_to(self.start_x, self.start_y);
pen.line_to(self.start_x, self.start_y + self.start_length);
pen.line_to(
self.start_x + dodge,
self.start_y + self.start_length + self.height,
);
pen.line_to(
self.start_x + dodge,
self.start_y + self.start_length + self.height + self.cutout_length,
);
pen.line_to(
self.start_x + dodge / 2.,
self.start_y
+ self.start_length
+ self.height
+ self.cutout_length
+ (self.height / 2.),
);
pen.line_to(
self.start_x + dodge / 2.,
self.start_y
+ self.start_length
+ self.height
+ self.cutout_length
+ (self.height / 2.)
+ self.end_length,
);
}
_ => panic!("unknown orientation"),
}
}
}
// Represents an asymmetrical line that starts at one location, dodges over at a 45-degree
// angle, and then continues with a final straight segment.
pub struct AsymLine {
// Will this be drawn left-to-right or up-to-down?
pub orientation: gtk::Orientation,
// Starting address
pub start_x: f64,
pub start_y: f64,
// Length of the first segment
pub start_length: f64,
// Height to dodge over to the next section
pub height: f64,
// Length of the final segment, after the dodge.
pub end_length: f64,
// When normal, the angle dodge is upwards. When inverted, the angle dodge is downwards.
pub invert: bool,
}
impl AsymLine {
pub fn draw(&self, pen: &impl Pen) {
let dodge = if self.invert {
self.height
} else {
-self.height
};
match self.orientation {
gtk::Orientation::Horizontal => {
pen.move_to(self.start_x, self.start_y);
pen.line_to(self.start_x + self.start_length, self.start_y);
pen.line_to(
self.start_x + self.start_length + self.height,
self.start_y + dodge,
);
pen.line_to(
self.start_x + self.start_length + self.height + self.end_length,
self.start_y + dodge,
);
}
gtk::Orientation::Vertical => {}
_ => panic!("unknown orientation"),
}
}
}
pub struct SlashMeter {
pub orientation: gtk::Orientation,
pub start_x: f64,
pub start_y: f64,
pub count: u8,
pub fill_count: u8,
pub height: f64,
pub length: f64,
}
impl SlashMeter {
pub fn draw(&self, context: &Context) {
match self.orientation {
gtk::Orientation::Horizontal => {
let angle: f64 = 0.8;
let run = self.height / angle.tan();
let width = self.length / (self.count as f64 * 2.);
for c in 0..self.count {
context.set_line_width(1.);
let start_x = self.start_x + c as f64 * width * 2.;
context.move_to(start_x, self.start_y);
context.line_to(start_x + run, self.start_y - self.height);
context.line_to(start_x + run + width, self.start_y - self.height);
context.line_to(start_x + width, self.start_y);
context.line_to(start_x, self.start_y);
if c < self.fill_count {
let _ = context.fill();
} else {
let _ = context.stroke();
}
}
}
gtk::Orientation::Vertical => {}
_ => panic!("unknown orientation"),
}
}
}
/// Represents a pen for drawing a pattern. This is good for complex patterns that may require
/// multiple identical steps.
pub trait Pen {
/// Move the pen to a location.
fn move_to(&self, x: f64, y: f64);
/// Draw a line from the current location to the specified destination.
fn line_to(&self, x: f64, y: f64);
/// Instantiate the line.
fn stroke(&self);
/// Convert all of the drawing into a pattern that can be painted to a drawing context.
fn finish(self) -> Pattern;
}
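// Sketch (not part of the original file): the simplest possible Pen, a single cairo
// context with no blur pass, to contrast with the GlowPen below. It reuses the same
// push_group/pop_group discipline so that finish() yields a paintable Pattern.
pub struct PlainPen {
    context: Context,
}
impl PlainPen {
    pub fn new(width: i32, height: i32, line_width: f64, color: (f64, f64, f64)) -> Self {
        let context =
            Context::new(ImageSurface::create(Format::Rgb24, width, height).unwrap()).unwrap();
        context.set_line_width(line_width);
        context.set_source_rgb(color.0, color.1, color.2);
        context.push_group();
        context.set_line_cap(LineCap::Round);
        Self { context }
    }
}
impl Pen for PlainPen {
    fn move_to(&self, x: f64, y: f64) {
        self.context.move_to(x, y);
    }
    fn line_to(&self, x: f64, y: f64) {
        self.context.line_to(x, y);
    }
    fn stroke(&self) {
        self.context.stroke().expect("to draw the line");
    }
    fn finish(self) -> Pattern {
        self.context.pop_group().unwrap()
    }
}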
pub struct GlowPen {
blur_context: Context,
draw_context: Context,
}
impl GlowPen {
pub fn new(
width: i32,
height: i32,
line_width: f64,
blur_line_width: f64,
color: (f64, f64, f64),
) -> Self {
let blur_context =
Context::new(ImageSurface::create(Format::Rgb24, width, height).unwrap()).unwrap();
blur_context.set_line_width(blur_line_width);
blur_context.set_source_rgba(color.0, color.1, color.2, 0.5);
blur_context.push_group();
blur_context.set_line_cap(LineCap::Round);
let draw_context =
Context::new(ImageSurface::create(Format::Rgb24, width, height).unwrap()).unwrap();
draw_context.set_line_width(line_width);
draw_context.set_source_rgb(color.0, color.1, color.2);
draw_context.push_group();
draw_context.set_line_cap(LineCap::Round);
Self {
blur_context,
draw_context,
}
}
}
impl Pen for GlowPen {
fn move_to(&self, x: f64, y: f64) {
self.blur_context.move_to(x, y);
self.draw_context.move_to(x, y);
}
fn line_to(&self, x: f64, y: f64) {
self.blur_context.line_to(x, y);
self.draw_context.line_to(x, y);
}
fn stroke(&self) {
self.blur_context.stroke().expect("to draw the blur line");
self.draw_context
.stroke()
.expect("to draw the regular line");
}
fn finish(self) -> Pattern {
let foreground = self.draw_context.pop_group().unwrap();
self.blur_context.set_source(foreground).unwrap();
self.blur_context.paint().unwrap();
self.blur_context.pop_group().unwrap()
}
}
pub struct Text<'a> {
content: Vec<String>,
context: &'a Context,
}
impl<'a> Text<'a> {
pub fn new(content: String, context: &'a Context, size: f64, width: f64) -> Self {
context.select_font_face("Alegreya Sans SC", FontSlant::Normal, FontWeight::Bold);
context.set_font_size(size);
let lines = word_wrap(content, context, width);
Self { content: lines, context }
}
pub fn extents(&self) -> TextExtents {
self.context.text_extents(&self.content[0]).unwrap()
}
pub fn draw(&self) {
let mut baseline = 0.;
for line in self.content.iter() {
baseline += self.context.text_extents(line).unwrap().height() + 10.;
self.context.move_to(0., baseline);
let _ = self.context.show_text(line);
}
}
}
/// Greedily wraps `content` into lines whose rendered width, measured with the given
/// cairo context's current font settings, stays within `max_width`.
fn word_wrap(content: String, context: &Context, max_width: f64) -> Vec<String> {
    let mut lines = vec![];
    let words: Vec<&str> = content.split_whitespace().collect();
    let mut start: usize = 0;
    for idx in 0..words.len() + 1 {
        let line = words[start..idx].join(" ");
        let extents = context.text_extents(&line).unwrap();
        // Once the candidate line overflows, commit everything up to (but not including)
        // the word that pushed it over, and start the next line from that word.
        if extents.width() > max_width && idx > start + 1 {
            lines.push(words[start..idx - 1].join(" "));
            start = idx - 1;
        }
    }
    // Commit whatever remains as the final line.
    if start < words.len() {
        lines.push(words[start..].join(" "));
    }
    lines
}
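// Sketch (not part of the original file): how Text is typically used from a GTK draw
// callback, mirroring the slideshow code elsewhere in this tree. Build it against the
// current context, position the baseline from its extents, then draw.
fn draw_caption(context: &Context, width: f64) {
    let caption = Text::new("Coding together".to_owned(), context, 64., width);
    context.set_source_rgb(0.7, 0., 1.);
    context.move_to(0., caption.extents().height());
    caption.draw();
}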

1763
dashboard/Cargo.lock generated

File diff suppressed because it is too large

View File

@ -1,31 +0,0 @@
[package]
name = "dashboard"
version = "0.1.3"
edition = "2018"
# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html
[dependencies]
adw = { version = "0.5", package = "libadwaita", features = [ "v1_2" ] }
async-std = { version = "1.13" }
cairo-rs = { version = "0.18" }
chrono = { version = "0.4", features = ["serde"] }
fluent-ergonomics = { path = "../fluent-ergonomics/" }
fluent = { version = "0.16" }
futures = { version = "0.3" }
geo-types = { path = "../geo-types/" }
gio = { version = "0.18" }
glib = { version = "0.18" }
gdk = { version = "0.7", package = "gdk4" }
gtk = { version = "0.7", package = "gtk4" }
lazy_static = { version = "1.4" }
memorycache = { path = "../memorycache/" }
reqwest = { version = "0.11", features = ["json"] }
serde_json = { version = "1" }
serde = { version = "1", features = [ "derive" ] }
tokio = { version = "1", features = ["full"] }
unic-langid = { version = "0.9" }
[build-dependencies]
glib-build-tools = "0.18"

View File

@ -1,7 +0,0 @@
fn main() {
glib_build_tools::compile_resources(
&["resources"],
"gresources.xml",
"com.luminescent-dreams.dashboard.gresource",
);
}

View File

@ -1,6 +0,0 @@
[Desktop Entry]
Type=Application
Version=1.0
Name=dashboard
Comment=My personal system dashboard
Exec=dashboard

View File

@ -1,12 +0,0 @@
#!/usr/bin/env bash
set -euo pipefail
VERSION=`cat Cargo.toml | grep "^version =" | sed -r 's/^version = "(.+)"$/\1/'`
mkdir -p dist
cp dashboard.desktop dist
cp ../target/release/dashboard dist
strip dist/dashboard
tar -czf dashboard-${VERSION}.tgz dist/

View File

@ -1,7 +0,0 @@
<?xml version="1.0" encoding="UTF-8"?>
<gresources>
<gresource prefix="/com/luminescent-dreams/dashboard/">
<file>style.css</file>
</gresource>
</gresources>

View File

@ -1,9 +0,0 @@
label {
font-size: 200%;
padding: 4px;
}
.highlight {
color: @accent_fg_color;
background-color: @accent_bg_color;
}

View File

@ -1,79 +0,0 @@
use crate::{
components::{Date, Events, TransitCard, TransitClock},
types::State,
};
use adw::prelude::AdwApplicationWindowExt;
use gio::resources_lookup_data;
use gtk::{prelude::*, STYLE_PROVIDER_PRIORITY_USER};
#[derive(Clone)]
pub struct ApplicationWindow {
pub window: adw::ApplicationWindow,
pub date_label: Date,
pub events: Events,
pub transit_card: TransitCard,
pub transit_clock: TransitClock,
}
impl ApplicationWindow {
pub fn new(app: &adw::Application) -> Self {
let window = adw::ApplicationWindow::new(app);
let stylesheet = String::from_utf8(
resources_lookup_data(
"/com/luminescent-dreams/dashboard/style.css",
gio::ResourceLookupFlags::NONE,
)
.expect("stylesheet should just be available")
.to_vec(),
)
.expect("to parse stylesheet");
let provider = gtk::CssProvider::new();
provider.load_from_data(&stylesheet);
#[allow(deprecated)]
let context = window.style_context();
#[allow(deprecated)]
context.add_provider(&provider, STYLE_PROVIDER_PRIORITY_USER);
let layout = gtk::Box::builder()
.orientation(gtk::Orientation::Vertical)
.hexpand(true)
.vexpand(true)
.build();
let date_label = Date::default();
let header = adw::HeaderBar::builder()
.title_widget(&date_label)
.build();
layout.append(&header);
let events = Events::default();
layout.append(&events);
let transit_card = TransitCard::default();
layout.append(&transit_card);
let transit_clock = TransitClock::default();
layout.append(&transit_clock);
window.set_content(Some(&layout));
Self {
window,
date_label,
events,
transit_card,
transit_clock,
}
}
pub fn update_state(&self, state: State) {
self.date_label.update_date(state.date);
self.events.set_events(state.events, state.next_event);
if let Some(transit) = state.transit {
self.transit_card.update_transit(&transit);
self.transit_clock.update_transit(transit);
}
}
}

View File

@ -1,65 +0,0 @@
use std::{cell::RefCell, rc::Rc};
use chrono::NaiveDate;
use glib::Object;
use gtk::{prelude::*, subclass::prelude::*};
pub struct DatePrivate {
date: Rc<RefCell<NaiveDate>>,
label: Rc<RefCell<gtk::Label>>,
}
impl Default for DatePrivate {
fn default() -> Self {
let date = chrono::Local::now().date_naive();
Self {
date: Rc::new(RefCell::new(date)),
label: Rc::new(RefCell::new(gtk::Label::new(None))),
}
}
}
#[glib::object_subclass]
impl ObjectSubclass for DatePrivate {
const NAME: &'static str = "Date";
type Type = Date;
type ParentType = gtk::Box;
}
impl ObjectImpl for DatePrivate {}
impl WidgetImpl for DatePrivate {}
impl BoxImpl for DatePrivate {}
glib::wrapper! {
pub struct Date(ObjectSubclass<DatePrivate>) @extends gtk::Box, gtk::Widget;
}
impl Default for Date {
fn default() -> Self {
let s: Self = Object::builder().build();
s.set_margin_bottom(8);
s.set_margin_top(8);
s.set_margin_start(8);
s.set_margin_end(8);
s.append(&*s.imp().label.borrow());
s.redraw();
s
}
}
impl Date {
pub fn update_date(&self, date: NaiveDate) {
*self.imp().date.borrow_mut() = date;
self.redraw();
}
fn redraw(&self) {
let date = self.imp().date.borrow();
self.imp()
.label
.borrow_mut()
.set_text(&date.format("%Y %B %d").to_string());
}
}

View File

@ -1,95 +0,0 @@
use crate::{
components::Date,
solstices::{self, YearlyEvents},
};
use glib::Object;
use gtk::{prelude::*, subclass::prelude::*};
/*
#[derive(PartialEq)]
pub enum UpcomingEvent {
SpringEquinox,
SummerSolstice,
AutumnEquinox,
WinterSolstice,
}
*/
#[derive(Default)]
pub struct EventsPrivate {
spring_equinox: Date,
summer_solstice: Date,
autumn_equinox: Date,
winter_solstice: Date,
// next: UpcomingEvent,
}
#[glib::object_subclass]
impl ObjectSubclass for EventsPrivate {
const NAME: &'static str = "Events";
type Type = Events;
type ParentType = gtk::Box;
}
impl ObjectImpl for EventsPrivate {}
impl WidgetImpl for EventsPrivate {}
impl BoxImpl for EventsPrivate {}
glib::wrapper! {
pub struct Events(ObjectSubclass<EventsPrivate>) @extends gtk::Widget, gtk::Box, @implements gtk::Orientable;
}
impl Default for Events {
fn default() -> Self {
let s: Self = Object::builder().build();
s.set_orientation(gtk::Orientation::Horizontal);
s.set_spacing(8);
s.append(&s.imp().spring_equinox);
s.append(&s.imp().summer_solstice);
s.append(&s.imp().autumn_equinox);
s.append(&s.imp().winter_solstice);
s
}
}
impl Events {
pub fn set_events(&self, events: YearlyEvents, next_event: solstices::Event) {
self.imp()
.spring_equinox
.update_date(events.spring_equinox.date_naive());
self.imp()
.summer_solstice
.update_date(events.summer_solstice.date_naive());
self.imp()
.autumn_equinox
.update_date(events.autumn_equinox.date_naive());
self.imp()
.winter_solstice
.update_date(events.winter_solstice.date_naive());
self.imp().spring_equinox.remove_css_class("highlight");
self.imp().summer_solstice.remove_css_class("highlight");
self.imp().autumn_equinox.remove_css_class("highlight");
self.imp().winter_solstice.remove_css_class("highlight");
match next_event {
solstices::Event::SpringEquinox(_) => {
self.imp().spring_equinox.add_css_class("highlight")
}
solstices::Event::SummerSolstice(_) => {
self.imp().summer_solstice.add_css_class("highlight")
}
solstices::Event::AutumnEquinox(_) => {
self.imp().autumn_equinox.add_css_class("highlight")
}
solstices::Event::WinterSolstice(_) => {
self.imp().winter_solstice.add_css_class("highlight")
}
}
}
}

View File

@ -1,57 +0,0 @@
use glib::Object;
use gtk::{prelude::*, subclass::prelude::*};
#[derive(Default)]
pub struct LabelPrivate {
label: gtk::Label,
icon: gtk::Image,
}
#[glib::object_subclass]
impl ObjectSubclass for LabelPrivate {
const NAME: &'static str = "Label";
type Type = Label;
type ParentType = gtk::Box;
}
impl ObjectImpl for LabelPrivate {}
impl WidgetImpl for LabelPrivate {}
impl BoxImpl for LabelPrivate {}
glib::wrapper! {
pub struct Label(ObjectSubclass<LabelPrivate>) @extends gtk::Box, gtk::Widget,
@implements gtk::Orientable;
}
impl Label {
pub fn new(text: Option<&str>, icon: Option<gio::ThemedIcon>) -> Self {
let s: Self = Object::builder().build();
s.set_orientation(gtk::Orientation::Horizontal);
s.set_spacing(8);
s.set_margin_bottom(8);
s.set_margin_top(8);
s.set_margin_start(8);
s.set_margin_end(8);
s.append(&s.imp().icon);
s.append(&s.imp().label);
if let Some(text) = text {
s.set_text(text);
}
if let Some(icon) = icon {
s.set_icon(icon);
}
s
}
pub fn set_text(&self, text: &str) {
self.imp().label.set_text(text);
}
pub fn set_icon(&self, icon: gio::ThemedIcon) {
self.imp().icon.set_from_gicon(&icon);
}
}

View File

@ -1,14 +0,0 @@
mod date;
pub use date::Date;
mod events;
pub use events::Events;
mod label;
pub use label::Label;
mod transit_card;
pub use transit_card::TransitCard;
mod transit_clock;
pub use transit_clock::TransitClock;

View File

@ -1,74 +0,0 @@
use crate::{components::Label, soluna_client::SunMoon};
use glib::Object;
use gtk::{prelude::*, subclass::prelude::*};
pub struct TransitCardPrivate {
sunrise: Label,
sunset: Label,
moonrise: Label,
moonset: Label,
}
impl Default for TransitCardPrivate {
fn default() -> Self {
Self {
sunrise: Label::new(None, Some(gio::ThemedIcon::new("daytime-sunrise-symbolic"))),
sunset: Label::new(None, Some(gio::ThemedIcon::new("daytime-sunset-symbolic"))),
moonrise: Label::new(None, Some(gio::ThemedIcon::new("moon-outline-symbolic"))),
moonset: Label::new(None, Some(gio::ThemedIcon::new("moon-outline-symbolic"))),
}
}
}
#[glib::object_subclass]
impl ObjectSubclass for TransitCardPrivate {
const NAME: &'static str = "TransitCard";
type Type = TransitCard;
type ParentType = gtk::Grid;
}
impl ObjectImpl for TransitCardPrivate {}
impl WidgetImpl for TransitCardPrivate {}
impl GridImpl for TransitCardPrivate {}
glib::wrapper! {
pub struct TransitCard(ObjectSubclass<TransitCardPrivate>) @extends gtk::Grid, gtk::Widget;
}
impl Default for TransitCard {
fn default() -> Self {
let s: Self = Object::builder().build();
s.add_css_class("card");
s.set_column_homogeneous(true);
s.attach(&s.imp().sunrise, 0, 0, 1, 1);
s.attach(&s.imp().sunset, 0, 1, 1, 1);
s.attach(&s.imp().moonrise, 1, 0, 1, 1);
s.attach(&s.imp().moonset, 1, 1, 1, 1);
s
}
}
impl TransitCard {
pub fn update_transit(&self, transit_info: &SunMoon) {
self.imp()
.sunrise
.set_text(format!("{}", transit_info.sunrise.format("%H:%M")).as_ref());
self.imp()
.sunset
.set_text(format!("{}", transit_info.sunset.format("%H:%M")).as_ref());
self.imp().moonrise.set_text(
&transit_info
.moonrise
.map(|time| format!("{}", time.format("%H:%M")))
.unwrap_or("".to_owned()),
);
self.imp().moonset.set_text(
&transit_info
.moonset
.map(|time| format!("{}", time.format("%H:%M")))
.unwrap_or("".to_owned()),
);
}
}

View File

@ -1,106 +0,0 @@
use crate::{
drawing::{Color, PieChart, Wedge},
soluna_client::SunMoon,
};
use chrono::{Duration, NaiveTime};
use glib::Object;
use gtk::{prelude::*, subclass::prelude::*};
use std::{cell::RefCell, f64::consts::PI, rc::Rc};
#[derive(Default)]
pub struct TransitClockPrivate {
info: Rc<RefCell<Option<SunMoon>>>,
}
#[glib::object_subclass]
impl ObjectSubclass for TransitClockPrivate {
const NAME: &'static str = "TransitClock";
type Type = TransitClock;
type ParentType = gtk::DrawingArea;
}
impl ObjectImpl for TransitClockPrivate {}
impl WidgetImpl for TransitClockPrivate {}
impl DrawingAreaImpl for TransitClockPrivate {}
glib::wrapper! {
pub struct TransitClock(ObjectSubclass<TransitClockPrivate>) @extends gtk::DrawingArea, gtk::Widget;
}
impl Default for TransitClock {
fn default() -> Self {
let s: Self = Object::builder().build();
s.set_width_request(500);
s.set_height_request(500);
s.set_draw_func({
let s = s.clone();
move |_, context, width, height| {
#[allow(deprecated)]
let style_context = WidgetExt::style_context(&s);
let center_x = width as f64 / 2.;
let center_y = height as f64 / 2.;
let radius = width.min(height) as f64 / 2. * 0.9;
if let Some(ref info) = *s.imp().info.borrow() {
let full_day = Duration::days(1).num_seconds() as f64;
let sunrise = info.sunrise - NaiveTime::from_hms_opt(0, 0, 0).unwrap();
let sunset = info.sunset - NaiveTime::from_hms_opt(0, 0, 0).unwrap();
#[allow(deprecated)]
let night_color = style_context.lookup_color("dark_5").unwrap();
#[allow(deprecated)]
let day_color = style_context.lookup_color("blue_1").unwrap();
PieChart::new(&style_context)
.center(center_x, center_y)
.radius(radius)
.rotation(-PI / 2.)
.wedges(
vec![
Wedge {
start_angle: (PI * 2.) * sunset.num_seconds() as f64 / full_day,
end_angle: (PI * 2.) * sunrise.num_seconds() as f64 / full_day,
color: Color {
r: night_color.red() as f64,
g: night_color.green() as f64,
b: night_color.blue() as f64,
},
},
Wedge {
start_angle: (PI * 2.) * sunrise.num_seconds() as f64
/ full_day,
end_angle: (PI * 2.) * sunset.num_seconds() as f64 / full_day,
color: Color {
r: day_color.red() as f64,
g: day_color.green() as f64,
b: day_color.blue() as f64,
},
},
]
.into_iter(),
)
.draw(context);
(0..24).for_each(|tick| {
context.set_source_rgb(0., 0., 0.);
context.translate(center_x, center_y);
context.rotate(tick as f64 * (PI / 12.));
context.move_to(radius - 5., 0.);
context.line_to(radius - 10., 0.);
let _ = context.stroke();
context.identity_matrix();
});
}
}
});
s
}
}
impl TransitClock {
pub fn update_transit(&self, transit_info: SunMoon) {
*self.imp().info.borrow_mut() = Some(transit_info);
self.queue_draw();
}
}

View File

@ -1,2 +0,0 @@
mod pie_chart;
pub use pie_chart::{Color, PieChart, Wedge};

View File

@ -1,100 +0,0 @@
use cairo::Context;
use gtk::{gdk::RGBA, prelude::*};
#[allow(deprecated)]
use gtk::StyleContext;
use std::f64::consts::PI;
#[derive(Clone, Debug)]
pub struct Color {
pub r: f64,
pub g: f64,
pub b: f64,
}
#[derive(Clone, Debug)]
pub struct Wedge {
pub start_angle: f64,
pub end_angle: f64,
pub color: Color,
}
pub struct PieChart {
rotation: f64,
wedges: Vec<Wedge>,
center_x: f64,
center_y: f64,
radius: f64,
border_color: RGBA,
}
impl PieChart {
#[allow(deprecated)]
pub fn new(style_context: &StyleContext) -> Self {
#[allow(deprecated)]
Self {
rotation: 0.,
wedges: vec![],
center_x: 0.,
center_y: 0.,
radius: 0.,
border_color: style_context.lookup_color("theme_fg_color").unwrap(),
}
}
pub fn rotation(mut self, rotation: f64) -> Self {
self.rotation = rotation;
self
}
pub fn wedges(mut self, wedge: impl Iterator<Item = Wedge>) -> Self {
let mut wedges: Vec<Wedge> = wedge.collect();
self.wedges.append(&mut wedges);
self
}
pub fn center(mut self, center_x: f64, center_y: f64) -> Self {
self.center_x = center_x;
self.center_y = center_y;
self
}
pub fn radius(mut self, radius: f64) -> Self {
self.radius = radius;
self
}
pub fn draw(self, context: &Context) {
context.set_source_rgba(0., 0., 0., 0.);
let _ = context.paint();
context.set_line_width(2.);
self.wedges.iter().for_each(
|Wedge {
start_angle,
end_angle,
color,
}| {
context.move_to(self.center_x, self.center_y);
context.set_source_rgb(color.r, color.g, color.b);
context.arc(
self.center_x,
self.center_y,
self.radius,
start_angle + self.rotation,
end_angle + self.rotation,
);
let _ = context.fill();
},
);
context.set_source_rgb(
self.border_color.red() as f64,
self.border_color.green() as f64,
self.border_color.blue() as f64,
);
context.arc(self.center_x, self.center_y, self.radius, 0., 2. * PI);
let _ = context.stroke();
}
}
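A minimal usage sketch (not part of the original file): draw_day_night is a placeholder name, and the style and Cairo contexts are assumed to come from the enclosing widget's draw function.
#[allow(deprecated)]
fn draw_day_night(style: &StyleContext, context: &Context) {
    PieChart::new(style)
        .center(100.0, 100.0)
        .radius(90.0)
        .wedges(
            vec![
                Wedge { start_angle: 0.0, end_angle: PI, color: Color { r: 0.1, g: 0.1, b: 0.3 } },
                Wedge { start_angle: PI, end_angle: 2.0 * PI, color: Color { r: 0.9, g: 0.9, b: 0.6 } },
            ]
            .into_iter(),
        )
        .draw(context);
}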

View File

@ -1,143 +0,0 @@
use std::{
env,
sync::{Arc, RwLock},
};
use async_std::channel::Sender;
use chrono::{Datelike, Local, Utc};
use geo_types::{Latitude, Longitude};
use gtk::prelude::*;
mod app_window;
use app_window::ApplicationWindow;
mod components;
mod drawing;
mod soluna_client;
use soluna_client::SolunaClient;
mod solstices;
use solstices::EVENTS;
mod types;
use types::State;
/*
const EO_TEXT: &'static str = "
day = {$day ->
*[Sunday] Dimanĉo
[Monday] Lundo
[Tuesday] Mardo
[Wednesday] Merkredo
[Thursday] Ĵaŭdo
[Friday] Vendredo
[Saturday] Sabato
[LeapDay] Leap Day
[YearDay] Year Day
}
month = {$month ->
*[January] Januaro
[February] Februaro
[March] Marto
[April] Aprilo
[May] Mayo
[June] Junio
[Sol] Solo
[July] Julio
[August] Aŭgusto
[September] Septembro
[October] Oktobro
[November] Novembro
[December] Decembro
}
spring_equinox = Printempa Ekvinokso
summer_solstice = Somera Solstico
autumn_equinox = Aŭtuna Ekvinokso
winter_solstice = Vintra Solstico
";
*/
#[derive(Clone, Debug)]
pub enum Message {
Refresh(State),
}
#[derive(Clone)]
pub struct Core {
tx: Arc<RwLock<Option<Sender<Message>>>>,
}
pub fn main() {
gio::resources_register_include!("com.luminescent-dreams.dashboard.gresource")
.expect("Failed to register resources");
let app = adw::Application::builder()
.application_id("com.luminescent-dreams.dashboard")
.resource_base_path("/com/luminescent-dreams/dashboard")
.build();
let latitude = Latitude::from(41.78);
let longitude = Longitude::from(-71.41);
let runtime = tokio::runtime::Builder::new_multi_thread()
.enable_all()
.build()
.unwrap();
let core = Core {
tx: Arc::new(RwLock::new(None)),
};
runtime.spawn({
let core = core.clone();
async move {
let soluna_client = SolunaClient::new();
loop {
let transit = soluna_client
.request(latitude.clone(), longitude.clone(), Local::now())
.await;
let now = Local::now();
let state = State {
date: now.date_naive(),
next_event: EVENTS.next_event(now.with_timezone(&Utc)).unwrap(),
events: EVENTS.yearly_events(now.year()).unwrap(),
transit: Some(transit),
};
let gtk_tx = core.tx.read().unwrap().clone();
if let Some(gtk_tx) = gtk_tx {
let state = state.clone();
let _ = gtk_tx.send(Message::Refresh(state)).await;
std::thread::sleep(std::time::Duration::from_secs(60));
} else {
std::thread::sleep(std::time::Duration::from_secs(1));
}
}
}
});
app.connect_activate(move |app| {
let (gtk_tx, gtk_rx) = async_std::channel::unbounded();
*core.tx.write().unwrap() = Some(gtk_tx);
let window = ApplicationWindow::new(app);
window.window.present();
glib::spawn_future_local(async move {
loop {
let Message::Refresh(state) = gtk_rx.recv().await.unwrap();
window.update_state(state);
}
});
});
let args: Vec<String> = env::args().collect();
ApplicationExtManual::run_with_args(&app, &args);
runtime.shutdown_background();
}

View File

@ -1,14 +0,0 @@
use svg::{
node::element::{Circle, Image},
Document,
};
pub fn moon() -> Document {
/*
svg(width="100%", height="100%", xmlns="http://www.w3.org/2000/svg") {
circle(cx="50", cy="50", r="50", stroke="green", fill="none");
}
*/
let img = Image::new().set("href", "/moon-small.png");
Document::new().add(img)
}

View File

@ -1,202 +0,0 @@
use std::collections::HashMap;
use chrono::prelude::*;
use lazy_static::lazy_static;
use serde::{Deserialize, Serialize};
// http://astropixels.com/ephemeris/soleq2001.html
const SOLSTICE_TEXT: &str = "
2001 Mar 20 13:31 Jun 21 07:38 Sep 22 23:05 Dec 21 19:22
2002 Mar 20 19:16 Jun 21 13:25 Sep 23 04:56 Dec 22 01:15
2003 Mar 21 01:00 Jun 21 19:11 Sep 23 10:47 Dec 22 07:04
2004 Mar 20 06:49 Jun 21 00:57 Sep 22 16:30 Dec 21 12:42
2005 Mar 20 12:34 Jun 21 06:46 Sep 22 22:23 Dec 21 18:35
2006 Mar 20 18:25 Jun 21 12:26 Sep 23 04:04 Dec 22 00:22
2007 Mar 21 00:07 Jun 21 18:06 Sep 23 09:51 Dec 22 06:08
2008 Mar 20 05:49 Jun 21 00:00 Sep 22 15:45 Dec 21 12:04
2009 Mar 20 11:44 Jun 21 05:45 Sep 22 21:18 Dec 21 17:47
2010 Mar 20 17:32 Jun 21 11:28 Sep 23 03:09 Dec 21 23:38
2011 Mar 20 23:21 Jun 21 17:16 Sep 23 09:05 Dec 22 05:30
2012 Mar 20 05:15 Jun 20 23:08 Sep 22 14:49 Dec 21 11:12
2013 Mar 20 11:02 Jun 21 05:04 Sep 22 20:44 Dec 21 17:11
2014 Mar 20 16:57 Jun 21 10:52 Sep 23 02:30 Dec 21 23:03
2015 Mar 20 22:45 Jun 21 16:38 Sep 23 08:20 Dec 22 04:48
2016 Mar 20 04:31 Jun 20 22:35 Sep 22 14:21 Dec 21 10:45
2017 Mar 20 10:29 Jun 21 04:25 Sep 22 20:02 Dec 21 16:29
2018 Mar 20 16:15 Jun 21 10:07 Sep 23 01:54 Dec 21 22:22
2019 Mar 20 21:58 Jun 21 15:54 Sep 23 07:50 Dec 22 04:19
2020 Mar 20 03:50 Jun 20 21:43 Sep 22 13:31 Dec 21 10:03
2021 Mar 20 09:37 Jun 21 03:32 Sep 22 19:21 Dec 21 15:59
2022 Mar 20 15:33 Jun 21 09:14 Sep 23 01:04 Dec 21 21:48
2023 Mar 20 21:25 Jun 21 14:58 Sep 23 06:50 Dec 22 03:28
2024 Mar 20 03:07 Jun 20 20:51 Sep 22 12:44 Dec 21 09:20
2025 Mar 20 09:02 Jun 21 02:42 Sep 22 18:20 Dec 21 15:03
2026 Mar 20 14:46 Jun 21 08:25 Sep 23 00:06 Dec 21 20:50
2027 Mar 20 20:25 Jun 21 14:11 Sep 23 06:02 Dec 22 02:43
2028 Mar 20 02:17 Jun 20 20:02 Sep 22 11:45 Dec 21 08:20
2029 Mar 20 08:01 Jun 21 01:48 Sep 22 17:37 Dec 21 14:14
2030 Mar 20 13:51 Jun 21 07:31 Sep 22 23:27 Dec 21 20:09
2031 Mar 20 19:41 Jun 21 13:17 Sep 23 05:15 Dec 22 01:56
2032 Mar 20 01:23 Jun 20 19:09 Sep 22 11:11 Dec 21 07:57
2033 Mar 20 07:23 Jun 21 01:01 Sep 22 16:52 Dec 21 13:45
2034 Mar 20 13:18 Jun 21 06:45 Sep 22 22:41 Dec 21 19:35
2035 Mar 20 19:03 Jun 21 12:33 Sep 23 04:39 Dec 22 01:31
2036 Mar 20 01:02 Jun 20 18:31 Sep 22 10:23 Dec 21 07:12
2037 Mar 20 06:50 Jun 21 00:22 Sep 22 16:13 Dec 21 13:08
2038 Mar 20 12:40 Jun 21 06:09 Sep 22 22:02 Dec 21 19:01
2039 Mar 20 18:32 Jun 21 11:58 Sep 23 03:50 Dec 22 00:41
2040 Mar 20 00:11 Jun 20 17:46 Sep 22 09:44 Dec 21 06:33
2041 Mar 20 06:07 Jun 20 23:37 Sep 22 15:27 Dec 21 12:19
2042 Mar 20 11:53 Jun 21 05:16 Sep 22 21:11 Dec 21 18:04
2043 Mar 20 17:29 Jun 21 10:59 Sep 23 03:07 Dec 22 00:02
2044 Mar 19 23:20 Jun 20 16:50 Sep 22 08:47 Dec 21 05:43
2045 Mar 20 05:08 Jun 20 22:34 Sep 22 14:33 Dec 21 11:36
2046 Mar 20 10:58 Jun 21 04:15 Sep 22 20:22 Dec 21 17:28
2047 Mar 20 16:52 Jun 21 10:02 Sep 23 02:07 Dec 21 23:07
2048 Mar 19 22:34 Jun 20 15:54 Sep 22 08:01 Dec 21 05:02
2049 Mar 20 04:28 Jun 20 21:47 Sep 22 13:42 Dec 21 10:51
2050 Mar 20 10:20 Jun 21 03:33 Sep 22 19:29 Dec 21 16:39
";
#[derive(Clone, Copy, Debug, Serialize, Deserialize)]
pub struct YearlyEvents {
pub year: i32,
pub spring_equinox: chrono::DateTime<chrono::Utc>,
pub summer_solstice: chrono::DateTime<chrono::Utc>,
pub autumn_equinox: chrono::DateTime<chrono::Utc>,
pub winter_solstice: chrono::DateTime<chrono::Utc>,
}
#[derive(Clone, Copy, Debug, Serialize, Deserialize)]
pub enum Event {
SpringEquinox(chrono::DateTime<chrono::Utc>),
SummerSolstice(chrono::DateTime<chrono::Utc>),
AutumnEquinox(chrono::DateTime<chrono::Utc>),
WinterSolstice(chrono::DateTime<chrono::Utc>),
}
impl Event {
pub fn date(&self) -> chrono::DateTime<chrono::Utc> {
match *self {
Event::SpringEquinox(d) => d,
Event::SummerSolstice(d) => d,
Event::AutumnEquinox(d) => d,
Event::WinterSolstice(d) => d,
}
}
}
fn parse_time<'a>(
year: &str,
iter: impl Iterator<Item = &'a str>,
) -> chrono::DateTime<chrono::Utc> {
let parts = iter.collect::<Vec<&str>>();
let p = format!("{} {} {} {}", year, parts[0], parts[1], parts[2]);
NaiveDateTime::parse_from_str(&p, "%Y %b %d %H:%M")
.unwrap()
.and_utc()
}
fn parse_line(year: &str, rest: &[&str]) -> YearlyEvents {
let spring = parse_time(year, rest.iter().take(3).cloned());
let summer = parse_time(year, rest.iter().skip(3).take(3).cloned());
let autumn = parse_time(year, rest.iter().skip(6).take(3).cloned());
let winter = parse_time(year, rest.iter().skip(9).take(3).cloned());
YearlyEvents {
year: year.parse::<i32>().unwrap(),
spring_equinox: spring,
summer_solstice: summer,
autumn_equinox: autumn,
winter_solstice: winter,
}
}
fn parse_events() -> Vec<Option<YearlyEvents>> {
SOLSTICE_TEXT
.lines()
.map(|line| {
match line
.split(' ')
.filter(|elem| !elem.is_empty())
.collect::<Vec<&str>>()
.as_slice()
{
[year, rest @ ..] => Some(parse_line(year, rest)),
_ => None,
}
})
.collect()
}
pub struct Solstices(HashMap<i32, YearlyEvents>);
impl Solstices {
pub fn yearly_events(&self, year: i32) -> Option<YearlyEvents> {
self.0.get(&year).copied()
}
pub fn next_event(&self, date: chrono::DateTime<chrono::Utc>) -> Option<Event> {
let year_events = self.0.get(&date.year());
match year_events {
Some(year_events) => {
if date <= year_events.spring_equinox {
Some(Event::SpringEquinox(year_events.spring_equinox))
} else if date <= year_events.summer_solstice {
Some(Event::SummerSolstice(year_events.summer_solstice))
} else if date <= year_events.autumn_equinox {
Some(Event::AutumnEquinox(year_events.autumn_equinox))
} else if date <= year_events.winter_solstice {
Some(Event::WinterSolstice(year_events.winter_solstice))
} else {
self.0
    .get(&(date.year() + 1))
    .map(|next_year| Event::SpringEquinox(next_year.spring_equinox))
}
}
None => None,
}
}
}
impl From<Vec<Option<YearlyEvents>>> for Solstices {
fn from(event_list: Vec<Option<YearlyEvents>>) -> Self {
Solstices(event_list.iter().fold(HashMap::new(), |mut m, record| {
match record {
Some(record) => {
m.insert(record.year, *record);
}
None => (),
}
m
}))
}
}
lazy_static! {
pub static ref EVENTS: Solstices = Solstices::from(parse_events());
}
#[cfg(test)]
mod test {
use chrono::{NaiveDate, NaiveDateTime};
#[test]
fn it_can_parse_a_solstice_time() {
let p = "2001 Mar 20 13:31".to_owned();
let parsed_date = NaiveDateTime::parse_from_str(&p, "%Y %b %d %H:%M")
.unwrap()
.and_utc();
assert_eq!(
parsed_date,
NaiveDate::from_ymd_opt(2001, 03, 20)
.unwrap()
.and_hms_opt(13, 31, 0)
.unwrap()
.and_utc()
);
}
}
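A brief usage sketch (not in the original module) showing how the parsed table is meant to be queried; print_upcoming is a placeholder name and the caller supplies the clock.
fn print_upcoming(now: chrono::DateTime<chrono::Utc>) {
    if let Some(event) = EVENTS.next_event(now) {
        println!("next solar event: {:?} on {}", event, event.date());
    }
    if let Some(year) = EVENTS.yearly_events(now.year()) {
        println!("winter solstice this year: {}", year.winter_solstice);
    }
}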

View File

@ -1,154 +0,0 @@
// 41.78, -71.41
// https://api.solunar.org/solunar/41.78,-71.41,20211029,-4
use chrono::{DateTime, Duration, Local, NaiveTime, Offset, TimeZone, Timelike, Utc};
use geo_types::{Latitude, Longitude};
use memorycache::MemoryCache;
use serde::Deserialize;
const ENDPOINT: &str = "https://api.solunar.org/solunar";
#[derive(Clone, Debug, PartialEq)]
pub struct SunMoon {
pub sunrise: NaiveTime,
pub sunset: NaiveTime,
pub moonrise: Option<NaiveTime>,
pub moonset: Option<NaiveTime>,
pub moon_phase: LunarPhase,
}
impl SunMoon {
fn from_js(val: SunMoonJs) -> Self {
fn parse_time(val: String) -> Option<NaiveTime> {
NaiveTime::parse_from_str(&val, "%H:%M").ok()
}
let sunrise = parse_time(val.sunrise).unwrap();
let sunset = parse_time(val.sunset).unwrap();
let moonrise = val.moonrise.and_then(parse_time);
let moonset = val.moonset.and_then(parse_time);
Self {
sunrise,
sunset,
moonrise,
moonset,
moon_phase: val.moon_phase,
}
}
}
#[derive(Clone, Debug, Deserialize)]
pub(crate) struct SunMoonJs {
#[serde(alias = "sunRise")]
sunrise: String,
#[serde(alias = "sunSet")]
sunset: String,
#[serde(alias = "moonRise")]
moonrise: Option<String>,
#[serde(alias = "moonSet")]
moonset: Option<String>,
#[serde(alias = "moonPhase")]
moon_phase: LunarPhase,
}
#[derive(Clone, Debug, Deserialize, PartialEq)]
pub enum LunarPhase {
#[serde(alias = "New Moon")]
NewMoon,
#[serde(alias = "Waxing Crescent")]
WaxingCrescent,
#[serde(alias = "First Quarter")]
FirstQuarter,
#[serde(alias = "Waxing Gibbous")]
WaxingGibbous,
#[serde(alias = "Full Moon")]
FullMoon,
#[serde(alias = "Waning Gibbous")]
WaningGibbous,
#[serde(alias = "Last Quarter")]
LastQuarter,
#[serde(alias = "Waning Crescent")]
WaningCrescent,
}
pub struct SolunaClient {
client: reqwest::Client,
memory_cache: MemoryCache<SunMoonJs>,
}
impl SolunaClient {
pub fn new() -> Self {
Self {
client: reqwest::Client::new(),
memory_cache: MemoryCache::default(),
}
}
pub async fn request<Tz: TimeZone>(
&self,
latitude: Latitude,
longitude: Longitude,
day: DateTime<Tz>,
) -> SunMoon {
let date = day.date_naive().format("%Y%m%d");
let url = format!(
"{}/{},{},{},{}",
ENDPOINT,
latitude,
longitude,
date,
day.offset().fix().local_minus_utc() / 3600
);
let js = self
.memory_cache
.find(&url, async {
let response = self.client.get(&url).send().await.unwrap();
let expiration = response
.headers()
.get(reqwest::header::EXPIRES)
.and_then(|header| header.to_str().ok())
.and_then(|expiration| DateTime::parse_from_rfc2822(expiration).ok())
.map(DateTime::<Utc>::from)
.unwrap_or(
Local::now()
.with_hour(0)
.and_then(|dt| dt.with_minute(0))
.and_then(|dt| dt.with_second(0))
.and_then(|dt| dt.with_nanosecond(0))
.map(|dt| dt.with_timezone(&Utc))
.unwrap()
+ Duration::days(1),
);
let soluna: SunMoonJs = response.json().await.unwrap();
(expiration, soluna)
})
.await;
SunMoon::from_js(js)
}
}
#[cfg(test)]
mod test {
use super::*;
const EXAMPLE: &str = "{\"sunRise\":\"7:15\",\"sunTransit\":\"12:30\",\"sunSet\":\"17:45\",\"moonRise\":null,\"moonTransit\":\"7:30\",\"moonUnder\":\"19:54\",\"moonSet\":\"15:02\",\"moonPhase\":\"Waning Crescent\",\"moonIllumination\":0.35889454647387764,\"sunRiseDec\":7.25,\"sunTransitDec\":12.5,\"sunSetDec\":17.75,\"moonRiseDec\":null,\"moonSetDec\":15.033333333333333,\"moonTransitDec\":7.5,\"moonUnderDec\":19.9,\"minor1Start\":null,\"minor1Stop\":null,\"minor2StartDec\":14.533333333333333,\"minor2Start\":\"14:32\",\"minor2StopDec\":15.533333333333333,\"minor2Stop\":\"15:32\",\"major1StartDec\":6.5,\"major1Start\":\"06:30\",\"major1StopDec\":8.5,\"major1Stop\":\"08:30\",\"major2StartDec\":18.9,\"major2Start\":\"18:54\",\"major2StopDec\":20.9,\"major2Stop\":\"20:54\",\"dayRating\":1,\"hourlyRating\":{\"0\":20,\"1\":20,\"2\":0,\"3\":0,\"4\":0,\"5\":0,\"6\":20,\"7\":40,\"8\":40,\"9\":20,\"10\":0,\"11\":0,\"12\":0,\"13\":0,\"14\":0,\"15\":20,\"16\":20,\"17\":20,\"18\":40,\"19\":20,\"20\":20,\"21\":20,\"22\":0,\"23\":0}}";
#[test]
fn it_parses_a_response() {
let sun_moon_js: SunMoonJs = serde_json::from_str(EXAMPLE).unwrap();
let sun_moon = SunMoon::from_js(sun_moon_js);
assert_eq!(
sun_moon,
SunMoon {
sunrise: NaiveTime::from_hms_opt(7, 15, 0).unwrap(),
sunset: NaiveTime::from_hms_opt(17, 45, 0).unwrap(),
moonrise: None,
moonset: Some(NaiveTime::from_hms_opt(15, 02, 0).unwrap()),
moon_phase: LunarPhase::WaningCrescent,
}
);
}
}
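A short usage sketch (not in the original module); todays_transit is a placeholder name, the coordinates are arbitrary, and the caller is assumed to be inside an async runtime.
async fn todays_transit() -> SunMoon {
    let client = SolunaClient::new();
    client
        .request(Latitude::from(41.78), Longitude::from(-71.41), Local::now())
        .await
}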

View File

@ -1,13 +0,0 @@
use crate::{
solstices::{Event, YearlyEvents},
soluna_client::SunMoon,
};
use chrono::NaiveDate;
#[derive(Clone, Debug)]
pub struct State {
pub date: NaiveDate,
pub next_event: Event,
pub events: YearlyEvents,
pub transit: Option<SunMoon>,
}

emseries/Cargo.lock (generated, 83 lines changed)
View File

@ -36,9 +36,9 @@ dependencies = [
[[package]] [[package]]
name = "chrono-tz" name = "chrono-tz"
version = "0.8.2" version = "0.6.1"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "cf9cc2b23599e6d7479755f3594285efb3f74a1bdca7a7374948bc831e23a552" checksum = "58549f1842da3080ce63002102d5bc954c7bc843d4f47818e642abdc36253552"
dependencies = [ dependencies = [
"chrono", "chrono",
"chrono-tz-build", "chrono-tz-build",
@ -48,9 +48,9 @@ dependencies = [
[[package]] [[package]]
name = "chrono-tz-build" name = "chrono-tz-build"
version = "0.1.0" version = "0.0.2"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "d9998fb9f7e9b2111641485bf8beb32f92945f97f92a3d061f744cfef335f751" checksum = "db058d493fb2f65f41861bfed7e3fe6335264a9f0f92710cab5bdf01fef09069"
dependencies = [ dependencies = [
"parse-zoneinfo", "parse-zoneinfo",
"phf", "phf",
@ -71,7 +71,7 @@ dependencies = [
[[package]] [[package]]
name = "emseries" name = "emseries"
version = "0.6.0" version = "0.5.1"
dependencies = [ dependencies = [
"chrono", "chrono",
"chrono-tz", "chrono-tz",
@ -82,6 +82,7 @@ dependencies = [
"tempfile", "tempfile",
"thiserror", "thiserror",
"uuid", "uuid",
"yaml-rust",
] ]
[[package]] [[package]]
@ -134,6 +135,12 @@ version = "0.2.124"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "21a41fed9d98f27ab1c6d161da622a4fa35e8a54a8adc24bbf3ddd0ef70b0e50" checksum = "21a41fed9d98f27ab1c6d161da622a4fa35e8a54a8adc24bbf3ddd0ef70b0e50"
[[package]]
name = "linked-hash-map"
version = "0.5.4"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "7fb9b38af92608140b86b693604b9ffcc5824240a484d1ecd4795bacb2fe88f3"
[[package]] [[package]]
name = "num-integer" name = "num-integer"
version = "0.1.44" version = "0.1.44"
@ -164,18 +171,18 @@ dependencies = [
[[package]] [[package]]
name = "phf" name = "phf"
version = "0.11.1" version = "0.10.1"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "928c6535de93548188ef63bb7c4036bd415cd8f36ad25af44b9789b2ee72a48c" checksum = "fabbf1ead8a5bcbc20f5f8b939ee3f5b0f6f281b6ad3468b84656b658b455259"
dependencies = [ dependencies = [
"phf_shared", "phf_shared",
] ]
[[package]] [[package]]
name = "phf_codegen" name = "phf_codegen"
version = "0.11.1" version = "0.10.0"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "a56ac890c5e3ca598bbdeaa99964edb5b0258a583a9eb6ef4e89fc85d9224770" checksum = "4fb1c3a8bc4dd4e5cfce29b44ffc14bedd2ee294559a294e2a4d4c9e9a6a13cd"
dependencies = [ dependencies = [
"phf_generator", "phf_generator",
"phf_shared", "phf_shared",
@ -183,9 +190,9 @@ dependencies = [
[[package]] [[package]]
name = "phf_generator" name = "phf_generator"
version = "0.11.1" version = "0.10.0"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "b1181c94580fa345f50f19d738aaa39c0ed30a600d95cb2d3e23f94266f14fbf" checksum = "5d5285893bb5eb82e6aaf5d59ee909a06a16737a8970984dd7746ba9283498d6"
dependencies = [ dependencies = [
"phf_shared", "phf_shared",
"rand", "rand",
@ -193,13 +200,20 @@ dependencies = [
[[package]] [[package]]
name = "phf_shared" name = "phf_shared"
version = "0.11.1" version = "0.10.0"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "e1fb5f6f826b772a8d4c0394209441e7d37cbbb967ae9c7e0e8134365c9ee676" checksum = "b6796ad771acdc0123d2a88dc428b5e38ef24456743ddb1744ed628f9815c096"
dependencies = [ dependencies = [
"siphasher", "siphasher",
"uncased",
] ]
[[package]]
name = "ppv-lite86"
version = "0.2.16"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "eb9f9e6e233e5c4a35559a617bf40a4ec447db2e84c20b55a6f83167b7e57872"
[[package]] [[package]]
name = "proc-macro2" name = "proc-macro2"
version = "1.0.37" version = "1.0.37"
@ -224,6 +238,18 @@ version = "0.8.5"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "34af8d1a0e25924bc5b7c43c079c942339d8f0a8b57c39049bef581b46327404" checksum = "34af8d1a0e25924bc5b7c43c079c942339d8f0a8b57c39049bef581b46327404"
dependencies = [ dependencies = [
"libc",
"rand_chacha",
"rand_core",
]
[[package]]
name = "rand_chacha"
version = "0.3.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "e6c10a63a0fa32252be49d21e7709d4d4baf8d231c2dbce1eaa8141b9b127d88"
dependencies = [
"ppv-lite86",
"rand_core", "rand_core",
] ]
@ -232,6 +258,9 @@ name = "rand_core"
version = "0.6.3" version = "0.6.3"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "d34f1408f55294453790c48b2f1ebbb1c5b4b7563eb1f418bcfcfdbb06ebb4e7" checksum = "d34f1408f55294453790c48b2f1ebbb1c5b4b7563eb1f418bcfcfdbb06ebb4e7"
dependencies = [
"getrandom",
]
[[package]] [[package]]
name = "redox_syscall" name = "redox_syscall"
@ -363,9 +392,18 @@ dependencies = [
[[package]] [[package]]
name = "typenum" name = "typenum"
version = "1.16.0" version = "1.15.0"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "497961ef93d974e23eb6f433eb5fe1b7930b659f06d12dec6fc44a8f554c0bba" checksum = "dcf81ac59edc17cc8697ff311e8f5ef2d99fcbd9817b34cec66f90b6c3dfd987"
[[package]]
name = "uncased"
version = "0.9.6"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "5baeed7327e25054889b9bd4f975f32e5f4c5d434042d59ab6cd4142c0a76ed0"
dependencies = [
"version_check",
]
[[package]] [[package]]
name = "unicode-xid" name = "unicode-xid"
@ -383,6 +421,12 @@ dependencies = [
"serde", "serde",
] ]
[[package]]
name = "version_check"
version = "0.9.4"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "49874b5167b65d7193b8aba1567f5c7d93d001cafc34600cee003eda787e483f"
[[package]] [[package]]
name = "wasi" name = "wasi"
version = "0.10.2+wasi-snapshot-preview1" version = "0.10.2+wasi-snapshot-preview1"
@ -410,3 +454,12 @@ name = "winapi-x86_64-pc-windows-gnu"
version = "0.4.0" version = "0.4.0"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "712e227841d057c1ee1cd2fb22fa7e5a5461ae8e48fa2ca79ec42cfc1931183f" checksum = "712e227841d057c1ee1cd2fb22fa7e5a5461ae8e48fa2ca79ec42cfc1931183f"
[[package]]
name = "yaml-rust"
version = "0.4.5"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "56c1936c4cc7a1c9ab21a1ebb602eb942ba868cbd44a99cb7cdc5892335e1c85"
dependencies = [
"linked-hash-map",
]

View File

@ -1,15 +1,14 @@
[package] [package]
name = "emseries" name = "emseries"
version = "0.6.0" version = "0.5.1"
authors = ["Savanni D'Gerinel <savanni@luminescent-dreams.com>"] authors = ["Savanni D'Gerinel <savanni@luminescent-dreams.com>"]
description = "an Embedded Time Series database" description = "an Embedded Time Series database"
license = "GPL-3.0-only" license = "GPL-3.0-only"
# license-file = "../COPYING" license-file = "../COPYING"
documentation = "https://docs.rs/emseries" documentation = "https://docs.rs/emseries"
homepage = "https://github.com/luminescent-dreams/emseries" homepage = "https://github.com/luminescent-dreams/emseries"
repository = "https://github.com/luminescent-dreams/emseries" repository = "https://github.com/luminescent-dreams/emseries"
categories = ["database-implementations"] categories = ["database-implementations"]
edition = "2021"
include = [ include = [
"**/*.rs", "**/*.rs",
@ -19,13 +18,14 @@ include = [
[dependencies] [dependencies]
chrono = { version = "0.4", features = ["serde"] } chrono = { version = "0.4", features = ["serde"] }
chrono-tz = { version = "0.8", features = ["serde"] } chrono-tz = { version = "0.6", features = ["serde"] }
dimensioned = { version = "0.7", features = ["serde"] }
serde = "1" serde = "1"
serde_derive = "1" serde_derive = "1"
serde_json = "1.0" serde_json = "1.0"
thiserror = "1.0" thiserror = "1.0"
uuid = { version = "0.8", features = ["v4", "serde"] } uuid = { version = "0.8", features = ["v4", "serde"] }
yaml-rust = "0.4"
[dev-dependencies] [dev-dependencies]
tempfile = "3.1" tempfile = "3.1"
dimensioned = { version = "0.7", features = ["serde"] }

emseries/Makefile (new file, 9 lines)
View File

@ -0,0 +1,9 @@
dev:
cargo watch -x build
test:
cargo watch -x test
test-once:
cargo test

View File

@ -0,0 +1,2 @@
{"data":{"weight":77.79109,"date":"2003-11-10T06:00:00.000000000000Z"},"id":"3330c5b0-783f-4919-b2c4-8169c38f65ff"}
{"data":{"weight":77.56429,"date":"2003-11-11T06:00:00.000000000000Z"},"id":"54c10502-030e-43d2-9ca6-df2f9a5a5ddf"}

View File

@ -10,7 +10,8 @@ Luminescent Dreams Tools is distributed in the hope that it will be useful, but
You should have received a copy of the GNU General Public License along with Lumeto. If not, see <https://www.gnu.org/licenses/>. You should have received a copy of the GNU General Public License along with Lumeto. If not, see <https://www.gnu.org/licenses/>.
*/ */
use crate::types::{Recordable, Timestamp}; use date_time_tz::DateTimeTz;
use types::Recordable;
/// This trait is used for constructing queries for searching the database. /// This trait is used for constructing queries for searching the database.
pub trait Criteria { pub trait Criteria {
@ -44,7 +45,7 @@ pub struct Or<A: Criteria, B: Criteria> {
/// Specify the starting time for a search. This consists of a UTC timestamp and a specifier as to /// Specify the starting time for a search. This consists of a UTC timestamp and a specifier as to
/// whether the exact time is included in the search criteria. /// whether the exact time is included in the search criteria.
pub struct StartTime { pub struct StartTime {
pub time: Timestamp, pub time: DateTimeTz,
pub incl: bool, pub incl: bool,
} }
@ -61,7 +62,7 @@ impl Criteria for StartTime {
/// Specify the ending time for a search. This consists of a UTC timestamp and a specifier as to /// Specify the ending time for a search. This consists of a UTC timestamp and a specifier as to
/// whether the exact time is included in the search criteria. /// whether the exact time is included in the search criteria.
pub struct EndTime { pub struct EndTime {
pub time: Timestamp, pub time: DateTimeTz,
pub incl: bool, pub incl: bool,
} }
@ -88,7 +89,7 @@ impl Criteria for Tags {
} }
/// Specify a criteria that searches for records matching an exact time. /// Specify a criteria that searches for records matching an exact time.
pub fn exact_time(time: Timestamp) -> And<StartTime, EndTime> { pub fn exact_time(time: DateTimeTz) -> And<StartTime, EndTime> {
And { And {
lside: StartTime { lside: StartTime {
time: time.clone(), time: time.clone(),
@ -100,9 +101,9 @@ pub fn exact_time(time: Timestamp) -> And<StartTime, EndTime> {
/// Specify a criteria that searches for all records within a time range. /// Specify a criteria that searches for all records within a time range.
pub fn time_range( pub fn time_range(
start: Timestamp, start: DateTimeTz,
start_incl: bool, start_incl: bool,
end: Timestamp, end: DateTimeTz,
end_incl: bool, end_incl: bool,
) -> And<StartTime, EndTime> { ) -> And<StartTime, EndTime> {
And { And {

View File

@ -0,0 +1,166 @@
/*
Copyright 2020-2023, Savanni D'Gerinel <savanni@luminescent-dreams.com>
This file is part of the Luminescent Dreams Tools.
Luminescent Dreams Tools is free software: you can redistribute it and/or modify it under the terms of the GNU General Public License as published by the Free Software Foundation, either version 3 of the License, or (at your option) any later version.
Luminescent Dreams Tools is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for more details.
You should have received a copy of the GNU General Public License along with Lumeto. If not, see <https://www.gnu.org/licenses/>.
*/
extern crate chrono;
extern crate chrono_tz;
use chrono::SecondsFormat;
use chrono_tz::Etc::UTC;
use serde::de::{self, Deserialize, Deserializer, Visitor};
use serde::ser::{Serialize, Serializer};
use std::fmt;
/// This is a wrapper around date time objects, using timezones from the chrono-tz database and
/// providing string representation and parsing of the form "<RFC3339> <Timezone Name>", e.g.,
/// "2019-05-15T14:30:00Z US/Central". The to_string method and serde serialization will
/// produce a string of this format. The parser will accept an RFC3339-only string of the forms
/// "2019-05-15T14:30:00Z" and "2019-05-15T14:30:00+00:00", as well as an "RFC3339 Timezone Name"
/// string.
///
/// The goal is to produce time/date strings that are as close to unambiguous as possible (for
/// Earth's frame of reference). Keeping the time, the offset from UTC, and the named time zone
/// together lets future parsers know the exact interpretation of the time in the frame of
/// reference of the original recording.
#[derive(Clone, Debug, PartialEq, Eq, PartialOrd, Ord)]
pub struct DateTimeTz(pub chrono::DateTime<chrono_tz::Tz>);
impl DateTimeTz {
pub fn map<F>(&self, f: F) -> DateTimeTz
where
F: FnOnce(chrono::DateTime<chrono_tz::Tz>) -> chrono::DateTime<chrono_tz::Tz>,
{
DateTimeTz(f(self.0))
}
pub fn to_string(&self) -> String {
if self.0.timezone() == UTC {
self.0.to_rfc3339_opts(SecondsFormat::Secs, true)
} else {
format!(
"{} {}",
self.0
.with_timezone(&chrono_tz::Etc::UTC)
.to_rfc3339_opts(SecondsFormat::Secs, true,),
self.0.timezone().name()
)
}
}
pub fn from_str(s: &str) -> Result<DateTimeTz, chrono::ParseError> {
let v: Vec<&str> = s.split_terminator(" ").collect();
if v.len() == 2 {
let tz = v[1].parse::<chrono_tz::Tz>().unwrap();
chrono::DateTime::parse_from_rfc3339(v[0]).map(|ts| DateTimeTz(ts.with_timezone(&tz)))
} else {
chrono::DateTime::parse_from_rfc3339(v[0]).map(|ts| DateTimeTz(ts.with_timezone(&UTC)))
}
}
}
struct DateTimeTzVisitor;
impl<'de> Visitor<'de> for DateTimeTzVisitor {
type Value = DateTimeTz;
fn expecting(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
formatter.write_str("a string date time representation that can be parsed")
}
fn visit_str<E: de::Error>(self, s: &str) -> Result<Self::Value, E> {
DateTimeTz::from_str(s).or(Err(E::custom(format!(
"string is not a parsable datetime representation"
))))
}
}
impl Serialize for DateTimeTz {
fn serialize<S: Serializer>(&self, serializer: S) -> Result<S::Ok, S::Error> {
serializer.serialize_str(&self.to_string())
}
}
impl<'de> Deserialize<'de> for DateTimeTz {
fn deserialize<D: Deserializer<'de>>(deserializer: D) -> Result<Self, D::Error> {
deserializer.deserialize_str(DateTimeTzVisitor)
}
}
#[cfg(test)]
mod test {
extern crate serde_json;
use chrono::TimeZone;
use chrono_tz::America::Phoenix;
use chrono_tz::Etc::UTC;
use chrono_tz::US::{Arizona, Central};
use date_time_tz::DateTimeTz;
#[test]
fn it_creates_timestamp_with_z() {
let t = DateTimeTz(UTC.ymd(2019, 5, 15).and_hms(12, 0, 0));
assert_eq!(t.to_string(), "2019-05-15T12:00:00Z");
}
#[test]
fn it_parses_utc_rfc3339_z() {
let t = DateTimeTz::from_str("2019-05-15T12:00:00Z").unwrap();
assert_eq!(t, DateTimeTz(UTC.ymd(2019, 5, 15).and_hms(12, 0, 0)));
}
#[test]
fn it_parses_rfc3339_with_offset() {
let t = DateTimeTz::from_str("2019-05-15T12:00:00-06:00").unwrap();
assert_eq!(t, DateTimeTz(UTC.ymd(2019, 5, 15).and_hms(18, 0, 0)));
}
#[test]
fn it_parses_rfc3339_with_tz() {
let t = DateTimeTz::from_str("2019-06-15T19:00:00Z US/Arizona").unwrap();
assert_eq!(t, DateTimeTz(UTC.ymd(2019, 6, 15).and_hms(19, 0, 0)));
assert_eq!(t, DateTimeTz(Arizona.ymd(2019, 6, 15).and_hms(12, 0, 0)));
assert_eq!(t, DateTimeTz(Central.ymd(2019, 6, 15).and_hms(14, 0, 0)));
assert_eq!(t.to_string(), "2019-06-15T19:00:00Z US/Arizona");
}
#[derive(Serialize)]
struct DemoStruct {
id: String,
dt: DateTimeTz,
}
// I used Arizona here specifically because large parts of Arizona do not honor DST, and so
// that adds in more ambiguity of the -0700 offset with Pacific time.
#[test]
fn it_json_serializes() {
let t = DateTimeTz::from_str("2019-06-15T19:00:00Z America/Phoenix").unwrap();
assert_eq!(
serde_json::to_string(&t).unwrap(),
"\"2019-06-15T19:00:00Z America/Phoenix\""
);
let demo = DemoStruct {
id: String::from("abcdefg"),
dt: t,
};
assert_eq!(
serde_json::to_string(&demo).unwrap(),
"{\"id\":\"abcdefg\",\"dt\":\"2019-06-15T19:00:00Z America/Phoenix\"}"
);
}
#[test]
fn it_json_parses() {
let t =
serde_json::from_str::<DateTimeTz>("\"2019-06-15T19:00:00Z America/Phoenix\"").unwrap();
assert_eq!(t, DateTimeTz(Phoenix.ymd(2019, 6, 15).and_hms(12, 0, 0)));
}
}

View File

@ -71,9 +71,11 @@ extern crate thiserror;
extern crate uuid; extern crate uuid;
mod criteria; mod criteria;
mod date_time_tz;
mod series; mod series;
mod types; mod types;
pub use criteria::*; pub use criteria::*;
pub use date_time_tz::DateTimeTz;
pub use series::Series; pub use series::Series;
pub use types::{EmseriesReadError, EmseriesWriteError, Record, RecordId, Recordable, Timestamp}; pub use types::{EmseriesReadError, EmseriesWriteError, Recordable, UniqueId};

View File

@ -10,55 +10,21 @@ Luminescent Dreams Tools is distributed in the hope that it will be useful, but
You should have received a copy of the GNU General Public License along with Lumeto. If not, see <https://www.gnu.org/licenses/>. You should have received a copy of the GNU General Public License along with Lumeto. If not, see <https://www.gnu.org/licenses/>.
*/ */
extern crate serde;
extern crate serde_json;
extern crate uuid;
use serde::de::DeserializeOwned; use serde::de::DeserializeOwned;
use serde::ser::Serialize; use serde::ser::Serialize;
use std::cmp::Ordering; use std::cmp::Ordering;
use std::collections::HashMap; use std::collections::HashMap;
use std::convert::TryFrom;
use std::fs::File; use std::fs::File;
use std::fs::OpenOptions; use std::fs::OpenOptions;
use std::io::{BufRead, BufReader, LineWriter, Write}; use std::io::{BufRead, BufReader, LineWriter, Write};
use std::iter::Iterator; use std::iter::Iterator;
use crate::criteria::Criteria; use criteria::Criteria;
use crate::types::{EmseriesReadError, EmseriesWriteError, Record, RecordId, Recordable}; use types::{EmseriesReadError, EmseriesWriteError, Record, Recordable, UniqueId};
// A RecordOnDisk, a private data structure, is useful for handling all of the on-disk
// representations of a record. Unlike [Record], this one can accept an empty data value to
// represent that the data may have been deleted. This is not made public because, so far as the
// user is concerned, any record in the system must have data associated with it.
#[derive(Clone, Deserialize, Serialize)]
struct RecordOnDisk<T: Clone + Recordable> {
id: RecordId,
data: Option<T>,
}
/*
impl<T> FromStr for RecordOnDisk<T>
where
T: Clone + Recordable + DeserializeOwned + Serialize,
{
type Err = EmseriesReadError;
fn from_str(line: &str) -> Result<Self, Self::Err> {
serde_json::from_str(line).map_err(EmseriesReadError::JSONParseError)
}
}
*/
impl<T: Clone + Recordable> TryFrom<RecordOnDisk<T>> for Record<T> {
type Error = EmseriesReadError;
fn try_from(disk_record: RecordOnDisk<T>) -> Result<Self, Self::Error> {
match disk_record.data {
Some(data) => Ok(Record {
id: disk_record.id,
data,
}),
None => Err(Self::Error::RecordDeleted(disk_record.id)),
}
}
}
/// An open time series database. /// An open time series database.
/// ///
@ -67,7 +33,7 @@ impl<T: Clone + Recordable> TryFrom<RecordOnDisk<T>> for Record<T> {
pub struct Series<T: Clone + Recordable + DeserializeOwned + Serialize> { pub struct Series<T: Clone + Recordable + DeserializeOwned + Serialize> {
//path: String, //path: String,
writer: LineWriter<File>, writer: LineWriter<File>,
records: HashMap<RecordId, Record<T>>, records: HashMap<UniqueId, T>,
} }
impl<T> Series<T> impl<T> Series<T>
@ -76,12 +42,12 @@ where
{ {
/// Open a time series database at the specified path. `path` is the full path and filename for /// Open a time series database at the specified path. `path` is the full path and filename for
/// the database. /// the database.
pub fn open<P: AsRef<std::path::Path>>(path: P) -> Result<Series<T>, EmseriesReadError> { pub fn open(path: &str) -> Result<Series<T>, EmseriesReadError> {
let f = OpenOptions::new() let f = OpenOptions::new()
.read(true) .read(true)
.append(true) .append(true)
.create(true) .create(true)
.open(path) .open(&path)
.map_err(EmseriesReadError::IOError)?; .map_err(EmseriesReadError::IOError)?;
let records = Series::load_file(&f)?; let records = Series::load_file(&f)?;
@ -96,18 +62,20 @@ where
} }
/// Load a file and return all of the records in it. /// Load a file and return all of the records in it.
fn load_file(f: &File) -> Result<HashMap<RecordId, Record<T>>, EmseriesReadError> { fn load_file(f: &File) -> Result<HashMap<UniqueId, T>, EmseriesReadError> {
let mut records: HashMap<RecordId, Record<T>> = HashMap::new(); let mut records: HashMap<UniqueId, T> = HashMap::new();
let reader = BufReader::new(f); let reader = BufReader::new(f);
for line in reader.lines() { for line in reader.lines() {
match line { match line {
Ok(line_) => { Ok(line_) => {
match serde_json::from_str::<RecordOnDisk<T>>(line_.as_ref()) /* Can't create a JSONParseError because I can't actually create the underlying error.
.map_err(EmseriesReadError::JSONParseError) fail_point!("parse-line", Err(Error::JSONParseError()))
.and_then(Record::try_from) */
{ match line_.parse::<Record<T>>() {
Ok(record) => records.insert(record.id, record.clone()), Ok(record) => match record.data {
Err(EmseriesReadError::RecordDeleted(id)) => records.remove(&id), Some(val) => records.insert(record.id.clone(), val),
None => records.remove(&record.id.clone()),
},
Err(err) => return Err(err), Err(err) => return Err(err),
}; };
} }
@ -119,20 +87,18 @@ where
/// Put a new record into the database. A unique id will be assigned to the record and /// Put a new record into the database. A unique id will be assigned to the record and
/// returned. /// returned.
pub fn put(&mut self, entry: T) -> Result<RecordId, EmseriesWriteError> { pub fn put(&mut self, entry: T) -> Result<UniqueId, EmseriesWriteError> {
let id = RecordId::default(); let uuid = UniqueId::new();
let record = Record { id, data: entry }; self.update(uuid.clone(), entry).and_then(|_| Ok(uuid))
self.update(record)?;
Ok(id)
} }
/// Update an existing record. The [RecordId] of the record passed into this function must match /// Update an existing record. The `UniqueId` of the record passed into this function must match
/// the [RecordId] of a record already in the database. /// the `UniqueId` of a record already in the database.
pub fn update(&mut self, record: Record<T>) -> Result<(), EmseriesWriteError> { pub fn update(&mut self, uuid: UniqueId, entry: T) -> Result<(), EmseriesWriteError> {
self.records.insert(record.id, record.clone()); self.records.insert(uuid.clone(), entry.clone());
let write_res = match serde_json::to_string(&RecordOnDisk { let write_res = match serde_json::to_string(&Record {
id: record.id, id: uuid,
data: Some(record.data), data: Some(entry),
}) { }) {
Ok(rec_str) => self Ok(rec_str) => self
.writer .writer
@ -152,14 +118,14 @@ where
/// Future note: while this deletes a record from the view, it only adds an entry to the /// Future note: while this deletes a record from the view, it only adds an entry to the
/// database that indicates `data: null`. If record histories ever become important, the record /// database that indicates `data: null`. If record histories ever become important, the record
/// and its entire history (including this delete) will still be available. /// and its entire history (including this delete) will still be available.
pub fn delete(&mut self, uuid: &RecordId) -> Result<(), EmseriesWriteError> { pub fn delete(&mut self, uuid: &UniqueId) -> Result<(), EmseriesWriteError> {
if !self.records.contains_key(uuid) { if !self.records.contains_key(uuid) {
return Ok(()); return Ok(());
}; };
self.records.remove(uuid); self.records.remove(uuid);
let rec: RecordOnDisk<T> = RecordOnDisk { let rec: Record<T> = Record {
id: *uuid, id: uuid.clone(),
data: None, data: None,
}; };
match serde_json::to_string(&rec) { match serde_json::to_string(&rec) {
@ -172,8 +138,8 @@ where
} }
/// Get all of the records in the database. /// Get all of the records in the database.
pub fn records(&self) -> impl Iterator<Item = &Record<T>> { pub fn records<'s>(&'s self) -> impl Iterator<Item = (&'s UniqueId, &'s T)> + 's {
self.records.values() self.records.iter()
} }
/* The point of having Search is so that a lot of internal optimizations can happen once the /* The point of having Search is so that a lot of internal optimizations can happen once the
@ -182,29 +148,29 @@ where
pub fn search<'s>( pub fn search<'s>(
&'s self, &'s self,
criteria: impl Criteria + 's, criteria: impl Criteria + 's,
) -> impl Iterator<Item = &'s Record<T>> + 's { ) -> impl Iterator<Item = (&'s UniqueId, &'s T)> + 's {
self.records().filter(move |&tr| criteria.apply(&tr.data)) self.records().filter(move |&tr| criteria.apply(tr.1))
} }
/// Perform a search and sort the resulting records based on the comparison. /// Perform a search and sort the resulting records based on the comparison.
pub fn search_sorted<'s, C, CMP>(&'s self, criteria: C, compare: CMP) -> Vec<&'s Record<T>> pub fn search_sorted<'s, C, CMP>(&'s self, criteria: C, compare: CMP) -> Vec<(&UniqueId, &T)>
where where
C: Criteria + 's, C: Criteria + 's,
CMP: FnMut(&&Record<T>, &&Record<T>) -> Ordering, CMP: FnMut(&(&UniqueId, &T), &(&UniqueId, &T)) -> Ordering,
{ {
let search_iter = self.search(criteria); let search_iter = self.search(criteria);
let mut records: Vec<&Record<T>> = search_iter.collect(); let mut records: Vec<(&UniqueId, &T)> = search_iter.collect();
records.sort_by(compare); records.sort_by(compare);
records records
} }
/// Get an exact record from the database based on unique id. /// Get an exact record from the database based on unique id.
pub fn get(&self, uuid: &RecordId) -> Option<Record<T>> { pub fn get(&self, uuid: &UniqueId) -> Option<T> {
self.records.get(uuid).cloned() self.records.get(uuid).map(|v| v.clone())
} }
/* /*
pub fn remove(&self, uuid: RecordId) -> Result<(), EmseriesError> { pub fn remove(&self, uuid: UniqueId) -> Result<(), EmseriesError> {
unimplemented!() unimplemented!()
} }
*/ */

View File

@ -10,8 +10,10 @@ Luminescent Dreams Tools is distributed in the hope that it will be useful, but
You should have received a copy of the GNU General Public License along with Lumeto. If not, see <https://www.gnu.org/licenses/>. You should have received a copy of the GNU General Public License along with Lumeto. If not, see <https://www.gnu.org/licenses/>.
*/ */
use chrono::{DateTime, FixedOffset, NaiveDate}; use date_time_tz::DateTimeTz;
use std::{cmp::Ordering, fmt, io, str}; use serde::de::DeserializeOwned;
use serde::ser::Serialize;
use std::{fmt, io, str};
use thiserror::Error; use thiserror::Error;
use uuid::Uuid; use uuid::Uuid;
@ -25,9 +27,6 @@ pub enum EmseriesReadError {
#[error("Error parsing JSON: {0}")] #[error("Error parsing JSON: {0}")]
JSONParseError(serde_json::error::Error), JSONParseError(serde_json::error::Error),
#[error("Record was deleted")]
RecordDeleted(RecordId),
/// Indicates a general IO error /// Indicates a general IO error
#[error("IO Error: {0}")] #[error("IO Error: {0}")]
IOError(io::Error), IOError(io::Error),
@ -44,74 +43,11 @@ pub enum EmseriesWriteError {
JSONWriteError(serde_json::error::Error), JSONWriteError(serde_json::error::Error),
} }
#[derive(Debug, Clone, PartialEq, Eq)]
/// A Timestamp, stored with reference to human reckoning. This could be either a Naive Date or a
/// date and a time with a timezone. The idea of the "human reckoning" is that, no matter what
/// timezone the record was created in, we want to group things based on the date that the human
/// was perceiving at the time it was recorded.
pub enum Timestamp {
DateTime(DateTime<FixedOffset>),
Date(NaiveDate),
}
#[derive(Debug, Clone, Serialize, Deserialize)]
#[serde(untagged)]
pub enum TimestampJS {
DateTime(String),
Date(String),
}
impl From<Timestamp> for TimestampJS {
fn from(s: Timestamp) -> TimestampJS {
match s {
Timestamp::DateTime(ts) => TimestampJS::DateTime(ts.to_rfc3339()),
Timestamp::Date(ts) => TimestampJS::Date(ts.to_string()),
}
}
}
impl From<TimestampJS> for Timestamp {
fn from(s: TimestampJS) -> Timestamp {
match s {
TimestampJS::DateTime(ts) => {
Timestamp::DateTime(DateTime::parse_from_rfc3339(&ts).unwrap())
}
TimestampJS::Date(ts) => Timestamp::Date(ts.parse::<NaiveDate>().unwrap()),
}
}
}
impl str::FromStr for Timestamp {
type Err = chrono::ParseError;
fn from_str(line: &str) -> Result<Self, Self::Err> {
DateTime::parse_from_rfc3339(line)
.map(Timestamp::DateTime)
.or(NaiveDate::from_str(line).map(Timestamp::Date))
}
}
impl PartialOrd for Timestamp {
fn partial_cmp(&self, other: &Timestamp) -> Option<Ordering> {
Some(self.cmp(other))
}
}
impl Ord for Timestamp {
fn cmp(&self, other: &Timestamp) -> Ordering {
match (self, other) {
(Timestamp::DateTime(dt1), Timestamp::DateTime(dt2)) => dt1.cmp(dt2),
(Timestamp::DateTime(dt1), Timestamp::Date(dt2)) => dt1.date_naive().cmp(dt2),
(Timestamp::Date(dt1), Timestamp::DateTime(dt2)) => dt1.cmp(&dt2.date_naive()),
(Timestamp::Date(dt1), Timestamp::Date(dt2)) => dt1.cmp(dt2),
}
}
}
/// Any element to be put into the database needs to be Recordable. This is the common API that /// Any element to be put into the database needs to be Recordable. This is the common API that
/// will aid in searching and later in indexing records. /// will aid in searching and later in indexing records.
pub trait Recordable { pub trait Recordable {
/// The timestamp for the record. /// The timestamp for the record.
fn timestamp(&self) -> Timestamp; fn timestamp(&self) -> DateTimeTz;
/// A list of string tags that can be used for indexing. This list is defined per-type. /// A list of string tags that can be used for indexing. This list is defined per-type.
fn tags(&self) -> Vec<String>; fn tags(&self) -> Vec<String>;
@ -120,88 +56,78 @@ pub trait Recordable {
/// Uniquely identifies a record. /// Uniquely identifies a record.
/// ///
/// This is a wrapper around a basic uuid with some extra convenience methods. /// This is a wrapper around a basic uuid with some extra convenience methods.
#[derive(Clone, Copy, Debug, Eq, Hash, PartialEq, Deserialize, Serialize)] #[derive(Clone, Debug, Eq, Hash, PartialEq, Deserialize, Serialize)]
pub struct RecordId(Uuid); pub struct UniqueId(Uuid);
impl Default for RecordId { impl UniqueId {
fn default() -> Self { /// Create a new V4 UUID (this is the most common type in use these days).
Self(Uuid::new_v4()) pub fn new() -> UniqueId {
let id = Uuid::new_v4();
UniqueId(id)
} }
} }
impl str::FromStr for RecordId { impl str::FromStr for UniqueId {
type Err = EmseriesReadError; type Err = EmseriesReadError;
/// Parse a RecordId from a string. Raises UUIDParseError if the parsing fails. /// Parse a UniqueId from a string. Raises UUIDParseError if the parsing fails.
fn from_str(val: &str) -> Result<Self, Self::Err> { fn from_str(val: &str) -> Result<Self, Self::Err> {
Uuid::parse_str(val) Uuid::parse_str(val)
.map(RecordId) .map(UniqueId)
.map_err(EmseriesReadError::UUIDParseError) .map_err(|err| EmseriesReadError::UUIDParseError(err))
} }
} }
impl fmt::Display for RecordId { impl fmt::Display for UniqueId {
/// Convert to a hyphenated string /// Convert to a hyphenated string
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> Result<(), std::fmt::Error> { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> Result<(), std::fmt::Error> {
write!(f, "{}", self.0.to_hyphenated()) write!(f, "{}", self.0.to_hyphenated().to_string())
} }
} }
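// Round-trip sketch: `Display` renders the hyphenated form of the UUID and `FromStr`
// parses it back into the same id. The function name is illustrative only.
fn record_id_round_trip_sketch() {
    let id = RecordId::default();
    let parsed: RecordId = id
        .to_string()
        .parse()
        .expect("a hyphenated UUID should parse");
    assert_eq!(id, parsed);
}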
/// A record represents data that actually exists in the database. Users cannot make the record /// Every record contains a unique ID and then the primary data, which itself must implement the
/// directly, as the database will create them. /// Recordable trait.
#[derive(Clone, Debug, PartialEq, Eq, Deserialize, Serialize)] #[derive(Clone, Deserialize, Serialize)]
pub struct Record<T: Clone + Recordable> { pub struct Record<T: Clone + Recordable> {
pub id: RecordId, pub id: UniqueId,
pub data: T, pub data: Option<T>,
} }
impl<T: Clone + Recordable> Record<T> { impl<T> str::FromStr for Record<T>
pub fn date(&self) -> NaiveDate { where
match self.data.timestamp() { T: Clone + Recordable + DeserializeOwned + Serialize,
Timestamp::DateTime(dt) => dt.date_naive(), {
Timestamp::Date(dt) => dt, type Err = EmseriesReadError;
}
}
pub fn timestamp(&self) -> Timestamp { fn from_str(line: &str) -> Result<Self, Self::Err> {
self.data.timestamp() serde_json::from_str(&line).map_err(|err| EmseriesReadError::JSONParseError(err))
}
pub fn map<Map, U>(self, map: Map) -> Record<U>
where
Map: Fn(T) -> U,
U: Clone + Recordable,
{
Record {
id: self.id,
data: map(self.data),
}
} }
} }
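// Records come back from the database rather than being built by hand; the usual flow,
// mirrored in the integration tests later in this diff, is to fetch one, adjust its
// `data`, and hand it back for an update. `series` and `trip_id` are illustrative names
// for an open `Series<BikeTrip>` and an id returned by an earlier `put`.
//
//     if let Some(mut trip) = series.get(&trip_id) {
//         trip.data.distance = Distance(50000.0 * M);
//         series.update(trip).expect("expect record to update");
//     }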
#[cfg(test)] #[cfg(test)]
mod test { mod test {
extern crate dimensioned; extern crate dimensioned;
extern crate serde_json; extern crate serde_json;
use self::dimensioned::si::{Kilogram, KG}; use self::dimensioned::si::{Kilogram, KG};
use super::*; use super::{Record, Recordable};
use chrono::TimeZone; use chrono::TimeZone;
use chrono_tz::Etc::UTC; use chrono_tz::Etc::UTC;
use chrono_tz::US::Central;
use date_time_tz::DateTimeTz;
#[derive(Clone, Debug, PartialEq, Deserialize, Serialize)] #[derive(Clone, Debug, PartialEq, Deserialize, Serialize)]
pub struct Weight(Kilogram<f64>); pub struct Weight(Kilogram<f64>);
#[derive(Clone, Debug, PartialEq, Deserialize, Serialize)] #[derive(Clone, Debug, PartialEq, Deserialize, Serialize)]
pub struct WeightRecord { pub struct WeightRecord {
pub date: NaiveDate, pub date: DateTimeTz,
pub weight: Weight, pub weight: Weight,
} }
impl Recordable for WeightRecord { impl Recordable for WeightRecord {
fn timestamp(&self) -> Timestamp { fn timestamp(&self) -> DateTimeTz {
Timestamp::Date(self.date) self.date.clone()
} }
fn tags(&self) -> Vec<String> { fn tags(&self) -> Vec<String> {
@ -209,31 +135,10 @@ mod test {
} }
} }
#[test] const WEIGHT_ENTRY: &str = "{\"data\":{\"weight\":77.79109,\"date\":\"2003-11-10T06:00:00.000000000000Z\"},\"id\":\"3330c5b0-783f-4919-b2c4-8169c38f65ff\"}";
fn timestamp_parses_utc_time() {
assert_eq!(
"2003-11-10T06:00:00Z".parse::<Timestamp>().unwrap(),
Timestamp::DateTime(
UTC.with_ymd_and_hms(2003, 11, 10, 6, 0, 0)
.unwrap()
.with_timezone(&FixedOffset::east_opt(0).unwrap())
),
);
}
#[test] #[test]
fn timestamp_parses_date() { pub fn legacy_deserialization() {
assert_eq!(
"2023-11-10".parse::<Timestamp>().unwrap(),
Timestamp::Date(NaiveDate::from_ymd_opt(2023, 11, 10).unwrap())
);
}
/*
#[ignore]
fn v_alpha_serialization() {
const WEIGHT_ENTRY: &str = "{\"data\":{\"weight\":77.79109},\"date\":\"2003-11-10\",\"id\":\"3330c5b0-783f-4919-b2c4-8169c38f65ff\"}";
let rec: Record<WeightRecord> = WEIGHT_ENTRY let rec: Record<WeightRecord> = WEIGHT_ENTRY
.parse() .parse()
.expect("should successfully parse the record"); .expect("should successfully parse the record");
@ -243,65 +148,31 @@ mod test {
); );
assert_eq!( assert_eq!(
rec.data, rec.data,
WeightRecord { Some(WeightRecord {
date: NaiveDate::from_ymd_opt(2003, 11, 10).unwrap(), date: DateTimeTz(UTC.ymd(2003, 11, 10).and_hms(6, 0, 0)),
weight: Weight(77.79109 * KG), weight: Weight(77.79109 * KG),
} })
); );
} }
*/
#[test] #[test]
fn serialization_output() { pub fn serialization_output() {
let rec = WeightRecord { let rec = WeightRecord {
date: NaiveDate::from_ymd_opt(2003, 11, 10).unwrap(), date: DateTimeTz(UTC.ymd(2003, 11, 10).and_hms(6, 0, 0)),
weight: Weight(77.0 * KG), weight: Weight(77.0 * KG),
}; };
assert_eq!( assert_eq!(
serde_json::to_string(&rec).unwrap(), serde_json::to_string(&rec).unwrap(),
"{\"date\":\"2003-11-10\",\"weight\":77.0}" "{\"date\":\"2003-11-10T06:00:00Z\",\"weight\":77.0}"
); );
let rec2 = WeightRecord { let rec2 = WeightRecord {
date: NaiveDate::from_ymd_opt(2003, 11, 10).unwrap(), date: DateTimeTz(Central.ymd(2003, 11, 10).and_hms(0, 0, 0)),
weight: Weight(77.0 * KG), weight: Weight(77.0 * KG),
}; };
assert_eq!( assert_eq!(
serde_json::to_string(&rec2).unwrap(), serde_json::to_string(&rec2).unwrap(),
"{\"date\":\"2003-11-10\",\"weight\":77.0}" "{\"date\":\"2003-11-10T06:00:00Z US/Central\",\"weight\":77.0}"
); );
} }
#[test]
fn two_datetimes_can_be_compared() {
let time1 = Timestamp::DateTime(
UTC.with_ymd_and_hms(2003, 11, 10, 6, 0, 0)
.unwrap()
.with_timezone(&FixedOffset::east_opt(0).unwrap()),
);
let time2 = Timestamp::DateTime(
UTC.with_ymd_and_hms(2003, 11, 11, 6, 0, 0)
.unwrap()
.with_timezone(&FixedOffset::east_opt(0).unwrap()),
);
assert!(time1 < time2);
}
#[test]
fn two_dates_can_be_compared() {
let time1: Timestamp = Timestamp::Date(NaiveDate::from_ymd_opt(2003, 11, 10).unwrap());
let time2: Timestamp = Timestamp::Date(NaiveDate::from_ymd_opt(2003, 11, 11).unwrap());
assert!(time1 < time2);
}
#[test]
fn datetime_and_date_can_be_compared() {
let time1 = Timestamp::DateTime(
UTC.with_ymd_and_hms(2003, 11, 10, 6, 0, 0)
.unwrap()
.with_timezone(&FixedOffset::east_opt(0).unwrap()),
);
let time2 = Timestamp::Date(NaiveDate::from_ymd_opt(2003, 11, 11).unwrap());
assert!(time1 < time2)
}
} }

View File

@ -20,9 +20,9 @@ extern crate emseries;
#[cfg(test)] #[cfg(test)]
mod test { mod test {
use chrono::{prelude::*}; use chrono::prelude::*;
use chrono_tz::Etc::UTC; use chrono_tz::Etc::UTC;
use dimensioned::si::{Kilogram, Meter, Second, M, S}; use dimensioned::si::{Kilogram, Meter, Second, KG, M, S};
use emseries::*; use emseries::*;
@ -34,15 +34,15 @@ mod test {
#[derive(Clone, Debug, PartialEq, Deserialize, Serialize)] #[derive(Clone, Debug, PartialEq, Deserialize, Serialize)]
struct BikeTrip { struct BikeTrip {
datetime: DateTime<FixedOffset>, datetime: DateTimeTz,
distance: Distance, distance: Distance,
duration: Duration, duration: Duration,
comments: String, comments: String,
} }
impl Recordable for BikeTrip { impl Recordable for BikeTrip {
fn timestamp(&self) -> Timestamp { fn timestamp(&self) -> DateTimeTz {
Timestamp::DateTime(self.datetime) self.datetime.clone()
} }
fn tags(&self) -> Vec<String> { fn tags(&self) -> Vec<String> {
Vec::new() Vec::new()
@ -52,46 +52,31 @@ mod test {
fn mk_trips() -> [BikeTrip; 5] { fn mk_trips() -> [BikeTrip; 5] {
[ [
BikeTrip { BikeTrip {
datetime: UTC datetime: DateTimeTz(UTC.ymd(2011, 10, 29).and_hms(0, 0, 0)),
.with_ymd_and_hms(2011, 10, 29, 0, 0, 0)
.unwrap()
.with_timezone(&FixedOffset::east_opt(0).unwrap()),
distance: Distance(58741.055 * M), distance: Distance(58741.055 * M),
duration: Duration(11040.0 * S), duration: Duration(11040.0 * S),
comments: String::from("long time ago"), comments: String::from("long time ago"),
}, },
BikeTrip { BikeTrip {
datetime: UTC datetime: DateTimeTz(UTC.ymd(2011, 10, 31).and_hms(0, 0, 0)),
.with_ymd_and_hms(2011, 10, 31, 0, 0, 0)
.unwrap()
.with_timezone(&FixedOffset::east_opt(0).unwrap()),
distance: Distance(17702.0 * M), distance: Distance(17702.0 * M),
duration: Duration(2880.0 * S), duration: Duration(2880.0 * S),
comments: String::from("day 2"), comments: String::from("day 2"),
}, },
BikeTrip { BikeTrip {
datetime: UTC datetime: DateTimeTz(UTC.ymd(2011, 11, 02).and_hms(0, 0, 0)),
.with_ymd_and_hms(2011, 11, 02, 0, 0, 0)
.unwrap()
.with_timezone(&FixedOffset::east_opt(0).unwrap()),
distance: Distance(41842.945 * M), distance: Distance(41842.945 * M),
duration: Duration(7020.0 * S), duration: Duration(7020.0 * S),
comments: String::from("Do Some Distance!"), comments: String::from("Do Some Distance!"),
}, },
BikeTrip { BikeTrip {
datetime: UTC datetime: DateTimeTz(UTC.ymd(2011, 11, 04).and_hms(0, 0, 0)),
.with_ymd_and_hms(2011, 11, 04, 0, 0, 0)
.unwrap()
.with_timezone(&FixedOffset::east_opt(0).unwrap()),
distance: Distance(34600.895 * M), distance: Distance(34600.895 * M),
duration: Duration(5580.0 * S), duration: Duration(5580.0 * S),
comments: String::from("I did a lot of distance back then"), comments: String::from("I did a lot of distance back then"),
}, },
BikeTrip { BikeTrip {
datetime: UTC datetime: DateTimeTz(UTC.ymd(2011, 11, 05).and_hms(0, 0, 0)),
.with_ymd_and_hms(2011, 11, 05, 0, 0, 0)
.unwrap()
.with_timezone(&FixedOffset::east_opt(0).unwrap()),
distance: Distance(6437.376 * M), distance: Distance(6437.376 * M),
duration: Duration(960.0 * S), duration: Duration(960.0 * S),
comments: String::from("day 5"), comments: String::from("day 5"),
@ -99,7 +84,7 @@ mod test {
] ]
} }
fn run_test<T>(test: T) fn run_test<T>(test: T) -> ()
where where
T: FnOnce(tempfile::TempPath), T: FnOnce(tempfile::TempPath),
{ {
@ -108,14 +93,14 @@ mod test {
test(tmp_path); test(tmp_path);
} }
fn run<T>(test: T) fn run<T>(test: T) -> ()
where where
T: FnOnce(Series<BikeTrip>), T: FnOnce(Series<BikeTrip>),
{ {
let tmp_file = tempfile::NamedTempFile::new().expect("temporary path created"); let tmp_file = tempfile::NamedTempFile::new().expect("temporary path created");
let tmp_path = tmp_file.into_temp_path(); let tmp_path = tmp_file.into_temp_path();
let ts: Series<BikeTrip> = let ts: Series<BikeTrip> = Series::open(&tmp_path.to_string_lossy())
Series::open(&tmp_path).expect("the time series should open correctly"); .expect("the time series should open correctly");
test(ts); test(ts);
} }
@ -137,15 +122,11 @@ mod test {
Some(tr) => { Some(tr) => {
assert_eq!( assert_eq!(
tr.timestamp(), tr.timestamp(),
Timestamp::DateTime( DateTimeTz(UTC.ymd(2011, 10, 29).and_hms(0, 0, 0))
UTC.with_ymd_and_hms(2011, 10, 29, 0, 0, 0)
.unwrap()
.with_timezone(&FixedOffset::east_opt(0).unwrap())
)
); );
assert_eq!(tr.data.duration, Duration(11040.0 * S)); assert_eq!(tr.duration, Duration(11040.0 * S));
assert_eq!(tr.data.comments, String::from("long time ago")); assert_eq!(tr.comments, String::from("long time ago"));
assert_eq!(tr.data, trips[0]); assert_eq!(tr, trips[0]);
} }
} }
}) })
@ -155,22 +136,20 @@ mod test {
pub fn can_search_for_an_entry_with_exact_time() { pub fn can_search_for_an_entry_with_exact_time() {
run_test(|path| { run_test(|path| {
let trips = mk_trips(); let trips = mk_trips();
let mut ts: Series<BikeTrip> = let mut ts: Series<BikeTrip> = Series::open(&path.to_string_lossy())
Series::open(&path).expect("expect the time series to open correctly"); .expect("expect the time series to open correctly");
for trip in &trips[0..=4] { for trip in &trips[0..=4] {
ts.put(trip.clone()).expect("expect a successful put"); ts.put(trip.clone()).expect("expect a successful put");
} }
let v: Vec<&Record<BikeTrip>> = ts let v: Vec<(&UniqueId, &BikeTrip)> = ts
.search(exact_time(Timestamp::DateTime( .search(exact_time(DateTimeTz(
UTC.with_ymd_and_hms(2011, 10, 31, 0, 0, 0) UTC.ymd(2011, 10, 31).and_hms(0, 0, 0),
.unwrap()
.with_timezone(&FixedOffset::east_opt(0).unwrap()),
))) )))
.collect(); .collect();
assert_eq!(v.len(), 1); assert_eq!(v.len(), 1);
assert_eq!(v[0].data, trips[1]); assert_eq!(*v[0].1, trips[1]);
}) })
} }
@ -178,34 +157,26 @@ mod test {
pub fn can_get_entries_in_time_range() { pub fn can_get_entries_in_time_range() {
run_test(|path| { run_test(|path| {
let trips = mk_trips(); let trips = mk_trips();
let mut ts: Series<BikeTrip> = let mut ts: Series<BikeTrip> = Series::open(&path.to_string_lossy())
Series::open(&path).expect("expect the time series to open correctly"); .expect("expect the time series to open correctly");
for trip in &trips[0..=4] { for trip in &trips[0..=4] {
ts.put(trip.clone()).expect("expect a successful put"); ts.put(trip.clone()).expect("expect a successful put");
} }
let v: Vec<&Record<BikeTrip>> = ts.search_sorted( let v: Vec<(&UniqueId, &BikeTrip)> = ts.search_sorted(
time_range( time_range(
Timestamp::DateTime( DateTimeTz(UTC.ymd(2011, 10, 31).and_hms(0, 0, 0)),
UTC.with_ymd_and_hms(2011, 10, 31, 0, 0, 0)
.unwrap()
.with_timezone(&FixedOffset::east_opt(0).unwrap()),
),
true, true,
Timestamp::DateTime( DateTimeTz(UTC.ymd(2011, 11, 04).and_hms(0, 0, 0)),
UTC.with_ymd_and_hms(2011, 11, 04, 0, 0, 0)
.unwrap()
.with_timezone(&FixedOffset::east_opt(0).unwrap()),
),
true, true,
), ),
|l, r| l.timestamp().cmp(&r.timestamp()), |l, r| l.1.timestamp().cmp(&r.1.timestamp()),
); );
assert_eq!(v.len(), 3); assert_eq!(v.len(), 3);
assert_eq!(v[0].data, trips[1]); assert_eq!(*v[0].1, trips[1]);
assert_eq!(v[1].data, trips[2]); assert_eq!(*v[1].1, trips[2]);
assert_eq!(v[2].data, trips[3]); assert_eq!(*v[2].1, trips[3]);
}) })
} }
@ -215,8 +186,8 @@ mod test {
let trips = mk_trips(); let trips = mk_trips();
{ {
let mut ts: Series<BikeTrip> = let mut ts: Series<BikeTrip> = Series::open(&path.to_string_lossy())
Series::open(&path).expect("expect the time series to open correctly"); .expect("expect the time series to open correctly");
for trip in &trips[0..=4] { for trip in &trips[0..=4] {
ts.put(trip.clone()).expect("expect a successful put"); ts.put(trip.clone()).expect("expect a successful put");
@ -224,29 +195,21 @@ mod test {
} }
{ {
let ts: Series<BikeTrip> = let ts: Series<BikeTrip> = Series::open(&path.to_string_lossy())
Series::open(&path).expect("expect the time series to open correctly"); .expect("expect the time series to open correctly");
let v: Vec<&Record<BikeTrip>> = ts.search_sorted( let v: Vec<(&UniqueId, &BikeTrip)> = ts.search_sorted(
time_range( time_range(
Timestamp::DateTime( DateTimeTz(UTC.ymd(2011, 10, 31).and_hms(0, 0, 0)),
UTC.with_ymd_and_hms(2011, 10, 31, 0, 0, 0)
.unwrap()
.with_timezone(&FixedOffset::east_opt(0).unwrap()),
),
true, true,
Timestamp::DateTime( DateTimeTz(UTC.ymd(2011, 11, 04).and_hms(0, 0, 0)),
UTC.with_ymd_and_hms(2011, 11, 04, 0, 0, 0)
.unwrap()
.with_timezone(&FixedOffset::east_opt(0).unwrap()),
),
true, true,
), ),
|l, r| l.timestamp().cmp(&r.timestamp()), |l, r| l.1.timestamp().cmp(&r.1.timestamp()),
); );
assert_eq!(v.len(), 3); assert_eq!(v.len(), 3);
assert_eq!(v[0].data, trips[1]); assert_eq!(*v[0].1, trips[1]);
assert_eq!(v[1].data, trips[2]); assert_eq!(*v[1].1, trips[2]);
assert_eq!(v[2].data, trips[3]); assert_eq!(*v[2].1, trips[3]);
} }
}) })
} }
@ -257,8 +220,8 @@ mod test {
let trips = mk_trips(); let trips = mk_trips();
{ {
let mut ts: Series<BikeTrip> = let mut ts: Series<BikeTrip> = Series::open(&path.to_string_lossy())
Series::open(&path).expect("expect the time series to open correctly"); .expect("expect the time series to open correctly");
for trip in &trips[0..=2] { for trip in &trips[0..=2] {
ts.put(trip.clone()).expect("expect a successful put"); ts.put(trip.clone()).expect("expect a successful put");
@ -266,57 +229,41 @@ mod test {
} }
{ {
let mut ts: Series<BikeTrip> = let mut ts: Series<BikeTrip> = Series::open(&path.to_string_lossy())
Series::open(&path).expect("expect the time series to open correctly"); .expect("expect the time series to open correctly");
let v: Vec<&Record<BikeTrip>> = ts.search_sorted( let v: Vec<(&UniqueId, &BikeTrip)> = ts.search_sorted(
time_range( time_range(
Timestamp::DateTime( DateTimeTz(UTC.ymd(2011, 10, 31).and_hms(0, 0, 0)),
UTC.with_ymd_and_hms(2011, 10, 31, 0, 0, 0)
.unwrap()
.with_timezone(&FixedOffset::east_opt(0).unwrap()),
),
true, true,
Timestamp::DateTime( DateTimeTz(UTC.ymd(2011, 11, 04).and_hms(0, 0, 0)),
UTC.with_ymd_and_hms(2011, 11, 04, 0, 0, 0)
.unwrap()
.with_timezone(&FixedOffset::east_opt(0).unwrap()),
),
true, true,
), ),
|l, r| l.timestamp().cmp(&r.timestamp()), |l, r| l.1.timestamp().cmp(&r.1.timestamp()),
); );
assert_eq!(v.len(), 2); assert_eq!(v.len(), 2);
assert_eq!(v[0].data, trips[1]); assert_eq!(*v[0].1, trips[1]);
assert_eq!(v[1].data, trips[2]); assert_eq!(*v[1].1, trips[2]);
ts.put(trips[3].clone()).expect("expect a successful put"); ts.put(trips[3].clone()).expect("expect a successful put");
ts.put(trips[4].clone()).expect("expect a successful put"); ts.put(trips[4].clone()).expect("expect a successful put");
} }
{ {
let ts: Series<BikeTrip> = let ts: Series<BikeTrip> = Series::open(&path.to_string_lossy())
Series::open(&path).expect("expect the time series to open correctly"); .expect("expect the time series to open correctly");
let v: Vec<&Record<BikeTrip>> = ts.search_sorted( let v: Vec<(&UniqueId, &BikeTrip)> = ts.search_sorted(
time_range( time_range(
Timestamp::DateTime( DateTimeTz(UTC.ymd(2011, 10, 31).and_hms(0, 0, 0)),
UTC.with_ymd_and_hms(2011, 10, 31, 0, 0, 0)
.unwrap()
.with_timezone(&FixedOffset::east_opt(0).unwrap()),
),
true, true,
Timestamp::DateTime( DateTimeTz(UTC.ymd(2011, 11, 05).and_hms(0, 0, 0)),
UTC.with_ymd_and_hms(2011, 11, 05, 0, 0, 0)
.unwrap()
.with_timezone(&FixedOffset::east_opt(0).unwrap()),
),
true, true,
), ),
|l, r| l.timestamp().cmp(&r.timestamp()), |l, r| l.1.timestamp().cmp(&r.1.timestamp()),
); );
assert_eq!(v.len(), 4); assert_eq!(v.len(), 4);
assert_eq!(v[0].data, trips[1]); assert_eq!(*v[0].1, trips[1]);
assert_eq!(v[1].data, trips[2]); assert_eq!(*v[1].1, trips[2]);
assert_eq!(v[2].data, trips[3]); assert_eq!(*v[2].1, trips[3]);
assert_eq!(v[3].data, trips[4]); assert_eq!(*v[3].1, trips[4]);
} }
}) })
} }
@ -326,8 +273,8 @@ mod test {
run_test(|path| { run_test(|path| {
let trips = mk_trips(); let trips = mk_trips();
let mut ts: Series<BikeTrip> = let mut ts: Series<BikeTrip> = Series::open(&path.to_string_lossy())
Series::open(&path).expect("expect the time series to open correctly"); .expect("expect the time series to open correctly");
ts.put(trips[0].clone()).expect("expect a successful put"); ts.put(trips[0].clone()).expect("expect a successful put");
ts.put(trips[1].clone()).expect("expect a successful put"); ts.put(trips[1].clone()).expect("expect a successful put");
@ -336,8 +283,9 @@ mod test {
match ts.get(&trip_id) { match ts.get(&trip_id) {
None => assert!(false, "record not found"), None => assert!(false, "record not found"),
Some(mut trip) => { Some(mut trip) => {
trip.data.distance = Distance(50000.0 * M); trip.distance = Distance(50000.0 * M);
ts.update(trip).expect("expect record to update"); ts.update(trip_id.clone(), trip)
.expect("expect record to update");
} }
}; };
@ -345,12 +293,12 @@ mod test {
None => assert!(false, "record not found"), None => assert!(false, "record not found"),
Some(trip) => { Some(trip) => {
assert_eq!( assert_eq!(
trip.data.datetime, trip.datetime,
UTC.with_ymd_and_hms(2011, 11, 02, 0, 0, 0).unwrap() DateTimeTz(UTC.ymd(2011, 11, 02).and_hms(0, 0, 0))
); );
assert_eq!(trip.data.distance, Distance(50000.0 * M)); assert_eq!(trip.distance, Distance(50000.0 * M));
assert_eq!(trip.data.duration, Duration(7020.0 * S)); assert_eq!(trip.duration, Duration(7020.0 * S));
assert_eq!(trip.data.comments, String::from("Do Some Distance!")); assert_eq!(trip.comments, String::from("Do Some Distance!"));
} }
} }
}) })
@ -362,8 +310,8 @@ mod test {
let trips = mk_trips(); let trips = mk_trips();
{ {
let mut ts: Series<BikeTrip> = let mut ts: Series<BikeTrip> = Series::open(&path.to_string_lossy())
Series::open(&path).expect("expect the time series to open correctly"); .expect("expect the time series to open correctly");
ts.put(trips[0].clone()).expect("expect a successful put"); ts.put(trips[0].clone()).expect("expect a successful put");
ts.put(trips[1].clone()).expect("expect a successful put"); ts.put(trips[1].clone()).expect("expect a successful put");
@ -372,36 +320,32 @@ mod test {
match ts.get(&trip_id) { match ts.get(&trip_id) {
None => assert!(false, "record not found"), None => assert!(false, "record not found"),
Some(mut trip) => { Some(mut trip) => {
trip.data.distance = Distance(50000.0 * M); trip.distance = Distance(50000.0 * M);
ts.update(trip).expect("expect record to update"); ts.update(trip_id, trip).expect("expect record to update");
} }
}; };
} }
{ {
let ts: Series<BikeTrip> = let ts: Series<BikeTrip> = Series::open(&path.to_string_lossy())
Series::open(&path).expect("expect the time series to open correctly"); .expect("expect the time series to open correctly");
let trips: Vec<&Record<BikeTrip>> = ts.records().collect(); let trips: Vec<(&UniqueId, &BikeTrip)> = ts.records().collect();
assert_eq!(trips.len(), 3); assert_eq!(trips.len(), 3);
let trips: Vec<&Record<BikeTrip>> = ts let trips: Vec<(&UniqueId, &BikeTrip)> = ts
.search(exact_time(Timestamp::DateTime( .search(exact_time(DateTimeTz(
UTC.with_ymd_and_hms(2011, 11, 02, 0, 0, 0) UTC.ymd(2011, 11, 02).and_hms(0, 0, 0),
.unwrap()
.with_timezone(&FixedOffset::east_opt(0).unwrap()),
))) )))
.collect(); .collect();
assert_eq!(trips.len(), 1); assert_eq!(trips.len(), 1);
assert_eq!( assert_eq!(
trips[0].data.datetime, trips[0].1.datetime,
UTC.with_ymd_and_hms(2011, 11, 02, 0, 0, 0) DateTimeTz(UTC.ymd(2011, 11, 02).and_hms(0, 0, 0))
.unwrap()
.with_timezone(&FixedOffset::east_opt(0).unwrap())
); );
assert_eq!(trips[0].data.distance, Distance(50000.0 * M)); assert_eq!(trips[0].1.distance, Distance(50000.0 * M));
assert_eq!(trips[0].data.duration, Duration(7020.0 * S)); assert_eq!(trips[0].1.duration, Duration(7020.0 * S));
assert_eq!(trips[0].data.comments, String::from("Do Some Distance!")); assert_eq!(trips[0].1.comments, String::from("Do Some Distance!"));
} }
}) })
} }
@ -412,21 +356,22 @@ mod test {
let trips = mk_trips(); let trips = mk_trips();
{ {
let mut ts: Series<BikeTrip> = let mut ts: Series<BikeTrip> = Series::open(&path.to_string_lossy())
Series::open(&path).expect("expect the time series to open correctly"); .expect("expect the time series to open correctly");
let trip_id = ts.put(trips[0].clone()).expect("expect a successful put"); let trip_id = ts.put(trips[0].clone()).expect("expect a successful put");
ts.put(trips[1].clone()).expect("expect a successful put"); ts.put(trips[1].clone()).expect("expect a successful put");
ts.put(trips[2].clone()).expect("expect a successful put"); ts.put(trips[2].clone()).expect("expect a successful put");
ts.delete(&trip_id).expect("successful delete"); ts.delete(&trip_id).expect("successful delete");
let recs: Vec<&Record<BikeTrip>> = ts.records().collect(); let recs: Vec<(&UniqueId, &BikeTrip)> = ts.records().collect();
assert_eq!(recs.len(), 2); assert_eq!(recs.len(), 2);
} }
{ {
let ts: Series<BikeTrip> = let ts: Series<BikeTrip> = Series::open(&path.to_string_lossy())
Series::open(&path).expect("expect the time series to open correctly"); .expect("expect the time series to open correctly");
let recs: Vec<&Record<BikeTrip>> = ts.records().collect(); let recs: Vec<(&UniqueId, &BikeTrip)> = ts.records().collect();
assert_eq!(recs.len(), 2); assert_eq!(recs.len(), 2);
} }
}) })
@ -437,13 +382,13 @@ mod test {
#[derive(Clone, Debug, PartialEq, Deserialize, Serialize)] #[derive(Clone, Debug, PartialEq, Deserialize, Serialize)]
pub struct WeightRecord { pub struct WeightRecord {
pub date: chrono::NaiveDate, pub date: DateTimeTz,
pub weight: Weight, pub weight: Weight,
} }
impl Recordable for WeightRecord { impl Recordable for WeightRecord {
fn timestamp(&self) -> Timestamp { fn timestamp(&self) -> DateTimeTz {
Timestamp::Date(self.date) self.date.clone()
} }
fn tags(&self) -> Vec<String> { fn tags(&self) -> Vec<String> {
@ -451,7 +396,6 @@ mod test {
} }
} }
/*
#[test] #[test]
pub fn legacy_file_load() { pub fn legacy_file_load() {
let ts: Series<WeightRecord> = let ts: Series<WeightRecord> =
@ -466,5 +410,4 @@ mod test {
Some(rec) => assert_eq!(rec.weight, Weight(77.79109 * KG)), Some(rec) => assert_eq!(rec.weight, Weight(77.79109 * KG)),
} }
} }
*/
} }

View File

@ -1 +0,0 @@
fixtures

View File

@ -1,2 +0,0 @@
fixtures
var

View File

@ -1,47 +0,0 @@
[package]
name = "file-service"
version = "0.2.0"
authors = ["savanni@luminescent-dreams.com"]
edition = "2018"
# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html
[lib]
name = "file_service"
path = "src/lib.rs"
[[bin]]
name = "file-service"
path = "src/main.rs"
[target.auth-cli.dependencies]
[dependencies]
authdb = { path = "../authdb/" }
base64ct = { version = "1", features = [ "alloc" ] }
build_html = { version = "2" }
bytes = { version = "1" }
chrono = { version = "0.4", features = ["serde"] }
clap = { version = "4", features = [ "derive" ] }
cookie = { version = "0.17" }
futures-util = { version = "0.3" }
hex-string = "0.1.0"
http = { version = "0.2" }
image = "0.23.5"
logger = "*"
log = { version = "0.4" }
mime = "0.3.16"
mime_guess = "2.0.3"
pretty_env_logger = { version = "0.5" }
serde_json = "*"
serde = { version = "1.0", features = ["derive"] }
sha2 = { version = "0.10" }
thiserror = { version = "1" }
tokio = { version = "1", features = [ "full" ] }
uuid = { version = "0.4", features = [ "serde", "v4" ] }
warp = { version = "0.3" }
[dev-dependencies]
cool_asserts = { version = "2" }
tempdir = { version = "0.3" }

View File

@ -1,11 +0,0 @@
version: '3'
tasks:
build:
cmds:
- cargo build
lint:
cmds:
- cargo watch -x clippy

View File

@ -1 +0,0 @@
[{"jti":"ac3a46c6-3fa1-4d0a-af12-e7d3fefdc878","aud":"savanni","exp":1621351436,"iss":"savanni","iat":1589729036,"sub":"https://savanni.luminescent-dreams.com/file-service/","perms":["admin"]}]

View File

@ -1,13 +0,0 @@
#!/usr/bin/env bash
set -euo pipefail
VERSION=`cat Cargo.toml | grep "^version =" | sed -r 's/^version = "(.+)"$/\1/'`
mkdir -p dist
cp ../target/release/file-service dist
cp ../target/release/auth-cli dist
strip dist/file-service
strip dist/auth-cli
tar -czf file-service-${VERSION}.tgz dist/

Binary file not shown.


View File

@ -1,279 +0,0 @@
use build_html::Html;
use bytes::Buf;
use file_service::WriteFileError;
use futures_util::StreamExt;
use http::{Error, StatusCode};
use std::collections::HashMap;
use std::io::Read;
use warp::{filters::multipart::FormData, http::Response, multipart::Part};
use crate::{pages, App, AuthToken, FileId, FileInfo, ReadFileError, SessionToken};
const CSS: &str = include_str!("../templates/style.css");
pub async fn handle_index(
app: App,
token: Option<SessionToken>,
) -> Result<Response<String>, Error> {
match token {
Some(token) => match app.validate_session(token).await {
Ok(_) => render_gallery_page(app).await,
Err(err) => render_auth_page(Some(format!("session expired: {:?}", err))),
},
None => render_auth_page(None),
}
}
pub async fn handle_css() -> Result<Response<String>, Error> {
Response::builder()
.header("content-type", "text/css")
.status(StatusCode::OK)
.body(CSS.to_owned())
}
pub fn render_auth_page(message: Option<String>) -> Result<Response<String>, Error> {
Response::builder()
.status(StatusCode::OK)
.body(pages::auth(message).to_html_string())
}
pub async fn render_gallery_page(app: App) -> Result<Response<String>, Error> {
match app.list_files().await {
Ok(ids) => {
let mut files = vec![];
for id in ids.into_iter() {
let file = app.get_file(&id).await;
files.push(file);
}
Response::builder()
.header("content-type", "text/html")
.status(StatusCode::OK)
.body(pages::gallery(files).to_html_string())
}
Err(_) => Response::builder()
.header("content-type", "text/html")
.status(StatusCode::INTERNAL_SERVER_ERROR)
.body("".to_owned()),
}
}
pub async fn thumbnail(
app: App,
id: String,
old_etags: Option<String>,
) -> Result<Response<Vec<u8>>, Error> {
match app.get_file(&FileId::from(id)).await {
Ok(file) => serve_file(file.info.clone(), || file.thumbnail(), old_etags),
Err(_err) => Response::builder()
.status(StatusCode::NOT_FOUND)
.body(vec![]),
}
}
pub async fn file(
app: App,
id: String,
old_etags: Option<String>,
) -> Result<Response<Vec<u8>>, Error> {
match app.get_file(&FileId::from(id)).await {
Ok(file) => serve_file(file.info.clone(), || file.content(), old_etags),
Err(_err) => Response::builder()
.status(StatusCode::NOT_FOUND)
.body(vec![]),
}
}
pub async fn handle_auth(
app: App,
form: HashMap<String, String>,
) -> Result<http::Response<String>, Error> {
match form.get("password") {
Some(token) => match app.authenticate(AuthToken::from(token.clone())).await {
Ok(Some(session_token)) => Response::builder()
.header("location", "/")
.header(
"set-cookie",
format!(
"session={}; Secure; HttpOnly; SameSite=Strict",
*session_token
),
)
.status(StatusCode::SEE_OTHER)
.body("".to_owned()),
Ok(None) => render_auth_page(Some("no user found".to_owned())),
Err(_) => render_auth_page(Some("invalid auth token".to_owned())),
},
None => render_auth_page(Some("no token available".to_owned())),
}
}
pub async fn handle_upload(
app: App,
token: SessionToken,
form: FormData,
) -> Result<http::Response<String>, Error> {
match app.validate_session(token).await {
Ok(Some(_)) => match process_file_upload(app, form).await {
Ok(_) => Response::builder()
.header("location", "/")
.status(StatusCode::SEE_OTHER)
.body("".to_owned()),
Err(UploadError::FilenameMissing) => Response::builder()
.status(StatusCode::BAD_REQUEST)
.body("filename is required for all files".to_owned()),
Err(UploadError::WriteFileError(err)) => Response::builder()
.status(StatusCode::INTERNAL_SERVER_ERROR)
.body(format!("could not write to the file system: {:?}", err)),
Err(UploadError::WarpError(err)) => Response::builder()
.status(StatusCode::INTERNAL_SERVER_ERROR)
.body(format!("error with the app framework: {:?}", err)),
},
_ => Response::builder()
.status(StatusCode::UNAUTHORIZED)
.body("".to_owned()),
}
}
pub async fn handle_delete(
app: App,
token: SessionToken,
id: FileId,
) -> Result<http::Response<String>, Error> {
match app.validate_session(token).await {
Ok(Some(_)) => match app.delete_file(id).await {
Ok(_) => Response::builder()
.header("location", "/")
.status(StatusCode::SEE_OTHER)
.body("".to_owned()),
Err(_) => unimplemented!(),
},
_ => Response::builder()
.status(StatusCode::UNAUTHORIZED)
.body("".to_owned()),
}
}
fn serve_file<F>(
info: FileInfo,
file: F,
old_etags: Option<String>,
) -> http::Result<http::Response<Vec<u8>>>
where
F: FnOnce() -> Result<Vec<u8>, ReadFileError>,
{
match old_etags {
Some(old_etags) if old_etags != info.hash => Response::builder()
.header("content-type", info.file_type)
.status(StatusCode::NOT_MODIFIED)
.body(vec![]),
_ => match file() {
Ok(content) => Response::builder()
.header("content-type", info.file_type)
.header("etag", info.hash)
.status(StatusCode::OK)
.body(content),
Err(_) => Response::builder()
.status(StatusCode::INTERNAL_SERVER_ERROR)
.body(vec![]),
},
}
}
async fn collect_multipart(
mut stream: warp::filters::multipart::FormData,
) -> Result<Vec<(Option<String>, Option<String>, Vec<u8>)>, warp::Error> {
let mut content: Vec<(Option<String>, Option<String>, Vec<u8>)> = Vec::new();
while let Some(part) = stream.next().await {
match part {
Ok(part) => content.push(collect_content(part).await.unwrap()),
Err(err) => return Err(err),
}
}
Ok(content)
}
async fn collect_content(
mut part: Part,
) -> Result<(Option<String>, Option<String>, Vec<u8>), String> {
let mut content: Vec<u8> = Vec::new();
while let Some(Ok(data)) = part.data().await {
let mut reader = data.reader();
reader.read_to_end(&mut content).unwrap();
}
Ok((
part.content_type().map(|s| s.to_owned()),
part.filename().map(|s| s.to_owned()),
content,
))
}
/*
async fn handle_upload(
form: warp::filters::multipart::FormData,
app: App,
) -> warp::http::Result<warp::http::Response<String>> {
let files = collect_multipart(form).await;
match files {
Ok(files) => {
for (_, filename, content) in files {
match filename {
Some(filename) => {
app.add_file(filename, content).unwrap();
}
None => {
return warp::http::Response::builder()
.status(StatusCode::BAD_REQUEST)
.body("".to_owned())
}
}
}
}
Err(_err) => {
return warp::http::Response::builder()
.status(StatusCode::BAD_REQUEST)
.body("".to_owned())
}
}
// println!("file length: {:?}", files.map(|f| f.len()));
warp::http::Response::builder()
.header("location", "/")
.status(StatusCode::SEE_OTHER)
.body("".to_owned())
}
*/
enum UploadError {
FilenameMissing,
WriteFileError(WriteFileError),
WarpError(warp::Error),
}
impl From<WriteFileError> for UploadError {
fn from(err: WriteFileError) -> Self {
Self::WriteFileError(err)
}
}
impl From<warp::Error> for UploadError {
fn from(err: warp::Error) -> Self {
Self::WarpError(err)
}
}
async fn process_file_upload(app: App, form: FormData) -> Result<(), UploadError> {
let files = collect_multipart(form).await?;
for (_, filename, content) in files {
match filename {
Some(filename) => {
app.add_file(filename, content).await?;
}
None => return Err(UploadError::FilenameMissing),
}
}
Ok(())
}

View File

@ -1,211 +0,0 @@
use std::fmt::Display;
use build_html::{self, Html, HtmlContainer};
#[derive(Clone, Debug, Default)]
pub struct Attributes(Vec<(String, String)>);
/*
impl FromIterator<(String, String)> for Attributes {
fn from_iter<T>(iter: T) -> Self
where
T: IntoIterator<Item = (String, String)>,
{
Attributes(iter.collect::<Vec<(String, String)>>())
}
}
impl FromIterator<(&str, &str)> for Attributes {
fn from_iter<T>(iter: T) -> Self
where
T: IntoIterator<Item = (&str, &str)>,
{
unimplemented!()
}
}
*/
impl Display for Attributes {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
let result = self.0
.iter()
.map(|(key, value)| format!("{}=\"{}\"", key, value))
.collect::<Vec<String>>()
.join(" ");
write!(f, "{}", result)
}
}
#[derive(Clone, Debug)]
pub struct Form {
path: String,
method: String,
encoding: Option<String>,
elements: String,
}
impl Form {
pub fn new() -> Self {
Self {
path: "/".to_owned(),
method: "get".to_owned(),
encoding: None,
elements: "".to_owned(),
}
}
pub fn with_path(mut self, path: &str) -> Self {
self.path = path.to_owned();
self
}
pub fn with_method(mut self, method: &str) -> Self {
self.method = method.to_owned();
self
}
pub fn with_encoding(mut self, encoding: &str) -> Self {
self.encoding = Some(encoding.to_owned());
self
}
}
impl Html for Form {
fn to_html_string(&self) -> String {
let encoding = match self.encoding {
Some(ref encoding) => format!("enctype=\"{encoding}\"", encoding = encoding),
None => "".to_owned(),
};
format!(
"<form action=\"{path}\" method=\"{method}\" {encoding}>\n{elements}\n</form>\n",
path = self.path,
method = self.method,
encoding = encoding,
elements = self.elements.to_html_string(),
)
}
}
impl HtmlContainer for Form {
fn add_html<H: Html>(&mut self, html: H) {
self.elements.push_str(&html.to_html_string());
}
}
#[derive(Clone, Debug)]
pub struct Input {
ty: String,
name: String,
id: Option<String>,
value: Option<String>,
attributes: Attributes,
}
impl Html for Input {
fn to_html_string(&self) -> String {
let id = match self.id {
Some(ref id) => format!("id=\"{}\"", id),
None => "".to_owned(),
};
let value = match self.value {
Some(ref value) => format!("value=\"{}\"", value),
None => "".to_owned(),
};
let attrs = self.attributes.to_string();
format!(
"<input type=\"{ty}\" name=\"{name}\" {id} {value} {attrs} />\n",
ty = self.ty,
name = self.name,
id = id,
value = value,
attrs = attrs,
)
}
}
impl Input {
pub fn new(ty: &str, name: &str) -> Self {
Self {
ty: ty.to_owned(),
name: name.to_owned(),
id: None,
value: None,
attributes: Attributes::default(),
}
}
pub fn with_id(mut self, val: &str) -> Self {
self.id = Some(val.to_owned());
self
}
pub fn with_attributes<'a>(
mut self,
values: impl IntoIterator<Item = (&'a str, &'a str)>,
) -> Self {
self.attributes = Attributes(
values
.into_iter()
.map(|(a, b)| (a.to_owned(), b.to_owned()))
.collect::<Vec<(String, String)>>(),
);
self
}
}
#[derive(Clone, Debug)]
pub struct Button {
ty: Option<String>,
name: Option<String>,
label: String,
attributes: Attributes,
}
impl Button {
pub fn new(label: &str) -> Self {
Self {
ty: None,
name: None,
label: label.to_owned(),
attributes: Attributes::default(),
}
}
pub fn with_type(mut self, ty: &str) -> Self {
self.ty = Some(ty.to_owned());
self
}
pub fn with_attributes<'a>(
mut self,
values: impl IntoIterator<Item = (&'a str, &'a str)>,
) -> Self {
self.attributes = Attributes(
values
.into_iter()
.map(|(a, b)| (a.to_owned(), b.to_owned()))
.collect::<Vec<(String, String)>>(),
);
self
}
}
impl Html for Button {
fn to_html_string(&self) -> String {
let ty = match self.ty {
Some(ref ty) => format!("type={}", ty),
None => "".to_owned(),
};
let name = match self.name {
Some(ref name) => format!("name={}", name),
None => "".to_owned(),
};
format!(
"<button {ty} {name} {attrs}>{label}</button>",
name = name,
label = self.label,
attrs = self.attributes
)
}
}

View File

@ -1,5 +0,0 @@
mod store;
pub use store::{
DeleteFileError, FileHandle, FileId, FileInfo, ReadFileError, Store, WriteFileError,
};

View File

@ -1,174 +0,0 @@
extern crate log;
use cookie::Cookie;
use handlers::{file, handle_auth, handle_css, handle_delete, handle_upload, thumbnail};
use std::{
collections::{HashMap, HashSet},
convert::Infallible,
net::{IpAddr, Ipv4Addr, SocketAddr},
path::PathBuf,
sync::Arc,
};
use tokio::sync::RwLock;
use warp::{Filter, Rejection};
mod handlers;
mod html;
mod pages;
const MAX_UPLOAD: u64 = 15 * 1024 * 1024;
use authdb::{AuthDB, AuthError, AuthToken, SessionToken, Username};
use file_service::{
DeleteFileError, FileHandle, FileId, FileInfo, ReadFileError, Store, WriteFileError,
};
pub use handlers::handle_index;
#[derive(Clone)]
pub struct App {
authdb: Arc<RwLock<AuthDB>>,
store: Arc<RwLock<Store>>,
}
impl App {
pub fn new(authdb: AuthDB, store: Store) -> Self {
Self {
authdb: Arc::new(RwLock::new(authdb)),
store: Arc::new(RwLock::new(store)),
}
}
pub async fn authenticate(&self, token: AuthToken) -> Result<Option<SessionToken>, AuthError> {
self.authdb.read().await.authenticate(token).await
}
pub async fn validate_session(
&self,
token: SessionToken,
) -> Result<Option<Username>, AuthError> {
self.authdb.read().await.validate_session(token).await
}
pub async fn list_files(&self) -> Result<HashSet<FileId>, ReadFileError> {
self.store.read().await.list_files()
}
pub async fn get_file(&self, id: &FileId) -> Result<FileHandle, ReadFileError> {
self.store.read().await.get_file(id)
}
pub async fn add_file(
&self,
filename: String,
content: Vec<u8>,
) -> Result<FileHandle, WriteFileError> {
self.store.write().await.add_file(filename, content)
}
pub async fn delete_file(&self, id: FileId) -> Result<(), DeleteFileError> {
self.store.write().await.delete_file(&id)?;
Ok(())
}
}
fn with_app(app: App) -> impl Filter<Extract = (App,), Error = Infallible> + Clone {
warp::any().map(move || app.clone())
}
fn parse_cookies(cookie_str: &str) -> Result<HashMap<String, String>, cookie::ParseError> {
Cookie::split_parse(cookie_str)
.map(|c| c.map(|c| (c.name().to_owned(), c.value().to_owned())))
.collect::<Result<HashMap<String, String>, cookie::ParseError>>()
}
fn get_session_token(cookies: HashMap<String, String>) -> Option<SessionToken> {
cookies.get("session").cloned().map(SessionToken::from)
}
fn maybe_with_session() -> impl Filter<Extract = (Option<SessionToken>,), Error = Rejection> + Copy
{
warp::any()
.and(warp::header::optional::<String>("cookie"))
.map(|cookie_str: Option<String>| match cookie_str {
Some(cookie_str) => parse_cookies(&cookie_str).ok().and_then(get_session_token),
None => None,
})
}
fn with_session() -> impl Filter<Extract = (SessionToken,), Error = Rejection> + Copy {
warp::any()
.and(warp::header::<String>("cookie"))
.and_then(|cookie_str: String| async move {
match parse_cookies(&cookie_str).ok().and_then(get_session_token) {
Some(session_token) => Ok(session_token),
None => Err(warp::reject()),
}
})
}
#[tokio::main]
pub async fn main() {
pretty_env_logger::init();
let authdb = AuthDB::new(PathBuf::from(&std::env::var("AUTHDB").unwrap()))
.await
.unwrap();
let store = Store::new(PathBuf::from(&std::env::var("FILE_SHARE_DIR").unwrap()));
let app = App::new(authdb, store);
let log = warp::log("file_service");
let root = warp::path!()
.and(warp::get())
.and(with_app(app.clone()))
.and(maybe_with_session())
.then(handle_index);
let styles = warp::path!("css").and(warp::get()).then(handle_css);
let auth = warp::path!("auth")
.and(warp::post())
.and(with_app(app.clone()))
.and(warp::filters::body::form())
.then(handle_auth);
let upload_via_form = warp::path!("upload")
.and(warp::post())
.and(with_app(app.clone()))
.and(with_session())
.and(warp::multipart::form().max_length(MAX_UPLOAD))
.then(handle_upload);
let delete_via_form = warp::path!("delete" / String)
.and(warp::post())
.and(with_app(app.clone()))
.and(with_session())
.then(|id, app, token| handle_delete(app, token, FileId::from(id)));
let thumbnail = warp::path!(String / "tn")
.and(warp::get())
.and(warp::header::optional::<String>("if-none-match"))
.and(with_app(app.clone()))
.then(move |id, old_etags, app: App| thumbnail(app, id, old_etags));
let file = warp::path!(String)
.and(warp::get())
.and(warp::header::optional::<String>("if-none-match"))
.and(with_app(app.clone()))
.then(move |id, old_etags, app: App| file(app, id, old_etags));
let server = warp::serve(
root.or(styles)
.or(auth)
.or(upload_via_form)
.or(delete_via_form)
.or(thumbnail)
.or(file)
.with(log),
);
server
.run(SocketAddr::new(IpAddr::V4(Ipv4Addr::new(0, 0, 0, 0)), 8002))
.await;
}

View File

@ -1,114 +0,0 @@
use crate::html::*;
use build_html::{self, Container, ContainerType, Html, HtmlContainer};
use file_service::{FileHandle, FileInfo, ReadFileError};
pub fn auth(_message: Option<String>) -> build_html::HtmlPage {
build_html::HtmlPage::new()
.with_title("Sign In")
.with_stylesheet("/css")
.with_container(
Container::new(ContainerType::Div)
.with_attributes([("class", "authentication-page")])
.with_container(auth_form()),
)
}
fn auth_form() -> Container {
Container::default()
.with_attributes([("class", "card authentication-form")])
.with_html(
Form::new()
.with_path("/auth")
.with_method("post")
.with_container(
Container::new(ContainerType::Div)
.with_html(
Input::new("password", "password")
.with_id("for-token-input")
.with_attributes([
("size", "50"),
("class", "authentication-form__input"),
]),
)
.with_html(
Button::new("Sign In")
.with_attributes([("class", "authentication-form__button")]),
),
),
)
}
pub fn gallery(handles: Vec<Result<FileHandle, ReadFileError>>) -> build_html::HtmlPage {
let mut page = build_html::HtmlPage::new()
.with_title("Gallery")
.with_stylesheet("/css")
.with_container(
Container::new(ContainerType::Div)
.with_attributes([("class", "gallery-page")])
.with_header(1, "Gallery")
.with_html(upload_form()),
);
let mut gallery = Container::new(ContainerType::Div).with_attributes([("class", "gallery")]);
for handle in handles {
let container = match handle {
Ok(ref handle) => thumbnail(&handle.info),
Err(err) => Container::new(ContainerType::Div)
.with_attributes(vec![("class", "file")])
.with_paragraph(format!("{:?}", err)),
};
gallery.add_container(container);
}
page.add_container(gallery);
page
}
pub fn upload_form() -> Form {
Form::new()
.with_path("/upload")
.with_method("post")
.with_encoding("multipart/form-data")
.with_container(
Container::new(ContainerType::Div)
.with_attributes([("class", "card upload-form")])
.with_html(Input::new("file", "file").with_attributes([
("id", "for-selector-input"),
("placeholder", "select file"),
("class", "upload-form__selector"),
]))
.with_html(
Button::new("Upload file")
.with_attributes([("class", "upload-form__button")])
.with_type("submit"),
),
)
}
pub fn thumbnail(info: &FileInfo) -> Container {
Container::new(ContainerType::Div)
.with_attributes(vec![("class", "card thumbnail")])
.with_html(
Container::new(ContainerType::Div).with_link(
format!("/{}", *info.id),
Container::default()
.with_attributes([("class", "thumbnail")])
.with_image(format!("{}/tn", *info.id), "test data")
.to_html_string(),
),
)
.with_html(
Container::new(ContainerType::Div)
.with_html(
Container::new(ContainerType::UnorderedList)
.with_attributes(vec![("class", "thumbnail__metadata")])
.with_html(info.name.clone())
.with_html(format!("{}", info.created.format("%Y-%m-%d"))),
)
.with_html(
Form::new()
.with_path(&format!("/delete/{}", *info.id))
.with_method("post")
.with_html(Button::new("Delete")),
),
)
}

View File

@ -1,299 +0,0 @@
use super::{fileinfo::FileInfo, FileId, ReadFileError, WriteFileError};
use chrono::prelude::*;
use hex_string::HexString;
use image::imageops::FilterType;
use sha2::{Digest, Sha256};
use std::{
convert::TryFrom,
io::{Read, Write},
path::{Path, PathBuf},
};
use thiserror::Error;
use uuid::Uuid;
#[derive(Debug, Error)]
pub enum PathError {
#[error("path cannot be derived from input")]
InvalidPath,
}
#[derive(Clone, Debug)]
pub struct PathResolver {
base: PathBuf,
id: FileId,
extension: String,
}
impl PathResolver {
pub fn new(base: &Path, id: FileId, extension: String) -> Self {
Self {
base: base.to_owned(),
id,
extension,
}
}
pub fn metadata_path_by_id(base: &Path, id: FileId) -> PathBuf {
let mut path = base.to_path_buf();
path.push(PathBuf::from(id.clone()));
path.set_extension("json");
path
}
pub fn id(&self) -> FileId {
self.id.clone()
}
pub fn file_path(&self) -> PathBuf {
let mut path = self.base.clone();
path.push(PathBuf::from(self.id.clone()));
path.set_extension(self.extension.clone());
path
}
pub fn metadata_path(&self) -> PathBuf {
let mut path = self.base.clone();
path.push(PathBuf::from(self.id.clone()));
path.set_extension("json");
path
}
pub fn thumbnail_path(&self) -> PathBuf {
let mut path = self.base.clone();
path.push(PathBuf::from(self.id.clone()));
path.set_extension(format!("tn.{}", self.extension));
path
}
}
impl TryFrom<String> for PathResolver {
type Error = PathError;
fn try_from(s: String) -> Result<Self, Self::Error> {
PathResolver::try_from(s.as_str())
}
}
impl TryFrom<&str> for PathResolver {
type Error = PathError;
fn try_from(s: &str) -> Result<Self, Self::Error> {
PathResolver::try_from(Path::new(s))
}
}
impl TryFrom<PathBuf> for PathResolver {
type Error = PathError;
fn try_from(path: PathBuf) -> Result<Self, Self::Error> {
PathResolver::try_from(path.as_path())
}
}
impl TryFrom<&Path> for PathResolver {
type Error = PathError;
fn try_from(path: &Path) -> Result<Self, Self::Error> {
Ok(Self {
base: path
.parent()
.map(|s| s.to_owned())
.ok_or(PathError::InvalidPath)?,
id: path
.file_stem()
.and_then(|s| s.to_str().map(FileId::from))
.ok_or(PathError::InvalidPath)?,
extension: path
.extension()
.and_then(|s| s.to_str().map(|s| s.to_owned()))
.ok_or(PathError::InvalidPath)?,
})
}
}
/// One file in the database, complete with the path of the file and information about the
/// thumbnail of the file.
#[derive(Debug)]
pub struct FileHandle {
pub id: FileId,
pub path: PathResolver,
pub info: FileInfo,
}
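// Write-path sketch, mirroring the store tests elsewhere in this diff: create a handle
// for an uploaded image, write its bytes (which also fills in size and hash and renders
// a thumbnail), then read the content back. `root` is assumed to be the store's files
// directory and `bytes` is assumed to decode as an image.
fn store_upload_sketch(root: PathBuf, bytes: Vec<u8>) -> Result<Vec<u8>, WriteFileError> {
    let mut handle = FileHandle::new("rawr.png".to_owned(), root)?;
    handle.set_content(bytes)?;
    Ok(handle.content()?)
}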
impl FileHandle {
/// Create a new entry in the database
pub fn new(filename: String, root: PathBuf) -> Result<Self, WriteFileError> {
let id = FileId::from(Uuid::new_v4().hyphenated().to_string());
let path = PathBuf::from(filename);
let name = path
.file_stem()
.and_then(|s| s.to_str().map(|s| s.to_owned()))
.ok_or(WriteFileError::InvalidPath)?;
let extension = path
.extension()
.and_then(|s| s.to_str().map(|s| s.to_owned()))
.ok_or(WriteFileError::InvalidPath)?;
let path = PathResolver {
base: root.clone(),
id: id.clone(),
extension: extension.clone(),
};
let file_type = mime_guess::from_ext(&extension)
.first_or_text_plain()
.essence_str()
.to_owned();
let info = FileInfo {
id: id.clone(),
name,
size: 0,
created: Utc::now(),
file_type,
hash: "".to_owned(),
extension,
};
let mut md_file = std::fs::File::create(path.metadata_path())?;
let _ = md_file.write(&serde_json::to_vec(&info)?)?;
Ok(Self { id, path, info })
}
pub fn load(id: &FileId, root: &Path) -> Result<Self, ReadFileError> {
let info = FileInfo::load(PathResolver::metadata_path_by_id(root, id.clone()))?;
let resolver = PathResolver::new(root, id.clone(), info.extension.clone());
Ok(Self {
id: info.id.clone(),
path: resolver,
info,
})
}
pub fn set_content(&mut self, content: Vec<u8>) -> Result<(), WriteFileError> {
let mut content_file = std::fs::File::create(self.path.file_path())?;
let byte_count = content_file.write(&content)?;
self.info.size = byte_count;
self.info.hash = self.hash_content(&content).as_string();
let mut md_file = std::fs::File::create(self.path.metadata_path())?;
let _ = md_file.write(&serde_json::to_vec(&self.info)?)?;
self.write_thumbnail()?;
Ok(())
}
pub fn content(&self) -> Result<Vec<u8>, ReadFileError> {
load_content(&self.path.file_path())
}
pub fn thumbnail(&self) -> Result<Vec<u8>, ReadFileError> {
load_content(&self.path.thumbnail_path())
}
fn hash_content(&self, data: &Vec<u8>) -> HexString {
HexString::from_bytes(&Sha256::digest(data).to_vec())
}
fn write_thumbnail(&self) -> Result<(), WriteFileError> {
let img = image::open(self.path.file_path())?;
let tn = img.resize(640, 640, FilterType::Nearest);
tn.save(self.path.thumbnail_path())?;
Ok(())
}
pub fn delete(self) {
let _ = std::fs::remove_file(self.path.thumbnail_path());
let _ = std::fs::remove_file(self.path.file_path());
let _ = std::fs::remove_file(self.path.metadata_path());
}
}
fn load_content(path: &Path) -> Result<Vec<u8>, ReadFileError> {
let mut buf = Vec::new();
let mut file = std::fs::File::open(path)?;
file.read_to_end(&mut buf)?;
Ok(buf)
}
#[cfg(test)]
mod test {
use super::*;
use std::{convert::TryFrom, path::PathBuf};
use tempdir::TempDir;
#[test]
fn paths() {
let resolver = PathResolver::try_from("path/82420255-d3c8-4d90-a582-f94be588c70c.png")
.expect("to have a valid path");
assert_eq!(
resolver.file_path(),
PathBuf::from("path/82420255-d3c8-4d90-a582-f94be588c70c.png")
);
assert_eq!(
resolver.metadata_path(),
PathBuf::from("path/82420255-d3c8-4d90-a582-f94be588c70c.json")
);
assert_eq!(
resolver.thumbnail_path(),
PathBuf::from("path/82420255-d3c8-4d90-a582-f94be588c70c.tn.png")
);
}
#[test]
fn it_creates_file_info() {
let tmp = TempDir::new("var").unwrap();
let handle =
FileHandle::new("rawr.png".to_owned(), PathBuf::from(tmp.path())).expect("to succeed");
assert_eq!(handle.info.name, "rawr");
assert_eq!(handle.info.size, 0);
assert_eq!(handle.info.file_type, "image/png");
assert_eq!(handle.info.extension, "png");
}
#[test]
fn it_opens_a_file() {
let tmp = TempDir::new("var").unwrap();
FileHandle::new("rawr.png".to_owned(), PathBuf::from(tmp.path())).expect("to succeed");
}
#[test]
fn it_deletes_a_file() {
let tmp = TempDir::new("var").unwrap();
let f =
FileHandle::new("rawr.png".to_owned(), PathBuf::from(tmp.path())).expect("to succeed");
f.delete();
}
#[test]
fn it_can_return_a_thumbnail() {
let tmp = TempDir::new("var").unwrap();
let _ =
FileHandle::new("rawr.png".to_owned(), PathBuf::from(tmp.path())).expect("to succeed");
/*
assert_eq!(
f.thumbnail(),
Thumbnail {
id: String::from("rawr.png"),
root: PathBuf::from("var/"),
},
);
*/
}
#[test]
fn it_can_return_a_file_stream() {
let tmp = TempDir::new("var").unwrap();
let _ =
FileHandle::new("rawr.png".to_owned(), PathBuf::from(tmp.path())).expect("to succeed");
// f.stream().expect("to succeed");
}
#[test]
fn it_raises_an_error_when_file_not_found() {
let tmp = TempDir::new("var").unwrap();
match FileHandle::load(&FileId::from("rawr"), tmp.path()) {
Err(ReadFileError::FileNotFound(_)) => assert!(true),
_ => assert!(false),
}
}
}

View File

@ -1,76 +0,0 @@
use crate::FileId;
use super::{ReadFileError, WriteFileError};
use chrono::prelude::*;
use serde::{Deserialize, Serialize};
use std::{
io::{Read, Write},
path::PathBuf,
};
#[derive(Clone, Debug, Serialize, Deserialize)]
pub struct FileInfo {
pub id: FileId,
// Early versions of the application didn't support a name field, so it is possible that
// metadata won't contain the name. We can just default to an empty string when loading the
// metadata, as all future versions will require a filename when the file gets uploaded.
#[serde(default)]
pub name: String,
pub size: usize,
pub created: DateTime<Utc>,
pub file_type: String,
pub hash: String,
pub extension: String,
}
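// Sketch of the note above: metadata written before the `name` field existed still
// deserializes, with `name` falling back to the empty string. The JSON literal is
// illustrative, not a captured fixture.
fn legacy_metadata_sketch() {
    let legacy = r#"{"id":"abc","size":10,"created":"2023-01-01T00:00:00Z","file_type":"image/png","hash":"","extension":"png"}"#;
    let info: FileInfo = serde_json::from_str(legacy).expect("legacy metadata should parse");
    assert_eq!(info.name, "");
}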
impl FileInfo {
pub fn load(path: PathBuf) -> Result<Self, ReadFileError> {
let mut content: Vec<u8> = Vec::new();
let mut file =
std::fs::File::open(path.clone()).map_err(|_| ReadFileError::FileNotFound(path))?;
file.read_to_end(&mut content)?;
let js = serde_json::from_slice(&content)?;
Ok(js)
}
pub fn save(&self, path: PathBuf) -> Result<(), WriteFileError> {
let ser = serde_json::to_string(self).unwrap();
let mut file = std::fs::File::create(path)?;
let _ = file.write(ser.as_bytes())?;
Ok(())
}
}
#[cfg(test)]
mod test {
use super::*;
use crate::store::FileId;
use tempdir::TempDir;
#[test]
fn it_saves_and_loads_metadata() {
let tmp = TempDir::new("var").unwrap();
let created = Utc::now();
let info = FileInfo {
id: FileId("temp-id".to_owned()),
name: "test-image".to_owned(),
size: 23777,
created,
file_type: "image/png".to_owned(),
hash: "abcdefg".to_owned(),
extension: "png".to_owned(),
};
let mut path = tmp.path().to_owned();
path.push(&PathBuf::from(info.id.clone()));
info.save(path.clone()).unwrap();
let info_ = FileInfo::load(path).unwrap();
assert_eq!(info_.size, 23777);
assert_eq!(info_.created, info.created);
assert_eq!(info_.file_type, "image/png");
assert_eq!(info_.hash, info.hash);
}
}

View File

@ -1,273 +0,0 @@
use serde::{Deserialize, Serialize};
use std::{collections::HashSet, ops::Deref, path::PathBuf};
use thiserror::Error;
mod filehandle;
mod fileinfo;
pub use filehandle::FileHandle;
pub use fileinfo::FileInfo;
#[derive(Debug, Error)]
pub enum WriteFileError {
#[error("root file path does not exist")]
RootNotFound,
#[error("permission denied")]
PermissionDenied,
#[error("invalid path")]
InvalidPath,
#[error("no metadata available")]
NoMetadata,
#[error("file could not be loaded")]
LoadError(#[from] ReadFileError),
#[error("image conversion failed")]
ImageError(#[from] image::ImageError),
#[error("JSON error")]
JSONError(#[from] serde_json::error::Error),
#[error("IO error")]
IOError(#[from] std::io::Error),
}
#[derive(Debug, Error)]
pub enum ReadFileError {
#[error("file not found")]
FileNotFound(PathBuf),
#[error("path is not a file")]
NotAFile,
#[error("permission denied")]
PermissionDenied,
#[error("JSON error")]
JSONError(#[from] serde_json::error::Error),
#[error("IO error")]
IOError(#[from] std::io::Error),
}
#[derive(Debug, Error)]
pub enum DeleteFileError {
#[error("file not found")]
FileNotFound(PathBuf),
#[error("metadata path is not a file")]
NotAFile,
#[error("cannot read metadata")]
PermissionDenied,
#[error("invalid metadata path")]
MetadataParseError(serde_json::error::Error),
#[error("IO error")]
IOError(#[from] std::io::Error),
}
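// Deleting a file begins by loading its handle, so read failures translate
// directly into delete failures.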
impl From<ReadFileError> for DeleteFileError {
fn from(err: ReadFileError) -> Self {
match err {
ReadFileError::FileNotFound(path) => DeleteFileError::FileNotFound(path),
ReadFileError::NotAFile => DeleteFileError::NotAFile,
ReadFileError::PermissionDenied => DeleteFileError::PermissionDenied,
ReadFileError::JSONError(err) => DeleteFileError::MetadataParseError(err),
ReadFileError::IOError(err) => DeleteFileError::IOError(err),
}
}
}
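/// Identifier for a stored file. Judging by the store tests below, the id also
/// serves as the base name for the on-disk artifacts: `<id>.<ext>` for the
/// content, `<id>.json` for the metadata, and `<id>.tn.<ext>` for the thumbnail.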
#[derive(Clone, Debug, Serialize, Deserialize, Hash, PartialEq, Eq)]
pub struct FileId(String);
impl From<String> for FileId {
fn from(s: String) -> Self {
Self(s)
}
}
impl From<&str> for FileId {
fn from(s: &str) -> Self {
Self(s.to_owned())
}
}
impl From<FileId> for PathBuf {
fn from(s: FileId) -> Self {
Self::from(&s)
}
}
impl From<&FileId> for PathBuf {
fn from(s: &FileId) -> Self {
let FileId(s) = s;
Self::from(s)
}
}
impl Deref for FileId {
type Target = String;
fn deref(&self) -> &Self::Target {
&self.0
}
}
/*
pub trait FileRoot {
fn root(&self) -> PathBuf;
}
*/
// pub struct Context(PathBuf);
/*
impl FileRoot for Context {
fn root(&self) -> PathBuf {
self.0.clone()
}
}
*/
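/// Flat-file store rooted at `files_root`. Each stored file sits alongside its
/// `<id>.json` metadata (and, per the tests below, a `<id>.tn.<ext>` thumbnail).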
pub struct Store {
files_root: PathBuf,
}
impl Store {
pub fn new(files_root: PathBuf) -> Self {
Self { files_root }
}
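    /// List the ids of every stored file by scanning `files_root` for
    /// `<id>.json` metadata files.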
pub fn list_files(&self) -> Result<HashSet<FileId>, ReadFileError> {
let paths = std::fs::read_dir(&self.files_root)?;
let info_files = paths
.into_iter()
            .filter_map(|entry| {
                // Skip directory entries that cannot be read instead of panicking on them.
                let path = entry.ok()?.path();
                if path.extension().and_then(|s| s.to_str()) == Some("json") {
                    path.file_stem().and_then(|s| s.to_str()).map(|stem| FileId::from(stem))
                } else {
                    None
                }
            })
.collect::<HashSet<FileId>>();
Ok(info_files)
}
pub fn add_file(
&mut self,
filename: String,
content: Vec<u8>,
) -> Result<FileHandle, WriteFileError> {
let mut file = FileHandle::new(filename, self.files_root.clone())?;
file.set_content(content)?;
Ok(file)
}
pub fn get_file(&self, id: &FileId) -> Result<FileHandle, ReadFileError> {
FileHandle::load(id, &self.files_root)
}
pub fn delete_file(&mut self, id: &FileId) -> Result<(), DeleteFileError> {
let handle = FileHandle::load(id, &self.files_root)?;
handle.delete();
Ok(())
}
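    /// Load only the metadata for a file; it lives at `<files_root>/<id>.json`.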
pub fn get_metadata(&self, id: &FileId) -> Result<FileInfo, ReadFileError> {
let mut path = self.files_root.clone();
path.push(PathBuf::from(id));
path.set_extension("json");
FileInfo::load(path)
}
}
#[cfg(test)]
mod test {
use super::*;
use cool_asserts::assert_matches;
use std::{collections::HashSet, io::Read};
use tempdir::TempDir;
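    // Test helper: stores fixtures/rawr.png into a Store backed by a fresh
    // temporary directory, then hands the store, the new file's id, and the
    // temp dir to the test body.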
fn with_file<F>(test_fn: F)
where
F: FnOnce(Store, FileId, TempDir),
{
let tmp = TempDir::new("var").unwrap();
let mut buf = Vec::new();
let mut file = std::fs::File::open("fixtures/rawr.png").unwrap();
file.read_to_end(&mut buf).unwrap();
let mut store = Store::new(PathBuf::from(tmp.path()));
let file_record = store.add_file("rawr.png".to_owned(), buf).unwrap();
test_fn(store, file_record.id, tmp);
}
#[test]
fn adds_files() {
with_file(|store, id, tmp| {
let file = store.get_file(&id).expect("to retrieve the file");
assert_eq!(file.content().map(|file| file.len()).unwrap(), 23777);
assert!(tmp.path().join(&(*id)).with_extension("png").exists());
assert!(tmp.path().join(&(*id)).with_extension("json").exists());
assert!(tmp.path().join(&(*id)).with_extension("tn.png").exists());
});
}
#[test]
fn sets_up_metadata_for_file() {
with_file(|store, id, tmp| {
assert!(tmp.path().join(&(*id)).with_extension("png").exists());
let info = store.get_metadata(&id).expect("to retrieve the metadata");
assert_matches!(info, FileInfo { size, file_type, hash, extension, .. } => {
assert_eq!(size, 23777);
assert_eq!(file_type, "image/png");
assert_eq!(hash, "b6cd35e113b95d62f53d9cbd27ccefef47d3e324aef01a2db6c0c6d3a43c89ee".to_owned());
assert_eq!(extension, "png".to_owned());
});
});
}
/*
#[test]
fn sets_up_thumbnail_for_file() {
with_file(|store, id| {
let (_, thumbnail) = store.get_thumbnail(&id).expect("to retrieve the thumbnail");
assert_eq!(thumbnail.content().map(|file| file.len()).unwrap(), 48869);
});
}
*/
#[test]
fn deletes_associated_files() {
with_file(|mut store, id, tmp| {
store.delete_file(&id).expect("file to be deleted");
assert!(!tmp.path().join(&(*id)).with_extension("png").exists());
assert!(!tmp.path().join(&(*id)).with_extension("json").exists());
assert!(!tmp.path().join(&(*id)).with_extension("tn.png").exists());
});
}
#[test]
fn lists_files_in_the_db() {
with_file(|store, id, _| {
let resolvers = store.list_files().expect("file listing to succeed");
let ids = resolvers.into_iter().collect::<HashSet<FileId>>();
assert_eq!(ids.len(), 1);
assert!(ids.contains(&id));
});
}
}

View File

@ -1,91 +0,0 @@
use super::{ReadFileError, WriteFileError};
use image::imageops::FilterType;
use std::{
fs::remove_file,
io::Read,
path::{Path, PathBuf},
};
#[derive(Clone, Debug, PartialEq)]
pub struct Thumbnail {
pub path: PathBuf,
}
impl Thumbnail {
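    /// Open the thumbnail at `thumbnail_path`, creating it lazily: if it does
    /// not exist yet, the image at `origin_path` is resized to fit within
    /// 640x640 and written there.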
pub fn open(
origin_path: PathBuf,
thumbnail_path: PathBuf,
) -> Result<Thumbnail, WriteFileError> {
let s = Thumbnail {
            path: thumbnail_path,
};
if !s.path.exists() {
let img = image::open(&origin_path)?;
let tn = img.resize(640, 640, FilterType::Nearest);
tn.save(&s.path)?;
}
Ok(s)
}
pub fn load(path: PathBuf) -> Result<Thumbnail, ReadFileError> {
let s = Thumbnail { path: path.clone() };
if !s.path.exists() {
return Err(ReadFileError::FileNotFound(path));
}
Ok(s)
}
/*
pub fn from_path(path: &Path) -> Result<Thumbnail, ReadFileError> {
let id = path
.file_name()
.map(|s| String::from(s.to_string_lossy()))
.ok_or(ReadFileError::NotAnImage(PathBuf::from(path)))?;
let path = path
.parent()
.ok_or(ReadFileError::FileNotFound(PathBuf::from(path)))?;
Thumbnail::open(&id, root)
}
*/
/*
pub fn stream(&self) -> Result<std::fs::File, ReadFileError> {
std::fs::File::open(self.path.clone()).map_err(|err| {
if err.kind() == std::io::ErrorKind::NotFound {
ReadFileError::FileNotFound
} else {
ReadFileError::from(err)
}
})
}
*/
/*
pub fn delete(self) -> Result<(), WriteFileError> {
remove_file(self.path).map_err(WriteFileError::from)
}
*/
}
#[cfg(test)]
mod test {
use super::*;
use crate::store::utils::FileCleanup;
#[test]
fn it_creates_a_thumbnail_if_one_does_not_exist() {
let _ = FileCleanup(PathBuf::from("var/rawr.tn.png"));
let _ = Thumbnail::open(
PathBuf::from("fixtures/rawr.png"),
PathBuf::from("var/rawr.tn.png"),
)
.expect("thumbnail open must work");
assert!(Path::new("var/rawr.tn.png").is_file());
}
}

View File

@ -1,15 +0,0 @@
<html>
<head>
<title> {{title}} </title>
<link href="/css" rel="stylesheet" type="text/css" media="screen" />
<script src="/script"></script>
</head>
<body>
<a href="/file/{{id}}"><img src="/tn/{{id}}" /></a>
</body>
</html>

View File

@ -1,54 +0,0 @@
<html>
<head>
<title> Admin list of files </title>
<link href="/css" rel="stylesheet" type="text/css" media="screen" />
<script src="/script"></script>
</head>
<body>
<h1> Admin list of files </h1>
<div class="uploadform">
<form action="/" method="post" enctype="multipart/form-data">
<div id="file-selector">
<input type="file" name="file" id="file-selector-input" />
<label for="file-selector-input" onclick="selectFile('file-selector')">Select a file</label>
</div>
<input type="submit" value="Upload file" />
</form>
</div>
<div class="files">
{{#files}}
<div class="file">
{{#error}}
<div>
<p> {{error}} </p>
</div>
{{/error}}
{{#file}}
<div class="thumbnail">
<a href="/file/{{id}}"><img src="/tn/{{id}}" /></a>
</div>
<div>
<ul>
<li> {{date}} </li>
<li> {{type_}} </li>
<li> {{size}} </li>
</ul>
<div>
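            <!-- The hidden _method field is presumably rewritten server-side into an
                 HTTP DELETE for this file id. -->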
<form action="/{{id}}" method="post">
<input type="hidden" name="_method" value="delete" />
<input type="submit" value="Delete" />
</form>
</div>
</div>
{{/file}}
</div>
{{/files}}
</div>
</body>
</html>

View File

@ -1,10 +0,0 @@
// Wire up the file selector so its label shows the chosen file's name.
// Invoked from the upload form label's onclick handler.
const selectFile = (selectorId) => {
  const input = document.querySelector("#" + selectorId + " input[type='file']");
  const label = document.querySelector("#" + selectorId + " label");
  input.addEventListener("change", () => {
    if (input.files.length > 0) {
      label.innerHTML = input.files[0].name;
    }
  });
};

Some files were not shown because too many files have changed in this diff