Compare commits
No commits in common. "81aa7410de2330f993336a65741427d2387076c7" and "d6c2a9519bc89739be860ff092767ac7e37b7b7c" have entirely different histories.
File diff suppressed because it is too large
@@ -1,25 +0,0 @@
[package]
name = "file-service"
version = "0.1.0"
authors = ["savanni@luminescent-dreams.com"]
edition = "2018"

# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html

[dependencies]
chrono = { version = "0.4", features = ["serde"] }
hex-string = "0.1.0"
iron = "0.6.1"
logger = "*"
mime = "0.3.16"
mime_guess = "2.0.3"
mustache = "0.9.0"
orizentic = "1.0.0"
params = "*"
router = "*"
serde_json = "*"
serde = { version = "1.0", features = ["derive"] }
sha2 = "0.8.2"
uuid = { version = "0.4", features = ["serde", "v4"] }
thiserror = "1.0.20"
image = "0.23.5"

@@ -1 +0,0 @@
[{"jti":"ac3a46c6-3fa1-4d0a-af12-e7d3fefdc878","aud":"savanni","exp":1621351436,"iss":"savanni","iat":1589729036,"sub":"https://savanni.luminescent-dreams.com/file-service/","perms":["admin"]}]

Binary file not shown. (Before: 23 KiB)

@@ -1,61 +0,0 @@
use iron::headers;
use std::collections::HashMap;

#[derive(Clone, Debug)]
pub struct Cookie {
    pub name: String,
    pub value: String,
}

impl From<&str> for Cookie {
    fn from(s: &str) -> Cookie {
        let parts: Vec<&str> = s.split("=").collect();
        Cookie {
            name: String::from(parts[0]),
            value: String::from(parts[1]),
        }
    }
}

impl From<&String> for Cookie {
    fn from(s: &String) -> Cookie {
        Cookie::from(s.as_str())
    }
}

impl From<String> for Cookie {
    fn from(s: String) -> Cookie {
        Cookie::from(s.as_str())
    }
}

#[derive(Debug)]
pub struct CookieJar(HashMap<String, Cookie>);

impl CookieJar {
    pub fn new() -> CookieJar {
        CookieJar(HashMap::new())
    }

    pub fn add_cookie(&mut self, name: String, value: Cookie) {
        self.0.insert(name, value);
    }

    pub fn lookup(&self, name: &str) -> Option<&Cookie> {
        self.0.get(name)
    }
}

// Some(Cookie(["auth=eyJ0eXAiOiJKV1QiLCJhbGciOiJIUzI1NiJ9.eyJqdGkiOiJhYzNhNDZjNi0zZmExLTRkMGEtYWYxMi1lN2QzZmVmZGM4NzgiLCJhdWQiOiJzYXZhbm5pIiwiZXhwIjoxNjIxMzUxNDM2LCJpc3MiOiJzYXZhbm5pIiwiaWF0IjoxNTg5NzI5MDM2LCJzdWIiOiJodHRwczovL3NhdmFubmkubHVtaW5lc2NlbnQtZHJlYW1zLmNvbS9maWxlLXNlcnZpY2UvIiwicGVybXMiOlsiYWRtaW4iXX0.8zjAbZ7Ut0d6EcDeyik39GKhXvH4qkMDdaiQVNKWiuM"]))
impl From<&headers::Cookie> for CookieJar {
    fn from(c: &headers::Cookie) -> CookieJar {
        let jar = CookieJar::new();

        let headers::Cookie(cs) = c;
        cs.iter().fold(jar, |mut jar, c_| {
            let cookie = Cookie::from(c_);
            jar.add_cookie(cookie.name.clone(), cookie);
            jar
        })
    }
}

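A minimal usage sketch of the cookie types above (the values here are hypothetical and not part of the original diff). Note that Cookie::from takes parts[0] and parts[1] of the "=" split, so a value that itself contains "=" would be truncated:

    let cookie = Cookie::from("auth=abc123");
    assert_eq!(cookie.name, "auth");
    assert_eq!(cookie.value, "abc123");

    let mut jar = CookieJar::new();
    jar.add_cookie(cookie.name.clone(), cookie);
    assert!(jar.lookup("auth").is_some());
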
@@ -1,31 +0,0 @@
use std::path::PathBuf;
use thiserror::Error;

#[derive(Error, Debug)]
pub enum Error {
    #[error("not implemented")]
    NotImplemented,

    #[error("file not found: `{0}`")]
    FileNotFound(PathBuf),

    #[error("file is not an image: `{0}`")]
    NotAnImage(PathBuf),

    #[error("path is not a file: `{0}`")]
    NotAFile(PathBuf),

    #[error("Image loading error")]
    ImageError(#[from] image::ImageError),

    #[error("IO error")]
    IOError(#[from] std::io::Error),

    #[error("JSON error")]
    JSONError(#[from] serde_json::error::Error),

    #[error("UTF8 Error")]
    UTF8Error(#[from] std::str::Utf8Error),
}

pub type Result<A> = std::result::Result<A, Error>;

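The #[from] attributes above give this Error type From impls for the underlying error types, so callers can use the ? operator directly. A minimal sketch, assuming only what is shown in this file (the function and path are hypothetical):

    fn read_metadata(path: &std::path::Path) -> Result<serde_json::Value> {
        let bytes = std::fs::read(path)?;        // std::io::Error -> Error::IOError
        let text = std::str::from_utf8(&bytes)?; // Utf8Error -> Error::UTF8Error
        let value = serde_json::from_str(text)?; // serde_json::Error -> Error::JSONError
        Ok(value)
    }
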
@@ -1,151 +0,0 @@
use super::error::{Error, Result};
use super::fileinfo::FileInfo;
use super::thumbnail::Thumbnail;
use std::fs::{copy, read_dir, remove_file};
use std::path::{Path, PathBuf};

#[derive(Debug)]
pub struct File {
    info: FileInfo,
    tn: Thumbnail,
    root: PathBuf,
}

impl File {
    pub fn new(
        id: &str,
        root: &Path,
        temp_path: &PathBuf,
        filename: &Option<PathBuf>,
    ) -> Result<File> {
        let mut dest_path = PathBuf::from(root);
        dest_path.push(id);
        match filename {
            Some(fname) => match fname.extension() {
                Some(ext) => {
                    dest_path.set_extension(ext);
                    ()
                }
                None => (),
            },
            None => (),
        };
        copy(temp_path, dest_path.clone())?;
        let info = FileInfo::from_path(&dest_path)?;
        let tn = Thumbnail::from_path(&dest_path)?;
        Ok(File {
            info,
            tn,
            root: PathBuf::from(root),
        })
    }

    pub fn open(id: &str, root: &Path) -> Result<File> {
        let mut file_path = PathBuf::from(root);
        file_path.push(id.clone());

        if !file_path.exists() {
            return Err(Error::FileNotFound(file_path));
        }
        if !file_path.is_file() {
            return Err(Error::NotAFile(file_path));
        }

        let info = match FileInfo::open(id, root) {
            Ok(i) => Ok(i),
            Err(Error::FileNotFound(_)) => {
                let info = FileInfo::from_path(&file_path)?;
                info.save(&root)?;
                Ok(info)
            }
            Err(err) => Err(err),
        }?;

        let tn = Thumbnail::open(id, root)?;

        Ok(File {
            info,
            tn,
            root: PathBuf::from(root),
        })
    }

    pub fn list(root: &Path) -> Vec<Result<File>> {
        let dir_iter = read_dir(&root).unwrap();
        dir_iter
            .filter(|entry| {
                let entry_ = entry.as_ref().unwrap();
                let filename = entry_.file_name();
                !(filename.to_string_lossy().starts_with("."))
            })
            .map(|entry| {
                let entry_ = entry.unwrap();
                let id = entry_.file_name().into_string().unwrap();
                File::open(&id, root)
            })
            .collect()
    }

    pub fn info(&self) -> FileInfo {
        self.info.clone()
    }

    pub fn thumbnail(&self) -> Thumbnail {
        self.tn.clone()
    }

    pub fn stream(&self) -> Result<std::fs::File> {
        let mut path = self.root.clone();
        path.push(self.info.id.clone());
        std::fs::File::open(path).map_err(Error::from)
    }

    pub fn delete(&self) -> Result<()> {
        let mut path = self.root.clone();
        path.push(self.info.id.clone());
        remove_file(path)?;
        self.tn.delete()?;
        self.info.delete()
    }
}

#[cfg(test)]
mod test {
    use super::*;
    use crate::lib::utils::FileCleanup;
    use std::path::{Path, PathBuf};

    #[test]
    fn it_opens_a_file() {
        let _md = FileCleanup(PathBuf::from("fixtures/.metadata/rawr.png.json"));
        let _tn = FileCleanup(PathBuf::from("fixtures/.thumbnails/rawr.png"));

        File::open("rawr.png", Path::new("fixtures/")).expect("to succeed");
    }

    #[test]
    fn it_can_return_a_thumbnail() {
        let f = File::open("rawr.png", Path::new("fixtures/")).expect("to succeed");
        assert_eq!(
            f.thumbnail(),
            Thumbnail {
                id: String::from("rawr.png"),
                root: PathBuf::from("fixtures/"),
            },
        );
    }

    #[test]
    fn it_can_return_a_file_stream() {
        let f = File::open("rawr.png", Path::new("fixtures/")).expect("to succeed");
        f.stream().expect("to succeed");
    }

    #[test]
    fn it_raises_an_error_when_file_not_found() {
        match File::open("garbage", Path::new("fixtures/")) {
            Err(Error::FileNotFound(_)) => assert!(true),
            _ => assert!(false),
        }
    }
}

@@ -1,160 +0,0 @@
use chrono::prelude::*;
use hex_string::HexString;
use serde::{Deserialize, Serialize};
use serde_json;
use sha2::{Digest, Sha256};
use std::fs::remove_file;
use std::io::{Read, Write};
use std::path::{Path, PathBuf};

use super::error::{Error, Result};
use super::utils::append_extension;

#[derive(Clone, Debug, Serialize, Deserialize)]
pub struct FileInfo {
    pub id: String,
    pub size: u64,
    pub created: DateTime<Utc>,
    pub file_type: String,
    pub hash: String,

    #[serde(skip)]
    root: PathBuf,
}

impl FileInfo {
    pub fn save(&self, root: &Path) -> Result<()> {
        let ser = serde_json::to_string(self).unwrap();
        std::fs::File::create(FileInfo::metadata_path(&self.id, root))
            .and_then(|mut stream| stream.write(ser.as_bytes()).map(|_| (())))
            .map_err(Error::from)
    }

    pub fn open(id: &str, root: &Path) -> Result<FileInfo> {
        let mut buf = Vec::new();

        let md_path = FileInfo::metadata_path(id, root);
        std::fs::File::open(md_path.clone())
            .and_then(|mut stream| stream.read_to_end(&mut buf))
            .map_err(move |err| match err.kind() {
                std::io::ErrorKind::NotFound => Error::FileNotFound(md_path),
                _ => Error::IOError(err),
            })?;

        let str_repr = std::str::from_utf8(&buf)?;

        serde_json::from_str(&str_repr).map_err(Error::from)
    }

    pub fn from_path(path: &Path) -> Result<FileInfo> {
        match (path.is_file(), path.is_dir()) {
            (false, false) => Err(Error::FileNotFound(PathBuf::from(path))),
            (false, true) => Err(Error::NotAFile(PathBuf::from(path))),
            (true, _) => Ok(()),
        }?;

        let metadata = path.metadata().map_err(Error::IOError)?;
        let id = path
            .file_name()
            .map(|s| String::from(s.to_string_lossy()))
            .ok_or(Error::NotAFile(PathBuf::from(path)))?;
        let created = metadata
            .created()
            .map(|m| DateTime::from(m))
            .map_err(|err| Error::IOError(err))?;
        let file_type = String::from(
            mime_guess::from_path(path)
                .first_or_octet_stream()
                .essence_str(),
        );
        let hash = FileInfo::hash_file(path)?;
        Ok(FileInfo {
            id,
            size: metadata.len(),
            created: created,
            file_type,
            hash: hash.as_string(),
            root: PathBuf::from(path.parent().unwrap()),
        })
    }

    fn hash_file(path: &Path) -> Result<HexString> {
        let mut buf = Vec::new();
        let mut file = std::fs::File::open(path).map_err(Error::from)?;

        file.read_to_end(&mut buf).map_err(Error::from)?;
        let mut vec = Vec::new();
        vec.extend_from_slice(Sha256::digest(&buf).as_slice());
        Ok(HexString::from_bytes(&vec))
    }

    fn metadata_path(id: &str, root: &Path) -> PathBuf {
        let mut path = PathBuf::from(root);
        path.push(".metadata");
        path.push(id.clone());
        append_extension(&path, "json")
    }

    pub fn delete(&self) -> Result<()> {
        let path = FileInfo::metadata_path(&self.id, &self.root);
        remove_file(path).map_err(Error::from)
    }
}

#[cfg(test)]
mod test {
    use super::*;

    use crate::lib::utils::FileCleanup;

    #[test]
    fn it_generates_information_from_file_path() {
        let path = Path::new("fixtures/rawr.png");
        match FileInfo::from_path(&path) {
            Ok(FileInfo {
                id,
                size,
                file_type,
                hash,
                ..
            }) => {
                assert_eq!(id, "rawr.png");
                assert_eq!(size, 23777);
                assert_eq!(file_type, "image/png");
                assert_eq!(
                    hash,
                    "b6cd35e113b95d62f53d9cbd27ccefef47d3e324aef01a2db6c0c6d3a43c89ee"
                );
            }
            Err(err) => {
                println!("error loading file path: {}", err);
                assert!(false);
            }
        }
    }

    #[test]
    fn it_saves_and_loads_metadata() {
        let path = Path::new("fixtures/rawr.png");
        let _ = FileCleanup(append_extension(path, "json"));
        let info = FileInfo::from_path(&path).unwrap();
        info.save(Path::new("fixtures")).unwrap();

        assert!(Path::new("fixtures/.metadata/rawr.png.json").is_file());

        let info_ = FileInfo::open("rawr.png", Path::new("fixtures")).unwrap();
        assert_eq!(info_.id, "rawr.png");
        assert_eq!(info_.size, 23777);
        assert_eq!(info_.created, info.created);
        assert_eq!(info_.file_type, "image/png");
        assert_eq!(info_.hash, info.hash);
    }

    #[test]
    fn it_extends_a_file_extension() {
        assert_eq!(
            append_extension(Path::new("fixtures/rawr.png"), "json"),
            Path::new("fixtures/rawr.png.json")
        );
    }
}

@@ -1,56 +0,0 @@
use std::path::{Path, PathBuf};
use uuid::Uuid;

mod error;
mod file;
mod fileinfo;
mod thumbnail;
mod utils;

pub use error::{Error, Result};
pub use file::File;
pub use fileinfo::FileInfo;
pub use thumbnail::Thumbnail;

pub struct App {
    files_root: PathBuf,
}

impl App {
    pub fn new(files_root: &Path) -> App {
        App {
            files_root: PathBuf::from(files_root),
        }
    }

    pub fn list_files(&self) -> Vec<Result<File>> {
        File::list(&self.files_root)
    }

    pub fn add_file(&mut self, temp_path: &PathBuf, filename: &Option<PathBuf>) -> Result<File> {
        let id = Uuid::new_v4().hyphenated().to_string();
        File::new(&id, &self.files_root, temp_path, filename)
    }

    pub fn delete_file(&mut self, id: String) -> Result<()> {
        let f = File::open(&id, &self.files_root)?;
        f.delete()
    }

    pub fn get_metadata(&self, id: String) -> Result<FileInfo> {
        FileInfo::open(&id, &self.files_root)
    }

    pub fn get_file(&self, id: String) -> Result<(FileInfo, std::fs::File)> {
        let f = File::open(&id, &self.files_root)?;
        let info = f.info();
        let stream = f.stream()?;
        Ok((info, stream))
    }

    pub fn get_thumbnail(&self, id: &str) -> Result<(FileInfo, std::fs::File)> {
        let f = File::open(id, &self.files_root)?;
        let stream = f.thumbnail().stream()?;
        Ok((f.info(), stream))
    }
}

@@ -1,82 +0,0 @@
use image::imageops::FilterType;
use std::fs::remove_file;
use std::path::{Path, PathBuf};

use super::error::{Error, Result};

#[derive(Clone, Debug, PartialEq)]
pub struct Thumbnail {
    pub id: String,
    pub root: PathBuf,
}

impl Thumbnail {
    pub fn open(id: &str, root: &Path) -> Result<Thumbnail> {
        let mut source_path = PathBuf::from(root);
        source_path.push(id);

        let self_ = Thumbnail {
            id: String::from(id),
            root: PathBuf::from(root),
        };

        let thumbnail_path = Thumbnail::thumbnail_path(id, root);
        if !thumbnail_path.exists() {
            let img = image::open(source_path)?;
            let tn = img.resize(640, 640, FilterType::Nearest);
            tn.save(thumbnail_path)?;
        }

        Ok(self_)
    }

    pub fn from_path(path: &Path) -> Result<Thumbnail> {
        let id = path
            .file_name()
            .map(|s| String::from(s.to_string_lossy()))
            .ok_or(Error::NotAnImage(PathBuf::from(path)))?;

        let root = path
            .parent()
            .ok_or(Error::FileNotFound(PathBuf::from(path)))?;

        Thumbnail::open(&id, root)
    }

    fn thumbnail_path(id: &str, root: &Path) -> PathBuf {
        let mut path = PathBuf::from(root);
        path.push(".thumbnails");
        path.push(id.clone());
        path
    }

    pub fn stream(&self) -> Result<std::fs::File> {
        let thumbnail_path = Thumbnail::thumbnail_path(&self.id, &self.root);
        std::fs::File::open(thumbnail_path.clone()).map_err(|err| {
            if err.kind() == std::io::ErrorKind::NotFound {
                Error::FileNotFound(thumbnail_path)
            } else {
                Error::from(err)
            }
        })
    }

    pub fn delete(&self) -> Result<()> {
        let path = Thumbnail::thumbnail_path(&self.id, &self.root);
        remove_file(path).map_err(Error::from)
    }
}

#[cfg(test)]
mod test {
    use super::*;
    use crate::lib::utils::FileCleanup;

    #[test]
    fn it_creates_a_thumbnail_if_one_does_not_exist() {
        let _ = FileCleanup(PathBuf::from("fixtures/.thumbnails/rawr.png"));
        let _ =
            Thumbnail::open("rawr.png", Path::new("fixtures")).expect("thumbnail open must work");
        assert!(Path::new("fixtures/.thumbnails/rawr.png").is_file());
    }
}

@@ -1,17 +0,0 @@
use std::path::{Path, PathBuf};

pub struct FileCleanup(pub PathBuf);

impl Drop for FileCleanup {
    fn drop(&mut self) {
        let _ = std::fs::remove_file(&self.0);
    }
}

pub fn append_extension(path: &Path, extra_ext: &str) -> PathBuf {
    let ext_ = match path.extension() {
        None => String::from(extra_ext),
        Some(ext) => [ext.to_string_lossy(), std::borrow::Cow::from(extra_ext)].join("."),
    };
    path.with_extension(ext_)
}

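A quick illustration of these helpers, mirroring the expectation in the fileinfo tests; the temporary path below is only an example:

    let p = append_extension(Path::new("fixtures/rawr.png"), "json");
    assert_eq!(p, PathBuf::from("fixtures/rawr.png.json"));

    {
        let _cleanup = FileCleanup(PathBuf::from("fixtures/example-output.json"));
        // ... write the temporary file here ...
    } // the file is removed (best effort) when _cleanup is dropped
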
@@ -1,341 +0,0 @@
use iron::headers;
use iron::middleware::Handler;
use iron::modifiers::{Header, Redirect};
use iron::prelude::*;
use iron::response::BodyReader;
use iron::status;
use mustache::{compile_path, Template};
use orizentic::{Permissions, ResourceName, Secret};
use params::{Params, Value};
use router::Router;
use serde::Serialize;
use std::collections::HashMap;
use std::fs::File;
use std::io::Read;
use std::path::Path;
use std::path::PathBuf;
use std::sync::{Arc, RwLock};

mod cookies;
mod lib;
mod middleware;

use lib::{App, FileInfo};
use middleware::{Authentication, RestForm};

fn is_admin(resource: &ResourceName, permissions: &Permissions) -> bool {
    let Permissions(perms) = permissions;
    ResourceName(String::from(
        "https://savanni.luminescent-dreams.com/file-service/",
    )) == *resource
        && perms.contains(&String::from("admin"))
}

pub fn compare_etags(info: FileInfo, etag_list: &headers::IfNoneMatch) -> bool {
    let current_etag = headers::EntityTag::new(false, info.hash);
    match etag_list {
        headers::IfNoneMatch::Any => false,
        headers::IfNoneMatch::Items(lst) => lst.iter().any(|etag| etag.weak_eq(&current_etag)),
    }
}

mod files {
    use super::*;

    pub struct IndexHandler {
        pub app: Arc<RwLock<App>>,
        pub template: Template,
    }

    #[derive(Serialize)]
    pub enum TemplateFile {
        #[serde(rename = "error")]
        Error { error: String },
        #[serde(rename = "file")]
        File {
            id: String,
            size: u64,
            date: String,
            type_: String,
        },
    }

    #[derive(Serialize)]
    pub struct IndexTemplateParams {
        files: Vec<TemplateFile>,
    }

    impl Handler for IndexHandler {
        fn handle(&self, req: &mut Request) -> IronResult<Response> {
            let app = self.app.read().unwrap();
            let m_token = req.extensions.get::<Authentication>();
            match m_token {
                Some(token) => {
                    if token.check_authorizations(is_admin) {
                        let files: Vec<TemplateFile> = app
                            .list_files()
                            .into_iter()
                            .map(|entry| match entry {
                                Ok(file) => TemplateFile::File {
                                    id: file.info().id,
                                    size: file.info().size,
                                    date: format!(
                                        "{}",
                                        file.info().created.format("%Y-%m-%d %H:%M:%S")
                                    ),
                                    type_: file.info().file_type,
                                },
                                Err(err) => TemplateFile::Error {
                                    error: format!("{}", err),
                                },
                            })
                            .collect();
                        Ok(Response::with((
                            status::Ok,
                            Header(headers::ContentType::html()),
                            Header(headers::SetCookie(vec![format!("auth={}", token.text)])),
                            self.template
                                .render_to_string(&IndexTemplateParams { files })
                                .expect("the template to render"),
                        )))
                    } else {
                        Ok(Response::with(status::Forbidden))
                    }
                }
                None => Ok(Response::with(status::Forbidden)),
            }
        }
    }

    pub struct GetHandler {
        pub app: Arc<RwLock<App>>,
    }

    impl Handler for GetHandler {
        fn handle(&self, req: &mut Request) -> IronResult<Response> {
            let app = self.app.read().unwrap();
            let capture = req.extensions.get::<Router>().unwrap().clone();
            let old_etags = req.headers.get::<headers::IfNoneMatch>();
            match capture.find("id") {
                Some(id) => {
                    let info = app.get_metadata(String::from(id));
                    match (info, old_etags) {
                        (Ok(info_), Some(if_none_match)) => {
                            if compare_etags(info_, if_none_match) {
                                return Ok(Response::with(status::NotModified));
                            }
                        }
                        _ => (),
                    }
                    match app.get_file(String::from(id)) {
                        Ok((info, stream)) => Ok(Response::with((
                            status::Ok,
                            Header(headers::ContentType(
                                info.file_type.parse::<iron::mime::Mime>().unwrap(),
                            )),
                            Header(headers::ETag(headers::EntityTag::new(false, info.hash))),
                            BodyReader(stream),
                        ))),
                        Err(_err) => Ok(Response::with(status::NotFound)),
                    }
                }
                _ => Ok(Response::with(status::BadRequest)),
            }
        }
    }

    pub struct GetThumbnailHandler {
        pub app: Arc<RwLock<App>>,
    }

    impl Handler for GetThumbnailHandler {
        fn handle(&self, req: &mut Request) -> IronResult<Response> {
            let app = self.app.read().unwrap();
            let capture = req.extensions.get::<Router>().unwrap().clone();
            let old_etags = req.headers.get::<headers::IfNoneMatch>();
            match capture.find("id") {
                Some(id) => {
                    let info = app.get_metadata(String::from(id));
                    match (info, old_etags) {
                        (Ok(info_), Some(if_none_match)) => {
                            if compare_etags(info_, if_none_match) {
                                return Ok(Response::with(status::NotModified));
                            }
                        }
                        _ => (),
                    }
                    match app.get_thumbnail(id) {
                        Ok((info, stream)) => Ok(Response::with((
                            status::Ok,
                            Header(headers::ContentType(
                                info.file_type.parse::<iron::mime::Mime>().unwrap(),
                            )),
                            Header(headers::ETag(headers::EntityTag::new(false, info.hash))),
                            BodyReader(stream),
                        ))),
                        Err(_err) => Ok(Response::with(status::NotFound)),
                    }
                }
                _ => Ok(Response::with(status::BadRequest)),
            }
        }
    }
    pub struct PostHandler {
        pub app: Arc<RwLock<App>>,
    }

    impl Handler for PostHandler {
        fn handle(&self, req: &mut Request) -> IronResult<Response> {
            let mut app = self.app.write().unwrap();
            let m_token = req.extensions.get::<Authentication>();
            match m_token {
                Some(token) => {
                    if token.check_authorizations(is_admin) {
                        let params = req.get_ref::<Params>().unwrap();
                        if let Value::File(f_info) = params.get("file").unwrap() {
                            match app.add_file(
                                &f_info.path,
                                &f_info.filename.clone().map(|fname| PathBuf::from(fname)),
                            ) {
                                Ok(_) => Ok(Response::with((
                                    status::MovedPermanently,
                                    Redirect(router::url_for(req, "index", HashMap::new())),
                                ))),
                                Err(_) => Ok(Response::with(status::InternalServerError)),
                            }
                        } else {
                            Ok(Response::with(status::BadRequest))
                        }
                    } else {
                        Ok(Response::with(status::Forbidden))
                    }
                }
                None => Ok(Response::with(status::Forbidden)),
            }
        }
    }

    pub struct DeleteHandler {
        pub app: Arc<RwLock<App>>,
    }

    impl Handler for DeleteHandler {
        fn handle(&self, req: &mut Request) -> IronResult<Response> {
            let mut app = self.app.write().unwrap();
            let capture = req.extensions.get::<Router>().unwrap().clone();
            let m_token = req.extensions.get::<Authentication>();
            match m_token {
                Some(token) => {
                    if token.check_authorizations(is_admin) {
                        match capture.find("id") {
                            Some(id) => match app.delete_file(String::from(id)) {
                                Ok(()) => Ok(Response::with((
                                    status::MovedPermanently,
                                    Redirect(router::url_for(req, "index", HashMap::new())),
                                ))),
                                Err(_) => Ok(Response::with(status::InternalServerError)),
                            },
                            None => Ok(Response::with(status::BadRequest)),
                        }
                    } else {
                        Ok(Response::with(status::Forbidden))
                    }
                }
                None => Ok(Response::with(status::Forbidden)),
            }
        }
    }
}

fn css(_: &mut Request) -> IronResult<Response> {
    let mut css: String = String::from("");
    File::open("templates/style.css")
        .unwrap()
        .read_to_string(&mut css)
        .unwrap();
    Ok(Response::with((
        status::Ok,
        Header(headers::ContentType(iron::mime::Mime(
            iron::mime::TopLevel::Text,
            iron::mime::SubLevel::Css,
            vec![],
        ))),
        css,
    )))
}

fn script(_: &mut Request) -> IronResult<Response> {
    let mut js: String = String::from("");
    File::open("templates/script.js")
        .unwrap()
        .read_to_string(&mut js)
        .unwrap();
    Ok(Response::with((
        status::Ok,
        Header(headers::ContentType(iron::mime::Mime(
            iron::mime::TopLevel::Text,
            iron::mime::SubLevel::Javascript,
            vec![],
        ))),
        js,
    )))
}

fn main() {
    let auth_db_path = std::env::var("ORIZENTIC_DB").unwrap();
    let secret = Secret(Vec::from(
        std::env::var("ORIZENTIC_SECRET").unwrap().as_bytes(),
    ));
    let auth_middleware = Authentication::new(secret, auth_db_path);

    let app = Arc::new(RwLock::new(App::new(Path::new(
        &std::env::var("FILE_SHARE_DIR").unwrap(),
    ))));

    let mut router = Router::new();
    router.get(
        "/",
        files::IndexHandler {
            app: app.clone(),
            template: compile_path("templates/index.html").expect("the template to compile"),
        },
        "index",
    );

    router.get(
        "/:id",
        files::GetFileHandler {
            app: app.clone(),
            template: compile_path("templates/file.html").expect("the template to compile"),
        },
        "get-file-page",
    );

    router.get(
        "/:id/raw",
        files::GetHandler { app: app.clone() },
        "get-file",
    );

    router.get(
        "/:id/tn",
        files::GetThumbnailHandler { app: app.clone() },
        "get-thumbnail",
    );

    router.post("/", files::PostHandler { app: app.clone() }, "upload-file");

    router.delete(
        "/:id",
        files::DeleteHandler { app: app.clone() },
        "delete-file",
    );
    router.get("/css", css, "styles");
    router.get("/script", script, "script");

    let mut chain = Chain::new(router);
    chain.link_before(auth_middleware);
    chain.link_before(RestForm {});

    Iron::new(chain).http("0.0.0.0:3000").unwrap();
}

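For context on the caching logic in the handlers above: a minimal sketch of how compare_etags short-circuits a request whose If-None-Match header already carries the file's hash. It assumes only the iron 0.6 headers API used in this file; the hash value and the `info` binding are hypothetical:

    // `info` stands in for a FileInfo loaded elsewhere; the hash string is illustrative.
    let etags = headers::IfNoneMatch::Items(vec![headers::EntityTag::new(
        false,
        String::from("b6cd35e113b95d62f53d9cbd27ccefef47d3e324aef01a2db6c0c6d3a43c89ee"),
    )]);
    if compare_etags(info, &etags) {
        // The handler returns status::NotModified and skips streaming the file.
    }
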
@@ -1,51 +0,0 @@
use iron::headers;
use iron::middleware::BeforeMiddleware;
use iron::prelude::*;
use iron::typemap::Key;
use orizentic::{filedb, OrizenticCtx, Secret};
use params::{FromValue, Params};

use crate::cookies::{Cookie, CookieJar};

pub struct Authentication {
    pub auth: OrizenticCtx,
}

impl Key for Authentication {
    type Value = orizentic::VerifiedToken;
}

impl Authentication {
    pub fn new(secret: Secret, auth_db_path: String) -> Authentication {
        let claims = filedb::load_claims_from_file(&auth_db_path).expect("claims did not load");
        let orizentic = OrizenticCtx::new(secret, claims);
        Authentication { auth: orizentic }
    }

    fn authenticate_user(
        &self,
        token_str: String,
    ) -> Result<orizentic::VerifiedToken, orizentic::Error> {
        self.auth.decode_and_validate_text(&token_str)
    }
}

impl BeforeMiddleware for Authentication {
    fn before(&self, req: &mut Request) -> IronResult<()> {
        let params = req.get_ref::<Params>().unwrap();
        let token = match params.get("auth").and_then(|v| String::from_value(v)) {
            Some(token_str) => self.authenticate_user(token_str).ok(),
            None => {
                let m_jar = req
                    .headers
                    .get::<headers::Cookie>()
                    .map(|cookies| CookieJar::from(cookies));
                m_jar
                    .and_then(|jar| jar.lookup("auth").cloned())
                    .and_then(|Cookie { value, .. }| self.authenticate_user(value.clone()).ok())
            }
        };
        token.map(|t| req.extensions.insert::<Authentication>(t));
        Ok(())
    }
}

@@ -1,16 +0,0 @@
use iron::middleware::{AfterMiddleware, BeforeMiddleware};
use iron::prelude::*;

pub struct Logging {}

impl BeforeMiddleware for Logging {
    fn before(&self, _: &mut Request) -> IronResult<()> {
        Ok(())
    }
}

impl AfterMiddleware for Logging {
    fn after(&self, _: &mut Request, res: Response) -> IronResult<Response> {
        Ok(res)
    }
}

@@ -1,6 +0,0 @@
mod authentication;
mod logging;
mod restform;

pub use authentication::Authentication;
pub use restform::RestForm;

@@ -1,34 +0,0 @@
use iron::method::Method;
use iron::middleware::BeforeMiddleware;
use iron::prelude::*;
use params::{Params, Value};

pub struct RestForm {}

impl RestForm {
    fn method(&self, v: &Value) -> Option<Method> {
        match v {
            Value::String(method_str) => match method_str.as_str() {
                "delete" => Some(Method::Delete),
                _ => None,
            },
            _ => None,
        }
    }
}

impl BeforeMiddleware for RestForm {
    fn before(&self, req: &mut Request) -> IronResult<()> {
        if req.method == Method::Post {
            let method = {
                let params = req.get_ref::<Params>().unwrap();
                params
                    .get("_method")
                    .and_then(|m| self.method(m))
                    .unwrap_or(Method::Post)
            };
            req.method = method;
        }
        Ok(())
    }
}

@@ -1,15 +0,0 @@
<html>

<head>
  <title> {{title}} </title>
  <link href="/css" rel="stylesheet" type="text/css" media="screen" />
  <script src="/script"></script>
</head>

<body>

  <a href="/file/{{id}}"><img src="/tn/{{id}}" /></a>

</body>

</html>

@@ -1,54 +0,0 @@
<html>

<head>
  <title> Admin list of files </title>
  <link href="/css" rel="stylesheet" type="text/css" media="screen" />
  <script src="/script"></script>
</head>

<body>
  <h1> Admin list of files </h1>

  <div class="uploadform">
    <form action="/" method="post" enctype="multipart/form-data">
      <div id="file-selector">
        <input type="file" name="file" id="file-selector-input" />
        <label for="file-selector-input" onclick="selectFile('file-selector')">Select a file</label>
      </div>
      <input type="submit" value="Upload file" />
    </form>
  </div>

  <div class="files">
    {{#files}}
    <div class="file">
      {{#error}}
      <div>
        <p> {{error}} </p>
      </div>
      {{/error}}

      {{#file}}
      <div class="thumbnail">
        <a href="/file/{{id}}"><img src="/tn/{{id}}" /></a>
      </div>
      <div>
        <ul>
          <li> {{date}} </li>
          <li> {{type_}} </li>
          <li> {{size}} </li>
        </ul>
        <div>
          <form action="/{{id}}" method="post">
            <input type="hidden" name="_method" value="delete" />
            <input type="submit" value="Delete" />
          </form>
        </div>
      </div>
      {{/file}}
    </div>
    {{/files}}
  </div>
</body>

</html>

@@ -1,10 +0,0 @@
const selectFile = (selectorId) => {
  console.log("wide arrow functions work: " + selectorId);
  const input = document.querySelector("#" + selectorId + " input[type='file']")
  const label = document.querySelector("#" + selectorId + " label")
  input.addEventListener("change", (e) => {
    if (input.files.length > 0) {
      label.innerHTML = input.files[0].name
    }
  })
}

@@ -1,103 +0,0 @@
body {
  font-family: 'Ariel', sans-serif;
}

.files {
  display: flex;
  flex-wrap: wrap;
}

.file {
  display: flex;
  margin: 1em;
  border: 1px solid #449dfc;
  border-radius: 5px;
  padding: 1em;
}

.thumbnail {
  max-width: 320px;
  margin: 1em;
}

img {
  max-width: 100%;
}

[type="submit"] {
  border-radius: 1em;
  margin: 1em;
  padding: 1em;
}

.uploadform {
  display: flex;
  margin: 1em;
  background-color: #e5f0fc;
  border: 1px solid #449dfc;
  border-radius: 5px;
  padding: 1em;
}

/* https://benmarshall.me/styling-file-inputs/ */
[type="file"] {
  border: 0;
  clip: rect(0, 0, 0, 0);
  height: 1px;
  overflow: hidden;
  padding: 0;
  position: absolute !important;
  white-space: nowrap;
  width: 1px;
}

[type="file"] + label {
  background-color: rgb(0, 86, 112);
  border-radius: 1em;
  color: #fff;
  cursor: pointer;
  display: inline-block;
  padding: 1em;
  margin: 1em;
  transition: background-color 0.3s;
}

[type="file"]:focus + label,
[type="file"] + label:hover {
  background-color: #67b0ff;
}

[type="file"]:focus + label {
  outline: 1px dotted #000;
  outline: -webkit-focus-ring-color auto 5px;
}

@media screen and (max-width: 980px) { /* This is the screen width of a OnePlus 5t */
  body {
    font-size: xx-large;
  }

  [type="submit"] {
    font-size: xx-large;
    width: 100%;
  }

  .uploadform {
    display: flex;
  }

  [type="file"] + label {
    width: 100%;
  }

  .thumbnail {
    max-width: 100%;
    margin: 1em;
  }

  .file {
    display: flex;
    flex-direction: column;
    width: 100%;
  }
}

@@ -1,74 +0,0 @@
# Contributor Covenant Code of Conduct

## Our Pledge

In the interest of fostering an open and welcoming environment, we as
contributors and maintainers pledge to making participation in our project and
our community a harassment-free experience for everyone, regardless of age, body
size, disability, ethnicity, gender identity and expression, level of experience,
education, socio-economic status, nationality, personal appearance, race,
religion, or sexual identity and orientation.

## Our Standards

Examples of behavior that contributes to creating a positive environment
include:

* Using welcoming and inclusive language
* Being respectful of differing viewpoints and experiences
* Gracefully accepting constructive criticism
* Focusing on what is best for the community
* Showing empathy towards other community members

Examples of unacceptable behavior by participants include:

* The use of sexualized language or imagery and unwelcome sexual attention or
advances
* Trolling, insulting/derogatory comments, and personal or political attacks
* Public or private harassment
* Publishing others' private information, such as a physical or electronic
address, without explicit permission
* Other conduct which could reasonably be considered inappropriate in a
professional setting

## Our Responsibilities

Project maintainers are responsible for clarifying the standards of acceptable
behavior and are expected to take appropriate and fair corrective action in
response to any instances of unacceptable behavior.

Project maintainers have the right and responsibility to remove, edit, or
reject comments, commits, code, wiki edits, issues, and other contributions
that are not aligned to this Code of Conduct, or to ban temporarily or
permanently any contributor for other behaviors that they deem inappropriate,
threatening, offensive, or harmful.

## Scope

This Code of Conduct applies both within project spaces and in public spaces
when an individual is representing the project or its community. Examples of
representing a project or community include using an official project e-mail
address, posting via an official social media account, or acting as an appointed
representative at an online or offline event. Representation of a project may be
further defined and clarified by project maintainers.

## Enforcement

Instances of abusive, harassing, or otherwise unacceptable behavior may be
reported by contacting the project team at savanni@luminescent-dreams.com. All
complaints will be reviewed and investigated and will result in a response that
is deemed necessary and appropriate to the circumstances. The project team is
obligated to maintain confidentiality with regard to the reporter of an incident.
Further details of specific enforcement policies may be posted separately.

Project maintainers who do not follow or enforce the Code of Conduct in good
faith may face temporary or permanent repercussions as determined by other
members of the project's leadership.

## Attribution

This Code of Conduct is adapted from the [Contributor Covenant][homepage], version 1.4,
available at https://www.contributor-covenant.org/version/1/4/code-of-conduct.html

[homepage]: https://www.contributor-covenant.org

@@ -1,4 +0,0 @@
* [Savanni D'Gerinel](http://github.com/savannidgerinel)
* [Daria Phoebe Brasea](http://github.com/dariaphoebe)
* [Aria Stewart](http://github.com/aredridel)

@@ -1,446 +0,0 @@
# This file is automatically @generated by Cargo.
# It is not intended for manual editing.
version = 3

[[package]]
name = "ansi_term"
version = "0.11.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "ee49baf6cb617b853aa8d93bf420db2383fab46d314482ca2803b40d5fde979b"
dependencies = [
 "winapi",
]

[[package]]
name = "atty"
version = "0.2.10"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "2fc4a1aa4c24c0718a250f0681885c1af91419d242f29eb8f2ab28502d80dbd1"
dependencies = [
 "libc",
 "termion",
 "winapi",
]

[[package]]
name = "base64"
version = "0.9.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "85415d2594767338a74a30c1d370b2f3262ec1b4ed2d7bba5b3faf4de40467d9"
dependencies = [
 "byteorder",
 "safemem",
]

[[package]]
name = "bitflags"
version = "1.0.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "d0c54bb8f454c567f21197eefcdbf5679d0bd99f2ddbe52e84c77061952e6789"

[[package]]
name = "byteorder"
version = "1.2.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "74c0b906e9446b0a2e4f760cdb3fa4b2c48cdc6db8766a845c54b6ff063fd2e9"

[[package]]
name = "cc"
version = "1.0.31"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "c9ce8bb087aacff865633f0bd5aeaed910fe2fe55b55f4739527f2e023a2e53d"

[[package]]
name = "cfg-if"
version = "1.0.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "baf1de4339761588bc0619e3cbc0120ee582ebb74b53b4efbf79117bd2da40fd"

[[package]]
name = "chrono"
version = "0.4.4"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "6962c635d530328acc53ac6a955e83093fedc91c5809dfac1fa60fa470830a37"
dependencies = [
 "num-integer",
 "num-traits",
 "serde",
 "time",
]

[[package]]
name = "clap"
version = "2.33.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "37e58ac78573c40708d45522f0d80fa2f01cc4f9b4e2bf749807255454312002"
dependencies = [
 "ansi_term",
 "atty",
 "bitflags",
 "strsim",
 "textwrap",
 "unicode-width",
 "vec_map",
]

[[package]]
name = "dtoa"
version = "0.4.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "09c3753c3db574d215cba4ea76018483895d7bff25a31b49ba45db21c48e50ab"

[[package]]
name = "either"
version = "1.5.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "3be565ca5c557d7f59e7cfcf1844f9e3033650c929c6566f511e8005f205c1d0"

[[package]]
name = "getrandom"
version = "0.2.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "7fcd999463524c52659517fe2cea98493cfe485d10565e7b0fb07dbba7ad2753"
dependencies = [
 "cfg-if",
 "libc",
 "wasi",
]

[[package]]
name = "itertools"
version = "0.10.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "69ddb889f9d0d08a67338271fa9b62996bc788c7796a5c18cf057420aaed5eaf"
dependencies = [
 "either",
]

[[package]]
name = "itoa"
version = "0.4.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "c069bbec61e1ca5a596166e55dfe4773ff745c3d16b700013bcaff9a6df2c682"

[[package]]
name = "jsonwebtoken"
version = "5.0.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "8d438ea707d465c230305963b67f8357a1d56fcfad9434797d7cb1c46c2e41df"
dependencies = [
 "base64",
 "chrono",
 "ring",
 "serde",
 "serde_derive",
 "serde_json",
 "untrusted",
]

[[package]]
name = "lazy_static"
version = "1.3.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "bc5729f27f159ddd61f4df6228e827e86643d4d3e7c32183cb30a1c08f604a14"

[[package]]
name = "libc"
version = "0.2.103"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "dd8f7255a17a627354f321ef0055d63b898c6fb27eff628af4d1b66b7331edf6"

[[package]]
name = "linked-hash-map"
version = "0.5.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "70fb39025bc7cdd76305867c4eccf2f2dcf6e9a57f5b21a93e1c2d86cd03ec9e"

[[package]]
name = "num-integer"
version = "0.1.39"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "e83d528d2677f0518c570baf2b7abdcf0cd2d248860b68507bdcb3e91d4c0cea"
dependencies = [
 "num-traits",
]

[[package]]
name = "num-traits"
version = "0.2.5"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "630de1ef5cc79d0cdd78b7e33b81f083cbfe90de0f4b2b2f07f905867c70e9fe"

[[package]]
name = "orizentic"
version = "1.0.1"
dependencies = [
 "chrono",
 "clap",
 "itertools",
 "jsonwebtoken",
 "serde",
 "serde_derive",
 "serde_json",
 "thiserror",
 "uuid",
 "version_check",
 "yaml-rust",
]

[[package]]
name = "proc-macro2"
version = "0.4.6"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "effdb53b25cdad54f8f48843d67398f7ef2e14f12c1b4cb4effc549a6462a4d6"
dependencies = [
 "unicode-xid 0.1.0",
]

[[package]]
name = "proc-macro2"
version = "1.0.29"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "b9f5105d4fdaab20335ca9565e106a5d9b82b6219b5ba735731124ac6711d23d"
dependencies = [
 "unicode-xid 0.2.2",
]

[[package]]
name = "quote"
version = "0.6.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "e44651a0dc4cdd99f71c83b561e221f714912d11af1a4dff0631f923d53af035"
dependencies = [
 "proc-macro2 0.4.6",
]

[[package]]
name = "quote"
version = "1.0.9"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "c3d0b9745dc2debf507c8422de05d7226cc1f0644216dfdfead988f9b1ab32a7"
dependencies = [
 "proc-macro2 1.0.29",
]

[[package]]
name = "redox_syscall"
version = "0.1.40"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "c214e91d3ecf43e9a4e41e578973adeb14b474f2bee858742d127af75a0112b1"

[[package]]
name = "redox_termios"
version = "0.1.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "7e891cfe48e9100a70a3b6eb652fef28920c117d366339687bd5576160db0f76"
dependencies = [
 "redox_syscall",
]

[[package]]
name = "ring"
version = "0.13.5"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "2c4db68a2e35f3497146b7e4563df7d4773a2433230c5e4b448328e31740458a"
dependencies = [
 "cc",
 "lazy_static",
 "libc",
 "untrusted",
]

[[package]]
name = "safemem"
version = "0.2.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "e27a8b19b835f7aea908818e871f5cc3a5a186550c30773be987e155e8163d8f"

[[package]]
name = "serde"
version = "1.0.69"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "210e5a3b159c566d7527e9b22e44be73f2e0fcc330bb78fef4dbccb56d2e74c8"

[[package]]
name = "serde_derive"
version = "1.0.69"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "dd724d68017ae3a7e63600ee4b2fdb3cad2158ffd1821d44aff4580f63e2b593"
dependencies = [
 "proc-macro2 0.4.6",
 "quote 0.6.3",
 "syn 0.14.4",
]

[[package]]
name = "serde_json"
version = "1.0.22"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "84b8035cabe9b35878adec8ac5fe03d5f6bc97ff6edd7ccb96b44c1276ba390e"
dependencies = [
 "dtoa",
 "itoa",
 "serde",
]

[[package]]
name = "strsim"
version = "0.8.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "8ea5119cdb4c55b55d432abb513a0429384878c15dde60cc77b1c99de1a95a6a"

[[package]]
name = "syn"
version = "0.14.4"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "2beff8ebc3658f07512a413866875adddd20f4fd47b2a4e6c9da65cd281baaea"
dependencies = [
 "proc-macro2 0.4.6",
 "quote 0.6.3",
 "unicode-xid 0.1.0",
]

[[package]]
name = "syn"
version = "1.0.77"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "5239bc68e0fef57495900cfea4e8dc75596d9a319d7e16b1e0a440d24e6fe0a0"
dependencies = [
 "proc-macro2 1.0.29",
 "quote 1.0.9",
 "unicode-xid 0.2.2",
]

[[package]]
name = "termion"
version = "1.5.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "689a3bdfaab439fd92bc87df5c4c78417d3cbe537487274e9b0b2dce76e92096"
dependencies = [
 "libc",
 "redox_syscall",
 "redox_termios",
]

[[package]]
name = "textwrap"
version = "0.11.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "d326610f408c7a4eb6f51c37c330e496b08506c9457c9d34287ecc38809fb060"
dependencies = [
 "unicode-width",
]

[[package]]
name = "thiserror"
version = "1.0.29"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "602eca064b2d83369e2b2f34b09c70b605402801927c65c11071ac911d299b88"
dependencies = [
 "thiserror-impl",
]

[[package]]
name = "thiserror-impl"
version = "1.0.29"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "bad553cc2c78e8de258400763a647e80e6d1b31ee237275d756f6836d204494c"
dependencies = [
 "proc-macro2 1.0.29",
 "quote 1.0.9",
 "syn 1.0.77",
]

[[package]]
name = "time"
version = "0.1.40"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "d825be0eb33fda1a7e68012d51e9c7f451dc1a69391e7fdc197060bb8c56667b"
dependencies = [
 "libc",
 "redox_syscall",
 "winapi",
]

[[package]]
name = "unicode-width"
version = "0.1.5"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "882386231c45df4700b275c7ff55b6f3698780a650026380e72dabe76fa46526"

[[package]]
name = "unicode-xid"
version = "0.1.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "fc72304796d0818e357ead4e000d19c9c174ab23dc11093ac919054d20a6a7fc"

[[package]]
name = "unicode-xid"
version = "0.2.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "8ccb82d61f80a663efe1f787a51b16b5a51e3314d6ac365b08639f52387b33f3"

[[package]]
name = "untrusted"
version = "0.6.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "55cd1f4b4e96b46aeb8d4855db4a7a9bd96eeeb5c6a1ab54593328761642ce2f"

[[package]]
name = "uuid"
version = "0.8.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "bc5cf98d8186244414c848017f0e2676b3fcb46807f6668a97dfe67359a3c4b7"
dependencies = [
 "getrandom",
 "serde",
]

[[package]]
name = "vec_map"
version = "0.8.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "05c78687fb1a80548ae3250346c3db86a80a7cdd77bda190189f2d0a0987c81a"

[[package]]
name = "version_check"
version = "0.1.5"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "914b1a6776c4c929a602fafd8bc742e06365d4bcbe48c30f9cca5824f70dc9dd"

[[package]]
name = "wasi"
version = "0.10.2+wasi-snapshot-preview1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "fd6fbd9a79829dd1ad0cc20627bf1ed606756a7f77edff7b66b7064f9cb327c6"

[[package]]
name = "winapi"
version = "0.3.5"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "773ef9dcc5f24b7d850d0ff101e542ff24c3b090a9768e03ff889fdef41f00fd"
dependencies = [
 "winapi-i686-pc-windows-gnu",
 "winapi-x86_64-pc-windows-gnu",
]
||||||
[[package]]
|
|
||||||
name = "winapi-i686-pc-windows-gnu"
|
|
||||||
version = "0.4.0"
|
|
||||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
|
||||||
checksum = "ac3b87c63620426dd9b991e5ce0329eff545bccbbb34f3be09ff6fb6ab51b7b6"
|
|
||||||
|
|
||||||
[[package]]
|
|
||||||
name = "winapi-x86_64-pc-windows-gnu"
|
|
||||||
version = "0.4.0"
|
|
||||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
|
||||||
checksum = "712e227841d057c1ee1cd2fb22fa7e5a5461ae8e48fa2ca79ec42cfc1931183f"
|
|
||||||
|
|
||||||
[[package]]
|
|
||||||
name = "yaml-rust"
|
|
||||||
version = "0.4.0"
|
|
||||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
|
||||||
checksum = "57ab38ee1a4a266ed033496cf9af1828d8d6e6c1cfa5f643a2809effcae4d628"
|
|
||||||
dependencies = [
|
|
||||||
"linked-hash-map",
|
|
||||||
]
|
|
|
@ -1,39 +0,0 @@
[package]
name = "orizentic"
version = "1.0.1"
authors = ["Savanni D'Gerinel <savanni@luminescent-dreams.com>"]
description = "A library for interfacing with a JWT auth token database and a command line tool for managing it."
license = "GPL3"
documentation = "https://docs.rs/orizentic"
homepage = "https://github.com/luminescent-dreams/orizentic"
repository = "https://github.com/luminescent-dreams/orizentic"
categories = ["authentication", "command-line-utilities"]

include = [
  "**/*.rs",
  "Cargo.toml",
  "build.rs",
]

[build-dependencies]
version_check = "0.1.5"

[dependencies]
chrono = { version = "0.4", features = ["serde"] }
clap = "2.33"
itertools = "0.10"
jsonwebtoken = "5"
serde = "1"
serde_derive = "1"
serde_json = "1"
thiserror = "1"
uuid = { version = "0.8", features = ["v4", "serde"] }
yaml-rust = "0.4"

[lib]
name = "orizentic"
path = "src/lib.rs"

[[bin]]
name = "orizentic"
path = "src/bin.rs"
@ -1,30 +0,0 @@
Copyright Savanni D'Gerinel (c) 2017 - 2019

All rights reserved.

Redistribution and use in source and binary forms, with or without
modification, are permitted provided that the following conditions are met:

  * Redistributions of source code must retain the above copyright
    notice, this list of conditions and the following disclaimer.

  * Redistributions in binary form must reproduce the above
    copyright notice, this list of conditions and the following
    disclaimer in the documentation and/or other materials provided
    with the distribution.

  * Neither the name of Savanni D'Gerinel nor the names of other
    contributors may be used to endorse or promote products derived
    from this software without specific prior written permission.

THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
@ -1,73 +0,0 @@
# Orizentic

[![CircleCI](https://circleci.com/gh/luminescent-dreams/orizentic/tree/sol.svg?style=svg)](https://circleci.com/gh/luminescent-dreams/orizentic/tree/sol)

[Documentation](https://docs.rs/orizentic)

Orizentic provides a library that streamlines token-based authentication, and a CLI tool for maintaining a database of tokens.

## Credit

The name is a contraction of Auth(oriz)ation/Auth(entic)ation, and credit goes to [Daria Phoebe Brashear](https://github.com/dariaphoebe).

The original idea has been debated online for many years, but the push to make this useful comes from [Aria Stewart](https://github.com/aredridel).

## Tokens

Tokens are simple [JWTs](https://jwt.io/). This library simplifies the process by easily generating and checking JWTs that have only an issuer, an optional time-to-live, a resource name, a username, and a list of permissions. A typical resulting JWT would look like this:

    { iss = Savanni
    , sub = health
    , aud = "Savanni Desktop"
    , exp = null
    , nbf = null
    , iat = 1499650083
    , jti = 9d57a8d8-d11e-43b2-a4d6-7b82ad043994
    , unregisteredClaims = { perms: [ "read", "write" ] }
    }

The `issuer` and `audience` (or username) are almost entirely for human readability. In this instance, I issued a token that was intended to be used on my desktop system.

The `subject` in this case is synonymous with Resource and is a name for the resource for which access is being granted. Permissions are a simple list of freeform strings. Both of these are flexible within your application, and your authorization checks will use them to verify that the token can be used for the specified purpose.
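As a minimal sketch, a claim set like the one above can be constructed with `ClaimSet::new` from `src/core.rs` below; the field values here simply mirror the example token and are otherwise arbitrary:

```rust
extern crate orizentic;

use orizentic::*;

fn main() {
    // No TTL means the token never expires (`exp = null` above).
    let claims = ClaimSet::new(
        Issuer(String::from("Savanni")),
        None,
        ResourceName(String::from("health")),
        Username::from("Savanni Desktop"),
        Permissions(vec![String::from("read"), String::from("write")]),
    );
    // `claims.id` becomes the `jti` claim once the token is encoded.
    println!("{}", claims.id);
}
```
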
## CLI Usage

## Library Usage

[orizentic - Rust](https://docs.rs/orizentic/1.0.0/orizentic/)

There are multiple errata for the documentation:

* There are, in fact, now [two functions](https://docs.rs/orizentic/1.0.0/orizentic/filedb/index.html) for saving and loading a database.
* An example for how to use the library is currently here [for loading the database](https://github.com/luminescent-dreams/fitnesstrax/blob/8c9f3f418ff75675874f7a8e3928ad3f7d134eb4/server/src/web.rs#L64) and here [as part of the AuthMiddleware for an Iron server](https://github.com/luminescent-dreams/fitnesstrax/blob/8c9f3f418ff75675874f7a8e3928ad3f7d134eb4/server/src/server.rs#L156). I apologize for not writing this in more detail yet; a minimal sketch follows below.
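Until that write-up exists, here is a minimal sketch of the flow those examples cover, using only the API defined in `src/core.rs` and `src/filedb.rs` below. The database path, secret, and token text are placeholders:

```rust
extern crate orizentic;

use orizentic::filedb;
use orizentic::{OrizenticCtx, Secret};

fn main() {
    // Load the claims database written by the CLI, then build a context with the shared secret.
    let claimsets = filedb::load_claims_from_file(&String::from("var/claims.db"))
        .expect("failed to load the claims database");
    let ctx = OrizenticCtx::new(Secret(b"shared-secret".to_vec()), claimsets);

    // `token_text` would normally come from an `Authorization` header.
    let token_text = String::from("<jwt text>");
    match ctx.decode_and_validate_text(token_text) {
        Ok(token) => {
            // Application-defined authorization check against the resource and permissions.
            let authorized = token.check_authorizations(|resource, perms| {
                resource.0 == "health" && perms.0.contains(&String::from("read"))
            });
            println!("authorized: {}", authorized);
        }
        Err(err) => println!("token rejected: {:?}", err),
    }
}
```
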
## Language support

This library and application are only supported for Rust. Haskell and Go support has been discontinued, but can be revived if I discover folks have an interest. The token database is compatible across tools. See the readmes in the language directories for usage information.

Future Haskell, Go, and other language versions of the library will be done through language bindings against the Rust utilities instead of through my previous clean-room re-implementations.

## Nix installation

If you have Nix installed on your system, or you run NixOS, create this derivation:

orizentic.nix:

```
{ fetchFromGitHub }:
let src = fetchFromGitHub {
    owner = "luminescent-dreams";
    repo = "orizentic";
    rev = "896140f594fe3c106662ffe2550f289bb68bc0cb";
    sha256 = "05g7b0jiyy0pv74zf89yikf65vi3jrn1da0maj0k9fxnxb2vv7a4";
  };
in import "${src}/default.nix" {}
```

At this time, you must have nixpkgs-19.03 defined (and preferably pointing to the 19.03 channel). I will parameterize this and update the instructions in the future.

I import this into my shell.nix with `with import ./orizentic.nix { inherit (pkgs) fetchFromGitHub; };`.

For a complete example, see my [shell.nix](https://github.com/savannidgerinel/nix-shell/blob/sol/shell.nix) file.

I have not bundled this application for any other distribution, but you should have no trouble building with just `cargo build --release` with Rust 1.33 and Cargo.
@ -1,20 +0,0 @@
let
  rust_overlay = import (builtins.fetchTarball "https://github.com/oxalica/rust-overlay/archive/master.tar.gz");
  pkgs = import <nixpkgs> { overlays = [ rust_overlay ]; };
  unstable = import <unstable> {};
  rust = pkgs.rust-bin.stable."1.59.0".default.override {
    extensions = [ "rust-src" ];
  };

in pkgs.mkShell {
  name = "datasphere";

  nativeBuildInputs = [
    rust
    unstable.rust-analyzer
  ];

  shellHook = ''
    if [ -e ~/.nixpkgs/shellhook.sh ]; then . ~/.nixpkgs/shellhook.sh; fi
  '';
}
@ -1,251 +0,0 @@
extern crate chrono;
extern crate clap;
extern crate itertools;
extern crate orizentic;

use chrono::Duration;
use clap::{App, Arg, ArgMatches, SubCommand};
use std::env;

use orizentic::*;

#[derive(Debug)]
enum OrizenticErr {
    ParseError(std::num::ParseIntError),
}

// ORIZENTIC_DB
// ORIZENTIC_SECRET
//
// list
// create
// revoke
// encode
pub fn main() {
    let db_path = env::var_os("ORIZENTIC_DB").map(|str| {
        str.into_string()
            .expect("ORIZENTIC_DB contains invalid Unicode sequences")
    });
    let secret = env::var_os("ORIZENTIC_SECRET").map(|str| {
        Secret(
            str.into_string()
                .map(|s| s.into_bytes())
                .expect("ORIZENTIC_SECRET contains invalid Unicode sequences"),
        )
    });

    let matches = App::new("orizentic cli")
        .subcommand(SubCommand::with_name("list"))
        .subcommand(
            SubCommand::with_name("create")
                .arg(
                    Arg::with_name("issuer")
                        .long("issuer")
                        .takes_value(true)
                        .required(true),
                )
                .arg(
                    Arg::with_name("ttl")
                        .long("ttl")
                        .takes_value(true)
                        .required(true),
                )
                .arg(
                    Arg::with_name("resource")
                        .long("resource")
                        .takes_value(true)
                        .required(true),
                )
                .arg(
                    Arg::with_name("username")
                        .long("username")
                        .takes_value(true)
                        .required(true),
                )
                .arg(
                    Arg::with_name("perms")
                        .long("perms")
                        .takes_value(true)
                        .required(true),
                ),
        )
        .subcommand(
            SubCommand::with_name("revoke").arg(
                Arg::with_name("id")
                    .long("id")
                    .takes_value(true)
                    .required(true),
            ),
        )
        .subcommand(
            SubCommand::with_name("encode").arg(
                Arg::with_name("id")
                    .long("id")
                    .takes_value(true)
                    .required(true),
            ),
        )
        .get_matches();

    match matches.subcommand() {
        ("list", _) => list_tokens(db_path),
        ("create", Some(args)) => create_token(db_path, secret, args),
        ("revoke", Some(args)) => revoke_token(db_path, args),
        ("encode", Some(args)) => encode_token(db_path, secret, args),
        (cmd, _) => {
            println!("unknown subcommand: {}", cmd);
        }
    }
}

fn list_tokens(db_path: Option<String>) {
    let db_path_ = db_path.expect("ORIZENTIC_DB is required for this operation");
    let claimsets = orizentic::filedb::load_claims_from_file(&db_path_);
    match claimsets {
        Ok(claimsets_) => {
            for claimset in claimsets_ {
                println!("[{}]", claimset.id);
                println!("Audience: {}", String::from(claimset.audience));
                match claimset.expiration {
                    Some(expiration) => println!(
                        "Expiration: {}",
                        expiration.format("%Y-%m-%d %H:%M:%S")
                    ),
                    None => println!("Expiration: None"),
                }
                println!("Issuer: {}", claimset.issuer.0);
                println!(
                    "Issued At: {}",
                    claimset.issued_at.format("%Y-%m-%d %H:%M:%S")
                );
                println!("Resource Name: {}", claimset.resource.0);

                let perm_val: String = itertools::Itertools::intersperse(
                    claimset.permissions.0.clone().into_iter(),
                    String::from(", "),
                )
                .collect();
                println!("Permissions: {}", perm_val);
                println!("")
            }
        }
        Err(err) => {
            println!("claimset failed to load: {}", err);
            std::process::exit(1);
        }
    }
}

fn create_token(db_path: Option<String>, secret: Option<Secret>, args: &ArgMatches) {
    let db_path_ = db_path.expect("ORIZENTIC_DB is required for this operation");
    let secret_ = secret.expect("ORIZENTIC_SECRET is required for this operation");
    let issuer = args
        .value_of("issuer")
        .map(|x| Issuer(String::from(x)))
        .expect("--issuer is a required parameter");
    let ttl: Option<TTL> = args.value_of("ttl").map(|x| {
        x.parse()
            .and_then(|d| Ok(TTL(Duration::seconds(d))))
            .map_err(|err| OrizenticErr::ParseError(err))
            .expect("Failed to parse TTL")
    });
    let resource_name = args
        .value_of("resource")
        .map(|x| ResourceName(String::from(x)))
        .expect("--resource is a required parameter");
    let username = args
        .value_of("username")
        .map(|x| Username::from(x))
        .expect("--username is a required parameter");
    let perms: Permissions = args
        .value_of("perms")
        .map(|str| Permissions(str.split(',').map(|s| String::from(s)).collect()))
        .expect("--permissions is a required parameter");

    let claimsets = orizentic::filedb::load_claims_from_file(&db_path_);
    match claimsets {
        Err(err) => {
            println!("claimset failed to load: {}", err);
            std::process::exit(1);
        }
        Ok(claimsets_) => {
            let new_claimset = ClaimSet::new(issuer, ttl, resource_name, username, perms);
            let mut ctx = orizentic::OrizenticCtx::new(secret_, claimsets_);
            ctx.add_claimset(new_claimset.clone());
            match orizentic::filedb::save_claims_to_file(&ctx.list_claimsets(), &db_path_) {
                Err(err) => {
                    println!("Failed to write claimset to file: {:?}", err);
                    std::process::exit(1);
                }
                Ok(_) => match ctx.encode_claimset(&new_claimset) {
                    Ok(token) => println!("{}", token.text),
                    Err(err) => {
                        println!("token could not be encoded: {:?}", err);
                        std::process::exit(1);
                    }
                },
            }
        }
    }
}

fn revoke_token(db_path: Option<String>, args: &ArgMatches) {
    let db_path_ = db_path.expect("ORIZENTIC_DB is required for this operation");
    let claimsets = orizentic::filedb::load_claims_from_file(&db_path_);

    match claimsets {
        Err(err) => {
            println!("claimset failed to load: {}", err);
            std::process::exit(1);
        }
        Ok(claimsets_) => {
            let id = args
                .value_of("id")
                .map(String::from)
                .expect("--id is a required parameter");
            let mut ctx =
                orizentic::OrizenticCtx::new(Secret(String::from("").into_bytes()), claimsets_);
            ctx.revoke_by_uuid(&id);
            match orizentic::filedb::save_claims_to_file(&ctx.list_claimsets(), &db_path_) {
                Err(err) => {
                    println!("Failed to write claimset to file: {:?}", err);
                    std::process::exit(1);
                }
                Ok(_) => {}
            }
        }
    }
}

fn encode_token(db_path: Option<String>, secret: Option<Secret>, args: &ArgMatches) {
    let db_path_ = db_path.expect("ORIZENTIC_DB is required for this operation");
    let secret_ = secret.expect("ORIZENTIC_SECRET is required for this operation");
    let id = args
        .value_of("id")
        .map(String::from)
        .expect("--id is a required parameter");

    let claimsets = orizentic::filedb::load_claims_from_file(&db_path_);
    match claimsets {
        Err(err) => {
            println!("claimset failed to load: {}", err);
            std::process::exit(1);
        }
        Ok(claimsets_) => {
            let ctx = orizentic::OrizenticCtx::new(secret_, claimsets_);
            let claimset = ctx.find_claimset(&id);
            match claimset {
                Some(claimset_) => match ctx.encode_claimset(&claimset_) {
                    Ok(token) => println!("{}", token.text),
                    Err(err) => {
                        println!("token could not be encoded: {:?}", err);
                        std::process::exit(1);
                    }
                },
                None => {
                    println!("No claimset found");
                    std::process::exit(1);
                }
            }
        }
    }
}
@ -1,303 +0,0 @@
extern crate chrono;
extern crate jsonwebtoken as jwt;
extern crate serde;
extern crate serde_json;
extern crate uuid;
extern crate yaml_rust;

use core::chrono::prelude::*;
use core::uuid::Uuid;
use std::collections::HashMap;
use thiserror::Error;

/// Orizentic Errors
#[derive(Debug, Error)]
pub enum Error {
    /// An underlying JWT decoding error. May be replaced with Orizentic semantic errors to better
    /// encapsulate the JWT library.
    #[error("JWT failed to decode: {0}")]
    JWTError(jwt::errors::Error),
    /// Token decoded and verified but was not present in the database.
    #[error("Token not recognized")]
    UnknownToken,
}

/// ResourceName is application-defined and names a resource to which access should be controlled
#[derive(Debug, PartialEq, Clone)]
pub struct ResourceName(pub String);

/// Permissions are application-defined descriptions of what can be done with the named resource
#[derive(Debug, PartialEq, Clone)]
pub struct Permissions(pub Vec<String>);

/// Issuers are typically informative, but should generally describe who or what created the token
#[derive(Debug, PartialEq, Clone)]
pub struct Issuer(pub String);

/// Time to live is the number of seconds until a token expires. This is used for creating tokens,
/// but tokens store their actual expiration time.
#[derive(Debug, PartialEq, Clone)]
pub struct TTL(pub chrono::Duration);

/// Username, or Audience in JWT terms, should describe who or what is supposed to be using this
/// token
#[derive(Debug, PartialEq, Clone)]
pub struct Username(String);

impl From<Username> for String {
    fn from(u: Username) -> String {
        u.0.clone()
    }
}

impl From<&str> for Username {
    fn from(s: &str) -> Username {
        Username(s.to_owned())
    }
}

#[derive(Debug, PartialEq, Clone)]
pub struct Secret(pub Vec<u8>);

/// A ClaimSet represents one set of permissions and claims. It is a standardized way of specifying
/// the owner, issuer, expiration time, relevant resources, and specific permissions on that
/// resource. By itself, this is only an informative data structure and so should never be trusted
/// when passed over the wire. See `VerifiedToken` and `UnverifiedToken`.
#[derive(Debug, PartialEq, Clone)]
pub struct ClaimSet {
    pub id: String,
    pub audience: Username,
    pub expiration: Option<DateTime<Utc>>,
    pub issuer: Issuer,
    pub issued_at: DateTime<Utc>,
    pub resource: ResourceName,
    pub permissions: Permissions,
}

impl ClaimSet {
    /// Create a new `ClaimSet`. This will return a claimset with the expiration time calculated
    /// from the TTL if the TTL is provided. No expiration will be set if no TTL is provided.
    pub fn new(
        issuer: Issuer,
        ttl: Option<TTL>,
        resource_name: ResourceName,
        user_name: Username,
        perms: Permissions,
    ) -> ClaimSet {
        let issued_at: DateTime<Utc> = Utc::now().with_nanosecond(0).unwrap();
        let expiration = match ttl {
            Some(TTL(ttl_)) => issued_at.checked_add_signed(ttl_),
            None => None,
        };
        ClaimSet {
            id: String::from(Uuid::new_v4().to_hyphenated().to_string()),
            audience: user_name,
            expiration,
            issuer,
            issued_at,
            resource: resource_name,
            permissions: perms,
        }
    }

    pub fn to_json(&self) -> Result<String, serde_json::Error> {
        serde_json::to_string(&(ClaimSetJS::from_claimset(self)))
    }

    pub fn from_json(text: &String) -> Result<ClaimSet, serde_json::Error> {
        serde_json::from_str(&text).map(|x| ClaimSetJS::to_claimset(&x))
    }
}

/// ClaimSetJS is an intermediary data structure between JWT serialization and a more usable
/// ClaimSet.
#[derive(Debug, PartialEq, Clone, Serialize, Deserialize)]
pub struct ClaimSetJS {
    jti: String,
    aud: String,
    exp: Option<i64>,
    iss: String,
    iat: i64,
    sub: String,
    perms: Vec<String>,
}

impl ClaimSetJS {
    pub fn from_claimset(claims: &ClaimSet) -> ClaimSetJS {
        ClaimSetJS {
            jti: claims.id.clone(),
            aud: claims.audience.0.clone(),
            exp: claims.expiration.map(|t| t.timestamp()),
            iss: claims.issuer.0.clone(),
            iat: claims.issued_at.timestamp(),
            sub: claims.resource.0.clone(),
            perms: claims.permissions.0.clone(),
        }
    }

    pub fn to_claimset(&self) -> ClaimSet {
        ClaimSet {
            id: self.jti.clone(),
            audience: Username(self.aud.clone()),
            expiration: self.exp.map(|t| Utc.timestamp(t, 0)),
            issuer: Issuer(self.iss.clone()),
            issued_at: Utc.timestamp(self.iat, 0),
            resource: ResourceName(self.sub.clone()),
            permissions: Permissions(self.perms.clone()),
        }
    }
}

/// The Orizentic Context encapsulates a set of claims and an associated secret. This provides the
/// overall convenience of easily creating and validating tokens. Generated claimsets are stored
/// here on the theory that, even with validation, only those claims actually stored in the
/// database should be considered valid.
pub struct OrizenticCtx(Secret, HashMap<String, ClaimSet>);

/// An UnverifiedToken is a combination of the JWT serialization and the decoded `ClaimSet`. As this
/// is unverified, this should only be used for information purposes, such as determining what a
/// user can do with a token even when the decoding key is absent.
#[derive(Debug)]
pub struct UnverifiedToken {
    pub text: String,
    pub claims: ClaimSet,
}

impl UnverifiedToken {
    /// Decode a JWT text string without verification
    pub fn decode_text(text: String) -> Result<UnverifiedToken, Error> {
        let res = jwt::dangerous_unsafe_decode::<ClaimSetJS>(&text);
        match res {
            Ok(res_) => Ok(UnverifiedToken {
                text,
                claims: res_.claims.to_claimset(),
            }),
            Err(err) => Err(Error::JWTError(err)),
        }
    }
}

/// A VerifiedToken is a combination of the JWT serialization and the decoded `ClaimSet`. This will
/// only be created by the `validate_function`, and thus will represent a token which has been
/// validated via signature, expiration time, and presence in the database.
#[derive(Debug)]
pub struct VerifiedToken {
    pub text: String,
    pub claims: ClaimSet,
}

impl VerifiedToken {
    /// Given a `VerifiedToken`, pass the resource name and permissions to a user-defined function.
    /// The function should return true if the caller should be granted access to the resource and
    /// false otherwise. That result will be passed back to the caller.
    pub fn check_authorizations<F: FnOnce(&ResourceName, &Permissions) -> bool>(
        &self,
        f: F,
    ) -> bool {
        f(&self.claims.resource, &self.claims.permissions)
    }
}

impl OrizenticCtx {
    /// Create a new Orizentic Context with an initial set of claims.
    pub fn new(secret: Secret, claims_lst: Vec<ClaimSet>) -> OrizenticCtx {
        let mut hm = HashMap::new();
        for claimset in claims_lst {
            hm.insert(claimset.id.clone(), claimset);
        }
        OrizenticCtx(secret, hm)
    }

    /// Validate a token by checking its signature, that it is not expired, and that it is still
    /// present in the database. Return an error if any check fails, but return a `VerifiedToken`
    /// if it all succeeds.
    pub fn validate_token(&self, token: &UnverifiedToken) -> Result<VerifiedToken, Error> {
        let validator = match token.claims.expiration {
            Some(_) => jwt::Validation::default(),
            None => jwt::Validation {
                validate_exp: false,
                ..jwt::Validation::default()
            },
        };
        let res = jwt::decode::<ClaimSetJS>(&token.text, &(self.0).0, &validator);
        match res {
            Ok(res_) => {
                let claims = res_.claims;
                let in_db = self.1.get(&claims.jti);
                if in_db.is_some() {
                    Ok(VerifiedToken {
                        text: token.text.clone(),
                        claims: claims.to_claimset(),
                    })
                } else {
                    Err(Error::UnknownToken)
                }
            }
            Err(err) => Err(Error::JWTError(err)),
        }
    }

    /// Given a text string, as from a web application's `Authorization` header, decode the string
    /// and then validate the token.
    pub fn decode_and_validate_text(&self, text: String) -> Result<VerifiedToken, Error> {
        // it is necessary to first decode the token because we need the validator to know whether
        // to attempt to validate the expiration. Without that check, the validator will fail any
        // expiration set to None.
        match UnverifiedToken::decode_text(text) {
            Ok(unverified) => self.validate_token(&unverified),
            Err(err) => Err(err),
        }
    }

    /// Add a claimset to the database.
    pub fn add_claimset(&mut self, claimset: ClaimSet) {
        self.1.insert(claimset.id.clone(), claimset);
    }

    /// Remove a claims set from the database so that all additional validation checks fail.
    pub fn revoke_claimset(&mut self, claim: &ClaimSet) {
        self.1.remove(&claim.id);
    }

    /// Revoke a ClaimsSet given its ID, which is set in the `jti` claim of a JWT or the `id` field
    /// of a `ClaimSet`.
    pub fn revoke_by_uuid(&mut self, claim_id: &String) {
        self.1.remove(claim_id);
    }

    /// *NOT IMPLEMENTED*
    pub fn replace_claimsets(&mut self, _claims_lst: Vec<ClaimSet>) {
        unimplemented!()
    }

    /// List all of the `ClaimSet`s in the database.
    pub fn list_claimsets(&self) -> Vec<&ClaimSet> {
        self.1.values().map(|item| item).collect()
    }

    /// Find a `ClaimSet` by ID.
    pub fn find_claimset(&self, claims_id: &String) -> Option<&ClaimSet> {
        self.1.get(claims_id)
    }

    /// Encode and sign a claimset, returning the result as a `VerifiedToken`.
    pub fn encode_claimset(&self, claims: &ClaimSet) -> Result<VerifiedToken, Error> {
        let in_db = self.1.get(&claims.id);
        if in_db.is_some() {
            let text = jwt::encode(
                &jwt::Header::default(),
                &ClaimSetJS::from_claimset(&claims),
                &(self.0).0,
            );
            match text {
                Ok(text_) => Ok(VerifiedToken {
                    text: text_,
                    claims: claims.clone(),
                }),
                Err(err) => Err(Error::JWTError(err)),
            }
        } else {
            Err(Error::UnknownToken)
        }
    }
}
@ -1,37 +0,0 @@
extern crate serde_json;

use core;

use std::fs::File;
use std::path::Path;
use std::io::{Read, Error, Write};

pub fn save_claims_to_file(claimsets: &Vec<&core::ClaimSet>, path: &String) -> Result<(), Error> {
    let path = Path::new(path);
    let mut file = File::create(&path)?;

    let claimsets_js: Vec<core::ClaimSetJS> = claimsets
        .into_iter()
        .map(|claims| core::ClaimSetJS::from_claimset(claims))
        .collect();
    let claimset_str = serde_json::to_string(&claimsets_js)?;
    file.write_fmt(format_args!("{}", claimset_str))?;

    Ok(())
}

pub fn load_claims_from_file(path: &String) -> Result<Vec<core::ClaimSet>, Error> {
    let path = Path::new(path);
    let mut file = File::open(&path)?;
    let mut text = String::new();

    file.read_to_string(&mut text)?;

    let claimsets_js: Vec<core::ClaimSetJS> = serde_json::from_str(&text)?;
    let claimsets = claimsets_js
        .into_iter()
        .map(|cl_js| core::ClaimSetJS::to_claimset(&cl_js))
        .collect();

    Ok(claimsets)
}
@ -1,30 +0,0 @@
//! The Orizentic token management library
//!
//! This library provides a high level interface for authentication token management. It wraps
//! around the [JWT](https://jwt.io/) standard using the
//! [`jsonwebtoken`](https://github.com/Keats/jsonwebtoken) library for serialization and
//! validation.
//!
//! Functionality revolves around the relationship between a [ClaimSet](struct.ClaimSet.html), a
//! [VerifiedToken](struct.VerifiedToken.html), and an
//! [UnverifiedToken](struct.UnverifiedToken.html). A [ClaimSet](struct.ClaimSet.html) is
//! considered informative and stores all of the information about the permissions and resources
//! that the token bearer should have access to. [VerifiedToken](struct.VerifiedToken.html) and
//! [UnverifiedToken](struct.UnverifiedToken.html) are the result of the process of decoding a
//! string JWT, and inherently specify whether the decoding process verified the signature,
//! expiration time, and presence in the database.
//!
//! Database save and load features are available in the [filedb](filedb/index.html) module.
//!
//! No setup is necessary when using this library to decode JWT strings. Refer to
//! [UnverifiedToken::decode_text](struct.UnverifiedToken.html#method.decode_text).
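//!
//! As a sketch (the token text here is a placeholder; a real token would come from the CLI or an
//! `Authorization` header):
//!
//! ```no_run
//! use orizentic::UnverifiedToken;
//!
//! // Inspect a token's claims without verifying its signature.
//! let unverified = UnverifiedToken::decode_text(String::from("<jwt text>"))
//!     .expect("the token text could not be decoded");
//! println!("{:?}", unverified.claims);
//! ```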

#[macro_use]
extern crate serde_derive;
extern crate thiserror;

pub use core::*;

mod core;
pub mod filedb;
@ -1,429 +0,0 @@
extern crate chrono;
extern crate orizentic;

use orizentic::filedb::*;
use orizentic::*;
use std::fs;
use std::ops;
use std::thread;
use std::time;

struct FileCleanup(String);

impl FileCleanup {
    fn new(path: &str) -> FileCleanup {
        FileCleanup(String::from(path))
    }
}

impl ops::Drop for FileCleanup {
    fn drop(&mut self) {
        fs::remove_file(&self.0).expect("failed to remove time series file");
    }
}

#[test]
fn can_create_a_new_claimset() {
    let mut ctx = OrizenticCtx::new(Secret("abcdefg".to_string().into_bytes()), Vec::new());
    let claims = ClaimSet::new(
        Issuer(String::from("test")),
        Some(TTL(chrono::Duration::seconds(3600))),
        ResourceName(String::from("resource-1")),
        Username::from("Savanni"),
        Permissions(vec![
            String::from("read"),
            String::from("write"),
            String::from("grant"),
        ]),
    );
    ctx.add_claimset(claims.clone());
    assert_eq!(claims.audience, Username::from("Savanni"));
    match claims.expiration {
        Some(ttl) => assert_eq!(ttl - claims.issued_at, chrono::Duration::seconds(3600)),
        None => panic!("ttl should not be None"),
    }
    assert_eq!(claims.issuer, Issuer(String::from("test")));
    assert_eq!(claims.resource, ResourceName(String::from("resource-1")));
    assert_eq!(
        claims.permissions,
        Permissions(vec![
            String::from("read"),
            String::from("write"),
            String::from("grant"),
        ])
    );
    {
        let tok_list = ctx.list_claimsets();
        assert_eq!(tok_list.len(), 1);
        assert!(tok_list.contains(&&claims));
    }

    let claims2 = ClaimSet::new(
        Issuer(String::from("test")),
        Some(TTL(chrono::Duration::seconds(3600))),
        ResourceName(String::from("resource-2")),
        Username::from("Savanni"),
        Permissions(vec![
            String::from("read"),
            String::from("write"),
            String::from("grant"),
        ]),
    );
    ctx.add_claimset(claims2.clone());

    assert_ne!(claims2.id, claims.id);
    assert_eq!(claims2.resource, ResourceName(String::from("resource-2")));

    let tok_list = ctx.list_claimsets();
    assert_eq!(tok_list.len(), 2);
    assert!(tok_list.contains(&&claims));
    assert!(tok_list.contains(&&claims2));
}

#[test]
fn can_retrieve_claim_by_id() {
    let mut ctx = OrizenticCtx::new(Secret("abcdefg".to_string().into_bytes()), Vec::new());
    let claims = ClaimSet::new(
        Issuer(String::from("test")),
        Some(TTL(chrono::Duration::seconds(3600))),
        ResourceName(String::from("resource-1")),
        Username::from("Savanni"),
        Permissions(vec![
            String::from("read"),
            String::from("write"),
            String::from("grant"),
        ]),
    );
    let claims2 = ClaimSet::new(
        Issuer(String::from("test")),
        Some(TTL(chrono::Duration::seconds(3600))),
        ResourceName(String::from("resource-2")),
        Username::from("Savanni"),
        Permissions(vec![
            String::from("read"),
            String::from("write"),
            String::from("grant"),
        ]),
    );
    ctx.add_claimset(claims.clone());
    ctx.add_claimset(claims2.clone());

    assert_eq!(ctx.find_claimset(&claims.id), Some(&claims));
    assert_eq!(ctx.find_claimset(&claims2.id), Some(&claims2));

    ctx.revoke_claimset(&claims);
    assert_eq!(ctx.find_claimset(&claims.id), None);
    assert_eq!(ctx.find_claimset(&claims2.id), Some(&claims2));
}

#[test]
fn can_revoke_claim_by_id() {
    let mut ctx = OrizenticCtx::new(Secret("abcdefg".to_string().into_bytes()), Vec::new());
    let claims = ClaimSet::new(
        Issuer(String::from("test")),
        Some(TTL(chrono::Duration::seconds(3600))),
        ResourceName(String::from("resource-1")),
        Username::from("Savanni"),
        Permissions(vec![
            String::from("read"),
            String::from("write"),
            String::from("grant"),
        ]),
    );
    let claims2 = ClaimSet::new(
        Issuer(String::from("test")),
        Some(TTL(chrono::Duration::seconds(3600))),
        ResourceName(String::from("resource-2")),
        Username::from("Savanni"),
        Permissions(vec![
            String::from("read"),
            String::from("write"),
            String::from("grant"),
        ]),
    );

    ctx.add_claimset(claims.clone());
    ctx.add_claimset(claims2.clone());

    assert_eq!(ctx.find_claimset(&claims.id), Some(&claims));
    assert_eq!(ctx.find_claimset(&claims2.id), Some(&claims2));

    ctx.revoke_by_uuid(&claims.id);
    assert_eq!(ctx.find_claimset(&claims.id), None);
    assert_eq!(ctx.find_claimset(&claims2.id), Some(&claims2));
}

#[test]
fn can_revoke_a_token() {
    let mut ctx = OrizenticCtx::new(Secret("abcdefg".to_string().into_bytes()), Vec::new());
    let claims = ClaimSet::new(
        Issuer(String::from("test")),
        Some(TTL(chrono::Duration::seconds(3600))),
        ResourceName(String::from("resource-1")),
        Username::from("Savanni"),
        Permissions(vec![
            String::from("read"),
            String::from("write"),
            String::from("grant"),
        ]),
    );
    let claims2 = ClaimSet::new(
        Issuer(String::from("test")),
        Some(TTL(chrono::Duration::seconds(3600))),
        ResourceName(String::from("resource-2")),
        Username::from("Savanni"),
        Permissions(vec![
            String::from("read"),
            String::from("write"),
            String::from("grant"),
        ]),
    );
    ctx.add_claimset(claims.clone());
    ctx.add_claimset(claims2.clone());

    ctx.revoke_claimset(&claims);
    let tok_list = ctx.list_claimsets();
    assert_eq!(tok_list.len(), 1);
    assert!(!tok_list.contains(&&claims));
    assert!(tok_list.contains(&&claims2));
}

#[test]
fn rejects_tokens_with_an_invalid_secret() {
    let mut ctx1 = OrizenticCtx::new(Secret("ctx1".to_string().into_bytes()), Vec::new());
    let ctx2 = OrizenticCtx::new(Secret("ctx2".to_string().into_bytes()), Vec::new());
    let claims = ClaimSet::new(
        Issuer(String::from("test")),
        Some(TTL(chrono::Duration::seconds(3600))),
        ResourceName(String::from("resource-1")),
        Username::from("Savanni"),
        Permissions(vec![
            String::from("read"),
            String::from("write"),
            String::from("grant"),
        ]),
    );
    ctx1.add_claimset(claims.clone());
    let encoded_token = ctx1.encode_claimset(&claims).ok().unwrap();
    assert!(ctx2.decode_and_validate_text(encoded_token.text).is_err());
}

#[test]
fn rejects_tokens_that_are_absent_from_the_database() {
    let mut ctx = OrizenticCtx::new(Secret("ctx".to_string().into_bytes()), Vec::new());
    let claims = ClaimSet::new(
        Issuer(String::from("test")),
        Some(TTL(chrono::Duration::seconds(3600))),
        ResourceName(String::from("resource-1")),
        Username::from("Savanni"),
        Permissions(vec![
            String::from("read"),
            String::from("write"),
            String::from("grant"),
        ]),
    );
    ctx.add_claimset(claims.clone());
    let encoded_token = ctx.encode_claimset(&claims).ok().unwrap();

    ctx.revoke_claimset(&claims);
    assert!(ctx.decode_and_validate_text(encoded_token.text).is_err());
}

#[test]
fn validates_present_tokens_with_a_valid_secret() {
    let mut ctx = OrizenticCtx::new(Secret("ctx".to_string().into_bytes()), Vec::new());
    let claims = ClaimSet::new(
        Issuer(String::from("test")),
        Some(TTL(chrono::Duration::seconds(3600))),
        ResourceName(String::from("resource-1")),
        Username::from("Savanni"),
        Permissions(vec![
            String::from("read"),
            String::from("write"),
            String::from("grant"),
        ]),
    );
    ctx.add_claimset(claims.clone());
    let encoded_token = ctx.encode_claimset(&claims).ok().unwrap();
    assert!(ctx.decode_and_validate_text(encoded_token.text).is_ok());
}

#[test]
fn rejects_expired_tokens() {
    let mut ctx = OrizenticCtx::new(Secret("ctx".to_string().into_bytes()), Vec::new());
    let claims = ClaimSet::new(
        Issuer(String::from("test")),
        Some(TTL(chrono::Duration::seconds(1))),
        ResourceName(String::from("resource-1")),
        Username::from("Savanni"),
        Permissions(vec![
            String::from("read"),
            String::from("write"),
            String::from("grant"),
        ]),
    );
    ctx.add_claimset(claims.clone());
    thread::sleep(time::Duration::from_secs(2));
    let encoded_token = ctx.encode_claimset(&claims).ok().unwrap();
    assert!(ctx.decode_and_validate_text(encoded_token.text).is_err());
}

#[test]
fn accepts_tokens_that_have_no_expiration() {
    let mut ctx = OrizenticCtx::new(Secret("ctx".to_string().into_bytes()), Vec::new());
    let claims = ClaimSet::new(
        Issuer(String::from("test")),
        None,
        ResourceName(String::from("resource-1")),
        Username::from("Savanni"),
        Permissions(vec![
            String::from("read"),
            String::from("write"),
            String::from("grant"),
        ]),
    );
    ctx.add_claimset(claims.clone());
    let encoded_token = ctx.encode_claimset(&claims).ok().unwrap();
    assert!(ctx.decode_and_validate_text(encoded_token.text).is_ok());
}

#[test]
fn authorizes_a_token_with_the_correct_resource_and_permissions() {
    let mut ctx = OrizenticCtx::new(Secret("ctx".to_string().into_bytes()), Vec::new());
    let claims = ClaimSet::new(
        Issuer(String::from("test")),
        None,
        ResourceName(String::from("resource-1")),
        Username::from("Savanni"),
        Permissions(vec![
            String::from("read"),
            String::from("write"),
            String::from("grant"),
        ]),
    );
    ctx.add_claimset(claims.clone());
    let encoded_token = ctx.encode_claimset(&claims).ok().unwrap();
    let token = ctx
        .decode_and_validate_text(encoded_token.text)
        .ok()
        .unwrap();
    let res = token.check_authorizations(|rn: &ResourceName, perms: &Permissions| {
        *rn == ResourceName(String::from("resource-1")) && perms.0.contains(&String::from("grant"))
    });
    assert!(res);
}

#[test]
fn rejects_a_token_with_the_incorrect_permissions() {
    let mut ctx = OrizenticCtx::new(Secret("ctx".to_string().into_bytes()), Vec::new());
    let claims = ClaimSet::new(
        Issuer(String::from("test")),
        None,
        ResourceName(String::from("resource-1")),
        Username::from("Savanni"),
        Permissions(vec![String::from("read"), String::from("write")]),
    );
    ctx.add_claimset(claims.clone());
    let encoded_token = ctx.encode_claimset(&claims).ok().unwrap();
    let token = ctx
        .decode_and_validate_text(encoded_token.text)
        .ok()
        .unwrap();
    let res = token.check_authorizations(|rn: &ResourceName, perms: &Permissions| {
        *rn == ResourceName(String::from("resource-1")) && perms.0.contains(&String::from("grant"))
    });
    assert!(!res);
}

#[test]
fn rejects_a_token_with_the_incorrect_resource_name() {
    let mut ctx = OrizenticCtx::new(Secret("ctx".to_string().into_bytes()), Vec::new());
    let claims = ClaimSet::new(
        Issuer(String::from("test")),
        None,
        ResourceName(String::from("resource-2")),
        Username::from("Savanni"),
        Permissions(vec![
            String::from("read"),
            String::from("write"),
            String::from("grant"),
        ]),
    );
    ctx.add_claimset(claims.clone());
    let encoded_token = ctx.encode_claimset(&claims).ok().unwrap();
    let token = ctx
        .decode_and_validate_text(encoded_token.text)
        .ok()
        .unwrap();
    let res = token.check_authorizations(|rn: &ResourceName, perms: &Permissions| {
        *rn == ResourceName(String::from("resource-1")) && perms.0.contains(&String::from("grant"))
    });
    assert!(!res);
}

#[test]
fn claims_serialize_to_json() {
    let claims = ClaimSet::new(
        Issuer(String::from("test")),
        None,
        ResourceName(String::from("resource-2")),
        Username::from("Savanni"),
        Permissions(vec![
            String::from("read"),
            String::from("write"),
            String::from("grant"),
        ]),
    );

    let expected_jti = format!("\"jti\":\"{}\"", claims.id);

    let claim_str = claims.to_json().expect("to_json threw an error");
    //.expect(assert!(false, format!("[claims_serilazie_to_json] {}", err)));
    assert!(claim_str.contains(&expected_jti));

    let claims_ = ClaimSet::from_json(&claim_str).expect("from_json threw an error");
    assert_eq!(claims, claims_);
}

#[test]
fn save_and_load() {
    let _file_cleanup = FileCleanup::new("var/claims.db");
    let mut ctx = OrizenticCtx::new(Secret("ctx".to_string().into_bytes()), Vec::new());
    let claims = ClaimSet::new(
        Issuer(String::from("test")),
        None,
        ResourceName(String::from("resource-2")),
        Username::from("Savanni"),
        Permissions(vec![
            String::from("read"),
            String::from("write"),
            String::from("grant"),
        ]),
    );
    ctx.add_claimset(claims.clone());

    let claims2 = ClaimSet::new(
        Issuer(String::from("test")),
        Some(TTL(chrono::Duration::seconds(3600))),
        ResourceName(String::from("resource-2")),
        Username::from("Savanni"),
        Permissions(vec![
            String::from("read"),
            String::from("write"),
            String::from("grant"),
        ]),
    );
    ctx.add_claimset(claims2.clone());

    let res = save_claims_to_file(&ctx.list_claimsets(), &String::from("var/claims.db"));
    assert!(res.is_ok());

    let claimset = load_claims_from_file(&String::from("var/claims.db"));
    match claimset {
        Ok(claimset_) => {
            assert!(claimset_.contains(&claims));
            assert!(claimset_.contains(&claims2));
        }
        Err(err) => assert!(false, "{}", err),
    }
}