From 808f2a79e6b1664f888370169a3431a51e8e5e1b Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?R=C3=A9mi=20Bardon?= Date: Tue, 2 Jan 2024 19:20:48 -0600 Subject: [PATCH] =?UTF-8?q?=F0=9F=92=84=20Format?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- .githooks/pre-commit | 29 +++ .github/workflows/test.yml | 9 +- src/orangutan/rustfmt.toml | 6 + src/orangutan/src/config.rs | 28 ++- src/orangutan/src/generate.rs | 156 +++++++++----- src/orangutan/src/helpers.rs | 47 +++-- src/orangutan/src/init.rs | 2 +- src/orangutan/src/keys_reader.rs | 57 +++-- src/orangutan/src/object_reader.rs | 60 ++++-- src/orangutan/src/preflight.rs | 25 ++- src/orangutan/src/refresh-token.rs | 67 +++--- src/orangutan/src/server.rs | 322 ++++++++++++++++------------- 12 files changed, 521 insertions(+), 287 deletions(-) create mode 100644 .githooks/pre-commit create mode 100644 src/orangutan/rustfmt.toml diff --git a/.githooks/pre-commit b/.githooks/pre-commit new file mode 100644 index 0000000..9a46564 --- /dev/null +++ b/.githooks/pre-commit @@ -0,0 +1,29 @@ +#!/bin/bash + +# Comes from . + +exe=$(which rustfmt) + +if [ -n "$exe" ] +then + # field separator to the new line + IFS=$'\n' + + for line in $(git status -s) + do + # if added or modified + if [[ $line == A* || $line == M* ]] + then + # check file extension + if [[ $line == *.rs ]] + then + # format file + rustfmt --edition 2021 -l -- $(pwd)/${line:3} + # add changes + git add $(pwd)/${line:3} + fi + fi + done +else + echo "rustfmt was not found" +fi \ No newline at end of file diff --git a/.github/workflows/test.yml b/.github/workflows/test.yml index 02294da..2c43215 100644 --- a/.github/workflows/test.yml +++ b/.github/workflows/test.yml @@ -39,8 +39,7 @@ jobs: working-directory: ./src/orangutan run: cargo test - # TODO: Re-enable style checking - # # TODO: Only check code style for one target - # - name: Check code style - # working-directory: ./src/orangutan - # run: cargo fmt -- --check + # TODO: Only check code style for one target + - name: Check code style + working-directory: ./src/orangutan + run: cargo fmt -- --check diff --git a/src/orangutan/rustfmt.toml b/src/orangutan/rustfmt.toml new file mode 100644 index 0000000..ca6f6ec --- /dev/null +++ b/src/orangutan/rustfmt.toml @@ -0,0 +1,6 @@ +array_width = 48 +fn_params_layout = "Vertical" +group_imports = "StdExternalCrate" +imports_granularity = "Module" +match_block_trailing_comma = true +overflow_delimited_expr = true \ No newline at end of file diff --git a/src/orangutan/src/config.rs b/src/orangutan/src/config.rs index 56106b7..93791c3 100644 --- a/src/orangutan/src/config.rs +++ b/src/orangutan/src/config.rs @@ -1,4 +1,7 @@ -use std::{path::{PathBuf, Path}, env, collections::HashSet, fmt::Display}; +use std::collections::HashSet; +use std::env; +use std::fmt::Display; +use std::path::{Path, PathBuf}; use lazy_static::lazy_static; @@ -20,15 +23,16 @@ const WEBSITE_DIR_NAME: &'static str = "website"; lazy_static! 
{ static ref WORK_DIR: PathBuf = env::current_dir().unwrap(); - pub static ref WEBSITE_REPOSITORY: String = env::var("WEBSITE_REPOSITORY").expect("Environment variable `WEBSITE_REPOSITORY` is required."); + pub static ref WEBSITE_REPOSITORY: String = env::var("WEBSITE_REPOSITORY") + .expect("Environment variable `WEBSITE_REPOSITORY` is required."); pub static ref BASE_DIR: PathBuf = WORK_DIR.join(".orangutan"); pub static ref WEBSITE_ROOT: PathBuf = BASE_DIR.join("website"); pub static ref KEYS_DIR: PathBuf = BASE_DIR.join("keys"); pub static ref HUGO_CONFIG_DIR: PathBuf = BASE_DIR.join("hugo-config"); pub static ref DEST_DIR: PathBuf = BASE_DIR.join("out"); pub static ref WEBSITE_DATA_DIR: PathBuf = DEST_DIR.join("data"); - pub static ref SUFFIXED_EXTENSIONS: Vec<&'static str> = vec!["html", "json", "xml", "css", "js", "txt"]; - + pub static ref SUFFIXED_EXTENSIONS: Vec<&'static str> = + vec!["html", "json", "xml", "css", "js", "txt"]; pub static ref MODE: Result = env::var("MODE"); pub static ref KEYS_MODE: Result = env::var("KEYS_MODE"); } @@ -61,7 +65,7 @@ impl WebsiteId { impl From<&Vec> for WebsiteId { fn from(value: &Vec) -> Self { if value.is_empty() { - return Self::default() + return Self::default(); } // Convert Vec to HashSet to get unique profiles @@ -71,14 +75,20 @@ impl From<&Vec> for WebsiteId { let mut used_profiles = used_profiles().clone(); // Insert special "*" profile so it is kept for website generation used_profiles.insert("*".to_string()); - profiles = profiles.intersection(&used_profiles).map(|s| s.clone()).collect(); + profiles = profiles + .intersection(&used_profiles) + .map(|s| s.clone()) + .collect(); - return Self { profiles } + return Self { profiles }; } } impl Display for WebsiteId { - fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + fn fmt( + &self, + f: &mut std::fmt::Formatter<'_>, + ) -> std::fmt::Result { write!(f, "{}", self.dir_name()) } } @@ -86,7 +96,7 @@ impl Display for WebsiteId { impl Default for WebsiteId { fn default() -> Self { let profiles = vec![DEFAULT_PROFILE.to_string()].into_iter().collect(); - return Self { profiles } + return Self { profiles }; } } diff --git a/src/orangutan/src/generate.rs b/src/orangutan/src/generate.rs index be25d42..7c79aeb 100644 --- a/src/orangutan/src/generate.rs +++ b/src/orangutan/src/generate.rs @@ -1,22 +1,23 @@ -use crate::config::*; -use crate::helpers::copy_directory; use core::fmt; use std::borrow::BorrowMut; -use std::os::fd::FromRawFd; -use std::sync::{Mutex, MutexGuard, Arc}; -use std::sync::atomic::{AtomicBool, Ordering}; -use std::io::Cursor; -use rocket::request::Request; -use rocket::response::{self, Response, Responder}; -use rocket::http::ContentType; -use lazy_static::lazy_static; use std::collections::HashSet; use std::env; use std::fs::{self, File}; -use std::io::{self, Write}; +use std::io::{self, Cursor, Write}; +use std::os::fd::FromRawFd; use std::path::PathBuf; -use std::process::{Command, Stdio, Output}; -use tracing::{info, debug, trace}; +use std::process::{Command, Output, Stdio}; +use std::sync::atomic::{AtomicBool, Ordering}; +use std::sync::{Arc, Mutex, MutexGuard}; + +use lazy_static::lazy_static; +use rocket::http::ContentType; +use rocket::request::Request; +use rocket::response::{self, Responder, Response}; +use tracing::{debug, info, trace}; + +use crate::config::*; +use crate::helpers::copy_directory; static HUGO_CONFIG_GENERATED: AtomicBool = AtomicBool::new(false); static DATA_FILES_GENERATED: AtomicBool = AtomicBool::new(false); @@ -38,7 +39,7 @@ 
pub fn generate_default_website() -> Result<(), Error> { pub fn clone_repository() -> Result<(), Error> { if WEBSITE_ROOT.is_dir() { - return pull_repository() + return pull_repository(); } _clone_repository()?; @@ -49,7 +50,11 @@ pub fn clone_repository() -> Result<(), Error> { fn _clone_repository() -> Result<(), Error> { let mut command = Command::new("git"); command - .args(vec!["clone", &WEBSITE_REPOSITORY, &WEBSITE_ROOT.display().to_string()]) + .args(vec![ + "clone", + &WEBSITE_REPOSITORY, + &WEBSITE_ROOT.display().to_string(), + ]) .args(vec!["--depth", "1"]); trace!("Running `{:?}`…", command); @@ -60,7 +65,10 @@ fn _clone_repository() -> Result<(), Error> { if status.success() { Ok(()) } else { - Err(Error::CommandExecutionFailed { command: format!("{:?}", command), code: status.code() }) + Err(Error::CommandExecutionFailed { + command: format!("{:?}", command), + code: status.code(), + }) } } @@ -78,7 +86,10 @@ fn _init_submodules() -> Result<(), Error> { if status.success() { Ok(()) } else { - Err(Error::CommandExecutionFailed { command: format!("{:?}", command), code: status.code() }) + Err(Error::CommandExecutionFailed { + command: format!("{:?}", command), + code: status.code(), + }) } } @@ -102,7 +113,10 @@ fn _pull_repository() -> Result<(), Error> { if status.success() { Ok(()) } else { - Err(Error::CommandExecutionFailed { command: format!("{:?}", command), code: status.code() }) + Err(Error::CommandExecutionFailed { + command: format!("{:?}", command), + code: status.code(), + }) } } @@ -122,7 +136,10 @@ fn _update_submodules() -> Result<(), Error> { if status.success() { Ok(()) } else { - Err(Error::CommandExecutionFailed { command: format!("{:?}", command), code: status.code() }) + Err(Error::CommandExecutionFailed { + command: format!("{:?}", command), + code: status.code(), + }) } } @@ -131,8 +148,7 @@ fn _copy_hugo_config() -> Result<(), Error> { // Create config dir let config_dir = HUGO_CONFIG_DIR.join("_default"); - fs::create_dir_all(&config_dir) - .map_err(Error::CannotCreateHugoConfigFile)?; + fs::create_dir_all(&config_dir).map_err(Error::CannotCreateHugoConfigFile)?; debug!("Hugo config will be saved in <{}>", &config_dir.display()); // Read current config @@ -153,15 +169,17 @@ fn _copy_hugo_config() -> Result<(), Error> { fn gen_hugo_config(website_id: &WebsiteId) -> Result<(), Error> { // Create config dir let config_dir = HUGO_CONFIG_DIR.join(website_id.dir_name()); - fs::create_dir_all(&config_dir) - .map_err(Error::CannotCreateHugoConfigFile)?; + fs::create_dir_all(&config_dir).map_err(Error::CannotCreateHugoConfigFile)?; // Create new config let profiles: Vec = website_id.profiles.iter().map(|s| s.clone()).collect(); let profiles_json = serde_json::to_string(&profiles).unwrap(); - let config = format!("[Params] + let config = format!( + "[Params] currentProfiles = {} -", profiles_json); +", + profiles_json + ); // Write new config file let config_file = config_dir.join("hugo.toml"); @@ -184,10 +202,14 @@ fn copy_hugo_config_if_needed() -> Result<(), Error> { fn generate_website( id: &WebsiteId, destination: &PathBuf, - generated_websites: &mut MutexGuard<'_, HashSet> + generated_websites: &mut MutexGuard<'_, HashSet>, ) -> Result<(), Error> { info!("Generating website for {:?}…", id.profiles); - debug!("Website for {:?} will be generated at <{}>", id.profiles, destination.display()); + debug!( + "Website for {:?} will be generated at <{}>", + id.profiles, + destination.display() + ); copy_hugo_config_if_needed()?; gen_hugo_config(id)?; @@ -195,10 
+217,13 @@ fn generate_website( let config_dir = HUGO_CONFIG_DIR.display().to_string(); let environment = id.dir_name(); let mut params = vec![ - "--disableKinds", "RSS,sitemap", + "--disableKinds", + "RSS,sitemap", "--cleanDestinationDir", - "--configDir", &config_dir, - "--environment", &environment, + "--configDir", + &config_dir, + "--environment", + &environment, ]; if env::var("LOCALHOST") == Ok("true".to_string()) { params.append(&mut vec!["--baseURL", "http://localhost:8080"]); @@ -235,12 +260,21 @@ fn _generate_data_files() -> Result<(), Error> { let shortcodes_dir = WEBSITE_ROOT.join("themes/PaperMod/layouts/shortcodes"); let shortcodes_dest_dir_path = format!("themes/{}/layouts/shortcodes", THEME_NAME); let shortcodes_dest_dir = WEBSITE_ROOT.join(&shortcodes_dest_dir_path); - trace!("Copying shortcodes from {} to {}…", shortcodes_dir.display(), shortcodes_dest_dir.display()); + trace!( + "Copying shortcodes from {} to {}…", + shortcodes_dir.display(), + shortcodes_dest_dir.display() + ); copy_directory(&shortcodes_dir, &shortcodes_dest_dir).unwrap(); let res = hugo_gen( - vec!["--disableKinds", "RSS,sitemap,home", "--theme", THEME_NAME], - WEBSITE_DATA_DIR.display().to_string() + vec![ + "--disableKinds", + "RSS,sitemap,home", + "--theme", + THEME_NAME, + ], + WEBSITE_DATA_DIR.display().to_string(), )?; DATA_FILES_GENERATED.store(true, Ordering::Relaxed); @@ -256,24 +290,33 @@ pub fn generate_data_files_if_needed() -> Result<(), Error> { } } -pub fn hugo_gen(params: Vec<&str>, destination: String) -> Result<(), Error> { +pub fn hugo_gen( + params: Vec<&str>, + destination: String, +) -> Result<(), Error> { let website_root = WEBSITE_ROOT.display().to_string(); let base_params: Vec<&str> = vec![ - "--source", website_root.as_str(), - "--destination", destination.as_str(), + "--source", + website_root.as_str(), + "--destination", + destination.as_str(), ]; - hugo(base_params.into_iter().chain(params.into_iter()).collect(), false)?; + hugo( + base_params.into_iter().chain(params.into_iter()).collect(), + false, + )?; Ok(()) } -fn hugo(params: Vec<&str>, pipe_stdout: bool) -> Result { +fn hugo( + params: Vec<&str>, + pipe_stdout: bool, +) -> Result { let mut command = Command::new("hugo"); let website_root = WEBSITE_ROOT.display().to_string(); - let base_params: Vec<&str> = vec![ - "--source", website_root.as_str(), - ]; + let base_params: Vec<&str> = vec!["--source", website_root.as_str()]; let params = base_params.iter().chain(params.iter()); command.args(params); @@ -292,7 +335,10 @@ fn hugo(params: Vec<&str>, pipe_stdout: bool) -> Result { if output.status.success() { Ok(output.clone()) } else { - Err(Error::CommandExecutionFailed { command: format!("{:?}", command), code: output.status.code() }) + Err(Error::CommandExecutionFailed { + command: format!("{:?}", command), + code: output.status.code(), + }) } } @@ -314,20 +360,34 @@ pub enum Error { } impl fmt::Display for Error { - fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + fn fmt( + &self, + f: &mut fmt::Formatter<'_>, + ) -> fmt::Result { match self { - Error::CannotExecuteCommand(command, err) => write!(f, "Could not execute command `{command}`: {err}"), - Error::CommandExecutionFailed { command, code } => write!(f, "Command `{command}` failed with exit code {:?}", code), + Error::CannotExecuteCommand(command, err) => { + write!(f, "Could not execute command `{command}`: {err}") + }, + Error::CommandExecutionFailed { command, code } => { + write!(f, "Command `{command}` failed with exit code {:?}", code) + }, 
Error::CannotGenerateWebsite(err) => write!(f, "Could not generate website: {err}"), - Error::CannotEmptyIndexJson(err) => write!(f, "Could not empty file: {err}"), - Error::CannotCreateHugoConfigFile(err) => write!(f, "Could create hugo config file: {err}"), + Error::CannotEmptyIndexJson(err) => { + write!(f, "Could not empty file: {err}") + }, + Error::CannotCreateHugoConfigFile(err) => { + write!(f, "Could create hugo config file: {err}") + }, } } } #[rocket::async_trait] impl<'r> Responder<'r, 'static> for Error { - fn respond_to(self, _: &'r Request<'_>) -> response::Result<'static> { + fn respond_to( + self, + _: &'r Request<'_>, + ) -> response::Result<'static> { let res = self.to_string(); Response::build() .header(ContentType::Plain) diff --git a/src/orangutan/src/helpers.rs b/src/orangutan/src/helpers.rs index d8ff63a..550aa9b 100644 --- a/src/orangutan/src/helpers.rs +++ b/src/orangutan/src/helpers.rs @@ -1,8 +1,12 @@ -use std::{path::{PathBuf, Path}, collections::HashSet, fs::{self, File}, io, sync::Mutex}; +use std::collections::HashSet; +use std::fs::{self, File}; +use std::io; +use std::path::{Path, PathBuf}; +use std::sync::Mutex; use lazy_static::lazy_static; use serde_json::{self, Value}; -use tracing::{trace, debug, error}; +use tracing::{debug, error, trace}; use crate::config::*; @@ -14,7 +18,7 @@ pub fn used_profiles<'a>() -> &'a HashSet { let mut used_profiles = USED_PROFILES.lock().unwrap(); if let Some(profiles) = used_profiles.clone() { trace!("Read used profiles from cache"); - return profiles + return profiles; } debug!("Reading used profiles…"); @@ -31,7 +35,9 @@ pub fn used_profiles<'a>() -> &'a HashSet { debug!(" read_allowed: {:?}", read_allowed); // Store new profiles - read_allowed.iter().for_each(|p| { acc.insert(p.clone()); }); + read_allowed.iter().for_each(|p| { + acc.insert(p.clone()); + }); } *used_profiles = Some(acc); @@ -39,12 +45,20 @@ pub fn used_profiles<'a>() -> &'a HashSet { acc } -pub fn find(dir: &PathBuf, extensions: &Vec<&str>, files: &mut Vec) { +pub fn find( + dir: &PathBuf, + extensions: &Vec<&str>, + files: &mut Vec, +) { for entry in fs::read_dir(dir).unwrap() { if let Ok(entry) = entry { let path = entry.path(); if path.is_file() { - if path.extension().map(|ext| extensions.contains(&ext.to_str().unwrap())).unwrap_or(false) { + if path + .extension() + .map(|ext| extensions.contains(&ext.to_str().unwrap())) + .unwrap_or(false) + { files.push(path); } } else if path.is_dir() { @@ -95,7 +109,11 @@ pub fn data_file(html_file: &PathBuf) -> PathBuf { fn find_data_files() -> Vec { let mut data_files: Vec = Vec::new(); - find(&WEBSITE_DATA_DIR, &vec![DATA_FILE_EXTENSION], &mut data_files); + find( + &WEBSITE_DATA_DIR, + &vec![DATA_FILE_EXTENSION], + &mut data_files, + ); data_files } @@ -114,21 +132,26 @@ fn _read_allowed(data_file: &PathBuf) -> Option>> { // `None` if file not found pub fn read_allowed(data_file: &PathBuf) -> Option> { - _read_allowed(data_file) - .map(|o| o.unwrap_or(vec![DEFAULT_PROFILE.to_string()])) + _read_allowed(data_file).map(|o| o.unwrap_or(vec![DEFAULT_PROFILE.to_string()])) } -pub fn object_key>(path: &P, profile: &str) -> String { +pub fn object_key>( + path: &P, + profile: &str, +) -> String { let path = path.as_ref(); if let Some(ext) = path.extension() { if SUFFIXED_EXTENSIONS.contains(&ext.to_str().unwrap()) { - return format!("{}@{}", path.display(), profile) + return format!("{}@{}", path.display(), profile); } } path.display().to_string() } -pub fn copy_directory(src: &std::path::Path, dest: 
&std::path::Path) -> io::Result<()> { +pub fn copy_directory( + src: &std::path::Path, + dest: &std::path::Path, +) -> io::Result<()> { if src.is_file() { // If the source is a file, copy it to the destination fs::copy(src, dest)?; diff --git a/src/orangutan/src/init.rs b/src/orangutan/src/init.rs index c0d352f..17c8478 100644 --- a/src/orangutan/src/init.rs +++ b/src/orangutan/src/init.rs @@ -1,3 +1,3 @@ fn main() { - println!("Init"); + println!("Init"); } diff --git a/src/orangutan/src/keys_reader.rs b/src/orangutan/src/keys_reader.rs index 9e26111..fb6620d 100644 --- a/src/orangutan/src/keys_reader.rs +++ b/src/orangutan/src/keys_reader.rs @@ -1,9 +1,9 @@ -use crate::config::{KEYS_DIR, ROOT_KEY_NAME}; - -use std::io::{Write, Read}; -use std::{fmt, io}; use std::fs::File; -use std::{env, path::PathBuf}; +use std::io::{Read, Write}; +use std::path::PathBuf; +use std::{env, fmt, io}; + +use crate::config::{KEYS_DIR, ROOT_KEY_NAME}; extern crate biscuit_auth as biscuit; use lazy_static::lazy_static; @@ -33,11 +33,16 @@ impl KeysReader for EnvKeysReader { let key_name = ROOT_KEY_NAME; let env_var_name = format!("KEY_{}", key_name); - trace!("Reading key '{}' from environment ({})…", key_name, env_var_name); + trace!( + "Reading key '{}' from environment ({})…", + key_name, + env_var_name + ); env::var(env_var_name) .map_err(Error::Env) .and_then(|key_bytes| { - let key = biscuit::PrivateKey::from_bytes_hex(&key_bytes).map_err(Error::BiscuitFormat)?; + let key = biscuit::PrivateKey::from_bytes_hex(&key_bytes) + .map_err(Error::BiscuitFormat)?; Ok(biscuit::KeyPair::from(&key)) }) } @@ -46,7 +51,10 @@ impl KeysReader for EnvKeysReader { struct LocalKeysReader {} impl LocalKeysReader { - fn key_file(&self, key_name: &str) -> PathBuf { + fn key_file( + &self, + key_name: &str, + ) -> PathBuf { KEYS_DIR.join(format!("{}.key", key_name)) } } @@ -63,14 +71,20 @@ impl KeysReader for LocalKeysReader { let mut file = File::open(key_file).map_err(Error::IO)?; let mut key_bytes = String::new(); file.read_to_string(&mut key_bytes).map_err(Error::IO)?; - let key = biscuit::PrivateKey::from_bytes_hex(&key_bytes).map_err(Error::BiscuitFormat)?; + let key = + biscuit::PrivateKey::from_bytes_hex(&key_bytes).map_err(Error::BiscuitFormat)?; Ok(biscuit::KeyPair::from(&key)) } else { // If key file does not exist, create a new key and save it to a new file - trace!("Saving new key '{}' into <{}>…", key_name, key_file.display()); + trace!( + "Saving new key '{}' into <{}>…", + key_name, + key_file.display() + ); let key_pair = biscuit::KeyPair::new(); let mut file = File::create(&key_file).map_err(Error::IO)?; - file.write_all(key_pair.private().to_bytes_hex().as_bytes()).map_err(Error::IO)?; + file.write_all(key_pair.private().to_bytes_hex().as_bytes()) + .map_err(Error::IO)?; Ok(key_pair) } } @@ -88,14 +102,25 @@ pub enum Error { } impl fmt::Display for Error { - fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + fn fmt( + &self, + f: &mut fmt::Formatter<'_>, + ) -> fmt::Result { match self { Error::Env(err) => err.fmt(f), Error::IO(err) => err.fmt(f), - Error::CannotCreateFile(path, err) => write!(f, "Could not create <{}> file: {err}", path.display()), - Error::CannotOpenFile(path, err) => write!(f, "Could not open <{}> file: {err}", path.display()), - Error::CannotWriteInFile(path, err) => write!(f, "Could not write in <{}> file: {err}", path.display()), - Error::CannotReadFile(path, err) => write!(f, "Could not read <{}> file: {err}", path.display()), + Error::CannotCreateFile(path, err) => { + 
write!(f, "Could not create <{}> file: {err}", path.display()) + }, + Error::CannotOpenFile(path, err) => { + write!(f, "Could not open <{}> file: {err}", path.display()) + }, + Error::CannotWriteInFile(path, err) => { + write!(f, "Could not write in <{}> file: {err}", path.display()) + }, + Error::CannotReadFile(path, err) => { + write!(f, "Could not read <{}> file: {err}", path.display()) + }, Error::BiscuitFormat(err) => err.fmt(f), } } diff --git a/src/orangutan/src/object_reader.rs b/src/orangutan/src/object_reader.rs index 4487a62..f600409 100644 --- a/src/orangutan/src/object_reader.rs +++ b/src/orangutan/src/object_reader.rs @@ -1,11 +1,13 @@ -use std::{io, path::PathBuf, fs, fmt}; +use std::path::PathBuf; +use std::{fmt, fs, io}; -use crate::config::WebsiteId; -use crate::generate::{self, generate_website_if_needed}; use rocket::fs::NamedFile; use rocket::response::Responder; use tracing::trace; +use crate::config::WebsiteId; +use crate::generate::{self, generate_website_if_needed}; + pub type ObjectReader = LocalObjectReader; impl ObjectReader { @@ -32,29 +34,44 @@ impl LocalObjectReader { } impl LocalObjectReader { - pub fn list_objects(&self, prefix: &str, website_id: &WebsiteId) -> Result, Error> { + pub fn list_objects( + &self, + prefix: &str, + website_id: &WebsiteId, + ) -> Result, Error> { trace!("Listing files with prefix '{}' for {}…", prefix, website_id); - let website_dir = generate_website_if_needed(website_id) - .map_err(Error::WebsiteGenerationError)?; - - Ok(find_all_files(&website_dir).iter() - .map(|path| - format!("/{}", path - .strip_prefix(website_dir.as_path()) - .expect("Could not remove prefix") - .display()) - ) + let website_dir = + generate_website_if_needed(website_id).map_err(Error::WebsiteGenerationError)?; + + Ok(find_all_files(&website_dir) + .iter() + .map(|path| { + format!( + "/{}", + path.strip_prefix(website_dir.as_path()) + .expect("Could not remove prefix") + .display() + ) + }) .collect()) } - pub async fn read_object<'r>(&self, object_key: &str, website_id: &WebsiteId) -> LocalReadObjectResponse { + pub async fn read_object<'r>( + &self, + object_key: &str, + website_id: &WebsiteId, + ) -> LocalReadObjectResponse { let website_dir = match generate_website_if_needed(website_id) { Ok(dir) => dir, Err(err) => return LocalReadObjectResponse::NotFound(err.to_string()), }; let file_path = website_dir.join(object_key.strip_prefix("/").unwrap()); - trace!("Reading '{}' from disk at <{}>…", object_key, file_path.display()); + trace!( + "Reading '{}' from disk at <{}>…", + object_key, + file_path.display() + ); LocalReadObjectResponse::Found(Self::serve_file(file_path).await) } @@ -66,7 +83,10 @@ fn find_all_files(dir: &PathBuf) -> Vec { files } -fn find(dir: &PathBuf, files: &mut Vec) { +fn find( + dir: &PathBuf, + files: &mut Vec, +) { for entry in fs::read_dir(dir).unwrap() { if let Ok(entry) = entry { let path = entry.path(); @@ -86,10 +106,12 @@ pub enum Error { } impl fmt::Display for Error { - fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + fn fmt( + &self, + f: &mut fmt::Formatter<'_>, + ) -> fmt::Result { match self { Error::WebsiteGenerationError(err) => write!(f, "Website generation error: {err}"), } } } - diff --git a/src/orangutan/src/preflight.rs b/src/orangutan/src/preflight.rs index 51a6ceb..0f23b39 100644 --- a/src/orangutan/src/preflight.rs +++ b/src/orangutan/src/preflight.rs @@ -3,13 +3,15 @@ mod generate; mod helpers; mod keys_reader; -use crate::config::*; -use crate::generate::*; -use crate::helpers::*; use 
core::fmt; use std::process::exit; + +use tracing::{debug, error, Level}; use tracing_subscriber::FmtSubscriber; -use tracing::{Level, debug, error}; + +use crate::config::*; +use crate::generate::*; +use crate::helpers::*; fn main() { let subscriber = FmtSubscriber::builder() @@ -26,12 +28,10 @@ fn main() { pub fn throwing_main() -> Result<(), Error> { // Generate the website - generate_website_if_needed(&WebsiteId::default()) - .map_err(Error::WebsiteGenerationError)?; + generate_website_if_needed(&WebsiteId::default()).map_err(Error::WebsiteGenerationError)?; // Generate Orangutan data files - generate_data_files_if_needed() - .map_err(Error::CannotGenerateDataFiles)?; + generate_data_files_if_needed().map_err(Error::CannotGenerateDataFiles)?; // Read all profiles just for debug purposes let used_profiles = used_profiles(); @@ -56,10 +56,15 @@ pub enum Error { } impl fmt::Display for Error { - fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + fn fmt( + &self, + f: &mut fmt::Formatter<'_>, + ) -> fmt::Result { match self { Error::WebsiteGenerationError(err) => write!(f, "Website generation error: {err}"), - Error::CannotGenerateDataFiles(err) => write!(f, "Could not generate data files: {err}"), + Error::CannotGenerateDataFiles(err) => { + write!(f, "Could not generate data files: {err}") + }, } } } diff --git a/src/orangutan/src/refresh-token.rs b/src/orangutan/src/refresh-token.rs index 9f4563e..50187b7 100644 --- a/src/orangutan/src/refresh-token.rs +++ b/src/orangutan/src/refresh-token.rs @@ -1,25 +1,24 @@ -use std::io::{Read, Write}; -use std::{env, fmt, io}; use std::fs::File; -use std::path::{PathBuf, Path}; +use std::io::{Read, Write}; +use std::path::{Path, PathBuf}; use std::process::exit; use std::time::SystemTime; +use std::{env, fmt, io}; + use iso8601_duration::Duration as IsoDuration; extern crate biscuit_auth as biscuit; +use biscuit::macros::{block, fact}; use biscuit::Biscuit; -use biscuit::macros::{fact, block}; -use tracing::{error, trace}; use lazy_static::lazy_static; +use tracing::{error, trace}; const ROOT_KEY_NAME: &'static str = "_biscuit_root"; lazy_static! { static ref BASE_DIR: &'static Path = Path::new(".orangutan"); static ref KEYS_DIR: PathBuf = BASE_DIR.join("keys"); - static ref KEYS_MODE: Result = env::var("KEYS_MODE"); - static ref ROOT_KEY: biscuit::KeyPair = { let keys_reader = ::detect(); match keys_reader.get_root_biscuit_key() { @@ -27,7 +26,7 @@ lazy_static! { Err(err) => { error!("Error generating root Biscuit key: {}", err); exit(1); - } + }, } }; } @@ -36,23 +35,27 @@ fn main() { let mut builder = Biscuit::builder(); for profile in env::args().skip(2) { let fact = fact!("profile({profile});"); - builder.add_fact(fact.clone()) + builder + .add_fact(fact.clone()) .expect(&format!("Could not add fact '{:?}' to Biscuit", fact)); } match builder.build(&ROOT_KEY) { Ok(mut biscuit) => { let duration = IsoDuration::parse( - &env::args().skip(1).next() - .expect("Duration required as the first argument.") - ) - .expect("Duration malformatted. Check ISO 8601.") - .to_std() - .expect("Cannot convert `iso8601_duration::Duration` to `std::time::Duration`."); + &env::args() + .skip(1) + .next() + .expect("Duration required as the first argument."), + ) + .expect("Duration malformatted. 
Check ISO 8601.") + .to_std() + .expect("Cannot convert `iso8601_duration::Duration` to `std::time::Duration`."); let expiry_block = block!( "check if time($time), $time <= {expiry};", expiry = SystemTime::now() + duration, ); - biscuit = biscuit.append(expiry_block) + biscuit = biscuit + .append(expiry_block) .expect(&format!("Could not add block '' to Biscuit")); match biscuit.to_base64() { Ok(biscuit_base64) => { @@ -86,11 +89,16 @@ impl KeysReader for EnvKeysReader { let key_name = ROOT_KEY_NAME; let env_var_name = format!("KEY_{}", key_name); - trace!("Reading key '{}' from environment ({})…", key_name, env_var_name); + trace!( + "Reading key '{}' from environment ({})…", + key_name, + env_var_name + ); env::var(env_var_name) .map_err(Error::Env) .and_then(|key_bytes| { - let key = biscuit::PrivateKey::from_bytes_hex(&key_bytes).map_err(Error::BiscuitFormat)?; + let key = biscuit::PrivateKey::from_bytes_hex(&key_bytes) + .map_err(Error::BiscuitFormat)?; Ok(biscuit::KeyPair::from(&key)) }) } @@ -99,7 +107,10 @@ impl KeysReader for EnvKeysReader { struct LocalKeysReader {} impl LocalKeysReader { - fn key_file(&self, key_name: &str) -> PathBuf { + fn key_file( + &self, + key_name: &str, + ) -> PathBuf { KEYS_DIR.join(format!("{}.key", key_name)) } } @@ -116,20 +127,25 @@ impl KeysReader for LocalKeysReader { let mut file = File::open(key_file).map_err(Error::IO)?; let mut key_bytes = String::new(); file.read_to_string(&mut key_bytes).map_err(Error::IO)?; - let key = biscuit::PrivateKey::from_bytes_hex(&key_bytes).map_err(Error::BiscuitFormat)?; + let key = + biscuit::PrivateKey::from_bytes_hex(&key_bytes).map_err(Error::BiscuitFormat)?; Ok(biscuit::KeyPair::from(&key)) } else { // If key file does not exist, create a new key and save it to a new file - trace!("Saving new key '{}' into <{}>…", key_name, key_file.display()); + trace!( + "Saving new key '{}' into <{}>…", + key_name, + key_file.display() + ); let key_pair = biscuit::KeyPair::new(); let mut file = File::create(&key_file).map_err(Error::IO)?; - file.write_all(key_pair.private().to_bytes_hex().as_bytes()).map_err(Error::IO)?; + file.write_all(key_pair.private().to_bytes_hex().as_bytes()) + .map_err(Error::IO)?; Ok(key_pair) } } } - #[derive(Debug)] enum Error { IO(io::Error), @@ -138,7 +154,10 @@ enum Error { } impl fmt::Display for Error { - fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + fn fmt( + &self, + f: &mut fmt::Formatter<'_>, + ) -> fmt::Result { match self { Error::IO(err) => err.fmt(f), Error::Env(err) => err.fmt(f), diff --git a/src/orangutan/src/server.rs b/src/orangutan/src/server.rs index 6f7ceda..e09d0a6 100644 --- a/src/orangutan/src/server.rs +++ b/src/orangutan/src/server.rs @@ -4,31 +4,29 @@ mod helpers; mod keys_reader; mod object_reader; +use std::path::{Path, PathBuf}; +use std::process::exit; +use std::time::SystemTime; +use std::{fmt, fs, io}; + use biscuit::builder::{Fact, Term}; +use biscuit::macros::authorizer; +use biscuit::Biscuit; +use biscuit_auth as biscuit; +use lazy_static::lazy_static; use object_reader::{ObjectReader, ReadObjectResponse}; use rocket::fairing::AdHoc; -use rocket::{Either, post, Responder}; use rocket::form::Errors; -use rocket::http::CookieJar; -use rocket::http::{Status, Cookie, SameSite}; use rocket::http::uri::Origin; +use rocket::http::{Cookie, CookieJar, SameSite, Status}; +use rocket::outcome::Outcome; +use rocket::request::FromRequest; use rocket::response::status::{BadRequest, NotFound}; -use rocket::{Request, request, get, routes, catch, catchers, 
State}; use rocket::response::Redirect; -use rocket::request::FromRequest; -use rocket::outcome::Outcome; -use tracing::Level; -use tracing_subscriber::FmtSubscriber; -use std::{fmt, fs, io}; -use std::time::SystemTime; -use std::path::{PathBuf, Path}; -use std::process::exit; -use tracing::{debug, error, trace}; +use rocket::{catch, catchers, get, post, request, routes, Either, Request, Responder, State}; use time::Duration; -use lazy_static::lazy_static; -use biscuit_auth as biscuit; -use biscuit::Biscuit; -use biscuit::macros::authorizer; +use tracing::{debug, error, trace, Level}; +use tracing_subscriber::FmtSubscriber; use urlencoding::decode; use crate::config::*; @@ -44,7 +42,7 @@ lazy_static! { Err(err) => { error!("Error generating root Biscuit key: {}", err); exit(1); - } + }, } }; } @@ -80,12 +78,14 @@ async fn throwing_main() -> Result<(), Box> { ]) .register("/", catchers![not_found]) .manage(ObjectReader::new()) - .attach(AdHoc::on_liftoff("Liftoff website generation", |rocket| Box::pin(async move { - if let Err(err) = liftoff() { - error!("Error: {}", err); - rocket.shutdown().notify(); - } - }))) + .attach(AdHoc::on_liftoff("Liftoff website generation", |rocket| { + Box::pin(async move { + if let Err(err) = liftoff() { + error!("Error: {}", err); + rocket.shutdown().notify(); + } + }) + })) .launch() .await?; @@ -105,7 +105,9 @@ fn get_user_info(token: Option) -> String { "**Biscuit:**\n\n{}\n\n\ **Dump:**\n\n{}", biscuit.print(), - biscuit.authorizer().map_or_else(|e| format!("Error: {}", e).to_string(), |a| a.dump_code()), + biscuit + .authorizer() + .map_or_else(|e| format!("Error: {}", e).to_string(), |a| a.dump_code()), ), None => "Not authenticated".to_string(), } @@ -162,24 +164,19 @@ async fn handle_request( #[post("/update-content/github")] fn update_content_github() -> Result<(), Error> { // Update repository - pull_repository() - .map_err(Error::CannotPullOutdatedRepository)?; + pull_repository().map_err(Error::CannotPullOutdatedRepository)?; // Remove outdated websites - fs::remove_dir_all(DEST_DIR.as_path()) - .map_err(Error::CannotDeleteOutdatedWebsites)?; + fs::remove_dir_all(DEST_DIR.as_path()).map_err(Error::CannotDeleteOutdatedWebsites)?; // Pre-generate default website as we will access it at some point anyway - generate_default_website() - .map_err(Error::WebsiteGenerationError)?; + generate_default_website().map_err(Error::WebsiteGenerationError)?; Ok(()) } #[post("/update-content/")] -fn update_content_other( - source: &str, -) -> BadRequest { +fn update_content_other(source: &str) -> BadRequest { BadRequest(format!("Source '{source}' is not supported.")) } @@ -197,42 +194,55 @@ async fn _handle_request<'r>( let path = decode(origin.path().as_str()).unwrap().into_owned(); trace!("GET {}", &path); - let user_profiles: Vec = biscuit.as_ref() - .map(|b| b - .authorizer().unwrap() - .query_all("data($name) <- profile($name)").unwrap() - .iter().map(|f: &Fact| - match f.predicate.terms.get(0).unwrap() { + let user_profiles: Vec = biscuit + .as_ref() + .map(|b| { + b.authorizer() + .unwrap() + .query_all("data($name) <- profile($name)") + .unwrap() + .iter() + .map(|f: &Fact| match f.predicate.terms.get(0).unwrap() { Term::Str(s) => s.clone(), t => panic!("Term {t} should be of type String"), - } - ) - .collect() - ) + }) + .collect() + }) .unwrap_or_default(); let website_id = WebsiteId::from(&user_profiles); - let stored_objects: Vec = object_reader.list_objects(&path, &website_id) - .map_err(|err| { - error!("Error when listing objects matching '{}': {}", 
&path, err); - err - })?; - let Some(object_key) = matching_files(&path, &stored_objects).first().map(|o| o.to_owned()) else { + let stored_objects: Vec = + object_reader + .list_objects(&path, &website_id) + .map_err(|err| { + error!("Error when listing objects matching '{}': {}", &path, err); + err + })?; + let Some(object_key) = matching_files(&path, &stored_objects) + .first() + .map(|o| o.to_owned()) + else { error!("No file matching '{}' found in stored objects", &path); - return Ok(Either::Right(NotFound(()))) + return Ok(Either::Right(NotFound(()))); }; let allowed_profiles = allowed_profiles(&object_key); let Some(allowed_profiles) = allowed_profiles else { // If allowed profiles is empty, it means it's a static file - trace!("File <{}> did not explicitly allow profiles, serving static file", &path); + trace!( + "File <{}> did not explicitly allow profiles, serving static file", + &path + ); - return Ok(Either::Left(object_reader.read_object(&object_key, &website_id).await)) + return Ok(Either::Left( + object_reader.read_object(&object_key, &website_id).await, + )); }; debug!( "Page <{}> can be read by {}", &path, - allowed_profiles.iter() + allowed_profiles + .iter() .map(|p| format!("'{}'", p)) .collect::>() .join(", ") @@ -242,7 +252,8 @@ async fn _handle_request<'r>( if allowed_profile == DEFAULT_PROFILE { profile = Some(allowed_profile); } else if let Some(ref biscuit) = biscuit { - let authorizer = authorizer!(r#" + let authorizer = authorizer!( + r#" operation("read"); right({p}, "read"); right("*", "read"); @@ -251,7 +262,9 @@ async fn _handle_request<'r>( operation($op), profile($p), right($p, $op); - "#, p = allowed_profile.clone()); + "#, + p = allowed_profile.clone() + ); if biscuit.authorize(&authorizer).is_ok() { profile = Some(allowed_profile); } @@ -259,10 +272,12 @@ async fn _handle_request<'r>( } if profile.is_none() { debug!("No profile allowed in token"); - return Ok(Either::Right(NotFound(()))) + return Ok(Either::Right(NotFound(()))); } - Ok(Either::Left(object_reader.read_object(object_key, &website_id).await)) + Ok(Either::Left( + object_reader.read_object(object_key, &website_id).await, + )) } fn allowed_profiles<'r>(path: &String) -> Option> { @@ -310,7 +325,7 @@ impl<'r> FromRequest<'r> for Token { token: &str, token_source: &str, biscuit: &mut Option, - should_save: &mut bool + should_save: &mut bool, ) { // Because tokens can be passed as URL query params, // they might have the "=" padding characters remove. 
@@ -351,7 +366,10 @@ impl<'r> FromRequest<'r> for Token { // Check authorization headers let authorization_headers: Vec<&str> = request.headers().get("Authorization").collect(); - debug!("{} 'Authorization' headers provided", authorization_headers.len()); + debug!( + "{} 'Authorization' headers provided", + authorization_headers.len() + ); for authorization in authorization_headers { if authorization.starts_with("Bearer ") { debug!("Bearer Authorization provided"); @@ -363,13 +381,19 @@ impl<'r> FromRequest<'r> for Token { } // Check query params - if let Some(token) = request.query_value::(TOKEN_QUERY_PARAM_NAME).and_then(Result::ok) { + if let Some(token) = request + .query_value::(TOKEN_QUERY_PARAM_NAME) + .and_then(Result::ok) + { debug!("Found token query param"); process_token(&token, "token query param", &mut biscuit, &mut should_save); } match biscuit { - Some(biscuit) => Outcome::Success(Token { biscuit, should_save }), + Some(biscuit) => Outcome::Success(Token { + biscuit, + should_save, + }), None => Outcome::Forward(Status::Unauthorized), } } @@ -382,24 +406,26 @@ impl<'r> FromRequest<'r> for RefreshedToken { type Error = (); async fn from_request(request: &'r Request<'_>) -> request::Outcome { - let Some(refresh_token) = request.query_value::(REFRESH_TOKEN_QUERY_PARAM_NAME) else { + let Some(refresh_token) = request.query_value::(REFRESH_TOKEN_QUERY_PARAM_NAME) + else { debug!("Refresh token query param not found."); - return Outcome::Forward(Status::Unauthorized) + return Outcome::Forward(Status::Unauthorized); }; let Ok(mut refresh_token) = refresh_token else { debug!("Error: Refresh token query param could not be decoded as `String`."); - return Outcome::Forward(Status::Unauthorized) + return Outcome::Forward(Status::Unauthorized); }; debug!("Found refresh token query param"); // If query contains `force=true`, `force` or `force=`, don't search for an existing token. // If instead it contains `force=false` or `force` is not present, search for a token to augment. 
- let token = if should_force_token_refresh(request.query_value::(FORCE_QUERY_PARAM_NAME)) { - None - } else { - Token::from_request(request).await.succeeded() - }; + let token = + if should_force_token_refresh(request.query_value::(FORCE_QUERY_PARAM_NAME)) { + None + } else { + Token::from_request(request).await.succeeded() + }; refresh_token = decode(&refresh_token).unwrap().to_string(); // Because tokens can be passed as URL query params, @@ -418,7 +444,7 @@ impl<'r> FromRequest<'r> for RefreshedToken { ); if let Err(err) = refresh_biscuit.authorize(&authorizer) { debug!("Refresh token is invalid: {}", err); - return Outcome::Forward(Status::Unauthorized) + return Outcome::Forward(Status::Unauthorized); } trace!("Baking biscuit from refresh token"); @@ -462,16 +488,21 @@ fn should_force_token_refresh(query_param_value: Option> query_param_value.is_some_and(|v| v.unwrap_or(true)) } -fn add_cookie_if_necessary(token: &Token, cookies: &CookieJar<'_>) { +fn add_cookie_if_necessary( + token: &Token, + cookies: &CookieJar<'_>, +) { if token.should_save { match token.biscuit.to_base64() { Ok(base64) => { - cookies.add(Cookie::build((TOKEN_COOKIE_NAME, base64)) - .path("/") - .max_age(Duration::days(365 * 5)) - .http_only(true) - .secure(true) - .same_site(SameSite::Strict)); + cookies.add( + Cookie::build((TOKEN_COOKIE_NAME, base64)) + .path("/") + .max_age(Duration::days(365 * 5)) + .http_only(true) + .secure(true) + .same_site(SameSite::Strict), + ); }, Err(err) => { error!("Error setting token cookie: {}", err); @@ -480,7 +511,10 @@ fn add_cookie_if_necessary(token: &Token, cookies: &CookieJar<'_>) { } } -fn merge_biscuits(b1: &Biscuit, b2: &Biscuit) -> Option { +fn merge_biscuits( + b1: &Biscuit, + b2: &Biscuit, +) -> Option { let source = b1.authorizer().unwrap().dump_code(); let new_code = b2.authorizer().unwrap().dump_code(); @@ -496,16 +530,20 @@ fn merge_biscuits(b1: &Biscuit, b2: &Biscuit) -> Option { } } -fn matching_files<'a>(query: &str, stored_objects: &'a Vec) -> Vec<&'a String> { - stored_objects.into_iter() +fn matching_files<'a>( + query: &str, + stored_objects: &'a Vec, +) -> Vec<&'a String> { + stored_objects + .into_iter() .filter(|p| { let query = query.strip_suffix("index.html").unwrap_or(query); let Some(mut p) = p.strip_prefix(query) else { - return false + return false; }; p = p.trim_start_matches('/'); p = p.strip_prefix("index.html").unwrap_or(p); - return p.is_empty() || p.starts_with('@') + return p.is_empty() || p.starts_with('@'); }) .collect() } @@ -513,7 +551,7 @@ fn matching_files<'a>(query: &str, stored_objects: &'a Vec) -> Vec<&'a S fn add_padding(base64_string: &str) -> String { // If the base64 string is already padded, don't do anything. 
if base64_string.ends_with("=") { - return base64_string.to_string() + return base64_string.to_string(); } match base64_string.len() % 4 { @@ -534,11 +572,18 @@ enum Error { } impl fmt::Display for Error { - fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + fn fmt( + &self, + f: &mut fmt::Formatter<'_>, + ) -> fmt::Result { match self { Error::WebsiteGenerationError(err) => write!(f, "Website generation error: {err}"), - Error::CannotPullOutdatedRepository(err) => write!(f, "Cannot pull outdated repository: {err}"), - Error::CannotDeleteOutdatedWebsites(err) => write!(f, "Cannot delete outdated websites: {err}"), + Error::CannotPullOutdatedRepository(err) => { + write!(f, "Cannot pull outdated repository: {err}") + }, + Error::CannotDeleteOutdatedWebsites(err) => { + write!(f, "Cannot delete outdated websites: {err}") + }, } } } @@ -559,41 +604,29 @@ mod tests { "/whatever/index.htmlindex.html@_default", "/whatever/other-page/index.html@_default", "/whatever/a/b.html@_default", - ].into_iter().map(|p| p.to_string()).collect::>(); + ] + .into_iter() + .map(|p| p.to_string()) + .collect::>(); - assert_eq!( - matching_files("", &stored_objects), - vec![ - "/index.html@_default", - ] - ); - assert_eq!( - matching_files("/", &stored_objects), - vec![ - "/index.html@_default", - ] - ); - assert_eq!( - matching_files("/index.html", &stored_objects), - vec![ - "/index.html@_default", - ] - ); + assert_eq!(matching_files("", &stored_objects), vec![ + "/index.html@_default", + ]); + assert_eq!(matching_files("/", &stored_objects), vec![ + "/index.html@_default", + ]); + assert_eq!(matching_files("/index.html", &stored_objects), vec![ + "/index.html@_default", + ]); - assert_eq!( - matching_files("/whatever", &stored_objects), - vec![ - "/whatever/index.html@friends", - "/whatever/index.html@family", - ] - ); - assert_eq!( - matching_files("/whatever/", &stored_objects), - vec![ - "/whatever/index.html@friends", - "/whatever/index.html@family", - ] - ); + assert_eq!(matching_files("/whatever", &stored_objects), vec![ + "/whatever/index.html@friends", + "/whatever/index.html@family", + ]); + assert_eq!(matching_files("/whatever/", &stored_objects), vec![ + "/whatever/index.html@friends", + "/whatever/index.html@family", + ]); assert_eq!( matching_files("/whatever/index.html", &stored_objects), vec![ @@ -610,12 +643,9 @@ mod tests { matching_files("/whatever/a/b", &stored_objects), Vec::<&str>::new() ); - assert_eq!( - matching_files("/whatever/a/b.html", &stored_objects), - vec![ - "/whatever/a/b.html@_default", - ] - ); + assert_eq!(matching_files("/whatever/a/b.html", &stored_objects), vec![ + "/whatever/a/b.html@_default", + ]); } #[test] @@ -624,21 +654,18 @@ mod tests { "/style.css@_default", "/anything.custom@friends", "/anything.custom@family", - ].into_iter().map(|p| p.to_string()).collect::>(); + ] + .into_iter() + .map(|p| p.to_string()) + .collect::>(); - assert_eq!( - matching_files("/style.css", &stored_objects), - vec![ - "/style.css@_default", - ] - ); - assert_eq!( - matching_files("/anything.custom", &stored_objects), - vec![ - "/anything.custom@friends", - "/anything.custom@family", - ] - ); + assert_eq!(matching_files("/style.css", &stored_objects), vec![ + "/style.css@_default", + ]); + assert_eq!(matching_files("/anything.custom", &stored_objects), vec![ + "/anything.custom@friends", + "/anything.custom@family", + ]); } #[test] @@ -659,8 +686,17 @@ mod tests { assert_eq!(should_force_token_refresh(None), false); assert_eq!(should_force_token_refresh(Some(Ok(true))), 
true); assert_eq!(should_force_token_refresh(Some(Ok(false))), false); - assert_eq!(should_force_token_refresh(Some(Err(Errors::new().with_name("yes")))), true); - assert_eq!(should_force_token_refresh(Some(Err(Errors::new().with_name("no")))), true); - assert_eq!(should_force_token_refresh(Some(Err(Errors::new().with_name("")))), true); + assert_eq!( + should_force_token_refresh(Some(Err(Errors::new().with_name("yes")))), + true + ); + assert_eq!( + should_force_token_refresh(Some(Err(Errors::new().with_name("no")))), + true + ); + assert_eq!( + should_force_token_refresh(Some(Err(Errors::new().with_name("")))), + true + ); } }
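Setup note: as committed, the pre-commit hook above is not yet active. Git only runs hooks from `.git/hooks/` unless `core.hooksPath` is set, and the file is added with mode 100644, so it also lacks the executable bit. A minimal activation sketch (an assumed workflow, not shown in this patch; requires Git 2.9+ for `core.hooksPath`):

    # Point Git at the tracked hooks directory instead of .git/hooks
    git config core.hooksPath .githooks
    # Hooks must be executable for Git to run them
    chmod +x .githooks/pre-commit

Keeping the hook in a tracked `.githooks/` directory versions it alongside `rustfmt.toml`, so the same rules drive both the local pre-commit formatting and the re-enabled CI check (`cargo fmt -- --check`).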