🚧 WIP Migrate from Rocket to Axum
Serving pages and 404 work, need to finish authentication.
RemiBardon committed Aug 21, 2024
1 parent 92cb6a8 commit 55f8281
Showing 16 changed files with 861 additions and 1,052 deletions.
1,240 changes: 513 additions & 727 deletions src/Cargo.lock

Large diffs are not rendered by default.

10 changes: 9 additions & 1 deletion src/Cargo.toml
```diff
@@ -9,17 +9,25 @@ members = [
 resolver = "2"
 
 [workspace.dependencies]
+axum = { version = "0.7", features = ["macros"] }
+axum-extra = { version = "0.9", features = ["cookie-private"] }
 base64 = "0.22"
 biscuit-auth = "5"
 chrono = "0.4"
 hex = "0.4"
 iso8601-duration = "0.2"
 lazy_static = "1"
-rocket = "0.5"
+mime = "0.3"
 serde = { version = "1", features = ["derive"] }
 serde_json = "1"
+serde_urlencoded = "0.7"
+serde_with = "3"
 tera = "1"
 thiserror = "1"
 time = "0.3"
+tokio = { version = "1", features = ["macros", "rt-multi-thread"] }
+tower = "0.5"
+tower-http = { version = "0.5", features = ["fs", "trace"] }
 tracing = "0.1"
+tracing-subscriber = { version = "0.3", features = ["env-filter"] }
 urlencoding = "2"
```
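
For context, the added dependencies are the Axum-stack replacement for Rocket's batteries-included server: axum handles routing, tokio provides the async runtime, and tower-http covers static-file serving ("fs") and request logging ("trace"). A minimal sketch of how they compose (not code from this commit; the route, port, and directory are hypothetical):

```rust
use axum::{routing::get, Router};
use tower_http::{services::ServeDir, trace::TraceLayer};

#[tokio::main]
async fn main() {
    // `tracing` + `tracing-subscriber` replace Rocket's built-in logging;
    // the `env-filter` feature lets `RUST_LOG` control verbosity.
    tracing_subscriber::fmt::init();

    let app = Router::new()
        .route("/health", get(|| async { "OK" }))
        // tower-http's "fs" feature serves a generated site from disk.
        .fallback_service(ServeDir::new("out"))
        // The "trace" feature logs every request/response pair.
        .layer(TraceLayer::new_for_http());

    let listener = tokio::net::TcpListener::bind("0.0.0.0:8080").await.unwrap();
    axum::serve(listener, app).await.unwrap();
}
```

With `fallback_service`, unmatched routes fall through to the static site, which is one plausible way the "404 work" mentioned in the commit message can be wired; the actual routing in this commit may differ.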
7 changes: 5 additions & 2 deletions src/helpers/Cargo.toml
```diff
@@ -1,15 +1,18 @@
 [package]
 name = "orangutan-helpers"
-version = "0.1.1"
+version = "0.2.0"
 edition = "2021"
 
 # See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html
 
 [dependencies]
+axum = { workspace = true }
 biscuit-auth = { workspace = true }
 hex = { workspace = true }
 lazy_static = { workspace = true }
-rocket = { workspace = true }
+serde = { workspace = true }
 serde_json = { workspace = true }
+serde_with = { workspace = true }
 thiserror = { workspace = true }
+tower-http = { workspace = true }
 tracing = { workspace = true }
```
14 changes: 6 additions & 8 deletions src/helpers/src/config.rs
```diff
@@ -4,26 +4,24 @@ use lazy_static::lazy_static;
 
 pub const THEME_NAME: &str = "Orangutan";
 pub const DATA_FILE_EXTENSION: &str = "orangutan";
-pub(super) const READ_ALLOWED_FIELD: &str = "read_allowed";
-pub const PATH_FIELD: &str = "path";
 pub const DEFAULT_PROFILE: &str = "_default";
 pub const ROOT_KEY_NAME: &'static str = "_biscuit_root";
 
 pub(super) const WEBSITE_DIR_NAME: &'static str = "website";
 
 lazy_static! {
-    static ref WORK_DIR: PathBuf = env::current_dir().unwrap();
     pub static ref WEBSITE_REPOSITORY: String = env::var("WEBSITE_REPOSITORY")
         .expect("Environment variable `WEBSITE_REPOSITORY` is required.");
+    pub static ref MODE: Result<String, env::VarError> = env::var("MODE");
+    pub static ref KEYS_MODE: Result<String, env::VarError> = env::var("KEYS_MODE");
+}
+lazy_static! {
+    static ref WORK_DIR: PathBuf = env::current_dir().unwrap();
     pub static ref BASE_DIR: PathBuf = WORK_DIR.join(".orangutan");
     pub static ref TMP_DIR: PathBuf = BASE_DIR.join("tmp");
     pub static ref KEYS_DIR: PathBuf = BASE_DIR.join("keys");
-    pub static ref MODE: Result<String, env::VarError> = env::var("MODE");
-    pub static ref KEYS_MODE: Result<String, env::VarError> = env::var("KEYS_MODE");
-    pub(super) static ref WEBSITE_ROOT: PathBuf = BASE_DIR.join("website");
+    pub(super) static ref WEBSITE_ROOT: PathBuf = BASE_DIR.join("website-src");
     pub(super) static ref HUGO_CONFIG_DIR: PathBuf = BASE_DIR.join("hugo-config");
     pub static ref DEST_DIR: PathBuf = BASE_DIR.join("out");
     pub static ref WEBSITE_DATA_DIR: PathBuf = DEST_DIR.join("data");
     pub(super) static ref SUFFIXED_EXTENSIONS: Vec<&'static str> =
         vec!["html", "json", "xml", "css", "js", "txt"];
 }
```
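
Worth noting for reviewers: `lazy_static!` blocks are purely declarative groupings, so splitting the environment-variable statics into their own block changes organization, not behavior. Each static is still initialized on first dereference, which is why the `.expect` on `WEBSITE_REPOSITORY` only aborts when the value is first used, not at startup. A minimal sketch of that behavior (the static name is hypothetical):

```rust
use lazy_static::lazy_static;
use std::env;

lazy_static! {
    static ref REQUIRED_VAR: String = env::var("WEBSITE_REPOSITORY")
        .expect("Environment variable `WEBSITE_REPOSITORY` is required.");
}

fn main() {
    // No panic yet: the static has not been dereferenced.
    println!("Starting up…");
    // First dereference runs the initializer; the `.expect` fires here
    // if the environment variable is unset.
    println!("Repository: {}", *REQUIRED_VAR);
}
```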
8 changes: 5 additions & 3 deletions src/helpers/src/generate.rs
```diff
@@ -406,14 +406,14 @@ pub fn trash_outdated_websites() -> Result<State, Error> {
         hugo_config_generated: HUGO_CONFIG_GENERATED.load(Ordering::Relaxed),
         data_files_generated: DATA_FILES_GENERATED.load(Ordering::Relaxed),
         generated_websites: GENERATED_WEBSITES.lock().unwrap().to_owned(),
-        used_profiles: super::USED_PROFILES.lock().unwrap().to_owned(),
+        used_profiles: super::USED_PROFILES.read().unwrap().to_owned(),
     };
 
     // Clear caches
     HUGO_CONFIG_GENERATED.store(false, Ordering::Relaxed);
     DATA_FILES_GENERATED.store(false, Ordering::Relaxed);
     GENERATED_WEBSITES.lock().unwrap().clear();
-    *super::USED_PROFILES.lock().unwrap() = None;
+    *super::USED_PROFILES.write().unwrap() = None;
 
     Ok(state)
 }
@@ -428,7 +428,7 @@ pub fn recover_trash(state: State) -> Result<(), Error> {
     HUGO_CONFIG_GENERATED.store(state.hugo_config_generated, Ordering::Relaxed);
     DATA_FILES_GENERATED.store(state.data_files_generated, Ordering::Relaxed);
     *GENERATED_WEBSITES.lock().unwrap() = state.generated_websites;
-    *super::USED_PROFILES.lock().unwrap() = state.used_profiles;
+    *super::USED_PROFILES.write().unwrap() = state.used_profiles;
 
     Ok(())
 }
@@ -456,4 +456,6 @@ pub enum Error {
     CannotCreateHugoConfigFile(io::Error),
     #[error("IO error: {0}")]
     IOError(#[from] io::Error),
+    #[error("Could not read page metadata: JSON error: {0}")]
+    CannotReadPageMetadata(serde_json::Error),
 }
```
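
The `.lock()` → `.read()`/`.write()` swaps above track `USED_PROFILES` changing from `Mutex` to `RwLock` in lib.rs: cache reads now take a shared lock and no longer serialize, while cache invalidation takes an exclusive one. A standalone sketch of the pattern (hypothetical names, not this crate's types):

```rust
use std::sync::RwLock;

// `RwLock::new` is const, so it can initialize a `static` (Rust 1.63+).
static CACHE: RwLock<Option<u32>> = RwLock::new(None);

fn read_cached() -> Option<u32> {
    // Shared lock: any number of readers may hold it concurrently.
    *CACHE.read().unwrap()
}

fn invalidate() {
    // Exclusive lock: waits until all readers have released theirs.
    *CACHE.write().unwrap() = None;
}

fn main() {
    *CACHE.write().unwrap() = Some(42);
    assert_eq!(read_cached(), Some(42));
    invalidate();
    assert_eq!(read_cached(), None);
}
```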
175 changes: 102 additions & 73 deletions src/helpers/src/lib.rs
```diff
@@ -7,48 +7,61 @@ use std::{
     collections::HashSet,
     fs::{self, File},
     io,
-    path::{Path, PathBuf},
-    sync::{Arc, Mutex},
+    ops::Deref,
+    path::PathBuf,
+    sync::{Arc, RwLock},
 };
 
 use lazy_static::lazy_static;
-use serde_json::Value;
-use tracing::trace;
+use serde::{de::DeserializeOwned, Deserialize};
+use serde_with::{serde_as, DefaultOnNull};
+use tracing::{debug, error, trace};
 
 use crate::config::*;
 
 lazy_static! {
-    static ref USED_PROFILES: Arc<Mutex<Option<&'static HashSet<String>>>> =
-        Arc::new(Mutex::new(None));
+    static ref USED_PROFILES: Arc<RwLock<Option<&'static HashSet<String>>>> = Arc::default();
 }
 
 pub fn used_profiles<'a>() -> &'a HashSet<String> {
-    let mut used_profiles = USED_PROFILES.lock().unwrap();
-    if let Some(profiles) = used_profiles.clone() {
+    if let Some(profiles) = *USED_PROFILES.read().unwrap() {
         trace!("Read used profiles from cache");
         return profiles;
     }
 
-    trace!("Reading used profiles…");
+    debug!("Reading all used profiles…");
     let acc: &'static mut HashSet<String> = Box::leak(Box::new(HashSet::new()));
 
    for data_file in find_data_files() {
         // trace!("Reading <{}>…", data_file.display());
 
-        // Make sure this generator isn't broken (could be replaced by unit tests)
-        // let html_file = html_file(&data_file).unwrap();
-        // trace!("{}", html_file.display());
-
-        let read_allowed = read_allowed(&data_file).unwrap();
+        let metadata: PageMetadata = match deser(&data_file) {
+            Ok(Some(metadata)) => metadata,
+            Ok(None) => {
+                error!(
+                    "Could not read page metadata at <{}>: File not found",
+                    data_file.display(),
+                );
+                continue;
+            },
+            Err(err) => {
+                error!(
+                    "Could not read page metadata at <{}>: {err}",
+                    data_file.display(),
+                );
+                continue;
+            },
+        };
+        let read_allowed = metadata.read_allowed;
         // trace!("  read_allowed: {:?}", read_allowed);
 
         // Store new profiles
-        read_allowed.iter().for_each(|p| {
-            acc.insert(p.clone());
+        read_allowed.into_iter().for_each(|p| {
+            acc.insert(p.to_owned());
         });
     }
 
-    *used_profiles = Some(acc);
+    *USED_PROFILES.write().unwrap() = Some(acc);
 
     acc
 }
@@ -76,43 +89,13 @@ pub fn find(
     }
 }
 
-fn _html_file(data_file: &PathBuf) -> Option<Option<PathBuf>> {
-    let file = File::open(data_file).ok()?;
-    let reader = io::BufReader::new(file);
-    let data: Value = match serde_json::from_reader(reader) {
-        Ok(data) => data,
-        Err(_) => return None,
-    };
-
-    let path = data.get(PATH_FIELD)?;
-
-    Some(serde_json::from_value(path.to_owned()).ok())
-}
-
-// fn html_file(data_file: &PathBuf) -> Result<PathBuf, ()> {
-//     match _html_file(data_file) {
-//         Some(Some(path)) => Ok(path),
-//         Some(None) => {
-//             error!("Path not defined");
-//             Err(())
-//         },
-//         None => {
-//             error!("File not found");
-//             Err(())
-//         },
-//     }
-// }
-
-pub fn data_file(html_file: &PathBuf) -> PathBuf {
-    let mut data_file_rel = html_file
-        .strip_prefix(WEBSITE_DATA_DIR.to_path_buf())
-        .unwrap_or(html_file)
-        .with_extension(DATA_FILE_EXTENSION);
-    data_file_rel = match data_file_rel.strip_prefix("/") {
+pub fn data_file_path(page_relpath: &PathBuf) -> PathBuf {
+    let mut data_file_relpath = page_relpath.with_extension(DATA_FILE_EXTENSION);
+    data_file_relpath = match data_file_relpath.strip_prefix("/") {
         Ok(trimmed) => trimmed.to_path_buf(),
-        Err(_) => data_file_rel,
+        Err(_) => data_file_relpath,
     };
-    WEBSITE_DATA_DIR.join(data_file_rel)
+    WEBSITE_DATA_DIR.join(data_file_relpath)
 }
 
 fn find_data_files() -> Vec<PathBuf> {
@@ -125,35 +108,81 @@ fn find_data_files() -> Vec<PathBuf> {
     data_files
 }
 
-fn _read_allowed(data_file: &PathBuf) -> Option<Option<Vec<String>>> {
-    let file = File::open(data_file).ok()?;
-    let reader = io::BufReader::new(file);
-    let data: Value = match serde_json::from_reader(reader) {
-        Ok(data) => data,
-        Err(_) => return None,
-    };
-
-    let read_allowed = data.get(READ_ALLOWED_FIELD)?;
-
-    Some(serde_json::from_value(read_allowed.to_owned()).ok())
-}
-
-// `None` if file not found
-pub fn read_allowed(data_file: &PathBuf) -> Option<Vec<String>> {
-    _read_allowed(data_file).map(|o| o.unwrap_or(vec![DEFAULT_PROFILE.to_string()]))
-}
-
-pub fn object_key<P: AsRef<Path>>(
-    path: &P,
-    profile: &str,
-) -> String {
-    let path = path.as_ref();
-    if let Some(ext) = path.extension() {
-        if SUFFIXED_EXTENSIONS.contains(&ext.to_str().unwrap()) {
-            return format!("{}@{}", path.display(), profile);
-        }
-    }
-    path.display().to_string()
-}
+#[serde_as]
+#[derive(Deserialize)]
+pub struct PageMetadata {
+    // NOTE: Hugo taxonomy term pages contain `"read_allowed": null`.
+    // TODO: Investigate and fix this, to remove the `serde` default which could be in conflict with the user's preference.
+    #[serde(default)]
+    #[serde_as(deserialize_as = "DefaultOnNull")]
+    pub read_allowed: ReadAllowed,
+    pub path: PathBuf,
+}
+
+#[derive(Debug, Clone, Deserialize)]
+pub struct ReadAllowed(Vec<String>);
+
+impl Deref for ReadAllowed {
+    type Target = Vec<String>;
+
+    fn deref(&self) -> &Self::Target {
+        &self.0
+    }
+}
+
+impl Default for ReadAllowed {
+    fn default() -> Self {
+        Self(vec![DEFAULT_PROFILE.to_owned()])
+    }
+}
+
+impl IntoIterator for ReadAllowed {
+    type Item = <<Self as Deref>::Target as IntoIterator>::Item;
+    type IntoIter = <<Self as Deref>::Target as IntoIterator>::IntoIter;
+
+    fn into_iter(self) -> Self::IntoIter {
+        self.0.into_iter()
+    }
+}
+
+fn deser<T: DeserializeOwned>(file_path: &PathBuf) -> Result<Option<T>, serde_json::Error> {
+    let Ok(file) = File::open(file_path) else {
+        return Ok(None);
+    };
+    let res: T = serde_json::from_reader(file)?;
+    Ok(Some(res))
+}
+
+fn deser_first_match<T: DeserializeOwned>(
+    file_paths: Vec<PathBuf>
+) -> Result<Option<T>, serde_json::Error> {
+    for file_path in file_paths.iter() {
+        trace!("Trying {}…", file_path.display());
+        match deser(file_path)? {
+            Some(res) => {
+                // TODO: Check if this index is always the same.
+                // debug!("Found page metadata in match #{i}");
+                return Ok(Some(res));
+            },
+            None => continue,
+        }
+    }
+    Ok(None)
+}
+
+// `Ok(None)` if file not found.
+// `Err(_)` if file found but deserialization error.
+// `Ok(Some(_))` if file found.
+pub fn page_metadata(page_relpath: &PathBuf) -> Result<Option<PageMetadata>, serde_json::Error> {
+    let mut file_paths = vec![
+        data_file_path(page_relpath),
+        data_file_path(&page_relpath.join("index.html")),
+    ];
+    // Don't try parsing the exact path if it points to a directory.
+    if page_relpath.is_dir() {
+        file_paths.remove(0);
+    }
+    deser_first_match(file_paths)
+}
 
 pub fn copy_directory(
```
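
A hypothetical call site for the new `page_metadata` API, following the `Ok(None)`/`Err(_)` contract documented above (the function and its fallback policy are illustrative, not part of this commit):

```rust
use std::path::PathBuf;

use orangutan_helpers::{page_metadata, PageMetadata, ReadAllowed};

/// Resolve which profiles may read a page, falling back to the default
/// profile when no data file exists or when it cannot be parsed.
fn profiles_for(page_relpath: &PathBuf) -> ReadAllowed {
    match page_metadata(page_relpath) {
        Ok(Some(PageMetadata { read_allowed, .. })) => read_allowed,
        // No `.orangutan` data file: treat the page as default-profile only.
        Ok(None) => ReadAllowed::default(),
        Err(err) => {
            eprintln!("Invalid metadata for <{}>: {err}", page_relpath.display());
            ReadAllowed::default()
        },
    }
}
```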
1 change: 0 additions & 1 deletion src/helpers/src/readers/mod.rs
```diff
@@ -1,2 +1 @@
-pub mod keys_reader;
 pub mod object_reader;
```