Server changes
GPeaky committed Jul 20, 2024
1 parent 63d4135 commit bd2316a
Showing 4 changed files with 93 additions and 77 deletions.
1 change: 0 additions & 1 deletion server/Cargo.lock

Generated file; diff not rendered.

1 change: 0 additions & 1 deletion server/Cargo.toml
@@ -6,7 +6,6 @@ edition = "2021"
# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html
[dependencies]
ahash = "0.8"
once_cell = "1"
mimalloc = "0.1"
mime_guess = "2"
ntex = { version = "2", features = ["tokio"] }
125 changes: 71 additions & 54 deletions server/src/cache.rs
@@ -3,33 +3,33 @@ use std::{fs, io::Write, path::Path};
use ahash::AHashMap;
use brotli::{enc::BrotliEncoderParams, CompressorWriter};
use mime_guess::from_path;
use ntex::http::header::HeaderValue;
use ntex::{http::header::HeaderValue, util::Bytes};

#[derive(Clone)]
pub struct Cache {
cache: AHashMap<String, FileInfo>,
cache: &'static AHashMap<String, FileInfo>,
}

pub struct FileInfo {
pub content_type: HeaderValue,
pub content_type: Bytes,
pub is_compressed: bool,
pub data: Vec<u8>,
pub data: &'static [u8],
}

impl Cache {
pub fn new() -> Cache {
Cache {
cache: AHashMap::new(),
}
}
pub fn new(root_path: &str) -> Cache {
let mut map = AHashMap::new();

// Todo - Use parallelism to load files (Not mandatory because is only for loading files at startup)
pub fn initialize(&mut self, root_path: &str) {
let root_dir = Path::new(root_path);
let root_len = root_dir.to_str().unwrap().len();
Cache::load_files_from_dir(&mut map, root_dir, root_len);

self.load_files_from_dir(root_dir, root_len);
Cache {
cache: Box::leak(Box::new(map)),
}
}

#[inline]
pub fn get(&self, key: &str) -> Option<&FileInfo> {
let cache_key = if !self.cache.contains_key(key) {
"/index.html"
@@ -41,60 +41,77 @@ impl Cache {
}

#[inline]
fn insert_file(&mut self, path: &Path, root_len: usize) {
fn load_files_from_dir(map: &mut AHashMap<String, FileInfo>, dir: &Path, root_len: usize) {
if dir.is_dir() {
if let Ok(entries) = fs::read_dir(dir) {
for entry in entries.filter_map(Result::ok) {
let path: std::path::PathBuf = entry.path();
if path.is_dir() {
Cache::load_files_from_dir(map, &path, root_len);
} else {
Cache::insert_file(map, &path, root_len);
}
}
}
}
}

#[inline]
fn insert_file(map: &mut AHashMap<String, FileInfo>, path: &Path, root_len: usize) {
if let Ok(data) = fs::read(path) {
let mime_type = from_path(path).first_or_octet_stream().to_string();
let should_compress = matches!(
mime_type.as_str(),
"text/html"
| "text/css"
| "application/javascript"
| "application/json"
| "image/svg+xml"
);
let content_type = HeaderValue::from_str(&mime_type).unwrap();
let should_compress = Cache::should_compress(&mime_type);

let data = if should_compress {
let params = BrotliEncoderParams::default();
let mut compressed_data = Vec::new();

{
let mut writter =
CompressorWriter::with_params(&mut compressed_data, 4096, &params);
writter.write_all(&data).unwrap();
writter.flush().unwrap();
}

compressed_data
Cache::compress_data(&data)
} else {
data
Box::leak(data.into_boxed_slice())
};

let mut key = path.to_str().unwrap().to_string().replace('\\', "/");
key = key[root_len..].to_string();
let key = Cache::generate_key(path, root_len);

let file_info = FileInfo {
content_type: HeaderValue::from_str(&mime_type).unwrap(),
is_compressed: should_compress,
data,
};

self.cache.insert(key, file_info);
map.insert(
key,
FileInfo {
content_type: Bytes::copy_from_slice(content_type.as_bytes()),
data,
is_compressed: should_compress,
},
);
}
}

#[inline]
fn load_files_from_dir(&mut self, dir: &Path, root_len: usize) {
if dir.is_dir() {
if let Ok(entries) = fs::read_dir(dir) {
for entry in entries.filter_map(Result::ok) {
let path = entry.path();
if path.is_dir() {
self.load_files_from_dir(&path, root_len);
} else {
self.insert_file(&path, root_len);
}
}
}
fn should_compress(mime_type: &str) -> bool {
matches!(
mime_type,
"text/html"
| "text/css"
| "application/javascript"
| "application/json"
| "image/svg+xml"
)
}

#[inline]
fn compress_data(data: &[u8]) -> &'static [u8] {
let mut compressed_data = Vec::new();
let params = BrotliEncoderParams::default();

{
let mut writer = CompressorWriter::with_params(&mut compressed_data, 4096, &params);

writer.write_all(data).unwrap();
writer.flush().unwrap();
}

Box::leak(compressed_data.into_boxed_slice())
}

#[inline]
fn generate_key(path: &Path, root_len: usize) -> String {
let key = path.to_str().unwrap().to_string().replace('\\', "/");
key[root_len..].to_string()
}
}
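
The core change in cache.rs is that `Cache::new` now builds the whole map up front and then leaks it, so every `FileInfo` can hand out `&'static [u8]` slices and cloning the `Cache` handle stays cheap. Below is a minimal sketch of that `Box::leak` pattern under simplified assumptions: std's `HashMap` stands in for `AHashMap`, one hard-coded entry stands in for the files read from disk, and `StaticCache` and its methods are illustrative names, not the real API.

```rust
use std::collections::HashMap;

// Sketch only: leaking the fully built map gives every value a 'static
// lifetime, so a Clone-able handle holds a plain reference instead of
// owning the data.
#[derive(Clone)]
struct StaticCache {
    map: &'static HashMap<String, &'static [u8]>,
}

impl StaticCache {
    fn new() -> Self {
        let mut map: HashMap<String, &'static [u8]> = HashMap::new();
        // In the real code the values come from fs::read plus optional
        // Brotli compression; here a literal stands in for index.html.
        map.insert("/index.html".to_string(), &b"<html>hello</html>"[..]);
        StaticCache {
            map: Box::leak(Box::new(map)),
        }
    }

    fn get(&self, key: &str) -> &'static [u8] {
        // Unknown paths fall back to /index.html, mirroring Cache::get in the diff.
        self.map
            .get(key)
            .or_else(|| self.map.get("/index.html"))
            .copied()
            .unwrap()
    }
}

fn main() {
    let cache = StaticCache::new();
    let handle = cache.clone(); // copies one reference, not the file data
    assert_eq!(cache.get("/unknown-route"), handle.get("/index.html"));
}
```

The trade-off is that the leaked map lives for the life of the process, which is what a load-once, serve-forever cache wants; nothing ever needs to be freed or reference-counted.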
43 changes: 22 additions & 21 deletions server/src/main.rs
@@ -2,39 +2,35 @@ use cache::Cache;
use mimalloc::MiMalloc;
use ntex::{
http::header::{HeaderValue, CONTENT_ENCODING},
web::{self, App, HttpRequest, HttpResponse},
web::{self, types::State, App, HttpRequest, HttpResponse},
};
use once_cell::sync::Lazy;
use tokio::time::Instant;

mod cache;

#[global_allocator]
static GLOBAL: MiMalloc = MiMalloc;

static CACHE: Lazy<Cache> = Lazy::new(|| {
println!("Loading files into cache");

let mut cache = Cache::new();
cache.initialize("./dist");

println!("Files loaded & saved in cache");
cache
});

async fn cached_files(req: HttpRequest) -> HttpResponse {
async fn cached_files(req: HttpRequest, cache: State<Cache>) -> HttpResponse {
let path = req.path();

if let Some(file) = CACHE.get(path) {
let time = Instant::now();
if let Some(cached_file) = cache.get(path) {
let mut response = HttpResponse::Ok()
.content_type(&file.content_type)
.body(&file.data[..]);
.content_type(unsafe {
HeaderValue::from_shared_unchecked(cached_file.content_type.clone())
})
.body(cached_file.data);

if file.is_compressed {
if cached_file.is_compressed {
response
.headers_mut()
.insert(CONTENT_ENCODING, HeaderValue::from_static("br"));
}

let time = time.elapsed();
println!("Time to process request: {:#?}", time);

response
} else {
HttpResponse::NotFound().finish()
@@ -43,10 +43,15 @@ async fn cached_files(req: HttpRequest) -> HttpResponse {

#[ntex::main]
async fn main() -> std::io::Result<()> {
let cache = Cache::new("./dist");
println!("Initializing web server");

web::server(move || App::new().default_service(web::route().to(cached_files)))
.bind("0.0.0.0:5174")?
.run()
.await
web::server(move || {
App::new()
.default_service(web::route().to(cached_files))
.state(cache.clone())
})
.bind("0.0.0.0:5174")?
.run()
.await
}
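
`FileInfo` now stores the content type as `Bytes` rather than a `HeaderValue`, and the handler rebuilds the header with the unsafe `from_shared_unchecked` constructor. That is sound only because the bytes were produced from a `HeaderValue` validated once at startup. A hedged sketch of that validate-once, skip-later split, using the same ntex calls that appear in the diff (the two helper functions are illustrative, not part of the commit):

```rust
use ntex::{http::header::HeaderValue, util::Bytes};

// Illustrative helper: validate once at load time, as insert_file does;
// panics on an invalid MIME string.
fn store_content_type(mime_type: &str) -> Bytes {
    let validated = HeaderValue::from_str(mime_type).unwrap();
    Bytes::copy_from_slice(validated.as_bytes())
}

// Illustrative helper: rebuild the header on the hot path without re-validating.
// Sound only because `stored` came from a previously validated HeaderValue.
fn serve_content_type(stored: &Bytes) -> HeaderValue {
    unsafe { HeaderValue::from_shared_unchecked(stored.clone()) }
}
```

Cloning the `Bytes` shares the underlying buffer rather than copying it, so skipping per-request validation is the whole point of the change; if the stored bytes could ever come from an unvalidated source, the safe `HeaderValue::from_str` path would be the conservative choice.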
