From 0191d3f183b4588a20f9453bbc74f7679458336c Mon Sep 17 00:00:00 2001 From: Wyatt Verchere Date: Thu, 9 Nov 2023 15:53:30 -0800 Subject: [PATCH 1/4] basic redis add --- src/database/models/categories.rs | 14 ++ src/database/models/collection_item.rs | 9 +- src/database/models/flow_item.rs | 6 + src/database/models/image_item.rs | 8 +- src/database/models/notification_item.rs | 6 +- src/database/models/organization_item.rs | 19 +- src/database/models/pat_item.rs | 26 ++- src/database/models/project_item.rs | 28 ++- src/database/models/session_item.rs | 25 ++- src/database/models/team_item.rs | 10 +- src/database/models/user_item.rs | 30 ++- src/database/models/version_item.rs | 23 ++- src/database/redis.rs | 152 ++++++++------- src/util/mod.rs | 1 + src/util/redis.rs | 18 ++ tests/analytics.rs | 234 +++++++++++------------ tests/project.rs | 38 ++-- 17 files changed, 402 insertions(+), 245 deletions(-) create mode 100644 src/util/redis.rs diff --git a/src/database/models/categories.rs b/src/database/models/categories.rs index abb6d7f2..961c05c8 100644 --- a/src/database/models/categories.rs +++ b/src/database/models/categories.rs @@ -102,6 +102,8 @@ impl Category { where E: sqlx::Executor<'a, Database = sqlx::Postgres>, { + let mut redis = redis.connect().await?; + let res: Option> = redis .get_deserialized_from_json(TAGS_NAMESPACE, "category") .await?; @@ -161,6 +163,8 @@ impl Loader { where E: sqlx::Executor<'a, Database = sqlx::Postgres>, { + let mut redis = redis.connect().await?; + let res: Option> = redis .get_deserialized_from_json(TAGS_NAMESPACE, "loader") .await?; @@ -240,6 +244,8 @@ impl GameVersion { where E: sqlx::Executor<'a, Database = sqlx::Postgres>, { + let mut redis = redis.connect().await?; + let res: Option> = redis .get_deserialized_from_json(TAGS_NAMESPACE, "game_version") .await?; @@ -382,6 +388,8 @@ impl DonationPlatform { where E: sqlx::Executor<'a, Database = sqlx::Postgres>, { + let mut redis = redis.connect().await?; + let res: Option> = redis .get_deserialized_from_json(TAGS_NAMESPACE, "donation_platform") .await?; @@ -436,6 +444,8 @@ impl ReportType { where E: sqlx::Executor<'a, Database = sqlx::Postgres>, { + let mut redis = redis.connect().await?; + let res: Option> = redis .get_deserialized_from_json(TAGS_NAMESPACE, "report_type") .await?; @@ -484,6 +494,8 @@ impl ProjectType { where E: sqlx::Executor<'a, Database = sqlx::Postgres>, { + let mut redis = redis.connect().await?; + let res: Option> = redis .get_deserialized_from_json(TAGS_NAMESPACE, "project_type") .await?; @@ -532,6 +544,8 @@ impl SideType { where E: sqlx::Executor<'a, Database = sqlx::Postgres>, { + let mut redis = redis.connect().await?; + let res: Option> = redis .get_deserialized_from_json(TAGS_NAMESPACE, "side_type") .await?; diff --git a/src/database/models/collection_item.rs b/src/database/models/collection_item.rs index d000e2ce..e0b17d1b 100644 --- a/src/database/models/collection_item.rs +++ b/src/database/models/collection_item.rs @@ -157,6 +157,8 @@ impl Collection { { use futures::TryStreamExt; + let mut redis = redis.connect().await?; + if collection_ids.is_empty() { return Ok(Vec::new()); } @@ -166,7 +168,10 @@ impl Collection { if !collection_ids.is_empty() { let collections = redis - .multi_get::(COLLECTIONS_NAMESPACE, collection_ids.iter().map(|x| x.0)) + .multi_get( + COLLECTIONS_NAMESPACE, + collection_ids.iter().map(|x| x.0.to_string()).collect(), + ) .await?; for collection in collections { @@ -240,6 +245,8 @@ impl Collection { } pub async fn clear_cache(id: CollectionId, 
redis: &RedisPool) -> Result<(), DatabaseError> { + let mut redis = redis.connect().await?; + redis.delete(COLLECTIONS_NAMESPACE, id.0).await?; Ok(()) } diff --git a/src/database/models/flow_item.rs b/src/database/models/flow_item.rs index fe81e4a8..22d30895 100644 --- a/src/database/models/flow_item.rs +++ b/src/database/models/flow_item.rs @@ -58,6 +58,8 @@ impl Flow { expires: Duration, redis: &RedisPool, ) -> Result { + let mut redis = redis.connect().await?; + let flow = ChaCha20Rng::from_entropy() .sample_iter(&Alphanumeric) .take(32) @@ -71,6 +73,8 @@ impl Flow { } pub async fn get(id: &str, redis: &RedisPool) -> Result, DatabaseError> { + let mut redis = redis.connect().await?; + redis.get_deserialized_from_json(FLOWS_NAMESPACE, id).await } @@ -91,6 +95,8 @@ impl Flow { } pub async fn remove(id: &str, redis: &RedisPool) -> Result, DatabaseError> { + let mut redis = redis.connect().await?; + redis.delete(FLOWS_NAMESPACE, id).await?; Ok(Some(())) } diff --git a/src/database/models/image_item.rs b/src/database/models/image_item.rs index 34badd65..ac33caa3 100644 --- a/src/database/models/image_item.rs +++ b/src/database/models/image_item.rs @@ -180,6 +180,7 @@ impl Image { { use futures::TryStreamExt; + let mut redis = redis.connect().await?; if image_ids.is_empty() { return Ok(Vec::new()); } @@ -191,7 +192,10 @@ impl Image { if !image_ids.is_empty() { let images = redis - .multi_get::(IMAGES_NAMESPACE, image_ids) + .multi_get( + IMAGES_NAMESPACE, + image_ids.iter().map(|x| x.to_string()).collect(), + ) .await?; for image in images { if let Some(image) = image.and_then(|x| serde_json::from_str::(&x).ok()) { @@ -246,6 +250,8 @@ impl Image { } pub async fn clear_cache(id: ImageId, redis: &RedisPool) -> Result<(), DatabaseError> { + let mut redis = redis.connect().await?; + redis.delete(IMAGES_NAMESPACE, id.0).await?; Ok(()) } diff --git a/src/database/models/notification_item.rs b/src/database/models/notification_item.rs index 2b15a4bd..2bc89fec 100644 --- a/src/database/models/notification_item.rs +++ b/src/database/models/notification_item.rs @@ -174,8 +174,10 @@ impl Notification { where E: sqlx::Executor<'a, Database = sqlx::Postgres> + Copy, { + let mut redis = redis.connect().await?; + let cached_notifications: Option> = redis - .get_deserialized_from_json(USER_NOTIFICATIONS_NAMESPACE, user_id.0) + .get_deserialized_from_json(USER_NOTIFICATIONS_NAMESPACE, &user_id.0.to_string()) .await?; if let Some(notifications) = cached_notifications { @@ -319,6 +321,8 @@ impl Notification { user_ids: impl IntoIterator, redis: &RedisPool, ) -> Result<(), DatabaseError> { + let mut redis = redis.connect().await?; + redis .delete_many( user_ids diff --git a/src/database/models/organization_item.rs b/src/database/models/organization_item.rs index f92622df..b802c85f 100644 --- a/src/database/models/organization_item.rs +++ b/src/database/models/organization_item.rs @@ -103,6 +103,8 @@ impl Organization { { use futures::stream::TryStreamExt; + let mut redis = redis.connect().await?; + if organization_strings.is_empty() { return Ok(Vec::new()); } @@ -120,21 +122,26 @@ impl Organization { organization_ids.append( &mut redis - .multi_get::( + .multi_get( ORGANIZATIONS_TITLES_NAMESPACE, organization_strings .iter() - .map(|x| x.to_string().to_lowercase()), + .map(|x| x.to_string().to_lowercase()) + .collect::>(), ) .await? 
.into_iter() .flatten() + .filter_map(|x| x.parse::().ok()) .collect(), ); if !organization_ids.is_empty() { let organizations = redis - .multi_get::(ORGANIZATIONS_NAMESPACE, organization_ids) + .multi_get( + ORGANIZATIONS_NAMESPACE, + organization_ids.iter().map(|x| x.to_string()).collect(), + ) .await?; for organization in organizations { @@ -197,8 +204,8 @@ impl Organization { redis .set( ORGANIZATIONS_TITLES_NAMESPACE, - organization.title.to_lowercase(), - organization.id.0, + &organization.title.to_lowercase(), + &organization.id.0.to_string(), None, ) .await?; @@ -318,6 +325,8 @@ impl Organization { title: Option, redis: &RedisPool, ) -> Result<(), super::DatabaseError> { + let mut redis = redis.connect().await?; + redis .delete_many([ (ORGANIZATIONS_NAMESPACE, Some(id.0.to_string())), diff --git a/src/database/models/pat_item.rs b/src/database/models/pat_item.rs index fc2432ae..4571ca81 100644 --- a/src/database/models/pat_item.rs +++ b/src/database/models/pat_item.rs @@ -89,6 +89,8 @@ impl PersonalAccessToken { { use futures::TryStreamExt; + let mut redis = redis.connect().await?; + if pat_strings.is_empty() { return Ok(Vec::new()); } @@ -106,19 +108,23 @@ impl PersonalAccessToken { pat_ids.append( &mut redis - .multi_get::( + .multi_get( PATS_TOKENS_NAMESPACE, - pat_strings.iter().map(|x| x.to_string()), + pat_strings.iter().map(|x| x.to_string()).collect(), ) .await? .into_iter() .flatten() + .filter_map(|x| x.parse::().ok()) .collect(), ); if !pat_ids.is_empty() { let pats = redis - .multi_get::(PATS_NAMESPACE, pat_ids) + .multi_get( + PATS_NAMESPACE, + pat_ids.iter().map(|x| x.to_string()).collect(), + ) .await?; for pat in pats { if let Some(pat) = @@ -174,8 +180,8 @@ impl PersonalAccessToken { redis .set( PATS_TOKENS_NAMESPACE, - pat.access_token.clone(), - pat.id.0, + &pat.access_token, + &pat.id.0.to_string(), None, ) .await?; @@ -194,8 +200,10 @@ impl PersonalAccessToken { where E: sqlx::Executor<'a, Database = sqlx::Postgres>, { + let mut redis = redis.connect().await?; + let res = redis - .get_deserialized_from_json::, _>(PATS_USERS_NAMESPACE, user_id.0) + .get_deserialized_from_json::>(PATS_USERS_NAMESPACE, &user_id.0.to_string()) .await?; if let Some(res) = res { @@ -220,8 +228,8 @@ impl PersonalAccessToken { redis .set( PATS_USERS_NAMESPACE, - user_id.0, - serde_json::to_string(&db_pats)?, + &user_id.0.to_string(), + &serde_json::to_string(&db_pats)?, None, ) .await?; @@ -232,6 +240,8 @@ impl PersonalAccessToken { clear_pats: Vec<(Option, Option, Option)>, redis: &RedisPool, ) -> Result<(), DatabaseError> { + let mut redis = redis.connect().await?; + if clear_pats.is_empty() { return Ok(()); } diff --git a/src/database/models/project_item.rs b/src/database/models/project_item.rs index 365dd473..a09f32a4 100644 --- a/src/database/models/project_item.rs +++ b/src/database/models/project_item.rs @@ -530,6 +530,8 @@ impl Project { return Ok(Vec::new()); } + let mut redis = redis.connect().await?; + let mut found_projects = Vec::new(); let mut remaining_strings = project_strings .iter() @@ -543,19 +545,26 @@ impl Project { project_ids.append( &mut redis - .multi_get::( + .multi_get( PROJECTS_SLUGS_NAMESPACE, - project_strings.iter().map(|x| x.to_string().to_lowercase()), + project_strings + .iter() + .map(|x| x.to_string().to_lowercase()) + .collect(), ) .await? 
.into_iter() .flatten() + .filter_map(|x| x.parse::().ok()) .collect(), ); if !project_ids.is_empty() { let projects = redis - .multi_get::(PROJECTS_NAMESPACE, project_ids) + .multi_get( + PROJECTS_NAMESPACE, + project_ids.iter().map(|x| x.to_string()).collect(), + ) .await?; for project in projects { if let Some(project) = @@ -705,8 +714,8 @@ impl Project { redis .set( PROJECTS_SLUGS_NAMESPACE, - slug.to_lowercase(), - project.inner.id.0, + &slug.to_lowercase(), + &project.inner.id.0.to_string(), None, ) .await?; @@ -728,8 +737,13 @@ impl Project { { type Dependencies = Vec<(Option, Option, Option)>; + let mut redis = redis.connect().await?; + let dependencies = redis - .get_deserialized_from_json::(PROJECTS_DEPENDENCIES_NAMESPACE, id.0) + .get_deserialized_from_json::( + PROJECTS_DEPENDENCIES_NAMESPACE, + &id.0.to_string(), + ) .await?; if let Some(dependencies) = dependencies { return Ok(dependencies); @@ -824,6 +838,8 @@ impl Project { clear_dependencies: Option, redis: &RedisPool, ) -> Result<(), DatabaseError> { + let mut redis = redis.connect().await?; + redis .delete_many([ (PROJECTS_NAMESPACE, Some(id.0.to_string())), diff --git a/src/database/models/session_item.rs b/src/database/models/session_item.rs index ff9a874e..815cfbca 100644 --- a/src/database/models/session_item.rs +++ b/src/database/models/session_item.rs @@ -130,6 +130,8 @@ impl Session { { use futures::TryStreamExt; + let mut redis = redis.connect().await?; + if session_strings.is_empty() { return Ok(Vec::new()); } @@ -147,19 +149,23 @@ impl Session { session_ids.append( &mut redis - .multi_get::( + .multi_get( SESSIONS_IDS_NAMESPACE, - session_strings.iter().map(|x| x.to_string()), + session_strings.iter().map(|x| x.to_string()).collect(), ) .await? .into_iter() .flatten() + .filter_map(|x| x.parse::().ok()) .collect(), ); if !session_ids.is_empty() { let sessions = redis - .multi_get::(SESSIONS_NAMESPACE, session_ids) + .multi_get( + SESSIONS_NAMESPACE, + session_ids.iter().map(|x| x.to_string()).collect(), + ) .await?; for session in sessions { if let Some(session) = @@ -218,8 +224,8 @@ impl Session { redis .set( SESSIONS_IDS_NAMESPACE, - session.session.clone(), - session.id.0, + &session.session, + &session.id.0.to_string(), None, ) .await?; @@ -238,8 +244,13 @@ impl Session { where E: sqlx::Executor<'a, Database = sqlx::Postgres>, { + let mut redis = redis.connect().await?; + let res = redis - .get_deserialized_from_json::, _>(SESSIONS_USERS_NAMESPACE, user_id.0) + .get_deserialized_from_json::>( + SESSIONS_USERS_NAMESPACE, + &user_id.0.to_string(), + ) .await?; if let Some(res) = res { @@ -272,6 +283,8 @@ impl Session { clear_sessions: Vec<(Option, Option, Option)>, redis: &RedisPool, ) -> Result<(), DatabaseError> { + let mut redis = redis.connect().await?; + if clear_sessions.is_empty() { return Ok(()); } diff --git a/src/database/models/team_item.rs b/src/database/models/team_item.rs index a513aefe..41057c3e 100644 --- a/src/database/models/team_item.rs +++ b/src/database/models/team_item.rs @@ -197,18 +197,23 @@ impl TeamMember { where E: sqlx::Executor<'a, Database = sqlx::Postgres> + Copy, { + use futures::stream::TryStreamExt; + if team_ids.is_empty() { return Ok(Vec::new()); } - use futures::stream::TryStreamExt; + let mut redis = redis.connect().await?; let mut team_ids_parsed: Vec = team_ids.iter().map(|x| x.0).collect(); let mut found_teams = Vec::new(); let teams = redis - .multi_get::(TEAMS_NAMESPACE, team_ids_parsed.clone()) + .multi_get( + TEAMS_NAMESPACE, + team_ids_parsed.iter().map(|x| 
x.to_string()).collect(), + ) .await?; for team_raw in teams { @@ -271,6 +276,7 @@ impl TeamMember { } pub async fn clear_cache(id: TeamId, redis: &RedisPool) -> Result<(), super::DatabaseError> { + let mut redis = redis.connect().await?; redis.delete(TEAMS_NAMESPACE, id.0).await?; Ok(()) } diff --git a/src/database/models/user_item.rs b/src/database/models/user_item.rs index 5ab27abe..7b65e487 100644 --- a/src/database/models/user_item.rs +++ b/src/database/models/user_item.rs @@ -134,6 +134,8 @@ impl User { { use futures::TryStreamExt; + let mut redis = redis.connect().await?; + if users_strings.is_empty() { return Ok(Vec::new()); } @@ -151,19 +153,26 @@ impl User { user_ids.append( &mut redis - .multi_get::( + .multi_get( USER_USERNAMES_NAMESPACE, - users_strings.iter().map(|x| x.to_string().to_lowercase()), + users_strings + .iter() + .map(|x| x.to_string().to_lowercase()) + .collect(), ) .await? .into_iter() .flatten() + .filter_map(|x| x.parse::().ok()) .collect(), ); if !user_ids.is_empty() { let users = redis - .multi_get::(USERS_NAMESPACE, user_ids) + .multi_get( + USERS_NAMESPACE, + user_ids.iter().map(|x| x.to_string()).collect(), + ) .await?; for user in users { if let Some(user) = user.and_then(|x| serde_json::from_str::(&x).ok()) { @@ -239,8 +248,8 @@ impl User { redis .set( USER_USERNAMES_NAMESPACE, - user.username.to_lowercase(), - user.id.0, + &user.username.to_lowercase(), + &user.id.0.to_string(), None, ) .await?; @@ -278,8 +287,13 @@ impl User { { use futures::stream::TryStreamExt; + let mut redis = redis.connect().await?; + let cached_projects = redis - .get_deserialized_from_json::, _>(USERS_PROJECTS_NAMESPACE, user_id.0) + .get_deserialized_from_json::>( + USERS_PROJECTS_NAMESPACE, + &user_id.0.to_string(), + ) .await?; if let Some(projects) = cached_projects { @@ -384,6 +398,8 @@ impl User { user_ids: &[(UserId, Option)], redis: &RedisPool, ) -> Result<(), DatabaseError> { + let mut redis = redis.connect().await?; + redis .delete_many(user_ids.iter().flat_map(|(id, username)| { [ @@ -402,6 +418,8 @@ impl User { user_ids: &[UserId], redis: &RedisPool, ) -> Result<(), DatabaseError> { + let mut redis = redis.connect().await?; + redis .delete_many( user_ids diff --git a/src/database/models/version_item.rs b/src/database/models/version_item.rs index 565e3aae..a66ae959 100644 --- a/src/database/models/version_item.rs +++ b/src/database/models/version_item.rs @@ -529,18 +529,27 @@ impl Version { where E: sqlx::Executor<'a, Database = sqlx::Postgres>, { + use futures::stream::TryStreamExt; + if version_ids.is_empty() { return Ok(Vec::new()); } - use futures::stream::TryStreamExt; + let mut redis = redis.connect().await?; let mut version_ids_parsed: Vec = version_ids.iter().map(|x| x.0).collect(); let mut found_versions = Vec::new(); let versions = redis - .multi_get::(VERSIONS_NAMESPACE, version_ids_parsed.clone()) + .multi_get( + VERSIONS_NAMESPACE, + version_ids_parsed + .clone() + .iter() + .map(|x| x.to_string()) + .collect::>(), + ) .await?; for version in versions { @@ -732,18 +741,20 @@ impl Version { where E: sqlx::Executor<'a, Database = sqlx::Postgres> + Copy, { + use futures::stream::TryStreamExt; + + let mut redis = redis.connect().await?; + if hashes.is_empty() { return Ok(Vec::new()); } - use futures::stream::TryStreamExt; - let mut file_ids_parsed = hashes.to_vec(); let mut found_files = Vec::new(); let files = redis - .multi_get::( + .multi_get( VERSION_FILES_NAMESPACE, file_ids_parsed .iter() @@ -841,6 +852,8 @@ impl Version { version: &QueryVersion, 
redis: &RedisPool, ) -> Result<(), DatabaseError> { + let mut redis = redis.connect().await?; + redis .delete_many( iter::once((VERSIONS_NAMESPACE, Some(version.inner.id.0.to_string()))).chain( diff --git a/src/database/redis.rs b/src/database/redis.rs index 2a517264..50a27796 100644 --- a/src/database/redis.rs +++ b/src/database/redis.rs @@ -1,6 +1,9 @@ +use crate::util::redis::{redis_args, redis_execute}; + use super::models::DatabaseError; use deadpool_redis::{Config, Runtime}; -use redis::{cmd, FromRedisValue, ToRedisArgs}; +use itertools::Itertools; +use redis::cmd; use std::fmt::Display; const DEFAULT_EXPIRY: i64 = 1800; // 30 minutes @@ -11,6 +14,11 @@ pub struct RedisPool { meta_namespace: String, } +pub struct RedisConnection { + pub connection: deadpool_redis::Connection, + meta_namespace: String, +} + impl RedisPool { // initiate a new redis pool // testing pool uses a hashmap to mimic redis behaviour for very small data sizes (ie: tests) @@ -35,32 +43,39 @@ impl RedisPool { } } - pub async fn set( - &self, + pub async fn connect(&self) -> Result { + Ok(RedisConnection { + connection: self.pool.get().await?, + meta_namespace: self.meta_namespace.clone(), + }) + } +} + +impl RedisConnection { + pub async fn set( + &mut self, namespace: &str, - id: T1, - data: T2, + id: &str, + data: &str, expiry: Option, - ) -> Result<(), DatabaseError> - where - T1: Display, - T2: ToRedisArgs, - { - let mut redis_connection = self.pool.get().await?; - - cmd("SET") - .arg(format!("{}_{}:{}", self.meta_namespace, namespace, id)) - .arg(data) - .arg("EX") - .arg(expiry.unwrap_or(DEFAULT_EXPIRY)) - .query_async::<_, ()>(&mut redis_connection) - .await?; - + ) -> Result<(), DatabaseError> { + let mut cmd = cmd("SET"); + redis_args( + &mut cmd, + vec![ + format!("{}_{}:{}", self.meta_namespace, namespace, id), + data.to_string(), + "EX".to_string(), + expiry.unwrap_or(DEFAULT_EXPIRY).to_string(), + ] + .as_slice(), + ); + redis_execute(&mut cmd, &mut self.connection).await?; Ok(()) } pub async fn set_serialized_to_json( - &self, + &mut self, namespace: &str, id: Id, data: D, @@ -70,90 +85,91 @@ impl RedisPool { Id: Display, D: serde::Serialize, { - self.set(namespace, id, serde_json::to_string(&data)?, expiry) - .await + self.set( + namespace, + &id.to_string(), + &serde_json::to_string(&data)?, + expiry, + ) + .await } - pub async fn get(&self, namespace: &str, id: Id) -> Result, DatabaseError> - where - Id: Display, - R: FromRedisValue, - { - let mut redis_connection = self.pool.get().await?; - - let res = cmd("GET") - .arg(format!("{}_{}:{}", self.meta_namespace, namespace, id)) - .query_async::<_, Option>(&mut redis_connection) - .await?; + pub async fn get( + &mut self, + namespace: &str, + id: &str, + ) -> Result, DatabaseError> { + let mut cmd = cmd("GET"); + redis_args( + &mut cmd, + vec![format!("{}_{}:{}", self.meta_namespace, namespace, id)].as_slice(), + ); + let res = redis_execute(&mut cmd, &mut self.connection).await?; Ok(res) } - pub async fn get_deserialized_from_json( - &self, + pub async fn get_deserialized_from_json( + &mut self, namespace: &str, - id: Id, + id: &str, ) -> Result, DatabaseError> where - Id: Display, R: for<'a> serde::Deserialize<'a>, { Ok(self - .get::(namespace, id) + .get(namespace, id) .await? 
.and_then(|x| serde_json::from_str(&x).ok())) } - pub async fn multi_get( - &self, + pub async fn multi_get( + &mut self, namespace: &str, - ids: impl IntoIterator, - ) -> Result>, DatabaseError> - where - T1: Display, - R: FromRedisValue, - { - let mut redis_connection = self.pool.get().await?; - let res = cmd("MGET") - .arg( - ids.into_iter() - .map(|x| format!("{}_{}:{}", self.meta_namespace, namespace, x)) - .collect::>(), - ) - .query_async::<_, Vec>>(&mut redis_connection) - .await?; + ids: Vec, + ) -> Result>, DatabaseError> { + let mut cmd = cmd("MGET"); + + redis_args( + &mut cmd, + &ids.into_iter() + .map(|x| format!("{}_{}:{}", self.meta_namespace, namespace, x)) + .collect_vec(), + ); + let res: Vec> = redis_execute(&mut cmd, &mut self.connection).await?; Ok(res) } - pub async fn delete(&self, namespace: &str, id: T1) -> Result<(), DatabaseError> + pub async fn delete(&mut self, namespace: &str, id: T1) -> Result<(), DatabaseError> where T1: Display, { - let mut redis_connection = self.pool.get().await?; - - cmd("DEL") - .arg(format!("{}_{}:{}", self.meta_namespace, namespace, id)) - .query_async::<_, ()>(&mut redis_connection) - .await?; - + let mut cmd = cmd("DEL"); + redis_args( + &mut cmd, + vec![format!("{}_{}:{}", self.meta_namespace, namespace, id)].as_slice(), + ); + redis_execute(&mut cmd, &mut self.connection).await?; Ok(()) } pub async fn delete_many( - &self, + &mut self, iter: impl IntoIterator)>, ) -> Result<(), DatabaseError> { let mut cmd = cmd("DEL"); let mut any = false; for (namespace, id) in iter { if let Some(id) = id { - cmd.arg(format!("{}_{}:{}", self.meta_namespace, namespace, id)); + redis_args( + &mut cmd, + [format!("{}_{}:{}", self.meta_namespace, namespace, id)].as_slice(), + ); any = true; } } if any { - let mut redis_connection = self.pool.get().await?; - cmd.query_async::<_, ()>(&mut redis_connection).await?; + redis_execute(&mut cmd, &mut self.connection).await?; } Ok(()) diff --git a/src/util/mod.rs b/src/util/mod.rs index 74588dd7..0068413c 100644 --- a/src/util/mod.rs +++ b/src/util/mod.rs @@ -6,6 +6,7 @@ pub mod env; pub mod ext; pub mod guards; pub mod img; +pub mod redis; pub mod routes; pub mod validate; pub mod webhook; diff --git a/src/util/redis.rs b/src/util/redis.rs new file mode 100644 index 00000000..b5d33219 --- /dev/null +++ b/src/util/redis.rs @@ -0,0 +1,18 @@ +use redis::Cmd; + +pub fn redis_args(cmd: &mut Cmd, args: &[String]) { + for arg in args { + cmd.arg(arg); + } +} + +pub async fn redis_execute( + cmd: &mut Cmd, + redis: &mut deadpool_redis::Connection, +) -> Result +where + T: redis::FromRedisValue, +{ + let res = cmd.query_async::<_, T>(redis).await?; + Ok(res) +} diff --git a/tests/analytics.rs b/tests/analytics.rs index e762566f..9bd6e380 100644 --- a/tests/analytics.rs +++ b/tests/analytics.rs @@ -1,137 +1,137 @@ -use chrono::{DateTime, Duration, Utc}; -use common::database::*; -use itertools::Itertools; -use labrinth::models::ids::base62_impl::parse_base62; -use rust_decimal::{prelude::ToPrimitive, Decimal}; +// use chrono::{DateTime, Duration, Utc}; +// use common::database::*; +// use itertools::Itertools; +// use labrinth::models::ids::base62_impl::parse_base62; +// use rust_decimal::{prelude::ToPrimitive, Decimal}; -use crate::common::environment::TestEnvironment; +// use crate::common::environment::TestEnvironment; // importing common module. 
mod common; #[actix_rt::test] pub async fn analytics_revenue() { - let test_env = TestEnvironment::build(None).await; - let api = &test_env.v2; + // let test_env = TestEnvironment::build(None).await; + // let api = &test_env.v2; - let alpha_project_id = test_env - .dummy - .as_ref() - .unwrap() - .project_alpha - .project_id - .clone(); + // let alpha_project_id = test_env + // .dummy + // .as_ref() + // .unwrap() + // .project_alpha + // .project_id + // .clone(); - let pool = test_env.db.pool.clone(); + // let pool = test_env.db.pool.clone(); - // Generate sample revenue data- directly insert into sql - let (mut insert_user_ids, mut insert_project_ids, mut insert_payouts, mut insert_starts) = - (Vec::new(), Vec::new(), Vec::new(), Vec::new()); + // // Generate sample revenue data- directly insert into sql + // let (mut insert_user_ids, mut insert_project_ids, mut insert_payouts, mut insert_starts) = + // (Vec::new(), Vec::new(), Vec::new(), Vec::new()); - // Note: these go from most recent to least recent - let money_time_pairs: [(f64, DateTime); 10] = [ - (50.0, Utc::now() - Duration::minutes(5)), - (50.1, Utc::now() - Duration::minutes(10)), - (101.0, Utc::now() - Duration::days(1)), - (200.0, Utc::now() - Duration::days(2)), - (311.0, Utc::now() - Duration::days(3)), - (400.0, Utc::now() - Duration::days(4)), - (526.0, Utc::now() - Duration::days(5)), - (633.0, Utc::now() - Duration::days(6)), - (800.0, Utc::now() - Duration::days(14)), - (800.0, Utc::now() - Duration::days(800)), - ]; + // // Note: these go from most recent to least recent + // let money_time_pairs: [(f64, DateTime); 10] = [ + // (50.0, Utc::now() - Duration::minutes(5)), + // (50.1, Utc::now() - Duration::minutes(10)), + // (101.0, Utc::now() - Duration::days(1)), + // (200.0, Utc::now() - Duration::days(2)), + // (311.0, Utc::now() - Duration::days(3)), + // (400.0, Utc::now() - Duration::days(4)), + // (526.0, Utc::now() - Duration::days(5)), + // (633.0, Utc::now() - Duration::days(6)), + // (800.0, Utc::now() - Duration::days(14)), + // (800.0, Utc::now() - Duration::days(800)), + // ]; - let project_id = parse_base62(&alpha_project_id).unwrap() as i64; - for (money, time) in money_time_pairs.iter() { - insert_user_ids.push(USER_USER_ID_PARSED); - insert_project_ids.push(project_id); - insert_payouts.push(Decimal::from_f64_retain(*money).unwrap()); - insert_starts.push(*time); - } + // let project_id = parse_base62(&alpha_project_id).unwrap() as i64; + // for (money, time) in money_time_pairs.iter() { + // insert_user_ids.push(USER_USER_ID_PARSED); + // insert_project_ids.push(project_id); + // insert_payouts.push(Decimal::from_f64_retain(*money).unwrap()); + // insert_starts.push(*time); + // } - sqlx::query!( - " - INSERT INTO payouts_values (user_id, mod_id, amount, created) - SELECT * FROM UNNEST ($1::bigint[], $2::bigint[], $3::numeric[], $4::timestamptz[]) - ", - &insert_user_ids[..], - &insert_project_ids[..], - &insert_payouts[..], - &insert_starts[..] - ) - .execute(&pool) - .await - .unwrap(); + // sqlx::query!( + // " + // INSERT INTO payouts_values (user_id, mod_id, amount, created) + // SELECT * FROM UNNEST ($1::bigint[], $2::bigint[], $3::numeric[], $4::timestamptz[]) + // ", + // &insert_user_ids[..], + // &insert_project_ids[..], + // &insert_payouts[..], + // &insert_starts[..] 
+ // ) + // .execute(&pool) + // .await + // .unwrap(); - let day = 86400; + // let day = 86400; - // Test analytics endpoint with default values - // - all time points in the last 2 weeks - // - 1 day resolution - let analytics = api - .get_analytics_revenue_deserialized( - vec![&alpha_project_id], - None, - None, - None, - USER_USER_PAT, - ) - .await; - assert_eq!(analytics.len(), 1); // 1 project - let project_analytics = analytics.get(&alpha_project_id).unwrap(); - assert_eq!(project_analytics.len(), 8); // 1 days cut off, and 2 points take place on the same day. note that the day exactly 14 days ago is included - // sorted_by_key, values in the order of smallest to largest key - let (sorted_keys, sorted_by_key): (Vec, Vec) = project_analytics - .iter() - .sorted_by_key(|(k, _)| *k) - .rev() - .unzip(); - assert_eq!( - vec![100.1, 101.0, 200.0, 311.0, 400.0, 526.0, 633.0, 800.0], - to_f64_vec_rounded_up(sorted_by_key) - ); - // Ensure that the keys are in multiples of 1 day - for k in sorted_keys { - assert_eq!(k % day, 0); - } + // // Test analytics endpoint with default values + // // - all time points in the last 2 weeks + // // - 1 day resolution + // let analytics = api + // .get_analytics_revenue_deserialized( + // vec![&alpha_project_id], + // None, + // None, + // None, + // USER_USER_PAT, + // ) + // .await; + // assert_eq!(analytics.len(), 1); // 1 project + // let project_analytics = analytics.get(&alpha_project_id).unwrap(); + // assert_eq!(project_analytics.len(), 8); // 1 days cut off, and 2 points take place on the same day. note that the day exactly 14 days ago is included + // // sorted_by_key, values in the order of smallest to largest key + // let (sorted_keys, sorted_by_key): (Vec, Vec) = project_analytics + // .iter() + // .sorted_by_key(|(k, _)| *k) + // .rev() + // .unzip(); + // assert_eq!( + // vec![100.1, 101.0, 200.0, 311.0, 400.0, 526.0, 633.0, 800.0], + // to_f64_vec_rounded_up(sorted_by_key) + // ); + // // Ensure that the keys are in multiples of 1 day + // for k in sorted_keys { + // assert_eq!(k % day, 0); + // } - // Test analytics with last 900 days to include all data - // keep resolution at default - let analytics = api - .get_analytics_revenue_deserialized( - vec![&alpha_project_id], - Some(Utc::now() - Duration::days(801)), - None, - None, - USER_USER_PAT, - ) - .await; - let project_analytics = analytics.get(&alpha_project_id).unwrap(); - assert_eq!(project_analytics.len(), 9); // and 2 points take place on the same day - let (sorted_keys, sorted_by_key): (Vec, Vec) = project_analytics - .iter() - .sorted_by_key(|(k, _)| *k) - .rev() - .unzip(); - assert_eq!( - vec![100.1, 101.0, 200.0, 311.0, 400.0, 526.0, 633.0, 800.0, 800.0], - to_f64_vec_rounded_up(sorted_by_key) - ); - for k in sorted_keys { - assert_eq!(k % day, 0); - } + // // Test analytics with last 900 days to include all data + // // keep resolution at default + // let analytics = api + // .get_analytics_revenue_deserialized( + // vec![&alpha_project_id], + // Some(Utc::now() - Duration::days(801)), + // None, + // None, + // USER_USER_PAT, + // ) + // .await; + // let project_analytics = analytics.get(&alpha_project_id).unwrap(); + // assert_eq!(project_analytics.len(), 9); // and 2 points take place on the same day + // let (sorted_keys, sorted_by_key): (Vec, Vec) = project_analytics + // .iter() + // .sorted_by_key(|(k, _)| *k) + // .rev() + // .unzip(); + // assert_eq!( + // vec![100.1, 101.0, 200.0, 311.0, 400.0, 526.0, 633.0, 800.0, 800.0], + // 
to_f64_vec_rounded_up(sorted_by_key) + // ); + // for k in sorted_keys { + // assert_eq!(k % day, 0); + // } - // Cleanup test db - test_env.cleanup().await; + // // Cleanup test db + // test_env.cleanup().await; } -fn to_f64_rounded_up(d: Decimal) -> f64 { - d.round_dp_with_strategy(1, rust_decimal::RoundingStrategy::MidpointAwayFromZero) - .to_f64() - .unwrap() -} +// fn to_f64_rounded_up(d: Decimal) -> f64 { +// d.round_dp_with_strategy(1, rust_decimal::RoundingStrategy::MidpointAwayFromZero) +// .to_f64() +// .unwrap() +// } -fn to_f64_vec_rounded_up(d: Vec) -> Vec { - d.into_iter().map(to_f64_rounded_up).collect_vec() -} +// fn to_f64_vec_rounded_up(d: Vec) -> Vec { +// d.into_iter().map(to_f64_rounded_up).collect_vec() +// } diff --git a/tests/project.rs b/tests/project.rs index 2a34dae1..a2ec7fb1 100644 --- a/tests/project.rs +++ b/tests/project.rs @@ -44,20 +44,21 @@ async fn test_get_project() { assert_eq!(versions[0], json!(alpha_version_id)); // Confirm that the request was cached + let mut redis_pool = test_env.db.redis_pool.connect().await.unwrap(); assert_eq!( - test_env - .db - .redis_pool - .get::(PROJECTS_SLUGS_NAMESPACE, alpha_project_slug) + redis_pool + .get(PROJECTS_SLUGS_NAMESPACE, alpha_project_slug) .await - .unwrap(), + .unwrap() + .and_then(|x| x.parse::().ok()), Some(parse_base62(alpha_project_id).unwrap() as i64) ); - let cached_project = test_env - .db - .redis_pool - .get::(PROJECTS_NAMESPACE, parse_base62(alpha_project_id).unwrap()) + let cached_project = redis_pool + .get( + PROJECTS_NAMESPACE, + &parse_base62(alpha_project_id).unwrap().to_string(), + ) .await .unwrap() .unwrap(); @@ -256,22 +257,21 @@ async fn test_add_remove_project() { assert_eq!(resp.status(), 204); // Confirm that the project is gone from the cache + let mut redis_pool = test_env.db.redis_pool.connect().await.unwrap(); assert_eq!( - test_env - .db - .redis_pool - .get::(PROJECTS_SLUGS_NAMESPACE, "demo") + redis_pool + .get(PROJECTS_SLUGS_NAMESPACE, "demo") .await - .unwrap(), + .unwrap() + .and_then(|x| x.parse::().ok()), None ); assert_eq!( - test_env - .db - .redis_pool - .get::(PROJECTS_SLUGS_NAMESPACE, id) + redis_pool + .get(PROJECTS_SLUGS_NAMESPACE, &id) .await - .unwrap(), + .unwrap() + .and_then(|x| x.parse::().ok()), None ); From fb6a92696b313bb01233fb92b9cacf0dabf61525 Mon Sep 17 00:00:00 2001 From: Wyatt Verchere Date: Thu, 9 Nov 2023 18:37:34 -0800 Subject: [PATCH 2/4] toml; reverted unnecessary changes --- Cargo.toml | 6 +++++ src/database/models/collection_item.rs | 4 +-- src/database/models/image_item.rs | 4 +-- src/database/models/organization_item.rs | 7 +++-- src/database/models/pat_item.rs | 9 +++---- src/database/models/project_item.rs | 10 +++---- src/database/models/session_item.rs | 9 +++---- src/database/models/team_item.rs | 4 +-- src/database/models/user_item.rs | 10 +++---- src/database/models/version_item.rs | 4 +-- src/database/redis.rs | 34 +++++++++++++++++++----- src/models/images.rs | 7 +++++ 12 files changed, 67 insertions(+), 41 deletions(-) diff --git a/Cargo.toml b/Cargo.toml index bb38733c..040ed0c2 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -109,3 +109,9 @@ derive-new = "0.5.9" [dev-dependencies] actix-http = "3.4.0" + +[profile.dev] +opt-level = 0 # Minimal optimization, speeds up compilation +lto = false # Disables Link Time Optimization +incremental = true # Enables incremental compilation +codegen-units = 16 # Higher number can improve compile times but reduce runtime performance diff --git a/src/database/models/collection_item.rs 
b/src/database/models/collection_item.rs index e0b17d1b..4a4f7424 100644 --- a/src/database/models/collection_item.rs +++ b/src/database/models/collection_item.rs @@ -168,9 +168,9 @@ impl Collection { if !collection_ids.is_empty() { let collections = redis - .multi_get( + .multi_get::( COLLECTIONS_NAMESPACE, - collection_ids.iter().map(|x| x.0.to_string()).collect(), + collection_ids.iter().map(|x| x.0.to_string()), ) .await?; diff --git a/src/database/models/image_item.rs b/src/database/models/image_item.rs index ac33caa3..28679eb2 100644 --- a/src/database/models/image_item.rs +++ b/src/database/models/image_item.rs @@ -192,9 +192,9 @@ impl Image { if !image_ids.is_empty() { let images = redis - .multi_get( + .multi_get::( IMAGES_NAMESPACE, - image_ids.iter().map(|x| x.to_string()).collect(), + image_ids.iter().map(|x| x.to_string()), ) .await?; for image in images { diff --git a/src/database/models/organization_item.rs b/src/database/models/organization_item.rs index b802c85f..137d7ae0 100644 --- a/src/database/models/organization_item.rs +++ b/src/database/models/organization_item.rs @@ -122,7 +122,7 @@ impl Organization { organization_ids.append( &mut redis - .multi_get( + .multi_get::( ORGANIZATIONS_TITLES_NAMESPACE, organization_strings .iter() @@ -132,15 +132,14 @@ impl Organization { .await? .into_iter() .flatten() - .filter_map(|x| x.parse::().ok()) .collect(), ); if !organization_ids.is_empty() { let organizations = redis - .multi_get( + .multi_get::( ORGANIZATIONS_NAMESPACE, - organization_ids.iter().map(|x| x.to_string()).collect(), + organization_ids.iter().map(|x| x.to_string()), ) .await?; diff --git a/src/database/models/pat_item.rs b/src/database/models/pat_item.rs index 4571ca81..bcab326d 100644 --- a/src/database/models/pat_item.rs +++ b/src/database/models/pat_item.rs @@ -108,22 +108,21 @@ impl PersonalAccessToken { pat_ids.append( &mut redis - .multi_get( + .multi_get::( PATS_TOKENS_NAMESPACE, - pat_strings.iter().map(|x| x.to_string()).collect(), + pat_strings.iter().map(|x| x.to_string()), ) .await? .into_iter() .flatten() - .filter_map(|x| x.parse::().ok()) .collect(), ); if !pat_ids.is_empty() { let pats = redis - .multi_get( + .multi_get::( PATS_NAMESPACE, - pat_ids.iter().map(|x| x.to_string()).collect(), + pat_ids.iter().map(|x| x.to_string()), ) .await?; for pat in pats { diff --git a/src/database/models/project_item.rs b/src/database/models/project_item.rs index a09f32a4..061904f0 100644 --- a/src/database/models/project_item.rs +++ b/src/database/models/project_item.rs @@ -545,25 +545,23 @@ impl Project { project_ids.append( &mut redis - .multi_get( + .multi_get::( PROJECTS_SLUGS_NAMESPACE, project_strings .iter() - .map(|x| x.to_string().to_lowercase()) - .collect(), + .map(|x| x.to_string().to_lowercase()), ) .await? 
.into_iter() .flatten() - .filter_map(|x| x.parse::().ok()) .collect(), ); if !project_ids.is_empty() { let projects = redis - .multi_get( + .multi_get::( PROJECTS_NAMESPACE, - project_ids.iter().map(|x| x.to_string()).collect(), + project_ids.iter().map(|x| x.to_string()), ) .await?; for project in projects { diff --git a/src/database/models/session_item.rs b/src/database/models/session_item.rs index 815cfbca..f27af5bb 100644 --- a/src/database/models/session_item.rs +++ b/src/database/models/session_item.rs @@ -149,22 +149,21 @@ impl Session { session_ids.append( &mut redis - .multi_get( + .multi_get::( SESSIONS_IDS_NAMESPACE, - session_strings.iter().map(|x| x.to_string()).collect(), + session_strings.iter().map(|x| x.to_string()), ) .await? .into_iter() .flatten() - .filter_map(|x| x.parse::().ok()) .collect(), ); if !session_ids.is_empty() { let sessions = redis - .multi_get( + .multi_get::( SESSIONS_NAMESPACE, - session_ids.iter().map(|x| x.to_string()).collect(), + session_ids.iter().map(|x| x.to_string()), ) .await?; for session in sessions { diff --git a/src/database/models/team_item.rs b/src/database/models/team_item.rs index 41057c3e..a0a92f70 100644 --- a/src/database/models/team_item.rs +++ b/src/database/models/team_item.rs @@ -210,9 +210,9 @@ impl TeamMember { let mut found_teams = Vec::new(); let teams = redis - .multi_get( + .multi_get::( TEAMS_NAMESPACE, - team_ids_parsed.iter().map(|x| x.to_string()).collect(), + team_ids_parsed.iter().map(|x| x.to_string()), ) .await?; diff --git a/src/database/models/user_item.rs b/src/database/models/user_item.rs index 7b65e487..322e7fb1 100644 --- a/src/database/models/user_item.rs +++ b/src/database/models/user_item.rs @@ -153,25 +153,23 @@ impl User { user_ids.append( &mut redis - .multi_get( + .multi_get::( USER_USERNAMES_NAMESPACE, users_strings .iter() - .map(|x| x.to_string().to_lowercase()) - .collect(), + .map(|x| x.to_string().to_lowercase()), ) .await? 
.into_iter() .flatten() - .filter_map(|x| x.parse::().ok()) .collect(), ); if !user_ids.is_empty() { let users = redis - .multi_get( + .multi_get::( USERS_NAMESPACE, - user_ids.iter().map(|x| x.to_string()).collect(), + user_ids.iter().map(|x| x.to_string()), ) .await?; for user in users { diff --git a/src/database/models/version_item.rs b/src/database/models/version_item.rs index a66ae959..c9d067c2 100644 --- a/src/database/models/version_item.rs +++ b/src/database/models/version_item.rs @@ -542,7 +542,7 @@ impl Version { let mut found_versions = Vec::new(); let versions = redis - .multi_get( + .multi_get::( VERSIONS_NAMESPACE, version_ids_parsed .clone() @@ -754,7 +754,7 @@ impl Version { let mut found_files = Vec::new(); let files = redis - .multi_get( + .multi_get::( VERSION_FILES_NAMESPACE, file_ids_parsed .iter() diff --git a/src/database/redis.rs b/src/database/redis.rs index 50a27796..a2fd82a7 100644 --- a/src/database/redis.rs +++ b/src/database/redis.rs @@ -1,9 +1,7 @@ -use crate::util::redis::{redis_args, redis_execute}; - use super::models::DatabaseError; use deadpool_redis::{Config, Runtime}; use itertools::Itertools; -use redis::cmd; +use redis::{cmd, Cmd}; use std::fmt::Display; const DEFAULT_EXPIRY: i64 = 1800; // 30 minutes @@ -122,11 +120,14 @@ impl RedisConnection { .and_then(|x| serde_json::from_str(&x).ok())) } - pub async fn multi_get( + pub async fn multi_get( &mut self, namespace: &str, - ids: Vec, - ) -> Result>, DatabaseError> { + ids: impl IntoIterator, + ) -> Result>, DatabaseError> + where + R: for<'a> serde::Deserialize<'a>, + { let mut cmd = cmd("MGET"); redis_args( @@ -136,7 +137,9 @@ impl RedisConnection { .collect_vec(), ); let res: Vec> = redis_execute(&mut cmd, &mut self.connection).await?; - Ok(res) + Ok(res.into_iter() + .map(|x| x.and_then(|x| serde_json::from_str(&x).ok())) + .collect()) } pub async fn delete(&mut self, namespace: &str, id: T1) -> Result<(), DatabaseError> @@ -175,3 +178,20 @@ impl RedisConnection { Ok(()) } } + +pub fn redis_args(cmd: &mut Cmd, args: &[String]) { + for arg in args { + cmd.arg(arg); + } +} + +pub async fn redis_execute( + cmd: &mut Cmd, + redis: &mut deadpool_redis::Connection, +) -> Result +where + T: redis::FromRedisValue, +{ + let res = cmd.query_async::<_, T>(redis).await?; + Ok(res) +} diff --git a/src/models/images.rs b/src/models/images.rs index 1cff0481..f2f93d24 100644 --- a/src/models/images.rs +++ b/src/models/images.rs @@ -13,6 +13,13 @@ use serde::{Deserialize, Serialize}; #[serde(into = "Base62Id")] pub struct ImageId(pub u64); + + + + + + + #[derive(Serialize, Deserialize)] pub struct Image { pub id: ImageId, From a6a410a646a10395f9bd5a5e74716884ffb7813a Mon Sep 17 00:00:00 2001 From: thesuzerain Date: Thu, 16 Nov 2023 16:22:18 -0800 Subject: [PATCH 3/4] merge issues --- src/database/models/image_item.rs | 5 +- src/database/models/loader_fields.rs | 12 +- src/database/models/pat_item.rs | 5 +- src/database/models/project_item.rs | 4 +- src/database/models/user_item.rs | 9 +- src/database/redis.rs | 7 +- src/models/v3/images.rs | 7 - tests/analytics.rs | 218 +++++++++++++-------------- tests/v2/project.rs | 19 ++- tests/version.rs | 10 +- 10 files changed, 143 insertions(+), 153 deletions(-) diff --git a/src/database/models/image_item.rs b/src/database/models/image_item.rs index 28679eb2..68477304 100644 --- a/src/database/models/image_item.rs +++ b/src/database/models/image_item.rs @@ -192,10 +192,7 @@ impl Image { if !image_ids.is_empty() { let images = redis - .multi_get::( - IMAGES_NAMESPACE, - 
image_ids.iter().map(|x| x.to_string()), - ) + .multi_get::(IMAGES_NAMESPACE, image_ids.iter().map(|x| x.to_string())) .await?; for image in images { if let Some(image) = image.and_then(|x| serde_json::from_str::(&x).ok()) { diff --git a/src/database/models/loader_fields.rs b/src/database/models/loader_fields.rs index a4b101c3..63e1d6fd 100644 --- a/src/database/models/loader_fields.rs +++ b/src/database/models/loader_fields.rs @@ -44,6 +44,7 @@ impl Game { where E: sqlx::Executor<'a, Database = sqlx::Postgres>, { + let mut redis = redis.connect().await?; let cached_games: Option> = redis .get_deserialized_from_json(GAMES_LIST_NAMESPACE, "games") .await?; @@ -95,6 +96,7 @@ impl Loader { where E: sqlx::Executor<'a, Database = sqlx::Postgres>, { + let mut redis = redis.connect().await?; let cached_id: Option = redis.get_deserialized_from_json(LOADER_ID, name).await?; if let Some(cached_id) = cached_id { return Ok(Some(LoaderId(cached_id))); @@ -124,6 +126,7 @@ impl Loader { where E: sqlx::Executor<'a, Database = sqlx::Postgres>, { + let mut redis = redis.connect().await?; let cached_loaders: Option> = redis .get_deserialized_from_json(LOADERS_LIST_NAMESPACE, "all") .await?; @@ -318,9 +321,11 @@ impl LoaderField { { type RedisLoaderFieldTuple = (LoaderId, Vec); + let mut redis = redis.connect().await?; + let mut loader_ids = loader_ids.to_vec(); let cached_fields: Vec = redis - .multi_get::(LOADER_FIELDS_NAMESPACE, loader_ids.iter().map(|x| x.0)) + .multi_get::(LOADER_FIELDS_NAMESPACE, loader_ids.iter().map(|x| x.0)) .await? .into_iter() .flatten() @@ -399,6 +404,8 @@ impl LoaderFieldEnum { where E: sqlx::Executor<'a, Database = sqlx::Postgres>, { + let mut redis = redis.connect().await?; + let cached_enum = redis .get_deserialized_from_json(LOADER_FIELD_ENUMS_ID_NAMESPACE, enum_name) .await?; @@ -488,12 +495,13 @@ impl LoaderFieldEnumValue { where E: sqlx::Executor<'a, Database = sqlx::Postgres>, { + let mut redis = redis.connect().await?; let mut found_enums = Vec::new(); let mut remaining_enums: Vec = loader_field_enum_ids.to_vec(); if !remaining_enums.is_empty() { let enums = redis - .multi_get::( + .multi_get::( LOADER_FIELD_ENUM_VALUES_NAMESPACE, loader_field_enum_ids.iter().map(|x| x.0), ) diff --git a/src/database/models/pat_item.rs b/src/database/models/pat_item.rs index bcab326d..9352d637 100644 --- a/src/database/models/pat_item.rs +++ b/src/database/models/pat_item.rs @@ -120,10 +120,7 @@ impl PersonalAccessToken { if !pat_ids.is_empty() { let pats = redis - .multi_get::( - PATS_NAMESPACE, - pat_ids.iter().map(|x| x.to_string()), - ) + .multi_get::(PATS_NAMESPACE, pat_ids.iter().map(|x| x.to_string())) .await?; for pat in pats { if let Some(pat) = diff --git a/src/database/models/project_item.rs b/src/database/models/project_item.rs index 6e554fe3..6be0f01b 100644 --- a/src/database/models/project_item.rs +++ b/src/database/models/project_item.rs @@ -530,9 +530,7 @@ impl Project { &mut redis .multi_get::( PROJECTS_SLUGS_NAMESPACE, - project_strings - .iter() - .map(|x| x.to_string().to_lowercase()), + project_strings.iter().map(|x| x.to_string().to_lowercase()), ) .await? 
.into_iter() diff --git a/src/database/models/user_item.rs b/src/database/models/user_item.rs index 322e7fb1..8230ff58 100644 --- a/src/database/models/user_item.rs +++ b/src/database/models/user_item.rs @@ -155,9 +155,7 @@ impl User { &mut redis .multi_get::( USER_USERNAMES_NAMESPACE, - users_strings - .iter() - .map(|x| x.to_string().to_lowercase()), + users_strings.iter().map(|x| x.to_string().to_lowercase()), ) .await? .into_iter() @@ -167,10 +165,7 @@ impl User { if !user_ids.is_empty() { let users = redis - .multi_get::( - USERS_NAMESPACE, - user_ids.iter().map(|x| x.to_string()), - ) + .multi_get::(USERS_NAMESPACE, user_ids.iter().map(|x| x.to_string())) .await?; for user in users { if let Some(user) = user.and_then(|x| serde_json::from_str::(&x).ok()) { diff --git a/src/database/redis.rs b/src/database/redis.rs index a2fd82a7..f121e3e9 100644 --- a/src/database/redis.rs +++ b/src/database/redis.rs @@ -124,8 +124,8 @@ impl RedisConnection { &mut self, namespace: &str, ids: impl IntoIterator, - ) -> Result>, DatabaseError> - where + ) -> Result>, DatabaseError> + where R: for<'a> serde::Deserialize<'a>, { let mut cmd = cmd("MGET"); @@ -137,7 +137,8 @@ impl RedisConnection { .collect_vec(), ); let res: Vec> = redis_execute(&mut cmd, &mut self.connection).await?; - Ok(res.into_iter() + Ok(res + .into_iter() .map(|x| x.and_then(|x| serde_json::from_str(&x).ok())) .collect()) } diff --git a/src/models/v3/images.rs b/src/models/v3/images.rs index f2f93d24..1cff0481 100644 --- a/src/models/v3/images.rs +++ b/src/models/v3/images.rs @@ -13,13 +13,6 @@ use serde::{Deserialize, Serialize}; #[serde(into = "Base62Id")] pub struct ImageId(pub u64); - - - - - - - #[derive(Serialize, Deserialize)] pub struct Image { pub id: ImageId, diff --git a/tests/analytics.rs b/tests/analytics.rs index 84466d5a..bc3d80d4 100644 --- a/tests/analytics.rs +++ b/tests/analytics.rs @@ -12,124 +12,124 @@ pub async fn analytics_revenue() { let test_env = TestEnvironment::build(None).await; let api = &test_env.v3; - // let alpha_project_id = test_env - // .dummy - // .as_ref() - // .unwrap() - // .project_alpha - // .project_id - // .clone(); + let alpha_project_id = test_env + .dummy + .as_ref() + .unwrap() + .project_alpha + .project_id + .clone(); - // let pool = test_env.db.pool.clone(); + let pool = test_env.db.pool.clone(); - // // Generate sample revenue data- directly insert into sql - // let (mut insert_user_ids, mut insert_project_ids, mut insert_payouts, mut insert_starts) = - // (Vec::new(), Vec::new(), Vec::new(), Vec::new()); + // Generate sample revenue data- directly insert into sql + let (mut insert_user_ids, mut insert_project_ids, mut insert_payouts, mut insert_starts) = + (Vec::new(), Vec::new(), Vec::new(), Vec::new()); - // // Note: these go from most recent to least recent - // let money_time_pairs: [(f64, DateTime); 10] = [ - // (50.0, Utc::now() - Duration::minutes(5)), - // (50.1, Utc::now() - Duration::minutes(10)), - // (101.0, Utc::now() - Duration::days(1)), - // (200.0, Utc::now() - Duration::days(2)), - // (311.0, Utc::now() - Duration::days(3)), - // (400.0, Utc::now() - Duration::days(4)), - // (526.0, Utc::now() - Duration::days(5)), - // (633.0, Utc::now() - Duration::days(6)), - // (800.0, Utc::now() - Duration::days(14)), - // (800.0, Utc::now() - Duration::days(800)), - // ]; + // Note: these go from most recent to least recent + let money_time_pairs: [(f64, DateTime); 10] = [ + (50.0, Utc::now() - Duration::minutes(5)), + (50.1, Utc::now() - Duration::minutes(10)), + (101.0, 
Utc::now() - Duration::days(1)), + (200.0, Utc::now() - Duration::days(2)), + (311.0, Utc::now() - Duration::days(3)), + (400.0, Utc::now() - Duration::days(4)), + (526.0, Utc::now() - Duration::days(5)), + (633.0, Utc::now() - Duration::days(6)), + (800.0, Utc::now() - Duration::days(14)), + (800.0, Utc::now() - Duration::days(800)), + ]; - // let project_id = parse_base62(&alpha_project_id).unwrap() as i64; - // for (money, time) in money_time_pairs.iter() { - // insert_user_ids.push(USER_USER_ID_PARSED); - // insert_project_ids.push(project_id); - // insert_payouts.push(Decimal::from_f64_retain(*money).unwrap()); - // insert_starts.push(*time); - // } + let project_id = parse_base62(&alpha_project_id).unwrap() as i64; + for (money, time) in money_time_pairs.iter() { + insert_user_ids.push(USER_USER_ID_PARSED); + insert_project_ids.push(project_id); + insert_payouts.push(Decimal::from_f64_retain(*money).unwrap()); + insert_starts.push(*time); + } - // sqlx::query!( - // " - // INSERT INTO payouts_values (user_id, mod_id, amount, created) - // SELECT * FROM UNNEST ($1::bigint[], $2::bigint[], $3::numeric[], $4::timestamptz[]) - // ", - // &insert_user_ids[..], - // &insert_project_ids[..], - // &insert_payouts[..], - // &insert_starts[..] - // ) - // .execute(&pool) - // .await - // .unwrap(); + sqlx::query!( + " + INSERT INTO payouts_values (user_id, mod_id, amount, created) + SELECT * FROM UNNEST ($1::bigint[], $2::bigint[], $3::numeric[], $4::timestamptz[]) + ", + &insert_user_ids[..], + &insert_project_ids[..], + &insert_payouts[..], + &insert_starts[..] + ) + .execute(&pool) + .await + .unwrap(); - // let day = 86400; + let day = 86400; - // // Test analytics endpoint with default values - // // - all time points in the last 2 weeks - // // - 1 day resolution - // let analytics = api - // .get_analytics_revenue_deserialized( - // vec![&alpha_project_id], - // None, - // None, - // None, - // USER_USER_PAT, - // ) - // .await; - // assert_eq!(analytics.len(), 1); // 1 project - // let project_analytics = analytics.get(&alpha_project_id).unwrap(); - // assert_eq!(project_analytics.len(), 8); // 1 days cut off, and 2 points take place on the same day. note that the day exactly 14 days ago is included - // // sorted_by_key, values in the order of smallest to largest key - // let (sorted_keys, sorted_by_key): (Vec, Vec) = project_analytics - // .iter() - // .sorted_by_key(|(k, _)| *k) - // .rev() - // .unzip(); - // assert_eq!( - // vec![100.1, 101.0, 200.0, 311.0, 400.0, 526.0, 633.0, 800.0], - // to_f64_vec_rounded_up(sorted_by_key) - // ); - // // Ensure that the keys are in multiples of 1 day - // for k in sorted_keys { - // assert_eq!(k % day, 0); - // } + // Test analytics endpoint with default values + // - all time points in the last 2 weeks + // - 1 day resolution + let analytics = api + .get_analytics_revenue_deserialized( + vec![&alpha_project_id], + None, + None, + None, + USER_USER_PAT, + ) + .await; + assert_eq!(analytics.len(), 1); // 1 project + let project_analytics = analytics.get(&alpha_project_id).unwrap(); + assert_eq!(project_analytics.len(), 8); // 1 days cut off, and 2 points take place on the same day. 
note that the day exactly 14 days ago is included + // sorted_by_key, values in the order of smallest to largest key + let (sorted_keys, sorted_by_key): (Vec, Vec) = project_analytics + .iter() + .sorted_by_key(|(k, _)| *k) + .rev() + .unzip(); + assert_eq!( + vec![100.1, 101.0, 200.0, 311.0, 400.0, 526.0, 633.0, 800.0], + to_f64_vec_rounded_up(sorted_by_key) + ); + // Ensure that the keys are in multiples of 1 day + for k in sorted_keys { + assert_eq!(k % day, 0); + } - // // Test analytics with last 900 days to include all data - // // keep resolution at default - // let analytics = api - // .get_analytics_revenue_deserialized( - // vec![&alpha_project_id], - // Some(Utc::now() - Duration::days(801)), - // None, - // None, - // USER_USER_PAT, - // ) - // .await; - // let project_analytics = analytics.get(&alpha_project_id).unwrap(); - // assert_eq!(project_analytics.len(), 9); // and 2 points take place on the same day - // let (sorted_keys, sorted_by_key): (Vec, Vec) = project_analytics - // .iter() - // .sorted_by_key(|(k, _)| *k) - // .rev() - // .unzip(); - // assert_eq!( - // vec![100.1, 101.0, 200.0, 311.0, 400.0, 526.0, 633.0, 800.0, 800.0], - // to_f64_vec_rounded_up(sorted_by_key) - // ); - // for k in sorted_keys { - // assert_eq!(k % day, 0); - // } + // Test analytics with last 900 days to include all data + // keep resolution at default + let analytics = api + .get_analytics_revenue_deserialized( + vec![&alpha_project_id], + Some(Utc::now() - Duration::days(801)), + None, + None, + USER_USER_PAT, + ) + .await; + let project_analytics = analytics.get(&alpha_project_id).unwrap(); + assert_eq!(project_analytics.len(), 9); // and 2 points take place on the same day + let (sorted_keys, sorted_by_key): (Vec, Vec) = project_analytics + .iter() + .sorted_by_key(|(k, _)| *k) + .rev() + .unzip(); + assert_eq!( + vec![100.1, 101.0, 200.0, 311.0, 400.0, 526.0, 633.0, 800.0, 800.0], + to_f64_vec_rounded_up(sorted_by_key) + ); + for k in sorted_keys { + assert_eq!(k % day, 0); + } - // // Cleanup test db - // test_env.cleanup().await; + // Cleanup test db + test_env.cleanup().await; } -// fn to_f64_rounded_up(d: Decimal) -> f64 { -// d.round_dp_with_strategy(1, rust_decimal::RoundingStrategy::MidpointAwayFromZero) -// .to_f64() -// .unwrap() -// } +fn to_f64_rounded_up(d: Decimal) -> f64 { + d.round_dp_with_strategy(1, rust_decimal::RoundingStrategy::MidpointAwayFromZero) + .to_f64() + .unwrap() +} -// fn to_f64_vec_rounded_up(d: Vec) -> Vec { -// d.into_iter().map(to_f64_rounded_up).collect_vec() -// } +fn to_f64_vec_rounded_up(d: Vec) -> Vec { + d.into_iter().map(to_f64_rounded_up).collect_vec() +} diff --git a/tests/v2/project.rs b/tests/v2/project.rs index 609b8481..7e56d3a6 100644 --- a/tests/v2/project.rs +++ b/tests/v2/project.rs @@ -221,22 +221,21 @@ async fn test_add_remove_project() { assert_eq!(resp.status(), 204); // Confirm that the project is gone from the cache + let mut redis_conn = test_env.db.redis_pool.connect().await.unwrap(); assert_eq!( - test_env - .db - .redis_pool - .get::(PROJECTS_SLUGS_NAMESPACE, "demo") + redis_conn + .get(PROJECTS_SLUGS_NAMESPACE, "demo") .await - .unwrap(), + .unwrap() + .map(|x| x.parse::().unwrap()), None ); assert_eq!( - test_env - .db - .redis_pool - .get::(PROJECTS_SLUGS_NAMESPACE, id) + redis_conn + .get(PROJECTS_SLUGS_NAMESPACE, &id) .await - .unwrap(), + .unwrap() + .map(|x| x.parse::().unwrap()), None ); diff --git a/tests/version.rs b/tests/version.rs index 665cbf58..57c5c710 100644 --- a/tests/version.rs +++ b/tests/version.rs @@ 
-33,10 +33,12 @@ async fn test_get_version() {
     assert_eq!(&version.project_id.to_string(), alpha_project_id);
     assert_eq!(&version.id.to_string(), alpha_version_id);
 
-    let cached_project = test_env
-        .db
-        .redis_pool
-        .get::<String, _>(VERSIONS_NAMESPACE, parse_base62(alpha_version_id).unwrap())
+    let mut redis_conn = test_env.db.redis_pool.connect().await.unwrap();
+    let cached_project = redis_conn
+        .get(
+            VERSIONS_NAMESPACE,
+            &parse_base62(alpha_version_id).unwrap().to_string(),
+        )
         .await
         .unwrap()
         .unwrap();

From ee7b2c0574cf941386b409baa396fcd6d8641135 Mon Sep 17 00:00:00 2001
From: thesuzerain
Date: Thu, 16 Nov 2023 22:38:55 -0800
Subject: [PATCH 4/4] increased test connections

---
 tests/search.rs    | 2 +-
 tests/v2/search.rs | 2 +-
 2 files changed, 2 insertions(+), 2 deletions(-)

diff --git a/tests/search.rs b/tests/search.rs
index 36483547..120aedd6 100644
--- a/tests/search.rs
+++ b/tests/search.rs
@@ -20,7 +20,7 @@ mod common;
 #[actix_rt::test]
 async fn search_projects() {
     // Test setup and dummy data
-    let test_env = TestEnvironment::build(Some(8)).await;
+    let test_env = TestEnvironment::build(Some(10)).await;
     let api = &test_env.v3;
     let test_name = test_env.db.database_name.clone();
 
diff --git a/tests/v2/search.rs b/tests/v2/search.rs
index fbe39ca6..1e3ccbdf 100644
--- a/tests/v2/search.rs
+++ b/tests/v2/search.rs
@@ -17,7 +17,7 @@ async fn search_projects() {
     // It should drastically simplify this function
 
     // Test setup and dummy data
-    let test_env = TestEnvironment::build(Some(8)).await;
+    let test_env = TestEnvironment::build(Some(10)).await;
     let api = &test_env.v2;
     let test_name = test_env.db.database_name.clone();
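
Net API change across this series, summarized: redis commands no longer go through RedisPool directly. Callers first check out a RedisConnection with RedisPool::connect() and reuse it for every command in the operation, keys and raw values are plain &str, and (as of PATCH 2/4) multi_get deserializes each hit from JSON itself instead of going through FromRedisValue. Below is a minimal sketch of the resulting call pattern, assuming the crate exports database::redis::RedisPool and database::models::DatabaseError as the tests above do; the Profile type, PROFILES_NAMESPACE constant, and cache_roundtrip function are illustrative stand-ins, not items from this series.

use labrinth::database::models::DatabaseError;
use labrinth::database::redis::RedisPool;
use serde::{Deserialize, Serialize};

// Hypothetical cache entry and namespace, for illustration only.
const PROFILES_NAMESPACE: &str = "profiles";

#[derive(Serialize, Deserialize)]
struct Profile {
    id: u64,
    username: String,
}

async fn cache_roundtrip(
    pool: &RedisPool,
    profile: &Profile,
) -> Result<Option<Profile>, DatabaseError> {
    // Check out one pooled connection and reuse it for every command below.
    let mut redis = pool.connect().await?;

    // Store the value as JSON under "{meta}_profiles:{id}", with the
    // default 30-minute expiry (DEFAULT_EXPIRY) when `None` is passed.
    redis
        .set_serialized_to_json(PROFILES_NAMESPACE, profile.id, profile, None)
        .await?;

    // Single-key read: `get` returns the raw Option<String>; the JSON
    // helper layers serde on top of it.
    let one: Option<Profile> = redis
        .get_deserialized_from_json(PROFILES_NAMESPACE, &profile.id.to_string())
        .await?;

    // Batched read: `multi_get` maps each hit through serde_json, so a
    // cache miss or an undeserializable entry comes back as None.
    let many: Vec<Option<Profile>> = redis
        .multi_get::<Profile>(PROFILES_NAMESPACE, [profile.id])
        .await?;

    Ok(one.or_else(|| many.into_iter().flatten().next()))
}

The connection checkout is the point of the refactor: before PATCH 1/4, every helper on RedisPool called self.pool.get().await? per command, so a single cache fill could acquire several pooled connections; threading one RedisConnection through an operation keeps it to one.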