From d540f6d3608ecca1fdd3d7cb025e92da84daa783 Mon Sep 17 00:00:00 2001 From: Matthew Date: Thu, 23 Feb 2023 01:18:04 -0800 Subject: [PATCH 1/7] WIP: adds route to save content to a user profile --- src/main.rs | 25 +++++++++++ src/services/mod.rs | 1 + src/services/posts.rs | 2 +- src/services/saved.rs | 96 +++++++++++++++++++++++++++++++++++++++++++ src/types.rs | 16 ++++++++ 5 files changed, 139 insertions(+), 1 deletion(-) create mode 100644 src/services/saved.rs diff --git a/src/main.rs b/src/main.rs index 9660dbf..c650463 100644 --- a/src/main.rs +++ b/src/main.rs @@ -43,6 +43,7 @@ use mongodb::options::{ ReadConcernLevel, UpdateOptions, }; +use types::SavedContent; use crate::masked_oid::MaskingKey; use crate::middleware::HostCheckWrap; @@ -62,6 +63,7 @@ async fn initialize_database(db: &Database) -> mongodb::error::Result<()> { let sessions = db.collection::("sessions"); let posts = db.collection::("posts"); let votes = db.collection::("votes"); + let saved = db.collection::("saved"); try_join!( users.create_index( @@ -82,6 +84,28 @@ async fn initialize_database(db: &Database) -> mongodb::error::Result<()> { None, ), + // Create index on `user_id` field. + saved.create_index( + IndexModel::builder() + .keys(doc! {"user_id": 1}) + .build(), + None, + ), + + // Create a unique index on `content_id` field (unique so there) + // can't be any duplicate content saved. + saved.create_index( + IndexModel::builder() + .keys(doc! {"content_id": 1}) + .options( + IndexOptions::builder() + .unique(true) + .build() + ) + .build(), + None, + ), + sessions.create_index( IndexModel::builder() .keys(doc! {"user": 1}) @@ -235,6 +259,7 @@ async fn main() -> Result<(), Box> { .service(services::profile::update_profile) .service(services::profile::get_profile) .service(services::posts::get_single_post) + .service(services::saved::save_content) }) .bind(("0.0.0.0", 3000))? .run() diff --git a/src/services/mod.rs b/src/services/mod.rs index 126bb09..8f29a8e 100644 --- a/src/services/mod.rs +++ b/src/services/mod.rs @@ -1,6 +1,7 @@ pub mod auth; pub mod posts; pub mod profile; +pub mod saved; use actix_web::{ HttpRequest, diff --git a/src/services/posts.rs b/src/services/posts.rs index ab6b879..abf4fda 100644 --- a/src/services/posts.rs +++ b/src/services/posts.rs @@ -107,7 +107,7 @@ pub async fn get_single_post( // Query the database for the post. let possible_post = db.collection::("posts").find_one(doc! {"_id": post_id}, None).await; let post: Post; - // Return 400 if the post doesn't exist, 500 if there's a query error, or the [`Detail`] post itself + // Return 400 if the post doesn't exist, 500 if there's a query error, or the [`Detail`] post itself // if everything works. 
match possible_post { Ok(possible_post) => match possible_post { diff --git a/src/services/saved.rs b/src/services/saved.rs new file mode 100644 index 0000000..3d00983 --- /dev/null +++ b/src/services/saved.rs @@ -0,0 +1,96 @@ +use actix_web::{ + get, + post, + delete +}; +use actix_web::web; +use mongodb::{ + Database, +}; +use mongodb::bson::{ + DateTime, + Document, + doc, to_bson, +}; +use mongodb::error::{ + ErrorKind, + WriteFailure, +}; +use serde::{Deserialize}; + +use crate::auth::AuthenticatedUser; +use crate::api_types::{ + ApiResult, + Failure, + success, +}; +use crate::masked_oid::{ + self, + MaskingKey, + MaskedObjectId, +}; +use crate::types::{ + SavedType, +}; + + +#[derive(Deserialize)] +pub struct CreateSavedContent { + pub content_type: SavedType, + pub content_id: MaskedObjectId, +} + +// TODO: ensure proper indicies are used +#[post("/users/saved/")] +pub async fn save_content( + db: web::Data, + user: AuthenticatedUser, + request: web::Json, + masking_key: web::Data<&'static MaskingKey>, +) -> ApiResult<(), ()> { + + let content_type_bson = to_bson(&request.content_type).map_err(|_| Failure::Unexpected)?; + + let content_object_id = masking_key.unmask(&request.content_id) + .map_err(|masked_oid::PaddingError| Failure::BadRequest("bad masked id"))?; + + let content_id_bson = to_bson(&content_object_id).map_err(|_| Failure::Unexpected)?; + + let content_to_be_saved = doc! { + "user_id": user.id, + "content_type": content_type_bson, + "content_id": content_id_bson, + "saved_at": DateTime::now(), + }; + match db.collection::("saved").insert_one(content_to_be_saved, None).await { + Ok(_) => return success(()), + Err(err) => { + match err.kind.as_ref() { + ErrorKind::Write(WriteFailure::WriteError(write_err)) if write_err.code == 11000 => { + return Err(Failure::BadRequest("content already saved")) // TODO: make into a "CONFLICT" type + } + _ => { + return Err(Failure::Unexpected); + } + } + } + } +} + +#[delete("/users/saved/")] +pub async fn delete_content( + db: web::Data, + masking_key: web::Data<&'static MaskingKey>, + user: AuthenticatedUser, +) -> ApiResult<(), ()> { + todo!() +} + +#[get("/users/saved/")] +pub async fn get_content( + db: web::Data, + masking_key: web::Data<&'static MaskingKey>, + user: AuthenticatedUser, +) -> ApiResult<(), ()> { + todo!() +} diff --git a/src/types.rs b/src/types.rs index f8a1cb0..c0c4141 100644 --- a/src/types.rs +++ b/src/types.rs @@ -60,6 +60,22 @@ impl fmt::Display for UsernameInvalid { } } +#[derive(Deserialize)] +pub struct SavedContent { + #[serde(rename = "_id")] + pub id: ObjectId, + pub user_id: ObjectId, + pub content_type: SavedType, + pub content_id: ObjectId, + pub saved_at: String, +} + +#[derive(Serialize, Deserialize)] +pub enum SavedType { + Comment, + Post +} + #[derive(Deserialize)] pub struct User { #[serde(rename = "_id")] From 09197cb5be3fa084081288b0a3c26c23767e6114 Mon Sep 17 00:00:00 2001 From: Matthew Date: Thu, 23 Feb 2023 01:51:42 -0800 Subject: [PATCH 2/7] WIP: adds route to delete saved content from a user profile --- src/main.rs | 15 ++++---------- src/services/saved.rs | 48 +++++++++++++++++++++++++++++++++++-------- 2 files changed, 43 insertions(+), 20 deletions(-) diff --git a/src/main.rs b/src/main.rs index c650463..2ea66b6 100644 --- a/src/main.rs +++ b/src/main.rs @@ -84,19 +84,11 @@ async fn initialize_database(db: &Database) -> mongodb::error::Result<()> { None, ), - // Create index on `user_id` field. 
+ // Create a unique index on the `content_id`-`user_id` field combination + // so a user can't have duplicate saved content. saved.create_index( IndexModel::builder() - .keys(doc! {"user_id": 1}) - .build(), - None, - ), - - // Create a unique index on `content_id` field (unique so there) - // can't be any duplicate content saved. - saved.create_index( - IndexModel::builder() - .keys(doc! {"content_id": 1}) + .keys(doc! {"content_id": 1, "user_id": 1}) .options( IndexOptions::builder() .unique(true) @@ -260,6 +252,7 @@ async fn main() -> Result<(), Box> { .service(services::profile::get_profile) .service(services::posts::get_single_post) .service(services::saved::save_content) + .service(services::saved::delete_content) }) .bind(("0.0.0.0", 3000))? .run() diff --git a/src/services/saved.rs b/src/services/saved.rs index 3d00983..cd55815 100644 --- a/src/services/saved.rs +++ b/src/services/saved.rs @@ -1,3 +1,4 @@ +use actix_web::http::StatusCode; use actix_web::{ get, post, @@ -16,13 +17,13 @@ use mongodb::error::{ ErrorKind, WriteFailure, }; -use serde::{Deserialize}; +use serde::{Deserialize, Serialize}; use crate::auth::AuthenticatedUser; use crate::api_types::{ ApiResult, Failure, - success, + success, ApiError, failure, }; use crate::masked_oid::{ self, @@ -30,25 +31,39 @@ use crate::masked_oid::{ MaskedObjectId, }; use crate::types::{ - SavedType, + SavedType, SavedContent, }; +#[derive(Debug, Serialize)] +pub enum SaveError { + AlreadySaved, +} + +impl ApiError for SaveError { + fn status_code(&self) -> StatusCode { + match self { + Self::AlreadySaved => StatusCode::CONFLICT, + } + } +} + #[derive(Deserialize)] -pub struct CreateSavedContent { +pub struct SavedContentRequest { pub content_type: SavedType, pub content_id: MaskedObjectId, } -// TODO: ensure proper indicies are used +// TODO: is is necessary to ensure the user is saving either a `Comment` or `Post`? #[post("/users/saved/")] pub async fn save_content( db: web::Data, user: AuthenticatedUser, - request: web::Json, + request: web::Json, masking_key: web::Data<&'static MaskingKey>, -) -> ApiResult<(), ()> { +) -> ApiResult<(), SaveError> { + // Save content let content_type_bson = to_bson(&request.content_type).map_err(|_| Failure::Unexpected)?; let content_object_id = masking_key.unmask(&request.content_id) @@ -67,7 +82,7 @@ pub async fn save_content( Err(err) => { match err.kind.as_ref() { ErrorKind::Write(WriteFailure::WriteError(write_err)) if write_err.code == 11000 => { - return Err(Failure::BadRequest("content already saved")) // TODO: make into a "CONFLICT" type + failure(SaveError::AlreadySaved) } _ => { return Err(Failure::Unexpected); @@ -82,8 +97,23 @@ pub async fn delete_content( db: web::Data, masking_key: web::Data<&'static MaskingKey>, user: AuthenticatedUser, + request: web::Json, ) -> ApiResult<(), ()> { - todo!() + + let content_object_id = masking_key.unmask(&request.content_id) + .map_err(|masked_oid::PaddingError| Failure::BadRequest("bad masked id"))?; + + let content_id_bson = to_bson(&content_object_id).map_err(|_| Failure::Unexpected)?; + + let to_delete_document = doc! 
{ + "content_id": content_id_bson, + "user_id": user.id + }; + + match db.collection::("saved").delete_one(to_delete_document, None).await { + Ok(delete_result) => if delete_result.deleted_count == 1 {return success(())} else {return Err(Failure::BadRequest("this saved content doesn't exist"))}, + Err(_) => return Err(Failure::Unexpected), + } } #[get("/users/saved/")] From f308fc0c514f4f98abf600d74b48c56c916ad8a0 Mon Sep 17 00:00:00 2001 From: Matthew Date: Thu, 23 Feb 2023 06:02:36 -0800 Subject: [PATCH 3/7] WIP: adds template for fetching saved content --- Cargo.lock | 123 ++++++++++++++++++++++++++++++++++++++++-- Cargo.toml | 1 + src/conf.rs | 3 ++ src/main.rs | 1 + src/services/saved.rs | 83 +++++++++++++++++++++++++--- src/types.rs | 4 ++ 6 files changed, 206 insertions(+), 9 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index ed43ee9..3b481f4 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -243,6 +243,15 @@ dependencies = [ "memchr", ] +[[package]] +name = "android_system_properties" +version = "0.1.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "819e7219dbd41043ac279b19830f2efc897156490d7fd6ea916720117ee66311" +dependencies = [ + "libc", +] + [[package]] name = "async-trait" version = "0.1.56" @@ -355,14 +364,16 @@ checksum = "baf1de4339761588bc0619e3cbc0120ee582ebb74b53b4efbf79117bd2da40fd" [[package]] name = "chrono" -version = "0.4.19" +version = "0.4.23" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "670ad68c9088c2a963aaa298cb369688cf3f9465ce5e2d4ca10e6e0098a1ce73" +checksum = "16b0a3d9ed01224b22057780a37bb8c5dbfe1be8ba48678e7bf57ec4b385411f" dependencies = [ - "libc", + "iana-time-zone", + "js-sys", "num-integer", "num-traits", "time 0.1.44", + "wasm-bindgen", "winapi", ] @@ -376,6 +387,16 @@ dependencies = [ "inout", ] +[[package]] +name = "codespan-reporting" +version = "0.11.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3538270d33cc669650c4b093848450d380def10c331d38c768e34cac80576e6e" +dependencies = [ + "termcolor", + "unicode-width", +] + [[package]] name = "confesi-server" version = "0.1.0" @@ -385,6 +406,7 @@ dependencies = [ "aes", "base64", "blake2", + "chrono", "env_logger", "futures", "hex", @@ -405,6 +427,12 @@ version = "0.4.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "6245d59a3e82a7fc217c5828a6692dbc6dfb63a0c8c90495621f7b9d79704a0e" +[[package]] +name = "core-foundation-sys" +version = "0.8.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5827cebf4670468b8772dd191856768aedcb1b0278a04f989f7766351917b9dc" + [[package]] name = "cpufeatures" version = "0.2.2" @@ -424,6 +452,50 @@ dependencies = [ "typenum", ] +[[package]] +name = "cxx" +version = "1.0.91" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "86d3488e7665a7a483b57e25bdd90d0aeb2bc7608c8d0346acf2ad3f1caf1d62" +dependencies = [ + "cc", + "cxxbridge-flags", + "cxxbridge-macro", + "link-cplusplus", +] + +[[package]] +name = "cxx-build" +version = "1.0.91" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "48fcaf066a053a41a81dfb14d57d99738b767febb8b735c3016e469fac5da690" +dependencies = [ + "cc", + "codespan-reporting", + "once_cell", + "proc-macro2", + "quote", + "scratch", + "syn", +] + +[[package]] +name = "cxxbridge-flags" +version = "1.0.91" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a2ef98b8b717a829ca5603af80e1f9e2e48013ab227b68ef37872ef84ee479bf" 
+ +[[package]] +name = "cxxbridge-macro" +version = "1.0.91" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "086c685979a698443656e5cf7856c95c642295a38599f12fb1ff76fb28d19892" +dependencies = [ + "proc-macro2", + "quote", + "syn", +] + [[package]] name = "darling" version = "0.13.4" @@ -761,6 +833,30 @@ version = "2.1.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "9a3a5bfb195931eeb336b2a7b4d761daec841b97f947d34394601737a7bba5e4" +[[package]] +name = "iana-time-zone" +version = "0.1.53" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "64c122667b287044802d6ce17ee2ddf13207ed924c712de9a66a5814d5b64765" +dependencies = [ + "android_system_properties", + "core-foundation-sys", + "iana-time-zone-haiku", + "js-sys", + "wasm-bindgen", + "winapi", +] + +[[package]] +name = "iana-time-zone-haiku" +version = "0.1.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0703ae284fc167426161c2e3f1da3ea71d94b21bedbcc9494e92b28e334e3dca" +dependencies = [ + "cxx", + "cxx-build", +] + [[package]] name = "ident_case" version = "1.0.1" @@ -857,6 +953,15 @@ version = "0.2.126" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "349d5a591cd28b49e1d1037471617a32ddcda5731b99419008085f72d5a53836" +[[package]] +name = "link-cplusplus" +version = "1.0.8" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ecd207c9c713c34f95a097a5b029ac2ce6010530c7b49d7fea24d977dede04f5" +dependencies = [ + "cc", +] + [[package]] name = "linked-hash-map" version = "0.5.6" @@ -1305,6 +1410,12 @@ version = "1.1.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "d29ab0c6d3fc0ee92fe66e2d99f700eab17a8d57d1c1d3b748380fb20baa78cd" +[[package]] +name = "scratch" +version = "1.0.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ddccb15bcce173023b3fedd9436f882a0739b8dfb45e4f6b6002bee5929f61b2" + [[package]] name = "sct" version = "0.7.0" @@ -1754,6 +1865,12 @@ dependencies = [ "tinyvec", ] +[[package]] +name = "unicode-width" +version = "0.1.10" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c0edd1e5b14653f783770bce4a4dabb4a5108a5370a5f5d8cfe8710c361f6c8b" + [[package]] name = "untrusted" version = "0.7.1" diff --git a/Cargo.toml b/Cargo.toml index d6d4686..ca2e122 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -10,6 +10,7 @@ actix-web = { version = '^4.1.0', default-features = false, features = ['macros' aes = { version = '^0.8.1' } base64 = '^0.13.0' blake2 = '^0.10.4' +chrono = "0.4.23" env_logger = '^0.9.0' futures = '^0.3.21' hex = '^0.4.3' diff --git a/src/conf.rs b/src/conf.rs index 3e716aa..a8be4b2 100644 --- a/src/conf.rs +++ b/src/conf.rs @@ -33,3 +33,6 @@ pub const PERMITTED_ORIGINS: &[&str] = &[ pub const TRENDING_EPOCH: i64 = 1640995200; // 2022-01-01T00:00:00Z pub const TRENDING_DECAY: f64 = 103616.32918473207; // 45000 ln 10 + +/// How many comments or posts to return at once from a user's saved content. +pub const SAVED_CONTENT_PAGE_SIZE: i32 = 25; diff --git a/src/main.rs b/src/main.rs index 2ea66b6..ffd4bda 100644 --- a/src/main.rs +++ b/src/main.rs @@ -253,6 +253,7 @@ async fn main() -> Result<(), Box> { .service(services::posts::get_single_post) .service(services::saved::save_content) .service(services::saved::delete_content) + .service(services::saved::get_content) }) .bind(("0.0.0.0", 3000))? 
.run() diff --git a/src/services/saved.rs b/src/services/saved.rs index cd55815..63b1a85 100644 --- a/src/services/saved.rs +++ b/src/services/saved.rs @@ -1,17 +1,21 @@ use actix_web::http::StatusCode; +use chrono::{Utc, TimeZone, ParseError}; use actix_web::{ get, post, delete }; use actix_web::web; +use futures::TryStreamExt; +use mongodb::options::FindOptions; use mongodb::{ Database, + bson }; use mongodb::bson::{ DateTime, Document, - doc, to_bson, + doc, to_bson, Bson, }; use mongodb::error::{ ErrorKind, @@ -25,6 +29,8 @@ use crate::api_types::{ Failure, success, ApiError, failure, }; +use crate::services::posts::Detail; +use crate::{conf, to_unexpected}; use crate::masked_oid::{ self, MaskingKey, @@ -49,7 +55,7 @@ impl ApiError for SaveError { #[derive(Deserialize)] -pub struct SavedContentRequest { +pub struct SaveContentRequest { pub content_type: SavedType, pub content_id: MaskedObjectId, } @@ -59,7 +65,7 @@ pub struct SavedContentRequest { pub async fn save_content( db: web::Data, user: AuthenticatedUser, - request: web::Json, + request: web::Json, masking_key: web::Data<&'static MaskingKey>, ) -> ApiResult<(), SaveError> { @@ -97,7 +103,7 @@ pub async fn delete_content( db: web::Data, masking_key: web::Data<&'static MaskingKey>, user: AuthenticatedUser, - request: web::Json, + request: web::Json, ) -> ApiResult<(), ()> { let content_object_id = masking_key.unmask(&request.content_id) @@ -116,11 +122,76 @@ pub async fn delete_content( } } +#[derive(Deserialize)] +#[serde(rename_all = "kebab-case")] +pub enum Filter { + Comments, + Posts, + Mixed, +} + +#[derive(Deserialize)] +pub struct FetchSavedRequest { + pub filter: Filter, + pub after: Option +} + +#[derive(Serialize)] +pub struct FetchSavedDetail { + // pub results: Vec<> +} + + #[get("/users/saved/")] pub async fn get_content( db: web::Data, masking_key: web::Data<&'static MaskingKey>, user: AuthenticatedUser, -) -> ApiResult<(), ()> { - todo!() + query: web::Query, +) -> ApiResult, ()> { + let c = conf::SAVED_CONTENT_PAGE_SIZE; + let filter: Document; + if let Some(datetime) = &query.after { + match datetime.parse::>() { + Ok(date) => { + let date = Bson::DateTime(DateTime::from_millis(date.timestamp_millis())); + filter = doc! {"date": { "$gt": date }}; + }, + Err(_) => return Err(Failure::BadRequest("invalid date")), + } + } else { + filter = doc! {} + } + + let options = FindOptions::builder() + .limit(5) + .sort(doc! { "date": -1 }) + .build(); + + db.collection::("saved").find(filter, options).await + .map_err(to_unexpected!("Getting posts cursor failed"))? + .map_ok(|post| Ok(Detail { + id: masking_key.mask(&post.id), + sequential_id: masking_key.mask_sequential(u64::try_from(post.sequential_id).unwrap()), + reply_context: None, + text: post.text, + created_at: ( + post.id.timestamp() + .try_to_rfc3339_string() + .map_err(to_unexpected!("Formatting post timestamp failed"))? + ), + votes: Votes { + up: u32::try_from(post.votes_up).unwrap(), + down: u32::try_from(post.votes_down).unwrap(), + }, + })) + .try_collect::>>>() + .await + .map_err(to_unexpected!("Getting posts failed"))? 
+ .into_iter() + .collect::, Failure<()>>>()?; + + println!("RESULTS: {:?}", {&query.after}); + + success(()) } diff --git a/src/types.rs b/src/types.rs index c0c4141..656df54 100644 --- a/src/types.rs +++ b/src/types.rs @@ -68,6 +68,10 @@ pub struct SavedContent { pub content_type: SavedType, pub content_id: ObjectId, pub saved_at: String, + #[serde(skip_serializing_if = "Option::is_none")] + post: Option, + #[serde(skip_serializing_if = "Option::is_none")] + comment: Option, } #[derive(Serialize, Deserialize)] From c5d77a790e506f317b9619b930777c4d020c799e Mon Sep 17 00:00:00 2001 From: Matthew Date: Fri, 24 Feb 2023 02:55:58 -0800 Subject: [PATCH 4/7] WIP: retrieving saved posts/comments basic functionality working with infinite cusor-based pagination --- src/conf.rs | 2 +- src/services/saved.rs | 195 ++++++++++++++++++++++++++++++++---------- src/types.rs | 50 +++++++++-- 3 files changed, 192 insertions(+), 55 deletions(-) diff --git a/src/conf.rs b/src/conf.rs index a8be4b2..0519197 100644 --- a/src/conf.rs +++ b/src/conf.rs @@ -35,4 +35,4 @@ pub const TRENDING_EPOCH: i64 = 1640995200; // 2022-01-01T00:00:00Z pub const TRENDING_DECAY: f64 = 103616.32918473207; // 45000 ln 10 /// How many comments or posts to return at once from a user's saved content. -pub const SAVED_CONTENT_PAGE_SIZE: i32 = 25; +pub const SAVED_CONTENT_PAGE_SIZE: i64 = 5; diff --git a/src/services/saved.rs b/src/services/saved.rs index 63b1a85..9d88ae7 100644 --- a/src/services/saved.rs +++ b/src/services/saved.rs @@ -1,13 +1,13 @@ use actix_web::http::StatusCode; -use chrono::{Utc, TimeZone, ParseError}; use actix_web::{ get, post, delete }; use actix_web::web; -use futures::TryStreamExt; -use mongodb::options::FindOptions; +use chrono::Utc; +use futures::{StreamExt}; +use mongodb::options::{FindOptions, AggregateOptions}; use mongodb::{ Database, bson @@ -29,17 +29,19 @@ use crate::api_types::{ Failure, success, ApiError, failure, }; -use crate::services::posts::Detail; -use crate::{conf, to_unexpected}; +use crate::services::posts::Votes; +use crate::{to_unexpected, conf}; use crate::masked_oid::{ self, MaskingKey, MaskedObjectId, }; use crate::types::{ - SavedType, SavedContent, + SavedType, SavedContent, Post, Rfc3339DateTime, }; +use super::posts::Detail; + #[derive(Debug, Serialize)] pub enum SaveError { AlreadySaved, @@ -69,7 +71,24 @@ pub async fn save_content( masking_key: web::Data<&'static MaskingKey>, ) -> ApiResult<(), SaveError> { - // Save content + // First verify the the passed `content_id` actually exists in the + // corresponding `SavedType` collection, else throw a 400. + + let content_id = masking_key.unmask(&request.content_id) + .map_err(|masked_oid::PaddingError| Failure::BadRequest("bad masked id"))?; + + let collection_to_verify = match &request.content_type { + SavedType::Comment => db.collection::("posts"), // TODO: change this to `Comment` collection once it is implemented. + SavedType::Post => db.collection::("posts"), + }; + + match collection_to_verify.find_one(doc! {"_id": {"$eq": content_id}}, None).await { + Ok(possible_post) => if let None = possible_post {return Err(Failure::BadRequest("content doesn't exist as specified type"))}, + Err(_) => return Err(Failure::Unexpected), + } + + // Save the content to the `saved` collection. 
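The insert below leans on the unique compound index over (content_id, user_id) built in `initialize_database`: MongoDB reports a second save of the same pair as write error code 11000, which the route turns into the 409 `AlreadySaved` response. A minimal standalone sketch of that interaction, assuming a throwaway local database (`mongodb://localhost:27017`, `test_db`) and a `tokio` runtime added only for this example:

use mongodb::bson::{doc, oid::ObjectId, DateTime, Document};
use mongodb::error::{ErrorKind, WriteFailure};
use mongodb::options::IndexOptions;
use mongodb::{Client, IndexModel};

#[tokio::main]
async fn main() -> mongodb::error::Result<()> {
    // Illustrative connection string and database name.
    let client = Client::with_uri_str("mongodb://localhost:27017").await?;
    let saved = client.database("test_db").collection::<Document>("saved");

    // Same shape as the index created in `initialize_database`: unique on the
    // (content_id, user_id) pair, so one user can save a piece of content only
    // once, while other users can still save that same content.
    saved.create_index(
        IndexModel::builder()
            .keys(doc! { "content_id": 1, "user_id": 1 })
            .options(IndexOptions::builder().unique(true).build())
            .build(),
        None,
    ).await?;

    let user = ObjectId::new();
    let content = ObjectId::new();
    let entry = doc! {
        "user_id": user,
        "content_id": content,
        "content_type": "post",
        "saved_at": DateTime::now(),
    };

    // First save goes through.
    saved.insert_one(entry.clone(), None).await?;

    // Second save of the same pair trips the unique index with code 11000,
    // the case the handler maps to 409 instead of a generic 500.
    match saved.insert_one(entry, None).await {
        Err(err) => match err.kind.as_ref() {
            ErrorKind::Write(WriteFailure::WriteError(w)) if w.code == 11000 => {
                println!("duplicate save rejected, as expected");
            }
            _ => return Err(err),
        },
        Ok(_) => println!("unexpected: duplicate insert succeeded"),
    }
    Ok(())
}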
+ let content_type_bson = to_bson(&request.content_type).map_err(|_| Failure::Unexpected)?; let content_object_id = masking_key.unmask(&request.content_id) @@ -127,7 +146,6 @@ pub async fn delete_content( pub enum Filter { Comments, Posts, - Mixed, } #[derive(Deserialize)] @@ -137,61 +155,146 @@ pub struct FetchSavedRequest { } #[derive(Serialize)] -pub struct FetchSavedDetail { - // pub results: Vec<> +pub struct SavedContentDetail { + pub after: Option, + #[serde(skip_serializing_if = "<[_]>::is_empty")] + pub posts: Vec, + #[serde(skip_serializing_if = "<[_]>::is_empty")] + pub comments: Vec, // TODO: make into `Comment` } +// TODO: sort by 2 fields (objID & date) +// TODO: move `Detail` to `types.rs`? #[get("/users/saved/")] pub async fn get_content( db: web::Data, masking_key: web::Data<&'static MaskingKey>, user: AuthenticatedUser, query: web::Query, -) -> ApiResult, ()> { - let c = conf::SAVED_CONTENT_PAGE_SIZE; - let filter: Document; +) -> ApiResult, ()> { + let date_filter: Document; if let Some(datetime) = &query.after { match datetime.parse::>() { Ok(date) => { let date = Bson::DateTime(DateTime::from_millis(date.timestamp_millis())); - filter = doc! {"date": { "$gt": date }}; + date_filter = doc! {"saved_at": { "$gt": date }}; }, Err(_) => return Err(Failure::BadRequest("invalid date")), } } else { - filter = doc! {} + date_filter = doc! {} } - let options = FindOptions::builder() - .limit(5) - .sort(doc! { "date": -1 }) - .build(); - - db.collection::("saved").find(filter, options).await - .map_err(to_unexpected!("Getting posts cursor failed"))? - .map_ok(|post| Ok(Detail { - id: masking_key.mask(&post.id), - sequential_id: masking_key.mask_sequential(u64::try_from(post.sequential_id).unwrap()), - reply_context: None, - text: post.text, - created_at: ( - post.id.timestamp() - .try_to_rfc3339_string() - .map_err(to_unexpected!("Formatting post timestamp failed"))? - ), - votes: Votes { - up: u32::try_from(post.votes_up).unwrap(), - down: u32::try_from(post.votes_down).unwrap(), - }, - })) - .try_collect::>>>() - .await - .map_err(to_unexpected!("Getting posts failed"))? - .into_iter() - .collect::, Failure<()>>>()?; - - println!("RESULTS: {:?}", {&query.after}); - - success(()) + let lookup_comments_or_posts = match query.filter { + Filter::Posts => doc! { + "$lookup": { + "from": "posts", + "localField": "content_id", + "foreignField": "_id", + "as": "post" + } + }, + Filter::Comments => doc! { + "$lookup": { + "from": "comments", + "localField": "content_id", + "foreignField": "_id", + "as": "comment" + } + } + }; + + // TODO: make more idiomatic - DRY principle + let projection = match query.filter { + Filter::Posts => doc! { + "$project": { + "_id": 1, + "user_id": 1, + "content_type": 1, + "content_id": 1, + "saved_at": 1, + "post": { + "$arrayElemAt": ["$post", 0] + }, + } + }, + Filter::Comments => doc! { + "$project": { + "_id": 1, + "user_id": 1, + "content_type": 1, + "content_id": 1, + "saved_at": 1, + "comment": { + "$arrayElemAt": ["$comment", 0] + } + } + }, + }; + + let pipeline = vec![ + doc! { + "$match": { + "$and": [ + date_filter, + { "user_id": { "$eq": user.id } } + ] + } + }, + lookup_comments_or_posts, + projection, + doc! { "$limit": conf::SAVED_CONTENT_PAGE_SIZE } + ]; + + // TODO: sort results? 
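For a concrete picture of what one aggregation result looks like after the `$lookup` and `$arrayElemAt` projection above, here is a rough standalone sketch that builds such a document by hand and deserializes it. `JoinedSaved` and `JoinedPost` are simplified stand-ins, not project types: plain `bson::DateTime` in place of the `Rfc3339DateTime` wrapper, and a `String` in place of the `SavedType` enum.

use mongodb::bson::{self, doc, oid::ObjectId, DateTime};
use serde::Deserialize;

#[derive(Deserialize, Debug)]
struct JoinedSaved {
    #[serde(rename = "_id")]
    id: ObjectId,
    user_id: ObjectId,
    content_type: String,
    content_id: ObjectId,
    saved_at: DateTime,
    post: Option<JoinedPost>,
}

#[derive(Deserialize, Debug)]
struct JoinedPost {
    #[serde(rename = "_id")]
    id: ObjectId,
    text: String,
}

fn main() {
    let post_id = ObjectId::new();
    // Roughly what one pipeline result looks like once `$project` has collapsed
    // the `$lookup` array into a single embedded `post` document.
    let result = doc! {
        "_id": ObjectId::new(),
        "user_id": ObjectId::new(),
        "content_type": "post",
        "content_id": post_id,
        "saved_at": DateTime::now(),
        "post": { "_id": post_id, "text": "example post body" },
    };

    let saved: JoinedSaved = bson::from_document(result).expect("should deserialize");
    println!("{:?}", saved);
}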
+ + let cursor = db.collection::("saved").aggregate(pipeline, None).await; + + let mut posts: Vec = Vec::new(); + + let mut comments: Vec = Vec::new(); // todo: change to `Comments` type when they're implemented + + let mut content_after: Option = None; + + match cursor { + Ok(mut cursor) => { + while let Some(content) = cursor.next().await { + match content { + Ok(content) => { + let saved_content: SavedContent = bson::from_bson(Bson::Document(content)).map_err(|_| return Failure::Unexpected)?; + content_after = Some(saved_content.saved_at); + // If it has posts + // TODO: implement comments + if let Some(post) = saved_content.post { + posts.push(Detail { + id: masking_key.mask(&post.id), + sequential_id: masking_key.mask_sequential(u64::try_from(post.sequential_id).unwrap()), + reply_context: None, + text: post.text, + created_at: ( + post.id.timestamp() + .try_to_rfc3339_string() + .map_err(|_| return Failure::Unexpected)? + ), + votes: Votes { + up: u32::try_from(post.votes_up).unwrap(), + down: u32::try_from(post.votes_down).unwrap(), + }, + }); + } + } + Err(_) => return Err(Failure::Unexpected), + } + } + } + Err(_) => return Err(Failure::Unexpected), + } + success(Box::new(SavedContentDetail { + after: content_after, + comments: comments, + posts: posts, + })) } + +// TODO: remove unnecessary `Serialize`s diff --git a/src/types.rs b/src/types.rs index 656df54..69879a9 100644 --- a/src/types.rs +++ b/src/types.rs @@ -14,12 +14,13 @@ use blake2::digest::consts::U16; use mongodb::bson::{ Binary, Bson, - DateTime, + DateTime, Document, doc, }; use mongodb::bson::oid::ObjectId; use mongodb::bson::spec::BinarySubtype; use rand::RngCore; -use serde::{Deserialize, Serialize}; +use serde::{Deserialize, Serialize, Deserializer, Serializer}; +use serde::de::Error; use crate::conf; @@ -60,21 +61,23 @@ impl fmt::Display for UsernameInvalid { } } -#[derive(Deserialize)] +#[derive(Deserialize, Debug, Serialize)] pub struct SavedContent { #[serde(rename = "_id")] pub id: ObjectId, pub user_id: ObjectId, pub content_type: SavedType, pub content_id: ObjectId, - pub saved_at: String, + #[serde(with = "Rfc3339DateTime")] + pub saved_at: Rfc3339DateTime, #[serde(skip_serializing_if = "Option::is_none")] - post: Option, + pub post: Option, #[serde(skip_serializing_if = "Option::is_none")] - comment: Option, + pub comment: Option, // TODO: make into `Comment` } -#[derive(Serialize, Deserialize)] +#[derive(Serialize, Deserialize, Debug)] +#[serde(rename_all = "snake_case")] pub enum SavedType { Comment, Post @@ -101,7 +104,7 @@ pub struct Session { pub last_used: DateTime, } -#[derive(Deserialize)] +#[derive(Deserialize, Debug, Serialize)] pub struct Post { #[serde(rename = "_id")] pub id: ObjectId, @@ -252,3 +255,34 @@ mod token { } } } + +#[derive(Debug, Clone, Copy)] +pub struct Rfc3339DateTime(DateTime); + +// Custom deserializer for `Rfc3339DateTime` (`bson::DateTime` wrapper). +impl<'de> Deserialize<'de> for Rfc3339DateTime { + fn deserialize(deserializer: D) -> Result + where + D: Deserializer<'de>, + { + let datetime = DateTime::deserialize(deserializer)?; + datetime.try_to_rfc3339_string().map_err(|e| D::Error::custom(format!("Error deserializing: {e}")))?; + Ok(Rfc3339DateTime(datetime)) + } +} + +use serde::ser; + + +// Custom serializer for `Rfc3339DateTime` (`bson::DateTime` wrapper). 
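What the wrapper buys in API responses can be seen with a quick standalone check. This sketch copies the serialize impl that follows and assumes `serde_json` is pulled in only for the demonstration; the timestamp value is arbitrary.

use mongodb::bson::DateTime;
use serde::{ser, Serialize};

// Local copy of the wrapper, only to show what the custom impl changes.
struct Rfc3339DateTime(DateTime);

impl Serialize for Rfc3339DateTime {
    fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error>
    where
        S: ser::Serializer,
    {
        let timestamp = self.0
            .try_to_rfc3339_string()
            .map_err(|e| ser::Error::custom(format!("Error serializing: {e}")))?;
        timestamp.serialize(serializer)
    }
}

fn main() {
    let millis = 1_677_240_000_000; // 2023-02-24T12:00:00Z
    // With the wrapper: a plain RFC 3339 string clients can parse directly.
    println!("{}", serde_json::to_string(&Rfc3339DateTime(DateTime::from_millis(millis))).unwrap());
    // Without it, `bson::DateTime` serializes in MongoDB extended-JSON form
    // (something along the lines of {"$date": ...}) rather than a plain string,
    // which is exactly what the wrapper avoids in API responses.
    println!("{}", serde_json::to_string(&DateTime::from_millis(millis)).unwrap());
}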
+impl Serialize for Rfc3339DateTime { + fn serialize(&self, serializer: S) -> Result + where + S: ser::Serializer, + { + let timestamp = self.0.try_to_rfc3339_string().map_err(|e| ser::Error::custom(format!("Error serializing: {e}")))?; + // let doc = doc! { "$date": timestamp }; + timestamp.serialize(serializer) + // self.0.serialize(serializer) + } +} From 1b5e18bd33c80558d52a8aaab42b543cafbcdcfd Mon Sep 17 00:00:00 2001 From: Matthew Date: Fri, 24 Feb 2023 06:03:12 -0800 Subject: [PATCH 5/7] adds docs to saved routes; finishes up saved routes --- src/conf.rs | 2 +- src/main.rs | 8 +++ src/services/saved.rs | 136 +++++++++++++++++++++++++++++++----------- src/types.rs | 6 +- 4 files changed, 111 insertions(+), 41 deletions(-) diff --git a/src/conf.rs b/src/conf.rs index 0519197..8f72e7e 100644 --- a/src/conf.rs +++ b/src/conf.rs @@ -35,4 +35,4 @@ pub const TRENDING_EPOCH: i64 = 1640995200; // 2022-01-01T00:00:00Z pub const TRENDING_DECAY: f64 = 103616.32918473207; // 45000 ln 10 /// How many comments or posts to return at once from a user's saved content. -pub const SAVED_CONTENT_PAGE_SIZE: i64 = 5; +pub const SAVED_CONTENT_PAGE_SIZE: i64 = 3; diff --git a/src/main.rs b/src/main.rs index ffd4bda..859323c 100644 --- a/src/main.rs +++ b/src/main.rs @@ -98,6 +98,14 @@ async fn initialize_database(db: &Database) -> mongodb::error::Result<()> { None, ), + // Creates index so that you can sort by `saved_at`. + saved.create_index( + IndexModel::builder() + .keys(doc! {"saved_at": -1}) + .build(), + None, + ), + sessions.create_index( IndexModel::builder() .keys(doc! {"user": 1}) diff --git a/src/services/saved.rs b/src/services/saved.rs index 9d88ae7..0832723 100644 --- a/src/services/saved.rs +++ b/src/services/saved.rs @@ -7,7 +7,6 @@ use actix_web::{ use actix_web::web; use chrono::Utc; use futures::{StreamExt}; -use mongodb::options::{FindOptions, AggregateOptions}; use mongodb::{ Database, bson @@ -30,7 +29,7 @@ use crate::api_types::{ success, ApiError, failure, }; use crate::services::posts::Votes; -use crate::{to_unexpected, conf}; +use crate::conf; use crate::masked_oid::{ self, MaskingKey, @@ -42,6 +41,7 @@ use crate::types::{ use super::posts::Detail; +// The unique error(s) that can occur when saving content. #[derive(Debug, Serialize)] pub enum SaveError { AlreadySaved, @@ -55,14 +55,13 @@ impl ApiError for SaveError { } } - #[derive(Deserialize)] pub struct SaveContentRequest { pub content_type: SavedType, pub content_id: MaskedObjectId, } -// TODO: is is necessary to ensure the user is saving either a `Comment` or `Post`? +/// Allows a user to save comments or posts to view later. #[post("/users/saved/")] pub async fn save_content( db: web::Data, @@ -71,19 +70,20 @@ pub async fn save_content( masking_key: web::Data<&'static MaskingKey>, ) -> ApiResult<(), SaveError> { - // First verify the the passed `content_id` actually exists in the - // corresponding `SavedType` collection, else throw a 400. + // First, verifies the the passed `content_id` actually exists in the + // corresponding `SavedType` (comments or posts) collection, else throw a 400. let content_id = masking_key.unmask(&request.content_id) .map_err(|masked_oid::PaddingError| Failure::BadRequest("bad masked id"))?; - let collection_to_verify = match &request.content_type { - SavedType::Comment => db.collection::("posts"), // TODO: change this to `Comment` collection once it is implemented. 
- SavedType::Post => db.collection::("posts"), - }; + let collection_to_verify = match &request.content_type { + // TODO: Change this to a Comment collection once it is implemented. + SavedType::Comment => db.collection::("posts"), + SavedType::Post => db.collection::("posts"), + }; match collection_to_verify.find_one(doc! {"_id": {"$eq": content_id}}, None).await { - Ok(possible_post) => if let None = possible_post {return Err(Failure::BadRequest("content doesn't exist as specified type"))}, + Ok(possible_content) => if let None = possible_content {return Err(Failure::BadRequest("content doesn't exist as specified type"))}, Err(_) => return Err(Failure::Unexpected), } @@ -94,14 +94,19 @@ pub async fn save_content( let content_object_id = masking_key.unmask(&request.content_id) .map_err(|masked_oid::PaddingError| Failure::BadRequest("bad masked id"))?; - let content_id_bson = to_bson(&content_object_id).map_err(|_| Failure::Unexpected)?; + let content_object_id_bson = to_bson(&content_object_id).map_err(|_| Failure::Unexpected)?; + // Document that respresents a saved bit of content. let content_to_be_saved = doc! { "user_id": user.id, "content_type": content_type_bson, - "content_id": content_id_bson, + "content_id": content_object_id_bson, "saved_at": DateTime::now(), }; + + // Insert the saved content reference to the database. If it already exists (content already saved), + // then throw a custom 409 error ("AlreadySaved"). If successful, return a 200. If unknown error occurs, + // then throw a 500. match db.collection::("saved").insert_one(content_to_be_saved, None).await { Ok(_) => return success(()), Err(err) => { @@ -117,6 +122,7 @@ pub async fn save_content( } } +/// Allows a user to delete a comment or post they've previously saved. #[delete("/users/saved/")] pub async fn delete_content( db: web::Data, @@ -125,22 +131,27 @@ pub async fn delete_content( request: web::Json, ) -> ApiResult<(), ()> { + // Unmasks ID of content to be deleted. let content_object_id = masking_key.unmask(&request.content_id) .map_err(|masked_oid::PaddingError| Failure::BadRequest("bad masked id"))?; - let content_id_bson = to_bson(&content_object_id).map_err(|_| Failure::Unexpected)?; + // Document that'll match against content to be deleted. let to_delete_document = doc! { "content_id": content_id_bson, "user_id": user.id }; + // Upon successful deletion, returns a 200. If nothing is deleted, but no errors are thrown, + // then a 400 is returned because the resource didn't exist in the first place. If + // something else goes wrong, a 500 is returned. match db.collection::("saved").delete_one(to_delete_document, None).await { Ok(delete_result) => if delete_result.deleted_count == 1 {return success(())} else {return Err(Failure::BadRequest("this saved content doesn't exist"))}, Err(_) => return Err(Failure::Unexpected), } } +/// The two ways you can filter viewing your saved content. #[derive(Deserialize)] #[serde(rename_all = "kebab-case")] pub enum Filter { @@ -148,24 +159,37 @@ pub enum Filter { Posts, } +/// The query request for fetching your saved content. #[derive(Deserialize)] pub struct FetchSavedRequest { pub filter: Filter, - pub after: Option + pub after_date: Option, + pub after_id: Option } +/// Type of return for fetching saved content. +/// +/// Returns the desired saved content in a vector. +/// +/// Also returns `after_date` and `after_id`, which allow you to call for the next set of data. 
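The `SavedContentDetail` struct below carries those two cursors back to the caller. Here is a rough sketch of how a client would thread them through successive requests; the HTTP call is stubbed out with canned pages, and every name and value in it is hypothetical.

// Hypothetical, simplified mirror of one page of /users/saved/ results.
struct Page {
    after_date: Option<String>, // RFC 3339 `saved_at` of the last item returned
    after_id: Option<String>,   // masked id of the last item returned
    items: Vec<String>,         // stand-in for the post or comment details
}

fn main() {
    // Stand-in for the server: two pages of results, then an empty one.
    let mut canned = vec![
        Page {
            after_date: Some("2023-02-24T12:22:37.989Z".into()),
            after_id: Some("aaa".into()),
            items: vec!["post 1".into(), "post 2".into()],
        },
        Page {
            after_date: Some("2023-02-24T12:21:54.061Z".into()),
            after_id: Some("bbb".into()),
            items: vec!["post 3".into()],
        },
        Page { after_date: None, after_id: None, items: vec![] },
    ]
    .into_iter();

    // First request carries no cursor parameters at all.
    let mut cursor: Option<(String, String)> = None;

    loop {
        let query = match &cursor {
            // Later requests always send *both* cursors together, since the
            // route rejects exactly one of them with a 400.
            Some((date, id)) => format!("?filter=posts&after_date={date}&after_id={id}"),
            None => "?filter=posts".to_string(),
        };
        println!("GET /users/saved/{query}");

        let page = canned.next().unwrap(); // the real HTTP call would go here
        if page.items.is_empty() {
            break; // nothing further saved
        }
        for item in &page.items {
            println!("  got {item}");
        }
        cursor = match (page.after_date, page.after_id) {
            (Some(date), Some(id)) => Some((date, id)),
            _ => break,
        };
    }
}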
#[derive(Serialize)] pub struct SavedContentDetail { - pub after: Option, + pub after_date: Option, + pub after_id: Option, #[serde(skip_serializing_if = "<[_]>::is_empty")] pub posts: Vec, #[serde(skip_serializing_if = "<[_]>::is_empty")] - pub comments: Vec, // TODO: make into `Comment` + pub comments: Vec, // TODO: Make for Comment once commenting is implemented. } -// TODO: sort by 2 fields (objID & date) -// TODO: move `Detail` to `types.rs`? +/// The query request for fetching your saved content. +/// +/// Allows you to specify a `filter` to determine if you want comments or posts returned. +/// +/// Also allows for you to optionally add an `after_date` and `after_id` to return content +/// after these 2 fields (static cursor-based pagination). Requires both because `ObjectId`s alone +/// aren't fully accurate, and `after_date` isn't guaranteed to be unique. #[get("/users/saved/")] pub async fn get_content( db: web::Data, @@ -173,19 +197,39 @@ pub async fn get_content( user: AuthenticatedUser, query: web::Query, ) -> ApiResult, ()> { - let date_filter: Document; - if let Some(datetime) = &query.after { + + // The two filters used to determine which batch of documents to return. + let mut date_filter = doc! {}; + let mut id_filter = doc! {}; + + // Counts how many filters are applied via query params. + let mut filters_added = 0; + + // Sets the date filter. + if let Some(datetime) = &query.after_date { + filters_added += 1; match datetime.parse::>() { Ok(date) => { let date = Bson::DateTime(DateTime::from_millis(date.timestamp_millis())); - date_filter = doc! {"saved_at": { "$gt": date }}; + date_filter = doc! {"saved_at": { "$lte": date }}; }, Err(_) => return Err(Failure::BadRequest("invalid date")), } - } else { - date_filter = doc! {} } + // Sets the id filter. + if let Some(id) = &query.after_id { + filters_added += 1; + let unmasked_id = masking_key.unmask(id) + .map_err(|masked_oid::PaddingError| Failure::BadRequest("bad masked id"))?; + id_filter = doc! {"_id": { "$ne": unmasked_id }}; + } + + // You can either use no filters (to get the first bit of data), or both filters (to get subsequent data). + // Using only 1 filter doesn't guarentee accurate data, thus, doing so yields a 400-level response. + if filters_added != 2 && filters_added != 0 {return Err(Failure::BadRequest("must use no filters, or both"))} + + // Determines which collection to do the `$lookup`s from. let lookup_comments_or_posts = match query.filter { Filter::Posts => doc! { "$lookup": { @@ -205,7 +249,7 @@ pub async fn get_content( } }; - // TODO: make more idiomatic - DRY principle + // Determines which fields to return using `$project`. let projection = match query.filter { Filter::Posts => doc! { "$project": { @@ -233,39 +277,56 @@ pub async fn get_content( }, }; + // The aggregation pipeline. let pipeline = vec![ + // Sorts by `saved_at` date. + doc! { "$sort": {"saved_at": -1 }}, + // Applies the filters determined above via query params. Also ensures a user only + // can search through their own saved posts. doc! { "$match": { "$and": [ + id_filter, date_filter, { "user_id": { "$eq": user.id } } ] } }, + // Applies the `$lookup` and `$project` stages created above. lookup_comments_or_posts, projection, - doc! { "$limit": conf::SAVED_CONTENT_PAGE_SIZE } + // Limits the number of results returned. + doc! { "$limit": conf::SAVED_CONTENT_PAGE_SIZE }, ]; - // TODO: sort results? - + // Executes query. let cursor = db.collection::("saved").aggregate(pipeline, None).await; + // Found posts. 
let mut posts: Vec = Vec::new(); - let mut comments: Vec = Vec::new(); // todo: change to `Comments` type when they're implemented + // Found comments. + // TODO: Change this to a Comment vector once commenting is implemented. + let comments: Vec = Vec::new(); - let mut content_after: Option = None; + // The two cursors for retrieving subsequent data, explicitly set to `None` to start. + let mut date_after: Option = None; + let mut id_after: Option = None; match cursor { Ok(mut cursor) => { while let Some(content) = cursor.next().await { match content { Ok(content) => { + + // Convert results found to `SavedContent` type. let saved_content: SavedContent = bson::from_bson(Bson::Document(content)).map_err(|_| return Failure::Unexpected)?; - content_after = Some(saved_content.saved_at); - // If it has posts - // TODO: implement comments + + // Set the cursors with the newest found details. + date_after = Some(saved_content.saved_at); + id_after = Some(masking_key.mask(&saved_content.id)); + + // If it has posts, add them to the `posts` vector to be returned. if let Some(post) = saved_content.post { posts.push(Detail { id: masking_key.mask(&post.id), @@ -283,6 +344,8 @@ pub async fn get_content( }, }); } + + // TODO: Implement adding Comments to the comments vector once commenting is implemented. } Err(_) => return Err(Failure::Unexpected), } @@ -290,11 +353,12 @@ pub async fn get_content( } Err(_) => return Err(Failure::Unexpected), } + // Return resulting vector of either comments or posts alongside cursors needed + // to access the next set of data. success(Box::new(SavedContentDetail { - after: content_after, + after_id: id_after, + after_date: date_after, comments: comments, posts: posts, })) } - -// TODO: remove unnecessary `Serialize`s diff --git a/src/types.rs b/src/types.rs index 69879a9..de24a33 100644 --- a/src/types.rs +++ b/src/types.rs @@ -61,7 +61,7 @@ impl fmt::Display for UsernameInvalid { } } -#[derive(Deserialize, Debug, Serialize)] +#[derive(Deserialize, Debug)] pub struct SavedContent { #[serde(rename = "_id")] pub id: ObjectId, @@ -104,7 +104,7 @@ pub struct Session { pub last_used: DateTime, } -#[derive(Deserialize, Debug, Serialize)] +#[derive(Deserialize, Debug)] pub struct Post { #[serde(rename = "_id")] pub id: ObjectId, @@ -281,8 +281,6 @@ impl Serialize for Rfc3339DateTime { S: ser::Serializer, { let timestamp = self.0.try_to_rfc3339_string().map_err(|e| ser::Error::custom(format!("Error serializing: {e}")))?; - // let doc = doc! { "$date": timestamp }; timestamp.serialize(serializer) - // self.0.serialize(serializer) } } From b07546827a821030b47e85ea9e22d88884bee03d Mon Sep 17 00:00:00 2001 From: Matthew Date: Fri, 24 Feb 2023 06:57:40 -0800 Subject: [PATCH 6/7] adds openapi.yaml docs --- docs/openapi.yaml | 175 ++++++++++++++++++++++++++++++++++++++++++ src/services/saved.rs | 12 ++- src/types.rs | 2 +- 3 files changed, 184 insertions(+), 5 deletions(-) diff --git a/docs/openapi.yaml b/docs/openapi.yaml index 7a6cac3..13d50fb 100644 --- a/docs/openapi.yaml +++ b/docs/openapi.yaml @@ -302,6 +302,181 @@ paths: votes: $ref: '#/components/schemas/votes' + /users/saved/: + post: + summary: Save content (post or comment) to view later. 
+ requestBody: + required: true + content: + application/json: + schema: + type: object + required: + - content_type + - content_id + properties: + content_type: + type: string + enum: + - post + - comment + content_id: + type: string + $ref: '#/components/schemas/masked-id' + responses: + '200': + content: + application/json: + schema: + $ref: '#/components/schemas/success' + '400': + content: + application/json: + schema: + type: 'object' + properties: + BadRequest: + type: string + '401': + $ref: '#/components/responses/unauthenticated' + '409': + description: An error occurred saving the content. + content: + application/json: + schema: + type: object + required: + - error + error: + type: string + enum: + - AlreadySaved + '500': + $ref: '#/components/responses/unexpected' + delete: + summary: Delete some saved content. + requestBody: + required: true + content: + application/json: + schema: + type: object + required: + - content_type + - content_id + properties: + content_type: + type: string + enum: + - post + - comment + content_id: + type: string + $ref: '#/components/schemas/masked-id' + responses: + '200': + content: + application/json: + schema: + $ref: '#/components/schemas/success' + '400': + content: + application/json: + schema: + type: 'object' + properties: + BadRequest: + type: string + '401': + $ref: '#/components/responses/unauthenticated' + '500': + $ref: '#/components/responses/unexpected' + get: + summary: View the paginated saved user content. + parameters: + - in: query + name: filter + schema: + type: string + example: post + enum: + - post + - comment + description: Allows you to choose between fetching comments or posts. + - in: query + name: after_date + schema: + type: string + example: "2023-02-24T12:22:37.989Z" + description: Allows you to return content with a saved-date after this. + - in: query + name: after_id + schema: + type: string + $ref: '#/components/schemas/masked-id' + description: Helps in ensuring no content is skipped (as could happen using just the `after_date`). + responses: + '200': + content: + application/json: + schema: + type: object + required: + - after_date + - after_id + properties: + after_date: + type: string + example: "2023-02-24T12:21:54.061Z" + after_id: + type: string + $ref: '#/components/schemas/masked-id' + posts: + type: array + items: + type: object + required: + - id + - sequential_id + - reply_context + - text + - created_at + - votes + properties: + id: + $ref: '#/components/schemas/masked-id' + sequential_id: + $ref: '#/components/schemas/masked-sequential-id' + reply_context: + anyOf: + - type: 'null' + - type: object + required: + - id + properties: + id: + $ref: '#/components/schemas/masked-id' + text: + type: string + created_at: + type: string + votes: + $ref: '#/components/schemas/votes' + comments: + type: string + example: to be implemented + '400': + content: + application/json: + schema: + type: 'object' + properties: + BadRequest: + type: string + '401': + $ref: '#/components/responses/unauthenticated' + '500': + $ref: '#/components/responses/unexpected' /posts/: get: summary: List posts diff --git a/src/services/saved.rs b/src/services/saved.rs index 0832723..548bea0 100644 --- a/src/services/saved.rs +++ b/src/services/saved.rs @@ -77,7 +77,7 @@ pub async fn save_content( .map_err(|masked_oid::PaddingError| Failure::BadRequest("bad masked id"))?; let collection_to_verify = match &request.content_type { - // TODO: Change this to a Comment collection once it is implemented. 
+ // TODO: Change this to a `Comment` collection once it is implemented. SavedType::Comment => db.collection::("posts"), SavedType::Post => db.collection::("posts"), }; @@ -183,13 +183,17 @@ pub struct SavedContentDetail { } -/// The query request for fetching your saved content. +/// The query request for fetching a user's saved content. /// /// Allows you to specify a `filter` to determine if you want comments or posts returned. /// /// Also allows for you to optionally add an `after_date` and `after_id` to return content /// after these 2 fields (static cursor-based pagination). Requires both because `ObjectId`s alone /// aren't fully accurate, and `after_date` isn't guaranteed to be unique. +/// +/// Technically speaking, it doesn't look for `ObjectId`s AFTER `after_id`, it just ensures +/// that it doesn't return the same `ObjectId`, hence allowing you to not miss any bits of content +/// with duplicate saved-dates. Named `after_id` for simplicity. #[get("/users/saved/")] pub async fn get_content( db: web::Data, @@ -306,7 +310,7 @@ pub async fn get_content( let mut posts: Vec = Vec::new(); // Found comments. - // TODO: Change this to a Comment vector once commenting is implemented. + // TODO: Change this to a `Comment` vector once commenting is implemented. let comments: Vec = Vec::new(); // The two cursors for retrieving subsequent data, explicitly set to `None` to start. @@ -345,7 +349,7 @@ pub async fn get_content( }); } - // TODO: Implement adding Comments to the comments vector once commenting is implemented. + // TODO: Implement adding `Comment`s to the comments vector once commenting is implemented. } Err(_) => return Err(Failure::Unexpected), } diff --git a/src/types.rs b/src/types.rs index de24a33..ccef214 100644 --- a/src/types.rs +++ b/src/types.rs @@ -73,7 +73,7 @@ pub struct SavedContent { #[serde(skip_serializing_if = "Option::is_none")] pub post: Option, #[serde(skip_serializing_if = "Option::is_none")] - pub comment: Option, // TODO: make into `Comment` + pub comment: Option, // TODO: make into `Comment` once commenting is implemented. } #[derive(Serialize, Deserialize, Debug)] From d35863cb93e6b419659661f4a38f97bcf9cefbc6 Mon Sep 17 00:00:00 2001 From: Matthew Date: Fri, 24 Feb 2023 07:26:17 -0800 Subject: [PATCH 7/7] some formatting fixes --- src/conf.rs | 2 +- src/services/saved.rs | 29 +++++++++++++++-------------- src/types.rs | 11 ++++------- 3 files changed, 20 insertions(+), 22 deletions(-) diff --git a/src/conf.rs b/src/conf.rs index 8f72e7e..0519197 100644 --- a/src/conf.rs +++ b/src/conf.rs @@ -35,4 +35,4 @@ pub const TRENDING_EPOCH: i64 = 1640995200; // 2022-01-01T00:00:00Z pub const TRENDING_DECAY: f64 = 103616.32918473207; // 45000 ln 10 /// How many comments or posts to return at once from a user's saved content. -pub const SAVED_CONTENT_PAGE_SIZE: i64 = 3; +pub const SAVED_CONTENT_PAGE_SIZE: i64 = 5; diff --git a/src/services/saved.rs b/src/services/saved.rs index 548bea0..45be4f4 100644 --- a/src/services/saved.rs +++ b/src/services/saved.rs @@ -70,7 +70,7 @@ pub async fn save_content( masking_key: web::Data<&'static MaskingKey>, ) -> ApiResult<(), SaveError> { - // First, verifies the the passed `content_id` actually exists in the + // First, verifies the passed `content_id` actually exists in the // corresponding `SavedType` (comments or posts) collection, else throw a 400. let content_id = masking_key.unmask(&request.content_id) @@ -89,12 +89,14 @@ pub async fn save_content( // Save the content to the `saved` collection. 
- let content_type_bson = to_bson(&request.content_type).map_err(|_| Failure::Unexpected)?; + let content_type_bson = to_bson(&request.content_type) + .map_err(|_| Failure::Unexpected)?; let content_object_id = masking_key.unmask(&request.content_id) - .map_err(|masked_oid::PaddingError| Failure::BadRequest("bad masked id"))?; + .map_err(|masked_oid::PaddingError| Failure::BadRequest("bad masked id"))?; - let content_object_id_bson = to_bson(&content_object_id).map_err(|_| Failure::Unexpected)?; + let content_object_id_bson = to_bson(&content_object_id) + .map_err(|_| Failure::Unexpected)?; // Document that respresents a saved bit of content. let content_to_be_saved = doc! { @@ -104,7 +106,7 @@ pub async fn save_content( "saved_at": DateTime::now(), }; - // Insert the saved content reference to the database. If it already exists (content already saved), + // Insert the saved content to the database. If it already exists (content already saved), // then throw a custom 409 error ("AlreadySaved"). If successful, return a 200. If unknown error occurs, // then throw a 500. match db.collection::("saved").insert_one(content_to_be_saved, None).await { @@ -134,7 +136,8 @@ pub async fn delete_content( // Unmasks ID of content to be deleted. let content_object_id = masking_key.unmask(&request.content_id) .map_err(|masked_oid::PaddingError| Failure::BadRequest("bad masked id"))?; - let content_id_bson = to_bson(&content_object_id).map_err(|_| Failure::Unexpected)?; + let content_id_bson = to_bson(&content_object_id) + .map_err(|_| Failure::Unexpected)?; // Document that'll match against content to be deleted. let to_delete_document = doc! { @@ -146,7 +149,7 @@ pub async fn delete_content( // then a 400 is returned because the resource didn't exist in the first place. If // something else goes wrong, a 500 is returned. match db.collection::("saved").delete_one(to_delete_document, None).await { - Ok(delete_result) => if delete_result.deleted_count == 1 {return success(())} else {return Err(Failure::BadRequest("this saved content doesn't exist"))}, + Ok(delete_result) => if delete_result.deleted_count == 1 {return success(())} else {return Err(Failure::BadRequest("this content doesn't exist"))}, Err(_) => return Err(Failure::Unexpected), } } @@ -176,9 +179,7 @@ pub struct FetchSavedRequest { pub struct SavedContentDetail { pub after_date: Option, pub after_id: Option, - #[serde(skip_serializing_if = "<[_]>::is_empty")] pub posts: Vec, - #[serde(skip_serializing_if = "<[_]>::is_empty")] pub comments: Vec, // TODO: Make for Comment once commenting is implemented. } @@ -193,7 +194,7 @@ pub struct SavedContentDetail { /// /// Technically speaking, it doesn't look for `ObjectId`s AFTER `after_id`, it just ensures /// that it doesn't return the same `ObjectId`, hence allowing you to not miss any bits of content -/// with duplicate saved-dates. Named `after_id` for simplicity. +/// with multiple saved-dates that are the exact same. Named `after_id` for simplicity. #[get("/users/saved/")] pub async fn get_content( db: web::Data, @@ -231,7 +232,7 @@ pub async fn get_content( // You can either use no filters (to get the first bit of data), or both filters (to get subsequent data). // Using only 1 filter doesn't guarentee accurate data, thus, doing so yields a 400-level response. 
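The guard rewritten in the next hunk relies on `filters_added` only ever being 0, 1, or 2 (one increment per optional query parameter), so `filters_added == 1` says the same thing as the earlier `!= 2 && != 0` form; a tiny standalone check of that equivalence:

fn main() {
    for filters_added in 0..=2 {
        // Both spellings flag exactly the "only one cursor supplied" case.
        assert_eq!(
            filters_added != 2 && filters_added != 0,
            filters_added == 1,
        );
    }
    println!("equivalent for all reachable values");
}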
- if filters_added != 2 && filters_added != 0 {return Err(Failure::BadRequest("must use no filters, or both"))} + if filters_added == 1 {return Err(Failure::BadRequest("must use no filters, or both"))} // Determines which collection to do the `$lookup`s from. let lookup_comments_or_posts = match query.filter { @@ -324,13 +325,14 @@ pub async fn get_content( Ok(content) => { // Convert results found to `SavedContent` type. - let saved_content: SavedContent = bson::from_bson(Bson::Document(content)).map_err(|_| return Failure::Unexpected)?; + let saved_content: SavedContent = bson::from_bson(Bson::Document(content)) + .map_err(|_| return Failure::Unexpected)?; // Set the cursors with the newest found details. date_after = Some(saved_content.saved_at); id_after = Some(masking_key.mask(&saved_content.id)); - // If it has posts, add them to the `posts` vector to be returned. + // If this `saved_content` has posts, add them to the `posts` vector to be returned. if let Some(post) = saved_content.post { posts.push(Detail { id: masking_key.mask(&post.id), @@ -348,7 +350,6 @@ pub async fn get_content( }, }); } - // TODO: Implement adding `Comment`s to the comments vector once commenting is implemented. } Err(_) => return Err(Failure::Unexpected), diff --git a/src/types.rs b/src/types.rs index ccef214..1dd8026 100644 --- a/src/types.rs +++ b/src/types.rs @@ -2,7 +2,7 @@ pub use self::token::{ SessionToken, SessionTokenHash, }; - +use serde::ser; use std::convert::TryFrom; use std::fmt; use std::str::{self, FromStr}; @@ -61,7 +61,7 @@ impl fmt::Display for UsernameInvalid { } } -#[derive(Deserialize, Debug)] +#[derive(Deserialize)] pub struct SavedContent { #[serde(rename = "_id")] pub id: ObjectId, @@ -76,7 +76,7 @@ pub struct SavedContent { pub comment: Option, // TODO: make into `Comment` once commenting is implemented. } -#[derive(Serialize, Deserialize, Debug)] +#[derive(Serialize, Deserialize)] #[serde(rename_all = "snake_case")] pub enum SavedType { Comment, @@ -104,7 +104,7 @@ pub struct Session { pub last_used: DateTime, } -#[derive(Deserialize, Debug)] +#[derive(Deserialize)] pub struct Post { #[serde(rename = "_id")] pub id: ObjectId, @@ -271,9 +271,6 @@ impl<'de> Deserialize<'de> for Rfc3339DateTime { } } -use serde::ser; - - // Custom serializer for `Rfc3339DateTime` (`bson::DateTime` wrapper). impl Serialize for Rfc3339DateTime { fn serialize(&self, serializer: S) -> Result