diff --git a/sqlx-data.json b/sqlx-data.json
index 9eb5386e..d28348a4 100644
--- a/sqlx-data.json
+++ b/sqlx-data.json
@@ -156,6 +156,18 @@
     },
     "query": "\n UPDATE mods\n SET source_url = $1\n WHERE (id = $2)\n "
   },
+  "0461463e3e14f6c8ede5571a2905b8171e8caf4ebbd3ec844ef2cebd83980247": {
+    "describe": {
+      "columns": [],
+      "nullable": [],
+      "parameters": {
+        "Left": [
+          "Int8"
+        ]
+      }
+    },
+    "query": "\n DELETE FROM reports\n WHERE user_id = $1 OR reporter = $1\n "
+  },
   "0472045549758d8eef84592908c438d6222a26926f4b06865b84979fc92564ba": {
     "describe": {
       "columns": [],
@@ -2085,18 +2097,6 @@
     },
     "query": "\n SELECT mod_id FROM versions WHERE id = $1\n "
   },
-  "4c58727309e5c79cc0505e57aeba0c977f308429f97b0ed296ab3bc0ebebb435": {
-    "describe": {
-      "columns": [],
-      "nullable": [],
-      "parameters": {
-        "Left": [
-          "Int8"
-        ]
-      }
-    },
-    "query": "\n DELETE FROM reports\n WHERE user_id = $1\n "
-  },
   "4c9e2190e2a68ffc093a69aaa1fc9384957138f57ac9cd85cbc6179613c13a08": {
     "describe": {
       "columns": [
@@ -2299,26 +2299,6 @@
     },
     "query": "\n UPDATE versions\n SET version_number = $1\n WHERE (id = $2)\n "
   },
-  "5586d60c8f3d58a31e6635ffb3cb30bac389bf21b190dfd1e64a44e837f3879c": {
-    "describe": {
-      "columns": [
-        {
-          "name": "id",
-          "ordinal": 0,
-          "type_info": "Int8"
-        }
-      ],
-      "nullable": [
-        false
-      ],
-      "parameters": {
-        "Left": [
-          "Text"
-        ]
-      }
-    },
-    "query": "\n SELECT id FROM mods\n WHERE status = $1 AND queued < NOW() - INTERVAL '40 hours'\n ORDER BY updated ASC\n "
-  },
   "5627b3516fc7c3799154098a663b1586aac11b2dc736810f06630ee5d8a54946": {
     "describe": {
       "columns": [
@@ -6274,6 +6254,26 @@
     },
     "query": "\n UPDATE versions\n SET featured = $1\n WHERE (id = $2)\n "
   },
+  "e60ea75112db37d3e73812e21b1907716e4762e06aa883af878e3be82e3f87d3": {
+    "describe": {
+      "columns": [
+        {
+          "name": "id",
+          "ordinal": 0,
+          "type_info": "Int8"
+        }
+      ],
+      "nullable": [
+        false
+      ],
+      "parameters": {
+        "Left": [
+          "Int8"
+        ]
+      }
+    },
+    "query": "\n SELECT c.id FROM collections c\n WHERE c.user_id = $1\n "
+  },
   "e6db02891be261e61a25716b83c1298482eb9a04f0c026532030aeb374405f13": {
     "describe": {
       "columns": [
diff --git a/src/clickhouse/fetch.rs b/src/clickhouse/fetch.rs
index bee6fa12..7fddedd5 100644
--- a/src/clickhouse/fetch.rs
+++ b/src/clickhouse/fetch.rs
@@ -29,6 +29,13 @@ pub struct ReturnViews {
     pub total_views: u64,
 }
 
+#[derive(clickhouse::Row, Serialize, Deserialize, Clone, Debug)]
+pub struct ReturnDownloads {
+    pub time: u64,
+    pub id: u64,
+    pub total_downloads: u64,
+}
+
 // Only one of project_id or version_id should be used
 // Fetches playtimes as a Vec of ReturnPlaytimes
 pub async fn fetch_playtimes(
@@ -125,6 +132,53 @@ pub async fn fetch_views(
     Ok(query.fetch_all().await?)
 }
+// Fetches downloads as a Vec of ReturnDownloads
+pub async fn fetch_downloads(
+    projects: Option<Vec<ProjectId>>,
+    versions: Option<Vec<VersionId>>,
+    start_date: NaiveDate,
+    end_date: NaiveDate,
+    resolution_minutes: u32,
+    client: Arc<clickhouse::Client>,
+) -> Result<Vec<ReturnDownloads>, ApiError> {
+    let project_or_version = if projects.is_some() && versions.is_none() {
+        "project_id"
+    } else if versions.is_some() {
+        "version_id"
+    } else {
+        return Err(ApiError::InvalidInput(
+            "Only one of 'project_id' or 'version_id' should be used.".to_string(),
+        ));
+    };
+
+    let mut query = client
+        .query(&format!(
+            "
+            SELECT
+                toYYYYMMDDhhmmss((toStartOfInterval(recorded, toIntervalMinute(?)) AS time)),
+                {project_or_version},
+                count(id) AS total_downloads
+            FROM downloads
+            WHERE time >= toDate(?) AND time <= toDate(?)
+            AND {project_or_version} IN ?
+            GROUP BY
+                time,
+                {project_or_version}
+            "
+        ))
+        .bind(resolution_minutes)
+        .bind(start_date)
+        .bind(end_date);
+
+    if projects.is_some() {
+        query = query.bind(projects.unwrap().iter().map(|x| x.0).collect::<Vec<_>>());
+    } else if versions.is_some() {
+        query = query.bind(versions.unwrap().iter().map(|x| x.0).collect::<Vec<_>>());
+    }
+
+    Ok(query.fetch_all().await?)
+}
+
 // Fetches countries as a Vec of ReturnCountry
 pub async fn fetch_countries(
     projects: Option<Vec<ProjectId>>,
     versions: Option<Vec<VersionId>>,
diff --git a/src/database/models/user_item.rs b/src/database/models/user_item.rs
index f93a137f..a46456e8 100644
--- a/src/database/models/user_item.rs
+++ b/src/database/models/user_item.rs
@@ -1,4 +1,5 @@
 use super::ids::{ProjectId, UserId};
+use super::CollectionId;
 use crate::database::models::DatabaseError;
 use crate::models::ids::base62_impl::{parse_base62, to_base62};
 use crate::models::users::{Badges, RecipientType, RecipientWallet};
@@ -320,6 +321,30 @@ impl User {
         Ok(projects)
     }
 
+    pub async fn get_collections<'a, E>(
+        user_id: UserId,
+        exec: E,
+    ) -> Result<Vec<CollectionId>, sqlx::Error>
+    where
+        E: sqlx::Executor<'a, Database = sqlx::Postgres> + Copy,
+    {
+        use futures::stream::TryStreamExt;
+
+        let projects = sqlx::query!(
+            "
+            SELECT c.id FROM collections c
+            WHERE c.user_id = $1
+            ",
+            user_id as UserId,
+        )
+        .fetch_many(exec)
+        .try_filter_map(|e| async { Ok(e.right().map(|m| CollectionId(m.id))) })
+        .try_collect::<Vec<CollectionId>>()
+        .await?;
+
+        Ok(projects)
+    }
+
     pub async fn get_backup_codes<'a, E>(
         user_id: UserId,
         exec: E,
@@ -463,7 +488,7 @@ impl User {
         sqlx::query!(
             "
             DELETE FROM reports
-            WHERE user_id = $1
+            WHERE user_id = $1 OR reporter = $1
             ",
             id as UserId,
         )
diff --git a/src/main.rs b/src/main.rs
index 427311b4..5a6aed60 100644
--- a/src/main.rs
+++ b/src/main.rs
@@ -10,7 +10,6 @@ use crate::ratelimit::middleware::RateLimiter;
 use crate::util::cors::default_cors;
 use crate::util::env::{parse_strings_from_var, parse_var};
 use actix_web::{web, App, HttpServer};
-use chrono::{DateTime, Utc};
 use deadpool_redis::{Config, Runtime};
 use env_logger::Env;
 use log::{error, info, warn};
@@ -181,77 +180,6 @@ async fn main() -> std::io::Result<()> {
         }
     });
 
-    // Reminding moderators to review projects which have been in the queue longer than 40hr
-    let pool_ref = pool.clone();
-    let redis_ref = redis_pool.clone();
-    let webhook_message_sent = Arc::new(Mutex::new(Vec::<(
-        database::models::ProjectId,
-        DateTime<Utc>,
-    )>::new()));
-
-    scheduler.run(std::time::Duration::from_secs(10 * 60), move || {
-        let pool_ref = pool_ref.clone();
-        let redis_ref = redis_ref.clone();
-        let webhook_message_sent_ref = webhook_message_sent.clone();
-        info!("Checking reviewed projects submitted more than 40hrs ago");
-
-        async move {
-            let do_steps = async {
-                use futures::TryStreamExt;
-
-                let project_ids = sqlx::query!(
-                    "
-                    SELECT id FROM mods
-                    WHERE status = $1 AND queued < NOW() - INTERVAL '40 hours'
-                    ORDER BY updated ASC
-                    ",
-                    crate::models::projects::ProjectStatus::Processing.as_str(),
-                )
-                .fetch_many(&pool_ref)
-                .try_filter_map(|e| async {
-                    Ok(e.right().map(|m| database::models::ProjectId(m.id)))
-                })
-                .try_collect::<Vec<_>>()
-                .await?;
-
-                let mut webhook_message_sent_ref = webhook_message_sent_ref.lock().await;
-
-                webhook_message_sent_ref.retain(|x| Utc::now() - x.1 < chrono::Duration::hours(12));
-
-                for project in project_ids {
-                    if webhook_message_sent_ref.iter().any(|x| x.0 == project) { continue; }
-
-                    if let Ok(webhook_url) =
-                        dotenvy::var("MODERATION_DISCORD_WEBHOOK")
-                    {
-                        util::webhook::send_discord_webhook(
-                            project.into(),
-                            &pool_ref,
-                            &redis_ref,
-                            webhook_url,
-                            Some("<@&783155186491195394> This project has been in the queue for over 40 hours!".to_string()),
-                        )
-                        .await
-                        .ok();
-
-                        webhook_message_sent_ref.push((project, Utc::now()));
-                    }
-                }
-
-                Ok::<(), routes::ApiError>(())
-            };
-
-            if let Err(e) = do_steps.await {
-                warn!(
-                    "Checking reviewed projects submitted more than 40hrs ago failed: {:?}",
-                    e
-                );
-            }
-
-            info!("Finished checking reviewed projects submitted more than 40hrs ago");
-        }
-    });
-
     scheduler::schedule_versions(&mut scheduler, pool.clone());
 
     let download_queue = web::Data::new(DownloadQueue::new());
diff --git a/src/models/collections.rs b/src/models/collections.rs
index dc2a6284..cd8cd90c 100644
--- a/src/models/collections.rs
+++ b/src/models/collections.rs
@@ -68,6 +68,7 @@ impl From for Collection {
 pub enum CollectionStatus {
     Listed,
     Unlisted,
+    Private,
     Rejected,
     Unknown,
 }
@@ -83,6 +84,7 @@ impl CollectionStatus {
         match string {
             "listed" => CollectionStatus::Listed,
             "unlisted" => CollectionStatus::Unlisted,
+            "private" => CollectionStatus::Private,
             "rejected" => CollectionStatus::Rejected,
             _ => CollectionStatus::Unknown,
         }
@@ -91,6 +93,7 @@ impl CollectionStatus {
         match self {
             CollectionStatus::Listed => "listed",
             CollectionStatus::Unlisted => "unlisted",
+            CollectionStatus::Private => "private",
             CollectionStatus::Rejected => "rejected",
             CollectionStatus::Unknown => "unknown",
         }
@@ -100,7 +103,7 @@ impl CollectionStatus {
     pub fn is_hidden(&self) -> bool {
         match self {
             CollectionStatus::Rejected => true,
-
+            CollectionStatus::Private => true,
             CollectionStatus::Listed => false,
             CollectionStatus::Unlisted => false,
             CollectionStatus::Unknown => false,
@@ -110,6 +113,7 @@
     pub fn is_approved(&self) -> bool {
         match self {
             CollectionStatus::Listed => true,
+            CollectionStatus::Private => true,
             CollectionStatus::Unlisted => true,
             CollectionStatus::Rejected => false,
             CollectionStatus::Unknown => false,
@@ -119,6 +123,7 @@
     pub fn can_be_requested(&self) -> bool {
         match self {
             CollectionStatus::Listed => true,
+            CollectionStatus::Private => true,
             CollectionStatus::Unlisted => true,
             CollectionStatus::Rejected => false,
             CollectionStatus::Unknown => false,
diff --git a/src/models/pack.rs b/src/models/pack.rs
index c6feac60..67bb7c26 100644
--- a/src/models/pack.rs
+++ b/src/models/pack.rs
@@ -84,6 +84,7 @@ pub enum EnvType {
 #[serde(rename_all = "kebab-case")]
 pub enum PackDependency {
     Forge,
+    Neoforge,
     FabricLoader,
     QuiltLoader,
     Minecraft,
 }
@@ -100,6 +101,7 @@ impl PackDependency {
     pub fn as_str(&self) -> &'static str {
         match self {
             PackDependency::Forge => "forge",
+            PackDependency::Neoforge => "neoforge",
             PackDependency::FabricLoader => "fabric-loader",
             PackDependency::Minecraft => "minecraft",
             PackDependency::QuiltLoader => "quilt-loader",
diff --git a/src/routes/updates.rs b/src/routes/updates.rs
index 13c8e397..d3674f35 100644
--- a/src/routes/updates.rs
+++ b/src/routes/updates.rs
@@ -1,7 +1,7 @@
 use std::collections::HashMap;
 
 use actix_web::{get, web, HttpRequest, HttpResponse};
-use serde::Serialize;
+use serde::{Deserialize, Serialize};
 use sqlx::PgPool;
 
 use crate::auth::{filter_authorized_versions, get_user_from_headers, is_authorized};
@@ -16,9 +16,20 @@ pub fn config(cfg: &mut web::ServiceConfig) {
     cfg.service(forge_updates);
 }
 
+#[derive(Serialize, Deserialize)]
+pub struct NeoForge {
+    #[serde(default = "default_neoforge")]
+    pub neoforge: String,
+}
+
+fn default_neoforge() -> String {
+    "none".into()
+}
+
 #[get("{id}/forge_updates.json")]
 pub async fn forge_updates(
     req: HttpRequest,
+    web::Query(neo): web::Query<NeoForge>,
     info: web::Path<(String,)>,
     pool: web::Data<PgPool>,
     redis: web::Data<deadpool_redis::Pool>,
@@ -49,10 +60,16 @@ pub async fn forge_updates(
 
     let versions = database::models::Version::get_many(&project.versions, &**pool, &redis).await?;
 
+    let loaders = match &*neo.neoforge {
+        "only" => |x: &String| *x == "neoforge",
+        "include" => |x: &String| *x == "forge" || *x == "neoforge",
+        _ => |x: &String| *x == "forge",
+    };
+
     let mut versions = filter_authorized_versions(
         versions
             .into_iter()
-            .filter(|x| x.loaders.iter().any(|y| *y == "forge"))
+            .filter(|x| x.loaders.iter().any(loaders))
             .collect(),
         &user_option,
         &pool,
diff --git a/src/routes/v2/analytics_get.rs b/src/routes/v2/analytics_get.rs
index 21f80b31..d09932a9 100644
--- a/src/routes/v2/analytics_get.rs
+++ b/src/routes/v2/analytics_get.rs
@@ -23,6 +23,7 @@ pub fn config(cfg: &mut web::ServiceConfig) {
         web::scope("analytics")
             .service(playtimes_get)
             .service(views_get)
+            .service(downloads_get)
             .service(countries_downloads_get)
             .service(countries_views_get),
     );
@@ -36,8 +37,8 @@ pub struct GetData {
     // only one of project_ids or version_ids should be used
     // if neither are provided, all projects the user has access to will be used
-    pub project_ids: Option<Vec<String>>,
-    pub version_ids: Option<Vec<String>>,
+    pub project_ids: Option<String>,
+    pub version_ids: Option<String>,
 
     pub start_date: Option<NaiveDate>, // defaults to 2 weeks ago
     pub end_date: Option<NaiveDate>,   // defaults to now
@@ -66,7 +67,7 @@ pub struct FetchedPlaytime {
 pub async fn playtimes_get(
     req: HttpRequest,
     clickhouse: web::Data<clickhouse::Client>,
-    data: web::Json<GetData>,
+    data: web::Query<GetData>,
     session_queue: web::Data<AuthQueue>,
     pool: web::Data<PgPool>,
     redis: web::Data<deadpool_redis::Pool>,
@@ -82,8 +83,16 @@ pub async fn playtimes_get(
     .map(|x| x.1)
     .ok();
 
-    let project_ids = data.project_ids.clone();
-    let version_ids = data.version_ids.clone();
+    let project_ids = data
+        .project_ids
+        .as_ref()
+        .map(|ids| serde_json::from_str::<Vec<String>>(ids))
+        .transpose()?;
+    let version_ids = data
+        .version_ids
+        .as_ref()
+        .map(|ids| serde_json::from_str::<Vec<String>>(ids))
+        .transpose()?;
 
     if project_ids.is_some() && version_ids.is_some() {
         return Err(ApiError::InvalidInput(
@@ -141,7 +150,7 @@ pub async fn playtimes_get(
 pub async fn views_get(
     req: HttpRequest,
     clickhouse: web::Data<clickhouse::Client>,
-    data: web::Json<GetData>,
+    data: web::Query<GetData>,
     session_queue: web::Data<AuthQueue>,
     pool: web::Data<PgPool>,
     redis: web::Data<deadpool_redis::Pool>,
@@ -157,8 +166,16 @@ pub async fn views_get(
     .map(|x| x.1)
     .ok();
 
-    let project_ids = data.project_ids.clone();
-    let version_ids = data.version_ids.clone();
+    let project_ids = data
+        .project_ids
+        .as_ref()
+        .map(|ids| serde_json::from_str::<Vec<String>>(ids))
+        .transpose()?;
+    let version_ids = data
+        .version_ids
+        .as_ref()
+        .map(|ids| serde_json::from_str::<Vec<String>>(ids))
+        .transpose()?;
 
     if project_ids.is_some() && version_ids.is_some() {
         return Err(ApiError::InvalidInput(
@@ -203,6 +220,89 @@ pub async fn views_get(
     Ok(HttpResponse::Ok().json(hm))
 }
 
+/// Get download data for a set of projects or versions
+/// Data is returned as a hashmap of project/version ids to a hashmap of days to downloads
+/// eg:
+/// {
+///     "4N1tEhnO": {
+///         "20230824": 32
+///     }
+///}
+/// Either a list of project_ids or version_ids can be used, but not both. Unauthorized projects/versions will be filtered out.
+#[get("downloads")]
+pub async fn downloads_get(
+    req: HttpRequest,
+    clickhouse: web::Data<clickhouse::Client>,
+    data: web::Query<GetData>,
+    session_queue: web::Data<AuthQueue>,
+    pool: web::Data<PgPool>,
+    redis: web::Data<deadpool_redis::Pool>,
+) -> Result<HttpResponse, ApiError> {
+    let user_option = get_user_from_headers(
+        &req,
+        &**pool,
+        &redis,
+        &session_queue,
+        Some(&[Scopes::ANALYTICS]),
+    )
+    .await
+    .map(|x| x.1)
+    .ok();
+
+    let project_ids = data
+        .project_ids
+        .as_ref()
+        .map(|ids| serde_json::from_str::<Vec<String>>(ids))
+        .transpose()?;
+    let version_ids = data
+        .version_ids
+        .as_ref()
+        .map(|ids| serde_json::from_str::<Vec<String>>(ids))
+        .transpose()?;
+
+    if project_ids.is_some() && version_ids.is_some() {
+        return Err(ApiError::InvalidInput(
+            "Only one of 'project_ids' or 'version_ids' should be used.".to_string(),
+        ));
+    }
+
+    let start_date = data
+        .start_date
+        .unwrap_or(Utc::now().naive_utc().date() - Duration::weeks(2));
+    let end_date = data.end_date.unwrap_or(Utc::now().naive_utc().date());
+    let resolution_minutes = data.resolution_minutes.unwrap_or(60 * 24);
+
+    // Convert String list to list of ProjectIds or VersionIds
+    // - Filter out unauthorized projects/versions
+    // - If no project_ids or version_ids are provided, we default to all projects the user has access to
+    let (project_ids, version_ids) =
+        filter_allowed_ids(project_ids, version_ids, user_option, &pool, &redis).await?;
+
+    // Get the downloads
+    let downloads = crate::clickhouse::fetch_downloads(
+        project_ids,
+        version_ids,
+        start_date,
+        end_date,
+        resolution_minutes,
+        clickhouse.into_inner(),
+    )
+    .await?;
+
+    let mut hm = HashMap::new();
+    for downloads in downloads {
+        let id_string = to_base62(downloads.id);
+        if !hm.contains_key(&id_string) {
+            hm.insert(id_string.clone(), HashMap::new());
+        }
+        if let Some(hm) = hm.get_mut(&id_string) {
+            hm.insert(downloads.time.to_string(), downloads.total_downloads);
+        }
+    }
+
+    Ok(HttpResponse::Ok().json(hm))
+}
+
 /// Get country data for a set of projects or versions
 /// Data is returned as a hashmap of project/version ids to a hashmap of coutnry to downloads.
 /// Unknown countries are labeled "".
@@ -219,7 +319,7 @@ pub async fn views_get(
 pub async fn countries_downloads_get(
     req: HttpRequest,
     clickhouse: web::Data<clickhouse::Client>,
-    data: web::Json<GetData>,
+    data: web::Query<GetData>,
     session_queue: web::Data<AuthQueue>,
     pool: web::Data<PgPool>,
     redis: web::Data<deadpool_redis::Pool>,
@@ -235,8 +335,16 @@ pub async fn countries_downloads_get(
     .map(|x| x.1)
     .ok();
 
-    let project_ids = data.project_ids.clone();
-    let version_ids = data.version_ids.clone();
+    let project_ids = data
+        .project_ids
+        .as_ref()
+        .map(|ids| serde_json::from_str::<Vec<String>>(ids))
+        .transpose()?;
+    let version_ids = data
+        .version_ids
+        .as_ref()
+        .map(|ids| serde_json::from_str::<Vec<String>>(ids))
+        .transpose()?;
 
     if project_ids.is_some() && version_ids.is_some() {
         return Err(ApiError::InvalidInput(
@@ -295,7 +403,7 @@ pub async fn countries_downloads_get(
 pub async fn countries_views_get(
     req: HttpRequest,
     clickhouse: web::Data<clickhouse::Client>,
-    data: web::Json<GetData>,
+    data: web::Query<GetData>,
     session_queue: web::Data<AuthQueue>,
     pool: web::Data<PgPool>,
     redis: web::Data<deadpool_redis::Pool>,
@@ -311,8 +419,16 @@ pub async fn countries_views_get(
     .map(|x| x.1)
     .ok();
 
-    let project_ids = data.project_ids.clone();
-    let version_ids = data.version_ids.clone();
+    let project_ids = data
+        .project_ids
+        .as_ref()
+        .map(|ids| serde_json::from_str::<Vec<String>>(ids))
+        .transpose()?;
+    let version_ids = data
+        .version_ids
+        .as_ref()
+        .map(|ids| serde_json::from_str::<Vec<String>>(ids))
+        .transpose()?;
 
     if project_ids.is_some() && version_ids.is_some() {
         return Err(ApiError::InvalidInput(
diff --git a/src/routes/v2/project_creation.rs b/src/routes/v2/project_creation.rs
index 22a180c7..a0e057d8 100644
--- a/src/routes/v2/project_creation.rs
+++ b/src/routes/v2/project_creation.rs
@@ -883,20 +883,6 @@ async fn project_create_inner(
             monetization_status: MonetizationStatus::Monetized,
         };
 
-        if status == ProjectStatus::Processing {
-            if let Ok(webhook_url) = dotenvy::var("MODERATION_DISCORD_WEBHOOK") {
-                crate::util::webhook::send_discord_webhook(
-                    response.id,
-                    pool,
-                    redis,
-                    webhook_url,
-                    None,
-                )
-                .await
-                .ok();
-            }
-        }
-
         Ok(HttpResponse::Ok().json(response))
     }
 }
diff --git a/src/routes/v2/projects.rs b/src/routes/v2/projects.rs
index 1e7f8484..c0b621af 100644
--- a/src/routes/v2/projects.rs
+++ b/src/routes/v2/projects.rs
@@ -498,18 +498,6 @@ pub async fn project_edit(
             )
             .execute(&mut *transaction)
             .await?;
-
-            if let Ok(webhook_url) = dotenvy::var("MODERATION_DISCORD_WEBHOOK") {
-                crate::util::webhook::send_discord_webhook(
-                    project_item.inner.id.into(),
-                    &pool,
-                    &redis,
-                    webhook_url,
-                    None,
-                )
-                .await
-                .ok();
-            }
         }
 
         if status.is_approved() && !project_item.inner.status.is_approved() {
@@ -550,6 +538,26 @@ pub async fn project_edit(
             }
         }
 
+        if user.role.is_mod() {
+            if let Ok(webhook_url) = dotenvy::var("MODERATION_DISCORD_WEBHOOK") {
+                crate::util::webhook::send_discord_webhook(
+                    project_item.inner.id.into(),
+                    &pool,
+                    &redis,
+                    webhook_url,
+                    Some(
+                        format!(
+                            "**[{}]({}/user/{})** changed project status from **{}** to **{}**",
+                            user.username, dotenvy::var("SITE_URL")?, user.username, &project_item.inner.status, status
+                        )
+                        .to_string(),
+                    ),
+                )
+                .await
+                .ok();
+            }
+        }
+
         if team_member.map(|x| !x.accepted).unwrap_or(true) {
             let notified_members = sqlx::query!(
                 "
diff --git a/src/routes/v2/threads.rs b/src/routes/v2/threads.rs
index 8ec21805..c2e6c096 100644
--- a/src/routes/v2/threads.rs
+++ b/src/routes/v2/threads.rs
@@ -412,11 +412,9 @@ pub async fn thread_send_message(
                 )
                 .await?;
             }
-
-            project.inner.status == ProjectStatus::Processing && !user.role.is_mod()
-        } else {
-            !user.role.is_mod()
         }
+
+        !user.role.is_mod()
     } else if let Some(report_id) = thread.report_id {
         let report = database::models::report_item::Report::get(report_id, &**pool).await?;
diff --git a/src/routes/v2/users.rs b/src/routes/v2/users.rs
index ef590adb..6adfe6a8 100644
--- a/src/routes/v2/users.rs
+++ b/src/routes/v2/users.rs
@@ -1,6 +1,7 @@
 use crate::auth::{get_user_from_headers, AuthenticationError};
 use crate::database::models::User;
 use crate::file_hosting::FileHost;
+use crate::models::collections::{Collection, CollectionStatus};
 use crate::models::notifications::Notification;
 use crate::models::pats::Scopes;
 use crate::models::projects::Project;
@@ -30,6 +31,7 @@ pub fn config(cfg: &mut web::ServiceConfig) {
         web::scope("user")
             .service(user_get)
             .service(projects_list)
+            .service(collections_list)
             .service(user_delete)
             .service(user_edit)
             .service(user_icon_edit)
@@ -157,6 +159,50 @@ pub async fn projects_list(
     }
 }
 
+#[get("{user_id}/collections")]
+pub async fn collections_list(
+    req: HttpRequest,
+    info: web::Path<(String,)>,
+    pool: web::Data<PgPool>,
+    redis: web::Data<deadpool_redis::Pool>,
+    session_queue: web::Data<AuthQueue>,
+) -> Result<HttpResponse, ApiError> {
+    let user = get_user_from_headers(
+        &req,
+        &**pool,
+        &redis,
+        &session_queue,
+        Some(&[Scopes::COLLECTION_READ]),
+    )
+    .await
+    .map(|x| x.1)
+    .ok();
+
+    let id_option = User::get(&info.into_inner().0, &**pool, &redis).await?;
+
+    if let Some(id) = id_option.map(|x| x.id) {
+        let user_id: UserId = id.into();
+
+        let can_view_private = user
+            .map(|y| y.role.is_mod() || y.id == user_id)
+            .unwrap_or(false);
+
+        let project_data = User::get_collections(id, &**pool).await?;
+
+        let response: Vec<_> =
+            crate::database::models::Collection::get_many(&project_data, &**pool, &redis)
+                .await?
+                .into_iter()
+                .filter(|x| can_view_private || matches!(x.status, CollectionStatus::Listed))
+                .map(Collection::from)
+                .collect();
+
+        Ok(HttpResponse::Ok().json(response))
+    } else {
+        Ok(HttpResponse::NotFound().body(""))
+    }
+}
+
 lazy_static! {
     static ref RE_URL_SAFE: Regex = Regex::new(r"^[a-zA-Z0-9_-]*$").unwrap();
 }
diff --git a/src/search/indexing/mod.rs b/src/search/indexing/mod.rs
index cea9dd02..d0cd58d7 100644
--- a/src/search/indexing/mod.rs
+++ b/src/search/indexing/mod.rs
@@ -197,7 +197,9 @@ const DEFAULT_ATTRIBUTES_FOR_FACETING: &[&str] = &[
     "author",
     "title",
     "date_created",
+    "created_timestamp",
     "date_modified",
+    "modified_timestamp",
     "project_id",
     "open_source",
     "color",
diff --git a/src/validate/modpack.rs b/src/validate/modpack.rs
index 67ded782..02ffd75b 100644
--- a/src/validate/modpack.rs
+++ b/src/validate/modpack.rs
@@ -85,9 +85,10 @@ impl super::Validator for ModpackValidator {
             files: archive
                 .file_names()
                 .filter(|x| {
-                    x.starts_with("overrides/mods")
-                        || x.starts_with("client-overrides/mods")
-                        || x.starts_with("server-overrides/mods")
+                    (x.ends_with("jar") || x.ends_with("zip"))
+                        && (x.starts_with("overrides/mods")
+                            || x.starts_with("client-overrides/mods")
+                            || x.starts_with("server-overrides/mods"))
                 })
                 .flat_map(|x| x.rsplit('/').next().map(|x| x.to_string()))
                 .collect::<Vec<String>>(),
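
A few illustrative sketches of the behaviour introduced by this patch follow; they are standalone examples, not code from the repository. First, the analytics handlers now take their filters from the query string (web::Query<GetData>) instead of a JSON body, and the id lists travel as a JSON array nested inside a single query parameter. A minimal sketch of that round trip, assuming only serde_json as a dependency:

fn main() -> Result<(), serde_json::Error> {
    // What a client would place in the `project_ids` parameter (before URL encoding).
    let project_ids = vec!["4N1tEhnO".to_string()];
    let raw = serde_json::to_string(&project_ids)?; // ["4N1tEhnO"]

    // What the handlers now do with the raw parameter value: Option<String> in,
    // Option<Vec<String>> out, with a parse failure surfacing through `?`.
    let parsed = Some(raw.as_str())
        .map(serde_json::from_str::<Vec<String>>)
        .transpose()?;

    assert_eq!(parsed, Some(project_ids));
    Ok(())
}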
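
downloads_get then folds the ClickHouse rows into a map keyed by base62 id and bucket timestamp. A standalone sketch of that aggregation using the entry API, which is equivalent to the contains_key/get_mut sequence in the handler; the row type and values below are invented for illustration (the id is already base62-encoded here to keep the example self-contained):

use std::collections::HashMap;

// Stand-in for the rows produced by fetch_downloads; fields mirror ReturnDownloads.
struct Row {
    time: u64,
    id: String,
    total_downloads: u64,
}

fn main() {
    let rows = vec![
        Row { time: 20230824000000, id: "4N1tEhnO".to_string(), total_downloads: 32 },
        Row { time: 20230825000000, id: "4N1tEhnO".to_string(), total_downloads: 7 },
    ];

    // base62 id -> (bucket timestamp -> download count)
    let mut hm: HashMap<String, HashMap<String, u64>> = HashMap::new();
    for row in rows {
        hm.entry(row.id)
            .or_default()
            .insert(row.time.to_string(), row.total_downloads);
    }

    assert_eq!(hm["4N1tEhnO"]["20230824000000"], 32);
    assert_eq!(hm["4N1tEhnO"].len(), 2);
}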
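
For completeness, a hypothetical client call against the new downloads endpoint. The https://api.example.com host, the /v2/analytics prefix, and the Authorization header value are assumptions, not something this diff establishes; reqwest (with the json feature), tokio, and serde_json are assumed as dependencies:

#[tokio::main]
async fn main() -> Result<(), Box<dyn std::error::Error>> {
    // project_ids is itself JSON, nested inside the query string.
    let ids = serde_json::to_string(&vec!["4N1tEhnO"])?;

    let body: serde_json::Value = reqwest::Client::new()
        .get("https://api.example.com/v2/analytics/downloads") // placeholder host/prefix
        .query(&[("project_ids", ids.as_str()), ("resolution_minutes", "1440")])
        .header("Authorization", "placeholder-token-with-analytics-scope")
        .send()
        .await?
        .json()
        .await?;

    // Expected shape: { "<base62 id>": { "<bucket>": <downloads> } }
    println!("{body:#}");
    Ok(())
}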
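
Finally, forge_updates.json now picks its loader predicate from the optional neoforge query parameter (defaulting to "none"). A small sketch of that selection, written as a free function returning a fn pointer so the three non-capturing closures unify into one type; any value other than "only" or "include" keeps the old Forge-only behaviour:

fn loader_filter(neoforge: &str) -> fn(&String) -> bool {
    match neoforge {
        "only" => |x: &String| *x == "neoforge",
        "include" => |x: &String| *x == "forge" || *x == "neoforge",
        _ => |x: &String| *x == "forge",
    }
}

fn main() {
    let loaders = vec!["forge".to_string()];

    // A plain Forge version is still served by default and with "include",
    // but filtered out when the caller asks for NeoForge only.
    assert!(loaders.iter().any(loader_filter("none")));
    assert!(loaders.iter().any(loader_filter("include")));
    assert!(!loaders.iter().any(loader_filter("only")));
}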