From 10704608ec87e35efdb41f4cce2bb9945ec76a71 Mon Sep 17 00:00:00 2001 From: Erdogan Yoksul Date: Wed, 18 Sep 2024 00:56:36 +0300 Subject: [PATCH 1/4] chore: refactor tests folder structure --- Cargo.toml | 1 + tests/cache_test.rs | 5 +- tests/extractors/common.rs | 20 +++ tests/extractors/html_test.rs | 33 ++++ tests/extractors/main.rs | 1 + tests/extractors/regex_test.rs | 33 ++++ tests/extractors_test.rs | 82 --------- tests/module_engines_test.rs | 66 ------- tests/module_generics_test.rs | 31 ---- tests/{common/mod.rs => modules/common.rs} | 23 +-- tests/modules/engines/bing_test.rs | 16 ++ tests/modules/engines/duckduckgo_test.rs | 20 +++ tests/modules/engines/google_test.rs | 16 ++ tests/modules/engines/mod.rs | 1 + tests/modules/engines/yahoo_test.rs | 16 ++ tests/modules/generics/mod.rs | 1 + tests/modules/generics/searchengine_test.rs | 26 +++ tests/modules/main.rs | 3 + tests/requesters/chrome_test.rs | 90 ++++++++++ tests/requesters/client_test.rs | 90 ++++++++++ tests/requesters/common.rs | 3 + tests/requesters/main.rs | 1 + tests/requesters_test.rs | 183 -------------------- tests/stubs/module/engines/yahoo.json | 2 +- 24 files changed, 375 insertions(+), 388 deletions(-) create mode 100644 tests/extractors/common.rs create mode 100644 tests/extractors/html_test.rs create mode 100644 tests/extractors/main.rs create mode 100644 tests/extractors/regex_test.rs delete mode 100644 tests/extractors_test.rs delete mode 100644 tests/module_engines_test.rs delete mode 100644 tests/module_generics_test.rs rename tests/{common/mod.rs => modules/common.rs} (65%) create mode 100644 tests/modules/engines/bing_test.rs create mode 100644 tests/modules/engines/duckduckgo_test.rs create mode 100644 tests/modules/engines/google_test.rs create mode 100644 tests/modules/engines/mod.rs create mode 100644 tests/modules/engines/yahoo_test.rs create mode 100644 tests/modules/generics/mod.rs create mode 100644 tests/modules/generics/searchengine_test.rs create mode 100644 tests/modules/main.rs create mode 100644 tests/requesters/chrome_test.rs create mode 100644 tests/requesters/client_test.rs create mode 100644 tests/requesters/common.rs create mode 100644 tests/requesters/main.rs delete mode 100644 tests/requesters_test.rs diff --git a/Cargo.toml b/Cargo.toml index 7c1d60f3..d6128edc 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -22,4 +22,5 @@ strum = "0.26.3" strum_macros = "0.26.4" [dev-dependencies] +automod = "1.0.14" stubr = "0.6.2" diff --git a/tests/cache_test.rs b/tests/cache_test.rs index 9f67729b..c5777844 100644 --- a/tests/cache_test.rs +++ b/tests/cache_test.rs @@ -1,6 +1,3 @@ -mod common; - -use common::constants::TEST_URL; use reqwest::header::{HeaderMap, HeaderValue, CONTENT_LENGTH, USER_AGENT}; use std::time::Duration; use strum::IntoEnumIterator; @@ -30,7 +27,7 @@ mod requesters { (USER_AGENT, HeaderValue::from_static("x-api-key")), (CONTENT_LENGTH, HeaderValue::from_static("10000")), ]), - proxy: Some(TEST_URL.to_string()), + proxy: Some("http://foo.com".to_string()), }; for requester in cache::ALL_REQUESTERS.values() { diff --git a/tests/extractors/common.rs b/tests/extractors/common.rs new file mode 100644 index 00000000..71809290 --- /dev/null +++ b/tests/extractors/common.rs @@ -0,0 +1,20 @@ +pub mod constants { + pub const TEST_DOMAIN: &str = "foo.com"; + pub const TEST_BAR_SUBDOMAIN: &str = "bar.foo.com"; + pub const TEST_BAZ_SUBDOMAIN: &str = "baz.foo.com"; + pub const READ_ERROR: &str = "Cannot read file!"; +} + +pub mod funcs { + use super::constants::READ_ERROR; + use 
std::fs; + use std::path::{Path, PathBuf}; + + fn testdata_path() -> PathBuf { + Path::new(env!("CARGO_MANIFEST_DIR")).join("testing/testdata") + } + + pub fn read_testdata(path: &str) -> String { + fs::read_to_string(testdata_path().join(path)).expect(READ_ERROR) + } +} diff --git a/tests/extractors/html_test.rs b/tests/extractors/html_test.rs new file mode 100644 index 00000000..5b1d8486 --- /dev/null +++ b/tests/extractors/html_test.rs @@ -0,0 +1,33 @@ +use crate::common::{ + constants::{TEST_BAR_SUBDOMAIN, TEST_BAZ_SUBDOMAIN, TEST_DOMAIN}, + funcs::read_testdata, +}; +use subscan::extractors::html::HTMLExtractor; +use subscan::interfaces::extractor::SubdomainExtractorInterface; + +#[tokio::test] +async fn extract_without_removes() { + let html = read_testdata("html/subdomains.html"); + + let selector = String::from("article > div > a > span:first-child"); + let extractor = HTMLExtractor::new(selector, vec![]); + let result = extractor.extract(html, TEST_DOMAIN.to_string()).await; + + assert_eq!(result, [TEST_BAR_SUBDOMAIN.to_string()].into()); +} + +#[tokio::test] +async fn extract_with_removes() { + let html = read_testdata("html/subdomains-with-removes.html"); + + let selector = String::from("article > div > a > span"); + let extractor = HTMLExtractor::new(selector, vec!["
".to_string()]); + let result = extractor.extract(html, TEST_DOMAIN.to_string()).await; + + let expected = [ + TEST_BAR_SUBDOMAIN.to_string(), + TEST_BAZ_SUBDOMAIN.to_string(), + ]; + + assert_eq!(result, expected.into()); +} diff --git a/tests/extractors/main.rs b/tests/extractors/main.rs new file mode 100644 index 00000000..de11f47d --- /dev/null +++ b/tests/extractors/main.rs @@ -0,0 +1 @@ +automod::dir!("tests/extractors"); diff --git a/tests/extractors/regex_test.rs b/tests/extractors/regex_test.rs new file mode 100644 index 00000000..c68b21c9 --- /dev/null +++ b/tests/extractors/regex_test.rs @@ -0,0 +1,33 @@ +use crate::common::constants::{TEST_BAR_SUBDOMAIN, TEST_BAZ_SUBDOMAIN, TEST_DOMAIN}; +use subscan::extractors::regex::RegexExtractor; +use subscan::interfaces::extractor::SubdomainExtractorInterface; + +#[tokio::test] +async fn extract_one_test() { + let extractor = RegexExtractor::default(); + + let matches = String::from(TEST_BAR_SUBDOMAIN); + let no_match = String::from("foobarbaz"); + + assert!(extractor + .extract_one(matches, TEST_DOMAIN.to_string()) + .is_some()); + assert!(extractor + .extract_one(no_match, TEST_DOMAIN.to_string()) + .is_none()); +} + +#[tokio::test] +async fn extract_test() { + let content = String::from("bar.foo.com\nbaz.foo.com"); + + let extractor = RegexExtractor::default(); + let result = extractor.extract(content, TEST_DOMAIN.to_string()).await; + + let expected = [ + TEST_BAR_SUBDOMAIN.to_string(), + TEST_BAZ_SUBDOMAIN.to_string(), + ]; + + assert_eq!(result, expected.into()); +} diff --git a/tests/extractors_test.rs b/tests/extractors_test.rs deleted file mode 100644 index 8c7e46f5..00000000 --- a/tests/extractors_test.rs +++ /dev/null @@ -1,82 +0,0 @@ -mod common; - -use common::{ - constants::{TEST_BAR_SUBDOMAIN, TEST_BAZ_SUBDOMAIN, TEST_DOMAIN}, - funcs::read_testdata, -}; -use subscan::interfaces::extractor::SubdomainExtractorInterface; - -#[cfg(test)] -mod html { - use super::*; - use subscan::extractors::html::HTMLExtractor; - - #[tokio::test] - async fn extract_without_removes() { - let html = read_testdata("html/subdomains.html"); - - let selector = String::from("article > div > a > span:first-child"); - let extractor = HTMLExtractor::new(selector, vec![]); - let result = extractor.extract(html, TEST_DOMAIN.to_string()).await; - - assert_eq!(result, [TEST_BAR_SUBDOMAIN.to_string()].into()); - } - - #[tokio::test] - async fn extract_with_removes() { - let html = read_testdata("html/subdomains-with-removes.html"); - - let selector = String::from("article > div > a > span"); - let extractor = HTMLExtractor::new(selector, vec!["
".to_string()]); - let result = extractor.extract(html, TEST_DOMAIN.to_string()).await; - - assert_eq!(result.len(), 2); - assert_eq!( - result, - [ - TEST_BAR_SUBDOMAIN.to_string(), - TEST_BAZ_SUBDOMAIN.to_string() - ] - .into() - ); - } -} - -#[cfg(test)] -mod regex { - use super::*; - use subscan::extractors::regex::RegexExtractor; - - #[tokio::test] - async fn extract_one_test() { - let extractor = RegexExtractor::default(); - - let match_content = String::from(TEST_BAR_SUBDOMAIN); - let no_match_content = String::from("foobarbaz"); - - assert!(extractor - .extract_one(match_content, TEST_DOMAIN.to_string()) - .is_some()); - assert!(extractor - .extract_one(no_match_content, TEST_DOMAIN.to_string()) - .is_none()); - } - - #[tokio::test] - async fn extract_test() { - let content = String::from("bar.foo.com\nbaz.foo.com"); - - let extractor = RegexExtractor::default(); - let result = extractor.extract(content, TEST_DOMAIN.to_string()).await; - - assert_eq!(result.len(), 2); - assert_eq!( - result, - [ - TEST_BAR_SUBDOMAIN.to_string(), - TEST_BAZ_SUBDOMAIN.to_string(), - ] - .into() - ); - } -} diff --git a/tests/module_engines_test.rs b/tests/module_engines_test.rs deleted file mode 100644 index 38d63e89..00000000 --- a/tests/module_engines_test.rs +++ /dev/null @@ -1,66 +0,0 @@ -mod common; - -#[cfg(test)] -mod searchengine { - use super::common::constants::{TEST_BAR_SUBDOMAIN, TEST_BAZ_SUBDOMAIN, TEST_DOMAIN}; - use reqwest::Url; - use subscan::{ - cache::requesters, - enums::RequesterType, - interfaces::module::SubscanModuleInterface, - modules::engines::{bing, duckduckgo, google, yahoo}, - }; - - #[tokio::test] - #[stubr::mock("module/engines/google.json")] - async fn google_run_test() { - let mut google = google::Google::new(); - - google.url = Url::parse(stubr.path("/search").as_str()).unwrap(); - - let result = google.run(TEST_DOMAIN.to_string()).await; - - assert_eq!(google.name().await, "Google"); - assert_eq!(result, [TEST_BAR_SUBDOMAIN.to_string()].into()); - } - - #[tokio::test] - #[stubr::mock("module/engines/yahoo.json")] - async fn yahoo_run_test() { - let mut yahoo = yahoo::Yahoo::new(); - - yahoo.url = Url::parse(stubr.path("/search").as_str()).unwrap(); - - let result = yahoo.run(TEST_DOMAIN.to_string()).await; - - assert_eq!(yahoo.name().await, "Yahoo"); - assert_eq!(result, [TEST_BAZ_SUBDOMAIN.to_string()].into()); - } - - #[tokio::test] - #[stubr::mock("module/engines/bing.json")] - async fn bin_run_test() { - let mut bing = bing::Bing::new(); - - bing.url = Url::parse(stubr.path("/search").as_str()).unwrap(); - - let result = bing.run(TEST_DOMAIN.to_string()).await; - - assert_eq!(bing.name().await, "Bing"); - assert_eq!(result, [TEST_BAR_SUBDOMAIN.to_string()].into()); - } - - #[tokio::test] - #[stubr::mock("module/engines/duckduckgo.json")] - async fn duckduckgo_run_test() { - let mut duckduckgo = duckduckgo::DuckDuckGo::new(); - - duckduckgo.requester = requesters::get_by_type(&RequesterType::HTTPClient); - duckduckgo.url = Url::parse(stubr.uri().as_str()).unwrap(); - - let result = duckduckgo.run(TEST_DOMAIN.to_string()).await; - - assert_eq!(duckduckgo.name().await, "DuckDuckGo"); - assert_eq!(result, [TEST_BAR_SUBDOMAIN.to_string()].into()); - } -} diff --git a/tests/module_generics_test.rs b/tests/module_generics_test.rs deleted file mode 100644 index 2d22173d..00000000 --- a/tests/module_generics_test.rs +++ /dev/null @@ -1,31 +0,0 @@ -mod common; - -#[cfg(test)] -mod searchengine { - use super::common::{ - constants::{TEST_BAR_SUBDOMAIN, TEST_DOMAIN, 
TEST_MODULE_NAME, TEST_URL}, - mocks::generic_search_engine, - }; - use subscan::interfaces::module::SubscanModuleInterface; - - #[tokio::test] - async fn get_search_query_test() { - let module = generic_search_engine(TEST_URL); - - let mut query = module.get_search_query(TEST_DOMAIN.to_string()).await; - - assert_eq!(query.as_search_str(), "site:foo.com"); - assert_eq!(module.name().await, TEST_MODULE_NAME.to_string()); - } - - #[tokio::test] - #[stubr::mock("module/generics/search-engine.json")] - async fn run_test() { - let mut module = generic_search_engine(&stubr.path("/search")); - - let result = module.run(TEST_DOMAIN.to_string()).await; - - assert_eq!(module.name().await, TEST_MODULE_NAME.to_string()); - assert_eq!(result, [TEST_BAR_SUBDOMAIN.to_string()].into()); - } -} diff --git a/tests/common/mod.rs b/tests/modules/common.rs similarity index 65% rename from tests/common/mod.rs rename to tests/modules/common.rs index 3c725131..6f3b82c1 100644 --- a/tests/common/mod.rs +++ b/tests/modules/common.rs @@ -1,30 +1,11 @@ pub mod constants { - #![allow(dead_code)] - pub const TEST_DOMAIN: &str = "foo.com"; - pub const TEST_BAR_SUBDOMAIN: &str = "bar.foo.com"; - pub const TEST_BAZ_SUBDOMAIN: &str = "baz.foo.com"; pub const TEST_MODULE_NAME: &str = "foo-module"; pub const TEST_URL: &str = "http://foo.com"; -} - -pub mod funcs { - #![allow(dead_code)] - use std::fs; - use std::path::{Path, PathBuf}; - - const READ_ERROR: &str = "Cannot read file!"; - - fn testdata_path() -> PathBuf { - Path::new(env!("CARGO_MANIFEST_DIR")).join("testing/testdata") - } - - pub fn read_testdata(path: &str) -> String { - fs::read_to_string(testdata_path().join(path)).expect(READ_ERROR) - } + pub const TEST_DOMAIN: &str = "foo.com"; + pub const TEST_BAR_SUBDOMAIN: &str = "bar.foo.com"; } pub mod mocks { - #![allow(dead_code)] use super::constants::TEST_MODULE_NAME; use reqwest::Url; use subscan::{ diff --git a/tests/modules/engines/bing_test.rs b/tests/modules/engines/bing_test.rs new file mode 100644 index 00000000..255e2907 --- /dev/null +++ b/tests/modules/engines/bing_test.rs @@ -0,0 +1,16 @@ +use crate::common::constants::{TEST_BAR_SUBDOMAIN, TEST_DOMAIN}; +use reqwest::Url; +use subscan::{interfaces::module::SubscanModuleInterface, modules::engines::bing}; + +#[tokio::test] +#[stubr::mock("module/engines/bing.json")] +async fn bing_run_test() { + let mut bing = bing::Bing::new(); + + bing.url = Url::parse(stubr.path("/search").as_str()).unwrap(); + + let result = bing.run(TEST_DOMAIN.to_string()).await; + + assert_eq!(bing.name().await, "Bing"); + assert_eq!(result, [TEST_BAR_SUBDOMAIN.to_string()].into()); +} diff --git a/tests/modules/engines/duckduckgo_test.rs b/tests/modules/engines/duckduckgo_test.rs new file mode 100644 index 00000000..740e3a31 --- /dev/null +++ b/tests/modules/engines/duckduckgo_test.rs @@ -0,0 +1,20 @@ +use crate::common::constants::{TEST_BAR_SUBDOMAIN, TEST_DOMAIN}; +use reqwest::Url; +use subscan::{ + cache::requesters, enums::RequesterType, interfaces::module::SubscanModuleInterface, + modules::engines::duckduckgo, +}; + +#[tokio::test] +#[stubr::mock("module/engines/duckduckgo.json")] +async fn duckduckgo_run_test() { + let mut duckduckgo = duckduckgo::DuckDuckGo::new(); + + duckduckgo.requester = requesters::get_by_type(&RequesterType::HTTPClient); + duckduckgo.url = Url::parse(stubr.uri().as_str()).unwrap(); + + let result = duckduckgo.run(TEST_DOMAIN.to_string()).await; + + assert_eq!(duckduckgo.name().await, "DuckDuckGo"); + assert_eq!(result, 
[TEST_BAR_SUBDOMAIN.to_string()].into()); +} diff --git a/tests/modules/engines/google_test.rs b/tests/modules/engines/google_test.rs new file mode 100644 index 00000000..510cd1ea --- /dev/null +++ b/tests/modules/engines/google_test.rs @@ -0,0 +1,16 @@ +use crate::common::constants::{TEST_BAR_SUBDOMAIN, TEST_DOMAIN}; +use reqwest::Url; +use subscan::{interfaces::module::SubscanModuleInterface, modules::engines::google}; + +#[tokio::test] +#[stubr::mock("module/engines/google.json")] +async fn foo_test() { + let mut google = google::Google::new(); + + google.url = Url::parse(stubr.path("/search").as_str()).unwrap(); + + let result = google.run(TEST_DOMAIN.to_string()).await; + + assert_eq!(google.name().await, "Google"); + assert_eq!(result, [TEST_BAR_SUBDOMAIN.to_string()].into()); +} diff --git a/tests/modules/engines/mod.rs b/tests/modules/engines/mod.rs new file mode 100644 index 00000000..1169149e --- /dev/null +++ b/tests/modules/engines/mod.rs @@ -0,0 +1 @@ +automod::dir!("tests/modules/engines"); diff --git a/tests/modules/engines/yahoo_test.rs b/tests/modules/engines/yahoo_test.rs new file mode 100644 index 00000000..1da61818 --- /dev/null +++ b/tests/modules/engines/yahoo_test.rs @@ -0,0 +1,16 @@ +use crate::common::constants::{TEST_BAR_SUBDOMAIN, TEST_DOMAIN}; +use reqwest::Url; +use subscan::{interfaces::module::SubscanModuleInterface, modules::engines::yahoo}; + +#[tokio::test] +#[stubr::mock("module/engines/yahoo.json")] +async fn yahoo_run_test() { + let mut yahoo = yahoo::Yahoo::new(); + + yahoo.url = Url::parse(stubr.path("/search").as_str()).unwrap(); + + let result = yahoo.run(TEST_DOMAIN.to_string()).await; + + assert_eq!(yahoo.name().await, "Yahoo"); + assert_eq!(result, [TEST_BAR_SUBDOMAIN.to_string()].into()); +} diff --git a/tests/modules/generics/mod.rs b/tests/modules/generics/mod.rs new file mode 100644 index 00000000..ed175b59 --- /dev/null +++ b/tests/modules/generics/mod.rs @@ -0,0 +1 @@ +automod::dir!("tests/modules/generics"); diff --git a/tests/modules/generics/searchengine_test.rs b/tests/modules/generics/searchengine_test.rs new file mode 100644 index 00000000..a1e8ace3 --- /dev/null +++ b/tests/modules/generics/searchengine_test.rs @@ -0,0 +1,26 @@ +use crate::common::{ + constants::{TEST_BAR_SUBDOMAIN, TEST_DOMAIN, TEST_MODULE_NAME, TEST_URL}, + mocks::generic_search_engine, +}; +use subscan::interfaces::module::SubscanModuleInterface; + +#[tokio::test] +async fn get_search_query_test() { + let module = generic_search_engine(TEST_URL); + + let mut query = module.get_search_query(TEST_DOMAIN.to_string()).await; + + assert_eq!(query.as_search_str(), "site:foo.com"); + assert_eq!(module.name().await, TEST_MODULE_NAME.to_string()); +} + +#[tokio::test] +#[stubr::mock("module/generics/search-engine.json")] +async fn run_test() { + let mut module = generic_search_engine(&stubr.path("/search")); + + let result = module.run(TEST_DOMAIN.to_string()).await; + + assert_eq!(module.name().await, TEST_MODULE_NAME.to_string()); + assert_eq!(result, [TEST_BAR_SUBDOMAIN.to_string()].into()); +} diff --git a/tests/modules/main.rs b/tests/modules/main.rs new file mode 100644 index 00000000..433450fd --- /dev/null +++ b/tests/modules/main.rs @@ -0,0 +1,3 @@ +mod common; +mod engines; +mod generics; diff --git a/tests/requesters/chrome_test.rs b/tests/requesters/chrome_test.rs new file mode 100644 index 00000000..3b330b35 --- /dev/null +++ b/tests/requesters/chrome_test.rs @@ -0,0 +1,90 @@ +use crate::common::constants::TEST_URL; +use reqwest::header::{HeaderMap, 
HeaderName, HeaderValue}; +use reqwest::header::{CONTENT_LENGTH, USER_AGENT}; +use reqwest::Url; +use std::time::Duration; +use subscan::{ + enums::RequesterType, + interfaces::requester::RequesterInterface, + requesters::chrome::ChromeBrowser, + types::config::{RequesterConfig, DEFAULT_HTTP_TIMEOUT}, +}; + +#[tokio::test] +async fn chrome_configure_test() { + let mut browser = ChromeBrowser::new(); + let mut config = browser.config().await; + + let new_headers = HeaderMap::from_iter([ + (USER_AGENT, HeaderValue::from_static("foo")), + (CONTENT_LENGTH, HeaderValue::from_static("20")), + ]); + let new_config = RequesterConfig { + headers: new_headers.clone(), + timeout: Duration::from_secs(120), + proxy: Some(TEST_URL.to_string()), + }; + + assert_eq!(config.timeout, DEFAULT_HTTP_TIMEOUT); + assert_eq!(config.headers.len(), 0); + assert_eq!(config.proxy, None); + + browser.configure(new_config.clone()).await; + config = browser.config().await; + + assert_eq!(config.timeout, new_config.timeout); + assert_eq!(config.headers, new_config.headers); + assert_eq!(config.headers.len(), new_headers.len()); + assert_eq!(config.proxy, new_config.proxy); + + assert_eq!(browser.r#type().await, RequesterType::ChromeBrowser); +} + +#[tokio::test] +#[stubr::mock("hello/hello.json")] +async fn chrome_get_content_test() { + let browser = ChromeBrowser::new(); + let url = Url::parse(&stubr.path("/hello")).unwrap(); + + let content = browser.get_content(url).await.unwrap(); + + assert!(content.contains("hello")); +} + +#[tokio::test] +#[stubr::mock("hello/hello-delayed.json")] +#[should_panic] +async fn chrome_get_content_timeout_test() { + let config = RequesterConfig { + timeout: Duration::from_millis(500), + headers: HeaderMap::default(), + proxy: None, + }; + + let browser = ChromeBrowser::with_config(config); + let url = Url::parse(&stubr.path("/hello-delayed")).unwrap(); + + browser.get_content(url).await.unwrap(); +} + +#[tokio::test] +#[stubr::mock("hello/hello-with-headers.json")] +async fn chrome_get_content_extra_header_test() { + let mut config = RequesterConfig::default(); + + config.add_header( + HeaderName::from_static("x-api-key"), + HeaderValue::from_static("hello-api"), + ); + + let browser = ChromeBrowser::with_config(config); + let url = Url::parse_with_params( + &stubr.path("/hello-with-headers"), + &[("search", "site:foo.com")], + ) + .unwrap(); + + let content = browser.get_content(url).await.unwrap(); + + assert!(content.contains("hello")); +} diff --git a/tests/requesters/client_test.rs b/tests/requesters/client_test.rs new file mode 100644 index 00000000..90c31793 --- /dev/null +++ b/tests/requesters/client_test.rs @@ -0,0 +1,90 @@ +use crate::common::constants::TEST_URL; +use reqwest::header::{HeaderMap, HeaderName, HeaderValue}; +use reqwest::header::{CONTENT_LENGTH, USER_AGENT}; +use reqwest::Url; +use std::time::Duration; +use subscan::requesters::client::HTTPClient; +use subscan::{ + enums::RequesterType, + interfaces::requester::RequesterInterface, + types::config::{RequesterConfig, DEFAULT_HTTP_TIMEOUT}, +}; + +#[tokio::test] +async fn client_configure_test() { + let mut client = HTTPClient::default(); + let mut config = client.config().await; + + let new_headers = HeaderMap::from_iter([ + (USER_AGENT, HeaderValue::from_static("foo")), + (CONTENT_LENGTH, HeaderValue::from_static("20")), + ]); + let new_config = RequesterConfig { + headers: new_headers.clone(), + timeout: Duration::from_secs(120), + proxy: Some(TEST_URL.to_string()), + }; + + assert_eq!(config.timeout, 
DEFAULT_HTTP_TIMEOUT); + assert_eq!(config.headers.len(), 0); + assert_eq!(config.proxy, None); + + client.configure(new_config.clone()).await; + config = client.config().await; + + assert_eq!(config.timeout, new_config.timeout); + assert_eq!(config.headers, new_config.headers); + assert_eq!(config.headers.len(), new_headers.len()); + assert_eq!(config.proxy, new_config.proxy); + + assert_eq!(client.r#type().await, RequesterType::HTTPClient); +} + +#[tokio::test] +#[stubr::mock("hello/hello.json")] +async fn client_get_content_test() { + let client = HTTPClient::default(); + let url = Url::parse(&stubr.path("/hello")).unwrap(); + + let content = client.get_content(url).await.unwrap(); + + assert_eq!(content, "hello"); +} + +#[tokio::test] +#[stubr::mock("hello/hello-delayed.json")] +#[should_panic] +async fn client_get_content_timeout_test() { + let config = RequesterConfig { + timeout: Duration::from_millis(500), + headers: HeaderMap::default(), + proxy: None, + }; + + let client = HTTPClient::with_config(config); + let url = Url::parse(&stubr.path("/hello-delayed")).unwrap(); + + client.get_content(url).await.unwrap(); +} + +#[tokio::test] +#[stubr::mock("hello/hello-with-headers.json")] +async fn client_get_content_extra_header_test() { + let mut config = RequesterConfig::default(); + + config.add_header( + HeaderName::from_static("x-api-key"), + HeaderValue::from_static("hello-api"), + ); + + let client = HTTPClient::with_config(config); + let url = Url::parse_with_params( + &stubr.path("/hello-with-headers"), + &[("search", "site:foo.com")], + ) + .unwrap(); + + let content = client.get_content(url).await.unwrap(); + + assert_eq!(content, "hello"); +} diff --git a/tests/requesters/common.rs b/tests/requesters/common.rs new file mode 100644 index 00000000..7aebc8c9 --- /dev/null +++ b/tests/requesters/common.rs @@ -0,0 +1,3 @@ +pub mod constants { + pub const TEST_URL: &str = "http://foo.com"; +} diff --git a/tests/requesters/main.rs b/tests/requesters/main.rs new file mode 100644 index 00000000..9cf4fd92 --- /dev/null +++ b/tests/requesters/main.rs @@ -0,0 +1 @@ +automod::dir!("tests/requesters"); diff --git a/tests/requesters_test.rs b/tests/requesters_test.rs deleted file mode 100644 index 8fde905b..00000000 --- a/tests/requesters_test.rs +++ /dev/null @@ -1,183 +0,0 @@ -mod common; - -use reqwest::header::{HeaderMap, HeaderName, HeaderValue}; -use reqwest::header::{CONTENT_LENGTH, USER_AGENT}; -use reqwest::Url; -use std::time::Duration; -use subscan::{ - enums::RequesterType, - interfaces::requester::RequesterInterface, - types::config::{RequesterConfig, DEFAULT_HTTP_TIMEOUT}, -}; - -#[cfg(test)] -mod chrome { - use super::common::constants::TEST_URL; - use super::*; - use subscan::requesters::chrome::ChromeBrowser; - - #[tokio::test] - async fn chrome_configure_test() { - let mut browser = ChromeBrowser::new(); - let mut config = browser.config().await; - - let new_headers = HeaderMap::from_iter([ - (USER_AGENT, HeaderValue::from_static("foo")), - (CONTENT_LENGTH, HeaderValue::from_static("20")), - ]); - let new_config = RequesterConfig { - headers: new_headers.clone(), - timeout: Duration::from_secs(120), - proxy: Some(TEST_URL.to_string()), - }; - - assert_eq!(config.timeout, DEFAULT_HTTP_TIMEOUT); - assert_eq!(config.headers.len(), 0); - assert_eq!(config.proxy, None); - - browser.configure(new_config.clone()).await; - config = browser.config().await; - - assert_eq!(config.timeout, new_config.timeout); - assert_eq!(config.headers, new_config.headers); - 
assert_eq!(config.headers.len(), new_headers.len()); - assert_eq!(config.proxy, new_config.proxy); - - assert_eq!(browser.r#type().await, RequesterType::ChromeBrowser); - } - - #[tokio::test] - #[stubr::mock("hello/hello.json")] - async fn chrome_get_content_test() { - let browser = ChromeBrowser::new(); - let url = Url::parse(&stubr.path("/hello")).unwrap(); - - let content = browser.get_content(url).await.unwrap(); - - assert!(content.contains("hello")); - } - - #[tokio::test] - #[stubr::mock("hello/hello-delayed.json")] - #[should_panic] - async fn chrome_get_content_timeout_test() { - let config = RequesterConfig { - timeout: Duration::from_millis(500), - headers: HeaderMap::default(), - proxy: None, - }; - - let browser = ChromeBrowser::with_config(config); - let url = Url::parse(&stubr.path("/hello-delayed")).unwrap(); - - browser.get_content(url).await.unwrap(); - } - - #[tokio::test] - #[stubr::mock("hello/hello-with-headers.json")] - async fn chrome_get_content_extra_header_test() { - let mut config = RequesterConfig::default(); - - config.add_header( - HeaderName::from_static("x-api-key"), - HeaderValue::from_static("hello-api"), - ); - - let browser = ChromeBrowser::with_config(config); - let url = Url::parse_with_params( - &stubr.path("/hello-with-headers"), - &[("search", "site:foo.com")], - ) - .unwrap(); - - let content = browser.get_content(url).await.unwrap(); - - assert!(content.contains("hello")); - } -} - -#[cfg(test)] -mod client { - use super::common::constants::TEST_URL; - use super::*; - use subscan::requesters::client::HTTPClient; - - #[tokio::test] - async fn client_configure_test() { - let mut client = HTTPClient::default(); - let mut config = client.config().await; - - let new_headers = HeaderMap::from_iter([ - (USER_AGENT, HeaderValue::from_static("foo")), - (CONTENT_LENGTH, HeaderValue::from_static("20")), - ]); - let new_config = RequesterConfig { - headers: new_headers.clone(), - timeout: Duration::from_secs(120), - proxy: Some(TEST_URL.to_string()), - }; - - assert_eq!(config.timeout, DEFAULT_HTTP_TIMEOUT); - assert_eq!(config.headers.len(), 0); - assert_eq!(config.proxy, None); - - client.configure(new_config.clone()).await; - config = client.config().await; - - assert_eq!(config.timeout, new_config.timeout); - assert_eq!(config.headers, new_config.headers); - assert_eq!(config.headers.len(), new_headers.len()); - assert_eq!(config.proxy, new_config.proxy); - - assert_eq!(client.r#type().await, RequesterType::HTTPClient); - } - - #[tokio::test] - #[stubr::mock("hello/hello.json")] - async fn client_get_content_test() { - let client = HTTPClient::default(); - let url = Url::parse(&stubr.path("/hello")).unwrap(); - - let content = client.get_content(url).await.unwrap(); - - assert_eq!(content, "hello"); - } - - #[tokio::test] - #[stubr::mock("hello/hello-delayed.json")] - #[should_panic] - async fn client_get_content_timeout_test() { - let config = RequesterConfig { - timeout: Duration::from_millis(500), - headers: HeaderMap::default(), - proxy: None, - }; - - let client = HTTPClient::with_config(config); - let url = Url::parse(&stubr.path("/hello-delayed")).unwrap(); - - client.get_content(url).await.unwrap(); - } - - #[tokio::test] - #[stubr::mock("hello/hello-with-headers.json")] - async fn client_get_content_extra_header_test() { - let mut config = RequesterConfig::default(); - - config.add_header( - HeaderName::from_static("x-api-key"), - HeaderValue::from_static("hello-api"), - ); - - let client = HTTPClient::with_config(config); - let url = 
Url::parse_with_params( - &stubr.path("/hello-with-headers"), - &[("search", "site:foo.com")], - ) - .unwrap(); - - let content = client.get_content(url).await.unwrap(); - - assert_eq!(content, "hello"); - } -} diff --git a/tests/stubs/module/engines/yahoo.json b/tests/stubs/module/engines/yahoo.json index 77ca0ea2..ffa31da5 100644 --- a/tests/stubs/module/engines/yahoo.json +++ b/tests/stubs/module/engines/yahoo.json @@ -12,7 +12,7 @@ "urlPath": "/search" }, "response": { - "body": "
  1. baz.foo.com
",
+    "body": "
  1. bar.foo.com
", "headers": { "content-type": "text/html" }, From 41a099c66477047e38ef5e6424a9d69b5ff3cc36 Mon Sep 17 00:00:00 2001 From: Erdogan Yoksul Date: Wed, 18 Sep 2024 01:34:07 +0300 Subject: [PATCH 2/4] chore: minor refactor --- tests/extractors/regex_test.rs | 11 ++++------- 1 file changed, 4 insertions(+), 7 deletions(-) diff --git a/tests/extractors/regex_test.rs b/tests/extractors/regex_test.rs index c68b21c9..f7abf86f 100644 --- a/tests/extractors/regex_test.rs +++ b/tests/extractors/regex_test.rs @@ -4,17 +4,14 @@ use subscan::interfaces::extractor::SubdomainExtractorInterface; #[tokio::test] async fn extract_one_test() { + let target = String::from(TEST_DOMAIN); let extractor = RegexExtractor::default(); let matches = String::from(TEST_BAR_SUBDOMAIN); - let no_match = String::from("foobarbaz"); + let no_matches = String::from("foobarbaz"); - assert!(extractor - .extract_one(matches, TEST_DOMAIN.to_string()) - .is_some()); - assert!(extractor - .extract_one(no_match, TEST_DOMAIN.to_string()) - .is_none()); + assert!(extractor.extract_one(matches, target.clone()).is_some()); + assert!(extractor.extract_one(no_matches, target).is_none()); } #[tokio::test] From b8d313125ccb64698e7e87b68dbbd94dc4271bca Mon Sep 17 00:00:00 2001 From: Erdogan Yoksul Date: Wed, 18 Sep 2024 01:38:41 +0300 Subject: [PATCH 3/4] chore: minor refactor --- tests/cache_test.rs | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/tests/cache_test.rs b/tests/cache_test.rs index c5777844..e8fdf1ec 100644 --- a/tests/cache_test.rs +++ b/tests/cache_test.rs @@ -10,6 +10,8 @@ mod requesters { types::config::RequesterConfig, }; + const TEST_URL: &str = "http://foo.com"; + #[tokio::test] async fn get_by_type_test() { for rtype in RequesterType::iter() { @@ -27,7 +29,7 @@ mod requesters { (USER_AGENT, HeaderValue::from_static("x-api-key")), (CONTENT_LENGTH, HeaderValue::from_static("10000")), ]), - proxy: Some("http://foo.com".to_string()), + proxy: Some(TEST_URL.to_string()), }; for requester in cache::ALL_REQUESTERS.values() { From b60e5218131ecc88c3c23061e66ed5bbd560130d Mon Sep 17 00:00:00 2001 From: Erdogan Yoksul Date: Wed, 18 Sep 2024 01:43:45 +0300 Subject: [PATCH 4/4] chore: minor refactor --- tests/cache_test.rs | 17 ++++++++++++----- 1 file changed, 12 insertions(+), 5 deletions(-) diff --git a/tests/cache_test.rs b/tests/cache_test.rs index e8fdf1ec..2d3fafe9 100644 --- a/tests/cache_test.rs +++ b/tests/cache_test.rs @@ -1,17 +1,24 @@ -use reqwest::header::{HeaderMap, HeaderValue, CONTENT_LENGTH, USER_AGENT}; +use reqwest::header::{HeaderMap, CONTENT_LENGTH, USER_AGENT}; use std::time::Duration; use strum::IntoEnumIterator; +mod constants { + use reqwest::header::HeaderValue; + + pub const TEST_URL: &str = "http://foo.com"; + pub const USER_AGENT_VALUE: HeaderValue = HeaderValue::from_static("x-api-key"); + pub const CONTENT_LENGTH_VALUE: HeaderValue = HeaderValue::from_static("10000"); +} + #[cfg(test)] mod requesters { + use super::constants::{CONTENT_LENGTH_VALUE, TEST_URL, USER_AGENT_VALUE}; use super::*; use subscan::{ cache, enums::RequesterType, interfaces::requester::RequesterInterface, types::config::RequesterConfig, }; - const TEST_URL: &str = "http://foo.com"; - #[tokio::test] async fn get_by_type_test() { for rtype in RequesterType::iter() { @@ -26,8 +33,8 @@ mod requesters { let new_config = RequesterConfig { timeout: Duration::from_secs(120), headers: HeaderMap::from_iter([ - (USER_AGENT, HeaderValue::from_static("x-api-key")), - (CONTENT_LENGTH, HeaderValue::from_static("10000")), + 
(USER_AGENT, USER_AGENT_VALUE), + (CONTENT_LENGTH, CONTENT_LENGTH_VALUE), ]), proxy: Some(TEST_URL.to_string()), };