From 4bec35d4a401223f02664f68eb383524f579c4c2 Mon Sep 17 00:00:00 2001
From: six2dez
Date: Mon, 22 May 2023 12:58:05 +0200
Subject: [PATCH 01/40] dropping unimap
---
README.md | 2 +-
install.sh | 12 ------------
reconftw.sh | 26 --------------------------
3 files changed, 1 insertion(+), 39 deletions(-)
diff --git a/README.md b/README.md
index 1c69cf35..9c42dc16 100644
--- a/README.md
+++ b/README.md
@@ -513,7 +513,7 @@ reset='\033[0m'
## Webs
-- Web Prober ([httpx](https://github.com/projectdiscovery/httpx) and [unimap](https://github.com/Edu4rdSHL/unimap))
+- Web Prober ([httpx](https://github.com/projectdiscovery/httpx))
- Web screenshoting ([webscreenshot](https://github.com/maaaaz/webscreenshot) or [gowitness](https://github.com/sensepost/gowitness))
- Web templates scanner ([nuclei](https://github.com/projectdiscovery/nuclei) and [nuclei geeknik](https://github.com/geeknik/the-nuclei-templates.git))
- CMS Scanner ([CMSeeK](https://github.com/Tuhinshubhra/CMSeeK))
diff --git a/install.sh b/install.sh
index 61a9e6ce..91ef3f7d 100755
--- a/install.sh
+++ b/install.sh
@@ -515,41 +515,29 @@ done
if [ "True" = "$IS_ARM" ]; then
if [ "True" = "$RPI_3" ]; then
- eval wget -N -c https://github.com/Edu4rdSHL/unimap/releases/latest/download/unimap-armv7 $DEBUG_STD
eval wget -N -c https://github.com/dwisiswant0/ppfuzz/releases/download/v1.0.1/ppfuzz-v1.0.1-armv7-unknown-linux-gnueabihf.tar.gz $DEBUG_STD
eval $SUDO tar -C /usr/local/bin/ -xzf ppfuzz-v1.0.1-armv7-unknown-linux-gnueabihf.tar.gz $DEBUG_STD
eval $SUDO rm -rf ppfuzz-v1.0.1-armv7-unknown-linux-gnueabihf.tar.gz $DEBUG_STD
- eval $SUDO mv unimap-armv7 /usr/local/bin/unimap
elif [ "True" = "$RPI_4" ] || [ "True" = "$IS_MAC" ]; then
- eval wget -N -c https://github.com/Edu4rdSHL/unimap/releases/latest/download/unimap-aarch64 $DEBUG_STD
eval wget -N -c https://github.com/dwisiswant0/ppfuzz/releases/download/v1.0.1/ppfuzz-v1.0.1-aarch64-unknown-linux-gnueabihf.tar.gz $DEBUG_STD
eval $SUDO tar -C /usr/local/bin/ -xzf ppfuzz-v1.0.1-aarch64-unknown-linux-gnueabihf.tar.gz $DEBUG_STD
eval $SUDO rm -rf ppfuzz-v1.0.1-aarch64-unknown-linux-gnueabihf.tar.gz $DEBUG_STD
- eval $SUDO mv unimap-aarch64 /usr/local/bin/unimap
fi
elif [ "True" = "$IS_MAC" ]; then
if [ "True" = "$IS_ARM" ]; then
- eval wget -N -c https://github.com/Edu4rdSHL/unimap/releases/latest/download/unimap-armv7 $DEBUG_STD
eval wget -N -c https://github.com/dwisiswant0/ppfuzz/releases/download/v1.0.1/ppfuzz-v1.0.1-armv7-unknown-linux-gnueabihf.tar.gz $DEBUG_STD
eval $SUDO tar -C /usr/local/bin/ -xzf ppfuzz-v1.0.1-armv7-unknown-linux-gnueabihf.tar.gz $DEBUG_STD
eval $SUDO rm -rf ppfuzz-v1.0.1-armv7-unknown-linux-gnueabihf.tar.gz $DEBUG_STD
- eval $SUDO mv unimap-armv7 /usr/local/bin/unimap
else
- eval wget -N -c https://github.com/Edu4rdSHL/unimap/releases/latest/download/unimap-osx $DEBUG_STD
eval wget -N -c https://github.com/dwisiswant0/ppfuzz/releases/download/v1.0.1/ppfuzz-v1.0.1-x86_64-apple-darwin.tar.gz $DEBUG_STD
eval $SUDO tar -C /usr/local/bin/ -xzf ppfuzz-v1.0.1-x86_64-apple-darwin.tar.gz $DEBUG_STD
eval $SUDO rm -rf ppfuzz-v1.0.1-x86_64-apple-darwin.tar.gz $DEBUG_STD
- eval $SUDO mv unimap-osx /usr/local/bin/unimap
fi
else
- eval wget -N -c https://github.com/Edu4rdSHL/unimap/releases/download/0.4.0/unimap-linux $DEBUG_STD
eval wget -N -c https://github.com/dwisiswant0/ppfuzz/releases/download/v1.0.1/ppfuzz-v1.0.1-x86_64-unknown-linux-musl.tar.gz $DEBUG_STD
eval $SUDO tar -C /usr/local/bin/ -xzf ppfuzz-v1.0.1-x86_64-unknown-linux-musl.tar.gz $DEBUG_STD
eval $SUDO rm -rf ppfuzz-v1.0.1-x86_64-unknown-linux-musl.tar.gz $DEBUG_STD
- eval $SUDO mv unimap-linux /usr/local/bin/unimap
fi
-eval $SUDO chmod 755 /usr/local/bin/unimap
-eval $SUDO strip -s /usr/local/bin/unimap $DEBUG_STD
eval $SUDO chmod 755 /usr/local/bin/ppfuzz
eval $SUDO strip -s /usr/local/bin/ppfuzz $DEBUG_STD
eval notify $DEBUG_STD
diff --git a/reconftw.sh b/reconftw.sh
index 821711c7..69360945 100755
--- a/reconftw.sh
+++ b/reconftw.sh
@@ -91,7 +91,6 @@ function tools_installed(){
which notify &>/dev/null || { printf "${bred} [*] notify [NO]${reset}\n${reset}"; allinstalled=false;}
which dalfox &>/dev/null || { printf "${bred} [*] dalfox [NO]${reset}\n${reset}"; allinstalled=false;}
which puredns &>/dev/null || { printf "${bred} [*] puredns [NO]${reset}\n${reset}"; allinstalled=false;}
- which unimap &>/dev/null || { printf "${bred} [*] unimap [NO]${reset}\n${reset}"; allinstalled=false;}
which emailfinder &>/dev/null || { printf "${bred} [*] emailfinder [NO]${reset}\n"; allinstalled=false;}
which analyticsrelationships &>/dev/null || { printf "${bred} [*] analyticsrelationships [NO]${reset}\n"; allinstalled=false;}
which mapcidr &>/dev/null || { printf "${bred} [*] mapcidr [NO]${reset}\n"; allinstalled=false;}
@@ -979,30 +978,6 @@ function webprobe_full(){
if { [ ! -f "$called_fn_dir/.${FUNCNAME[0]}" ] || [ "$DIFF" = true ]; } && [ "$WEBPROBEFULL" = true ]; then
start_func ${FUNCNAME[0]} "Http probing non standard ports"
if [ -s "subdomains/subdomains.txt" ]; then
- if [ "$NMAP_WEBPROBE" = true ]; then
- if [[ $domain =~ ^[0-9]+\.[0-9]+\.[0-9]+\.[0-9] ]]; then
- $SUDO nmap -iL subdomains/subdomains.txt -p $UNCOMMON_PORTS_WEB -oG .tmp/uncommon_nmap.gnmap 2>>"$LOGFILE" &>/dev/null
- cat .tmp/uncommon_nmap.gnmap | egrep -v "^#|Status: Up" | cut -d' ' -f2,4- | grep "open" | sed -e 's/\/.*$//g' | sed -e "s/ /:/g" | sort -u | anew -q .tmp/nmap_uncommonweb.txt
- else
- if [ ! "$AXIOM" = true ]; then
- $SUDO unimap --fast-scan -f subdomains/subdomains.txt --ports $UNCOMMON_PORTS_WEB -q -k --url-output 2>>"$LOGFILE" | anew -q .tmp/nmap_uncommonweb.txt
- else
- axiom-scan subdomains/subdomains.txt -m unimap --fast-scan --ports $UNCOMMON_PORTS_WEB -q -k --url-output -o .tmp/nmap_uncommonweb.txt $AXIOM_EXTRA_ARGS 2>>"$LOGFILE" &>/dev/null
- fi
- fi
- fi
- fi
- if [ "$NMAP_WEBPROBE" = true ]; then
- if [ ! "$AXIOM" = true ]; then
- if [ -s ".tmp/nmap_uncommonweb.txt" ]; then
- cat .tmp/nmap_uncommonweb.txt | httpx -follow-host-redirects -random-agent -status-code -threads $HTTPX_UNCOMMONPORTS_THREADS -timeout $HTTPX_UNCOMMONPORTS_TIMEOUT -silent -retries 2 -title -web-server -tech-detect -location -no-color -json -o .tmp/web_full_info_uncommon.txt 2>>"$LOGFILE" &>/dev/null
- fi
- else
- if [ -s ".tmp/nmap_uncommonweb.txt" ]; then
- axiom-scan .tmp/nmap_uncommonweb.txt -m httpx -follow-host-redirects -H \"${HEADER}\" -status-code -threads $HTTPX_UNCOMMONPORTS_THREADS -timeout $HTTPX_UNCOMMONPORTS_TIMEOUT -silent -retries 2 -title -web-server -tech-detect -location -no-color -json -o .tmp/web_full_info_uncommon.txt $AXIOM_EXTRA_ARGS 2>>"$LOGFILE" &>/dev/null
- fi
- fi
- else
if [ ! "$AXIOM" = true ]; then
if [ -s "subdomains/subdomains.txt" ]; then
cat subdomains/subdomains.txt | httpx -follow-host-redirects -random-agent -status-code -p $UNCOMMON_PORTS_WEB -threads $HTTPX_UNCOMMONPORTS_THREADS -timeout $HTTPX_UNCOMMONPORTS_TIMEOUT -silent -retries 2 -title -web-server -tech-detect -location -no-color -json -o .tmp/web_full_info_uncommon.txt 2>>"$LOGFILE" &>/dev/null
@@ -1026,7 +1001,6 @@ function webprobe_full(){
notification "Uncommon web ports: ${NUMOFLINES} new websites" good
[ -s "webs/webs_uncommon_ports.txt" ] && cat webs/webs_uncommon_ports.txt
cat webs/webs.txt webs/webs_uncommon_ports.txt 2>/dev/null | anew -q .tmp/webs_all.txt
- rm -rf "unimap_logs" 2>>"$LOGFILE"
end_func "Results are saved in $domain/webs/webs_uncommon_ports.txt" ${FUNCNAME[0]}
if [ "$PROXY" = true ] && [ -n "$proxy_url" ] && [[ $(cat webs/webs_uncommon_ports.txt| wc -l) -le $DEEP_LIMIT2 ]]; then
notification "Sending websites with uncommon ports to proxy" info
From 1c07defd66966fb18a51351ad0b6525ac0284fed Mon Sep 17 00:00:00 2001
From: six2dez
Date: Fri, 26 May 2023 11:12:46 +0200
Subject: [PATCH 02/40] gitleaks + improved GH repos analysis
---
install.sh | 31 ++++---------------------------
reconftw.sh | 10 +++++++---
2 files changed, 11 insertions(+), 30 deletions(-)
diff --git a/install.sh b/install.sh
index 91ef3f7d..2a8b386e 100755
--- a/install.sh
+++ b/install.sh
@@ -116,6 +116,7 @@ repos["regulator"]="cramppet/regulator"
repos["byp4xx"]="lobuhi/byp4xx"
repos["Infoga"]="m4ll0k/Infoga"
repos["ghauri"]="r0oth3x49/ghauri"
+repos["gitleaks"]="gitleaks/gitleaks"
function banner_web(){
@@ -157,35 +158,8 @@ install_webserver(){
printf "${yellow} Installing Requirements...${reset}\n\n"
$SUDO pip3 install -r $SCRIPTPATH/web/requirements.txt &>/dev/null
- #$SUDO virtualenv web/env &>/dev/null
- #$SUDO source web/env/bin/activate
- #$SUDO pip3 install -r web/requirements.txt &>/dev/null
-
printf "${yellow} Installing tools...${reset}\n\n"
$SUDO apt install redis-server -y &>/dev/null
- #$SUDO apt install postgresql -y &>/dev/null
-
- # printf "${yellow} Database configuration...${reset}\n\n"
- #$SUDO service postgresql restart &>/dev/null
- #$SUDO su postgres -c 'psql -c "DROP DATABASE web;"' &>/dev/null
- #$SUDO su postgres -c 'psql -c "CREATE DATABASE web;"' &>/dev/null
-
- #read -p ' What Username is used in db: ' DBUser
- #read -s -p ' What Password is used in db: ' DBPass
-
- #$SUDO su postgres 'psql -c "DROP USER "'$DBUser &> /dev/null
-
- #sed -i "s/'USER': '.*/'USER': '$DBUser',/" web/web/settings.py
- #sed -i "s/'PASSWORD': '.*/'PASSWORD': '$DBPass',/" web/web/settings.py
- #echo ""
-
- #printf "${yellow} Creating DB User...${reset}\n\n"
- #$SUDO su postgres -c "psql -c \"CREATE USER $DBUser with PASSWORD '$DBPass';\""
-
- #$SUDO su postgres -c "psql -c \"ALTER ROLE $DBUser SET client_encoding TO 'utf8';\""
- #$SUDO su postgres -c "psql -c \"ALTER ROLE $DBUser SET default_transaction_isolation TO 'read committed';\""
- #$SUDO su postgres -c "psql -c \"ALTER ROLE $DBUser SET timezone TO 'UTC';\""
- #$SUDO su postgres -c "psql -c \"GRANT ALL PRIVILEGES ON DATABASE web TO $DBUser;\""
printf "${yellow} Creating WEB User...${reset}\n\n"
$SUDO rm $SCRIPTPATH/web/db.sqlite3 &>/dev/null
@@ -502,6 +476,9 @@ for repo in "${!repos[@]}"; do
if [ "massdns" = "$repo" ]; then
eval make $DEBUG_STD && strip -s bin/massdns && eval $SUDO cp bin/massdns /usr/local/bin/ $DEBUG_ERROR
fi
+ if [ "gitleaks" = "$repo" ]; then
+ eval make build $DEBUG_STD && eval $SUDO cp ./gitleaks /usr/local/bin/ $DEBUG_ERROR
+ fi
fi
if [ "gf" = "$repo" ]; then
eval cp -r examples ~/.gf $DEBUG_ERROR
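Note: the gitleaks build hook above follows the massdns pattern: clone, `make build`, copy the binary out of the repo root. Done by hand, assuming the default `~/Tools` directory from reconftw.cfg:

```bash
# gitleaks' Makefile builds the binary into the repo root, hence ./gitleaks.
git clone https://github.com/gitleaks/gitleaks ~/Tools/gitleaks
cd ~/Tools/gitleaks && make build && sudo cp ./gitleaks /usr/local/bin/
```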
diff --git a/reconftw.sh b/reconftw.sh
index 69360945..b9f4b9d1 100755
--- a/reconftw.sh
+++ b/reconftw.sh
@@ -114,6 +114,7 @@ function tools_installed(){
which hakip2host &>/dev/null || { printf "${bred} [*] hakip2host [NO]${reset}\n${reset}"; allinstalled=false;}
which gau &>/dev/null || { printf "${bred} [*] gau [NO]${reset}\n${reset}"; allinstalled=false;}
which subgpt &>/dev/null || { printf "${bred} [*] subgpt [NO]${reset}\n${reset}"; allinstalled=false;}
+ which gitleaks &>/dev/null || { printf "${bred} [*] gitleaks [NO]${reset}\n${reset}"; allinstalled=false;}
if [ "${allinstalled}" = true ]; then
printf "${bgreen} Good! All installed! ${reset}\n\n"
@@ -175,9 +176,12 @@ function github_repos(){
GH_TOKEN=$(cat ${GITHUB_TOKENS} | head -1)
echo $domain | unfurl format %r > .tmp/company_name.txt
enumerepo -token-string ${GH_TOKEN} -usernames .tmp/company_name.txt -o .tmp/company_repos.txt 2>>"$LOGFILE" &>/dev/null
- [ -s .tmp/company_repos.txt ] && cat .tmp/company_repos.txt | jq -r '.[].repos[]|.url' > .tmp/company_repos_url.txt 2>>"$LOGFILE" &>/dev/null
- interlace -tL .tmp/company_repos_url.txt -threads ${INTERLACE_THREADS} -c "trufflehog git _target_ -j | jq -c > _output_/_cleantarget_" -o .tmp/github/ 2>>"$LOGFILE" &>/dev/null
- cat .tmp/github/* | jq -c | jq -r > osint/github_company_secrets.json 2>>"$LOGFILE" &>/dev/null
+ [ -s ".tmp/company_repos.txt" ] && cat .tmp/company_repos.txt | jq -r '.[].repos[]|.url' > .tmp/company_repos_url.txt 2>>"$LOGFILE" &>/dev/null
+ mkdir -p .tmp/github_repos .tmp/github 2>>"$LOGFILE" &>/dev/null
+ [ -s ".tmp/company_repos_url.txt" ] && interlace -tL .tmp/company_repos_url.txt -threads ${INTERLACE_THREADS} -c "git clone _target_ .tmp/github_repos/_cleantarget_" 2>>"$LOGFILE" &>/dev/null
+ [ -d ".tmp/github_repos/" ] && ls .tmp/github_repos > .tmp/github_repos_folders.txt
+ [ -s ".tmp/github_repos_folders.txt" ] && interlace -tL .tmp/github_repos_folders.txt -threads ${INTERLACE_THREADS} -c "gitleaks detect --source .tmp/github_repos/_target_ --no-banner --no-color -r .tmp/github/gh_secret_cleantarget_.json" 2>>"$LOGFILE" &>/dev/null
+ [ -d ".tmp/github/" ] && cat .tmp/github/* | jq -c | jq -r > osint/github_company_secrets.json 2>>"$LOGFILE" &>/dev/null
else
printf "\n${bred} Required file ${GITHUB_TOKENS} not exists or empty${reset}\n"
fi
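Note: the rewritten `github_repos()` replaces trufflehog's remote scan with clone-then-scan; interlace only parallelizes the loop. A serial sketch of the same flow, assuming `.tmp/company_repos_url.txt` was produced by enumerepo as above:

```bash
# Clone each org repo, scan the working copy with gitleaks, merge the reports.
mkdir -p .tmp/github_repos .tmp/github
while read -r url; do
    name=$(basename "$url")
    git clone --quiet "$url" ".tmp/github_repos/$name" 2>/dev/null
    gitleaks detect --source ".tmp/github_repos/$name" \
        --no-banner --no-color -r ".tmp/github/gh_secret_${name}.json"
done < .tmp/company_repos_url.txt
cat .tmp/github/*.json | jq -c > osint/github_company_secrets.json
```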
From 546bd923da44260a5633e56af7e23fbd17072064 Mon Sep 17 00:00:00 2001
From: six2dez
Date: Fri, 26 May 2023 11:35:08 +0200
Subject: [PATCH 03/40] added mantra for js secrets
---
install.sh | 1 +
reconftw.sh | 2 +-
2 files changed, 2 insertions(+), 1 deletion(-)
diff --git a/install.sh b/install.sh
index 2a8b386e..8dddec2f 100755
--- a/install.sh
+++ b/install.sh
@@ -83,6 +83,7 @@ gotools["subfinder"]="go install -v github.com/projectdiscovery/subfinder/v2/cmd
gotools["byp4xx"]="go install -v github.com/lobuhi/byp4xx@latest"
gotools["hakip2host"]="go install github.com/hakluke/hakip2host@latest"
gotools["gau"]="go install -v github.com/lc/gau/v2/cmd/gau@latest"
+gotools["Mantra"]="go install github.com/MrEmpy/Mantra@latest"
declare -A repos
repos["dorks_hunter"]="six2dez/dorks_hunter"
diff --git a/reconftw.sh b/reconftw.sh
index b9f4b9d1..03344003 100755
--- a/reconftw.sh
+++ b/reconftw.sh
@@ -1478,7 +1478,7 @@ function jschecks(){
fi
printf "${yellow} Running : Gathering secrets 4/5${reset}\n"
if [ ! "$AXIOM" = true ]; then
- [ -s "js/js_livelinks.txt" ] && cat js/js_livelinks.txt | nuclei -silent -t ~/nuclei-templates/ $NUCLEI_FLAGS_JS -nh -retries 3 -rl $NUCLEI_RATELIMIT -o js/js_secrets.txt 2>>"$LOGFILE" &>/dev/null
+ [ -s "js/js_livelinks.txt" ] && cat js/js_livelinks.txt | Mantra -ua ${HEADER} | tail -n +11 | anew -q js/js_secrets.txt
else
[ -s "js/js_livelinks.txt" ] && axiom-scan js/js_livelinks.txt -m nuclei $NUCLEI_FLAGS_JS -retries 3 -nh -rl $NUCLEI_RATELIMIT -o js/js_secrets.txt $AXIOM_EXTRA_ARGS 2>>"$LOGFILE" &>/dev/null
fi
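Note: Mantra prints an ASCII banner before its findings, which is what `tail -n +11` strips (it keeps output from line 11 onward); patch 06 below swaps that hack for Mantra's `-s` silent flag. Standalone, with the default header value from reconftw.cfg:

```bash
# Hunt for hardcoded secrets in live JS files; tail drops Mantra's banner.
cat js/js_livelinks.txt \
    | Mantra -ua "Mozilla/5.0 (X11; Linux x86_64; rv:72.0) Gecko/20100101 Firefox/72.0" \
    | tail -n +11 | anew -q js/js_secrets.txt
```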
From bb6fdde7f349f88456a5e3ef11a88496ccb320ec Mon Sep 17 00:00:00 2001
From: six2dez
Date: Fri, 26 May 2023 11:53:24 +0200
Subject: [PATCH 04/40] better axiom nmap scan
---
reconftw.sh | 3 +--
1 file changed, 1 insertion(+), 2 deletions(-)
diff --git a/reconftw.sh b/reconftw.sh
index 03344003..6eaa802c 100755
--- a/reconftw.sh
+++ b/reconftw.sh
@@ -1120,8 +1120,7 @@ function portscan(){
if [ ! "$AXIOM" = true ]; then
[ -s ".tmp/ips_nocdn.txt" ] && $SUDO nmap --top-ports 200 -sV -n --max-retries 2 -Pn --open -iL .tmp/ips_nocdn.txt -oA hosts/portscan_active 2>>"$LOGFILE" &>/dev/null
else
- [ -s ".tmp/ips_nocdn.txt" ] && axiom-scan .tmp/ips_nocdn.txt -m nmapx --top-ports 200 -sV -n -Pn --open --max-retries 2 -o hosts/portscan_active.gnmap $AXIOM_EXTRA_ARGS 2>>"$LOGFILE" &>/dev/null
- [ -s "hosts/portscan_active.gnmap" ] && cat hosts/portscan_active.gnmap | egrep -v "^#|Status: Up" | cut -d' ' -f2,4- | sed -n -e 's/Ignored.*//p' | awk '{print "Host: " $1 " Ports: " NF-1; $1=""; for(i=2; i<=NF; i++) { a=a" "$i; }; split(a,s,","); for(e in s) { split(s[e],v,"/"); printf "%-8s %s/%-7s %s\n" , v[2], v[3], v[1], v[5]}; a="" }' > hosts/portscan_active.txt 2>>"$LOGFILE" &>/dev/null
+ [ -s ".tmp/ips_nocdn.txt" ] && axiom-scan .tmp/ips_nocdn.txt -m nmapx --top-ports 200 -sV -n -Pn --open --max-retries 2 -oA hosts/portscan_active $AXIOM_EXTRA_ARGS 2>>"$LOGFILE" &>/dev/null
fi
fi
if [ "$BBRF_CONNECTION" = true ]; then
From 172c38d60f033a6ec792c300fb0587122070fb27 Mon Sep 17 00:00:00 2001
From: six2dez
Date: Fri, 26 May 2023 12:03:11 +0200
Subject: [PATCH 05/40] bbrf bye bye
---
README.md | 14 -------
Terraform/files/reconftw.cfg | 6 ---
install.sh | 17 --------
lang/README-PT-BR.md | 76 ------------------------------------
reconftw.cfg | 6 ---
reconftw.sh | 45 ---------------------
requirements.txt | 1 -
7 files changed, 165 deletions(-)
delete mode 100644 lang/README-PT-BR.md
diff --git a/README.md b/README.md
index 9c42dc16..5c1cf0e3 100644
--- a/README.md
+++ b/README.md
@@ -59,7 +59,6 @@ So, what are you waiting for? Go! Go! Go! :boom:
- [Perform all steps (whole recon + all attacks) a.k.a. YOLO mode](#perform-all-steps-whole-recon--all-attacks-aka-yolo-mode)
- [Show help section](#show-help-section)
- [Axiom Support :cloud:](#axiom-support-cloud)
-- [BBRF Support :computer:](#bbrf-support-computer)
- [Sample video](#sample-video)
- [:fire: Features :fire:](#fire-features-fire)
- [Osint](#osint)
@@ -350,12 +349,6 @@ AXIOM_FLEET_SHUTDOWN=true # # Enable or disable delete the fleet after the execu
AXIOM_EXTRA_ARGS="" # Leave empty if you don't want to add extra arguments
#AXIOM_EXTRA_ARGS="--rm-logs" # Example
-# BBRF
-BBRF_CONNECTION=false
-BBRF_SERVER=https://demo.bbrf.me/bbrf
-BBRF_USERNAME="user"
-BBRF_PASSWORD="password"
-
# TERM COLORS
bred='\033[1;31m'
bblue='\033[1;34m'
@@ -463,13 +456,6 @@ reset='\033[0m'
- During the configuration of axiom you need to select `reconftw` as provisoner.
- You can create your own axiom's fleet before running reconFTW or let reconFTW to create and destroy it automatically just modifying reconftw.cfg file.
-# BBRF Support :computer:
-
-- To add reconFTW results to your [BBRF instance](https://github.com/honoki/bbrf-server) just add IP and credentials to reconftw.cfg file section dedicated to bbrf.
-
-- During the execution of the scans the results will be added dynamically when each step ends.
-- Even you can set up locally your BBRF instance to be able to visualize your results in a fancy web UI.
-
# Sample video
![Video](images/reconFTW.gif)
diff --git a/Terraform/files/reconftw.cfg b/Terraform/files/reconftw.cfg
index af1480c8..f01bcbe3 100644
--- a/Terraform/files/reconftw.cfg
+++ b/Terraform/files/reconftw.cfg
@@ -176,12 +176,6 @@ AXIOM_FLEET_SHUTDOWN=true
AXIOM_EXTRA_ARGS="" # Leave empty if you don't want to add extra arguments
#AXIOM_EXTRA_ARGS="--rm-logs" # Example
-# BBRF
-BBRF_CONNECTION=false
-BBRF_SERVER=https://demo.bbrf.me/bbrf
-BBRF_USERNAME="user"
-BBRF_PASSWORD="password"
-
# TERM COLORS
bred='\033[1;31m'
bblue='\033[1;34m'
diff --git a/install.sh b/install.sh
index 8dddec2f..d21693ba 100755
--- a/install.sh
+++ b/install.sh
@@ -571,23 +571,6 @@ if [ "$double_check" = "true" ]; then
done
fi
-# BBRF Setup
-if [ ! -d "$HOME/.bbrf/" ] ; then
- mkdir "$HOME/.bbrf/"
-fi
-if [ -d "$HOME/.bbrf/" ] && [ ! -s "$HOME/.bbrf/config.json" ]; then
- cat > "$HOME/.bbrf/config.json" << EOF
-{
- "username": "$BBRF_USERNAME",
- "password": "$BBRF_PASSWORD",
- "couchdb": "https://$BBRF_SERVER/bbrf",
- "slack_token": "",
- "discord_webhook": "",
- "ignore_ssl_errors": false
-}
-EOF
-fi
-
printf "${bblue} Running: Performing last configurations ${reset}\n\n"
## Last steps
if [ "$generate_resolvers" = true ]; then
diff --git a/lang/README-PT-BR.md b/lang/README-PT-BR.md
deleted file mode 100644
index 7413b537..00000000
--- a/lang/README-PT-BR.md
+++ /dev/null
@@ -1,76 +0,0 @@
-
- reconFTW
-
-Summary
-
-**ReconFTW** automates the entire reconnaissance process for you. It performs subdomain enumeration along with checks for several
-vulnerabilities, gathering as much information about your target as possible.
-
-ReconFTW uses several techniques (passive, bruteforce, permutations, certificate transparency, source code scraping, analytics, DNS records...) for subdomain
-enumeration, helping you pick the most interesting subdomains so you get ahead of the competition.
-
-It also performs various vulnerability checks such as XSS, Open Redirects, SSRF, CRLF, LFI, SQLi, SSL tests, DNS Zone Transfers and much more. In addition,
-it runs OSINT techniques, directory fuzzing, dorking, port scanning and nuclei scans on your target.
-
-So, what are you waiting for? Go! Go! Go! :boom:
-
-📔 Table of Contents
------------------
-- [💿 Installation:](#-installation)
- - [a) On your PC/VPS/VM](#a-in-your-pcvpsvm)
- - [b) Docker container 🐳 (2 options)](#b-docker-container--2-options)
- - [1) From DockerHub](#1-from-dockerhub)
- - [2) From the repository](#2-from-repository)
-- [⚙️ Config file:](#️-config-file)
-- [Usage:](#usage)
-- [Example usage:](#example-usage)
-- [Axiom support: :cloud:](#axiom-support-cloud)
-- [BBRF support: :computer:](#bbrf-support-computer)
-- [Demo video:](#sample-video)
-- [:fire: Features :fire:](#fire-features-fire)
- - [Osint](#osint)
- - [Subdomains](#subdomains)
- - [Hosts](#hosts)
- - [Webs](#webs)
- - [Extras](#extras)
-- [Mindmap/Workflow](#mindmapworkflow)
- - [Data Keep](#data-keep)
- - [Main commands:](#main-commands)
- - [How to contribute:](#how-to-contribute)
- - [Need help? :information_source:](#need-help-information_source)
- - [Support this work by buying me a coffee:](#you-can-support-this-work-buying-me-a-coffee)
-- [Sponsors ❤️](#sponsors-️)
-- [Thanks :pray:](#thanks-pray)
-- [Disclaimer](#disclaimer)
\ No newline at end of file
diff --git a/reconftw.cfg b/reconftw.cfg
index d540ef1d..7efdda6a 100644
--- a/reconftw.cfg
+++ b/reconftw.cfg
@@ -212,12 +212,6 @@ AXIOM_FLEET_SHUTDOWN=true # # Enable or disable delete the fleet after the execu
AXIOM_EXTRA_ARGS="" # Leave empty if you don't want to add extra arguments
#AXIOM_EXTRA_ARGS=" --rm-logs" # Example
-# BBRF
-BBRF_CONNECTION=false
-BBRF_SERVER=https://demo.bbrf.me/bbrf
-BBRF_USERNAME="user"
-BBRF_PASSWORD="password"
-
# TERM COLORS
bred='\033[1;31m'
bblue='\033[1;34m'
diff --git a/reconftw.sh b/reconftw.sh
index 6eaa802c..6ac842a4 100755
--- a/reconftw.sh
+++ b/reconftw.sh
@@ -99,7 +99,6 @@ function tools_installed(){
which cdncheck &>/dev/null || { printf "${bred} [*] cdncheck [NO]${reset}\n"; allinstalled=false;}
which interactsh-client &>/dev/null || { printf "${bred} [*] interactsh-client [NO]${reset}\n"; allinstalled=false;}
which tlsx &>/dev/null || { printf "${bred} [*] tlsx [NO]${reset}\n"; allinstalled=false;}
- which bbrf &>/dev/null || { printf "${bred} [*] bbrf [NO]${reset}\n"; allinstalled=false;}
which smap &>/dev/null || { printf "${bred} [*] smap [NO]${reset}\n"; allinstalled=false;}
which gitdorks_go &>/dev/null || { printf "${bred} [*] gitdorks_go [NO]${reset}\n"; allinstalled=false;}
which ripgen &>/dev/null || { printf "${bred} [*] ripgen [NO]${reset}\n${reset}"; allinstalled=false;}
@@ -357,10 +356,6 @@ function subdomains_full(){
echo $domain | anew -q subdomains/subdomains.txt
fi
- if [ "$BBRF_CONNECTION" = true ]; then
- [ -s "subdomains/subdomains.txt" ] && cat subdomains/subdomains.txt | bbrf domain add - 2>>"$LOGFILE" &>/dev/null
- fi
-
webprobe_simple
if [ -s "subdomains/subdomains.txt" ]; then
[ -s "$outOfScope_file" ] && deleteOutScoped $outOfScope_file subdomains/subdomains.txt
@@ -863,9 +858,6 @@ function subtakeover(){
if [ "$NUMOFLINES" -gt 0 ]; then
notification "${NUMOFLINES} new possible takeovers found" info
fi
- if [ "$BBRF_CONNECTION" = true ]; then
- [ -s "webs/takeover.txt" ] && cat webs/takeover.txt | grep -aEo 'https?://[^ ]+' | bbrf url add - -t subtko:true 2>>"$LOGFILE" &>/dev/null
- fi
end_func "Results are saved in $domain/webs/takeover.txt" ${FUNCNAME[0]}
else
if [ "$SUBTAKEOVER" = false ]; then
@@ -922,11 +914,6 @@ function s3buckets(){
notification "${NUMOFLINES2} new S3 buckets found" info
fi
- if [ "$BBRF_CONNECTION" = true ]; then
- [ -s "subdomains/cloud_assets.txt" ] && cat subdomains/cloud_assets.txt | grep -aEo 'https?://[^ ]+' | sed 's/[ \t]*$//' | bbrf url add - -t cloud_assets:true 2>>"$LOGFILE" &>/dev/null
- [ -s "subdomains/s3buckets.txt" ] && cat subdomains/s3buckets.txt | cut -d'|' -f1 | sed 's/[ \t]*$//' | bbrf domain update - -t s3bucket:true 2>>"$LOGFILE" &>/dev/null
- fi
-
end_func "Results are saved in subdomains/s3buckets.txt and subdomains/cloud_assets.txt" ${FUNCNAME[0]}
else
if [ "$S3BUCKETS" = false ]; then
@@ -966,9 +953,6 @@ function webprobe_simple(){
notification "Sending websites to proxy" info
ffuf -mc all -w webs/webs.txt -u FUZZ -replay-proxy $proxy_url 2>>"$LOGFILE" &>/dev/null
fi
- if [ "$BBRF_CONNECTION" = true ]; then
- [ -s "webs/webs.txt" ] && cat webs/webs.txt | bbrf url add - 2>>"$LOGFILE" &>/dev/null
- fi
else
if [ "$WEBPROBESIMPLE" = false ]; then
printf "\n${yellow} ${FUNCNAME[0]} skipped in this mode or defined in reconftw.cfg ${reset}\n"
@@ -1010,9 +994,6 @@ function webprobe_full(){
notification "Sending websites with uncommon ports to proxy" info
ffuf -mc all -w webs/webs_uncommon_ports.txt -u FUZZ -replay-proxy $proxy_url 2>>"$LOGFILE" &>/dev/null
fi
- if [ "$BBRF_CONNECTION" = true ]; then
- [ -s "webs/webs_uncommon_ports.txt" ] && cat webs/webs_uncommon_ports.txt | bbrf url add - 2>>"$LOGFILE" &>/dev/null
- fi
else
if [ "$WEBPROBEFULL" = false ]; then
printf "\n${yellow} ${FUNCNAME[0]} skipped in this mode or defined in reconftw.cfg ${reset}\n"
@@ -1123,12 +1104,6 @@ function portscan(){
[ -s ".tmp/ips_nocdn.txt" ] && axiom-scan .tmp/ips_nocdn.txt -m nmapx --top-ports 200 -sV -n -Pn --open --max-retries 2 -oA hosts/portscan_active $AXIOM_EXTRA_ARGS 2>>"$LOGFILE" &>/dev/null
fi
fi
- if [ "$BBRF_CONNECTION" = true ]; then
- [ -s "hosts/subs_ips_vhosts.txt" ] && cat hosts/subs_ips_vhosts.txt | awk '{print $2,$1}' | sed -e 's/\s\+/:/g' | bbrf domain add -
- [ -s "hosts/subs_ips_vhosts.txt" ] && cat hosts/subs_ips_vhosts.txt | sed -e 's/\s\+/:/g' | bbrf ip add -
- [ -s "hosts/portscan_active.xml" ] && $tools/ultimate-nmap-parser/ultimate-nmap-parser.sh hosts/portscan_active.gnmap --csv 2>>"$LOGFILE" &>/dev/null
- [ -s "parsed_nmap.csv" ] && mv parsed_nmap.csv .tmp/parsed_nmap.csv && cat .tmp/parsed_nmap.csv | tail -n +2 | cut -d',' -f1,2,5,6 | sed -e 's/,/:/g' | sed 's/\:$//' | bbrf service add - && rm -f parsed_nmap.csv
- fi
[ -s "hosts/portscan_active.xml" ] && searchsploit --nmap hosts/portscan_active.xml 2>/dev/null > hosts/searchsploit.txt
end_func "Results are saved in hosts/portscan_[passive|active].txt" ${FUNCNAME[0]}
else
@@ -1173,9 +1148,6 @@ function waf_checks(){
cat .tmp/wafs.txt | sed -e 's/^[ \t]*//' -e 's/ \+ /\t/g' -e '/(None)/d' | tr -s "\t" ";" > webs/webs_wafs.txt
NUMOFLINES=$(cat webs/webs_wafs.txt 2>>"$LOGFILE" | sed '/^$/d' | wc -l)
notification "${NUMOFLINES} websites protected by waf" info
- if [ "$BBRF_CONNECTION" = true ]; then
- [ -s "webs/webs_wafs.txt" ] && cat webs/webs_wafs.txt | bbrf url add - -t waf:true 2>>"$LOGFILE" &>/dev/null
- fi
end_func "Results are saved in $domain/webs/webs_wafs.txt" ${FUNCNAME[0]}
else
end_func "No results found" ${FUNCNAME[0]}
@@ -1222,13 +1194,6 @@ function nuclei_check(){
printf "\n\n"
fi
fi
- if [ "$BBRF_CONNECTION" = true ]; then
- [ -s "nuclei_output/info.txt" ] && cat nuclei_output/info.txt | cut -d' ' -f6 | sort -u | bbrf url add - -t nuclei:${crit} 2>>"$LOGFILE" &>/dev/null
- [ -s "nuclei_output/low.txt" ] && cat nuclei_output/low.txt | cut -d' ' -f6 | sort -u | bbrf url add - -t nuclei:${crit} 2>>"$LOGFILE" &>/dev/null
- [ -s "nuclei_output/medium.txt" ] && cat nuclei_output/medium.txt | cut -d' ' -f6 | sort -u | bbrf url add - -t nuclei:${crit} 2>>"$LOGFILE" &>/dev/null
- [ -s "nuclei_output/high.txt" ] && cat nuclei_output/high.txt | cut -d' ' -f6 | sort -u | bbrf url add - -t nuclei:${crit} 2>>"$LOGFILE" &>/dev/null
- [ -s "nuclei_output/critical.txt" ] && cat nuclei_output/critical.txt | cut -d' ' -f6 | sort -u | bbrf url add - -t nuclei:${crit} 2>>"$LOGFILE" &>/dev/null
- fi
end_func "Results are saved in $domain/nuclei_output folder" ${FUNCNAME[0]}
else
if [ "$NUCLEICHECK" = false ]; then
@@ -1432,9 +1397,6 @@ function url_ext(){
if [[ ${NUMOFLINES} -gt 0 ]]; then
echo -e "\n############################\n + ${t} + \n############################\n" >> webs/urls_by_ext.txt
cat .tmp/url_extract_tmp.txt | grep -aEi "\.(${t})($|\/|\?)" >> webs/urls_by_ext.txt
- if [ "$BBRF_CONNECTION" = true ]; then
- cat .tmp/url_extract_tmp.txt | grep -aEi "\.(${t})($|\/|\?)" | bbrf url add - 2>>"$LOGFILE" &>/dev/null
- fi
fi
done
end_func "Results are saved in $domain/webs/urls_by_ext.txt" ${FUNCNAME[0]}
@@ -2321,13 +2283,6 @@ function start(){
touch .log/${NOW}_${NOWT}.txt
echo "Start ${NOW} ${NOWT}" > "${LOGFILE}"
- if [ "$BBRF_CONNECTION" = true ]; then
- program_bbrf=$(echo $domain | awk -F. '{print $1"_"$2}') 2>>"$LOGFILE" &>/dev/null
- bbrf new ${program_bbrf} 2>>"$LOGFILE" &>/dev/null
- bbrf use ${program_bbrf} 2>>"$LOGFILE" &>/dev/null
- bbrf inscope add "*.${domain}" 2>>"$LOGFILE" &>/dev/null
- fi
-
printf "\n"
printf "${bred} Target: ${domain}\n\n"
}
diff --git a/requirements.txt b/requirements.txt
index fb3e8c3d..c1210c22 100644
--- a/requirements.txt
+++ b/requirements.txt
@@ -1,7 +1,6 @@
aiohttp # sqlmap
argcomplete # brutespray
argparse # multiple
-bbrf # multiple
beautifulsoup4 # multiple
bs4 # multiple
censys # multiple
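Note: every removed BBRF hook was a thin pipe into the `bbrf` CLI, so anyone still running a [BBRF server](https://github.com/honoki/bbrf-server) can replay them after a scan. The commands below are lifted from the deleted lines; the program name is a placeholder:

```bash
# Manual replacement for the removed in-scan hooks, run from the target dir.
bbrf new example_com && bbrf use example_com
bbrf inscope add '*.example.com'
cat subdomains/subdomains.txt | bbrf domain add -
cat webs/webs.txt | bbrf url add -
```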
From 1840508e4bb81c0a1c4ab899decccfdb71e23df4 Mon Sep 17 00:00:00 2001
From: six2dez
Date: Fri, 26 May 2023 12:30:02 +0200
Subject: [PATCH 06/40] Added Mantra silent and axiom (not pushed yet on axiom)
---
reconftw.sh | 4 ++--
1 file changed, 2 insertions(+), 2 deletions(-)
diff --git a/reconftw.sh b/reconftw.sh
index 6ac842a4..0e03d837 100755
--- a/reconftw.sh
+++ b/reconftw.sh
@@ -1439,9 +1439,9 @@ function jschecks(){
fi
printf "${yellow} Running : Gathering secrets 4/5${reset}\n"
if [ ! "$AXIOM" = true ]; then
- [ -s "js/js_livelinks.txt" ] && cat js/js_livelinks.txt | Mantra -ua ${HEADER} | tail -n +11 | anew -q js/js_secrets.txt
+ [ -s "js/js_livelinks.txt" ] && cat js/js_livelinks.txt | Mantra -ua ${HEADER} -s | anew -q js/js_secrets.txt
else
- [ -s "js/js_livelinks.txt" ] && axiom-scan js/js_livelinks.txt -m nuclei $NUCLEI_FLAGS_JS -retries 3 -nh -rl $NUCLEI_RATELIMIT -o js/js_secrets.txt $AXIOM_EXTRA_ARGS 2>>"$LOGFILE" &>/dev/null
+ [ -s "js/js_livelinks.txt" ] && axiom-scan js/js_livelinks.txt -m Mantra -ua ${HEADER} -o js/js_secrets.txt $AXIOM_EXTRA_ARGS 2>>"$LOGFILE" &>/dev/null
fi
printf "${yellow} Running : Building wordlist 5/5${reset}\n"
[ -s "js/js_livelinks.txt" ] && interlace -tL js/js_livelinks.txt -threads ${INTERLACE_THREADS} -c "python3 $tools/getjswords.py '_target_' | anew -q webs/dict_words.txt" 2>>"$LOGFILE" &>/dev/null
From a1c1efb84270866bfeeec408440c2f2b33d4271f Mon Sep 17 00:00:00 2001
From: six2dez
Date: Fri, 26 May 2023 13:15:05 +0200
Subject: [PATCH 07/40] Removed trufflehog, better fuzzing and readme
---
README.md | 5 +-
Terraform/files/reconftw.cfg | 213 ++++++++++++++++++++---------------
install.sh | 3 -
reconftw.cfg | 3 +-
reconftw.sh | 17 ++-
5 files changed, 136 insertions(+), 105 deletions(-)
diff --git a/README.md b/README.md
index 5c1cf0e3..9fd058e7 100644
--- a/README.md
+++ b/README.md
@@ -239,7 +239,6 @@ WAF_DETECTION=true # Detect WAFs
NUCLEICHECK=true # Enable or disable nuclei
NUCLEI_SEVERITY="info,low,medium,high,critical" # Set templates criticity
NUCLEI_FLAGS="-silent -t ~/nuclei-templates/ -retries 2" # Additional nuclei extra flags, don't set the severity here but the exclusions like "-etags openssh"
-NUCLEI_FLAGS_JS="-silent -tags exposure,token -severity info,low,medium,high,critical" # Additional nuclei extra flags for js secrets
URL_CHECK=true # Enable or disable URL collection
URL_CHECK_PASSIVE=true # Search for urls, passive methods from Archive, OTX, CommonCrawl, etc
URL_CHECK_ACTIVE=true # Search for urls by crawling the websites
@@ -469,7 +468,7 @@ reset='\033[0m'
- Metadata finder ([MetaFinder](https://github.com/Josue87/MetaFinder))
- Google Dorks ([dorks_hunter](https://github.com/six2dez/dorks_hunter))
- Github Dorks ([gitdorks_go](https://github.com/damit5/gitdorks_go))
-- GitHub org analysis ([enumerepo](https://github.com/trickest/enumerepo) and [trufflehog](https://github.com/trufflesecurity/trufflehog))
+- GitHub org analysis ([enumerepo](https://github.com/trickest/enumerepo) and [gitleaks](https://github.com/gitleaks/gitleaks))
## Subdomains
@@ -506,7 +505,7 @@ reset='\033[0m'
- Url extraction ([gau](https://github.com/lc/gau),[waymore](https://github.com/xnl-h4ck3r/waymore), [katana](https://github.com/projectdiscovery/katana), [github-endpoints](https://gist.github.com/six2dez/d1d516b606557526e9a78d7dd49cacd3) and [JSA](https://github.com/w9w/JSA))
- URL patterns Search and filtering ([urless](https://github.com/xnl-h4ck3r/urless), [gf](https://github.com/tomnomnom/gf) and [gf-patterns](https://github.com/1ndianl33t/Gf-Patterns))
- Favicon Real IP ([fav-up](https://github.com/pielco11/fav-up))
-- Javascript analysis ([subjs](https://github.com/lc/subjs), [JSA](https://github.com/w9w/JSA), [xnLinkFinder](https://github.com/xnl-h4ck3r/xnLinkFinder), [getjswords](https://github.com/m4ll0k/BBTz))
+- Javascript analysis ([subjs](https://github.com/lc/subjs), [JSA](https://github.com/w9w/JSA), [xnLinkFinder](https://github.com/xnl-h4ck3r/xnLinkFinder), [getjswords](https://github.com/m4ll0k/BBTz), [Mantra](https://github.com/MrEmpy/Mantra))
- Fuzzing ([ffuf](https://github.com/ffuf/ffuf))
- URL sorting by extension
- Wordlist generation
diff --git a/Terraform/files/reconftw.cfg b/Terraform/files/reconftw.cfg
index f01bcbe3..9e3cf4b0 100644
--- a/Terraform/files/reconftw.cfg
+++ b/Terraform/files/reconftw.cfg
@@ -3,12 +3,18 @@
#################################################################
# General values
-tools=~/Tools
-SCRIPTPATH="$( cd "$(dirname "$0")" >/dev/null 2>&1 ; pwd -P )"
-profile_shell=".$(basename $(echo $SHELL))rc"
-reconftw_version=$(git rev-parse --abbrev-ref HEAD)-$(git describe --tags)
-generate_resolvers=false
-proxy_url="http://127.0.0.1:8080/"
+tools=~/Tools # Path to installed tools
+SCRIPTPATH="$( cd "$(dirname "$0")" >/dev/null 2>&1 ; pwd -P )" # Get current script's path
+profile_shell=".$(basename $(echo $SHELL))rc" # Get current shell profile
+reconftw_version=$(git rev-parse --abbrev-ref HEAD)-$(git describe --tags) # Fetch current reconftw version
+generate_resolvers=false # Generate custom resolvers with dnsvalidator
+update_resolvers=true # Fetch and rewrite resolvers from trickest/resolvers before DNS resolution
+resolvers_url="https://raw.githubusercontent.com/trickest/resolvers/main/resolvers.txt"
+resolvers_trusted_url="https://raw.githubusercontent.com/six2dez/resolvers_reconftw/main/resolvers_trusted.txt"
+fuzzing_remote_list="https://raw.githubusercontent.com/six2dez/OneListForAll/main/onelistforallmicro.txt" # Wordlist sent to axiom (if used) for fuzzing
+proxy_url="http://127.0.0.1:8080/" # Proxy url
+install_golang=true # Set it to false if you already have Golang configured and ready
+upgrade_tools=true
#dir_output=/custom/output/path
# Golang Vars (Comment or change on your own)
@@ -20,6 +26,8 @@ export PATH=$GOPATH/bin:$GOROOT/bin:$HOME/.local/bin:$PATH
#NOTIFY_CONFIG=~/.config/notify/provider-config.yaml # No need to define
AMASS_CONFIG=~/.config/amass/config.ini
GITHUB_TOKENS=${tools}/.github_tokens
+GITLAB_TOKENS=${tools}/.gitlab_tokens
+SUBGPT_COOKIE=${tools}/subgpt_cookies.json
#CUSTOM_CONFIG=custom_config_path.txt # In case you use a custom config file, uncomment this line and set your files path
# APIs/TOKENS - Uncomment the lines you want removing the '#' at the beginning of the line
@@ -31,100 +39,120 @@ GITHUB_TOKENS=${tools}/.github_tokens
#slack_auth="xoXX-XXX-XXX-XXX"
# File descriptors
-DEBUG_STD="&>/dev/null"
-DEBUG_ERROR="2>/dev/null"
+DEBUG_STD="&>/dev/null" # Skips STD output on installer
+DEBUG_ERROR="2>/dev/null" # Skips ERR output on installer
# Osint
-OSINT=true
+OSINT=true # Enable or disable the whole OSINT module
GOOGLE_DORKS=true
GITHUB_DORKS=true
-METADATA=true
-EMAILS=true
-DOMAIN_INFO=true
-IP_INFO=true
+GITHUB_REPOS=true
+METADATA=true # Fetch metadata from indexed office documents
+EMAILS=true # Fetch emails from different sites
+DOMAIN_INFO=true # whois info
+REVERSE_WHOIS=true # amass intel reverse whois info, takes some time
+IP_INFO=true # Reverse IP search, geolocation and whois
METAFINDER_LIMIT=20 # Max 250
# Subdomains
-SUBDOMAINS_GENERAL=true
-SUBPASSIVE=true
-SUBCRT=true
-SUBANALYTICS=true
-SUBBRUTE=true
-SUBSCRAPING=true
-SUBPERMUTE=true
-SUBTAKEOVER=true
-SUBRECURSIVE=true
+RUNAMASS=true
+RUNSUBFINDER=true
+SUBDOMAINS_GENERAL=true # Enable or disable the whole Subdomains module
+SUBPASSIVE=true # Passive subdomains search
+SUBCRT=true # crtsh search
+SUBNOERROR=true # Check DNS NOERROR responses and bruteforce them
+SUBANALYTICS=true # Google Analytics search
+SUBBRUTE=true # DNS bruteforcing
+SUBSCRAPING=true # Subdomain extraction from web crawling
+SUBPERMUTE=true # DNS permutations
+SUBREGEXPERMUTE=true # Permutations by regex analysis
+SUBGPT=true # Permutations by BingGPT prediction
+PERMUTATIONS_OPTION=gotator # The alternative is "ripgen" (faster, not deeper)
+GOTATOR_FLAGS=" -depth 1 -numbers 3 -mindup -adv -md" # Flags for gotator
+SUBTAKEOVER=false # Check subdomain takeovers, false by default because nuclei already checks this
SUB_RECURSIVE_PASSIVE=false # Uses a lot of API keys queries
+DEEP_RECURSIVE_PASSIVE=10 # Number of top subdomains for recursion
SUB_RECURSIVE_BRUTE=false # Needs big disk space and time to resolve
-ZONETRANSFER=true
-S3BUCKETS=true
-REVERSE_IP=false
-TLS_PORTS="21,22,25,80,110,135,143,261,271,324,443,448,465,563,614,631,636,664,684,695,832,853,854,990,993,989,990,992,993,994,995,1129,1131,1184,2083,2087,2089,2096,2221,2252,2376,2381,2478,2479,2482,2484,2679,2762,3077,3078,3183,3191,3220,3269,3306,3410,3424,3471,3496,3509,3529,3539,3535,3660,36611,3713,3747,3766,3864,3885,3995,3896,4031,4036,4062,4064,4081,4083,4116,4335,4336,4536,4590,4740,4843,4843,4849,5443,5007,5061,5321,5349,5671,5783,5868,5986,5989,5990,6209,6251,6443,6513,6514,6619,6697,6771,6697,7202,7443,7673,7674,7677,7775,8243,8443,8991,8989,9089,9295,9318,9443,9444,9614,9802,10161,10162,11751,12013,12109,14143,15002,16995,41230,16993,20003"
+ZONETRANSFER=true # Check zone transfer
+S3BUCKETS=true # Check S3 bucket misconfigs
+REVERSE_IP=false # Reverse IP subdomain search (set to true if your target is a CIDR/IP)
+TLS_PORTS="21,22,25,80,110,135,143,261,271,324,443,448,465,563,614,631,636,664,684,695,832,853,854,990,993,989,992,994,995,1129,1131,1184,2083,2087,2089,2096,2221,2252,2376,2381,2478,2479,2482,2484,2679,2762,3077,3078,3183,3191,3220,3269,3306,3410,3424,3471,3496,3509,3529,3539,3535,3660,36611,3713,3747,3766,3864,3885,3995,3896,4031,4036,4062,4064,4081,4083,4116,4335,4336,4536,4590,4740,4843,4849,5443,5007,5061,5321,5349,5671,5783,5868,5986,5989,5990,6209,6251,6443,6513,6514,6619,6697,6771,7202,7443,7673,7674,7677,7775,8243,8443,8991,8989,9089,9295,9318,9443,9444,9614,9802,10161,10162,11751,12013,12109,14143,15002,16995,41230,16993,20003"
+INSCOPE=false # Uses inscope tool to filter the scope, requires .scope file in reconftw folder
# Web detection
-WEBPROBESIMPLE=true
-WEBPROBEFULL=true
-WEBSCREENSHOT=true
-VIRTUALHOSTS=true
-UNCOMMON_PORTS_WEB="81,300,591,593,832,981,1010,1311,1099,2082,2095,2096,2480,3000,3128,3333,4243,4567,4711,4712,4993,5000,5104,5108,5280,5281,5601,5800,6543,7000,7001,7396,7474,8000,8001,8008,8014,8042,8060,8069,8080,8081,8083,8088,8090,8091,8095,8118,8123,8172,8181,8222,8243,8280,8281,8333,8337,8443,8500,8834,8880,8888,8983,9000,9001,9043,9060,9080,9090,9091,9092,9200,9443,9502,9800,9981,10000,10250,11371,12443,15672,16080,17778,18091,18092,20720,32000,55440,55672"
-# You can change to aquatone if gowitness fails, comment the one you don't want
-AXIOM_SCREENSHOT_MODULE=webscreenshot # Choose between aquatone,gowitness,webscreenshot
+WEBPROBESIMPLE=true # Web probing on 80/443
+WEBPROBEFULL=true # Web probing on a larger port list
+WEBSCREENSHOT=true # Web screenshotting
+VIRTUALHOSTS=false # Check virtual hosts by fuzzing the Host header
+NMAP_WEBPROBE=true # If disabled, httpx runs directly over the subdomains list; running nmap before web probing increases speed and avoids repeated requests
+UNCOMMON_PORTS_WEB="81,300,591,593,832,981,1010,1311,1099,2082,2095,2096,2480,3000,3001,3002,3003,3128,3333,4243,4567,4711,4712,4993,5000,5104,5108,5280,5281,5601,5800,6543,7000,7001,7396,7474,8000,8001,8008,8014,8042,8060,8069,8080,8081,8083,8088,8090,8091,8095,8118,8123,8172,8181,8222,8243,8280,8281,8333,8337,8443,8500,8834,8880,8888,8983,9000,9001,9043,9060,9080,9090,9091,9092,9200,9443,9502,9800,9981,10000,10250,11371,12443,15672,16080,17778,18091,18092,20720,32000,55440,55672"
# Host
-FAVICON=true
-PORTSCANNER=true
-PORTSCAN_PASSIVE=true
-PORTSCAN_ACTIVE=true
-CDN_IP=true
+FAVICON=true # Favicon-based domain discovery
+PORTSCANNER=true # Enable or disable the whole Port scanner module
+PORTSCAN_PASSIVE=true # Port scanner with Shodan
+PORTSCAN_ACTIVE=true # Port scanner with nmap
+CDN_IP=true # Check which IPs belong to a CDN
# Web analysis
-WAF_DETECTION=true
-NUCLEICHECK=true
-NUCLEI_SEVERITY="info,low,medium,high,critical"
-URL_CHECK=true
-URL_GF=true
-URL_EXT=true
-JSCHECKS=true
-FUZZ=true
-CMS_SCANNER=true
-WORDLIST=true
-ROBOTSWORDLIST=true
-PASSWORD_DICT=true
-PASSWORD_MIN_LENGTH=5
-PASSWORD_MAX_LENGTH=14
+WAF_DETECTION=true # Detect WAFs
+NUCLEICHECK=true # Enable or disable nuclei
+NUCLEI_SEVERITY="info,low,medium,high,critical" # Set template severity levels
+NUCLEI_FLAGS=" -silent -t $HOME/nuclei-templates/ -retries 2" # Additional nuclei extra flags, don't set the severity here but the exclusions like " -etags openssh"
+NUCLEI_FLAGS_JS=" -silent -tags exposure,token -severity info,low,medium,high,critical" # Additional nuclei extra flags for js secrets
+URL_CHECK=true # Enable or disable URL collection
+URL_CHECK_PASSIVE=true # Search for urls, passive methods from Archive, OTX, CommonCrawl, etc
+URL_CHECK_ACTIVE=true # Search for urls by crawling the websites
+URL_GF=true # Url patterns classification
+URL_EXT=true # Returns a list of files divided by extension
+JSCHECKS=true # JS analysis
+FUZZ=true # Web fuzzing
+CMS_SCANNER=true # CMS scanner
+WORDLIST=true # Wordlist generation
+ROBOTSWORDLIST=true # Check historic disallow entries on waybackMachine
+PASSWORD_DICT=true # Generate password dictionary
+PASSWORD_MIN_LENGTH=5 # Min password length
+PASSWORD_MAX_LENGTH=14 # Max password length
# Vulns
-VULNS_GENERAL=false
-XSS=true
-CORS=true
-TEST_SSL=true
-OPEN_REDIRECT=true
-SSRF_CHECKS=true
-CRLF_CHECKS=true
-LFI=true
-SSTI=true
-SQLI=true
-BROKENLINKS=true
-SPRAY=true
-COMM_INJ=true
-PROTO_POLLUTION=true
+VULNS_GENERAL=false # Enable or disable the vulnerability module (very intrusive and slow)
+XSS=true # Check for XSS with dalfox
+CORS=true # CORS misconfigs
+TEST_SSL=true # SSL misconfigs
+OPEN_REDIRECT=true # Check open redirects
+SSRF_CHECKS=true # SSRF checks
+CRLF_CHECKS=true # CRLF checks
+LFI=true # LFI by fuzzing
+SSTI=true # SSTI by fuzzing
+SQLI=true # Check SQLI
+SQLMAP=true # Check SQLI with sqlmap
+GHAURI=false # Check SQLI with ghauri
+BROKENLINKS=true # Check for broken links
+SPRAY=true # Performs password spraying
+COMM_INJ=true # Check for command injections with commix
+PROTO_POLLUTION=true # Check for prototype pollution flaws
+SMUGGLING=true # Check for HTTP request smuggling flaws
+WEBCACHE=true # Check for Web Cache issues
+BYPASSER4XX=true # Check for 4XX bypasses
# Extra features
NOTIFICATION=false # Notification for every function
SOFT_NOTIFICATION=false # Only for start/end
-DEEP=false
-DEEP_LIMIT=500
-DEEP_LIMIT2=1500
-DIFF=false
-REMOVETMP=false
-REMOVELOG=false
-PROXY=false
-SENDZIPNOTIFY=false
+DEEP=false # DEEP mode, really slow and ignores result-count limits
+DEEP_LIMIT=500 # First results threshold; some steps only run below it unless DEEP mode is on
+DEEP_LIMIT2=1500 # Second, higher results threshold with the same purpose
+DIFF=false # Diff mode, runs every module over an already scanned target and prints only new findings (but saves everything)
+REMOVETMP=false # Delete temporary files after execution (to free up space)
+REMOVELOG=false # Delete logs after execution
+PROXY=false # Send the websites found to the proxy
+SENDZIPNOTIFY=false # Zip the results and send them via notify
PRESERVE=true # set to true to avoid deleting the .called_fn files on really large scans
-FFUF_FLAGS="-mc all -fc 404 -ac -sf -s"
+FFUF_FLAGS=" -mc all -fc 404 -ac -sf" # Ffuf flags
+HTTPX_FLAGS=" -follow-redirects -random-agent -status-code -silent -title -web-server -tech-detect -location -content-length" # Httpx flags for simple web probing
+GOWITNESS_FLAGS=" --disable-logging --timeout 5"
# HTTP options
-HEADER="User-Agent: Mozilla/5.0 (X11; Linux x86_64; rv:72.0) Gecko/20100101 Firefox/72.0"
+HEADER="User-Agent: Mozilla/5.0 (X11; Linux x86_64; rv:72.0) Gecko/20100101 Firefox/72.0" # Default header
# Threads
FFUF_THREADS=40
@@ -133,26 +161,35 @@ HTTPX_UNCOMMONPORTS_THREADS=100
KATANA_THREADS=20
BRUTESPRAY_THREADS=20
BRUTESPRAY_CONCURRENCE=10
-GAUPLUS_THREADS=10
+GAU_THREADS=10
DNSTAKE_THREADS=100
DALFOX_THREADS=200
-PUREDNS_PUBLIC_LIMIT=0 # Set between 2000 - 10000 if your router blows up, 0 is unlimited
+PUREDNS_PUBLIC_LIMIT=0 # Set between 2000 - 10000 if your router blows up, 0 means unlimited
PUREDNS_TRUSTED_LIMIT=400
PUREDNS_WILDCARDTEST_LIMIT=30
PUREDNS_WILDCARDBATCH_LIMIT=1500000
-WEBSCREENSHOT_THREADS=200
-GOWITNESS_THREADS=8
+GOWITNESS_THREADS=20
RESOLVE_DOMAINS_THREADS=150
PPFUZZ_THREADS=30
DNSVALIDATOR_THREADS=200
INTERLACE_THREADS=10
TLSX_THREADS=1000
+XNLINKFINDER_DEPTH=3
+BYP4XX_THREADS=20
+
+# Rate limits
+HTTPX_RATELIMIT=150
+NUCLEI_RATELIMIT=150
+FFUF_RATELIMIT=0
# Timeouts
-CMSSCAN_TIMEOUT=3600
+AMASS_INTEL_TIMEOUT=15 # Minutes
+AMASS_ENUM_TIMEOUT=180 # Minutes
+CMSSCAN_TIMEOUT=3600 # Seconds
FFUF_MAXTIME=900 # Seconds
HTTPX_TIMEOUT=10 # Seconds
HTTPX_UNCOMMONPORTS_TIMEOUT=10 # Seconds
+PERMUTATIONS_LIMIT=21474836480 # Bytes, default is 20 GB
# lists
fuzz_wordlist=${tools}/fuzz_wordlist.txt
@@ -166,15 +203,15 @@ resolvers_trusted=${tools}/resolvers_trusted.txt
# Axiom Fleet
# Will not start a new fleet if one exist w/ same name and size (or larger)
# AXIOM=false Uncomment only to overwrite command line flags
-AXIOM_FLEET_LAUNCH=false
-AXIOM_FLEET_NAME="reconFTW"
-AXIOM_FLEET_COUNT=5
-AXIOM_FLEET_REGIONS="eu-central"
-AXIOM_FLEET_SHUTDOWN=true
+AXIOM_FLEET_LAUNCH=true # Enable or disable spinning up a new fleet; if false, the existing fleet matching the AXIOM_FLEET_NAME prefix is used
+AXIOM_FLEET_NAME="reconFTW" # Fleet's prefix name
+AXIOM_FLEET_COUNT=10 # Fleet's number
+AXIOM_FLEET_REGIONS="eu-central" # Fleet's region
+AXIOM_FLEET_SHUTDOWN=true # Enable or disable deleting the fleet after execution
# This is a script on your reconftw host that might prep things your way...
-#AXIOM_POST_START="~/Tools/axiom_config.sh"
+#AXIOM_POST_START="~/Tools/axiom_config.sh" # Useful to send your config files to the fleet
AXIOM_EXTRA_ARGS="" # Leave empty if you don't want to add extra arguments
-#AXIOM_EXTRA_ARGS="--rm-logs" # Example
+#AXIOM_EXTRA_ARGS=" --rm-logs" # Example
# TERM COLORS
bred='\033[1;31m'
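Note: among the new knobs above, `PERMUTATIONS_LIMIT` is expressed in bytes; the default checks out against its "20 GB" comment:

```bash
echo $((20 * 1024 * 1024 * 1024))   # 21474836480, i.e. 20 GB in bytes
```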
diff --git a/install.sh b/install.sh
index d21693ba..0bbd7287 100755
--- a/install.sh
+++ b/install.sh
@@ -110,7 +110,6 @@ repos["ultimate-nmap-parser"]="shifty0g/ultimate-nmap-parser"
repos["pydictor"]="LandGrey/pydictor"
repos["gitdorks_go"]="damit5/gitdorks_go"
repos["urless"]="xnl-h4ck3r/urless"
-repos["trufflehog"]="trufflesecurity/trufflehog"
repos["smuggler"]="defparam/smuggler"
repos["Web-Cache-Vulnerability-Scanner"]="Hackmanit/Web-Cache-Vulnerability-Scanner"
repos["regulator"]="cramppet/regulator"
@@ -485,8 +484,6 @@ for repo in "${!repos[@]}"; do
eval cp -r examples ~/.gf $DEBUG_ERROR
elif [ "Gf-Patterns" = "$repo" ]; then
eval mv ./*.json ~/.gf $DEBUG_ERROR
- elif [ "trufflehog" = "$repo" ]; then
- eval go install $DEBUG_STD
fi
cd "$dir" || { echo "Failed to cd to $dir in ${FUNCNAME[0]} @ line ${LINENO}"; exit 1; }
done
diff --git a/reconftw.cfg b/reconftw.cfg
index 7efdda6a..dc2d5f27 100644
--- a/reconftw.cfg
+++ b/reconftw.cfg
@@ -11,6 +11,7 @@ generate_resolvers=false # Generate custom resolvers with dnsvalidator
update_resolvers=true # Fetch and rewrite resolvers from trickest/resolvers before DNS resolution
resolvers_url="https://raw.githubusercontent.com/trickest/resolvers/main/resolvers.txt"
resolvers_trusted_url="https://raw.githubusercontent.com/six2dez/resolvers_reconftw/main/resolvers_trusted.txt"
+fuzzing_remote_list="https://raw.githubusercontent.com/six2dez/OneListForAll/main/onelistforallmicro.txt" # Wordlist sent to axiom (if used) for fuzzing
proxy_url="http://127.0.0.1:8080/" # Proxy url
install_golang=true # Set it to false if you already have Golang configured and ready
upgrade_tools=true
@@ -146,7 +147,7 @@ REMOVELOG=false # Delete logs after execution
PROXY=false # Send to proxy the websites found
SENDZIPNOTIFY=false # Send to zip the results (over notify)
PRESERVE=true # set to true to avoid deleting the .called_fn files on really large scans
-FFUF_FLAGS=" -mc all -fc 404 -ac -sf" # Ffuf flags
+FFUF_FLAGS=" -mc all -fc 404 -ach -sf -of json" # Ffuf flags
HTTPX_FLAGS=" -follow-redirects -random-agent -status-code -silent -title -web-server -tech-detect -location -content-length" # Httpx flags for simple web probing
GOWITNESS_FLAGS=" --disable-logging --timeout 5"
diff --git a/reconftw.sh b/reconftw.sh
index 0e03d837..f5ba94e4 100755
--- a/reconftw.sh
+++ b/reconftw.sh
@@ -105,7 +105,6 @@ function tools_installed(){
which dsieve &>/dev/null || { printf "${bred} [*] dsieve [NO]${reset}\n${reset}"; allinstalled=false;}
which inscope &>/dev/null || { printf "${bred} [*] inscope [NO]${reset}\n${reset}"; allinstalled=false;}
which enumerepo &>/dev/null || { printf "${bred} [*] enumerepo [NO]${reset}\n${reset}"; allinstalled=false;}
- which trufflehog &>/dev/null || { printf "${bred} [*] trufflehog [NO]${reset}\n${reset}"; allinstalled=false;}
which Web-Cache-Vulnerability-Scanner &>/dev/null || { printf "${bred} [*] Web-Cache-Vulnerability-Scanner [NO]${reset}\n"; allinstalled=false;}
which subfinder &>/dev/null || { printf "${bred} [*] subfinder [NO]${reset}\n${reset}"; allinstalled=false;}
which byp4xx &>/dev/null || { printf "${bred} [*] byp4xx [NO]${reset}\n${reset}"; allinstalled=false;}
@@ -1211,24 +1210,22 @@ function fuzz(){
if [ -s ".tmp/webs_all.txt" ]; then
mkdir -p $dir/fuzzing $dir/.tmp/fuzzing
if [ ! "$AXIOM" = true ]; then
- interlace -tL .tmp/webs_all.txt -threads ${INTERLACE_THREADS} -c "ffuf ${FFUF_FLAGS} -t ${FFUF_THREADS} -rate ${FFUF_RATELIMIT} -H \"${HEADER}\" -w ${fuzz_wordlist} -maxtime ${FFUF_MAXTIME} -u _target_/FUZZ -of json -o _output_/_cleantarget_.json" -o $dir/.tmp/fuzzing 2>>"$LOGFILE" &>/dev/null
+ interlace -tL .tmp/webs_all.txt -threads ${INTERLACE_THREADS} -c "ffuf ${FFUF_FLAGS} -t ${FFUF_THREADS} -rate ${FFUF_RATELIMIT} -H \"${HEADER}\" -w ${fuzz_wordlist} -maxtime ${FFUF_MAXTIME} -u _target_/FUZZ -o _output_/_cleantarget_.json" -o $dir/.tmp/fuzzing 2>>"$LOGFILE" &>/dev/null
for sub in $(cat .tmp/webs_all.txt); do
sub_out=$(echo $sub | sed -e 's|^[^/]*//||' -e 's|/.*$||')
[ -s "$dir/.tmp/fuzzing/${sub_out}.json" ] && cat $dir/.tmp/fuzzing/${sub_out}.json | jq -r 'try .results[] | "\(.status) \(.length) \(.url)"' | sort | anew -q $dir/fuzzing/${sub_out}.txt
done
- find $dir/fuzzing/ -type f -iname "*.txt" -exec cat {} + 2>>"$LOGFILE" | anew -q $dir/fuzzing/fuzzing_full.txt
+ find $dir/fuzzing/ -type f -iname "*.txt" -exec cat {} + 2>>"$LOGFILE" | sort -k3 | anew -q $dir/fuzzing/fuzzing_full.txt
else
- axiom-exec 'wget -q -O - https://raw.githubusercontent.com/six2dez/OneListForAll/main/onelistforallmicro.txt > /home/op/lists/fuzz_wordlist.txt' &>/dev/null
- axiom-scan .tmp/webs_all.txt -m ffuf -w /home/op/lists/fuzz_wordlist.txt -H "${HEADER}" $FFUF_FLAGS -s -maxtime $FFUF_MAXTIME -o $dir/fuzzing/ffuf-content.csv $AXIOM_EXTRA_ARGS 2>>"$LOGFILE" &>/dev/null
- grep -v "FUZZ,url,redirectlocation" $dir/fuzzing/ffuf-content.csv 2>>"$LOGFILE" | awk -F "," '{print $2" "$5" "$6}' | sort > $dir/fuzzing/ffuf-content.tmp
+ axiom-exec "wget -q -O - ${fuzzing_remote_list} > /home/op/lists/fuzz_wordlist.txt" &>/dev/null
+ axiom-exec "wget -q -O - ${fuzzing_remote_list} > /home/op/lists/seclists/Discovery/Web-Content/big.txt" &>/dev/null
+ axiom-scan .tmp/webs_all.txt -m ffuf -H "${HEADER}" $FFUF_FLAGS -s -maxtime $FFUF_MAXTIME -o $dir/.tmp/ffuf-content.json $AXIOM_EXTRA_ARGS 2>>"$LOGFILE" &>/dev/null
for sub in $(cat .tmp/webs_all.txt); do
sub_out=$(echo $sub | sed -e 's|^[^/]*//||' -e 's|/.*$||')
- grep "$sub" $dir/fuzzing/ffuf-content.tmp | awk '{print $2" "$3" "$1}' | sort -k1 | anew -q $dir/fuzzing/${sub_out}.txt
+ grep "$sub" $dir/.tmp/ffuf-content.json | awk '{print $2" "$3" "$1}' | sort -k1 | anew -q $dir/fuzzing/${sub_out}.txt
done
- find $dir/fuzzing/ -type f -iname "*.txt" -exec cat {} + 2>>"$LOGFILE" | anew -q $dir/fuzzing/fuzzing_full.txt
- rm -f $dir/fuzzing/ffuf-content.tmp $dir/fuzzing/ffuf-content.csv
+ find $dir/fuzzing/ -type f -iname "*.txt" -exec cat {} + 2>>"$LOGFILE" | sort -k3 | anew -q $dir/fuzzing/fuzzing_full.txt
fi
- sort --numeric-sort --reverse -t ' ' -k1 -k2 -o $dir/fuzzing/fuzzing_full.txt{,}
end_func "Results are saved in $domain/fuzzing/*subdomain*.txt" ${FUNCNAME[0]}
else
end_func "No $domain/web/webs.txts file found, fuzzing skipped " ${FUNCNAME[0]}
From 0aacbdc056521a98904f88adb6e831dce9c60392 Mon Sep 17 00:00:00 2001
From: six2dez
Date: Fri, 26 May 2023 13:17:50 +0200
Subject: [PATCH 08/40] Update docs
---
README.md | 45 ++++++++++++++++++++++++++++-----------------
1 file changed, 28 insertions(+), 17 deletions(-)
diff --git a/README.md b/README.md
index 9fd058e7..b9b68e24 100644
--- a/README.md
+++ b/README.md
@@ -145,7 +145,7 @@ Yes! reconFTW can also be easily deployed with Terraform and Ansible to AWS, if
```yaml
#################################################################
-# reconFTW config file #
+# reconFTW config file #
#################################################################
# General values
@@ -154,9 +154,13 @@ SCRIPTPATH="$( cd "$(dirname "$0")" >/dev/null 2>&1 ; pwd -P )" # Get current sc
profile_shell=".$(basename $(echo $SHELL))rc" # Get current shell profile
reconftw_version=$(git rev-parse --abbrev-ref HEAD)-$(git describe --tags) # Fetch current reconftw version
generate_resolvers=false # Generate custom resolvers with dnsvalidator
-update_resolvers=true # Fetch and rewrite resolvers before DNS resolution
+update_resolvers=true # Fetch and rewrite resolvers from trickest/resolvers before DNS resolution
+resolvers_url="https://raw.githubusercontent.com/trickest/resolvers/main/resolvers.txt"
+resolvers_trusted_url="https://raw.githubusercontent.com/six2dez/resolvers_reconftw/main/resolvers_trusted.txt"
+fuzzing_remote_list="https://raw.githubusercontent.com/six2dez/OneListForAll/main/onelistforallmicro.txt" # Wordlist sent to axiom (if used) for fuzzing
proxy_url="http://127.0.0.1:8080/" # Proxy url
install_golang=true # Set it to false if you already have Golang configured and ready
+upgrade_tools=true
#dir_output=/custom/output/path
# Golang Vars (Comment or change on your own)
@@ -168,6 +172,8 @@ export PATH=$GOPATH/bin:$GOROOT/bin:$HOME/.local/bin:$PATH
#NOTIFY_CONFIG=~/.config/notify/provider-config.yaml # No need to define
AMASS_CONFIG=~/.config/amass/config.ini
GITHUB_TOKENS=${tools}/.github_tokens
+GITLAB_TOKENS=${tools}/.gitlab_tokens
+SUBGPT_COOKIE=${tools}/subgpt_cookies.json
#CUSTOM_CONFIG=custom_config_path.txt # In case you use a custom config file, uncomment this line and set your files path
# APIs/TOKENS - Uncomment the lines you want removing the '#' at the beginning of the line
@@ -188,7 +194,7 @@ GOOGLE_DORKS=true
GITHUB_DORKS=true
GITHUB_REPOS=true
METADATA=true # Fetch metadata from indexed office documents
-EMAILS=true # Fetch emails from differents sites
+EMAILS=true # Fetch emails from different sites
DOMAIN_INFO=true # whois info
REVERSE_WHOIS=true # amass intel reverse whois info, takes some time
IP_INFO=true # Reverse IP search, geolocation and whois
@@ -205,8 +211,10 @@ SUBANALYTICS=true # Google Analytics search
SUBBRUTE=true # DNS bruteforcing
SUBSCRAPING=true # Subdomains extraction from web crawling
SUBPERMUTE=true # DNS permutations
+SUBREGEXPERMUTE=true # Permutations by regex analysis
+SUBGPT=true # Permutations by BingGPT prediction
PERMUTATIONS_OPTION=gotator # The alternative is "ripgen" (faster, not deeper)
-GOTATOR_FLAGS="-depth 1 -numbers 3 -mindup -adv -md" # Flags for gotator
+GOTATOR_FLAGS=" -depth 1 -numbers 3 -mindup -adv -md" # Flags for gotator
SUBTAKEOVER=false # Check subdomain takeovers, false by default cuz nuclei already check this
SUB_RECURSIVE_PASSIVE=false # Uses a lot of API keys queries
DEEP_RECURSIVE_PASSIVE=10 # Number of top subdomains for recursion
@@ -215,7 +223,7 @@ ZONETRANSFER=true # Check zone transfer
S3BUCKETS=true # Check S3 buckets misconfigs
REVERSE_IP=false # Check reverse IP subdomain search (set True if your target is CIDR/IP)
TLS_PORTS="21,22,25,80,110,135,143,261,271,324,443,448,465,563,614,631,636,664,684,695,832,853,854,990,993,989,992,994,995,1129,1131,1184,2083,2087,2089,2096,2221,2252,2376,2381,2478,2479,2482,2484,2679,2762,3077,3078,3183,3191,3220,3269,3306,3410,3424,3471,3496,3509,3529,3539,3535,3660,36611,3713,3747,3766,3864,3885,3995,3896,4031,4036,4062,4064,4081,4083,4116,4335,4336,4536,4590,4740,4843,4849,5443,5007,5061,5321,5349,5671,5783,5868,5986,5989,5990,6209,6251,6443,6513,6514,6619,6697,6771,7202,7443,7673,7674,7677,7775,8243,8443,8991,8989,9089,9295,9318,9443,9444,9614,9802,10161,10162,11751,12013,12109,14143,15002,16995,41230,16993,20003"
-INSCOPE=false # Uses inscope tool to filter the scope, requires .scope file in reconftw folder
+INSCOPE=false # Uses inscope tool to filter the scope, requires .scope file in reconftw folder
# Web detection
WEBPROBESIMPLE=true # Web probing on 80/443
@@ -224,12 +232,10 @@ WEBSCREENSHOT=true # Webs screenshooting
VIRTUALHOSTS=false # Check virtualhosts by fuzzing HOST header
NMAP_WEBPROBE=true # If disabled, httpx runs directly over the subdomains list; running nmap before web probing increases speed and avoids repeated requests
UNCOMMON_PORTS_WEB="81,300,591,593,832,981,1010,1311,1099,2082,2095,2096,2480,3000,3001,3002,3003,3128,3333,4243,4567,4711,4712,4993,5000,5104,5108,5280,5281,5601,5800,6543,7000,7001,7396,7474,8000,8001,8008,8014,8042,8060,8069,8080,8081,8083,8088,8090,8091,8095,8118,8123,8172,8181,8222,8243,8280,8281,8333,8337,8443,8500,8834,8880,8888,8983,9000,9001,9043,9060,9080,9090,9091,9092,9200,9443,9502,9800,9981,10000,10250,11371,12443,15672,16080,17778,18091,18092,20720,32000,55440,55672"
-# You can change to aquatone if gowitness fails, comment the one you don't want
-AXIOM_SCREENSHOT_MODULE=webscreenshot # Choose between aquatone,gowitness,webscreenshot
# Host
FAVICON=true # Check Favicon domain discovery
-PORTSCANNER=true # Enable or disable the whole Port scanner module
+PORTSCANNER=true # Enable or disable the whole Port scanner module
PORTSCAN_PASSIVE=true # Port scanner with Shodan
PORTSCAN_ACTIVE=true # Port scanner with nmap
CDN_IP=true # Check which IPs belong to a CDN
@@ -238,7 +244,8 @@ CDN_IP=true # Check which IPs belongs to CDN
WAF_DETECTION=true # Detect WAFs
NUCLEICHECK=true # Enable or disable nuclei
NUCLEI_SEVERITY="info,low,medium,high,critical" # Set template severity levels
-NUCLEI_FLAGS="-silent -t ~/nuclei-templates/ -retries 2" # Additional nuclei extra flags, don't set the severity here but the exclusions like "-etags openssh"
+NUCLEI_FLAGS=" -silent -t $HOME/nuclei-templates/ -retries 2" # Additional nuclei extra flags, don't set the severity here but the exclusions like " -etags openssh"
+NUCLEI_FLAGS_JS=" -silent -tags exposure,token -severity info,low,medium,high,critical" # Additional nuclei extra flags for js secrets
URL_CHECK=true # Enable or disable URL collection
URL_CHECK_PASSIVE=true # Search for urls, passive methods from Archive, OTX, CommonCrawl, etc
URL_CHECK_ACTIVE=true # Search for urls by crawling the websites
@@ -263,13 +270,16 @@ SSRF_CHECKS=true # SSRF checks
CRLF_CHECKS=true # CRLF checks
LFI=true # LFI by fuzzing
SSTI=true # SSTI by fuzzing
-SQLI=true # Check SQLI with sqlmap
+SQLI=true # Check SQLI
+SQLMAP=true # Check SQLI with sqlmap
+GHAURI=false # Check SQLI with ghauri
BROKENLINKS=true # Check for broken links
SPRAY=true # Performs password spraying
COMM_INJ=true # Check for command injections with commix
PROTO_POLLUTION=true # Check for prototype pollution flaws
SMUGGLING=true # Check for HTTP request smuggling flaws
-WEBCACHE=true # Check for HTTP request smuggling flaws
+WEBCACHE=true # Check for Web Cache issues
+BYPASSER4XX=true # Check for 4XX bypasses
# Extra features
NOTIFICATION=false # Notification for every function
@@ -283,8 +293,9 @@ REMOVELOG=false # Delete logs after execution
PROXY=false # Send to proxy the websites found
SENDZIPNOTIFY=false # Send to zip the results (over notify)
PRESERVE=true # set to true to avoid deleting the .called_fn files on really large scans
-FFUF_FLAGS="-mc all -fc 404 -ac -sf" # Ffuf flags
-HTTPX_FLAGS="-follow-redirects -random-agent -status-code -silent -title -web-server -tech-detect -location" # Httpx flags for simple web probing
+FFUF_FLAGS=" -mc all -fc 404 -ach -sf -of json" # Ffuf flags
+HTTPX_FLAGS=" -follow-redirects -random-agent -status-code -silent -title -web-server -tech-detect -location -content-length" # Httpx flags for simple web probing
+GOWITNESS_FLAGS=" --disable-logging --timeout 5" # Gowitness flags
# HTTP options
HEADER="User-Agent: Mozilla/5.0 (X11; Linux x86_64; rv:72.0) Gecko/20100101 Firefox/72.0" # Default header
@@ -303,14 +314,14 @@ PUREDNS_PUBLIC_LIMIT=0 # Set between 2000 - 10000 if your router blows up, 0 mea
PUREDNS_TRUSTED_LIMIT=400
PUREDNS_WILDCARDTEST_LIMIT=30
PUREDNS_WILDCARDBATCH_LIMIT=1500000
-WEBSCREENSHOT_THREADS=200
-GOWITNESS_THREADS=8
+GOWITNESS_THREADS=20
RESOLVE_DOMAINS_THREADS=150
PPFUZZ_THREADS=30
DNSVALIDATOR_THREADS=200
INTERLACE_THREADS=10
TLSX_THREADS=1000
XNLINKFINDER_DEPTH=3
+BYP4XX_THREADS=20
# Rate limits
HTTPX_RATELIMIT=150
@@ -340,13 +351,13 @@ resolvers_trusted=${tools}/resolvers_trusted.txt
# AXIOM=false Uncomment only to overwrite command line flags
AXIOM_FLEET_LAUNCH=true # Enable or disable spin up a new fleet, if false it will use the current fleet with the AXIOM_FLEET_NAME prefix
AXIOM_FLEET_NAME="reconFTW" # Fleet's prefix name
-AXIOM_FLEET_COUNT=5 # Fleet's number
+AXIOM_FLEET_COUNT=10 # Fleet's number
AXIOM_FLEET_REGIONS="eu-central" # Fleet's region
AXIOM_FLEET_SHUTDOWN=true # Enable or disable deleting the fleet after the execution
# This is a script on your reconftw host that might prep things your way...
#AXIOM_POST_START="~/Tools/axiom_config.sh" # Useful to send your config files to the fleet
AXIOM_EXTRA_ARGS="" # Leave empty if you don't want to add extra arguments
-#AXIOM_EXTRA_ARGS="--rm-logs" # Example
+#AXIOM_EXTRA_ARGS=" --rm-logs" # Example
# TERM COLORS
bred='\033[1;31m'
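As a rough sketch of what the new resolver settings feed into (the variable names come from the config above; the guard and destination files are assumptions, not the project's actual updater), the pre-resolution refresh amounts to:

    # Hedged sketch: refresh resolver lists before DNS resolution (paths assumed).
    if [ "$update_resolvers" = true ]; then
        wget -q -O "${resolvers}" "${resolvers_url}"                 # public resolvers
        wget -q -O "${resolvers_trusted}" "${resolvers_trusted_url}" # trusted resolvers
    fi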
From af918e73721e1e0de9331d3130070ff422709e03 Mon Sep 17 00:00:00 2001
From: 720922
Date: Fri, 26 May 2023 19:51:03 +0530
Subject: [PATCH 09/40] Randomized Banner Output
---
banners.txt | 167 ++++++++++++++++++++++++++++++++++++++++++++++++++++
reconftw.sh | 22 ++++---
2 files changed, 177 insertions(+), 12 deletions(-)
create mode 100644 banners.txt
diff --git a/banners.txt b/banners.txt
new file mode 100644
index 00000000..c789853d
--- /dev/null
+++ b/banners.txt
@@ -0,0 +1,167 @@
+banner1=""" ██▀███ ▓█████ ▄████▄ ▒█████ ███▄ █ █████▒▄▄▄█████▓ █ █░
+▓██ ▒ ██▒▓█ ▀ ▒██▀ ▀█ ▒██▒ ██▒ ██ ▀█ █ ▓██ ▒ ▓ ██▒ ▓▒▓█░ █ ░█░
+▓██ ░▄█ ▒▒███ ▒▓█ ▄ ▒██░ ██▒▓██ ▀█ ██▒▒████ ░ ▒ ▓██░ ▒░▒█░ █ ░█
+▒██▀▀█▄ ▒▓█ ▄ ▒▓▓▄ ▄██▒▒██ ██░▓██▒ ▐▌██▒░▓█▒ ░ ░ ▓██▓ ░ ░█░ █ ░█
+░██▓ ▒██▒░▒████▒▒ ▓███▀ ░░ ████▓▒░▒██░ ▓██░░▒█░ ▒██▒ ░ ░░██▒██▓
+░ ▒▓ ░▒▓░░░ ▒░ ░░ ░▒ ▒ ░░ ▒░▒░▒░ ░ ▒░ ▒ ▒ ▒ ░ ▒ ░░ ░ ▓░▒ ▒
+ ░▒ ░ ▒░ ░ ░ ░ ░ ▒ ░ ▒ ▒░ ░ ░░ ░ ▒░ ░ ░ ▒ ░ ░
+ ░░ ░ ░ ░ ░ ░ ░ ▒ ░ ░ ░ ░ ░ ░ ░ ░
+ ░ ░ ░░ ░ ░ ░ ░ ░
+ ░
+"""
+
+banner2=""" _____ ______ _________ __
+ | __ \ | ____|__ __\ \ / /
+ | |__) |___ ___ ___ _ __ | |__ | | \ \ /\ / /
+ | _ // _ \/ __/ _ \| '_ \| __| | | \ \/ \/ /
+ | | \ \ __/ (_| (_) | | | | | | | \ /\ /
+ |_| \_\___|\___\___/|_| |_|_| |_| \/ \/
+"""
+
+banner3=""" ____ ____ ___ _____ _ _ ____ ____ _ _
+( _ \( ___)/ __)( _ )( \( )( ___)(_ _)( \/\/ )
+ ) / )__)( (__ )(_)( ) ( )__) )( ) (
+(_)\_)(____)\___)(_____)(_)\_)(__) (__) (__/\__)
+
+"""
+banner4="""######## ######## ###### ####### ## ## ######## ######## ## ##
+## ## ## ## ## ## ## ### ## ## ## ## ## ##
+## ## ## ## ## ## #### ## ## ## ## ## ##
+######## ###### ## ## ## ## ## ## ###### ## ## ## ##
+## ## ## ## ## ## ## #### ## ## ## ## ##
+## ## ## ## ## ## ## ## ### ## ## ## ## ##
+## ## ######## ###### ####### ## ## ## ## ### ###
+"""
+
+banner5=""" _______ _______ _______ _______ _ _______ _________
+( ____ )( ____ \( ____ \( ___ )( ( /|( ____ \\__ __/|\ /|
+| ( )|| ( \/| ( \/| ( ) || \ ( || ( \/ ) ( | ) ( |
+| (____)|| (__ | | | | | || \ | || (__ | | | | _ | |
+| __)| __) | | | | | || (\ \) || __) | | | |( )| |
+| (\ ( | ( | | | | | || | \ || ( | | | || || |
+| ) \ \__| (____/\| (____/\| (___) || ) \ || ) | | | () () |
+|/ \__/(_______/(_______/(_______)|/ )_)|/ )_( (_______)
+"""
+
+banner6="""__________ ________________________ __
+\______ \ ____ ____ ____ ____ \_ _____/\__ ___/ \ / \
+ | _// __ \_/ ___\/ _ \ / \ | __) | | \ \/\/ /
+ | | \ ___/\ \__( <_> ) | \| \ | | \ /
+ |____|_ /\___ >\___ >____/|___| /\___ / |____| \__/\ /
+ \/ \/ \/ \/ \/ \/
+"""
+
+banner7=""" __ ___ _____ __ __
+ /__\ ___ ___ ___ _ __ / __\/__ \/ / /\ \ \
+ / \/// _ \/ __/ _ \| '_ \ / _\ / /\/\ \/ \/ /
+/ _ \ __/ (_| (_) | | | / / / / \ /\ /
+\/ \_/\___|\___\___/|_| |_\/ \/ \/ \/
+"""
+
+banner8="""╦═╗┌─┐┌─┐┌─┐┌┐┌╔═╗╔╦╗╦ ╦
+╠╦╝├┤ │ │ ││││╠╣ ║ ║║║
+╩╚═└─┘└─┘└─┘┘└┘╚ ╩ ╚╩╝
+"""
+
+banner9=""" ▄▀▀▄▀▀▀▄ ▄▀▀█▄▄▄▄ ▄▀▄▄▄▄ ▄▀▀▀▀▄ ▄▀▀▄ ▀▄ ▄▀▀▀█▄ ▄▀▀▀█▀▀▄ ▄▀▀▄ ▄▀▀▄
+█ █ █ ▐ ▄▀ ▐ █ █ ▌ █ █ █ █ █ █ █ ▄▀ ▀▄ █ █ ▐ █ █ ▐ █
+▐ █▀▀█▀ █▄▄▄▄▄ ▐ █ █ █ ▐ █ ▀█ ▐ █▄▄▄▄ ▐ █ ▐ █ █
+ ▄▀ █ █ ▌ █ ▀▄ ▄▀ █ █ █ ▐ █ █ ▄ █
+█ █ ▄▀▄▄▄▄ ▄▀▄▄▄▄▀ ▀▀▀▀ ▄▀ █ █ ▄▀ ▀▄▀ ▀▄ ▄▀
+▐ ▐ █ ▐ █ ▐ █ ▐ █ █ ▀
+ ▐ ▐ ▐ ▐ ▐
+"""
+
+banner10="""
+ // ) ) // / / /__ ___/ || / | / /
+ //___/ / ___ ___ ___ __ //___ / / || / | / /
+ / ___ ( //___) ) // ) ) // ) ) // ) ) / ___ / / || / /||/ /
+ // | | // // // / / // / / // / / ||/ / | /
+// | | ((____ ((____ ((___/ / // / / // / / | / | /
+"""
+
+banner11=""" ____ _____________ __
+ / __ \___ _________ ____ / ____/_ __/ | / /
+ / /_/ / _ \/ ___/ __ \/ __ \/ /_ / / | | /| / /
+ / _, _/ __/ /__/ /_/ / / / / __/ / / | |/ |/ /
+/_/ |_|\___/\___/\____/_/ /_/_/ /_/ |__/|__/
+"""
+
+banner12=""" ####### ####### # #
+ ##### ###### #### #### # # # # # # #
+ # # # # # # # ## # # # # # #
+ # # ##### # # # # # # ##### # # # #
+ ##### # # # # # # # # # # # #
+ # # # # # # # # ## # # # # #
+ # # ###### #### #### # # # # ## ##
+"""
+
+banner13=""" ___ ____ __ ___ _ ____ _____ _
+| |_) | |_ / /\` / / \ | |\ | | |_ | | \ \ /
+|_| \ |_|__ \_\_, \_\_/ |_| \| |_| |_| \_\/\/
+"""
+
+banner14=""" ______ _______ _______ _____ __ _ _______ _______ _ _ _
+ |_____/ |______ | | | | \ | |______ | | | |
+ | \_ |______ |_____ |_____| | \_| | | |__|__|
+"""
+
+banner15=""" ____ ____ ___ ___ __ __ ____ ______ __ __
+ || \\ || // // \\ ||\ || || | || | || ||
+ ||_// ||== (( (( )) ||\\|| ||== || \\ /\ //
+ || \\ ||___ \\__ \\_// || \|| || || \V/\V/
+"""
+
+banner16=""" ______ _______ _______ _ _ _
+(_____ \ (_______|_______|_)(_)(_)
+ _____) )_____ ____ ___ ____ _____ _ _ _ _
+| __ /| ___ |/ ___) _ \| _ \| ___) | | | || || |
+| | \ \| ____( (__| |_| | | | | | | | | || || |
+|_| |_|_____)\____)___/|_| |_|_| |_| \_____/
+"""
+
+banner17=""" ____ ____ ____ ____ ____ ____ ____ ____ _________
+||R |||e |||c |||o |||n |||F |||T |||W ||| ||
+||__|||__|||__|||__|||__|||__|||__|||__|||_______||
+|/__\|/__\|/__\|/__\|/__\|/__\|/__\|/__\|/_______\|
+"""
+
+banner18=""" __ ___ ___
+ )_) _ _ _ _ )_ ) \ X /
+/ \ )_) (_ (_) ) ) ( ( \/ \/
+ (_
+"""
+
+banner19=""" ______ _________ __
+ | ____|__ __\ \ / /
+ _ __ ___ ___ ___ _ __ | |__ | | \ \ /\ / /
+ | '__/ _ \/ __/ _ \| '_ \| __| | | \ \/ \/ /
+ | | | __/ (_| (_) | | | | | | | \ /\ /
+ |_| \___|\___\___/|_| |_|_| |_| \/ \/
+"""
+
+banner20=""" :::==== :::===== :::===== :::==== :::= === :::===== :::==== ::: === ===
+ ::: === ::: ::: ::: === :::===== ::: :::==== ::: === ===
+ ======= ====== === === === ======== ====== === === === ===
+ === === === === === === === ==== === === ===========
+ === === ======== ======= ====== === === === === ==== ====
+"""
+
+banner21=""" _ _ _ _ _ _ _ _
+ / \ / \ / \ / \ / \ / \ / \ / \
+ ( r | e | c | o | n | F | T | W )
+ \_/ \_/ \_/ \_/ \_/ \_/ \_/ \_/
+"""
+
+banner22=""" _______ _______ ________
+.----.-----.----.-----.-----.| ___|_ _| | | |
+| _| -__| __| _ | || ___| | | | | | |
+|__| |_____|____|_____|__|__||___| |___| |________|
+"""
+
+banner23=""" ________) ______) __ __)
+ (, / (, / (, ) | /
+ __ _ _ _____ /___, / | /| /
+/ (__(/_(__(_) / (_) / ) / |/ |/
+ (_/ (_/ / |
+"""
\ No newline at end of file
diff --git a/reconftw.sh b/reconftw.sh
index f5ba94e4..ce8685a1 100755
--- a/reconftw.sh
+++ b/reconftw.sh
@@ -1,18 +1,16 @@
#!/usr/bin/env bash
+function banner_graber(){
+ source $SCRIPTPATH/banners.txt
+ randx=$(shuf -i 1-23 -n 1)
+ tmp="banner${randx}"
+ banner_code=${!tmp}
+ echo -e "${banner_code}"
+}
function banner(){
- printf "\n${bgreen}"
- printf " ██▀███ ▓█████ ▄████▄ ▒█████ ███▄ █ █████▒▄▄▄█████▓ █ █░\n"
- printf " ▓██ ▒ ██▒▓█ ▀ ▒██▀ ▀█ ▒██▒ ██▒ ██ ▀█ █ ▓██ ▒ ▓ ██▒ ▓▒▓█░ █ ░█░\n"
- printf " ▓██ ░▄█ ▒▒███ ▒▓█ ▄ ▒██░ ██▒▓██ ▀█ ██▒▒████ ░ ▒ ▓██░ ▒░▒█░ █ ░█ \n"
- printf " ▒██▀▀█▄ ▒▓█ ▄ ▒▓▓▄ ▄██▒▒██ ██░▓██▒ ▐▌██▒░▓█▒ ░ ░ ▓██▓ ░ ░█░ █ ░█ \n"
- printf " ░██▓ ▒██▒░▒████▒▒ ▓███▀ ░░ ████▓▒░▒██░ ▓██░░▒█░ ▒██▒ ░ ░░██▒██▓ \n"
- printf " ░ ▒▓ ░▒▓░░░ ▒░ ░░ ░▒ ▒ ░░ ▒░▒░▒░ ░ ▒░ ▒ ▒ ▒ ░ ▒ ░░ ░ ▓░▒ ▒ \n"
- printf " ░▒ ░ ▒░ ░ ░ ░ ░ ▒ ░ ▒ ▒░ ░ ░░ ░ ▒░ ░ ░ ▒ ░ ░ \n"
- printf " ░░ ░ ░ ░ ░ ░ ░ ▒ ░ ░ ░ ░ ░ ░ ░ ░ \n"
- printf " ░ ░ ░░ ░ ░ ░ ░ ░ \n"
- printf " ░ \n"
- printf " ${reconftw_version} by @six2dez${reset}\n"
+ banner_code=$(banner_graber)
+ printf "\n${bgreen}${banner_code}"
+ printf "\n ${reconftw_version} by @six2dez${reset}\n"
}
###############################################################################################################
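The trick that makes banner_graber work is bash indirect expansion: ${!tmp} expands to the value of the variable whose name is stored in tmp. A self-contained illustration of the same pattern (sample banners are illustrative):

    # Illustrative only: select one of N variables by composing its name at runtime.
    banner1="first banner"
    banner2="second banner"
    randx=$(shuf -i 1-2 -n 1)   # random index, as in banner_graber
    tmp="banner${randx}"        # compose the variable name
    echo -e "${!tmp}"           # indirect expansion prints the chosen banner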
From 825d481ba70a53d27ba8e0017dda87bc388abe13 Mon Sep 17 00:00:00 2001
From: six2dez
Date: Sat, 27 May 2023 00:58:38 +0200
Subject: [PATCH 10/40] truffle + gitleaks for git repo scan
---
README.md | 2 +-
install.sh | 9 ++++++---
reconftw.sh | 4 +++-
3 files changed, 10 insertions(+), 5 deletions(-)
diff --git a/README.md b/README.md
index b9b68e24..13bf6572 100644
--- a/README.md
+++ b/README.md
@@ -479,7 +479,7 @@ reset='\033[0m'
- Metadata finder ([MetaFinder](https://github.com/Josue87/MetaFinder))
- Google Dorks ([dorks_hunter](https://github.com/six2dez/dorks_hunter))
- Github Dorks ([gitdorks_go](https://github.com/damit5/gitdorks_go))
-- GitHub org analysis ([enumerepo](https://github.com/trickest/enumerepo) and [gitleaks](https://github.com/gitleaks/gitleaks))
+- GitHub org analysis ([enumerepo](https://github.com/trickest/enumerepo), [trufflehog](https://github.com/trufflesecurity/trufflehog) and [gitleaks](https://github.com/gitleaks/gitleaks))
## Subdomains
diff --git a/install.sh b/install.sh
index 0bbd7287..f9b02bca 100755
--- a/install.sh
+++ b/install.sh
@@ -117,6 +117,7 @@ repos["byp4xx"]="lobuhi/byp4xx"
repos["Infoga"]="m4ll0k/Infoga"
repos["ghauri"]="r0oth3x49/ghauri"
repos["gitleaks"]="gitleaks/gitleaks"
+repos["trufflehog"]="trufflesecurity/trufflehog"
function banner_web(){
@@ -558,11 +559,13 @@ if [ "$double_check" = "true" ]; then
eval $SUDO python3 setup.py install $DEBUG_STD
fi
if [ "massdns" = "$repo" ]; then
- eval make $DEBUG_STD && strip -s bin/massdns && eval $SUDO cp bin/massdns /usr/local/bin/ $DEBUG_ERROR
+ eval make $DEBUG_STD && strip -s bin/massdns && eval $SUDO cp bin/massdns /usr/local/bin/ $DEBUG_ERROR
elif [ "gf" = "$repo" ]; then
- eval cp -r examples ~/.gf $DEBUG_ERROR
+ eval cp -r examples ~/.gf $DEBUG_ERROR
elif [ "Gf-Patterns" = "$repo" ]; then
- eval mv ./*.json ~/.gf $DEBUG_ERROR
+ eval mv ./*.json ~/.gf $DEBUG_ERROR
+ elif [ "trufflehog" = "$repo" ]; then
+ eval go install $DEBUG_STD
fi
cd "$dir" || { echo "Failed to cd to $dir in ${FUNCNAME[0]} @ line ${LINENO}"; exit 1; }
done
diff --git a/reconftw.sh b/reconftw.sh
index f5ba94e4..6dca8db0 100755
--- a/reconftw.sh
+++ b/reconftw.sh
@@ -113,6 +113,7 @@ function tools_installed(){
which gau &>/dev/null || { printf "${bred} [*] gau [NO]${reset}\n${reset}"; allinstalled=false;}
which subgpt &>/dev/null || { printf "${bred} [*] subgpt [NO]${reset}\n${reset}"; allinstalled=false;}
which gitleaks &>/dev/null || { printf "${bred} [*] gitleaks [NO]${reset}\n${reset}"; allinstalled=false;}
+ which trufflehog &>/dev/null || { printf "${bred} [*] trufflehog [NO]${reset}\n${reset}"; allinstalled=false;}
if [ "${allinstalled}" = true ]; then
printf "${bgreen} Good! All installed! ${reset}\n\n"
@@ -178,7 +179,8 @@ function github_repos(){
mkdir -p .tmp/github_repos 2>>"$LOGFILE" &>/dev/null
[ -s ".tmp/company_repos_url.txt" ] && interlace -tL .tmp/company_repos_url.txt -threads ${INTERLACE_THREADS} -c "git clone _target_ .tmp/github_repos/_cleantarget_" 2>>"$LOGFILE" &>/dev/null
[ -d ".tmp/github/" ] && ls .tmp/github_repos > .tmp/github_repos_folders.txt
- [ -s ".tmp/company_repos_url.txt" ] && interlace -tL .tmp/github_repos_folders.txt -threads ${INTERLACE_THREADS} -c "gitleaks detect --source .tmp/github_repos/_target_ --no-banner --no-color -r ./tmp/github/gh_secret_cleantarget_.json" 2>>"$LOGFILE" &>/dev/null
+ [ -s ".tmp/github_repos_folders.txt" ] && interlace -tL .tmp/github_repos_folders.txt -threads ${INTERLACE_THREADS} -c "gitleaks detect --source .tmp/github_repos/_target_ --no-banner --no-color -r ./tmp/github/gh_secret_cleantarget_.json" 2>>"$LOGFILE" &>/dev/null
+ [ -s ".tmp/company_repos_url.txt" ] && interlace -tL .tmp/company_repos_url.txt -threads ${INTERLACE_THREADS} -c "trufflehog git _target_ -j | jq -c > _output_/_cleantarget_" -o .tmp/github/ 2>>"$LOGFILE" &>/dev/null
[ -d ".tmp/github/" ] && cat .tmp/github/* | jq -c | jq -r > osint/github_company_secrets.json 2>>"$LOGFILE" &>/dev/null
else
printf "\n${bred} Required file ${GITHUB_TOKENS} not exists or empty${reset}\n"
From 5ad0a089e4e3c6fb6e41c542e865b22225d978d1 Mon Sep 17 00:00:00 2001
From: six2dez
Date: Mon, 29 May 2023 11:57:53 +0200
Subject: [PATCH 11/40] nuclei update before run
---
reconftw.sh | 3 ++-
1 file changed, 2 insertions(+), 1 deletion(-)
diff --git a/reconftw.sh b/reconftw.sh
index 8a54af7c..538bdc21 100755
--- a/reconftw.sh
+++ b/reconftw.sh
@@ -841,6 +841,7 @@ function subtakeover(){
touch .tmp/tko.txt
[ ! -s ".tmp/webs_all.txt" ] && cat webs/webs.txt webs/webs_uncommon_ports.txt 2>/dev/null | anew -q .tmp/webs_all.txt
if [ ! "$AXIOM" = true ]; then
+ nuclei -update 2>>"$LOGFILE" &>/dev/null
cat subdomains/subdomains.txt .tmp/webs_all.txt 2>/dev/null | nuclei -silent -nh -tags takeover -severity low,medium,high,critical -retries 3 -rl $NUCLEI_RATELIMIT -o .tmp/tko.txt
else
cat subdomains/subdomains.txt .tmp/webs_all.txt 2>>"$LOGFILE" | sed '/^$/d' | anew -q .tmp/webs_subs.txt
@@ -1166,7 +1167,7 @@ function waf_checks(){
function nuclei_check(){
if { [ ! -f "$called_fn_dir/.${FUNCNAME[0]}" ] || [ "$DIFF" = true ]; } && [ "$NUCLEICHECK" = true ]; then
start_func ${FUNCNAME[0]} "Templates based web scanner"
- nuclei -update-templates 2>>"$LOGFILE" &>/dev/null
+ nuclei -update 2>>"$LOGFILE" &>/dev/null
mkdir -p nuclei_output
[ ! -s ".tmp/webs_all.txt" ] && cat webs/webs.txt webs/webs_uncommon_ports.txt 2>/dev/null | anew -q .tmp/webs_all.txt
[ ! -s ".tmp/webs_subs.txt" ] && cat subdomains/subdomains.txt .tmp/webs_all.txt 2>>"$LOGFILE" | anew -q .tmp/webs_subs.txt
From a672ba201f45b77ed77f4901a257dc83c3c1a149 Mon Sep 17 00:00:00 2001
From: six2dez
Date: Thu, 1 Jun 2023 12:02:39 +0200
Subject: [PATCH 12/40] small change on sub tko detection
---
reconftw.cfg | 2 +-
reconftw.sh | 4 ++--
2 files changed, 3 insertions(+), 3 deletions(-)
diff --git a/reconftw.cfg b/reconftw.cfg
index dc2d5f27..f92bd05f 100644
--- a/reconftw.cfg
+++ b/reconftw.cfg
@@ -69,7 +69,7 @@ SUBREGEXPERMUTE=true # Permutations by regex analysis
SUBGPT=true # Permutations by BingGPT prediction
PERMUTATIONS_OPTION=gotator # The alternative is "ripgen" (faster, not deeper)
GOTATOR_FLAGS=" -depth 1 -numbers 3 -mindup -adv -md" # Flags for gotator
-SUBTAKEOVER=false # Check subdomain takeovers, false by default cuz nuclei already check this
+SUBTAKEOVER=true # Check subdomain takeovers (nuclei also checks this)
SUB_RECURSIVE_PASSIVE=false # Uses a lot of API keys queries
DEEP_RECURSIVE_PASSIVE=10 # Number of top subdomains for recursion
SUB_RECURSIVE_BRUTE=false # Needs big disk space and time to resolve
diff --git a/reconftw.sh b/reconftw.sh
index 538bdc21..9ca5741c 100755
--- a/reconftw.sh
+++ b/reconftw.sh
@@ -842,10 +842,10 @@ function subtakeover(){
[ ! -s ".tmp/webs_all.txt" ] && cat webs/webs.txt webs/webs_uncommon_ports.txt 2>/dev/null | anew -q .tmp/webs_all.txt
if [ ! "$AXIOM" = true ]; then
nuclei -update 2>>"$LOGFILE" &>/dev/null
- cat subdomains/subdomains.txt .tmp/webs_all.txt 2>/dev/null | nuclei -silent -nh -tags takeover -severity low,medium,high,critical -retries 3 -rl $NUCLEI_RATELIMIT -o .tmp/tko.txt
+ cat subdomains/subdomains.txt .tmp/webs_all.txt 2>/dev/null | nuclei -silent -nh -tags takeover -severity info,low,medium,high,critical -retries 3 -rl $NUCLEI_RATELIMIT -o .tmp/tko.txt
else
cat subdomains/subdomains.txt .tmp/webs_all.txt 2>>"$LOGFILE" | sed '/^$/d' | anew -q .tmp/webs_subs.txt
- [ -s ".tmp/webs_subs.txt" ] && axiom-scan .tmp/webs_subs.txt -m nuclei -tags takeover -nh -severity low,medium,high,critical -retries 3 -rl $NUCLEI_RATELIMIT -o .tmp/tko.txt $AXIOM_EXTRA_ARGS 2>>"$LOGFILE" &>/dev/null
+ [ -s ".tmp/webs_subs.txt" ] && axiom-scan .tmp/webs_subs.txt -m nuclei -tags takeover -nh -severity info,low,medium,high,critical -retries 3 -rl $NUCLEI_RATELIMIT -o .tmp/tko.txt $AXIOM_EXTRA_ARGS 2>>"$LOGFILE" &>/dev/null
fi
# DNS_TAKEOVER
From 6f213797520b429fe17a1732445bbaf85730465e Mon Sep 17 00:00:00 2001
From: six2dez
Date: Thu, 1 Jun 2023 12:38:20 +0200
Subject: [PATCH 13/40] update ffuf over axiom
---
reconftw.sh | 5 +++--
1 file changed, 3 insertions(+), 2 deletions(-)
diff --git a/reconftw.sh b/reconftw.sh
index 9ca5741c..b72e63d6 100755
--- a/reconftw.sh
+++ b/reconftw.sh
@@ -1218,9 +1218,10 @@ function fuzz(){
done
find $dir/fuzzing/ -type f -iname "*.txt" -exec cat {} + 2>>"$LOGFILE" | sort -k3 | anew -q $dir/fuzzing/fuzzing_full.txt
else
+ axiom-exec "mkdir -p /home/op/lists/seclists/Discovery/Web-Content/" &>/dev/null
axiom-exec "wget -q -O - ${fuzzing_remote_list} > /home/op/lists/fuzz_wordlist.txt" &>/dev/null
axiom-exec "wget -q -O - ${fuzzing_remote_list} > /home/op/lists/seclists/Discovery/Web-Content/big.txt" &>/dev/null
- axiom-scan .tmp/webs_all.txt -m ffuf -H "${HEADER}" $FFUF_FLAGS -s -maxtime $FFUF_MAXTIME -o $dir/.tmp/ffuf-content.json $AXIOM_EXTRA_ARGS 2>>"$LOGFILE" &>/dev/null
+ axiom-scan .tmp/webs_all.txt -m ffuf_base -H "${HEADER}" $FFUF_FLAGS -s -maxtime $FFUF_MAXTIME -o $dir/.tmp/ffuf-content.json $AXIOM_EXTRA_ARGS 2>>"$LOGFILE" &>/dev/null
for sub in $(cat .tmp/webs_all.txt); do
sub_out=$(echo $sub | sed -e 's|^[^/]*//||' -e 's|/.*$||')
grep "$sub" $dir/.tmp/ffuf-content.json | awk '{print $2" "$3" "$1}' | sort -k1 | anew -q $dir/fuzzing/${sub_out}.txt
@@ -1439,7 +1440,7 @@ function jschecks(){
if [ ! "$AXIOM" = true ]; then
[ -s "js/js_livelinks.txt" ] && cat js/js_livelinks.txt | Mantra -ua ${HEADER} -s | anew -q js/js_secrets.txt
else
- [ -s "js/js_livelinks.txt" ] && axiom-scan js/js_livelinks.txt -m Mantra -ua ${HEADER} -o js/js_secrets.txt $AXIOM_EXTRA_ARGS 2>>"$LOGFILE" &>/dev/null
+ [ -s "js/js_livelinks.txt" ] && axiom-scan js/js_livelinks.txt -m mantra -ua ${HEADER} -o js/js_secrets.txt $AXIOM_EXTRA_ARGS 2>>"$LOGFILE" &>/dev/null
fi
printf "${yellow} Running : Building wordlist 5/5${reset}\n"
[ -s "js/js_livelinks.txt" ] && interlace -tL js/js_livelinks.txt -threads ${INTERLACE_THREADS} -c "python3 $tools/getjswords.py '_target_' | anew -q webs/dict_words.txt" 2>>"$LOGFILE" &>/dev/null
From 81ff90ba4dcfa239a9dc15b4d3aca29abdd77014 Mon Sep 17 00:00:00 2001
From: six2dez
Date: Fri, 2 Jun 2023 08:52:48 +0200
Subject: [PATCH 14/40] Fix fuzzing axiom
---
reconftw.sh | 4 ++--
1 file changed, 2 insertions(+), 2 deletions(-)
diff --git a/reconftw.sh b/reconftw.sh
index b72e63d6..5f959dc1 100755
--- a/reconftw.sh
+++ b/reconftw.sh
@@ -1036,7 +1036,7 @@ function virtualhosts(){
find $dir/virtualhosts/ -type f -iname "*.txt" -exec cat {} + 2>>"$LOGFILE" | anew -q $dir/virtualhosts/virtualhosts_full.txt
end_func "Results are saved in $domain/virtualhosts/*subdomain*.txt" ${FUNCNAME[0]}
else
- end_func "No $domain/web/webs.txts file found, fuzzing skipped " ${FUNCNAME[0]}
+ end_func "No $domain/web/webs.txts file found, virtualhosts skipped " ${FUNCNAME[0]}
fi
else
if [ "$VIRTUALHOSTS" = false ]; then
@@ -1224,7 +1224,7 @@ function fuzz(){
axiom-scan .tmp/webs_all.txt -m ffuf_base -H "${HEADER}" $FFUF_FLAGS -s -maxtime $FFUF_MAXTIME -o $dir/.tmp/ffuf-content.json $AXIOM_EXTRA_ARGS 2>>"$LOGFILE" &>/dev/null
for sub in $(cat .tmp/webs_all.txt); do
sub_out=$(echo $sub | sed -e 's|^[^/]*//||' -e 's|/.*$||')
- grep "$sub" $dir/.tmp/ffuf-content.json | awk '{print $2" "$3" "$1}' | sort -k1 | anew -q $dir/fuzzing/${sub_out}.txt
+ [ -s "$dir/.tmp/ffuf-content.json" ] && cat .tmp/ffuf-content.json | jq -r 'try .results[] | "\(.status) \(.length) \(.url)"' | grep $sub | sort | sort -k1 | anew -q fuzzing/${sub_out}.txt
done
find $dir/fuzzing/ -type f -iname "*.txt" -exec cat {} + 2>>"$LOGFILE" | sort -k3 | anew -q $dir/fuzzing/fuzzing_full.txt
fi
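Because FFUF_FLAGS now includes -of json, per-host results are pulled from the JSON with jq instead of awk-ing whitespace columns; a standalone version of the new extraction (the host value is a placeholder):

    # Extract "status length url" triples for one host from ffuf's JSON output.
    sub_out="example.com"   # placeholder host
    jq -r 'try .results[] | "\(.status) \(.length) \(.url)"' .tmp/ffuf-content.json |
        grep "$sub_out" | sort -k1 | anew -q "fuzzing/${sub_out}.txt"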
From 145164c52ba3a0c68f2e1a7d4f862aa60569a3c2 Mon Sep 17 00:00:00 2001
From: six2dez
Date: Sun, 4 Jun 2023 23:03:36 +0200
Subject: [PATCH 15/40] ctfr replaced by crt
---
README.md | 2 +-
install.sh | 2 +-
reconftw.cfg | 1 +
reconftw.sh | 4 ++--
4 files changed, 5 insertions(+), 4 deletions(-)
diff --git a/README.md b/README.md
index 13bf6572..415a9c9d 100644
--- a/README.md
+++ b/README.md
@@ -484,7 +484,7 @@ reset='\033[0m'
## Subdomains
- Passive ([amass](https://github.com/OWASP/Amass), [subfinder](https://github.com/projectdiscovery/subfinder) and [github-subdomains](https://github.com/gwen001/github-subdomains))
-- Certificate transparency ([ctfr](https://github.com/UnaPibaGeek/ctfr))
+- Certificate transparency ([crt](https://github.com/cemulus/crt))
- NOERROR subdomain discovery ([dnsx](https://github.com/projectdiscovery/dnsx), more info [here](https://www.securesystems.de/blog/enhancing-subdomain-enumeration-ents-and-noerror/))
- Bruteforce ([puredns](https://github.com/d3mondev/puredns))
- Permutations ([Gotator](https://github.com/Josue87/gotator), [ripgen](https://github.com/resyncgg/ripgen) and [regulator](https://github.com/cramppet/regulator))
diff --git a/install.sh b/install.sh
index f9b02bca..92c55cb9 100755
--- a/install.sh
+++ b/install.sh
@@ -84,6 +84,7 @@ gotools["byp4xx"]="go install -v github.com/lobuhi/byp4xx@latest"
gotools["hakip2host"]="go install github.com/hakluke/hakip2host@latest"
gotools["gau"]="go install -v github.com/lc/gau/v2/cmd/gau@latest"
gotools["Mantra"]="go install github.com/MrEmpy/Mantra@latest"
+gotools["crt"]="go install github.com/cemulus/crt@latest"
declare -A repos
repos["dorks_hunter"]="six2dez/dorks_hunter"
@@ -94,7 +95,6 @@ repos["brutespray"]="x90skysn3k/brutespray"
repos["wafw00f"]="EnableSecurity/wafw00f"
repos["gf"]="tomnomnom/gf"
repos["Gf-Patterns"]="1ndianl33t/Gf-Patterns"
-repos["ctfr"]="UnaPibaGeek/ctfr"
repos["xnLinkFinder"]="xnl-h4ck3r/xnLinkFinder"
repos["waymore"]="xnl-h4ck3r/waymore"
repos["Corsy"]="s0md3v/Corsy"
diff --git a/reconftw.cfg b/reconftw.cfg
index f92bd05f..5c35e4bc 100644
--- a/reconftw.cfg
+++ b/reconftw.cfg
@@ -60,6 +60,7 @@ RUNSUBFINDER=true
SUBDOMAINS_GENERAL=true # Enable or disable the whole Subdomains module
SUBPASSIVE=true # Passive subdomains search
SUBCRT=true # crtsh search
+CTR_LIMIT=999999 # Limit the number of results
SUBNOERROR=true # Check DNS NOERROR response and BF on them
SUBANALYTICS=true # Google Analytics search
SUBBRUTE=true # DNS bruteforcing
diff --git a/reconftw.sh b/reconftw.sh
index 5f959dc1..5fb6197d 100755
--- a/reconftw.sh
+++ b/reconftw.sh
@@ -48,7 +48,6 @@ function tools_installed(){
[ -f "$tools/Corsy/corsy.py" ] || { printf "${bred} [*] Corsy [NO]${reset}\n"; allinstalled=false;}
[ -f "$tools/testssl.sh/testssl.sh" ] || { printf "${bred} [*] testssl [NO]${reset}\n"; allinstalled=false;}
[ -f "$tools/CMSeeK/cmseek.py" ] || { printf "${bred} [*] CMSeeK [NO]${reset}\n"; allinstalled=false;}
- [ -f "$tools/ctfr/ctfr.py" ] || { printf "${bred} [*] ctfr [NO]${reset}\n"; allinstalled=false;}
[ -f "$tools/fuzz_wordlist.txt" ] || { printf "${bred} [*] OneListForAll [NO]${reset}\n"; allinstalled=false;}
[ -f "$tools/xnLinkFinder/xnLinkFinder.py" ] || { printf "${bred} [*] xnLinkFinder [NO]${reset}\n"; allinstalled=false;}
[ -f "$tools/waymore/waymore.py" ] || { printf "${bred} [*] waymore [NO]${reset}\n"; allinstalled=false;}
@@ -109,6 +108,7 @@ function tools_installed(){
which ghauri &>/dev/null || { printf "${bred} [*] ghauri [NO]${reset}\n${reset}"; allinstalled=false;}
which hakip2host &>/dev/null || { printf "${bred} [*] hakip2host [NO]${reset}\n${reset}"; allinstalled=false;}
which gau &>/dev/null || { printf "${bred} [*] gau [NO]${reset}\n${reset}"; allinstalled=false;}
+ which crt &>/dev/null || { printf "${bred} [*] crt [NO]${reset}\n${reset}"; allinstalled=false;}
which subgpt &>/dev/null || { printf "${bred} [*] subgpt [NO]${reset}\n${reset}"; allinstalled=false;}
which gitleaks &>/dev/null || { printf "${bred} [*] gitleaks [NO]${reset}\n${reset}"; allinstalled=false;}
which trufflehog &>/dev/null || { printf "${bred} [*] trufflehog [NO]${reset}\n${reset}"; allinstalled=false;}
@@ -416,7 +416,7 @@ function sub_passive(){
function sub_crt(){
if { [ ! -f "$called_fn_dir/.${FUNCNAME[0]}" ] || [ "$DIFF" = true ]; } && [ "$SUBCRT" = true ]; then
start_subfunc ${FUNCNAME[0]} "Running : Crtsh Subdomain Enumeration"
- python3 $tools/ctfr/ctfr.py -d $domain -o .tmp/crtsh_subs_tmp.txt 2>>"$LOGFILE" &>/dev/null
+ crt -s -json -l ${CTR_LIMIT} $domain 2>>"$LOGFILE" | jq -r '.[].subdomain' | sed -e "s/^\\*\\.//" | anew -q .tmp/crtsh_subs_tmp.txt 2>>"$LOGFILE" &>/dev/null
[[ "$INSCOPE" = true ]] && check_inscope .tmp/crtsh_subs_tmp.txt 2>>"$LOGFILE" &>/dev/null
NUMOFLINES=$(cat .tmp/crtsh_subs_tmp.txt 2>>"$LOGFILE" | sed 's/\*.//g' | anew .tmp/crtsh_subs.txt | sed '/^$/d' | wc -l)
end_subfunc "${NUMOFLINES} new subs (cert transparency)" ${FUNCNAME[0]}
From 0e39befe1909388b2e4ec834f4972b02a65b9f19 Mon Sep 17 00:00:00 2001
From: six2dez
Date: Tue, 6 Jun 2023 11:27:03 +0200
Subject: [PATCH 16/40] Fix web server installation
---
install.sh | 8 ++++----
reconftw.sh | 2 +-
requirements.txt | 4 ++--
3 files changed, 7 insertions(+), 7 deletions(-)
diff --git a/install.sh b/install.sh
index 92c55cb9..6f6bb2d0 100755
--- a/install.sh
+++ b/install.sh
@@ -188,8 +188,8 @@ while true; do
if $rftw_installed; then
printf "${bblue} 1. Install/Update ReconFTW (without Web Interface)${reset}\n\n"
- printf "${bblue} 2. Install/Update ReconFTW + Install Web Interface${reset} ${yellow}(User Interaction needed!)${reset}\n\n"
- printf "${bblue} 3. Setup Web Interface${reset}\n\n"
+ printf "${bblue} 2. Install/Update ReconFTW + Install Web Interface${reset}\n\n"
+ printf "${bblue} 3. Setup Web Interface${reset} ${yellow}(User Interaction needed!)${reset}\n\n"
printf "${bblue} 4. Exit${reset}\n\n"
printf "${bgreen}#######################################################################${reset}\n\n"
read -p "$(echo -e ${bblue} "Insert option: "${reset})" option
@@ -494,7 +494,7 @@ if [ "True" = "$IS_ARM" ]; then
eval wget -N -c https://github.com/dwisiswant0/ppfuzz/releases/download/v1.0.1/ppfuzz-v1.0.1-armv7-unknown-linux-gnueabihf.tar.gz $DEBUG_STD
eval $SUDO tar -C /usr/local/bin/ -xzf ppfuzz-v1.0.1-armv7-unknown-linux-gnueabihf.tar.gz $DEBUG_STD
eval $SUDO rm -rf ppfuzz-v1.0.1-armv7-unknown-linux-gnueabihf.tar.gz $DEBUG_STD
- elif [ "True" = "$RPI_4" ] || [ "True" = "$IS_MAC" ]; then
+ elif [ "True" = "$RPI_4" ]; then
eval wget -N -c https://github.com/dwisiswant0/ppfuzz/releases/download/v1.0.1/ppfuzz-v1.0.1-aarch64-unknown-linux-gnueabihf.tar.gz $DEBUG_STD
eval $SUDO tar -C /usr/local/bin/ -xzf ppfuzz-v1.0.1-aarch64-unknown-linux-gnueabihf.tar.gz $DEBUG_STD
eval $SUDO rm -rf ppfuzz-v1.0.1-aarch64-unknown-linux-gnueabihf.tar.gz $DEBUG_STD
@@ -603,7 +603,7 @@ eval strip -s "$HOME"/go/bin/* $DEBUG_STD
eval $SUDO cp "$HOME"/go/bin/* /usr/local/bin/ $DEBUG_STD
if [ "$web" = true ]; then
- sh -c "echo 3 | $SCRIPTPATH/install.sh"
+ printf "\n${bgreen} Web server is installed, to set it up run ./install.sh and select option 3 ${reset}\n\n"
fi
printf "${yellow} Remember set your api keys:\n - amass (~/.config/amass/config.ini)\n - subfinder (~/.config/subfinder/provider-config.yaml)\n - GitLab (~/Tools/.gitlab_tokens)\n - SSRF Server (COLLAB_SERVER in reconftw.cfg or env var) \n - Blind XSS Server (XSS_SERVER in reconftw.cfg or env var) \n - notify (~/.config/notify/provider-config.yaml) \n - WHOISXML API (WHOISXML_API in reconftw.cfg or env var)\n - subgpt_cookies.json (subgpt_cookies.json file, follow instructions at https://github.com/s0md3v/SubGPT#getting-bing-cookie)\n\n\n${reset}"
diff --git a/reconftw.sh b/reconftw.sh
index 5fb6197d..ce839d4d 100755
--- a/reconftw.sh
+++ b/reconftw.sh
@@ -111,7 +111,7 @@ function tools_installed(){
which crt &>/dev/null || { printf "${bred} [*] crt [NO]${reset}\n${reset}"; allinstalled=false;}
which subgpt &>/dev/null || { printf "${bred} [*] subgpt [NO]${reset}\n${reset}"; allinstalled=false;}
which gitleaks &>/dev/null || { printf "${bred} [*] gitleaks [NO]${reset}\n${reset}"; allinstalled=false;}
- which trufflehog &>/dev/null || { printf "${bred} [*] trufflehog [NO]${reset}\n${reset}"; allinstalled=false;}
+ which trufflehog &>/dev/null || { printf "${bred} [*] trufflehog [NO]${reset}\n${reset}"; allinstalled=false;}
if [ "${allinstalled}" = true ]; then
printf "${bgreen} Good! All installed! ${reset}\n\n"
diff --git a/requirements.txt b/requirements.txt
index c1210c22..70674336 100644
--- a/requirements.txt
+++ b/requirements.txt
@@ -9,7 +9,7 @@ chardet # ghauri
colorama # ghauri
colorclass # dnsvalidator
dank # regulator
-datetime #JSA
+datetime # JSA
datrie # regulator
dnspython # ip2provider
emailfinder # Tool
@@ -35,4 +35,4 @@ tldextract # dorks_hunter
tqdm # multiple
ujson # multiple
urllib3 # multiple
-subgpt # subgpt
+subgpt # Tool
From dd119a18c9b7f6f0970463ae8f83c63b1cd05f10 Mon Sep 17 00:00:00 2001
From: six2dez
Date: Tue, 6 Jun 2023 11:29:54 +0200
Subject: [PATCH 17/40] Web interface installation process
---
web/README.md | 9 ++++++++-
1 file changed, 8 insertions(+), 1 deletion(-)
diff --git a/web/README.md b/web/README.md
index b54928c1..bb5badaf 100644
--- a/web/README.md
+++ b/web/README.md
@@ -33,7 +33,14 @@ So, without further ado, we invite you to explore the enhanced web interface of
### Run the following command to install the WEB GUI Interface
```bash
-./install.sh
+./install.sh #(Option 2)
+```
+![ReconFTW Install Web GUI](https://i.imgur.com/675L89x.png)
+---
+
+### Run the following after install to set up the WEB GUI Interface
+```bash
+./install.sh #(Option 3)
```
![ReconFTW Install Web GUI](https://i.imgur.com/675L89x.png)
---
From 16f3d684556ac005292655a7310403320b744461 Mon Sep 17 00:00:00 2001
From: six2dez
Date: Wed, 7 Jun 2023 15:15:27 +0200
Subject: [PATCH 18/40] passive mode improved
---
reconftw.sh | 8 +++++++-
1 file changed, 7 insertions(+), 1 deletion(-)
diff --git a/reconftw.sh b/reconftw.sh
index ce839d4d..6187e22e 100755
--- a/reconftw.sh
+++ b/reconftw.sh
@@ -2339,9 +2339,15 @@ function passive(){
github_dorks
github_repos
metadata
+ SUBNOERROR=false
+ SUBANALYTICS=false
+ SUBBRUTE=false
SUBSCRAPING=false
+ SUBPERMUTE=false
+ SUBREGEXPERMUTE=false
+ SUBGPT=false
+ SUB_RECURSIVE_BRUTE=false
WEBPROBESIMPLE=false
-
if [ "$AXIOM" = true ]; then
axiom_lauch
axiom_selected
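Passive mode now works by forcing every active subdomain technique off before the scan starts. Since the config supports CUSTOM_CONFIG, a user can approximate the same behavior with an override file (filename illustrative, values from the patch):

    # custom_passive.cfg (illustrative): the flags passive() now forces off.
    SUBNOERROR=false
    SUBANALYTICS=false
    SUBBRUTE=false
    SUBSCRAPING=false
    SUBPERMUTE=false
    SUBREGEXPERMUTE=false
    SUBGPT=false
    SUB_RECURSIVE_BRUTE=false
    WEBPROBESIMPLE=false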
From eed38d6e8d4b23fa6d6a4f2f08e479dc791b033a Mon Sep 17 00:00:00 2001
From: six2dez
Date: Wed, 7 Jun 2023 16:17:00 +0200
Subject: [PATCH 19/40] vulners replaces searchsploit
---
README.md | 2 +-
install.sh | 1 -
reconftw.sh | 6 ++----
3 files changed, 3 insertions(+), 6 deletions(-)
diff --git a/README.md b/README.md
index 415a9c9d..0a674413 100644
--- a/README.md
+++ b/README.md
@@ -504,7 +504,7 @@ reset='\033[0m'
- CDN checker ([ipcdn](https://github.com/six2dez/ipcdn))
- WAF checker ([wafw00f](https://github.com/EnableSecurity/wafw00f))
- Port Scanner (Active with [nmap](https://github.com/nmap/nmap) and passive with [smap](https://github.com/s0md3v/Smap))
-- Port services vulnerability checks ([searchsploit](https://github.com/offensive-security/exploitdb))
+- Port services vulnerability checks ([vulners](https://github.com/vulnersCom/nmap-vulners))
- Password spraying ([brutespray](https://github.com/x90skysn3k/brutespray))
## Webs
diff --git a/install.sh b/install.sh
index 6f6bb2d0..c78e63ce 100755
--- a/install.sh
+++ b/install.sh
@@ -438,7 +438,6 @@ cd "$dir" || { echo "Failed to cd to $dir in ${FUNCNAME[0]} @ line ${LINENO}"; e
eval git clone --depth 1 https://github.com/sqlmapproject/sqlmap.git $dir/sqlmap $DEBUG_STD
eval git clone --depth 1 https://github.com/drwetter/testssl.sh.git $dir/testssl.sh $DEBUG_STD
eval $SUDO git clone https://gitlab.com/exploit-database/exploitdb /opt/exploitdb $DEBUG_STD
-eval $SUDO ln -sf /opt/exploitdb/searchsploit /usr/local/bin/searchsploit $DEBUG_STD
# Standard repos installation
repos_step=0
diff --git a/reconftw.sh b/reconftw.sh
index 6187e22e..f32da9bb 100755
--- a/reconftw.sh
+++ b/reconftw.sh
@@ -92,7 +92,6 @@ function tools_installed(){
which analyticsrelationships &>/dev/null || { printf "${bred} [*] analyticsrelationships [NO]${reset}\n"; allinstalled=false;}
which mapcidr &>/dev/null || { printf "${bred} [*] mapcidr [NO]${reset}\n"; allinstalled=false;}
which ppfuzz &>/dev/null || { printf "${bred} [*] ppfuzz [NO]${reset}\n"; allinstalled=false;}
- which searchsploit &>/dev/null || { printf "${bred} [*] searchsploit [NO]${reset}\n"; allinstalled=false;}
which cdncheck &>/dev/null || { printf "${bred} [*] cdncheck [NO]${reset}\n"; allinstalled=false;}
which interactsh-client &>/dev/null || { printf "${bred} [*] interactsh-client [NO]${reset}\n"; allinstalled=false;}
which tlsx &>/dev/null || { printf "${bred} [*] tlsx [NO]${reset}\n"; allinstalled=false;}
@@ -1099,12 +1098,11 @@ function portscan(){
fi
if [ "$PORTSCAN_ACTIVE" = true ]; then
if [ ! "$AXIOM" = true ]; then
- [ -s ".tmp/ips_nocdn.txt" ] && $SUDO nmap --top-ports 200 -sV -n --max-retries 2 -Pn --open -iL .tmp/ips_nocdn.txt -oA hosts/portscan_active 2>>"$LOGFILE" &>/dev/null
+ [ -s ".tmp/ips_nocdn.txt" ] && $SUDO nmap --top-ports 200 -sV -n --max-retries 2 -Pn --open --script vulners -iL .tmp/ips_nocdn.txt -oA hosts/portscan_active 2>>"$LOGFILE" &>/dev/null
else
- [ -s ".tmp/ips_nocdn.txt" ] && axiom-scan .tmp/ips_nocdn.txt -m nmapx --top-ports 200 -sV -n -Pn --open --max-retries 2 -oA hosts/portscan_active $AXIOM_EXTRA_ARGS 2>>"$LOGFILE" &>/dev/null
+ [ -s ".tmp/ips_nocdn.txt" ] && axiom-scan .tmp/ips_nocdn.txt -m nmapx --top-ports 200 -sV -n -Pn --open --max-retries 2 --script vulners -oA hosts/portscan_active $AXIOM_EXTRA_ARGS 2>>"$LOGFILE" &>/dev/null
fi
fi
- [ -s "hosts/portscan_active.xml" ] && searchsploit --nmap hosts/portscan_active.xml 2>/dev/null > hosts/searchsploit.txt
end_func "Results are saved in hosts/portscan_[passive|active].txt" ${FUNCNAME[0]}
else
if [ "$PORTSCANNER" = false ]; then
From 32db045d8c9ea6e97d6163371b2a743546bd1d26 Mon Sep 17 00:00:00 2001
From: six2dez
Date: Thu, 8 Jun 2023 00:46:29 +0200
Subject: [PATCH 20/40] Fix for Mantra
---
reconftw.sh | 4 ++--
1 file changed, 2 insertions(+), 2 deletions(-)
diff --git a/reconftw.sh b/reconftw.sh
index f32da9bb..a86ad741 100755
--- a/reconftw.sh
+++ b/reconftw.sh
@@ -1186,7 +1186,7 @@ function nuclei_check(){
for i in "${!array[@]}"
do
crit=${array[i]}
- printf "${yellow}\n Running : Nuclei $crit ${reset}\n\n"
+ printf "${yellow}\n Running : Nuclei $crit, check results on nuclei_output folder${reset}\n\n"
axiom-scan .tmp/webs_subs.txt -m nuclei -severity ${crit} -nh -rl $NUCLEI_RATELIMIT -o nuclei_output/${crit}.txt $AXIOM_EXTRA_ARGS 2>>"$LOGFILE" &>/dev/null
done
printf "\n\n"
@@ -1438,7 +1438,7 @@ function jschecks(){
if [ ! "$AXIOM" = true ]; then
[ -s "js/js_livelinks.txt" ] && cat js/js_livelinks.txt | Mantra -ua ${HEADER} -s | anew -q js/js_secrets.txt
else
- [ -s "js/js_livelinks.txt" ] && axiom-scan js/js_livelinks.txt -m mantra -ua ${HEADER} -o js/js_secrets.txt $AXIOM_EXTRA_ARGS 2>>"$LOGFILE" &>/dev/null
+ [ -s "js/js_livelinks.txt" ] && axiom-scan js/js_livelinks.txt -m mantra -ua ${HEADER} -o js/js_secrets.txt $AXIOM_EXTRA_ARGS &>/dev/null
fi
printf "${yellow} Running : Building wordlist 5/5${reset}\n"
[ -s "js/js_livelinks.txt" ] && interlace -tL js/js_livelinks.txt -threads ${INTERLACE_THREADS} -c "python3 $tools/getjswords.py '_target_' | anew -q webs/dict_words.txt" 2>>"$LOGFILE" &>/dev/null
From e5876e7fa2b66c8333038d84c31dd57b85cb0127 Mon Sep 17 00:00:00 2001
From: six2dez
Date: Thu, 8 Jun 2023 11:44:22 +0200
Subject: [PATCH 21/40] Fix shellcheck warnings
---
reconftw.sh | 19 ++++++++++---------
1 file changed, 10 insertions(+), 9 deletions(-)
diff --git a/reconftw.sh b/reconftw.sh
index a86ad741..f65be7d3 100755
--- a/reconftw.sh
+++ b/reconftw.sh
@@ -1,7 +1,7 @@
#!/usr/bin/env bash
function banner_graber(){
- source $SCRIPTPATH/banners.txt
+ source "${SCRIPTPATH}"/banners.txt
randx=$(shuf -i 1-23 -n 1)
tmp="banner${randx}"
banner_code=${!tmp}
@@ -23,7 +23,7 @@ function check_version(){
if [ $exit_status -eq 0 ]; then
BRANCH=$(git rev-parse --abbrev-ref HEAD)
HEADHASH=$(git rev-parse HEAD)
- UPSTREAMHASH=$(git rev-parse ${BRANCH}@{upstream})
+ UPSTREAMHASH=$(git rev-parse "${BRANCH}"@\{upstream\})
if [ "$HEADHASH" != "$UPSTREAMHASH" ]; then
printf "\n${yellow} There is a new version, run ./install.sh to get latest version${reset}\n\n"
fi
@@ -1171,7 +1171,7 @@ function nuclei_check(){
[ ! -s ".tmp/webs_subs.txt" ] && cat subdomains/subdomains.txt .tmp/webs_all.txt 2>>"$LOGFILE" | anew -q .tmp/webs_subs.txt
if [ ! "$AXIOM" = true ]; then
set -f # avoid globbing (expansion of *).
- array=(${NUCLEI_SEVERITY//,/ })
+ array=("${NUCLEI_SEVERITY//,/ }")
for i in "${!array[@]}"
do
crit=${array[i]}
@@ -1182,7 +1182,7 @@ function nuclei_check(){
else
if [ -s ".tmp/webs_subs.txt" ]; then
set -f # avoid globbing (expansion of *).
- array=(${NUCLEI_SEVERITY//,/ })
+ array=("${NUCLEI_SEVERITY//,/ }")
for i in "${!array[@]}"
do
crit=${array[i]}
@@ -1945,9 +1945,9 @@ function getElapsedTime {
}
function zipSnedOutputFolder {
- zip_name=`date +"%Y_%m_%d-%H.%M.%S"`
- zip_name="$zip_name"_"$domain.zip"
- (cd $dir && zip -r "$zip_name" .)
+ zip_name1=$(date +"%Y_%m_%d-%H.%M.%S")
+ zip_name="${zip_name1}_${domain}.zip"
+ (cd "$dir" && zip -r "$zip_name" .)
echo "Sending zip file "${dir}/${zip_name}""
if [ -s "${dir}/$zip_name" ]; then
@@ -2975,10 +2975,11 @@ while true; do
done
# This is the first thing to do to read in alternate config
-SCRIPTPATH="$( cd "$(dirname "$0")" >/dev/null 2>&1 ; pwd -P )"
+SCRIPTPATH="$( cd "$(dirname "$0")" >/dev/null 2>&1 || exit ; pwd -P )"
. "$SCRIPTPATH"/reconftw.cfg
if [ -s "$CUSTOM_CONFIG" ]; then
- . "${CUSTOM_CONFIG}"
+# shellcheck source=/home/six2dez/Tools/reconftw/custom_config.cfg
+. "${CUSTOM_CONFIG}"
fi
if [ $opt_deep ]; then
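These are mechanical shellcheck fixes: quoting expansions, preferring $(...) over backticks, and guarding cd with an exit. They can be reproduced locally before committing (the severity filter is optional):

    # Lint the scripts; -S warning hides purely stylistic notes.
    shellcheck -S warning reconftw.sh install.sh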
From fb8089bc41e6d5cc2467d41408ffbc03056f7827 Mon Sep 17 00:00:00 2001
From: six2dez
Date: Thu, 8 Jun 2023 12:34:08 +0200
Subject: [PATCH 22/40] Fix shellcheck warnings and errors
---
reconftw.sh | 331 ++++++++++++++++++++++++++--------------------------
1 file changed, 167 insertions(+), 164 deletions(-)
diff --git a/reconftw.sh b/reconftw.sh
index f65be7d3..e3cbeab6 100755
--- a/reconftw.sh
+++ b/reconftw.sh
@@ -131,8 +131,8 @@ function tools_installed(){
function google_dorks(){
if { [ ! -f "$called_fn_dir/.${FUNCNAME[0]}" ] || [ "$DIFF" = true ]; } && [ "$GOOGLE_DORKS" = true ] && [ "$OSINT" = true ]; then
- python3 $tools/dorks_hunter/dorks_hunter.py -d $domain -o osint/dorks.txt
- end_func "Results are saved in $domain/osint/dorks.txt" ${FUNCNAME[0]}
+ python3 $tools/dorks_hunter/dorks_hunter.py -d "$domain" -o osint/dorks.txt
+ end_func "Results are saved in $domain/osint/dorks.txt" "${FUNCNAME[0]}"
else
if [ "$GOOGLE_DORKS" = false ] || [ "$OSINT" = false ]; then
printf "\n${yellow} ${FUNCNAME[0]} skipped in this mode or defined in reconftw.cfg ${reset}\n"
@@ -144,17 +144,17 @@ function google_dorks(){
function github_dorks(){
if { [ ! -f "$called_fn_dir/.${FUNCNAME[0]}" ] || [ "$DIFF" = true ]; } && [ "$GITHUB_DORKS" = true ] && [ "$OSINT" = true ]; then
- start_func ${FUNCNAME[0]} "Github Dorks in process"
+ start_func "${FUNCNAME[0]}" "Github Dorks in process"
if [ -s "${GITHUB_TOKENS}" ]; then
if [ "$DEEP" = true ]; then
- gitdorks_go -gd $tools/gitdorks_go/Dorks/medium_dorks.txt -nws 20 -target $domain -tf "${GITHUB_TOKENS}" -ew 3 | anew -q osint/gitdorks.txt
+ gitdorks_go -gd $tools/gitdorks_go/Dorks/medium_dorks.txt -nws 20 -target "$domain" -tf "${GITHUB_TOKENS}" -ew 3 | anew -q osint/gitdorks.txt
else
gitdorks_go -gd $tools/gitdorks_go/Dorks/smalldorks.txt -nws 20 -target $domain -tf "${GITHUB_TOKENS}" -ew 3 | anew -q osint/gitdorks.txt
fi
else
printf "\n${bred} Required file ${GITHUB_TOKENS} not exists or empty${reset}\n"
fi
- end_func "Results are saved in $domain/osint/gitdorks.txt" ${FUNCNAME[0]}
+ end_func "Results are saved in $domain/osint/gitdorks.txt" "${FUNCNAME[0]}"
else
if [ "$GITHUB_DORKS" = false ] || [ "$OSINT" = false ]; then
printf "\n${yellow} ${FUNCNAME[0]} skipped in this mode or defined in reconftw.cfg ${reset}\n"
@@ -166,19 +166,21 @@ function github_dorks(){
function github_repos(){
if { [ ! -f "$called_fn_dir/.${FUNCNAME[0]}" ] || [ "$DIFF" = true ]; } && [ "$GITHUB_REPOS" = true ] && [ "$OSINT" = true ]; then
- start_func ${FUNCNAME[0]} "Github Repos analysis in process"
+ start_func "${FUNCNAME[0]}" "Github Repos analysis in process"
if [ -s "${GITHUB_TOKENS}" ]; then
GH_TOKEN=$(cat ${GITHUB_TOKENS} | head -1)
echo $domain | unfurl format %r > .tmp/company_name.txt
- enumerepo -token-string ${GH_TOKEN} -usernames .tmp/company_name.txt -o .tmp/company_repos.txt 2>>"$LOGFILE" &>/dev/null
- [ -s ".tmp/company_repos.txt" ] && cat .tmp/company_repos.txt | jq -r '.[].repos[]|.url' > .tmp/company_repos_url.txt 2>>"$LOGFILE" &>/dev/null
- mkdir -p .tmp/github_repos 2>>"$LOGFILE" &>/dev/null
- [ -s ".tmp/company_repos_url.txt" ] && interlace -tL .tmp/company_repos_url.txt -threads ${INTERLACE_THREADS} -c "git clone _target_ .tmp/github_repos/_cleantarget_" 2>>"$LOGFILE" &>/dev/null
+ enumerepo -token-string "${GH_TOKEN}" -usernames .tmp/company_name.txt -o .tmp/company_repos.txt 2>>"$LOGFILE" >/dev/null &
+ [ -s ".tmp/company_repos.txt" ] && jq -r '.[].repos[]|.url' < .tmp/company_repos.txt > .tmp/company_repos_url.txt 2>>"$LOGFILE"
+ mkdir -p .tmp/github_repos 2>>"$LOGFILE" >>"$LOGFILE"
+ [ -s ".tmp/company_repos_url.txt" ] && interlace -tL .tmp/company_repos_url.txt -threads ${INTERLACE_THREADS} -c "git clone _target_ .tmp/github_repos/_cleantarget_" 2>>"$LOGFILE" >/dev/null 2>&1
[ -d ".tmp/github/" ] && ls .tmp/github_repos > .tmp/github_repos_folders.txt
- [ -s ".tmp/github_repos_folders.txt" ] && interlace -tL .tmp/github_repos_folders.txt -threads ${INTERLACE_THREADS} -c "gitleaks detect --source .tmp/github_repos/_target_ --no-banner --no-color -r ./tmp/github/gh_secret_cleantarget_.json" 2>>"$LOGFILE" &>/dev/null
- [ -s ".tmp/company_repos_url.txt" ] && interlace -tL .tmp/company_repos_url.txt -threads ${INTERLACE_THREADS} -c "trufflehog git _target_ -j | jq -c > _output_/_cleantarget_" -o .tmp/github/ 2>>"$LOGFILE" &>/dev/null
- [ -d ".tmp/github/" ] && cat .tmp/github/* | jq -c | jq -r > osint/github_company_secrets.json 2>>"$LOGFILE" &>/dev/null
+ [ -s ".tmp/github_repos_folders.txt" ] && interlace -tL .tmp/github_repos_folders.txt -threads ${INTERLACE_THREADS} -c "gitleaks detect --source .tmp/github_repos/_target_ --no-banner --no-color -r ./tmp/github/gh_secret_cleantarget_.json" 2>>"$LOGFILE" >/dev/null
+ [ -s ".tmp/company_repos_url.txt" ] && interlace -tL .tmp/company_repos_url.txt -threads ${INTERLACE_THREADS} -c "trufflehog git _target_ -j 2>&1 | jq -c > _output_/_cleantarget_" -o .tmp/github/ >>"$LOGFILE" 2>&1
+ if [ -d ".tmp/github/" ]; then
+ cat .tmp/github/* | jq -c | jq -r > osint/github_company_secrets.json 2>>"$LOGFILE"
+ fi
else
printf "\n${bred} Required file ${GITHUB_TOKENS} not exists or empty${reset}\n"
fi
@@ -195,7 +197,7 @@ function github_repos(){
function metadata(){
if { [ ! -f "$called_fn_dir/.${FUNCNAME[0]}" ] || [ "$DIFF" = true ]; } && [ "$METADATA" = true ] && [ "$OSINT" = true ] && ! [[ $domain =~ ^[0-9]+\.[0-9]+\.[0-9]+\.[0-9] ]]; then
start_func ${FUNCNAME[0]} "Scanning metadata in public files"
- metafinder -d "$domain" -l $METAFINDER_LIMIT -o osint -go -bi -ba 2>>"$LOGFILE" &>/dev/null
+ { metafinder -d "$domain" -l $METAFINDER_LIMIT -o osint -go -bi -ba; } >>"$LOGFILE" 2>&1 &
mv "osint/${domain}/"*".txt" "osint/" 2>>"$LOGFILE"
rm -rf "osint/${domain}" 2>>"$LOGFILE"
end_func "Results are saved in $domain/osint/[software/authors/metadata_results].txt" ${FUNCNAME[0]}
@@ -222,20 +224,20 @@ function emails(){
cd "$tools/Infoga" || { echo "Failed to cd directory in ${FUNCNAME[0]} @ line ${LINENO}"; exit 1; }
- python3 infoga.py --domain $domain --source all --report $dir/.tmp/infoga.txt 2>>"$LOGFILE" &>/dev/null
+ python3 infoga.py --domain "$domain" --source all --report "$dir/.tmp/infoga.txt" 2>&1 | tee -a "$LOGFILE"
cd "$dir" || { echo "Failed to cd to $dir in ${FUNCNAME[0]} @ line ${LINENO}"; exit 1; }
[ -s ".tmp/infoga.txt" ] && cat .tmp/infoga.txt | cut -d " " -f3 | grep -v "-" | anew -q osint/emails.txt
# COMMENTED THEHARVESTER, H8MAIL AND PWNDB AS THEY'RE NOT WORKING AS EXPECTED
# cd "$tools/theHarvester" || { echo "Failed to cd directory in ${FUNCNAME[0]} @ line ${LINENO}"; exit 1; }
-# python3 theHarvester.py -d $domain -b all -f $dir/.tmp/harvester.json 2>>"$LOGFILE" &>/dev/null
+# python3 theHarvester.py -d $domain -b all -f $dir/.tmp/harvester.json 2>>"$LOGFILE" >/dev/null
# cd "$dir" || { echo "Failed to cd to $dir in ${FUNCNAME[0]} @ line ${LINENO}"; exit 1; }
# if [ -s ".tmp/harvester.json" ]; then
# cat .tmp/harvester.json | jq -r 'try .emails[]' 2>/dev/null | anew -q osint/emails.txt
# cat .tmp/harvester.json | jq -r 'try .linkedin_people[]' 2>/dev/null | anew -q osint/employees.txt
# cat .tmp/harvester.json | jq -r 'try .linkedin_links[]' 2>/dev/null | anew -q osint/linkedin.txt
# fi
-# h8mail -t $domain -q domain --loose -c $tools/h8mail_config.ini -j .tmp/h8_results.json 2>>"$LOGFILE" &>/dev/null
+# h8mail -t $domain -q domain --loose -c $tools/h8mail_config.ini -j .tmp/h8_results.json 2>>"$LOGFILE" >/dev/null
# [ -s ".tmp/h8_results.json" ] && cat .tmp/h8_results.json | jq -r '.targets[0] | .data[] | .[]' | awk '{print $12}' | anew -q osint/h8mail.txt
#
# PWNDB_STATUS=$(timeout 30s curl -Is --socks5-hostname localhost:9050 http://pwndb2am4tzkvold.onion | grep HTTP | cut -d ' ' -f2)
@@ -272,7 +274,7 @@ function domain_info(){
start_func ${FUNCNAME[0]} "Searching domain info (whois, registrant name/email domains)"
whois -H $domain > osint/domain_info_general.txt
if [ "$DEEP" = true ] || [ "$REVERSE_WHOIS" = true ]; then
- timeout -k $AMASS_INTEL_TIMEOUT amass intel -d ${domain} -whois -timeout $AMASS_INTEL_TIMEOUT -o osint/domain_info_reverse_whois.txt 2>>"$LOGFILE" &>/dev/null
+ timeout -k "$AMASS_INTEL_TIMEOUT" amass intel -d "${domain}" -whois -timeout "$AMASS_INTEL_TIMEOUT" -o osint/domain_info_reverse_whois.txt 2>&1 | tee -a "$LOGFILE" &>/dev/null
fi
end_func "Results are saved in $domain/osint/domain_info_[general/name/email/ip].txt" ${FUNCNAME[0]}
else
@@ -377,29 +379,29 @@ function sub_passive(){
if { [ ! -f "$called_fn_dir/.${FUNCNAME[0]}" ] || [ "$DIFF" = true ]; } && [ "$SUBPASSIVE" = true ]; then
start_subfunc ${FUNCNAME[0]} "Running : Passive Subdomain Enumeration"
if [ ! "$AXIOM" = true ]; then
- [[ $RUNAMASS == true ]] && timeout -k $AMASS_ENUM_TIMEOUT amass enum -passive -d $domain -config $AMASS_CONFIG -timeout $AMASS_ENUM_TIMEOUT -json .tmp/amass_json.json 2>>"$LOGFILE" &>/dev/null
+ [[ $RUNAMASS == true ]] && timeout -k "$AMASS_ENUM_TIMEOUT" amass enum -passive -d "$domain" -config "$AMASS_CONFIG" -timeout "$AMASS_ENUM_TIMEOUT" -json .tmp/amass_json.json 2>>"$LOGFILE" >/dev/null
[ -s ".tmp/amass_json.json" ] && cat .tmp/amass_json.json | jq -r '.name' | anew -q .tmp/amass_psub.txt
- [[ $RUNSUBFINDER == true ]] && subfinder -all -d $domain -silent -o .tmp/subfinder_psub.txt 2>>"$LOGFILE" &>/dev/null
+ [[ $RUNSUBFINDER == true ]] && subfinder -all -d "$domain" -silent -o .tmp/subfinder_psub.txt 2>>"$LOGFILE" >/dev/null
else
echo $domain > .tmp/amass_temp_axiom.txt
- [[ $RUNAMASS == true ]] && axiom-scan .tmp/amass_temp_axiom.txt -m amass -passive -o .tmp/amass_psub.txt $AXIOM_EXTRA_ARGS 2>>"$LOGFILE" &>/dev/null
- [[ $RUNSUBFINDER == true ]] && axiom-scan .tmp/amass_temp_axiom.txt -m subfinder -all -silent -o .tmp/subfinder_psub.txt $AXIOM_EXTRA_ARGS 2>>"$LOGFILE" &>/dev/null
+ [[ $RUNAMASS == true ]] && axiom-scan .tmp/amass_temp_axiom.txt -m amass -passive -o .tmp/amass_psub.txt $AXIOM_EXTRA_ARGS 2>>"$LOGFILE" >/dev/null
+ [[ $RUNSUBFINDER == true ]] && axiom-scan .tmp/amass_temp_axiom.txt -m subfinder -all -silent -o .tmp/subfinder_psub.txt $AXIOM_EXTRA_ARGS 2>>"$LOGFILE" >/dev/null
fi
if [ -s "${GITHUB_TOKENS}" ]; then
if [ "$DEEP" = true ]; then
- github-subdomains -d $domain -t $GITHUB_TOKENS -o .tmp/github_subdomains_psub.txt 2>>"$LOGFILE" &>/dev/null
+ github-subdomains -d $domain -t $GITHUB_TOKENS -o .tmp/github_subdomains_psub.txt 2>>"$LOGFILE" >/dev/null
else
- github-subdomains -d $domain -k -q -t $GITHUB_TOKENS -o .tmp/github_subdomains_psub.txt 2>>"$LOGFILE" &>/dev/null
+ github-subdomains -d $domain -k -q -t $GITHUB_TOKENS -o .tmp/github_subdomains_psub.txt 2>>"$LOGFILE" >/dev/null
fi
fi
if [ -s "${GITLAB_TOKENS}" ]; then
- gitlab-subdomains -d $domain -t $GITLAB_TOKENS -o .tmp/gitlab_subdomains_psub.txt 2>>"$LOGFILE" &>/dev/null
+ gitlab-subdomains -d $domain -t $GITLAB_TOKENS -o .tmp/gitlab_subdomains_psub.txt 2>>"$LOGFILE" >/dev/null
fi
if [ "$INSCOPE" = true ]; then
- check_inscope .tmp/amass_psub.txt 2>>"$LOGFILE" &>/dev/null
- check_inscope .tmp/subfinder_psub.txt 2>>"$LOGFILE" &>/dev/null
- check_inscope .tmp/github_subdomains_psub.txt 2>>"$LOGFILE" &>/dev/null
- check_inscope .tmp/gitlab_subdomains_psub.txt 2>>"$LOGFILE" &>/dev/null
+ check_inscope .tmp/amass_psub.txt 2>>"$LOGFILE" >/dev/null
+ check_inscope .tmp/subfinder_psub.txt 2>>"$LOGFILE" >/dev/null
+ check_inscope .tmp/github_subdomains_psub.txt 2>>"$LOGFILE" >/dev/null
+ check_inscope .tmp/gitlab_subdomains_psub.txt 2>>"$LOGFILE" >/dev/null
fi
NUMOFLINES=$(find .tmp -type f -iname "*_psub.txt" -exec cat {} + | sed "s/*.//" | anew .tmp/passive_subs.txt | sed '/^$/d' | wc -l)
end_subfunc "${NUMOFLINES} new subs (passive)" ${FUNCNAME[0]}
@@ -415,8 +417,8 @@ function sub_passive(){
function sub_crt(){
if { [ ! -f "$called_fn_dir/.${FUNCNAME[0]}" ] || [ "$DIFF" = true ]; } && [ "$SUBCRT" = true ]; then
start_subfunc ${FUNCNAME[0]} "Running : Crtsh Subdomain Enumeration"
- crt -s -json -l ${CTR_LIMIT} $domain 2>>"$LOGFILE" | jq -r '.[].subdomain' | sed -e "s/^\\*\\.//" | anew -q .tmp/crtsh_subs_tmp.txt 2>>"$LOGFILE" &>/dev/null
- [[ "$INSCOPE" = true ]] && check_inscope .tmp/crtsh_subs_tmp.txt 2>>"$LOGFILE" &>/dev/null
+ crt -s -json -l ${CTR_LIMIT} $domain 2>>"$LOGFILE" | jq -r '.[].subdomain' | sed -e "s/^\\*\\.//" | anew -q .tmp/crtsh_subs_tmp.txt 2>>"$LOGFILE" >/dev/null
+ [[ "$INSCOPE" = true ]] && check_inscope .tmp/crtsh_subs_tmp.txt 2>>"$LOGFILE" >/dev/null
NUMOFLINES=$(cat .tmp/crtsh_subs_tmp.txt 2>>"$LOGFILE" | sed 's/\*.//g' | anew .tmp/crtsh_subs.txt | sed '/^$/d' | wc -l)
end_subfunc "${NUMOFLINES} new subs (cert transparency)" ${FUNCNAME[0]}
else
@@ -435,10 +437,10 @@ function sub_active(){
[ -s "$outOfScope_file" ] && deleteOutScoped $outOfScope_file .tmp/subs_no_resolved.txt
if [ ! "$AXIOM" = true ]; then
resolvers_update_quick_local
- [ -s ".tmp/subs_no_resolved.txt" ] && puredns resolve .tmp/subs_no_resolved.txt -w .tmp/subdomains_tmp.txt -r $resolvers --resolvers-trusted $resolvers_trusted -l $PUREDNS_PUBLIC_LIMIT --rate-limit-trusted $PUREDNS_TRUSTED_LIMIT --wildcard-tests $PUREDNS_WILDCARDTEST_LIMIT --wildcard-batch $PUREDNS_WILDCARDBATCH_LIMIT 2>>"$LOGFILE" &>/dev/null
+ [ -s ".tmp/subs_no_resolved.txt" ] && puredns resolve .tmp/subs_no_resolved.txt -w .tmp/subdomains_tmp.txt -r $resolvers --resolvers-trusted $resolvers_trusted -l $PUREDNS_PUBLIC_LIMIT --rate-limit-trusted $PUREDNS_TRUSTED_LIMIT --wildcard-tests $PUREDNS_WILDCARDTEST_LIMIT --wildcard-batch $PUREDNS_WILDCARDBATCH_LIMIT 2>>"$LOGFILE" >/dev/null
else
resolvers_update_quick_axiom
- [ -s ".tmp/subs_no_resolved.txt" ] && axiom-scan .tmp/subs_no_resolved.txt -m puredns-resolve -r /home/op/lists/resolvers.txt --resolvers-trusted /home/op/lists/resolvers_trusted.txt --wildcard-tests $PUREDNS_WILDCARDTEST_LIMIT --wildcard-batch $PUREDNS_WILDCARDBATCH_LIMIT -o .tmp/subdomains_tmp.txt $AXIOM_EXTRA_ARGS 2>>"$LOGFILE" &>/dev/null
+ [ -s ".tmp/subs_no_resolved.txt" ] && axiom-scan .tmp/subs_no_resolved.txt -m puredns-resolve -r /home/op/lists/resolvers.txt --resolvers-trusted /home/op/lists/resolvers_trusted.txt --wildcard-tests $PUREDNS_WILDCARDTEST_LIMIT --wildcard-batch $PUREDNS_WILDCARDBATCH_LIMIT -o .tmp/subdomains_tmp.txt $AXIOM_EXTRA_ARGS 2>>"$LOGFILE" >/dev/null
fi
echo $domain | dnsx -retry 3 -silent -r $resolvers_trusted 2>>"$LOGFILE" | anew -q .tmp/subdomains_tmp.txt
if [ "$DEEP" = true ]; then
@@ -446,7 +448,7 @@ function sub_active(){
else
cat .tmp/subdomains_tmp.txt | tlsx -san -cn -silent -ro -c $TLSX_THREADS | anew -q .tmp/subdomains_tmp.txt
fi
- [[ "$INSCOPE" = true ]] && check_inscope .tmp/subdomains_tmp.txt 2>>"$LOGFILE" &>/dev/null
+ [[ "$INSCOPE" = true ]] && check_inscope .tmp/subdomains_tmp.txt 2>>"$LOGFILE" >/dev/null
NUMOFLINES=$(cat .tmp/subdomains_tmp.txt 2>>"$LOGFILE" | grep "\.$domain$\|^$domain$" | anew subdomains/subdomains.txt | sed '/^$/d' | wc -l)
end_subfunc "${NUMOFLINES} subs DNS resolved from passive" ${FUNCNAME[0]}
else
@@ -460,11 +462,11 @@ function sub_noerror(){
if [[ $(echo "${RANDOM}thistotallynotexist${RANDOM}.$domain" | dnsx -r $resolvers -rcode noerror,nxdomain -retry 3 -silent | cut -d' ' -f2) == "[NXDOMAIN]" ]]; then
resolvers_update_quick_local
if [ "$DEEP" = true ]; then
- dnsx -d $domain -r $resolvers -silent -rcode noerror -w $subs_wordlist_big | cut -d' ' -f1 | anew -q .tmp/subs_noerror.txt 2>>"$LOGFILE" &>/dev/null
+ dnsx -d $domain -r $resolvers -silent -rcode noerror -w $subs_wordlist_big | cut -d' ' -f1 | anew -q .tmp/subs_noerror.txt 2>>"$LOGFILE" >/dev/null
else
- dnsx -d $domain -r $resolvers -silent -rcode noerror -w $subs_wordlist | cut -d' ' -f1 | anew -q .tmp/subs_noerror.txt 2>>"$LOGFILE" &>/dev/null
+ dnsx -d $domain -r $resolvers -silent -rcode noerror -w $subs_wordlist | cut -d' ' -f1 | anew -q .tmp/subs_noerror.txt 2>>"$LOGFILE" >/dev/null
fi
- [[ "$INSCOPE" = true ]] && check_inscope .tmp/subs_noerror.txt 2>>"$LOGFILE" &>/dev/null
+ [[ "$INSCOPE" = true ]] && check_inscope .tmp/subs_noerror.txt 2>>"$LOGFILE" >/dev/null
NUMOFLINES=$(cat .tmp/subs_noerror.txt 2>>"$LOGFILE" | sed "s/*.//" | grep ".$domain$" | anew subdomains/subdomains.txt | sed '/^$/d' | wc -l)
end_subfunc "${NUMOFLINES} new subs (DNS noerror)" ${FUNCNAME[0]}
else
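For context on the guard at the top of sub_noerror: it first resolves a random label that cannot exist and only proceeds when the resolver answers NXDOMAIN, because a resolver or wildcard zone that returns NOERROR for everything would flood the NOERROR brute force with junk. Roughly (example.com stands in for $domain):

    # Probe a certainly-nonexistent name; only enumerate if it comes back NXDOMAIN
    probe="${RANDOM}thistotallynotexist${RANDOM}.example.com"
    rcode=$(echo "$probe" | dnsx -r "$resolvers" -rcode noerror,nxdomain -retry 3 -silent | cut -d' ' -f2)
    [ "$rcode" = "[NXDOMAIN]" ] && echo "safe to brute-force NOERROR records"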
@@ -483,22 +485,22 @@ function sub_dns(){
if [ ! -f "$called_fn_dir/.${FUNCNAME[0]}" ] || [ "$DIFF" = true ]; then
start_subfunc ${FUNCNAME[0]} "Running : DNS Subdomain Enumeration and PTR search"
if [ ! "$AXIOM" = true ]; then
- [ -s "subdomains/subdomains.txt" ] && cat subdomains/subdomains.txt | dnsx -r $resolvers_trusted -a -aaaa -cname -ns -ptr -mx -soa -silent -retry 3 -json -o subdomains/subdomains_dnsregs.json 2>>"$LOGFILE" &>/dev/null
+ [ -s "subdomains/subdomains.txt" ] && cat subdomains/subdomains.txt | dnsx -r $resolvers_trusted -a -aaaa -cname -ns -ptr -mx -soa -silent -retry 3 -json -o subdomains/subdomains_dnsregs.json 2>>"$LOGFILE" >/dev/null
[ -s "subdomains/subdomains_dnsregs.json" ] && cat subdomains/subdomains_dnsregs.json | jq -r 'try .a[], try .aaaa[], try .cname[], try .ns[], try .ptr[], try .mx[], try .soa[]' 2>/dev/null | grep ".$domain$" | anew -q .tmp/subdomains_dns.txt
[ -s "subdomains/subdomains_dnsregs.json" ] && cat subdomains/subdomains_dnsregs.json | jq -r 'try .a[]' | sort -u | hakip2host | cut -d' ' -f 3 | unfurl -u domains | sed -e 's/*\.//' -e 's/\.$//' -e '/\./!d' | grep ".$domain$" | anew -q .tmp/subdomains_dns.txt
[ -s "subdomains/subdomains_dnsregs.json" ] && cat subdomains/subdomains_dnsregs.json | jq -r 'try "\(.host) - \(.a[])"' 2>/dev/null | sort -u -k2 | anew -q subdomains/subdomains_ips.txt
resolvers_update_quick_local
- [ -s ".tmp/subdomains_dns.txt" ] && puredns resolve .tmp/subdomains_dns.txt -w .tmp/subdomains_dns_resolved.txt -r $resolvers --resolvers-trusted $resolvers_trusted -l $PUREDNS_PUBLIC_LIMIT --rate-limit-trusted $PUREDNS_TRUSTED_LIMIT --wildcard-tests $PUREDNS_WILDCARDTEST_LIMIT --wildcard-batch $PUREDNS_WILDCARDBATCH_LIMIT 2>>"$LOGFILE" &>/dev/null
+ [ -s ".tmp/subdomains_dns.txt" ] && puredns resolve .tmp/subdomains_dns.txt -w .tmp/subdomains_dns_resolved.txt -r $resolvers --resolvers-trusted $resolvers_trusted -l $PUREDNS_PUBLIC_LIMIT --rate-limit-trusted $PUREDNS_TRUSTED_LIMIT --wildcard-tests $PUREDNS_WILDCARDTEST_LIMIT --wildcard-batch $PUREDNS_WILDCARDBATCH_LIMIT 2>>"$LOGFILE" >/dev/null
else
- [ -s "subdomains/subdomains.txt" ] && axiom-scan subdomains/subdomains.txt -m dnsx -retry 3 -a -aaaa -cname -ns -ptr -mx -soa -json -o subdomains/subdomains_dnsregs.json $AXIOM_EXTRA_ARGS 2>>"$LOGFILE" &>/dev/null
+ [ -s "subdomains/subdomains.txt" ] && axiom-scan subdomains/subdomains.txt -m dnsx -retry 3 -a -aaaa -cname -ns -ptr -mx -soa -json -o subdomains/subdomains_dnsregs.json $AXIOM_EXTRA_ARGS 2>>"$LOGFILE" >/dev/null
[ -s "subdomains/subdomains_dnsregs.json" ] && cat subdomains/subdomains_dnsregs.json | jq -r 'try .a[]' | sort -u | anew -q .tmp/subdomains_dns_a_records.txt
[ -s "subdomains/subdomains_dnsregs.json" ] && cat subdomains/subdomains_dnsregs.json | jq -r 'try .a[]' | sort -u | hakip2host | cut -d' ' -f 3 | unfurl -u domains | sed -e 's/*\.//' -e 's/\.$//' -e '/\./!d' | grep ".$domain$" | anew -q .tmp/subdomains_dns.txt
[ -s "subdomains/subdomains_dnsregs.json" ] && cat subdomains/subdomains_dnsregs.json | jq -r 'try .a[], try .aaaa[], try .cname[], try .ns[], try .ptr[], try .mx[], try .soa[]' 2>/dev/null | grep ".$domain$" | anew -q .tmp/subdomains_dns.txt
[ -s "subdomains/subdomains_dnsregs.json" ] && cat subdomains/subdomains_dnsregs.json | jq -r 'try "\(.host) - \(.a[])"' 2>/dev/null | sort -u -k2 | anew -q subdomains/subdomains_ips.txt
resolvers_update_quick_axiom
- [ -s ".tmp/subdomains_dns.txt" ] && axiom-scan .tmp/subdomains_dns.txt -m puredns-resolve -r /home/op/lists/resolvers.txt --resolvers-trusted /home/op/lists/resolvers_trusted.txt --wildcard-tests $PUREDNS_WILDCARDTEST_LIMIT --wildcard-batch $PUREDNS_WILDCARDBATCH_LIMIT -o .tmp/subdomains_dns_resolved.txt $AXIOM_EXTRA_ARGS 2>>"$LOGFILE" &>/dev/null
+ [ -s ".tmp/subdomains_dns.txt" ] && axiom-scan .tmp/subdomains_dns.txt -m puredns-resolve -r /home/op/lists/resolvers.txt --resolvers-trusted /home/op/lists/resolvers_trusted.txt --wildcard-tests $PUREDNS_WILDCARDTEST_LIMIT --wildcard-batch $PUREDNS_WILDCARDBATCH_LIMIT -o .tmp/subdomains_dns_resolved.txt $AXIOM_EXTRA_ARGS 2>>"$LOGFILE" >/dev/null
fi
- [[ "$INSCOPE" = true ]] && check_inscope .tmp/subdomains_dns_resolved.txt 2>>"$LOGFILE" &>/dev/null
+ [[ "$INSCOPE" = true ]] && check_inscope .tmp/subdomains_dns_resolved.txt 2>>"$LOGFILE" >/dev/null
NUMOFLINES=$(cat .tmp/subdomains_dns_resolved.txt 2>>"$LOGFILE" | grep "\.$domain$\|^$domain$" | anew subdomains/subdomains.txt | sed '/^$/d' | wc -l)
end_subfunc "${NUMOFLINES} new subs (dns resolution)" ${FUNCNAME[0]}
else
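The puredns calls repeated through these hunks all share the same rate-limit and wildcard knobs from reconftw.cfg. Spelled out once for reference (the numeric values below are illustrative placeholders, not the shipped defaults):

    # -l / --rate-limit-trusted: queries per second against public vs trusted resolvers
    # --wildcard-tests / --wildcard-batch: how hard puredns works at wildcard filtering
    puredns resolve candidates.txt \
        -r "$resolvers" --resolvers-trusted "$resolvers_trusted" \
        -l 0 --rate-limit-trusted 400 \
        --wildcard-tests 30 --wildcard-batch 1500000 \
        -w resolved.txt 2>>"$LOGFILE" >/dev/null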
@@ -512,21 +514,21 @@ function sub_brute(){
if [ ! "$AXIOM" = true ]; then
resolvers_update_quick_local
if [ "$DEEP" = true ]; then
- puredns bruteforce $subs_wordlist_big $domain -w .tmp/subs_brute.txt -r $resolvers --resolvers-trusted $resolvers_trusted -l $PUREDNS_PUBLIC_LIMIT --rate-limit-trusted $PUREDNS_TRUSTED_LIMIT --wildcard-tests $PUREDNS_WILDCARDTEST_LIMIT --wildcard-batch $PUREDNS_WILDCARDBATCH_LIMIT 2>>"$LOGFILE" &>/dev/null
+ puredns bruteforce $subs_wordlist_big $domain -w .tmp/subs_brute.txt -r $resolvers --resolvers-trusted $resolvers_trusted -l $PUREDNS_PUBLIC_LIMIT --rate-limit-trusted $PUREDNS_TRUSTED_LIMIT --wildcard-tests $PUREDNS_WILDCARDTEST_LIMIT --wildcard-batch $PUREDNS_WILDCARDBATCH_LIMIT 2>>"$LOGFILE" >/dev/null
else
- puredns bruteforce $subs_wordlist $domain -w .tmp/subs_brute.txt -r $resolvers --resolvers-trusted $resolvers_trusted -l $PUREDNS_PUBLIC_LIMIT --rate-limit-trusted $PUREDNS_TRUSTED_LIMIT --wildcard-tests $PUREDNS_WILDCARDTEST_LIMIT --wildcard-batch $PUREDNS_WILDCARDBATCH_LIMIT 2>>"$LOGFILE" &>/dev/null
+ puredns bruteforce $subs_wordlist $domain -w .tmp/subs_brute.txt -r $resolvers --resolvers-trusted $resolvers_trusted -l $PUREDNS_PUBLIC_LIMIT --rate-limit-trusted $PUREDNS_TRUSTED_LIMIT --wildcard-tests $PUREDNS_WILDCARDTEST_LIMIT --wildcard-batch $PUREDNS_WILDCARDBATCH_LIMIT 2>>"$LOGFILE" >/dev/null
fi
- [ -s ".tmp/subs_brute.txt" ] && puredns resolve .tmp/subs_brute.txt -w .tmp/subs_brute_valid.txt -r $resolvers --resolvers-trusted $resolvers_trusted -l $PUREDNS_PUBLIC_LIMIT --rate-limit-trusted $PUREDNS_TRUSTED_LIMIT --wildcard-tests $PUREDNS_WILDCARDTEST_LIMIT --wildcard-batch $PUREDNS_WILDCARDBATCH_LIMIT 2>>"$LOGFILE" &>/dev/null
+ [ -s ".tmp/subs_brute.txt" ] && puredns resolve .tmp/subs_brute.txt -w .tmp/subs_brute_valid.txt -r $resolvers --resolvers-trusted $resolvers_trusted -l $PUREDNS_PUBLIC_LIMIT --rate-limit-trusted $PUREDNS_TRUSTED_LIMIT --wildcard-tests $PUREDNS_WILDCARDTEST_LIMIT --wildcard-batch $PUREDNS_WILDCARDBATCH_LIMIT 2>>"$LOGFILE" >/dev/null
else
resolvers_update_quick_axiom
if [ "$DEEP" = true ]; then
- axiom-scan $subs_wordlist_big -m puredns-single $domain -r /home/op/lists/resolvers.txt --resolvers-trusted /home/op/lists/resolvers_trusted.txt --wildcard-tests $PUREDNS_WILDCARDTEST_LIMIT --wildcard-batch $PUREDNS_WILDCARDBATCH_LIMIT -o .tmp/subs_brute.txt $AXIOM_EXTRA_ARGS 2>>"$LOGFILE" &>/dev/null
+ axiom-scan $subs_wordlist_big -m puredns-single $domain -r /home/op/lists/resolvers.txt --resolvers-trusted /home/op/lists/resolvers_trusted.txt --wildcard-tests $PUREDNS_WILDCARDTEST_LIMIT --wildcard-batch $PUREDNS_WILDCARDBATCH_LIMIT -o .tmp/subs_brute.txt $AXIOM_EXTRA_ARGS 2>>"$LOGFILE" >/dev/null
else
- axiom-scan $subs_wordlist -m puredns-single $domain -r /home/op/lists/resolvers.txt --resolvers-trusted /home/op/lists/resolvers_trusted.txt --wildcard-tests $PUREDNS_WILDCARDTEST_LIMIT --wildcard-batch $PUREDNS_WILDCARDBATCH_LIMIT -o .tmp/subs_brute.txt $AXIOM_EXTRA_ARGS 2>>"$LOGFILE" &>/dev/null
+ axiom-scan $subs_wordlist -m puredns-single $domain -r /home/op/lists/resolvers.txt --resolvers-trusted /home/op/lists/resolvers_trusted.txt --wildcard-tests $PUREDNS_WILDCARDTEST_LIMIT --wildcard-batch $PUREDNS_WILDCARDBATCH_LIMIT -o .tmp/subs_brute.txt $AXIOM_EXTRA_ARGS 2>>"$LOGFILE" >/dev/null
fi
- [ -s ".tmp/subs_brute.txt" ] && axiom-scan .tmp/subs_brute.txt -m puredns-resolve -r /home/op/lists/resolvers.txt --resolvers-trusted /home/op/lists/resolvers_trusted.txt --wildcard-tests $PUREDNS_WILDCARDTEST_LIMIT --wildcard-batch $PUREDNS_WILDCARDBATCH_LIMIT -o .tmp/subs_brute_valid.txt $AXIOM_EXTRA_ARGS 2>>"$LOGFILE" &>/dev/null
+ [ -s ".tmp/subs_brute.txt" ] && axiom-scan .tmp/subs_brute.txt -m puredns-resolve -r /home/op/lists/resolvers.txt --resolvers-trusted /home/op/lists/resolvers_trusted.txt --wildcard-tests $PUREDNS_WILDCARDTEST_LIMIT --wildcard-batch $PUREDNS_WILDCARDBATCH_LIMIT -o .tmp/subs_brute_valid.txt $AXIOM_EXTRA_ARGS 2>>"$LOGFILE" >/dev/null
fi
- [[ "$INSCOPE" = true ]] && check_inscope .tmp/subs_brute_valid.txt 2>>"$LOGFILE" &>/dev/null
+ [[ "$INSCOPE" = true ]] && check_inscope .tmp/subs_brute_valid.txt 2>>"$LOGFILE" >/dev/null
NUMOFLINES=$(cat .tmp/subs_brute_valid.txt 2>>"$LOGFILE" | sed "s/*.//" | grep ".$domain$" | anew subdomains/subdomains.txt | sed '/^$/d' | wc -l)
end_subfunc "${NUMOFLINES} new subs (bruteforce)" ${FUNCNAME[0]}
else
@@ -546,36 +548,36 @@ function sub_scraping(){
if [[ $(cat subdomains/subdomains.txt | wc -l) -le $DEEP_LIMIT ]] || [ "$DEEP" = true ] ; then
if [ ! "$AXIOM" = true ]; then
resolvers_update_quick_local
- cat subdomains/subdomains.txt | httpx -follow-host-redirects -status-code -threads $HTTPX_THREADS -rl $HTTPX_RATELIMIT -timeout $HTTPX_TIMEOUT -silent -retries 2 -title -web-server -tech-detect -location -no-color -json -o .tmp/web_full_info1.txt 2>>"$LOGFILE" &>/dev/null
+ cat subdomains/subdomains.txt | httpx -follow-host-redirects -status-code -threads $HTTPX_THREADS -rl $HTTPX_RATELIMIT -timeout $HTTPX_TIMEOUT -silent -retries 2 -title -web-server -tech-detect -location -no-color -json -o .tmp/web_full_info1.txt 2>>"$LOGFILE" >/dev/null
[ -s ".tmp/web_full_info1.txt" ] && cat .tmp/web_full_info1.txt | jq -r 'try .url' 2>/dev/null | grep "$domain" | sed "s/*.//" | anew .tmp/probed_tmp_scrap.txt | unfurl -u domains 2>>"$LOGFILE" | anew -q .tmp/scrap_subs.txt
- [ -s ".tmp/probed_tmp_scrap.txt" ] && cat .tmp/probed_tmp_scrap.txt | httpx -tls-grab -tls-probe -csp-probe -status-code -threads $HTTPX_THREADS -rl $HTTPX_RATELIMIT -timeout $HTTPX_TIMEOUT -silent -retries 2 -title -web-server -tech-detect -location -no-color -json -o .tmp/web_full_info2.txt 2>>"$LOGFILE" &>/dev/null
+ [ -s ".tmp/probed_tmp_scrap.txt" ] && cat .tmp/probed_tmp_scrap.txt | httpx -tls-grab -tls-probe -csp-probe -status-code -threads $HTTPX_THREADS -rl $HTTPX_RATELIMIT -timeout $HTTPX_TIMEOUT -silent -retries 2 -title -web-server -tech-detect -location -no-color -json -o .tmp/web_full_info2.txt 2>>"$LOGFILE" >/dev/null
[ -s ".tmp/web_full_info2.txt" ] && cat .tmp/web_full_info2.txt | jq -r 'try ."tls-grab"."dns_names"[],try .csp.domains[],try .url' 2>/dev/null | grep "$domain" | sed "s/*.//" | sort -u | httpx -silent | anew .tmp/probed_tmp_scrap.txt | unfurl -u domains 2>>"$LOGFILE" | anew -q .tmp/scrap_subs.txt
if [ "$DEEP" = true ]; then
- [ -s ".tmp/probed_tmp_scrap.txt" ] && katana -silent -list .tmp/probed_tmp_scrap.txt -jc -kf all -c $KATANA_THREADS -d 3 -fs rdn -o .tmp/katana.txt 2>>"$LOGFILE" &>/dev/null
+ [ -s ".tmp/probed_tmp_scrap.txt" ] && katana -silent -list .tmp/probed_tmp_scrap.txt -jc -kf all -c $KATANA_THREADS -d 3 -fs rdn -o .tmp/katana.txt 2>>"$LOGFILE" >/dev/null
else
- [ -s ".tmp/probed_tmp_scrap.txt" ] && katana -silent -list .tmp/probed_tmp_scrap.txt -jc -kf all -c $KATANA_THREADS -d 2 -fs rdn -o .tmp/katana.txt 2>>"$LOGFILE" &>/dev/null
+ [ -s ".tmp/probed_tmp_scrap.txt" ] && katana -silent -list .tmp/probed_tmp_scrap.txt -jc -kf all -c $KATANA_THREADS -d 2 -fs rdn -o .tmp/katana.txt 2>>"$LOGFILE" >/dev/null
fi
else
resolvers_update_quick_axiom
- axiom-scan subdomains/subdomains.txt -m httpx -follow-host-redirects -random-agent -status-code -threads $HTTPX_THREADS -rl $HTTPX_RATELIMIT -timeout $HTTPX_TIMEOUT -silent -retries 2 -title -web-server -tech-detect -location -no-color -json -o .tmp/web_full_info1.txt $AXIOM_EXTRA_ARGS 2>>"$LOGFILE" &>/dev/null
+ axiom-scan subdomains/subdomains.txt -m httpx -follow-host-redirects -random-agent -status-code -threads $HTTPX_THREADS -rl $HTTPX_RATELIMIT -timeout $HTTPX_TIMEOUT -silent -retries 2 -title -web-server -tech-detect -location -no-color -json -o .tmp/web_full_info1.txt $AXIOM_EXTRA_ARGS 2>>"$LOGFILE" >/dev/null
[ -s ".tmp/web_full_info1.txt" ] && cat .tmp/web_full_info1.txt | jq -r 'try .url' 2>/dev/null | grep "$domain" | sed "s/*.//" | anew .tmp/probed_tmp_scrap.txt | unfurl -u domains 2>>"$LOGFILE" | anew -q .tmp/scrap_subs.txt
- [ -s ".tmp/probed_tmp_scrap.txt" ] && axiom-scan .tmp/probed_tmp_scrap.txt -m httpx -tls-grab -tls-probe -csp-probe -random-agent -status-code -threads $HTTPX_THREADS -rl $HTTPX_RATELIMIT -timeout $HTTPX_TIMEOUT -silent -retries 2 -title -web-server -tech-detect -location -no-color -json -o .tmp/web_full_info2.txt $AXIOM_EXTRA_ARGS 2>>"$LOGFILE" &>/dev/null
+ [ -s ".tmp/probed_tmp_scrap.txt" ] && axiom-scan .tmp/probed_tmp_scrap.txt -m httpx -tls-grab -tls-probe -csp-probe -random-agent -status-code -threads $HTTPX_THREADS -rl $HTTPX_RATELIMIT -timeout $HTTPX_TIMEOUT -silent -retries 2 -title -web-server -tech-detect -location -no-color -json -o .tmp/web_full_info2.txt $AXIOM_EXTRA_ARGS 2>>"$LOGFILE" >/dev/null
[ -s ".tmp/web_full_info2.txt" ] && cat .tmp/web_full_info2.txt | jq -r 'try ."tls-grab"."dns_names"[],try .csp.domains[],try .url' 2>/dev/null | grep "$domain" | sed "s/*.//" | sort -u | httpx -silent | anew .tmp/probed_tmp_scrap.txt | unfurl -u domains 2>>"$LOGFILE" | anew -q .tmp/scrap_subs.txt
if [ "$DEEP" = true ]; then
- [ -s ".tmp/probed_tmp_scrap.txt" ] && axiom-scan .tmp/probed_tmp_scrap.txt -m katana -jc -kf all -d 3 -fs rdn -o .tmp/katana.txt $AXIOM_EXTRA_ARGS 2>>"$LOGFILE" &>/dev/null
+ [ -s ".tmp/probed_tmp_scrap.txt" ] && axiom-scan .tmp/probed_tmp_scrap.txt -m katana -jc -kf all -d 3 -fs rdn -o .tmp/katana.txt $AXIOM_EXTRA_ARGS 2>>"$LOGFILE" >/dev/null
else
- [ -s ".tmp/probed_tmp_scrap.txt" ] && axiom-scan .tmp/probed_tmp_scrap.txt -m katana -jc -kf all -d 2 -fs rdn -o .tmp/katana.txt $AXIOM_EXTRA_ARGS 2>>"$LOGFILE" &>/dev/null
+ [ -s ".tmp/probed_tmp_scrap.txt" ] && axiom-scan .tmp/probed_tmp_scrap.txt -m katana -jc -kf all -d 2 -fs rdn -o .tmp/katana.txt $AXIOM_EXTRA_ARGS 2>>"$LOGFILE" >/dev/null
fi
fi
sed -i '/^.\{2048\}./d' .tmp/katana.txt
[ -s ".tmp/katana.txt" ] && cat .tmp/katana.txt | unfurl -u domains 2>>"$LOGFILE" | grep ".$domain$" | anew -q .tmp/scrap_subs.txt
- [ -s ".tmp/scrap_subs.txt" ] && puredns resolve .tmp/scrap_subs.txt -w .tmp/scrap_subs_resolved.txt -r $resolvers --resolvers-trusted $resolvers_trusted -l $PUREDNS_PUBLIC_LIMIT --rate-limit-trusted $PUREDNS_TRUSTED_LIMIT --wildcard-tests $PUREDNS_WILDCARDTEST_LIMIT --wildcard-batch $PUREDNS_WILDCARDBATCH_LIMIT 2>>"$LOGFILE" &>/dev/null
+ [ -s ".tmp/scrap_subs.txt" ] && puredns resolve .tmp/scrap_subs.txt -w .tmp/scrap_subs_resolved.txt -r $resolvers --resolvers-trusted $resolvers_trusted -l $PUREDNS_PUBLIC_LIMIT --rate-limit-trusted $PUREDNS_TRUSTED_LIMIT --wildcard-tests $PUREDNS_WILDCARDTEST_LIMIT --wildcard-batch $PUREDNS_WILDCARDBATCH_LIMIT 2>>"$LOGFILE" >/dev/null
if [ "$INSCOPE" = true ]; then
- check_inscope .tmp/scrap_subs_resolved.txt 2>>"$LOGFILE" &>/dev/null
+ check_inscope .tmp/scrap_subs_resolved.txt 2>>"$LOGFILE" >/dev/null
fi
NUMOFLINES=$(cat .tmp/scrap_subs_resolved.txt 2>>"$LOGFILE" | grep "\.$domain$\|^$domain$" | anew subdomains/subdomains.txt | tee .tmp/diff_scrap.txt | sed '/^$/d' | wc -l)
- [ -s ".tmp/diff_scrap.txt" ] && cat .tmp/diff_scrap.txt | httpx -follow-host-redirects -random-agent -status-code -threads $HTTPX_THREADS -rl $HTTPX_RATELIMIT -timeout $HTTPX_TIMEOUT -silent -retries 2 -title -web-server -tech-detect -location -no-color -json -o .tmp/web_full_info3.txt 2>>"$LOGFILE" &>/dev/null
+ [ -s ".tmp/diff_scrap.txt" ] && cat .tmp/diff_scrap.txt | httpx -follow-host-redirects -random-agent -status-code -threads $HTTPX_THREADS -rl $HTTPX_RATELIMIT -timeout $HTTPX_TIMEOUT -silent -retries 2 -title -web-server -tech-detect -location -no-color -json -o .tmp/web_full_info3.txt 2>>"$LOGFILE" >/dev/null
[ -s ".tmp/web_full_info3.txt" ] && cat .tmp/web_full_info3.txt | jq -r 'try .url' 2>/dev/null | grep "$domain" | sed "s/*.//" | anew .tmp/probed_tmp_scrap.txt | unfurl -u domains 2>>"$LOGFILE" | anew -q .tmp/scrap_subs.txt
cat .tmp/web_full_info1.txt .tmp/web_full_info2.txt .tmp/web_full_info3.txt 2>>"$LOGFILE" | jq -s 'try .' | jq 'try unique_by(.input)' | jq 'try .[]' 2>>"$LOGFILE" > .tmp/web_full_info.txt
end_subfunc "${NUMOFLINES} new subs (code scraping)" ${FUNCNAME[0]}
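The jq chain that merges the three web_full_info files wraps every stage in try so an empty or truncated file cannot abort the pipeline. Assuming well-formed JSONL input, it boils down to a slurp-and-dedupe on the probed host:

    # Merge the three httpx JSONL outputs, keeping one record per unique .input
    cat .tmp/web_full_info1.txt .tmp/web_full_info2.txt .tmp/web_full_info3.txt 2>/dev/null \
        | jq -s 'unique_by(.input) | .[]' > .tmp/web_full_info.txt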
@@ -599,17 +601,18 @@ function sub_analytics(){
start_subfunc ${FUNCNAME[0]} "Running : Analytics Subdomain Enumeration"
if [ -s ".tmp/probed_tmp_scrap.txt" ]; then
mkdir -p .tmp/output_analytics/
- cat .tmp/probed_tmp_scrap.txt | analyticsrelationships -ch >> .tmp/analytics_subs_tmp.txt 2>>"$LOGFILE" &>/dev/null
+ analyticsrelationships -ch < .tmp/probed_tmp_scrap.txt >> .tmp/analytics_subs_tmp.txt 2>>"$LOGFILE"
+
[ -s ".tmp/analytics_subs_tmp.txt" ] && cat .tmp/analytics_subs_tmp.txt | grep "\.$domain$\|^$domain$" | sed "s/|__ //" | anew -q .tmp/analytics_subs_clean.txt
if [ ! "$AXIOM" = true ]; then
resolvers_update_quick_local
- [ -s ".tmp/analytics_subs_clean.txt" ] && puredns resolve .tmp/analytics_subs_clean.txt -w .tmp/analytics_subs_resolved.txt -r $resolvers --resolvers-trusted $resolvers_trusted -l $PUREDNS_PUBLIC_LIMIT --rate-limit-trusted $PUREDNS_TRUSTED_LIMIT --wildcard-tests $PUREDNS_WILDCARDTEST_LIMIT --wildcard-batch $PUREDNS_WILDCARDBATCH_LIMIT 2>>"$LOGFILE" &>/dev/null
+ [ -s ".tmp/analytics_subs_clean.txt" ] && puredns resolve .tmp/analytics_subs_clean.txt -w .tmp/analytics_subs_resolved.txt -r $resolvers --resolvers-trusted $resolvers_trusted -l $PUREDNS_PUBLIC_LIMIT --rate-limit-trusted $PUREDNS_TRUSTED_LIMIT --wildcard-tests $PUREDNS_WILDCARDTEST_LIMIT --wildcard-batch $PUREDNS_WILDCARDBATCH_LIMIT 2>>"$LOGFILE" >/dev/null
else
resolvers_update_quick_axiom
- [ -s ".tmp/analytics_subs_clean.txt" ] && axiom-scan .tmp/analytics_subs_clean.txt -m puredns-resolve -r /home/op/lists/resolvers.txt --resolvers-trusted /home/op/lists/resolvers_trusted.txt --wildcard-tests $PUREDNS_WILDCARDTEST_LIMIT --wildcard-batch $PUREDNS_WILDCARDBATCH_LIMIT -o .tmp/analytics_subs_resolved.txt $AXIOM_EXTRA_ARGS 2>>"$LOGFILE" &>/dev/null
+ [ -s ".tmp/analytics_subs_clean.txt" ] && axiom-scan .tmp/analytics_subs_clean.txt -m puredns-resolve -r /home/op/lists/resolvers.txt --resolvers-trusted /home/op/lists/resolvers_trusted.txt --wildcard-tests $PUREDNS_WILDCARDTEST_LIMIT --wildcard-batch $PUREDNS_WILDCARDBATCH_LIMIT -o .tmp/analytics_subs_resolved.txt $AXIOM_EXTRA_ARGS 2>>"$LOGFILE" >/dev/null
fi
fi
- [[ "$INSCOPE" = true ]] && check_inscope .tmp/analytics_subs_resolved.txt 2>>"$LOGFILE" &>/dev/null
+ [[ "$INSCOPE" = true ]] && check_inscope .tmp/analytics_subs_resolved.txt 2>>"$LOGFILE" >/dev/null
NUMOFLINES=$(cat .tmp/analytics_subs_resolved.txt 2>>"$LOGFILE" | anew subdomains/subdomains.txt | sed '/^$/d' | wc -l)
end_subfunc "${NUMOFLINES} new subs (analytics relationship)" ${FUNCNAME[0]}
else
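The analyticsrelationships change in this hunk fixes more than log noise: in the old cat | ... >> file 2>>"$LOGFILE" &>/dev/null form, the trailing &>/dev/null was also the last redirection for stdout, so it overrode the >> append and the output file stayed empty. Dropping it (along with the needless cat) lets the results actually land:

    # stdout appends to the results file, stderr goes to the log, nothing is lost
    analyticsrelationships -ch < .tmp/probed_tmp_scrap.txt >> .tmp/analytics_subs_tmp.txt 2>>"$LOGFILE"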
@@ -642,10 +645,10 @@ function sub_permut(){
fi
if [ ! "$AXIOM" = true ]; then
resolvers_update_quick_local
- [ -s ".tmp/gotator1.txt" ] && puredns resolve .tmp/gotator1.txt -w .tmp/permute1.txt -r $resolvers --resolvers-trusted $resolvers_trusted -l $PUREDNS_PUBLIC_LIMIT --rate-limit-trusted $PUREDNS_TRUSTED_LIMIT --wildcard-tests $PUREDNS_WILDCARDTEST_LIMIT --wildcard-batch $PUREDNS_WILDCARDBATCH_LIMIT 2>>"$LOGFILE" &>/dev/null
+ [ -s ".tmp/gotator1.txt" ] && puredns resolve .tmp/gotator1.txt -w .tmp/permute1.txt -r $resolvers --resolvers-trusted $resolvers_trusted -l $PUREDNS_PUBLIC_LIMIT --rate-limit-trusted $PUREDNS_TRUSTED_LIMIT --wildcard-tests $PUREDNS_WILDCARDTEST_LIMIT --wildcard-batch $PUREDNS_WILDCARDBATCH_LIMIT 2>>"$LOGFILE" >/dev/null
else
resolvers_update_quick_axiom
- [ -s ".tmp/gotator1.txt" ] && axiom-scan .tmp/gotator1.txt -m puredns-resolve -r /home/op/lists/resolvers.txt --resolvers-trusted /home/op/lists/resolvers_trusted.txt --wildcard-tests $PUREDNS_WILDCARDTEST_LIMIT --wildcard-batch $PUREDNS_WILDCARDBATCH_LIMIT -o .tmp/permute1.txt $AXIOM_EXTRA_ARGS 2>>"$LOGFILE" &>/dev/null
+ [ -s ".tmp/gotator1.txt" ] && axiom-scan .tmp/gotator1.txt -m puredns-resolve -r /home/op/lists/resolvers.txt --resolvers-trusted /home/op/lists/resolvers_trusted.txt --wildcard-tests $PUREDNS_WILDCARDTEST_LIMIT --wildcard-batch $PUREDNS_WILDCARDBATCH_LIMIT -o .tmp/permute1.txt $AXIOM_EXTRA_ARGS 2>>"$LOGFILE" >/dev/null
fi
if [ "$PERMUTATIONS_OPTION" = "gotator" ] ; then
@@ -655,15 +658,15 @@ function sub_permut(){
fi
if [ ! "$AXIOM" = true ]; then
- [ -s ".tmp/gotator2.txt" ] && puredns resolve .tmp/gotator2.txt -w .tmp/permute2.txt -r $resolvers --resolvers-trusted $resolvers_trusted -l $PUREDNS_PUBLIC_LIMIT --rate-limit-trusted $PUREDNS_TRUSTED_LIMIT --wildcard-tests $PUREDNS_WILDCARDTEST_LIMIT --wildcard-batch $PUREDNS_WILDCARDBATCH_LIMIT 2>>"$LOGFILE" &>/dev/null
+ [ -s ".tmp/gotator2.txt" ] && puredns resolve .tmp/gotator2.txt -w .tmp/permute2.txt -r $resolvers --resolvers-trusted $resolvers_trusted -l $PUREDNS_PUBLIC_LIMIT --rate-limit-trusted $PUREDNS_TRUSTED_LIMIT --wildcard-tests $PUREDNS_WILDCARDTEST_LIMIT --wildcard-batch $PUREDNS_WILDCARDBATCH_LIMIT 2>>"$LOGFILE" >/dev/null
else
- [ -s ".tmp/gotator2.txt" ] && axiom-scan .tmp/gotator2.txt -m puredns-resolve -r /home/op/lists/resolvers.txt --resolvers-trusted /home/op/lists/resolvers_trusted.txt --wildcard-tests $PUREDNS_WILDCARDTEST_LIMIT --wildcard-batch $PUREDNS_WILDCARDBATCH_LIMIT -o .tmp/permute2.txt $AXIOM_EXTRA_ARGS 2>>"$LOGFILE" &>/dev/null
+ [ -s ".tmp/gotator2.txt" ] && axiom-scan .tmp/gotator2.txt -m puredns-resolve -r /home/op/lists/resolvers.txt --resolvers-trusted /home/op/lists/resolvers_trusted.txt --wildcard-tests $PUREDNS_WILDCARDTEST_LIMIT --wildcard-batch $PUREDNS_WILDCARDBATCH_LIMIT -o .tmp/permute2.txt $AXIOM_EXTRA_ARGS 2>>"$LOGFILE" >/dev/null
fi
cat .tmp/permute1.txt .tmp/permute2.txt 2>>"$LOGFILE" | anew -q .tmp/permute_subs.txt
if [ -s ".tmp/permute_subs.txt" ]; then
[ -s "$outOfScope_file" ] && deleteOutScoped $outOfScope_file .tmp/permute_subs.txt
- [[ "$INSCOPE" = true ]] && check_inscope .tmp/permute_subs.txt 2>>"$LOGFILE" &>/dev/null
+ [[ "$INSCOPE" = true ]] && check_inscope .tmp/permute_subs.txt 2>>"$LOGFILE" >/dev/null
NUMOFLINES=$(cat .tmp/permute_subs.txt 2>>"$LOGFILE" | grep ".$domain$" | anew subdomains/subdomains.txt | sed '/^$/d' | wc -l)
else
NUMOFLINES=0
@@ -687,15 +690,15 @@ function sub_regex_permut(){
if [ ! "$AXIOM" = true ]; then
resolvers_update_quick_local
- [ -s ".tmp/${domain}.brute" ] && puredns resolve .tmp/${domain}.brute -w .tmp/regulator.txt -r $resolvers --resolvers-trusted $resolvers_trusted -l $PUREDNS_PUBLIC_LIMIT --rate-limit-trusted $PUREDNS_TRUSTED_LIMIT --wildcard-tests $PUREDNS_WILDCARDTEST_LIMIT --wildcard-batch $PUREDNS_WILDCARDBATCH_LIMIT 2>>"$LOGFILE" &>/dev/null
+ [ -s ".tmp/${domain}.brute" ] && puredns resolve .tmp/${domain}.brute -w .tmp/regulator.txt -r $resolvers --resolvers-trusted $resolvers_trusted -l $PUREDNS_PUBLIC_LIMIT --rate-limit-trusted $PUREDNS_TRUSTED_LIMIT --wildcard-tests $PUREDNS_WILDCARDTEST_LIMIT --wildcard-batch $PUREDNS_WILDCARDBATCH_LIMIT 2>>"$LOGFILE" >/dev/null
else
resolvers_update_quick_axiom
- [ -s ".tmp/${domain}.brute" ] && axiom-scan .tmp/${domain}.brute -m puredns-resolve -r /home/op/lists/resolvers.txt --resolvers-trusted /home/op/lists/resolvers_trusted.txt --wildcard-tests $PUREDNS_WILDCARDTEST_LIMIT --wildcard-batch $PUREDNS_WILDCARDBATCH_LIMIT -o .tmp/regulator.txt $AXIOM_EXTRA_ARGS 2>>"$LOGFILE" &>/dev/null
+ [ -s ".tmp/${domain}.brute" ] && axiom-scan .tmp/${domain}.brute -m puredns-resolve -r /home/op/lists/resolvers.txt --resolvers-trusted /home/op/lists/resolvers_trusted.txt --wildcard-tests $PUREDNS_WILDCARDTEST_LIMIT --wildcard-batch $PUREDNS_WILDCARDBATCH_LIMIT -o .tmp/regulator.txt $AXIOM_EXTRA_ARGS 2>>"$LOGFILE" >/dev/null
fi
if [ -s ".tmp/regulator.txt" ]; then
[ -s "$outOfScope_file" ] && deleteOutScoped $outOfScope_file .tmp/regulator.txt
- [[ "$INSCOPE" = true ]] && check_inscope .tmp/regulator.txt 2>>"$LOGFILE" &>/dev/null
+ [[ "$INSCOPE" = true ]] && check_inscope .tmp/regulator.txt 2>>"$LOGFILE" >/dev/null
NUMOFLINES=$(cat .tmp/regulator.txt 2>>"$LOGFILE" | grep ".$domain$" | anew subdomains/subdomains.txt | sed '/^$/d' | wc -l)
else
NUMOFLINES=0
@@ -716,15 +719,15 @@ function sub_gpt(){
subgpt -i ${dir}/subdomains/subdomains.txt -c $SUBGPT_COOKIE --dont-resolve -o ${dir}/.tmp/gpt_subs.txt 2>>"$LOGFILE"
if [ ! "$AXIOM" = true ]; then
resolvers_update_quick_local
- [ -s "${dir}/.tmp/gpt_subs.txt" ] && puredns resolve ${dir}/.tmp/gpt_subs.txt -w .tmp/gpt_resolved.txt -r $resolvers --resolvers-trusted $resolvers_trusted -l $PUREDNS_PUBLIC_LIMIT --rate-limit-trusted $PUREDNS_TRUSTED_LIMIT --wildcard-tests $PUREDNS_WILDCARDTEST_LIMIT --wildcard-batch $PUREDNS_WILDCARDBATCH_LIMIT 2>>"$LOGFILE" &>/dev/null
+ [ -s "${dir}/.tmp/gpt_subs.txt" ] && puredns resolve ${dir}/.tmp/gpt_subs.txt -w .tmp/gpt_resolved.txt -r $resolvers --resolvers-trusted $resolvers_trusted -l $PUREDNS_PUBLIC_LIMIT --rate-limit-trusted $PUREDNS_TRUSTED_LIMIT --wildcard-tests $PUREDNS_WILDCARDTEST_LIMIT --wildcard-batch $PUREDNS_WILDCARDBATCH_LIMIT 2>>"$LOGFILE" >/dev/null
else
resolvers_update_quick_axiom
- [ -s "${dir}/.tmp/gpt_subs.txt" ] && axiom-scan ${dir}/.tmp/gpt_subs.txt -m puredns-resolve -r /home/op/lists/resolvers.txt --resolvers-trusted /home/op/lists/resolvers_trusted.txt --wildcard-tests $PUREDNS_WILDCARDTEST_LIMIT --wildcard-batch $PUREDNS_WILDCARDBATCH_LIMIT -o .tmp/gpt_resolved.txt $AXIOM_EXTRA_ARGS 2>>"$LOGFILE" &>/dev/null
+ [ -s "${dir}/.tmp/gpt_subs.txt" ] && axiom-scan ${dir}/.tmp/gpt_subs.txt -m puredns-resolve -r /home/op/lists/resolvers.txt --resolvers-trusted /home/op/lists/resolvers_trusted.txt --wildcard-tests $PUREDNS_WILDCARDTEST_LIMIT --wildcard-batch $PUREDNS_WILDCARDBATCH_LIMIT -o .tmp/gpt_resolved.txt $AXIOM_EXTRA_ARGS 2>>"$LOGFILE" >/dev/null
fi
if [ -s ".tmp/gpt_resolved.txt" ]; then
[ -s "$outOfScope_file" ] && deleteOutScoped $outOfScope_file .tmp/gpt_resolved.txt
- [[ "$INSCOPE" = true ]] && check_inscope .tmp/gpt_resolved.txt 2>>"$LOGFILE" &>/dev/null
+ [[ "$INSCOPE" = true ]] && check_inscope .tmp/gpt_resolved.txt 2>>"$LOGFILE" >/dev/null
NUMOFLINES=$(cat .tmp/gpt_resolved.txt 2>>"$LOGFILE" | grep ".$domain$" | anew subdomains/subdomains.txt | sed '/^$/d' | wc -l)
else
NUMOFLINES=0
@@ -749,14 +752,14 @@ function sub_recursive_passive(){
if [ ! "$AXIOM" = true ]; then
resolvers_update_quick_local
[ -s ".tmp/subdomains_recurs_top.txt" ] && timeout -k $AMASS_ENUM_TIMEOUT amass enum -passive -df .tmp/subdomains_recurs_top.txt -nf subdomains/subdomains.txt -config $AMASS_CONFIG -timeout $AMASS_ENUM_TIMEOUT 2>>"$LOGFILE" | anew -q .tmp/passive_recursive.txt
- [ -s ".tmp/passive_recursive.txt" ] && puredns resolve .tmp/passive_recursive.txt -w .tmp/passive_recurs_tmp.txt -r $resolvers --resolvers-trusted $resolvers_trusted -l $PUREDNS_PUBLIC_LIMIT --rate-limit-trusted $PUREDNS_TRUSTED_LIMIT --wildcard-tests $PUREDNS_WILDCARDTEST_LIMIT --wildcard-batch $PUREDNS_WILDCARDBATCH_LIMIT 2>>"$LOGFILE" &>/dev/null
+ [ -s ".tmp/passive_recursive.txt" ] && puredns resolve .tmp/passive_recursive.txt -w .tmp/passive_recurs_tmp.txt -r $resolvers --resolvers-trusted $resolvers_trusted -l $PUREDNS_PUBLIC_LIMIT --rate-limit-trusted $PUREDNS_TRUSTED_LIMIT --wildcard-tests $PUREDNS_WILDCARDTEST_LIMIT --wildcard-batch $PUREDNS_WILDCARDBATCH_LIMIT 2>>"$LOGFILE" >/dev/null
else
resolvers_update_quick_axiom
- [ -s ".tmp/subdomains_recurs_top.txt" ] && axiom-scan .tmp/subdomains_recurs_top.txt -m amass -passive -o .tmp/amass_prec.txt $AXIOM_EXTRA_ARGS 2>>"$LOGFILE" &>/dev/null
+ [ -s ".tmp/subdomains_recurs_top.txt" ] && axiom-scan .tmp/subdomains_recurs_top.txt -m amass -passive -o .tmp/amass_prec.txt $AXIOM_EXTRA_ARGS 2>>"$LOGFILE" >/dev/null
[ -s ".tmp/amass_prec.txt" ] && cat .tmp/amass_prec.txt | anew -q .tmp/passive_recursive.txt
- [ -s ".tmp/passive_recursive.txt" ] && axiom-scan .tmp/passive_recursive.txt -m puredns-resolve -r /home/op/lists/resolvers.txt --resolvers-trusted /home/op/lists/resolvers_trusted.txt --wildcard-tests $PUREDNS_WILDCARDTEST_LIMIT --wildcard-batch $PUREDNS_WILDCARDBATCH_LIMIT -o .tmp/passive_recurs_tmp.txt $AXIOM_EXTRA_ARGS 2>>"$LOGFILE" &>/dev/null
+ [ -s ".tmp/passive_recursive.txt" ] && axiom-scan .tmp/passive_recursive.txt -m puredns-resolve -r /home/op/lists/resolvers.txt --resolvers-trusted /home/op/lists/resolvers_trusted.txt --wildcard-tests $PUREDNS_WILDCARDTEST_LIMIT --wildcard-batch $PUREDNS_WILDCARDBATCH_LIMIT -o .tmp/passive_recurs_tmp.txt $AXIOM_EXTRA_ARGS 2>>"$LOGFILE" >/dev/null
fi
- [[ "$INSCOPE" = true ]] && check_inscope .tmp/passive_recurs_tmp.txt 2>>"$LOGFILE" &>/dev/null
+ [[ "$INSCOPE" = true ]] && check_inscope .tmp/passive_recurs_tmp.txt 2>>"$LOGFILE" >/dev/null
NUMOFLINES=$(cat .tmp/passive_recurs_tmp.txt 2>>"$LOGFILE" | grep "\.$domain$\|^$domain$" | sed '/^$/d' | anew subdomains/subdomains.txt | wc -l)
end_subfunc "${NUMOFLINES} new subs (recursive)" ${FUNCNAME[0]}
else
@@ -776,10 +779,10 @@ function sub_recursive_brute(){
ripgen -d .tmp/subdomains_recurs_top.txt -w $subs_wordlist > .tmp/brute_recursive_wordlist.txt
if [ ! "$AXIOM" = true ]; then
resolvers_update_quick_local
- [ -s ".tmp/brute_recursive_wordlist.txt" ] && puredns resolve .tmp/brute_recursive_wordlist.txt -r $resolvers --resolvers-trusted $resolvers_trusted -l $PUREDNS_PUBLIC_LIMIT --rate-limit-trusted $PUREDNS_TRUSTED_LIMIT --wildcard-tests $PUREDNS_WILDCARDTEST_LIMIT --wildcard-batch $PUREDNS_WILDCARDBATCH_LIMIT -w .tmp/brute_recursive_result.txt 2>>"$LOGFILE" &>/dev/null
+ [ -s ".tmp/brute_recursive_wordlist.txt" ] && puredns resolve .tmp/brute_recursive_wordlist.txt -r $resolvers --resolvers-trusted $resolvers_trusted -l $PUREDNS_PUBLIC_LIMIT --rate-limit-trusted $PUREDNS_TRUSTED_LIMIT --wildcard-tests $PUREDNS_WILDCARDTEST_LIMIT --wildcard-batch $PUREDNS_WILDCARDBATCH_LIMIT -w .tmp/brute_recursive_result.txt 2>>"$LOGFILE" >/dev/null
else
resolvers_update_quick_axiom
- [ -s ".tmp/brute_recursive_wordlist.txt" ] && axiom-scan .tmp/brute_recursive_wordlist.txt -m puredns-resolve -r /home/op/lists/resolvers.txt --resolvers-trusted /home/op/lists/resolvers_trusted.txt --wildcard-tests $PUREDNS_WILDCARDTEST_LIMIT --wildcard-batch $PUREDNS_WILDCARDBATCH_LIMIT -o .tmp/brute_recursive_result.txt $AXIOM_EXTRA_ARGS 2>>"$LOGFILE" &>/dev/null
+ [ -s ".tmp/brute_recursive_wordlist.txt" ] && axiom-scan .tmp/brute_recursive_wordlist.txt -m puredns-resolve -r /home/op/lists/resolvers.txt --resolvers-trusted /home/op/lists/resolvers_trusted.txt --wildcard-tests $PUREDNS_WILDCARDTEST_LIMIT --wildcard-batch $PUREDNS_WILDCARDBATCH_LIMIT -o .tmp/brute_recursive_result.txt $AXIOM_EXTRA_ARGS 2>>"$LOGFILE" >/dev/null
fi
[ -s ".tmp/brute_recursive_result.txt" ] && cat .tmp/brute_recursive_result.txt | anew -q .tmp/brute_recursive.txt
@@ -790,9 +793,9 @@ function sub_recursive_brute(){
fi
if [ ! "$AXIOM" = true ]; then
- [ -s ".tmp/gotator1_recursive.txt" ] && puredns resolve .tmp/gotator1_recursive.txt -w .tmp/permute1_recursive.txt -r $resolvers --resolvers-trusted $resolvers_trusted -l $PUREDNS_PUBLIC_LIMIT --rate-limit-trusted $PUREDNS_TRUSTED_LIMIT --wildcard-tests $PUREDNS_WILDCARDTEST_LIMIT --wildcard-batch $PUREDNS_WILDCARDBATCH_LIMIT 2>>"$LOGFILE" &>/dev/null
+ [ -s ".tmp/gotator1_recursive.txt" ] && puredns resolve .tmp/gotator1_recursive.txt -w .tmp/permute1_recursive.txt -r $resolvers --resolvers-trusted $resolvers_trusted -l $PUREDNS_PUBLIC_LIMIT --rate-limit-trusted $PUREDNS_TRUSTED_LIMIT --wildcard-tests $PUREDNS_WILDCARDTEST_LIMIT --wildcard-batch $PUREDNS_WILDCARDBATCH_LIMIT 2>>"$LOGFILE" >/dev/null
else
- [ -s ".tmp/gotator1_recursive.txt" ] && axiom-scan .tmp/gotator1_recursive.txt -m puredns-resolve -r /home/op/lists/resolvers.txt --resolvers-trusted /home/op/lists/resolvers_trusted.txt --wildcard-tests $PUREDNS_WILDCARDTEST_LIMIT --wildcard-batch $PUREDNS_WILDCARDBATCH_LIMIT -o .tmp/permute1_recursive.txt $AXIOM_EXTRA_ARGS 2>>"$LOGFILE" &>/dev/null
+ [ -s ".tmp/gotator1_recursive.txt" ] && axiom-scan .tmp/gotator1_recursive.txt -m puredns-resolve -r /home/op/lists/resolvers.txt --resolvers-trusted /home/op/lists/resolvers_trusted.txt --wildcard-tests $PUREDNS_WILDCARDTEST_LIMIT --wildcard-batch $PUREDNS_WILDCARDBATCH_LIMIT -o .tmp/permute1_recursive.txt $AXIOM_EXTRA_ARGS 2>>"$LOGFILE" >/dev/null
fi
if [ "$PERMUTATIONS_OPTION" = "gotator" ] ; then
@@ -802,25 +805,25 @@ function sub_recursive_brute(){
fi
if [ ! "$AXIOM" = true ]; then
- [ -s ".tmp/gotator2_recursive.txt" ] && puredns resolve .tmp/gotator2_recursive.txt -w .tmp/permute2_recursive.txt -r $resolvers --resolvers-trusted $resolvers_trusted -l $PUREDNS_PUBLIC_LIMIT --rate-limit-trusted $PUREDNS_TRUSTED_LIMIT --wildcard-tests $PUREDNS_WILDCARDTEST_LIMIT --wildcard-batch $PUREDNS_WILDCARDBATCH_LIMIT 2>>"$LOGFILE" &>/dev/null
+ [ -s ".tmp/gotator2_recursive.txt" ] && puredns resolve .tmp/gotator2_recursive.txt -w .tmp/permute2_recursive.txt -r $resolvers --resolvers-trusted $resolvers_trusted -l $PUREDNS_PUBLIC_LIMIT --rate-limit-trusted $PUREDNS_TRUSTED_LIMIT --wildcard-tests $PUREDNS_WILDCARDTEST_LIMIT --wildcard-batch $PUREDNS_WILDCARDBATCH_LIMIT 2>>"$LOGFILE" >/dev/null
else
- [ -s ".tmp/gotator2_recursive.txt" ] && axiom-scan .tmp/gotator2_recursive.txt -m puredns-resolve -r /home/op/lists/resolvers.txt --resolvers-trusted /home/op/lists/resolvers_trusted.txt --wildcard-tests $PUREDNS_WILDCARDTEST_LIMIT --wildcard-batch $PUREDNS_WILDCARDBATCH_LIMIT -o .tmp/permute2_recursive.txt $AXIOM_EXTRA_ARGS 2>>"$LOGFILE" &>/dev/null
+ [ -s ".tmp/gotator2_recursive.txt" ] && axiom-scan .tmp/gotator2_recursive.txt -m puredns-resolve -r /home/op/lists/resolvers.txt --resolvers-trusted /home/op/lists/resolvers_trusted.txt --wildcard-tests $PUREDNS_WILDCARDTEST_LIMIT --wildcard-batch $PUREDNS_WILDCARDBATCH_LIMIT -o .tmp/permute2_recursive.txt $AXIOM_EXTRA_ARGS 2>>"$LOGFILE" >/dev/null
fi
cat .tmp/permute1_recursive.txt .tmp/permute2_recursive.txt 2>>"$LOGFILE" | anew -q .tmp/permute_recursive.txt
else
end_subfunc "skipped in this mode or defined in reconftw.cfg" ${FUNCNAME[0]}
fi
if [ "$INSCOPE" = true ]; then
- check_inscope .tmp/permute_recursive.txt 2>>"$LOGFILE" &>/dev/null
- check_inscope .tmp/brute_recursive.txt 2>>"$LOGFILE" &>/dev/null
+ check_inscope .tmp/permute_recursive.txt 2>>"$LOGFILE" >/dev/null
+ check_inscope .tmp/brute_recursive.txt 2>>"$LOGFILE" >/dev/null
fi
# Last validation
cat .tmp/permute_recursive.txt .tmp/brute_recursive.txt 2>>"$LOGFILE" | anew -q .tmp/brute_perm_recursive.txt
if [ ! "$AXIOM" = true ]; then
- [ -s ".tmp/brute_recursive.txt" ] && puredns resolve .tmp/brute_perm_recursive.txt -w .tmp/brute_perm_recursive_final.txt -r $resolvers --resolvers-trusted $resolvers_trusted -l $PUREDNS_PUBLIC_LIMIT --rate-limit-trusted $PUREDNS_TRUSTED_LIMIT --wildcard-tests $PUREDNS_WILDCARDTEST_LIMIT --wildcard-batch $PUREDNS_WILDCARDBATCH_LIMIT 2>>"$LOGFILE" &>/dev/null
+ [ -s ".tmp/brute_recursive.txt" ] && puredns resolve .tmp/brute_perm_recursive.txt -w .tmp/brute_perm_recursive_final.txt -r $resolvers --resolvers-trusted $resolvers_trusted -l $PUREDNS_PUBLIC_LIMIT --rate-limit-trusted $PUREDNS_TRUSTED_LIMIT --wildcard-tests $PUREDNS_WILDCARDTEST_LIMIT --wildcard-batch $PUREDNS_WILDCARDBATCH_LIMIT 2>>"$LOGFILE" >/dev/null
else
- [ -s ".tmp/brute_recursive.txt" ] && axiom-scan .tmp/brute_perm_recursive.txt -m puredns-resolve -r /home/op/lists/resolvers.txt --resolvers-trusted /home/op/lists/resolvers_trusted.txt --wildcard-tests $PUREDNS_WILDCARDTEST_LIMIT --wildcard-batch $PUREDNS_WILDCARDBATCH_LIMIT -o .tmp/brute_perm_recursive_final.txt $AXIOM_EXTRA_ARGS 2>>"$LOGFILE" &>/dev/null
+ [ -s ".tmp/brute_recursive.txt" ] && axiom-scan .tmp/brute_perm_recursive.txt -m puredns-resolve -r /home/op/lists/resolvers.txt --resolvers-trusted /home/op/lists/resolvers_trusted.txt --wildcard-tests $PUREDNS_WILDCARDTEST_LIMIT --wildcard-batch $PUREDNS_WILDCARDBATCH_LIMIT -o .tmp/brute_perm_recursive_final.txt $AXIOM_EXTRA_ARGS 2>>"$LOGFILE" >/dev/null
fi
NUMOFLINES=$(cat .tmp/brute_perm_recursive_final.txt 2>>"$LOGFILE" | grep "\.$domain$\|^$domain$" | sed '/^$/d' | anew subdomains/subdomains.txt | wc -l)
@@ -840,11 +843,11 @@ function subtakeover(){
touch .tmp/tko.txt
[ ! -s ".tmp/webs_all.txt" ] && cat webs/webs.txt webs/webs_uncommon_ports.txt 2>/dev/null | anew -q .tmp/webs_all.txt
if [ ! "$AXIOM" = true ]; then
- nuclei -update 2>>"$LOGFILE" &>/dev/null
+ nuclei -update 2>>"$LOGFILE" >/dev/null
cat subdomains/subdomains.txt .tmp/webs_all.txt 2>/dev/null | nuclei -silent -nh -tags takeover -severity info,low,medium,high,critical -retries 3 -rl $NUCLEI_RATELIMIT -o .tmp/tko.txt
else
cat subdomains/subdomains.txt .tmp/webs_all.txt 2>>"$LOGFILE" | sed '/^$/d' | anew -q .tmp/webs_subs.txt
- [ -s ".tmp/webs_subs.txt" ] && axiom-scan .tmp/webs_subs.txt -m nuclei -tags takeover -nh -severity info,low,medium,high,critical -retries 3 -rl $NUCLEI_RATELIMIT -o .tmp/tko.txt $AXIOM_EXTRA_ARGS 2>>"$LOGFILE" &>/dev/null
+ [ -s ".tmp/webs_subs.txt" ] && axiom-scan .tmp/webs_subs.txt -m nuclei -tags takeover -nh -severity info,low,medium,high,critical -retries 3 -rl $NUCLEI_RATELIMIT -o .tmp/tko.txt $AXIOM_EXTRA_ARGS 2>>"$LOGFILE" >/dev/null
fi
# DNS_TAKEOVER
@@ -897,12 +900,12 @@ function s3buckets(){
if [ ! "$AXIOM" = true ]; then
[ -s "subdomains/subdomains.txt" ] && s3scanner scan -f subdomains/subdomains.txt 2>>"$LOGFILE" | anew -q .tmp/s3buckets.txt
else
- axiom-scan subdomains/subdomains.txt -m s3scanner -o .tmp/s3buckets_tmp.txt $AXIOM_EXTRA_ARGS 2>>"$LOGFILE" &>/dev/null
+ axiom-scan subdomains/subdomains.txt -m s3scanner -o .tmp/s3buckets_tmp.txt $AXIOM_EXTRA_ARGS 2>>"$LOGFILE" >/dev/null
[ -s ".tmp/s3buckets_tmp.txt" ] && cat .tmp/s3buckets_tmp.txt .tmp/s3buckets_tmp2.txt 2>>"$LOGFILE" | anew -q .tmp/s3buckets.txt && sed -i '/^$/d' .tmp/s3buckets.txt
fi
# Cloudenum
keyword=${domain%%.*}
- python3 ~/Tools/cloud_enum/cloud_enum.py -k $keyword -qs -l .tmp/output_cloud.txt 2>>"$LOGFILE" &>/dev/null
+ python3 ~/Tools/cloud_enum/cloud_enum.py -k $keyword -qs -l .tmp/output_cloud.txt 2>>"$LOGFILE" >/dev/null
NUMOFLINES1=$(cat .tmp/output_cloud.txt 2>>"$LOGFILE" | sed '/^#/d' | sed '/^$/d' | anew subdomains/cloud_assets.txt | wc -l)
if [ "$NUMOFLINES1" -gt 0 ]; then
@@ -937,9 +940,9 @@ function webprobe_simple(){
if { [ ! -f "$called_fn_dir/.${FUNCNAME[0]}" ] || [ "$DIFF" = true ]; } && [ "$WEBPROBESIMPLE" = true ]; then
start_subfunc ${FUNCNAME[0]} "Running : Http probing $domain"
if [ ! "$AXIOM" = true ]; then
- cat subdomains/subdomains.txt | httpx ${HTTPX_FLAGS} -no-color -json -random-agent -threads $HTTPX_THREADS -rl $HTTPX_RATELIMIT -retries 2 -timeout $HTTPX_TIMEOUT -o .tmp/web_full_info_probe.txt 2>>"$LOGFILE" &>/dev/null
+ cat subdomains/subdomains.txt | httpx ${HTTPX_FLAGS} -no-color -json -random-agent -threads $HTTPX_THREADS -rl $HTTPX_RATELIMIT -retries 2 -timeout $HTTPX_TIMEOUT -o .tmp/web_full_info_probe.txt 2>>"$LOGFILE" >/dev/null
else
- axiom-scan subdomains/subdomains.txt -m httpx ${HTTPX_FLAGS} -no-color -json -random-agent -threads $HTTPX_THREADS -rl $HTTPX_RATELIMIT -retries 2 -timeout $HTTPX_TIMEOUT -o .tmp/web_full_info_probe.txt $AXIOM_EXTRA_ARGS 2>>"$LOGFILE" &>/dev/null
+ axiom-scan subdomains/subdomains.txt -m httpx ${HTTPX_FLAGS} -no-color -json -random-agent -threads $HTTPX_THREADS -rl $HTTPX_RATELIMIT -retries 2 -timeout $HTTPX_TIMEOUT -o .tmp/web_full_info_probe.txt $AXIOM_EXTRA_ARGS 2>>"$LOGFILE" >/dev/null
fi
cat .tmp/web_full_info.txt .tmp/web_full_info_probe.txt webs/web_full_info.txt 2>>"$LOGFILE" | jq -s 'try .' | jq 'try unique_by(.input)' | jq 'try .[]' 2>>"$LOGFILE" > webs/web_full_info.txt
[ -s "webs/web_full_info.txt" ] && cat webs/web_full_info.txt | jq -r 'try .url' 2>/dev/null | grep "$domain" | sed "s/*.//" | anew -q .tmp/probed_tmp.txt
@@ -950,7 +953,7 @@ function webprobe_simple(){
end_subfunc "${NUMOFLINES} new websites resolved" ${FUNCNAME[0]}
if [ "$PROXY" = true ] && [ -n "$proxy_url" ] && [[ $(cat webs/webs.txt| wc -l) -le $DEEP_LIMIT2 ]]; then
notification "Sending websites to proxy" info
- ffuf -mc all -w webs/webs.txt -u FUZZ -replay-proxy $proxy_url 2>>"$LOGFILE" &>/dev/null
+ ffuf -mc all -w webs/webs.txt -u FUZZ -replay-proxy $proxy_url 2>>"$LOGFILE" >/dev/null
fi
else
if [ "$WEBPROBESIMPLE" = false ]; then
@@ -967,11 +970,11 @@ function webprobe_full(){
if [ -s "subdomains/subdomains.txt" ]; then
if [ ! "$AXIOM" = true ]; then
if [ -s "subdomains/subdomains.txt" ]; then
- cat subdomains/subdomains.txt | httpx -follow-host-redirects -random-agent -status-code -p $UNCOMMON_PORTS_WEB -threads $HTTPX_UNCOMMONPORTS_THREADS -timeout $HTTPX_UNCOMMONPORTS_TIMEOUT -silent -retries 2 -title -web-server -tech-detect -location -no-color -json -o .tmp/web_full_info_uncommon.txt 2>>"$LOGFILE" &>/dev/null
+ cat subdomains/subdomains.txt | httpx -follow-host-redirects -random-agent -status-code -p $UNCOMMON_PORTS_WEB -threads $HTTPX_UNCOMMONPORTS_THREADS -timeout $HTTPX_UNCOMMONPORTS_TIMEOUT -silent -retries 2 -title -web-server -tech-detect -location -no-color -json -o .tmp/web_full_info_uncommon.txt 2>>"$LOGFILE" >/dev/null
fi
else
if [ -s "subdomains/subdomains.txt" ]; then
- axiom-scan subdomains/subdomains.txt -m httpx -follow-host-redirects -H \"${HEADER}\" -status-code -p $UNCOMMON_PORTS_WEB -threads $HTTPX_UNCOMMONPORTS_THREADS -timeout $HTTPX_UNCOMMONPORTS_TIMEOUT -silent -retries 2 -title -web-server -tech-detect -location -no-color -json -o .tmp/web_full_info_uncommon.txt $AXIOM_EXTRA_ARGS 2>>"$LOGFILE" &>/dev/null
+ axiom-scan subdomains/subdomains.txt -m httpx -follow-host-redirects -H \"${HEADER}\" -status-code -p $UNCOMMON_PORTS_WEB -threads $HTTPX_UNCOMMONPORTS_THREADS -timeout $HTTPX_UNCOMMONPORTS_TIMEOUT -silent -retries 2 -title -web-server -tech-detect -location -no-color -json -o .tmp/web_full_info_uncommon.txt $AXIOM_EXTRA_ARGS 2>>"$LOGFILE" >/dev/null
fi
fi
fi
@@ -991,7 +994,7 @@ function webprobe_full(){
end_func "Results are saved in $domain/webs/webs_uncommon_ports.txt" ${FUNCNAME[0]}
if [ "$PROXY" = true ] && [ -n "$proxy_url" ] && [[ $(cat webs/webs_uncommon_ports.txt| wc -l) -le $DEEP_LIMIT2 ]]; then
notification "Sending websites with uncommon ports to proxy" info
- ffuf -mc all -w webs/webs_uncommon_ports.txt -u FUZZ -replay-proxy $proxy_url 2>>"$LOGFILE" &>/dev/null
+ ffuf -mc all -w webs/webs_uncommon_ports.txt -u FUZZ -replay-proxy $proxy_url 2>>"$LOGFILE" >/dev/null
fi
else
if [ "$WEBPROBEFULL" = false ]; then
@@ -1009,7 +1012,7 @@ function screenshot(){
if [ ! "$AXIOM" = true ]; then
[ -s ".tmp/webs_all.txt" ] && gowitness file -f .tmp/webs_all.txt -t $GOWITNESS_THREADS $GOWITNESS_FLAGS 2>>"$LOGFILE"
else
- axiom-scan .tmp/webs_all.txt -m gowitness -t $GOWITNESS_THREADS $GOWITNESS_FLAGS -o screenshots $AXIOM_EXTRA_ARGS 2>>"$LOGFILE" &>/dev/null
+ axiom-scan .tmp/webs_all.txt -m gowitness -t $GOWITNESS_THREADS $GOWITNESS_FLAGS -o screenshots $AXIOM_EXTRA_ARGS 2>>"$LOGFILE" >/dev/null
fi
end_func "Results are saved in $domain/screenshots folder" ${FUNCNAME[0]}
else
@@ -1027,7 +1030,7 @@ function virtualhosts(){
[ ! -s ".tmp/webs_all.txt" ] && cat webs/webs.txt webs/webs_uncommon_ports.txt 2>/dev/null | anew -q .tmp/webs_all.txt
if [ -s ".tmp/webs_all.txt" ]; then
mkdir -p $dir/virtualhosts $dir/.tmp/virtualhosts
- interlace -tL .tmp/webs_all.txt -threads ${INTERLACE_THREADS} -c "ffuf -ac -t ${FFUF_THREADS} -rate ${FFUF_RATELIMIT} -H \"${HEADER}\" -H \"Host: FUZZ._cleantarget_\" -w ${fuzz_wordlist} -maxtime ${FFUF_MAXTIME} -u _target_ -of json -o _output_/_cleantarget_.json" -o $dir/.tmp/virtualhosts 2>>"$LOGFILE" &>/dev/null
+ interlace -tL .tmp/webs_all.txt -threads ${INTERLACE_THREADS} -c "ffuf -ac -t ${FFUF_THREADS} -rate ${FFUF_RATELIMIT} -H \"${HEADER}\" -H \"Host: FUZZ._cleantarget_\" -w ${fuzz_wordlist} -maxtime ${FFUF_MAXTIME} -u _target_ -of json -o _output_/_cleantarget_.json" -o $dir/.tmp/virtualhosts 2>>"$LOGFILE" >/dev/null
for sub in $(cat .tmp/webs_all.txt); do
sub_out=$(echo $sub | sed -e 's|^[^/]*//||' -e 's|/.*$||')
[ -s "$dir/.tmp/virtualhosts/${sub_out}.json" ] && cat $dir/.tmp/virtualhosts/${sub_out}.json | jq -r 'try .results[] | "\(.status) \(.length) \(.url)"' | sort | anew -q $dir/virtualhosts/${sub_out}.txt
@@ -1054,7 +1057,7 @@ function favicon(){
if { [ ! -f "$called_fn_dir/.${FUNCNAME[0]}" ] || [ "$DIFF" = true ]; } && [ "$FAVICON" = true ] && ! [[ $domain =~ ^[0-9]+\.[0-9]+\.[0-9]+\.[0-9] ]]; then
start_func ${FUNCNAME[0]} "Favicon Ip Lookup"
cd "$tools/fav-up" || { echo "Failed to cd to $dir in ${FUNCNAME[0]} @ line ${LINENO}"; exit 1; }
- python3 favUp.py -w "$domain" -sc -o favicontest.json 2>>"$LOGFILE" &>/dev/null
+ python3 favUp.py -w "$domain" -sc -o favicontest.json 2>>"$LOGFILE" >/dev/null
if [ -s "favicontest.json" ]; then
cat favicontest.json | jq -r 'try .found_ips' 2>>"$LOGFILE" | grep -v "not-found" > favicontest.txt
sed -i "s/|/\n/g" favicontest.txt
@@ -1098,9 +1101,9 @@ function portscan(){
fi
if [ "$PORTSCAN_ACTIVE" = true ]; then
if [ ! "$AXIOM" = true ]; then
- [ -s ".tmp/ips_nocdn.txt" ] && $SUDO nmap --top-ports 200 -sV -n --max-retries 2 -Pn --open --script vulners -iL .tmp/ips_nocdn.txt -oA hosts/portscan_active 2>>"$LOGFILE" &>/dev/null
+ [ -s ".tmp/ips_nocdn.txt" ] && $SUDO nmap --top-ports 200 -sV -n --max-retries 2 -Pn --open --script vulners -iL .tmp/ips_nocdn.txt -oA hosts/portscan_active 2>>"$LOGFILE" >/dev/null
else
- [ -s ".tmp/ips_nocdn.txt" ] && axiom-scan .tmp/ips_nocdn.txt -m nmapx --top-ports 200 -sV -n -Pn --open --max-retries 2 --script vulners -oA hosts/portscan_active $AXIOM_EXTRA_ARGS 2>>"$LOGFILE" &>/dev/null
+ [ -s ".tmp/ips_nocdn.txt" ] && axiom-scan .tmp/ips_nocdn.txt -m nmapx --top-ports 200 -sV -n -Pn --open --max-retries 2 --script vulners -oA hosts/portscan_active $AXIOM_EXTRA_ARGS 2>>"$LOGFILE" >/dev/null
fi
fi
end_func "Results are saved in hosts/portscan_[passive|active].txt" ${FUNCNAME[0]}
@@ -1138,9 +1141,9 @@ function waf_checks(){
[ ! -s ".tmp/webs_all.txt" ] && cat webs/webs.txt webs/webs_uncommon_ports.txt 2>/dev/null | anew -q .tmp/webs_all.txt
if [ -s ".tmp/webs_all.txt" ]; then
if [ ! "$AXIOM" = true ]; then
- wafw00f -i .tmp/webs_all.txt -o .tmp/wafs.txt 2>>"$LOGFILE" &>/dev/null
+ wafw00f -i .tmp/webs_all.txt -o .tmp/wafs.txt 2>>"$LOGFILE" >/dev/null
else
- axiom-scan .tmp/webs_all.txt -m wafw00f -o .tmp/wafs.txt $AXIOM_EXTRA_ARGS 2>>"$LOGFILE" &>/dev/null
+ axiom-scan .tmp/webs_all.txt -m wafw00f -o .tmp/wafs.txt $AXIOM_EXTRA_ARGS 2>>"$LOGFILE" >/dev/null
fi
if [ -s ".tmp/wafs.txt" ]; then
cat .tmp/wafs.txt | sed -e 's/^[ \t]*//' -e 's/ \+ /\t/g' -e '/(None)/d' | tr -s "\t" ";" > webs/webs_wafs.txt
@@ -1165,7 +1168,7 @@ function waf_checks(){
function nuclei_check(){
if { [ ! -f "$called_fn_dir/.${FUNCNAME[0]}" ] || [ "$DIFF" = true ]; } && [ "$NUCLEICHECK" = true ]; then
start_func ${FUNCNAME[0]} "Templates based web scanner"
- nuclei -update 2>>"$LOGFILE" &>/dev/null
+ nuclei -update 2>>"$LOGFILE" >/dev/null
mkdir -p nuclei_output
[ ! -s ".tmp/webs_all.txt" ] && cat webs/webs.txt webs/webs_uncommon_ports.txt 2>/dev/null | anew -q .tmp/webs_all.txt
[ ! -s ".tmp/webs_subs.txt" ] && cat subdomains/subdomains.txt .tmp/webs_all.txt 2>>"$LOGFILE" | anew -q .tmp/webs_subs.txt
@@ -1187,7 +1190,7 @@ function nuclei_check(){
do
crit=${array[i]}
printf "${yellow}\n Running : Nuclei $crit, check results on nuclei_output folder${reset}\n\n"
- axiom-scan .tmp/webs_subs.txt -m nuclei -severity ${crit} -nh -rl $NUCLEI_RATELIMIT -o nuclei_output/${crit}.txt $AXIOM_EXTRA_ARGS 2>>"$LOGFILE" &>/dev/null
+ axiom-scan .tmp/webs_subs.txt -m nuclei -severity ${crit} -nh -rl $NUCLEI_RATELIMIT -o nuclei_output/${crit}.txt $AXIOM_EXTRA_ARGS 2>>"$LOGFILE" >/dev/null
done
printf "\n\n"
fi
@@ -1209,7 +1212,7 @@ function fuzz(){
if [ -s ".tmp/webs_all.txt" ]; then
mkdir -p $dir/fuzzing $dir/.tmp/fuzzing
if [ ! "$AXIOM" = true ]; then
- interlace -tL .tmp/webs_all.txt -threads ${INTERLACE_THREADS} -c "ffuf ${FFUF_FLAGS} -t ${FFUF_THREADS} -rate ${FFUF_RATELIMIT} -H \"${HEADER}\" -w ${fuzz_wordlist} -maxtime ${FFUF_MAXTIME} -u _target_/FUZZ -o _output_/_cleantarget_.json" -o $dir/.tmp/fuzzing 2>>"$LOGFILE" &>/dev/null
+ interlace -tL .tmp/webs_all.txt -threads ${INTERLACE_THREADS} -c "ffuf ${FFUF_FLAGS} -t ${FFUF_THREADS} -rate ${FFUF_RATELIMIT} -H \"${HEADER}\" -w ${fuzz_wordlist} -maxtime ${FFUF_MAXTIME} -u _target_/FUZZ -o _output_/_cleantarget_.json" -o $dir/.tmp/fuzzing 2>>"$LOGFILE" >/dev/null
for sub in $(cat .tmp/webs_all.txt); do
sub_out=$(echo $sub | sed -e 's|^[^/]*//||' -e 's|/.*$||')
[ -s "$dir/.tmp/fuzzing/${sub_out}.json" ] && cat $dir/.tmp/fuzzing/${sub_out}.json | jq -r 'try .results[] | "\(.status) \(.length) \(.url)"' | sort | anew -q $dir/fuzzing/${sub_out}.txt
@@ -1219,7 +1222,7 @@ function fuzz(){
axiom-exec "mkdir -p /home/op/lists/seclists/Discovery/Web-Content/" &>/dev/null
axiom-exec "wget -q -O - ${fuzzing_remote_list} > /home/op/lists/fuzz_wordlist.txt" &>/dev/null
axiom-exec "wget -q -O - ${fuzzing_remote_list} > /home/op/lists/seclists/Discovery/Web-Content/big.txt" &>/dev/null
- axiom-scan .tmp/webs_all.txt -m ffuf_base -H "${HEADER}" $FFUF_FLAGS -s -maxtime $FFUF_MAXTIME -o $dir/.tmp/ffuf-content.json $AXIOM_EXTRA_ARGS 2>>"$LOGFILE" &>/dev/null
+ axiom-scan .tmp/webs_all.txt -m ffuf_base -H "${HEADER}" $FFUF_FLAGS -s -maxtime $FFUF_MAXTIME -o $dir/.tmp/ffuf-content.json $AXIOM_EXTRA_ARGS 2>>"$LOGFILE" >/dev/null
for sub in $(cat .tmp/webs_all.txt); do
sub_out=$(echo $sub | sed -e 's|^[^/]*//||' -e 's|/.*$||')
[ -s "$dir/.tmp/ffuf-content.json" ] && cat .tmp/ffuf-content.json | jq -r 'try .results[] | "\(.status) \(.length) \(.url)"' | grep $sub | sort | sort -k1 | anew -q fuzzing/${sub_out}.txt
@@ -1246,7 +1249,7 @@ function cms_scanner(){
[ ! -s ".tmp/webs_all.txt" ] && cat webs/webs.txt webs/webs_uncommon_ports.txt 2>/dev/null | anew -q .tmp/webs_all.txt
if [ -s ".tmp/webs_all.txt" ]; then
tr '\n' ',' < .tmp/webs_all.txt > .tmp/cms.txt
- timeout -k $CMSSCAN_TIMEOUT python3 $tools/CMSeeK/cmseek.py -l .tmp/cms.txt --batch -r 2>>"$LOGFILE" &>/dev/null
+ timeout -k 1m $CMSSCAN_TIMEOUT python3 $tools/CMSeeK/cmseek.py -l .tmp/cms.txt --batch -r 2>>"$LOGFILE" >/dev/null
exit_status=$?
if [[ $exit_status -eq 125 ]]; then
echo "TIMEOUT cmseek.py - investigate manually for $dir" >> "$LOGFILE"
@@ -1289,12 +1292,12 @@ function urlchecks(){
if [ "$URL_CHECK_PASSIVE" = true ]; then
if [ "$DEEP" = true ]; then
cat .tmp/webs_all.txt | unfurl -u domains > .tmp/waymore_input.txt
- python3 $tools/waymore/waymore.py -i .tmp/waymore_input.txt -mode U -f -oU .tmp/url_extract_tmp.txt 2>>"$LOGFILE" &>/dev/null
+ python3 $tools/waymore/waymore.py -i .tmp/waymore_input.txt -mode U -f -oU .tmp/url_extract_tmp.txt 2>>"$LOGFILE" >/dev/null
else
cat .tmp/webs_all.txt | gau --threads $GAU_THREADS | anew -q .tmp/url_extract_tmp.txt
fi
if [ -s "${GITHUB_TOKENS}" ]; then
- github-endpoints -q -k -d $domain -t ${GITHUB_TOKENS} -o .tmp/github-endpoints.txt 2>>"$LOGFILE" &>/dev/null
+ github-endpoints -q -k -d $domain -t ${GITHUB_TOKENS} -o .tmp/github-endpoints.txt 2>>"$LOGFILE" >/dev/null
[ -s ".tmp/github-endpoints.txt" ] && cat .tmp/github-endpoints.txt | anew -q .tmp/url_extract_tmp.txt
fi
fi
@@ -1302,9 +1305,9 @@ function urlchecks(){
if [ $diff_webs != "0" ] || [ ! -s ".tmp/katana.txt" ]; then
if [ "$URL_CHECK_ACTIVE" = true ]; then
if [ "$DEEP" = true ]; then
- katana -silent -list .tmp/webs_all.txt -jc -kf all -c $KATANA_THREADS -d 3 -fs rdn -o .tmp/katana.txt 2>>"$LOGFILE" &>/dev/null
+ katana -silent -list .tmp/webs_all.txt -jc -kf all -c $KATANA_THREADS -d 3 -fs rdn -o .tmp/katana.txt 2>>"$LOGFILE" >/dev/null
else
- katana -silent -list .tmp/webs_all.txt -jc -kf all -c $KATANA_THREADS -d 2 -fs rdn -o .tmp/katana.txt 2>>"$LOGFILE" &>/dev/null
+ katana -silent -list .tmp/webs_all.txt -jc -kf all -c $KATANA_THREADS -d 2 -fs rdn -o .tmp/katana.txt 2>>"$LOGFILE" >/dev/null
fi
fi
fi
@@ -1312,12 +1315,12 @@ function urlchecks(){
if [ "$URL_CHECK_PASSIVE" = true ]; then
if [ "$DEEP" = true ]; then
cat .tmp/webs_all.txt | unfurl -u domains > .tmp/waymore_input.txt
- axiom-scan .tmp/waymore_input.txt -m waymore -o .tmp/url_extract_tmp.txt $AXIOM_EXTRA_ARGS 2>>"$LOGFILE" &>/dev/null
+ axiom-scan .tmp/waymore_input.txt -m waymore -o .tmp/url_extract_tmp.txt $AXIOM_EXTRA_ARGS 2>>"$LOGFILE" >/dev/null
else
- axiom-scan .tmp/webs_all.txt -m gau -o .tmp/url_extract_tmp.txt $AXIOM_EXTRA_ARGS 2>>"$LOGFILE" &>/dev/null
+ axiom-scan .tmp/webs_all.txt -m gau -o .tmp/url_extract_tmp.txt $AXIOM_EXTRA_ARGS 2>>"$LOGFILE" >/dev/null
fi
if [ -s "${GITHUB_TOKENS}" ]; then
- github-endpoints -q -k -d $domain -t ${GITHUB_TOKENS} -o .tmp/github-endpoints.txt 2>>"$LOGFILE" &>/dev/null
+ github-endpoints -q -k -d $domain -t ${GITHUB_TOKENS} -o .tmp/github-endpoints.txt 2>>"$LOGFILE" >/dev/null
[ -s ".tmp/github-endpoints.txt" ] && cat .tmp/github-endpoints.txt | anew -q .tmp/url_extract_tmp.txt
fi
fi
@@ -1325,9 +1328,9 @@ function urlchecks(){
if [ $diff_webs != "0" ] || [ ! -s ".tmp/katana.txt" ]; then
if [ "$URL_CHECK_ACTIVE" = true ]; then
if [ "$DEEP" = true ]; then
- axiom-scan .tmp/webs_all.txt -m katana -jc -kf all -d 3 -fs rdn -fs rdn -o .tmp/katana.txt $AXIOM_EXTRA_ARGS 2>>"$LOGFILE" &>/dev/null
+ axiom-scan .tmp/webs_all.txt -m katana -jc -kf all -d 3 -fs rdn -o .tmp/katana.txt $AXIOM_EXTRA_ARGS 2>>"$LOGFILE" >/dev/null
else
- axiom-scan .tmp/webs_all.txt -m katana -jc -kf all -d 2 -fs rdn -fs rdn -o .tmp/katana.txt $AXIOM_EXTRA_ARGS 2>>"$LOGFILE" &>/dev/null
+ axiom-scan .tmp/webs_all.txt -m katana -jc -kf all -d 2 -fs rdn -o .tmp/katana.txt $AXIOM_EXTRA_ARGS 2>>"$LOGFILE" >/dev/null
fi
fi
fi
@@ -1339,13 +1342,13 @@ function urlchecks(){
[ -s ".tmp/url_extract_js.txt" ] && interlace -tL .tmp/url_extract_js.txt -threads 10 -c "python3 $tools/JSA/jsa.py -f target | anew -q .tmp/url_extract_tmp.txt" &>/dev/null
fi
[ -s ".tmp/url_extract_tmp.txt" ] && cat .tmp/url_extract_tmp.txt | grep "${domain}" | grep "=" | qsreplace -a 2>>"$LOGFILE" | grep -aEiv "\.(eot|jpg|jpeg|gif|css|tif|tiff|png|ttf|otf|woff|woff2|ico|pdf|svg|txt|js)$" | anew -q .tmp/url_extract_tmp2.txt
- [ -s ".tmp/url_extract_tmp2.txt" ] && cat .tmp/url_extract_tmp2.txt | python3 $tools/urless/urless.py | anew -q .tmp/url_extract_uddup.txt 2>>"$LOGFILE" &>/dev/null
+ [ -s ".tmp/url_extract_tmp2.txt" ] && cat .tmp/url_extract_tmp2.txt | python3 $tools/urless/urless.py | anew -q .tmp/url_extract_uddup.txt 2>>"$LOGFILE" >/dev/null
NUMOFLINES=$(cat .tmp/url_extract_uddup.txt 2>>"$LOGFILE" | anew webs/url_extract.txt | sed '/^$/d' | wc -l)
notification "${NUMOFLINES} new urls with params" info
end_func "Results are saved in $domain/webs/url_extract.txt" ${FUNCNAME[0]}
if [ "$PROXY" = true ] && [ -n "$proxy_url" ] && [[ $(cat webs/url_extract.txt | wc -l) -le $DEEP_LIMIT2 ]]; then
notification "Sending urls to proxy" info
- ffuf -mc all -w webs/url_extract.txt -u FUZZ -replay-proxy $proxy_url 2>>"$LOGFILE" &>/dev/null
+ ffuf -mc all -w webs/url_extract.txt -u FUZZ -replay-proxy $proxy_url 2>>"$LOGFILE" >/dev/null
fi
fi
else
@@ -1415,21 +1418,21 @@ function jschecks(){
if [ ! "$AXIOM" = true ]; then
cat .tmp/url_extract_js.txt | subjs -ua "Mozilla/5.0 (X11; Linux x86_64; rv:72.0) Gecko/20100101 Firefox/72.0" -c 40 | grep "$domain" | anew -q .tmp/subjslinks.txt
else
- axiom-scan .tmp/url_extract_js.txt -m subjs -o .tmp/subjslinks.txt $AXIOM_EXTRA_ARGS 2>>"$LOGFILE" &>/dev/null
+ axiom-scan .tmp/url_extract_js.txt -m subjs -o .tmp/subjslinks.txt $AXIOM_EXTRA_ARGS 2>>"$LOGFILE" >/dev/null
fi
[ -s ".tmp/subjslinks.txt" ] && cat .tmp/subjslinks.txt | egrep -iv "\.(eot|jpg|jpeg|gif|css|tif|tiff|png|ttf|otf|woff|woff2|ico|pdf|svg|txt|js)" | anew -q js/nojs_links.txt
[ -s ".tmp/subjslinks.txt" ] && cat .tmp/subjslinks.txt | grep -iE "\.js($|\?)" | anew -q .tmp/url_extract_js.txt
- cat .tmp/url_extract_js.txt | python3 $tools/urless/urless.py | anew -q js/url_extract_js.txt 2>>"$LOGFILE" &>/dev/null
+ cat .tmp/url_extract_js.txt | python3 $tools/urless/urless.py | anew -q js/url_extract_js.txt 2>>"$LOGFILE" >/dev/null
printf "${yellow} Running : Resolving JS Urls 2/5${reset}\n"
if [ ! "$AXIOM" = true ]; then
[ -s "js/url_extract_js.txt" ] && cat js/url_extract_js.txt | httpx -follow-redirects -random-agent -silent -timeout $HTTPX_TIMEOUT -threads $HTTPX_THREADS -rl $HTTPX_RATELIMIT -status-code -content-type -retries 2 -no-color | grep "[200]" | grep "javascript" | cut -d ' ' -f1 | anew -q js/js_livelinks.txt
else
- [ -s "js/url_extract_js.txt" ] && axiom-scan js/url_extract_js.txt -m httpx -follow-host-redirects -H \"${HEADER}\" -status-code -threads $HTTPX_THREADS -rl $HTTPX_RATELIMIT -timeout $HTTPX_TIMEOUT -silent -content-type -retries 2 -no-color -o .tmp/js_livelinks.txt $AXIOM_EXTRA_ARGS 2>>"$LOGFILE" &>/dev/null
+ [ -s "js/url_extract_js.txt" ] && axiom-scan js/url_extract_js.txt -m httpx -follow-host-redirects -H \"${HEADER}\" -status-code -threads $HTTPX_THREADS -rl $HTTPX_RATELIMIT -timeout $HTTPX_TIMEOUT -silent -content-type -retries 2 -no-color -o .tmp/js_livelinks.txt $AXIOM_EXTRA_ARGS 2>>"$LOGFILE" >/dev/null
[ -s ".tmp/js_livelinks.txt" ] && cat .tmp/js_livelinks.txt | anew .tmp/web_full_info.txt | grep "[200]" | grep "javascript" | cut -d ' ' -f1 | anew -q js/js_livelinks.txt
fi
printf "${yellow} Running : Gathering endpoints 3/5${reset}\n"
- [ -s "js/js_livelinks.txt" ] && python3 $tools/xnLinkFinder/xnLinkFinder.py -i js/js_livelinks.txt -sf subdomains/subdomains.txt -d $XNLINKFINDER_DEPTH -o .tmp/js_endpoints.txt 2>>"$LOGFILE" &>/dev/null
- [ -s "parameters.txt" ] && rm -f parameters.txt 2>>"$LOGFILE" &>/dev/null
+ [ -s "js/js_livelinks.txt" ] && python3 $tools/xnLinkFinder/xnLinkFinder.py -i js/js_livelinks.txt -sf subdomains/subdomains.txt -d $XNLINKFINDER_DEPTH -o .tmp/js_endpoints.txt 2>>"$LOGFILE" >/dev/null
+ [ -s "parameters.txt" ] && rm -f parameters.txt 2>>"$LOGFILE" >/dev/null
if [ -s ".tmp/js_endpoints.txt" ]; then
sed -i '/^\//!d' .tmp/js_endpoints.txt
cat .tmp/js_endpoints.txt | anew -q js/js_endpoints.txt
@@ -1441,7 +1444,7 @@ function jschecks(){
[ -s "js/js_livelinks.txt" ] && axiom-scan js/js_livelinks.txt -m mantra -ua ${HEADER} -o js/js_secrets.txt $AXIOM_EXTRA_ARGS &>/dev/null
fi
printf "${yellow} Running : Building wordlist 5/5${reset}\n"
- [ -s "js/js_livelinks.txt" ] && interlace -tL js/js_livelinks.txt -threads ${INTERLACE_THREADS} -c "python3 $tools/getjswords.py '_target_' | anew -q webs/dict_words.txt" 2>>"$LOGFILE" &>/dev/null
+ [ -s "js/js_livelinks.txt" ] && interlace -tL js/js_livelinks.txt -threads ${INTERLACE_THREADS} -c "python3 $tools/getjswords.py '_target_' | anew -q webs/dict_words.txt" 2>>"$LOGFILE" >/dev/null
end_func "Results are saved in $domain/js folder" ${FUNCNAME[0]}
else
end_func "No JS urls found for $domain, function skipped" ${FUNCNAME[0]}
@@ -1468,7 +1471,7 @@ function wordlist_gen(){
end_func "Results are saved in $domain/webs/dict_[words|paths].txt" ${FUNCNAME[0]}
if [ "$PROXY" = true ] && [ -n "$proxy_url" ] && [[ $(cat webs/all_paths.txt | wc -l) -le $DEEP_LIMIT2 ]]; then
notification "Sending urls to proxy" info
- ffuf -mc all -w webs/all_paths.txt -u FUZZ -replay-proxy $proxy_url 2>>"$LOGFILE" &>/dev/null
+ ffuf -mc all -w webs/all_paths.txt -u FUZZ -replay-proxy $proxy_url 2>>"$LOGFILE" >/dev/null
fi
else
if [ "$WORDLIST" = false ]; then
@@ -1500,7 +1503,7 @@ function password_dict(){
if { [ ! -f "$called_fn_dir/.${FUNCNAME[0]}" ] || [ "$DIFF" = true ]; } && [ "$PASSWORD_DICT" = true ]; then
start_func ${FUNCNAME[0]} "Password dictionary generation"
word=${domain%%.*}
- python3 $tools/pydictor/pydictor.py -extend $word --leet 0 1 2 11 21 --len ${PASSWORD_MIN_LENGTH} ${PASSWORD_MAX_LENGTH} -o webs/password_dict.txt 2>>"$LOGFILE" &>/dev/null
+ python3 $tools/pydictor/pydictor.py -extend $word --leet 0 1 2 11 21 --len ${PASSWORD_MIN_LENGTH} ${PASSWORD_MAX_LENGTH} -o webs/password_dict.txt 2>>"$LOGFILE" >/dev/null
end_func "Results are saved in $domain/webs/password_dict.txt" ${FUNCNAME[0]}
else
if [ "$PASSWORD_DICT" = false ]; then
@@ -1522,18 +1525,18 @@ function brokenLinks(){
if [ ! "$AXIOM" = true ]; then
if [ ! -s ".tmp/katana.txt" ]; then
if [ "$DEEP" = true ]; then
- [ -s ".tmp/webs_all.txt" ] && katana -silent -list .tmp/webs_all.txt -jc -kf all -c $KATANA_THREADS -d 3 -o .tmp/katana.txt 2>>"$LOGFILE" &>/dev/null
+ [ -s ".tmp/webs_all.txt" ] && katana -silent -list .tmp/webs_all.txt -jc -kf all -c $KATANA_THREADS -d 3 -o .tmp/katana.txt 2>>"$LOGFILE" >/dev/null
else
- [ -s ".tmp/webs_all.txt" ] && katana -silent -list .tmp/webs_all.txt -jc -kf all -c $KATANA_THREADS -d 2 -o .tmp/katana.txt 2>>"$LOGFILE" &>/dev/null
+ [ -s ".tmp/webs_all.txt" ] && katana -silent -list .tmp/webs_all.txt -jc -kf all -c $KATANA_THREADS -d 2 -o .tmp/katana.txt 2>>"$LOGFILE" >/dev/null
fi
fi
[ -s ".tmp/katana.txt" ] && sed -i '/^.\{2048\}./d' .tmp/katana.txt
else
if [ ! -s ".tmp/katana.txt" ]; then
if [ "$DEEP" = true ]; then
- [ -s ".tmp/webs_all.txt" ] && axiom-scan .tmp/webs_all.txt -m katana -jc -kf all -d 3 -o .tmp/katana.txt $AXIOM_EXTRA_ARGS 2>>"$LOGFILE" &>/dev/null
+ [ -s ".tmp/webs_all.txt" ] && axiom-scan .tmp/webs_all.txt -m katana -jc -kf all -d 3 -o .tmp/katana.txt $AXIOM_EXTRA_ARGS 2>>"$LOGFILE" >/dev/null
else
- [ -s ".tmp/webs_all.txt" ] && axiom-scan .tmp/webs_all.txt -m katana -jc -kf all -d 2 -o .tmp/katana.txt $AXIOM_EXTRA_ARGS 2>>"$LOGFILE" &>/dev/null
+ [ -s ".tmp/webs_all.txt" ] && axiom-scan .tmp/webs_all.txt -m katana -jc -kf all -d 2 -o .tmp/katana.txt $AXIOM_EXTRA_ARGS 2>>"$LOGFILE" >/dev/null
fi
[ -s ".tmp/katana.txt" ] && sed -i '/^.\{2048\}./d' .tmp/katana.txt
fi
@@ -1578,18 +1581,18 @@ function xss(){
else
if [ "$DEEP" = true ]; then
if [ -n "$XSS_SERVER" ]; then
- [ -s ".tmp/xss_reflected.txt" ] && axiom-scan .tmp/xss_reflected.txt -m dalfox --skip-bav -b ${XSS_SERVER} -w $DALFOX_THREADS -o vulns/xss.txt $AXIOM_EXTRA_ARGS 2>>"$LOGFILE" &>/dev/null
+ [ -s ".tmp/xss_reflected.txt" ] && axiom-scan .tmp/xss_reflected.txt -m dalfox --skip-bav -b ${XSS_SERVER} -w $DALFOX_THREADS -o vulns/xss.txt $AXIOM_EXTRA_ARGS 2>>"$LOGFILE" >/dev/null
else
printf "${yellow}\n No XSS_SERVER defined, blind xss skipped\n\n"
- [ -s ".tmp/xss_reflected.txt" ] && axiom-scan .tmp/xss_reflected.txt -m dalfox --skip-bav -w $DALFOX_THREADS -o vulns/xss.txt $AXIOM_EXTRA_ARGS 2>>"$LOGFILE" &>/dev/null
+ [ -s ".tmp/xss_reflected.txt" ] && axiom-scan .tmp/xss_reflected.txt -m dalfox --skip-bav -w $DALFOX_THREADS -o vulns/xss.txt $AXIOM_EXTRA_ARGS 2>>"$LOGFILE" >/dev/null
fi
else
if [[ $(cat .tmp/xss_reflected.txt | wc -l) -le $DEEP_LIMIT ]]; then
if [ -n "$XSS_SERVER" ]; then
- axiom-scan .tmp/xss_reflected.txt -m dalfox --skip-bav --skip-grepping --skip-mining-all --skip-mining-dict -b ${XSS_SERVER} -w $DALFOX_THREADS -o vulns/xss.txt $AXIOM_EXTRA_ARGS 2>>"$LOGFILE" &>/dev/null
+ axiom-scan .tmp/xss_reflected.txt -m dalfox --skip-bav --skip-grepping --skip-mining-all --skip-mining-dict -b ${XSS_SERVER} -w $DALFOX_THREADS -o vulns/xss.txt $AXIOM_EXTRA_ARGS 2>>"$LOGFILE" >/dev/null
else
printf "${yellow}\n No XSS_SERVER defined, blind xss skipped\n\n"
- axiom-scan .tmp/xss_reflected.txt -m dalfox --skip-bav --skip-grepping --skip-mining-all --skip-mining-dict -w $DALFOX_THREADS -o vulns/xss.txt $AXIOM_EXTRA_ARGS 2>>"$LOGFILE" &>/dev/null
+ axiom-scan .tmp/xss_reflected.txt -m dalfox --skip-bav --skip-grepping --skip-mining-all --skip-mining-dict -w $DALFOX_THREADS -o vulns/xss.txt $AXIOM_EXTRA_ARGS 2>>"$LOGFILE" >/dev/null
fi
else
printf "${bred} Skipping XSS: Too many URLs to test, try with --deep flag${reset}\n"
@@ -1612,7 +1615,7 @@ function cors(){
if { [ ! -f "$called_fn_dir/.${FUNCNAME[0]}" ] || [ "$DIFF" = true ]; } && [ "$CORS" = true ]; then
start_func ${FUNCNAME[0]} "CORS Scan"
[ ! -s ".tmp/webs_all.txt" ] && cat webs/webs.txt webs/webs_uncommon_ports.txt 2>/dev/null | anew -q .tmp/webs_all.txt
- [ -s ".tmp/webs_all.txt" ] && python3 $tools/Corsy/corsy.py -i .tmp/webs_all.txt -o vulns/cors.txt 2>>"$LOGFILE" &>/dev/null
+ [ -s ".tmp/webs_all.txt" ] && python3 $tools/Corsy/corsy.py -i .tmp/webs_all.txt -o vulns/cors.txt 2>>"$LOGFILE" >/dev/null
end_func "Results are saved in vulns/cors.txt" ${FUNCNAME[0]}
else
if [ "$CORS" = false ]; then
@@ -1689,7 +1692,7 @@ function crlf_checks(){
start_func ${FUNCNAME[0]} "CRLF checks"
[ ! -s ".tmp/webs_all.txt" ] && cat webs/webs.txt webs/webs_uncommon_ports.txt 2>/dev/null | anew -q .tmp/webs_all.txt
if [ "$DEEP" = true ] || [[ $(cat .tmp/webs_all.txt | wc -l) -le $DEEP_LIMIT ]]; then
- crlfuzz -l .tmp/webs_all.txt -o vulns/crlf.txt 2>>"$LOGFILE" &>/dev/null
+ crlfuzz -l .tmp/webs_all.txt -o vulns/crlf.txt 2>>"$LOGFILE" >/dev/null
end_func "Results are saved in vulns/crlf.txt" ${FUNCNAME[0]}
else
end_func "Skipping CRLF: Too many URLs to test, try with --deep flag" ${FUNCNAME[0]}
@@ -1756,10 +1759,10 @@ function sqli(){
cat gf/sqli.txt | qsreplace FUZZ | sed '/FUZZ/!d' | anew -q .tmp/tmp_sqli.txt
if [ "$DEEP" = true ] || [[ $(cat .tmp/tmp_sqli.txt | wc -l) -le $DEEP_LIMIT ]]; then
if [ "$SQLMAP" = true ];then
- python3 $tools/sqlmap/sqlmap.py -m .tmp/tmp_sqli.txt -b -o --smart --batch --disable-coloring --random-agent --output-dir=vulns/sqlmap 2>>"$LOGFILE" &>/dev/null
+ python3 $tools/sqlmap/sqlmap.py -m .tmp/tmp_sqli.txt -b -o --smart --batch --disable-coloring --random-agent --output-dir=vulns/sqlmap 2>>"$LOGFILE" >/dev/null
fi
if [ "$GHAURI" = true ];then
- interlace -tL .tmp/tmp_sqli.txt -threads ${INTERLACE_THREADS} -c "ghauri -u _target_ --batch -H \"${HEADER}\" --force-ssl >> vulns/ghauri_log.txt" 2>>"$LOGFILE" &>/dev/null
+ interlace -tL .tmp/tmp_sqli.txt -threads ${INTERLACE_THREADS} -c "ghauri -u _target_ --batch -H \"${HEADER}\" --force-ssl >> vulns/ghauri_log.txt" 2>>"$LOGFILE" >/dev/null
fi
end_func "Results are saved in vulns/sqlmap folder" ${FUNCNAME[0]}
else
@@ -1794,7 +1797,7 @@ function spraying(){
if { [ ! -f "$called_fn_dir/.${FUNCNAME[0]}" ] || [ "$DIFF" = true ]; } && [ "$SPRAY" = true ]; then
start_func ${FUNCNAME[0]} "Password spraying"
cd "$tools/brutespray" || { echo "Failed to cd directory in ${FUNCNAME[0]} @ line ${LINENO}"; exit 1; }
- python3 brutespray.py --file $dir/hosts/portscan_active.gnmap --threads $BRUTESPRAY_THREADS --hosts $BRUTESPRAY_CONCURRENCE -o $dir/vulns/brutespray 2>>"$LOGFILE" &>/dev/null
+ python3 brutespray.py --file $dir/hosts/portscan_active.gnmap --threads $BRUTESPRAY_THREADS --hosts $BRUTESPRAY_CONCURRENCE -o $dir/vulns/brutespray 2>>"$LOGFILE" >/dev/null
cd "$dir" || { echo "Failed to cd directory in ${FUNCNAME[0]} @ line ${LINENO}"; exit 1; }
end_func "Results are saved in vulns/brutespray folder" ${FUNCNAME[0]}
else
@@ -1811,7 +1814,7 @@ function command_injection(){
start_func ${FUNCNAME[0]} "Command Injection checks"
[ -s "gf/rce.txt" ] && cat gf/rce.txt | qsreplace FUZZ | sed '/FUZZ/!d' | anew -q .tmp/tmp_rce.txt
if [ "$DEEP" = true ] || [[ $(cat .tmp/tmp_rce.txt | wc -l) -le $DEEP_LIMIT ]]; then
- [ -s ".tmp/tmp_rce.txt" ] && python3 $tools/commix/commix.py --batch -m .tmp/tmp_rce.txt --output-dir vulns/command_injection.txt 2>>"$LOGFILE" &>/dev/null
+ [ -s ".tmp/tmp_rce.txt" ] && python3 $tools/commix/commix.py --batch -m .tmp/tmp_rce.txt --output-dir vulns/command_injection.txt 2>>"$LOGFILE" >/dev/null
end_func "Results are saved in vulns/command_injection folder" ${FUNCNAME[0]}
else
end_func "Skipping Command injection: Too many URLs to test, try with --deep flag" ${FUNCNAME[0]}
@@ -2044,16 +2047,16 @@ function sendToNotify {
notification "Sending ${domain} data over Telegram" info
telegram_chat_id=$(cat ${NOTIFY_CONFIG} | grep '^ telegram_chat_id\|^telegram_chat_id\|^ telegram_chat_id' | xargs | cut -d' ' -f2)
telegram_key=$(cat ${NOTIFY_CONFIG} | grep '^ telegram_api_key\|^telegram_api_key\|^ telegram_apikey' | xargs | cut -d' ' -f2 )
- curl -F document=@${1} "https://api.telegram.org/bot${telegram_key}/sendDocument?chat_id=${telegram_chat_id}" 2>>"$LOGFILE" &>/dev/null
+ curl -F document=@${1} "https://api.telegram.org/bot${telegram_key}/sendDocument?chat_id=${telegram_chat_id}" 2>>"$LOGFILE" >/dev/null
fi
if grep -q '^ discord\|^discord\|^ discord' $NOTIFY_CONFIG ; then
notification "Sending ${domain} data over Discord" info
discord_url=$(cat ${NOTIFY_CONFIG} | grep '^ discord_webhook_url\|^discord_webhook_url\|^ discord_webhook_url' | xargs | cut -d' ' -f2)
- curl -v -i -H "Accept: application/json" -H "Content-Type: multipart/form-data" -X POST -F file1=@${1} $discord_url 2>>"$LOGFILE" &>/dev/null
+ curl -v -i -H "Accept: application/json" -H "Content-Type: multipart/form-data" -X POST -F file1=@${1} $discord_url 2>>"$LOGFILE" >/dev/null
fi
if [[ -n "$slack_channel" ]] && [[ -n "$slack_auth" ]]; then
notification "Sending ${domain} data over Slack" info
- curl -F file=@${1} -F "initial_comment=reconftw zip file" -F channels=${slack_channel} -H "Authorization: Bearer ${slack_auth}" https://slack.com/api/files.upload 2>>"$LOGFILE" &>/dev/null
+ curl -F file=@${1} -F "initial_comment=reconftw zip file" -F channels=${slack_channel} -H "Authorization: Bearer ${slack_auth}" https://slack.com/api/files.upload 2>>"$LOGFILE" >/dev/null
fi
fi
}
@@ -2099,10 +2102,10 @@ function resolvers_update(){
if [ ! -s "$resolvers" ] || [[ $(find "$resolvers" -mtime +1 -print) ]] ; then
notification "Resolvers seem older than 1 day\n Generating custom resolvers..." warn
eval rm -f $resolvers 2>>"$LOGFILE"
- dnsvalidator -tL https://public-dns.info/nameservers.txt -threads $DNSVALIDATOR_THREADS -o $resolvers 2>>"$LOGFILE" &>/dev/null
- dnsvalidator -tL https://raw.githubusercontent.com/blechschmidt/massdns/master/lists/resolvers.txt -threads $DNSVALIDATOR_THREADS -o tmp_resolvers 2>>"$LOGFILE" &>/dev/null
+ dnsvalidator -tL https://public-dns.info/nameservers.txt -threads $DNSVALIDATOR_THREADS -o $resolvers 2>>"$LOGFILE" >/dev/null
+ dnsvalidator -tL https://raw.githubusercontent.com/blechschmidt/massdns/master/lists/resolvers.txt -threads $DNSVALIDATOR_THREADS -o tmp_resolvers 2>>"$LOGFILE" >/dev/null
[ -s "tmp_resolvers" ] && cat tmp_resolvers | anew -q $resolvers
- [ -s "tmp_resolvers" ] && rm -f tmp_resolvers 2>>"$LOGFILE" &>/dev/null
+ [ -s "tmp_resolvers" ] && rm -f tmp_resolvers 2>>"$LOGFILE" >/dev/null
[ ! -s "$resolvers" ] && wget -q -O - ${resolvers_url} > $resolvers
[ ! -s "$resolvers_trusted" ] && wget -q -O - ${resolvers_trusted_url} > $resolvers_trusted
notification "Updated\n" good
@@ -2111,8 +2114,8 @@ function resolvers_update(){
notification "Checking resolvers lists...\n Accurate resolvers are the key to great results\n This may take around 10 minutes if it's not updated" warn
# shellcheck disable=SC2016
axiom-exec 'if [ $(find "/home/op/lists/resolvers.txt" -mtime +1 -print) ] || [ $(cat /home/op/lists/resolvers.txt | wc -l) -le 40 ] ; then dnsvalidator -tL https://public-dns.info/nameservers.txt -threads 200 -o /home/op/lists/resolvers.txt ; fi' &>/dev/null
- axiom-exec "wget -q -O - ${resolvers_url} > /home/op/lists/resolvers.txt" 2>>"$LOGFILE" &>/dev/null
- axiom-exec "wget -q -O - ${resolvers_trusted_url} > /home/op/lists/resolvers_trusted.txt" 2>>"$LOGFILE" &>/dev/null
+ axiom-exec "wget -q -O - ${resolvers_url} > /home/op/lists/resolvers.txt" 2>>"$LOGFILE" >/dev/null
+ axiom-exec "wget -q -O - ${resolvers_trusted_url} > /home/op/lists/resolvers_trusted.txt" 2>>"$LOGFILE" >/dev/null
notification "Updated\n" good
fi
generate_resolvers=false
@@ -2135,8 +2138,8 @@ function resolvers_update_quick_local(){
}
function resolvers_update_quick_axiom(){
- axiom-exec "wget -q -O - ${resolvers_url} > /home/op/lists/resolvers.txt" 2>>"$LOGFILE" &>/dev/null
- axiom-exec "wget -q -O - ${resolvers_trusted_url} > /home/op/lists/resolvers_trusted.txt" 2>>"$LOGFILE" &>/dev/null
+ axiom-exec "wget -q -O - ${resolvers_url} > /home/op/lists/resolvers.txt" 2>>"$LOGFILE" >/dev/null
+ axiom-exec "wget -q -O - ${resolvers_trusted_url} > /home/op/lists/resolvers_trusted.txt" 2>>"$LOGFILE" >/dev/null
}
function ipcidr_target(){
@@ -2163,7 +2166,7 @@ function axiom_lauch(){
# let's fire up a FLEET!
if [ "$AXIOM_FLEET_LAUNCH" = true ] && [ -n "$AXIOM_FLEET_NAME" ] && [ -n "$AXIOM_FLEET_COUNT" ]; then
start_func ${FUNCNAME[0]} "Launching our Axiom fleet"
- python3 -m pip install --upgrade linode-cli 2>>"$LOGFILE" &>/dev/null
+ python3 -m pip install --upgrade linode-cli 2>>"$LOGFILE" >/dev/null
# Check to see if we have a fleet already, if so, SKIP THIS!
NUMOFNODES=$(timeout 30 axiom-ls | grep -c "$AXIOM_FLEET_NAME")
if [[ $NUMOFNODES -ge $AXIOM_FLEET_COUNT ]]; then
@@ -2183,7 +2186,7 @@ function axiom_lauch(){
axiom-fleet ${AXIOM_FLEET_NAME} ${AXIOM_ARGS}
axiom-select "$AXIOM_FLEET_NAME*"
if [ -n "$AXIOM_POST_START" ]; then
- eval "$AXIOM_POST_START" 2>>"$LOGFILE" &>/dev/null
+ eval "$AXIOM_POST_START" 2>>"$LOGFILE" >/dev/null
fi
NUMOFNODES=$(timeout 30 axiom-ls | grep -c "$AXIOM_FLEET_NAME" )
@@ -2286,14 +2289,14 @@ function start(){
function end(){
- find $dir -type f -empty -print | grep -v '.called_fn' | grep -v '.log' | grep -v '.tmp' | xargs rm -f 2>>"$LOGFILE" &>/dev/null
- find $dir -type d -empty -print -delete 2>>"$LOGFILE" &>/dev/null
+ find $dir -type f -empty -print | grep -v '.called_fn' | grep -v '.log' | grep -v '.tmp' | xargs rm -f 2>>"$LOGFILE" >/dev/null
+ find $dir -type d -empty -print -delete 2>>"$LOGFILE" >/dev/null
echo "End $(date +"%F") $(date +"%T")" >> "${LOGFILE}"
if [ ! "$PRESERVE" = true ]; then
- find $dir -type f -empty | grep -v "called_fn" | xargs rm -f 2>>"$LOGFILE" &>/dev/null
- find $dir -type d -empty | grep -v "called_fn" | xargs rm -rf 2>>"$LOGFILE" &>/dev/null
+ find $dir -type f -empty | grep -v "called_fn" | xargs rm -f 2>>"$LOGFILE" >/dev/null
+ find $dir -type d -empty | grep -v "called_fn" | xargs rm -rf 2>>"$LOGFILE" >/dev/null
fi
if [ "$REMOVETMP" = true ]; then
@@ -2618,9 +2621,9 @@ function multi_recon(){
NUMOFLINES_webs_total=$(find . -type f -name 'webs_uncommon_ports.txt' -exec cat {} + | anew webs/webs_uncommon_ports.txt | sed '/^$/d' | wc -l)
NUMOFLINES_ips_total=$(find . -type f -name 'ips.txt' -exec cat {} + | anew hosts/ips.txt | sed '/^$/d' | wc -l)
NUMOFLINES_cloudsprov_total=$(find . -type f -name 'cdn_providers.txt' -exec cat {} + | anew hosts/cdn_providers.txt | sed '/^$/d' | wc -l)
- find . -type f -name 'portscan_active.txt' -exec cat {} + > hosts/portscan_active.txt 2>>"$LOGFILE" &>/dev/null
- find . -type f -name 'portscan_active.gnmap' -exec cat {} + > hosts/portscan_active.gnmap 2>>"$LOGFILE" &>/dev/null
- find . -type f -name 'portscan_passive.txt' -exec cat {} + > hosts/portscan_passive.txt 2>>"$LOGFILE" &>/dev/null
+ find . -type f -name 'portscan_active.txt' -exec cat {} + | tee hosts/portscan_active.txt 2>>"$LOGFILE" >/dev/null
+ find . -type f -name 'portscan_active.gnmap' -exec cat {} + | tee hosts/portscan_active.gnmap 2>>"$LOGFILE" >/dev/null
+ find . -type f -name 'portscan_passive.txt' -exec cat {} + | tee hosts/portscan_passive.txt 2>>"$LOGFILE" >/dev/null
notification "- ${NUMOFLINES_users_total} total users found" good
notification "- ${NUMOFLINES_pwndb_total} total creds leaked" good
@@ -3128,7 +3131,7 @@ case $opt_mode in
fi
if [ -n "$list" ]; then
sed -i 's/\r$//' $list
- for domain in $(cat $list); do
+ while IFS= read -r domain; do
start
osint
end
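A note on the redirection pattern this patch fixes: Bash processes redirections left to right, so in the old form `cmd 2>>"$LOGFILE" &>/dev/null` the trailing `&>` re-points both stdout and stderr at /dev/null, and the errors never reach the log. The new form `cmd 2>>"$LOGFILE" >/dev/null` keeps stderr in the log and discards only stdout. A minimal sketch of the difference (illustrative only; run.log and the failing ls are placeholders, not part of reconftw):

    #!/usr/bin/env bash
    LOGFILE=run.log
    : > "$LOGFILE"                             # start with an empty log

    # Old form: '&>' re-redirects BOTH streams, so the error is lost.
    ls /nonexistent 2>>"$LOGFILE" &>/dev/null
    wc -l < "$LOGFILE"                         # prints 0

    # New form: stderr stays in the log, only stdout is discarded.
    ls /nonexistent 2>>"$LOGFILE" >/dev/null
    wc -l < "$LOGFILE"                         # prints 1 (the ls error line)

The same left-to-right rule motivates the tee pipelines in multi_recon above: `cat ... > hosts/file 2>>"$LOGFILE" &>/dev/null` truncated the target file and then sent the aggregated output to /dev/null, whereas `cat ... | tee hosts/file 2>>"$LOGFILE" >/dev/null` actually writes the file while keeping errors in the log. Likewise, the `while IFS= read -r domain` loop consumes the target list line by line, avoiding the word splitting and globbing that `for domain in $(cat $list)` applies to every entry.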
From 6d0d751e5b42ad659aa6921edb58f8035dd47fc6 Mon Sep 17 00:00:00 2001
From: six2dez
Date: Thu, 8 Jun 2023 12:37:00 +0200
Subject: [PATCH 23/40] changed license to MIT
---
LICENSE | 696 ++------------------------------------------------------
1 file changed, 22 insertions(+), 674 deletions(-)
diff --git a/LICENSE b/LICENSE
index f288702d..ec6bce53 100644
--- a/LICENSE
+++ b/LICENSE
@@ -1,674 +1,22 @@
- GNU GENERAL PUBLIC LICENSE
- Version 3, 29 June 2007
-
- Copyright (C) 2007 Free Software Foundation, Inc. <https://fsf.org/>
- Everyone is permitted to copy and distribute verbatim copies
- of this license document, but changing it is not allowed.
-
- Preamble
-
- The GNU General Public License is a free, copyleft license for
-software and other kinds of works.
-
- The licenses for most software and other practical works are designed
-to take away your freedom to share and change the works. By contrast,
-the GNU General Public License is intended to guarantee your freedom to
-share and change all versions of a program--to make sure it remains free
-software for all its users. We, the Free Software Foundation, use the
-GNU General Public License for most of our software; it applies also to
-any other work released this way by its authors. You can apply it to
-your programs, too.
-
- When we speak of free software, we are referring to freedom, not
-price. Our General Public Licenses are designed to make sure that you
-have the freedom to distribute copies of free software (and charge for
-them if you wish), that you receive source code or can get it if you
-want it, that you can change the software or use pieces of it in new
-free programs, and that you know you can do these things.
-
- To protect your rights, we need to prevent others from denying you
-these rights or asking you to surrender the rights. Therefore, you have
-certain responsibilities if you distribute copies of the software, or if
-you modify it: responsibilities to respect the freedom of others.
-
- For example, if you distribute copies of such a program, whether
-gratis or for a fee, you must pass on to the recipients the same
-freedoms that you received. You must make sure that they, too, receive
-or can get the source code. And you must show them these terms so they
-know their rights.
-
- Developers that use the GNU GPL protect your rights with two steps:
-(1) assert copyright on the software, and (2) offer you this License
-giving you legal permission to copy, distribute and/or modify it.
-
- For the developers' and authors' protection, the GPL clearly explains
-that there is no warranty for this free software. For both users' and
-authors' sake, the GPL requires that modified versions be marked as
-changed, so that their problems will not be attributed erroneously to
-authors of previous versions.
-
- Some devices are designed to deny users access to install or run
-modified versions of the software inside them, although the manufacturer
-can do so. This is fundamentally incompatible with the aim of
-protecting users' freedom to change the software. The systematic
-pattern of such abuse occurs in the area of products for individuals to
-use, which is precisely where it is most unacceptable. Therefore, we
-have designed this version of the GPL to prohibit the practice for those
-products. If such problems arise substantially in other domains, we
-stand ready to extend this provision to those domains in future versions
-of the GPL, as needed to protect the freedom of users.
-
- Finally, every program is threatened constantly by software patents.
-States should not allow patents to restrict development and use of
-software on general-purpose computers, but in those that do, we wish to
-avoid the special danger that patents applied to a free program could
-make it effectively proprietary. To prevent this, the GPL assures that
-patents cannot be used to render the program non-free.
-
- The precise terms and conditions for copying, distribution and
-modification follow.
-
- TERMS AND CONDITIONS
-
- 0. Definitions.
-
- "This License" refers to version 3 of the GNU General Public License.
-
- "Copyright" also means copyright-like laws that apply to other kinds of
-works, such as semiconductor masks.
-
- "The Program" refers to any copyrightable work licensed under this
-License. Each licensee is addressed as "you". "Licensees" and
-"recipients" may be individuals or organizations.
-
- To "modify" a work means to copy from or adapt all or part of the work
-in a fashion requiring copyright permission, other than the making of an
-exact copy. The resulting work is called a "modified version" of the
-earlier work or a work "based on" the earlier work.
-
- A "covered work" means either the unmodified Program or a work based
-on the Program.
-
- To "propagate" a work means to do anything with it that, without
-permission, would make you directly or secondarily liable for
-infringement under applicable copyright law, except executing it on a
-computer or modifying a private copy. Propagation includes copying,
-distribution (with or without modification), making available to the
-public, and in some countries other activities as well.
-
- To "convey" a work means any kind of propagation that enables other
-parties to make or receive copies. Mere interaction with a user through
-a computer network, with no transfer of a copy, is not conveying.
-
- An interactive user interface displays "Appropriate Legal Notices"
-to the extent that it includes a convenient and prominently visible
-feature that (1) displays an appropriate copyright notice, and (2)
-tells the user that there is no warranty for the work (except to the
-extent that warranties are provided), that licensees may convey the
-work under this License, and how to view a copy of this License. If
-the interface presents a list of user commands or options, such as a
-menu, a prominent item in the list meets this criterion.
-
- 1. Source Code.
-
- The "source code" for a work means the preferred form of the work
-for making modifications to it. "Object code" means any non-source
-form of a work.
-
- A "Standard Interface" means an interface that either is an official
-standard defined by a recognized standards body, or, in the case of
-interfaces specified for a particular programming language, one that
-is widely used among developers working in that language.
-
- The "System Libraries" of an executable work include anything, other
-than the work as a whole, that (a) is included in the normal form of
-packaging a Major Component, but which is not part of that Major
-Component, and (b) serves only to enable use of the work with that
-Major Component, or to implement a Standard Interface for which an
-implementation is available to the public in source code form. A
-"Major Component", in this context, means a major essential component
-(kernel, window system, and so on) of the specific operating system
-(if any) on which the executable work runs, or a compiler used to
-produce the work, or an object code interpreter used to run it.
-
- The "Corresponding Source" for a work in object code form means all
-the source code needed to generate, install, and (for an executable
-work) run the object code and to modify the work, including scripts to
-control those activities. However, it does not include the work's
-System Libraries, or general-purpose tools or generally available free
-programs which are used unmodified in performing those activities but
-which are not part of the work. For example, Corresponding Source
-includes interface definition files associated with source files for
-the work, and the source code for shared libraries and dynamically
-linked subprograms that the work is specifically designed to require,
-such as by intimate data communication or control flow between those
-subprograms and other parts of the work.
-
- The Corresponding Source need not include anything that users
-can regenerate automatically from other parts of the Corresponding
-Source.
-
- The Corresponding Source for a work in source code form is that
-same work.
-
- 2. Basic Permissions.
-
- All rights granted under this License are granted for the term of
-copyright on the Program, and are irrevocable provided the stated
-conditions are met. This License explicitly affirms your unlimited
-permission to run the unmodified Program. The output from running a
-covered work is covered by this License only if the output, given its
-content, constitutes a covered work. This License acknowledges your
-rights of fair use or other equivalent, as provided by copyright law.
-
- You may make, run and propagate covered works that you do not
-convey, without conditions so long as your license otherwise remains
-in force. You may convey covered works to others for the sole purpose
-of having them make modifications exclusively for you, or provide you
-with facilities for running those works, provided that you comply with
-the terms of this License in conveying all material for which you do
-not control copyright. Those thus making or running the covered works
-for you must do so exclusively on your behalf, under your direction
-and control, on terms that prohibit them from making any copies of
-your copyrighted material outside their relationship with you.
-
- Conveying under any other circumstances is permitted solely under
-the conditions stated below. Sublicensing is not allowed; section 10
-makes it unnecessary.
-
- 3. Protecting Users' Legal Rights From Anti-Circumvention Law.
-
- No covered work shall be deemed part of an effective technological
-measure under any applicable law fulfilling obligations under article
-11 of the WIPO copyright treaty adopted on 20 December 1996, or
-similar laws prohibiting or restricting circumvention of such
-measures.
-
- When you convey a covered work, you waive any legal power to forbid
-circumvention of technological measures to the extent such circumvention
-is effected by exercising rights under this License with respect to
-the covered work, and you disclaim any intention to limit operation or
-modification of the work as a means of enforcing, against the work's
-users, your or third parties' legal rights to forbid circumvention of
-technological measures.
-
- 4. Conveying Verbatim Copies.
-
- You may convey verbatim copies of the Program's source code as you
-receive it, in any medium, provided that you conspicuously and
-appropriately publish on each copy an appropriate copyright notice;
-keep intact all notices stating that this License and any
-non-permissive terms added in accord with section 7 apply to the code;
-keep intact all notices of the absence of any warranty; and give all
-recipients a copy of this License along with the Program.
-
- You may charge any price or no price for each copy that you convey,
-and you may offer support or warranty protection for a fee.
-
- 5. Conveying Modified Source Versions.
-
- You may convey a work based on the Program, or the modifications to
-produce it from the Program, in the form of source code under the
-terms of section 4, provided that you also meet all of these conditions:
-
- a) The work must carry prominent notices stating that you modified
- it, and giving a relevant date.
-
- b) The work must carry prominent notices stating that it is
- released under this License and any conditions added under section
- 7. This requirement modifies the requirement in section 4 to
- "keep intact all notices".
-
- c) You must license the entire work, as a whole, under this
- License to anyone who comes into possession of a copy. This
- License will therefore apply, along with any applicable section 7
- additional terms, to the whole of the work, and all its parts,
- regardless of how they are packaged. This License gives no
- permission to license the work in any other way, but it does not
- invalidate such permission if you have separately received it.
-
- d) If the work has interactive user interfaces, each must display
- Appropriate Legal Notices; however, if the Program has interactive
- interfaces that do not display Appropriate Legal Notices, your
- work need not make them do so.
-
- A compilation of a covered work with other separate and independent
-works, which are not by their nature extensions of the covered work,
-and which are not combined with it such as to form a larger program,
-in or on a volume of a storage or distribution medium, is called an
-"aggregate" if the compilation and its resulting copyright are not
-used to limit the access or legal rights of the compilation's users
-beyond what the individual works permit. Inclusion of a covered work
-in an aggregate does not cause this License to apply to the other
-parts of the aggregate.
-
- 6. Conveying Non-Source Forms.
-
- You may convey a covered work in object code form under the terms
-of sections 4 and 5, provided that you also convey the
-machine-readable Corresponding Source under the terms of this License,
-in one of these ways:
-
- a) Convey the object code in, or embodied in, a physical product
- (including a physical distribution medium), accompanied by the
- Corresponding Source fixed on a durable physical medium
- customarily used for software interchange.
-
- b) Convey the object code in, or embodied in, a physical product
- (including a physical distribution medium), accompanied by a
- written offer, valid for at least three years and valid for as
- long as you offer spare parts or customer support for that product
- model, to give anyone who possesses the object code either (1) a
- copy of the Corresponding Source for all the software in the
- product that is covered by this License, on a durable physical
- medium customarily used for software interchange, for a price no
- more than your reasonable cost of physically performing this
- conveying of source, or (2) access to copy the
- Corresponding Source from a network server at no charge.
-
- c) Convey individual copies of the object code with a copy of the
- written offer to provide the Corresponding Source. This
- alternative is allowed only occasionally and noncommercially, and
- only if you received the object code with such an offer, in accord
- with subsection 6b.
-
- d) Convey the object code by offering access from a designated
- place (gratis or for a charge), and offer equivalent access to the
- Corresponding Source in the same way through the same place at no
- further charge. You need not require recipients to copy the
- Corresponding Source along with the object code. If the place to
- copy the object code is a network server, the Corresponding Source
- may be on a different server (operated by you or a third party)
- that supports equivalent copying facilities, provided you maintain
- clear directions next to the object code saying where to find the
- Corresponding Source. Regardless of what server hosts the
- Corresponding Source, you remain obligated to ensure that it is
- available for as long as needed to satisfy these requirements.
-
- e) Convey the object code using peer-to-peer transmission, provided
- you inform other peers where the object code and Corresponding
- Source of the work are being offered to the general public at no
- charge under subsection 6d.
-
- A separable portion of the object code, whose source code is excluded
-from the Corresponding Source as a System Library, need not be
-included in conveying the object code work.
-
- A "User Product" is either (1) a "consumer product", which means any
-tangible personal property which is normally used for personal, family,
-or household purposes, or (2) anything designed or sold for incorporation
-into a dwelling. In determining whether a product is a consumer product,
-doubtful cases shall be resolved in favor of coverage. For a particular
-product received by a particular user, "normally used" refers to a
-typical or common use of that class of product, regardless of the status
-of the particular user or of the way in which the particular user
-actually uses, or expects or is expected to use, the product. A product
-is a consumer product regardless of whether the product has substantial
-commercial, industrial or non-consumer uses, unless such uses represent
-the only significant mode of use of the product.
-
- "Installation Information" for a User Product means any methods,
-procedures, authorization keys, or other information required to install
-and execute modified versions of a covered work in that User Product from
-a modified version of its Corresponding Source. The information must
-suffice to ensure that the continued functioning of the modified object
-code is in no case prevented or interfered with solely because
-modification has been made.
-
- If you convey an object code work under this section in, or with, or
-specifically for use in, a User Product, and the conveying occurs as
-part of a transaction in which the right of possession and use of the
-User Product is transferred to the recipient in perpetuity or for a
-fixed term (regardless of how the transaction is characterized), the
-Corresponding Source conveyed under this section must be accompanied
-by the Installation Information. But this requirement does not apply
-if neither you nor any third party retains the ability to install
-modified object code on the User Product (for example, the work has
-been installed in ROM).
-
- The requirement to provide Installation Information does not include a
-requirement to continue to provide support service, warranty, or updates
-for a work that has been modified or installed by the recipient, or for
-the User Product in which it has been modified or installed. Access to a
-network may be denied when the modification itself materially and
-adversely affects the operation of the network or violates the rules and
-protocols for communication across the network.
-
- Corresponding Source conveyed, and Installation Information provided,
-in accord with this section must be in a format that is publicly
-documented (and with an implementation available to the public in
-source code form), and must require no special password or key for
-unpacking, reading or copying.
-
- 7. Additional Terms.
-
- "Additional permissions" are terms that supplement the terms of this
-License by making exceptions from one or more of its conditions.
-Additional permissions that are applicable to the entire Program shall
-be treated as though they were included in this License, to the extent
-that they are valid under applicable law. If additional permissions
-apply only to part of the Program, that part may be used separately
-under those permissions, but the entire Program remains governed by
-this License without regard to the additional permissions.
-
- When you convey a copy of a covered work, you may at your option
-remove any additional permissions from that copy, or from any part of
-it. (Additional permissions may be written to require their own
-removal in certain cases when you modify the work.) You may place
-additional permissions on material, added by you to a covered work,
-for which you have or can give appropriate copyright permission.
-
- Notwithstanding any other provision of this License, for material you
-add to a covered work, you may (if authorized by the copyright holders of
-that material) supplement the terms of this License with terms:
-
- a) Disclaiming warranty or limiting liability differently from the
- terms of sections 15 and 16 of this License; or
-
- b) Requiring preservation of specified reasonable legal notices or
- author attributions in that material or in the Appropriate Legal
- Notices displayed by works containing it; or
-
- c) Prohibiting misrepresentation of the origin of that material, or
- requiring that modified versions of such material be marked in
- reasonable ways as different from the original version; or
-
- d) Limiting the use for publicity purposes of names of licensors or
- authors of the material; or
-
- e) Declining to grant rights under trademark law for use of some
- trade names, trademarks, or service marks; or
-
- f) Requiring indemnification of licensors and authors of that
- material by anyone who conveys the material (or modified versions of
- it) with contractual assumptions of liability to the recipient, for
- any liability that these contractual assumptions directly impose on
- those licensors and authors.
-
- All other non-permissive additional terms are considered "further
-restrictions" within the meaning of section 10. If the Program as you
-received it, or any part of it, contains a notice stating that it is
-governed by this License along with a term that is a further
-restriction, you may remove that term. If a license document contains
-a further restriction but permits relicensing or conveying under this
-License, you may add to a covered work material governed by the terms
-of that license document, provided that the further restriction does
-not survive such relicensing or conveying.
-
- If you add terms to a covered work in accord with this section, you
-must place, in the relevant source files, a statement of the
-additional terms that apply to those files, or a notice indicating
-where to find the applicable terms.
-
- Additional terms, permissive or non-permissive, may be stated in the
-form of a separately written license, or stated as exceptions;
-the above requirements apply either way.
-
- 8. Termination.
-
- You may not propagate or modify a covered work except as expressly
-provided under this License. Any attempt otherwise to propagate or
-modify it is void, and will automatically terminate your rights under
-this License (including any patent licenses granted under the third
-paragraph of section 11).
-
- However, if you cease all violation of this License, then your
-license from a particular copyright holder is reinstated (a)
-provisionally, unless and until the copyright holder explicitly and
-finally terminates your license, and (b) permanently, if the copyright
-holder fails to notify you of the violation by some reasonable means
-prior to 60 days after the cessation.
-
- Moreover, your license from a particular copyright holder is
-reinstated permanently if the copyright holder notifies you of the
-violation by some reasonable means, this is the first time you have
-received notice of violation of this License (for any work) from that
-copyright holder, and you cure the violation prior to 30 days after
-your receipt of the notice.
-
- Termination of your rights under this section does not terminate the
-licenses of parties who have received copies or rights from you under
-this License. If your rights have been terminated and not permanently
-reinstated, you do not qualify to receive new licenses for the same
-material under section 10.
-
- 9. Acceptance Not Required for Having Copies.
-
- You are not required to accept this License in order to receive or
-run a copy of the Program. Ancillary propagation of a covered work
-occurring solely as a consequence of using peer-to-peer transmission
-to receive a copy likewise does not require acceptance. However,
-nothing other than this License grants you permission to propagate or
-modify any covered work. These actions infringe copyright if you do
-not accept this License. Therefore, by modifying or propagating a
-covered work, you indicate your acceptance of this License to do so.
-
- 10. Automatic Licensing of Downstream Recipients.
-
- Each time you convey a covered work, the recipient automatically
-receives a license from the original licensors, to run, modify and
-propagate that work, subject to this License. You are not responsible
-for enforcing compliance by third parties with this License.
-
- An "entity transaction" is a transaction transferring control of an
-organization, or substantially all assets of one, or subdividing an
-organization, or merging organizations. If propagation of a covered
-work results from an entity transaction, each party to that
-transaction who receives a copy of the work also receives whatever
-licenses to the work the party's predecessor in interest had or could
-give under the previous paragraph, plus a right to possession of the
-Corresponding Source of the work from the predecessor in interest, if
-the predecessor has it or can get it with reasonable efforts.
-
- You may not impose any further restrictions on the exercise of the
-rights granted or affirmed under this License. For example, you may
-not impose a license fee, royalty, or other charge for exercise of
-rights granted under this License, and you may not initiate litigation
-(including a cross-claim or counterclaim in a lawsuit) alleging that
-any patent claim is infringed by making, using, selling, offering for
-sale, or importing the Program or any portion of it.
-
- 11. Patents.
-
- A "contributor" is a copyright holder who authorizes use under this
-License of the Program or a work on which the Program is based. The
-work thus licensed is called the contributor's "contributor version".
-
- A contributor's "essential patent claims" are all patent claims
-owned or controlled by the contributor, whether already acquired or
-hereafter acquired, that would be infringed by some manner, permitted
-by this License, of making, using, or selling its contributor version,
-but do not include claims that would be infringed only as a
-consequence of further modification of the contributor version. For
-purposes of this definition, "control" includes the right to grant
-patent sublicenses in a manner consistent with the requirements of
-this License.
-
- Each contributor grants you a non-exclusive, worldwide, royalty-free
-patent license under the contributor's essential patent claims, to
-make, use, sell, offer for sale, import and otherwise run, modify and
-propagate the contents of its contributor version.
-
- In the following three paragraphs, a "patent license" is any express
-agreement or commitment, however denominated, not to enforce a patent
-(such as an express permission to practice a patent or covenant not to
-sue for patent infringement). To "grant" such a patent license to a
-party means to make such an agreement or commitment not to enforce a
-patent against the party.
-
- If you convey a covered work, knowingly relying on a patent license,
-and the Corresponding Source of the work is not available for anyone
-to copy, free of charge and under the terms of this License, through a
-publicly available network server or other readily accessible means,
-then you must either (1) cause the Corresponding Source to be so
-available, or (2) arrange to deprive yourself of the benefit of the
-patent license for this particular work, or (3) arrange, in a manner
-consistent with the requirements of this License, to extend the patent
-license to downstream recipients. "Knowingly relying" means you have
-actual knowledge that, but for the patent license, your conveying the
-covered work in a country, or your recipient's use of the covered work
-in a country, would infringe one or more identifiable patents in that
-country that you have reason to believe are valid.
-
- If, pursuant to or in connection with a single transaction or
-arrangement, you convey, or propagate by procuring conveyance of, a
-covered work, and grant a patent license to some of the parties
-receiving the covered work authorizing them to use, propagate, modify
-or convey a specific copy of the covered work, then the patent license
-you grant is automatically extended to all recipients of the covered
-work and works based on it.
-
- A patent license is "discriminatory" if it does not include within
-the scope of its coverage, prohibits the exercise of, or is
-conditioned on the non-exercise of one or more of the rights that are
-specifically granted under this License. You may not convey a covered
-work if you are a party to an arrangement with a third party that is
-in the business of distributing software, under which you make payment
-to the third party based on the extent of your activity of conveying
-the work, and under which the third party grants, to any of the
-parties who would receive the covered work from you, a discriminatory
-patent license (a) in connection with copies of the covered work
-conveyed by you (or copies made from those copies), or (b) primarily
-for and in connection with specific products or compilations that
-contain the covered work, unless you entered into that arrangement,
-or that patent license was granted, prior to 28 March 2007.
-
- Nothing in this License shall be construed as excluding or limiting
-any implied license or other defenses to infringement that may
-otherwise be available to you under applicable patent law.
-
- 12. No Surrender of Others' Freedom.
-
- If conditions are imposed on you (whether by court order, agreement or
-otherwise) that contradict the conditions of this License, they do not
-excuse you from the conditions of this License. If you cannot convey a
-covered work so as to satisfy simultaneously your obligations under this
-License and any other pertinent obligations, then as a consequence you may
-not convey it at all. For example, if you agree to terms that obligate you
-to collect a royalty for further conveying from those to whom you convey
-the Program, the only way you could satisfy both those terms and this
-License would be to refrain entirely from conveying the Program.
-
- 13. Use with the GNU Affero General Public License.
-
- Notwithstanding any other provision of this License, you have
-permission to link or combine any covered work with a work licensed
-under version 3 of the GNU Affero General Public License into a single
-combined work, and to convey the resulting work. The terms of this
-License will continue to apply to the part which is the covered work,
-but the special requirements of the GNU Affero General Public License,
-section 13, concerning interaction through a network will apply to the
-combination as such.
-
- 14. Revised Versions of this License.
-
- The Free Software Foundation may publish revised and/or new versions of
-the GNU General Public License from time to time. Such new versions will
-be similar in spirit to the present version, but may differ in detail to
-address new problems or concerns.
-
- Each version is given a distinguishing version number. If the
-Program specifies that a certain numbered version of the GNU General
-Public License "or any later version" applies to it, you have the
-option of following the terms and conditions either of that numbered
-version or of any later version published by the Free Software
-Foundation. If the Program does not specify a version number of the
-GNU General Public License, you may choose any version ever published
-by the Free Software Foundation.
-
- If the Program specifies that a proxy can decide which future
-versions of the GNU General Public License can be used, that proxy's
-public statement of acceptance of a version permanently authorizes you
-to choose that version for the Program.
-
- Later license versions may give you additional or different
-permissions. However, no additional obligations are imposed on any
-author or copyright holder as a result of your choosing to follow a
-later version.
-
- 15. Disclaimer of Warranty.
-
- THERE IS NO WARRANTY FOR THE PROGRAM, TO THE EXTENT PERMITTED BY
-APPLICABLE LAW. EXCEPT WHEN OTHERWISE STATED IN WRITING THE COPYRIGHT
-HOLDERS AND/OR OTHER PARTIES PROVIDE THE PROGRAM "AS IS" WITHOUT WARRANTY
-OF ANY KIND, EITHER EXPRESSED OR IMPLIED, INCLUDING, BUT NOT LIMITED TO,
-THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
-PURPOSE. THE ENTIRE RISK AS TO THE QUALITY AND PERFORMANCE OF THE PROGRAM
-IS WITH YOU. SHOULD THE PROGRAM PROVE DEFECTIVE, YOU ASSUME THE COST OF
-ALL NECESSARY SERVICING, REPAIR OR CORRECTION.
-
- 16. Limitation of Liability.
-
- IN NO EVENT UNLESS REQUIRED BY APPLICABLE LAW OR AGREED TO IN WRITING
-WILL ANY COPYRIGHT HOLDER, OR ANY OTHER PARTY WHO MODIFIES AND/OR CONVEYS
-THE PROGRAM AS PERMITTED ABOVE, BE LIABLE TO YOU FOR DAMAGES, INCLUDING ANY
-GENERAL, SPECIAL, INCIDENTAL OR CONSEQUENTIAL DAMAGES ARISING OUT OF THE
-USE OR INABILITY TO USE THE PROGRAM (INCLUDING BUT NOT LIMITED TO LOSS OF
-DATA OR DATA BEING RENDERED INACCURATE OR LOSSES SUSTAINED BY YOU OR THIRD
-PARTIES OR A FAILURE OF THE PROGRAM TO OPERATE WITH ANY OTHER PROGRAMS),
-EVEN IF SUCH HOLDER OR OTHER PARTY HAS BEEN ADVISED OF THE POSSIBILITY OF
-SUCH DAMAGES.
-
- 17. Interpretation of Sections 15 and 16.
-
- If the disclaimer of warranty and limitation of liability provided
-above cannot be given local legal effect according to their terms,
-reviewing courts shall apply local law that most closely approximates
-an absolute waiver of all civil liability in connection with the
-Program, unless a warranty or assumption of liability accompanies a
-copy of the Program in return for a fee.
-
- END OF TERMS AND CONDITIONS
-
- How to Apply These Terms to Your New Programs
-
- If you develop a new program, and you want it to be of the greatest
-possible use to the public, the best way to achieve this is to make it
-free software which everyone can redistribute and change under these terms.
-
- To do so, attach the following notices to the program. It is safest
-to attach them to the start of each source file to most effectively
-state the exclusion of warranty; and each file should have at least
-the "copyright" line and a pointer to where the full notice is found.
-
- <one line to give the program's name and a brief idea of what it does.>
- Copyright (C) <year> <name of author>
-
- This program is free software: you can redistribute it and/or modify
- it under the terms of the GNU General Public License as published by
- the Free Software Foundation, either version 3 of the License, or
- (at your option) any later version.
-
- This program is distributed in the hope that it will be useful,
- but WITHOUT ANY WARRANTY; without even the implied warranty of
- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
- GNU General Public License for more details.
-
- You should have received a copy of the GNU General Public License
- along with this program. If not, see <https://www.gnu.org/licenses/>.
-
-Also add information on how to contact you by electronic and paper mail.
-
- If the program does terminal interaction, make it output a short
-notice like this when it starts in an interactive mode:
-
- <program> Copyright (C) <year> <name of author>
- This program comes with ABSOLUTELY NO WARRANTY; for details type `show w'.
- This is free software, and you are welcome to redistribute it
- under certain conditions; type `show c' for details.
-
-The hypothetical commands `show w' and `show c' should show the appropriate
-parts of the General Public License. Of course, your program's commands
-might be different; for a GUI interface, you would use an "about box".
-
- You should also get your employer (if you work as a programmer) or school,
-if any, to sign a "copyright disclaimer" for the program, if necessary.
-For more information on this, and how to apply and follow the GNU GPL, see
-<https://www.gnu.org/licenses/>.
-
- The GNU General Public License does not permit incorporating your program
-into proprietary programs. If your program is a subroutine library, you
-may consider it more useful to permit linking proprietary applications with
-the library. If this is what you want to do, use the GNU Lesser General
-Public License instead of this License. But first, please read
-<https://www.gnu.org/philosophy/why-not-lgpl.html>.
+MIT License
+
+Copyright (c) 2023 Alexis Fernández
+
+Permission is hereby granted, free of charge, to any person obtaining
+a copy of this software and associated documentation files (the
+"Software"), to deal in the Software without restriction, including
+without limitation the rights to use, copy, modify, merge, publish,
+distribute, sublicense, and/or sell copies of the Software, and to
+permit persons to whom the Software is furnished to do so, subject to
+the following conditions:
+
+The above copyright notice and this permission notice shall be
+included in all copies or substantial portions of the Software.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
+MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
+LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
+OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
+WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
\ No newline at end of file
From cc8e8253997493e2159aa3cbc7560560238a2d57 Mon Sep 17 00:00:00 2001
From: six2dez
Date: Thu, 8 Jun 2023 12:39:21 +0200
Subject: [PATCH 24/40] MIT license readme
---
README.md | 6 +++---
1 file changed, 3 insertions(+), 3 deletions(-)
diff --git a/README.md b/README.md
index 0a674413..c83c3a29 100644
--- a/README.md
+++ b/README.md
@@ -11,8 +11,8 @@
-
-
+
+
@@ -649,4 +649,4 @@ If you want to contribute to this project, you can do it in multiple ways:
Usage of this program for attacking targets without consent is illegal. It is the user's responsibility to obey all applicable laws. The developer assumes no liability and is not responsible for any misuse or damage caused by this program. Please use responsibly.
-The material contained in this repository is licensed under GNU GPLv3.
+The material contained in this repository is licensed under MIT.
From 492b2f93cb53cbe406f5732eb4691ae2e73b8dd7 Mon Sep 17 00:00:00 2001
From: six2dez
Date: Thu, 8 Jun 2023 15:22:12 +0200
Subject: [PATCH 25/40] Delete LICENSE
---
LICENSE | 22 ----------------------
1 file changed, 22 deletions(-)
delete mode 100644 LICENSE
diff --git a/LICENSE b/LICENSE
deleted file mode 100644
index ec6bce53..00000000
--- a/LICENSE
+++ /dev/null
@@ -1,22 +0,0 @@
-MIT License
-
-Copyright (c) 2023 Alexis Fernández
-
-Permission is hereby granted, free of charge, to any person obtaining
-a copy of this software and associated documentation files (the
-"Software"), to deal in the Software without restriction, including
-without limitation the rights to use, copy, modify, merge, publish,
-distribute, sublicense, and/or sell copies of the Software, and to
-permit persons to whom the Software is furnished to do so, subject to
-the following conditions:
-
-The above copyright notice and this permission notice shall be
-included in all copies or substantial portions of the Software.
-
-THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
-EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
-MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
-NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
-LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
-OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
-WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
\ No newline at end of file
From 1a4d3ebdae9fd72bd6bd1a57827621d303b906c0 Mon Sep 17 00:00:00 2001
From: six2dez
Date: Fri, 9 Jun 2023 09:24:56 +0200
Subject: [PATCH 26/40] h8mail removed from web
---
install.sh | 2 -
reconftw.sh | 25 ----
web/apikeys/config.py | 51 +-------
web/apikeys/views.py | 42 +------
web/scans/migrations/0001_initial.py | 1 -
web/scans/models.py | 1 -
web/scans/utils.py | 10 +-
web/templates/apikeys_settings.html | 171 ---------------------------
8 files changed, 3 insertions(+), 300 deletions(-)
diff --git a/install.sh b/install.sh
index c78e63ce..f1eff761 100755
--- a/install.sh
+++ b/install.sh
@@ -594,8 +594,6 @@ else
fi
fi
-#eval h8mail -g $DEBUG_STD
-
## Stripping all Go binaries
eval strip -s "$HOME"/go/bin/* $DEBUG_STD
diff --git a/reconftw.sh b/reconftw.sh
index e3cbeab6..88a332c1 100755
--- a/reconftw.sh
+++ b/reconftw.sh
@@ -228,31 +228,6 @@ function emails(){
cd "$dir" || { echo "Failed to cd to $dir in ${FUNCNAME[0]} @ line ${LINENO}"; exit 1; }
[ -s ".tmp/infoga.txt" ] && cat .tmp/infoga.txt | cut -d " " -f3 | grep -v "-" | anew -q osint/emails.txt
-# COMMENTED THEHARVESTER, H8MAIL AND PWNDB AS THEY'RE NOT WORKING AS EXPECTED
-# cd "$tools/theHarvester" || { echo "Failed to cd directory in ${FUNCNAME[0]} @ line ${LINENO}"; exit 1; }
-# python3 theHarvester.py -d $domain -b all -f $dir/.tmp/harvester.json 2>>"$LOGFILE" >/dev/null
-# cd "$dir" || { echo "Failed to cd to $dir in ${FUNCNAME[0]} @ line ${LINENO}"; exit 1; }
-# if [ -s ".tmp/harvester.json" ]; then
-# cat .tmp/harvester.json | jq -r 'try .emails[]' 2>/dev/null | anew -q osint/emails.txt
-# cat .tmp/harvester.json | jq -r 'try .linkedin_people[]' 2>/dev/null | anew -q osint/employees.txt
-# cat .tmp/harvester.json | jq -r 'try .linkedin_links[]' 2>/dev/null | anew -q osint/linkedin.txt
-# fi
-# h8mail -t $domain -q domain --loose -c $tools/h8mail_config.ini -j .tmp/h8_results.json 2>>"$LOGFILE" >/dev/null
-# [ -s ".tmp/h8_results.json" ] && cat .tmp/h8_results.json | jq -r '.targets[0] | .data[] | .[]' | awk '{print $12}' | anew -q osint/h8mail.txt
-#
-# PWNDB_STATUS=$(timeout 30s curl -Is --socks5-hostname localhost:9050 http://pwndb2am4tzkvold.onion | grep HTTP | cut -d ' ' -f2)
-#
-# if [ "$PWNDB_STATUS" = 200 ]; then
-# cd "$tools/pwndb" || { echo "Failed to cd directory in ${FUNCNAME[0]} @ line ${LINENO}"; exit 1; }
-# python3 pwndb.py --target "@${domain}" | sed '/^[-]/d' | anew -q $dir/osint/passwords.txt
-# cd "$dir" || { echo "Failed to cd directory in ${FUNCNAME[0]} @ line ${LINENO}"; exit 1; }
-# [ -s "osint/passwords.txt" ] && sed -r -i "s/\x1B\[([0-9]{1,3}(;[0-9]{1,2})?)?[mGK]//g" osint/passwords.txt
-# [ -s "osint/passwords.txt" ] && sed -i '1,2d' osint/passwords.txt
-# else
-# text="${yellow}\n pwndb is currently down :(\n\n Check xjypo5vzgmo7jca6b322dnqbsdnp3amd24ybx26x5nxbusccjkm4pwid.onion${reset}\n"
-# printf "${text}" && printf "${text}" | $NOTIFY
-# fi
-
end_func "Results are saved in $domain/osint/emails.txt" ${FUNCNAME[0]}
else
if [ "$EMAILS" = false ] || [ "$OSINT" = false ]; then
diff --git a/web/apikeys/config.py b/web/apikeys/config.py
index e08b4fc3..bf2e5b30 100644
--- a/web/apikeys/config.py
+++ b/web/apikeys/config.py
@@ -201,53 +201,4 @@ def theHarvesterConfig(name, key=None, get=None):
if result == None:
return ''
else:
- return result
-
-def h8mailConfig(name, key=None, get=None):
- file = str(Path.home())+"/Tools/h8mail_config.ini"
-
- lines = open(file, "r").readlines()
- sub=""
-
- if key != None:
- for line in lines:
- line = line.replace("\n", "")
- if name in line and key != "":
- sub = line
-
- very = line.split("=")[1].replace(" ", "")
-
- if key != very:
- while line[0] == ";":
- line = line.replace(";", "", 1)
-
- while line[0] == " ":
- line = line.replace(" ", "", 1)
-
-
- final = line.split("=")[0]+"= "+key
- replace = Path(file)
- replace.write_text(replace.read_text().replace(sub, final, 1))
- break
-
- elif name in line and key == "":
- final = ";"+name+" = "
- sub = line
-
-
- replace = Path(file)
- replace.write_text(replace.read_text().replace(sub, final, 1))
- break
- elif get == True:
- key = ""
- for line in lines:
-
- line = line.replace("\n", "")
- if name in line:
- sub = line
-
- key = line.split("=")[1].replace(" ", "")
-
- break
-
- return key
\ No newline at end of file
+ return result
\ No newline at end of file
diff --git a/web/apikeys/views.py b/web/apikeys/views.py
index 8567a52e..92326a13 100644
--- a/web/apikeys/views.py
+++ b/web/apikeys/views.py
@@ -1,8 +1,7 @@
from django.shortcuts import render
from django.contrib.auth.decorators import login_required
from editprofile.imgUser import imgUser
-from apikeys.config import amassConfig, ReconConfig, GithubConfig, h8mailConfig
-#from apikeys.config import amassConfig, ReconConfig, theHarvesterConfig, GithubConfig, h8mailConfig
+from apikeys.config import amassConfig, ReconConfig, GithubConfig
# Create your views here.
otherNames = {'passivedns': '360PassiveDNS', 'digicert': 'CertCentral', 'psbdmp':'Pastebin', 'rikiq':'PassiveTotal', 'quake360':'quake', 'cisco':'Umbrella', 'leaklookup_priv':'leak-lookup_priv', 'leaklookup_pub':'leak-lookup_pub'}
@@ -53,17 +52,6 @@ def conf(request):
# if name != "spyse":
# theHarvesterConfig(name, key=key)
- elif keys["type"][0] == "h8mail":
- del keys["type"]
-
- for key in keys:
- name = key
- key = key=keys[key][0]
- if name in otherNames:
- name = otherNames[name]
-
- h8mailConfig(name, key=key)
-
@login_required(login_url='/login/')
def index(request):
@@ -148,34 +136,6 @@ def index(request):
'token_5_value': GithubConfig('5', get=True),
'token_6_value': GithubConfig('6', get=True),
-# 'binaryedge_value': theHarvesterConfig("binaryedge", get=True),
-# 'bing_value': theHarvesterConfig("bing", get=True),
-# 'censys2_value': theHarvesterConfig("censys", get=True),
-# 'fullhunt2_value': theHarvesterConfig("fullhunt", get=True),
-# 'github2_value': theHarvesterConfig("github", get=True),
-# 'hunter2_value': theHarvesterConfig("hunter", get=True),
-# 'intelx2_value': theHarvesterConfig("intelx", get=True),
-# 'pentesttools2_value': theHarvesterConfig("pentestTools", get=True),
-# 'chaos2_value': theHarvesterConfig("chaos", get=True),
-# 'rocketreach_value': theHarvesterConfig("rocketreach", get=True),
-# 'securitytrails2_value': theHarvesterConfig("securityTrails", get=True),
-# 'shodan3_value': theHarvesterConfig("shodan", get=True),
-# 'zoomeye2_value': theHarvesterConfig("zoomeye", get=True),
-
- 'hunter3_value': h8mailConfig("hunter", get=True),
- 'snusbase_value': h8mailConfig("snusbase", get=True),
- 'emailrep_value': h8mailConfig("emailrep", get=True),
- 'intelx3_value': h8mailConfig("intelx", get=True),
- 'hibp_value': h8mailConfig("hibp", get=True),
- 'leaklookup_pub_value': h8mailConfig("leak-lookup_pub", get=True),
- 'leaklookup_priv_value': h8mailConfig("leak-lookup_priv", get=True),
- 'weleakinfo_priv_value': h8mailConfig("weleakinfo_priv", get=True),
- 'weleakinfo_pub_value': h8mailConfig("weleakinfo_pub", get=True),
- 'dehashed_email_value': h8mailConfig("dehashed_email", get=True),
- 'dehashed_key_value': h8mailConfig("dehashed_key", get=True),
- 'breachdirectory_user_value': h8mailConfig("breachdirectory_user", get=True),
- 'breachdirectory_pass_value': h8mailConfig("breachdirectory_pass", get=True),
-
"imagePath": imagePath,
"apikeys_settings": "API Keys Settings",
}
diff --git a/web/scans/migrations/0001_initial.py b/web/scans/migrations/0001_initial.py
index 73746898..e6197965 100644
--- a/web/scans/migrations/0001_initial.py
+++ b/web/scans/migrations/0001_initial.py
@@ -227,7 +227,6 @@ class Migration(migrations.Migration):
('id', models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('emails', models.TextField()),
('users', models.TextField()),
- ('h8mail', models.TextField()),
('passwords', models.TextField()),
('employees', models.TextField()),
('linkedin', models.TextField()),
diff --git a/web/scans/models.py b/web/scans/models.py
index b2211520..40fb5abc 100644
--- a/web/scans/models.py
+++ b/web/scans/models.py
@@ -363,7 +363,6 @@ def __str__(self):
class OSINTUsersInfo(models.Model):
emails = models.TextField()
users = models.TextField()
- h8mail = models.TextField()
passwords = models.TextField()
employees = models.TextField()
linkedin = models.TextField()
diff --git a/web/scans/utils.py b/web/scans/utils.py
index 90a67490..57bb4860 100644
--- a/web/scans/utils.py
+++ b/web/scans/utils.py
@@ -1316,13 +1316,6 @@ def osintusersinfo_f2db(project_id):
else:
users = 'N/A'
- if 'h8mail.txt' in ld:
- with open(f"{osintusers_path}/h8mail.txt") as f:
- h8mail = f.read()
- f.close()
- else:
- h8mail = 'N/A'
-
if 'passwords.txt' in ld:
with open(f"{osintusers_path}/passwords.txt") as f:
passwords = f.read()
@@ -1345,8 +1338,7 @@ def osintusersinfo_f2db(project_id):
linkedin = 'N/A'
- osintusers_save.create(emails=emails, users=users, h8mail=h8mail,
- passwords=passwords, employees=employees,
+ osintusers_save.create(emails=emails, users=users, passwords=passwords, employees=employees,
linkedin=linkedin, project_id=project_id)
diff --git a/web/templates/apikeys_settings.html b/web/templates/apikeys_settings.html
index 78e802d7..68a480cb 100644
--- a/web/templates/apikeys_settings.html
+++ b/web/templates/apikeys_settings.html
@@ -1742,177 +1742,6 @@ THE HARVESTER
-
-
From 2e6acaad1aad19342380cb05caf7ca233c1b7e1a Mon Sep 17 00:00:00 2001
From: Nikita Sveshnikov
Date: Wed, 14 Jun 2023 00:18:34 +0300
Subject: [PATCH 27/40] Fix timeout -k implementation error
---
reconftw.sh | 8 ++++----
1 file changed, 4 insertions(+), 4 deletions(-)
diff --git a/reconftw.sh b/reconftw.sh
index 821711c7..a7b72c8c 100755
--- a/reconftw.sh
+++ b/reconftw.sh
@@ -272,7 +272,7 @@ function domain_info(){
start_func ${FUNCNAME[0]} "Searching domain info (whois, registrant name/email domains)"
whois -H $domain > osint/domain_info_general.txt
if [ "$DEEP" = true ] || [ "$REVERSE_WHOIS" = true ]; then
- timeout -k $AMASS_INTEL_TIMEOUT amass intel -d ${domain} -whois -timeout $AMASS_INTEL_TIMEOUT -o osint/domain_info_reverse_whois.txt 2>>"$LOGFILE" &>/dev/null
+ timeout -k 1m ${AMASS_INTEL_TIMEOUT}m amass intel -d ${domain} -whois -timeout $AMASS_INTEL_TIMEOUT -o osint/domain_info_reverse_whois.txt 2>>"$LOGFILE" &>/dev/null
fi
end_func "Results are saved in $domain/osint/domain_info_[general/name/email/ip].txt" ${FUNCNAME[0]}
else
@@ -381,7 +381,7 @@ function sub_passive(){
if { [ ! -f "$called_fn_dir/.${FUNCNAME[0]}" ] || [ "$DIFF" = true ]; } && [ "$SUBPASSIVE" = true ]; then
start_subfunc ${FUNCNAME[0]} "Running : Passive Subdomain Enumeration"
if [ ! "$AXIOM" = true ]; then
- [[ $RUNAMASS == true ]] && timeout -k $AMASS_ENUM_TIMEOUT amass enum -passive -d $domain -config $AMASS_CONFIG -timeout $AMASS_ENUM_TIMEOUT -json .tmp/amass_json.json 2>>"$LOGFILE" &>/dev/null
+ [[ $RUNAMASS == true ]] && timeout -k 1m ${AMASS_ENUM_TIMEOUT}m amass enum -passive -d $domain -config $AMASS_CONFIG -timeout $AMASS_ENUM_TIMEOUT -json .tmp/amass_json.json 2>>"$LOGFILE" &>/dev/null
[ -s ".tmp/amass_json.json" ] && cat .tmp/amass_json.json | jq -r '.name' | anew -q .tmp/amass_psub.txt
[[ $RUNSUBFINDER == true ]] && subfinder -all -d $domain -silent -o .tmp/subfinder_psub.txt 2>>"$LOGFILE" &>/dev/null
else
@@ -752,7 +752,7 @@ function sub_recursive_passive(){
[ -s "subdomains/subdomains.txt" ] && dsieve -if subdomains/subdomains.txt -f 3 -top $DEEP_RECURSIVE_PASSIVE > .tmp/subdomains_recurs_top.txt
if [ ! "$AXIOM" = true ]; then
resolvers_update_quick_local
- [ -s ".tmp/subdomains_recurs_top.txt" ] && timeout -k $AMASS_ENUM_TIMEOUT amass enum -passive -df .tmp/subdomains_recurs_top.txt -nf subdomains/subdomains.txt -config $AMASS_CONFIG -timeout $AMASS_ENUM_TIMEOUT 2>>"$LOGFILE" | anew -q .tmp/passive_recursive.txt
+ [ -s ".tmp/subdomains_recurs_top.txt" ] && timeout -k 1m ${AMASS_ENUM_TIMEOUT}m amass enum -passive -df .tmp/subdomains_recurs_top.txt -nf subdomains/subdomains.txt -config $AMASS_CONFIG -timeout $AMASS_ENUM_TIMEOUT 2>>"$LOGFILE" | anew -q .tmp/passive_recursive.txt
[ -s ".tmp/passive_recursive.txt" ] && puredns resolve .tmp/passive_recursive.txt -w .tmp/passive_recurs_tmp.txt -r $resolvers --resolvers-trusted $resolvers_trusted -l $PUREDNS_PUBLIC_LIMIT --rate-limit-trusted $PUREDNS_TRUSTED_LIMIT --wildcard-tests $PUREDNS_WILDCARDTEST_LIMIT --wildcard-batch $PUREDNS_WILDCARDBATCH_LIMIT 2>>"$LOGFILE" &>/dev/null
else
resolvers_update_quick_axiom
@@ -1307,7 +1307,7 @@ function cms_scanner(){
[ ! -s ".tmp/webs_all.txt" ] && cat webs/webs.txt webs/webs_uncommon_ports.txt 2>/dev/null | anew -q .tmp/webs_all.txt
if [ -s ".tmp/webs_all.txt" ]; then
tr '\n' ',' < .tmp/webs_all.txt > .tmp/cms.txt
- timeout -k $CMSSCAN_TIMEOUT python3 $tools/CMSeeK/cmseek.py -l .tmp/cms.txt --batch -r 2>>"$LOGFILE" &>/dev/null
+ timeout -k 1m ${CMSSCAN_TIMEOUT}s python3 $tools/CMSeeK/cmseek.py -l .tmp/cms.txt --batch -r 2>>"$LOGFILE" &>/dev/null
exit_status=$?
if [[ $exit_status -eq 125 ]]; then
echo "TIMEOUT cmseek.py - investigate manually for $dir" >> "$LOGFILE"
From 367ee0116ef44d9785e5dc70982bdef57b7a09de Mon Sep 17 00:00:00 2001
From: six2dez
Date: Wed, 14 Jun 2023 15:46:55 +0200
Subject: [PATCH 28/40] Added nuclei fuzzing module
---
install.sh | 2 +-
reconftw.cfg | 1 +
reconftw.sh | 27 +++++++++++++++++++++++++++
3 files changed, 29 insertions(+), 1 deletion(-)
diff --git a/install.sh b/install.sh
index f1eff761..56575fcc 100755
--- a/install.sh
+++ b/install.sh
@@ -430,10 +430,10 @@ printf "${bblue}\n Running: Installing repositories (${#repos[@]})${reset}\n\n"
# Repos with special configs
eval git clone https://github.com/projectdiscovery/nuclei-templates ~/nuclei-templates $DEBUG_STD
eval git clone https://github.com/geeknik/the-nuclei-templates.git ~/nuclei-templates/extra_templates $DEBUG_STD
+eval git clone https://github.com/projectdiscovery/fuzzing-templates $tools/fuzzing-templates $DEBUG_STD
eval wget -q -O - https://raw.githubusercontent.com/NagliNagli/BountyTricks/main/ssrf.yaml > ~/nuclei-templates/ssrf_nagli.yaml $DEBUG_STD
eval wget -q -O - https://raw.githubusercontent.com/NagliNagli/BountyTricks/main/sap-redirect.yaml > ~/nuclei-templates/sap-redirect_nagli.yaml $DEBUG_STD
eval nuclei -update-templates $DEBUG_STD
-cd ~/nuclei-templates/extra_templates && eval git pull $DEBUG_STD
cd "$dir" || { echo "Failed to cd to $dir in ${FUNCNAME[0]} @ line ${LINENO}"; exit 1; }
eval git clone --depth 1 https://github.com/sqlmapproject/sqlmap.git $dir/sqlmap $DEBUG_STD
eval git clone --depth 1 https://github.com/drwetter/testssl.sh.git $dir/testssl.sh $DEBUG_STD
diff --git a/reconftw.cfg b/reconftw.cfg
index 5c35e4bc..2b33e24b 100644
--- a/reconftw.cfg
+++ b/reconftw.cfg
@@ -135,6 +135,7 @@ PROTO_POLLUTION=true # Check for prototype pollution flaws
SMUGGLING=true # Check for HTTP request smuggling flaws
WEBCACHE=true # Check for Web Cache issues
BYPASSER4XX=true # Check for 4XX bypasses
+FUZZPARAMS=true # Fuzz parameters values
# Extra features
NOTIFICATION=false # Notification for every function
diff --git a/reconftw.sh b/reconftw.sh
index 730a8ff9..8f5bad61 100755
--- a/reconftw.sh
+++ b/reconftw.sh
@@ -1890,6 +1890,32 @@ function webcache(){
fi
}
+function fuzzparams(){
+ if { [ ! -f "$called_fn_dir/.${FUNCNAME[0]}" ] || [ "$DIFF" = true ]; } && [ "$FUZZPARAMS" = true ] ; then
+ start_func ${FUNCNAME[0]} "Fuzzing params values checks"
+ if [ "$DEEP" = true ] || [[ $(cat webs/url_extract.txt | wc -l) -le $DEEP_LIMIT2 ]]; then
+ if [ ! "$AXIOM" = true ]; then
+ nuclei -update 2>>"$LOGFILE" >/dev/null
+ git -C $tools/fuzzing-templates pull
+ cat webs/url_extract.txt 2>/dev/null | nuclei -silent -retries 3 -rl $NUCLEI_RATELIMIT -t $tools/fuzzing-templates -o .tmp/fuzzparams.txt
+ else
+ axiom-exec "git clone https://github.com/projectdiscovery/fuzzing-templates /home/op/fuzzing-templates" &>/dev/null
+ axiom-scan webs/url_extract.txt -m nuclei -nh -retries 3 -w /home/op/fuzzing-templates -rl $NUCLEI_RATELIMIT -o .tmp/fuzzparams.txt $AXIOM_EXTRA_ARGS 2>>"$LOGFILE" >/dev/null
+ fi
+ [ -s ".tmp/fuzzparams.txt" ] && cat .tmp/fuzzparams.txt | anew -q vulns/fuzzparams.txt
+ end_func "Results are saved in vulns/fuzzparams.txt" ${FUNCNAME[0]}
+ else
+ end_func "Fuzzing params values: Too many entries to test, try with --deep flag" ${FUNCNAME[0]}
+ fi
+ else
+ if [ "$FUZZPARAMS" = false ]; then
+ printf "\n${yellow} ${FUNCNAME[0]} skipped in this mode or defined in reconftw.cfg ${reset}\n"
+ else
+ printf "${yellow} ${FUNCNAME[0]} is already processed, to force executing ${FUNCNAME[0]} delete\n $called_fn_dir/.${FUNCNAME[0]} ${reset}\n\n"
+ fi
+ fi
+}
+
###############################################################################################################
########################################## OPTIONS & MGMT #####################################################
###############################################################################################################
@@ -2378,6 +2404,7 @@ function vulns(){
webcache
spraying
brokenLinks
+ fuzzparams
test_ssl
fi
}
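The new function uses the same gate every reconFTW module does: a per-function marker file plus a cfg flag. A minimal sketch of that pattern; the marker directory name is assumed here, and the marker being written on completion is inferred from the printf messages above:

    FUZZPARAMS=true             # reconftw.cfg flag
    DIFF=false                  # diff mode reruns completed modules
    called_fn_dir=".called_fn"  # assumed marker directory name
    mkdir -p "$called_fn_dir"

    if { [ ! -f "$called_fn_dir/.fuzzparams" ] || [ "$DIFF" = true ]; } && [ "$FUZZPARAMS" = true ]; then
        echo "running fuzzparams..."
        touch "$called_fn_dir/.fuzzparams"  # delete this file to force a rerun
    fi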
From 538dc5057718cf19e328d308edb99a3909d26a92 Mon Sep 17 00:00:00 2001
From: DaniBoy <52128795+ddaniboy@users.noreply.github.com>
Date: Wed, 14 Jun 2023 15:56:30 -0300
Subject: [PATCH 29/40] remove The Harvester from API Keys
---
web/templates/apikeys_settings.html | 173 +---------------------------
1 file changed, 1 insertion(+), 172 deletions(-)
diff --git a/web/templates/apikeys_settings.html b/web/templates/apikeys_settings.html
index 68a480cb..264fc5cf 100644
--- a/web/templates/apikeys_settings.html
+++ b/web/templates/apikeys_settings.html
@@ -1571,177 +1571,6 @@ GITHUB TOKENS
-
-
-
-
-
-
-
-
-
THE HARVESTER
-
-
-
-
-
-
-
-
-
@@ -2037,4 +1866,4 @@ GENERAL OPTIONS
}
-{% endblock %}
\ No newline at end of file
+{% endblock %}
From 6c875562ead79aa0fd7e138a758af67ce5ac0446 Mon Sep 17 00:00:00 2001
From: six2dez
Date: Wed, 14 Jun 2023 23:51:28 +0200
Subject: [PATCH 30/40] fix nuclei
---
reconftw.sh | 2 +-
1 file changed, 1 insertion(+), 1 deletion(-)
diff --git a/reconftw.sh b/reconftw.sh
index 8f5bad61..c7876d3b 100755
--- a/reconftw.sh
+++ b/reconftw.sh
@@ -1160,7 +1160,7 @@ function nuclei_check(){
else
if [ -s ".tmp/webs_subs.txt" ]; then
set -f # avoid globbing (expansion of *).
- array=("${NUCLEI_SEVERITY//,/ }")
+ array=(${NUCLEI_SEVERITY//,/ })
for i in "${!array[@]}"
do
crit=${array[i]}
From 2e0514dba69fea01e7618faf7c760e917998dd91 Mon Sep 17 00:00:00 2001
From: six2dez
Date: Wed, 14 Jun 2023 23:57:53 +0200
Subject: [PATCH 31/40] fix 2 nuclei :)
---
reconftw.sh | 2 +-
1 file changed, 1 insertion(+), 1 deletion(-)
diff --git a/reconftw.sh b/reconftw.sh
index c7876d3b..0b19007a 100755
--- a/reconftw.sh
+++ b/reconftw.sh
@@ -1149,7 +1149,7 @@ function nuclei_check(){
[ ! -s ".tmp/webs_subs.txt" ] && cat subdomains/subdomains.txt .tmp/webs_all.txt 2>>"$LOGFILE" | anew -q .tmp/webs_subs.txt
if [ ! "$AXIOM" = true ]; then
set -f # avoid globbing (expansion of *).
- array=("${NUCLEI_SEVERITY//,/ }")
+ array=(${NUCLEI_SEVERITY//,/ })
for i in "${!array[@]}"
do
crit=${array[i]}
From 657ca1c73e78322ffa06c6b7c2d2590fdf03da94 Mon Sep 17 00:00:00 2001
From: six2dez
Date: Thu, 15 Jun 2023 00:14:32 +0200
Subject: [PATCH 32/40] much better solution for the nuclei issue
---
reconftw.sh | 14 +++++---------
1 file changed, 5 insertions(+), 9 deletions(-)
diff --git a/reconftw.sh b/reconftw.sh
index 0b19007a..b91b183b 100755
--- a/reconftw.sh
+++ b/reconftw.sh
@@ -1147,23 +1147,19 @@ function nuclei_check(){
mkdir -p nuclei_output
[ ! -s ".tmp/webs_all.txt" ] && cat webs/webs.txt webs/webs_uncommon_ports.txt 2>/dev/null | anew -q .tmp/webs_all.txt
[ ! -s ".tmp/webs_subs.txt" ] && cat subdomains/subdomains.txt .tmp/webs_all.txt 2>>"$LOGFILE" | anew -q .tmp/webs_subs.txt
- if [ ! "$AXIOM" = true ]; then
- set -f # avoid globbing (expansion of *).
- array=(${NUCLEI_SEVERITY//,/ })
- for i in "${!array[@]}"
+ if [ ! "$AXIOM" = true ]; then # avoid globbing (expansion of *).
+ IFS=',' read -ra severity_array <<< "$NUCLEI_SEVERITY"
+ for crit in "${severity_array[@]}"
do
- crit=${array[i]}
printf "${yellow}\n Running : Nuclei $crit ${reset}\n\n"
cat .tmp/webs_subs.txt 2>/dev/null | nuclei $NUCLEI_FLAGS -severity $crit -nh -rl $NUCLEI_RATELIMIT -o nuclei_output/${crit}.txt
done
printf "\n\n"
else
if [ -s ".tmp/webs_subs.txt" ]; then
- set -f # avoid globbing (expansion of *).
- array=(${NUCLEI_SEVERITY//,/ })
- for i in "${!array[@]}"
+ IFS=',' read -ra severity_array <<< "$NUCLEI_SEVERITY"
+ for crit in "${severity_array[@]}"
do
- crit=${array[i]}
printf "${yellow}\n Running : Nuclei $crit, check results on nuclei_output folder${reset}\n\n"
axiom-scan .tmp/webs_subs.txt -m nuclei -severity ${crit} -nh -rl $NUCLEI_RATELIMIT -o nuclei_output/${crit}.txt $AXIOM_EXTRA_ARGS 2>>"$LOGFILE" >/dev/null
done
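Why the read -ra form splits correctly where the earlier attempts did not, in a minimal standalone sketch:

    NUCLEI_SEVERITY="info,low,medium,high,critical"

    quoted=("${NUCLEI_SEVERITY//,/ }")   # quoted expansion never word-splits
    echo "${#quoted[@]}"                 # -> 1 (one element holding the whole string)

    unquoted=(${NUCLEI_SEVERITY//,/ })   # splits, but also glob-expands (hence the set -f before)
    echo "${#unquoted[@]}"               # -> 5, fragile

    IFS=',' read -ra severity_array <<< "$NUCLEI_SEVERITY"
    echo "${#severity_array[@]}"         # -> 5, comma-split, no globbing, no set -f needed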
From 9b21a3a448111e87effe6dcbd73a22c10a9e0dd9 Mon Sep 17 00:00:00 2001
From: six2dez
Date: Thu, 15 Jun 2023 00:17:31 +0200
Subject: [PATCH 33/40] Check fuzzing templates exists
---
reconftw.sh | 1 +
1 file changed, 1 insertion(+)
diff --git a/reconftw.sh b/reconftw.sh
index b91b183b..aaecca21 100755
--- a/reconftw.sh
+++ b/reconftw.sh
@@ -73,6 +73,7 @@ function tools_installed(){
which gotator &>/dev/null || { printf "${bred} [*] gotator [NO]${reset}\n"; allinstalled=false;}
which nuclei &>/dev/null || { printf "${bred} [*] Nuclei [NO]${reset}\n"; allinstalled=false;}
[ -d ~/nuclei-templates ] || { printf "${bred} [*] Nuclei templates [NO]${reset}\n"; allinstalled=false;}
+ [ -d ~/fuzzing-templates ] || { printf "${bred} [*] Fuzzing templates [NO]${reset}\n"; allinstalled=false;}
which gf &>/dev/null || { printf "${bred} [*] Gf [NO]${reset}\n"; allinstalled=false;}
which Gxss &>/dev/null || { printf "${bred} [*] Gxss [NO]${reset}\n"; allinstalled=false;}
which subjs &>/dev/null || { printf "${bred} [*] subjs [NO]${reset}\n"; allinstalled=false;}
From 9593492f5cef93c2029ec903a2a5abd65385dd52 Mon Sep 17 00:00:00 2001
From: six2dez
Date: Thu, 15 Jun 2023 00:20:25 +0200
Subject: [PATCH 34/40] Shellcheck fixes
---
install.sh | 4 ++--
1 file changed, 2 insertions(+), 2 deletions(-)
diff --git a/install.sh b/install.sh
index 56575fcc..28c17f4f 100755
--- a/install.sh
+++ b/install.sh
@@ -551,7 +551,7 @@ if [ "$double_check" = "true" ]; then
repos_step=0
for repo in "${!repos[@]}"; do
repos_step=$((repos_step + 1))
- eval cd $dir/$repo $DEBUG_STD || { eval git clone https://github.com/${repos[$repo]} $dir/$repo $DEBUG_STD && cd $dir/$repo; }
+ eval cd $dir/$repo $DEBUG_STD || { eval git clone https://github.com/${repos[$repo]} $dir/$repo $DEBUG_STD && cd $dir/$repo || { echo "Failed to cd directory '$dir'"; exit 1; }; }
eval git pull $DEBUG_STD
exit_status=$?
if [ -s "setup.py" ]; then
@@ -574,7 +574,7 @@ printf "${bblue} Running: Performing last configurations ${reset}\n\n"
## Last steps
if [ "$generate_resolvers" = true ]; then
if [ ! -s "$resolvers" ] || [[ $(find "$resolvers" -mtime +1 -print) ]] ; then
- ${reset}\n\n"Checking resolvers lists...\n Accurate resolvers are the key to great results\n This may take around 10 minutes if it's not updated ${reset}\n\n"
+ printf "${reset}\n\nChecking resolvers lists...\n Accurate resolvers are the key to great results\n This may take around 10 minutes if it's not updated\n\n"
eval rm -f $resolvers 2>>"$LOGFILE"
dnsvalidator -tL https://public-dns.info/nameservers.txt -threads $DNSVALIDATOR_THREADS -o $resolvers &>/dev/null
dnsvalidator -tL https://raw.githubusercontent.com/blechschmidt/massdns/master/lists/resolvers.txt -threads $DNSVALIDATOR_THREADS -o tmp_resolvers &>/dev/null
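Both fixes follow the same ShellCheck guidance; sketched with placeholder paths:

    dir="$HOME/Tools"   # placeholder values
    repo="example-repo"

    # Unguarded (SC2164): if cd fails, git pull runs in whatever directory
    # the script was already in.
    #cd "$dir/$repo"; git pull

    # Guarded: abort with a message instead of continuing from the wrong place.
    cd "$dir/$repo" || { echo "Failed to cd into '$dir/$repo'"; exit 1; }
    git pull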
From af87c728a9eb05b29dbf46dec342c02382676ed9 Mon Sep 17 00:00:00 2001
From: six2dez
Date: Thu, 15 Jun 2023 11:45:33 +0200
Subject: [PATCH 35/40] Added related azure tenant domains
---
reconftw.sh | 3 +++
1 file changed, 3 insertions(+)
diff --git a/reconftw.sh b/reconftw.sh
index aaecca21..744c3d4d 100755
--- a/reconftw.sh
+++ b/reconftw.sh
@@ -252,6 +252,9 @@ function domain_info(){
if [ "$DEEP" = true ] || [ "$REVERSE_WHOIS" = true ]; then
timeout -k 1m ${AMASS_INTEL_TIMEOUT}m amass intel -d ${domain} -whois -timeout $AMASS_INTEL_TIMEOUT -o osint/domain_info_reverse_whois.txt 2>>"$LOGFILE" &>/dev/null
fi
+
+ curl -s "https://aadinternals.azurewebsites.net/api/tenantinfo?domainName=${domain}" -H "Origin: https://aadinternals.com" | jq -r .domains[].name > osint/azure_tenant_domains.txt
+
end_func "Results are saved in $domain/osint/domain_info_[general/name/email/ip].txt" ${FUNCNAME[0]}
else
if [ "$DOMAIN_INFO" = false ] || [ "$OSINT" = false ]; then
From 05e4124dde639c7c02ad1f316cac1fcb0c359879 Mon Sep 17 00:00:00 2001
From: six2dez
Date: Thu, 15 Jun 2023 12:23:10 +0200
Subject: [PATCH 36/40] fuzzing templates install fixed
---
install.sh | 2 +-
1 file changed, 1 insertion(+), 1 deletion(-)
diff --git a/install.sh b/install.sh
index 28c17f4f..7a170a14 100755
--- a/install.sh
+++ b/install.sh
@@ -430,7 +430,7 @@ printf "${bblue}\n Running: Installing repositories (${#repos[@]})${reset}\n\n"
# Repos with special configs
eval git clone https://github.com/projectdiscovery/nuclei-templates ~/nuclei-templates $DEBUG_STD
eval git clone https://github.com/geeknik/the-nuclei-templates.git ~/nuclei-templates/extra_templates $DEBUG_STD
-eval git clone https://github.com/projectdiscovery/fuzzing-templates $tools/fuzzing-templates $DEBUG_STD
+eval git clone https://github.com/projectdiscovery/fuzzing-templates ~/fuzzing-templates $DEBUG_STD
eval wget -q -O - https://raw.githubusercontent.com/NagliNagli/BountyTricks/main/ssrf.yaml > ~/nuclei-templates/ssrf_nagli.yaml $DEBUG_STD
eval wget -q -O - https://raw.githubusercontent.com/NagliNagli/BountyTricks/main/sap-redirect.yaml > ~/nuclei-templates/sap-redirect_nagli.yaml $DEBUG_STD
eval nuclei -update-templates $DEBUG_STD
From 4fdd28f57b757b8996b36804c256e1ca9010ff58 Mon Sep 17 00:00:00 2001
From: six2dez
Date: Thu, 15 Jun 2023 15:45:38 +0200
Subject: [PATCH 37/40] Added dynamic gowitness timeout
---
reconftw.cfg | 1 +
reconftw.sh | 8 ++++++--
2 files changed, 7 insertions(+), 2 deletions(-)
diff --git a/reconftw.cfg b/reconftw.cfg
index 2b33e24b..28d9fb6d 100644
--- a/reconftw.cfg
+++ b/reconftw.cfg
@@ -192,6 +192,7 @@ FFUF_MAXTIME=900 # Seconds
HTTPX_TIMEOUT=10 # Seconds
HTTPX_UNCOMMONPORTS_TIMEOUT=10 # Seconds
PERMUTATIONS_LIMIT=21474836480 # Bytes, default is 20 GB
+GOWITNESS_TIMEOUT_PER_SITE=20 # Seconds
# lists
fuzz_wordlist=${tools}/fuzz_wordlist.txt
diff --git a/reconftw.sh b/reconftw.sh
index 744c3d4d..c328f652 100755
--- a/reconftw.sh
+++ b/reconftw.sh
@@ -988,10 +988,14 @@ function screenshot(){
if { [ ! -f "$called_fn_dir/.${FUNCNAME[0]}" ] || [ "$DIFF" = true ]; } && [ "$WEBSCREENSHOT" = true ]; then
start_func ${FUNCNAME[0]} "Web Screenshots"
[ ! -s ".tmp/webs_all.txt" ] && cat webs/webs.txt webs/webs_uncommon_ports.txt 2>/dev/null | anew -q .tmp/webs_all.txt
+
+ num_lines=$(wc -l < .tmp/webs_all.txt)
+ dynamic_gowitness_timeout=$(expr $num_lines \* $GOWITNESS_TIMEOUT_PER_SITE)
+
if [ ! "$AXIOM" = true ]; then
- [ -s ".tmp/webs_all.txt" ] && gowitness file -f .tmp/webs_all.txt -t $GOWITNESS_THREADS $GOWITNESS_FLAGS 2>>"$LOGFILE"
+ [ -s ".tmp/webs_all.txt" ] && timeout -k 1m ${dynamic_gowitness_timeout}s gowitness file -f .tmp/webs_all.txt -t $GOWITNESS_THREADS $GOWITNESS_FLAGS -o screenshots 2>>"$LOGFILE"
else
- axiom-scan .tmp/webs_all.txt -m gowitness -t $GOWITNESS_THREADS $GOWITNESS_FLAGS -o screenshots $AXIOM_EXTRA_ARGS 2>>"$LOGFILE" >/dev/null
+ timeout -k 1m ${dynamic_gowitness_timeout}s axiom-scan .tmp/webs_all.txt -m gowitness -t $GOWITNESS_THREADS $GOWITNESS_FLAGS -o screenshots $AXIOM_EXTRA_ARGS 2>>"$LOGFILE" >/dev/null
fi
end_func "Results are saved in $domain/screenshots folder" ${FUNCNAME[0]}
else
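The screenshot budget now scales linearly with the target count. A sketch of the same arithmetic using bash's builtin $(( )), equivalent to the expr call in the patch:

    GOWITNESS_TIMEOUT_PER_SITE=20   # seconds per target (the new cfg knob)
    num_lines=$(wc -l < .tmp/webs_all.txt)

    # builtin equivalent of: expr $num_lines \* $GOWITNESS_TIMEOUT_PER_SITE
    dynamic_gowitness_timeout=$((num_lines * GOWITNESS_TIMEOUT_PER_SITE))

    # e.g. 120 targets -> 2400s before SIGTERM, SIGKILL one minute after that
    timeout -k 1m "${dynamic_gowitness_timeout}s" gowitness file -f .tmp/webs_all.txt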
From 58533a911d59efb5081b5c99f0736bf892c36c7b Mon Sep 17 00:00:00 2001
From: six2dez
Date: Thu, 15 Jun 2023 15:47:02 +0200
Subject: [PATCH 38/40] fix output
---
reconftw.sh | 2 +-
1 file changed, 1 insertion(+), 1 deletion(-)
diff --git a/reconftw.sh b/reconftw.sh
index c328f652..37256a34 100755
--- a/reconftw.sh
+++ b/reconftw.sh
@@ -993,7 +993,7 @@ function screenshot(){
dynamic_gowitness_timeout=$(expr $num_lines \* $GOWITNESS_TIMEOUT_PER_SITE)
if [ ! "$AXIOM" = true ]; then
- [ -s ".tmp/webs_all.txt" ] && timeout -k 1m ${dynamic_gowitness_timeout}s gowitness file -f .tmp/webs_all.txt -t $GOWITNESS_THREADS $GOWITNESS_FLAGS -o screenshots 2>>"$LOGFILE"
+ [ -s ".tmp/webs_all.txt" ] && timeout -k 1m ${dynamic_gowitness_timeout}s gowitness file -f .tmp/webs_all.txt -t $GOWITNESS_THREADS $GOWITNESS_FLAGS 2>>"$LOGFILE"
else
timeout -k 1m ${dynamic_gowitness_timeout}s axiom-scan .tmp/webs_all.txt -m gowitness -t $GOWITNESS_THREADS $GOWITNESS_FLAGS -o screenshots $AXIOM_EXTRA_ARGS 2>>"$LOGFILE" >/dev/null
fi
From f2111e5f2f048e0d4980e5d86fd5e26ca91fcc26 Mon Sep 17 00:00:00 2001
From: six2dez
Date: Sat, 17 Jun 2023 01:23:32 +0200
Subject: [PATCH 39/40] fuzzing better sorting
---
reconftw.sh | 4 ++--
1 file changed, 2 insertions(+), 2 deletions(-)
diff --git a/reconftw.sh b/reconftw.sh
index 37256a34..732f77ae 100755
--- a/reconftw.sh
+++ b/reconftw.sh
@@ -1194,7 +1194,7 @@ function fuzz(){
interlace -tL .tmp/webs_all.txt -threads ${INTERLACE_THREADS} -c "ffuf ${FFUF_FLAGS} -t ${FFUF_THREADS} -rate ${FFUF_RATELIMIT} -H \"${HEADER}\" -w ${fuzz_wordlist} -maxtime ${FFUF_MAXTIME} -u _target_/FUZZ -o _output_/_cleantarget_.json" -o $dir/.tmp/fuzzing 2>>"$LOGFILE" >/dev/null
for sub in $(cat .tmp/webs_all.txt); do
sub_out=$(echo $sub | sed -e 's|^[^/]*//||' -e 's|/.*$||')
- [ -s "$dir/.tmp/fuzzing/${sub_out}.json" ] && cat $dir/.tmp/fuzzing/${sub_out}.json | jq -r 'try .results[] | "\(.status) \(.length) \(.url)"' | sort | anew -q $dir/fuzzing/${sub_out}.txt
+ [ -s "$dir/.tmp/fuzzing/${sub_out}.json" ] && cat $dir/.tmp/fuzzing/${sub_out}.json | jq -r 'try .results[] | "\(.status) \(.length) \(.url)"' | sort -k1 | anew -q $dir/fuzzing/${sub_out}.txt
done
find $dir/fuzzing/ -type f -iname "*.txt" -exec cat {} + 2>>"$LOGFILE" | sort -k3 | anew -q $dir/fuzzing/fuzzing_full.txt
else
@@ -1204,7 +1204,7 @@ function fuzz(){
axiom-scan .tmp/webs_all.txt -m ffuf_base -H "${HEADER}" $FFUF_FLAGS -s -maxtime $FFUF_MAXTIME -o $dir/.tmp/ffuf-content.json $AXIOM_EXTRA_ARGS 2>>"$LOGFILE" >/dev/null
for sub in $(cat .tmp/webs_all.txt); do
sub_out=$(echo $sub | sed -e 's|^[^/]*//||' -e 's|/.*$||')
- [ -s "$dir/.tmp/ffuf-content.json" ] && cat .tmp/ffuf-content.json | jq -r 'try .results[] | "\(.status) \(.length) \(.url)"' | grep $sub | sort | sort -k1 | anew -q fuzzing/${sub_out}.txt
+ [ -s "$dir/.tmp/ffuf-content.json" ] && cat .tmp/ffuf-content.json | jq -r 'try .results[] | "\(.status) \(.length) \(.url)"' | grep $sub | sort -k1 | anew -q fuzzing/${sub_out}.txt
done
find $dir/fuzzing/ -type f -iname "*.txt" -exec cat {} + 2>>"$LOGFILE" | sort -k3 | anew -q $dir/fuzzing/fuzzing_full.txt
fi
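What the two sort keys mean on the reduced ffuf lines; lexical -k1 is enough here because HTTP status codes are always three digits (filenames below are placeholders):

    # Each reduced line is "status length url", e.g.:
    #   200 1256 https://sub.example.com/admin
    #   403 4321 https://sub.example.com/backup

    sort -k1 fuzzing/sub.example.com.txt                        # per host: group by status
    sort -k3 fuzzing/*.txt | anew -q fuzzing/fuzzing_full.txt   # merged: order by URL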
From 91b8d54f09b8bb548c591dc3922fe457d8ed48ea Mon Sep 17 00:00:00 2001
From: six2dez
Date: Sat, 17 Jun 2023 02:02:01 +0200
Subject: [PATCH 40/40] 2.7 release
---
README.md | 4 ++--
1 file changed, 2 insertions(+), 2 deletions(-)
diff --git a/README.md b/README.md
index c83c3a29..08d40a5f 100644
--- a/README.md
+++ b/README.md
@@ -7,8 +7,8 @@
-
-
+
+