diff --git a/README.md b/README.md
index 1984b767..b754703c 100644
--- a/README.md
+++ b/README.md
@@ -8,8 +8,8 @@
-
-
+
+
@@ -37,9 +37,8 @@
 **ReconFTW** automates the entire process of reconnaissance for you. It combines subdomain enumeration with various vulnerability checks to obtain the maximum information about your target.
 
-ReconFTW uses around 5 techniques (passive, bruteforce, permutations, certificate transparency, source code scraping) for subdomain enumeration which helps you getting the maximum and the most interesting subdomains so that you be ahead of the competition.
-
-
+ReconFTW uses a lot of techniques (passive, bruteforce, permutations, certificate transparency, source code scraping, analytics, DNS records...) for subdomain enumeration, which helps you get the maximum number of the most interesting subdomains and stay ahead of the competition.
+
 It also performs various vulnerability checks like XSS, Open Redirects, SSRF, CRLF, LFI, SQLi, SSL tests, SSTI, DNS zone transfers, and much more. Along with these, it performs OSINT techniques, directory fuzzing, dorking, port scanning, screenshots and nuclei scans on your target.
 
 So, what are you waiting for? Go! Go! Go! :boom:
 
@@ -47,23 +46,24 @@ So, what are you waiting for? Go! Go! Go! :boom:
 📔 Table of Contents
 -----------------
-- [💿 Installation](#-installation)
+- [💿 Installation:](#-installation)
   - [a) In your PC/VPS/VM](#a-in-your-pcvpsvm)
   - [b) Docker container 🐳 (2 options)](#b-docker-container--2-options)
     - [1) From DockerHub](#1-from-dockerhub)
     - [2) From repository](#2-from-repository)
-- [⚙️ Config file](#️-config-file)
-- [Usage](#usage)
-  - [Example Usage](#example-usage)
-- [Axiom Support :cloud:](#axiom-support-cloud)
-- [Sample video](#sample-video)
+- [⚙️ Config file:](#️-config-file)
+- [Usage:](#usage)
+- [Example Usage:](#example-usage)
+- [Axiom Support: :cloud:](#axiom-support-cloud)
+- [Sample video:](#sample-video)
 - [:fire: Features :fire:](#fire-features-fire)
 - [Mindmap/Workflow](#mindmapworkflow)
 - [Data Keep](#data-keep)
-  - [Main commands](#main-commands)
-  - [How to contribute](#how-to-contribute)
-- [Need help?](#need-help)
-- [Support this project](#you-can-support-this-work-buying-me-a-coffee)
+  - [Main commands:](#main-commands)
+  - [How to contribute:](#how-to-contribute)
+  - [Need help? :information_source:](#need-help-information_source)
+  - [You can support this work buying me a coffee:](#you-can-support-this-work-buying-me-a-coffee)
+- [Sponsors ❤️](#sponsors-️)
 - [Thanks :pray:](#thanks-pray)
 
 ---
@@ -121,19 +121,9 @@ So, what are you waiting for? Go! Go! Go! :boom:
 ```yaml
 #################################################################
-#                     reconFTW config file                      #
+#                     reconFTW config file                      #
 #################################################################
-
-# TERM COLORS
-bred='\033[1;31m'
-bblue='\033[1;34m'
-bgreen='\033[1;32m'
-yellow='\033[0;33m'
-red='\033[0;31m'
-blue='\033[0;34m'
-green='\033[0;32m'
-reset='\033[0m'
-
+
 # General values
 tools=~/Tools
 SCRIPTPATH="$( cd "$(dirname "$0")" >/dev/null 2>&1 ; pwd -P )"
@@ -142,18 +132,18 @@ reconftw_version=$(git branch --show-current)-$(git describe --tags)
 update_resolvers=true
 proxy_url="http://127.0.0.1:8080/"
 #dir_output=/custom/output/path
-
+
 # Golang Vars (Comment or change on your own)
 export GOROOT=/usr/local/go
 export GOPATH=$HOME/go
 export PATH=$GOPATH/bin:$GOROOT/bin:$HOME/.local/bin:$PATH
-
+
 # Tools config files
 #NOTIFY_CONFIG=~/.config/notify/notify.conf # No need to define
 #SUBFINDER_CONFIG=~/.config/subfinder/config.yaml # No need to define
 AMASS_CONFIG=~/.config/amass/config.ini
 GITHUB_TOKENS=${tools}/.github_tokens
-
+
 # APIs/TOKENS - Uncomment the lines you set removing the '#' at the beginning of the line
 #SHODAN_API_KEY="XXXXXXXXXXXXX"
 #XSS_SERVER="XXXXXXXXXXXXXXXXX"
@@ -162,13 +152,13 @@ GITHUB_TOKENS=${tools}/.github_tokens
 #findomain_spyse_token="XXXXXXXXXXXXXXXXX"
 #findomain_securitytrails_token="XXXXXXXXXXXXXXXXX"
 #findomain_fb_token="XXXXXXXXXXXXXXXXX"
-slack_channel="XXXXXXXX"
-slack_auth="xoXX-XXX-XXX-XXX"
+#slack_channel="XXXXXXXX"
+#slack_auth="xoXX-XXX-XXX-XXX"
 
 # File descriptors
 DEBUG_STD="&>/dev/null"
 DEBUG_ERROR="2>/dev/null"
-
+
 # Osint
 OSINT=true
 GOOGLE_DORKS=true
@@ -176,25 +166,27 @@ GITHUB_DORKS=true
 METADATA=true
 EMAILS=true
 DOMAIN_INFO=true
-
+METAFINDER_LIMIT=20 # Max 250
+
 # Subdomains
 SUBCRT=true
+SUBANALYTICS=true
 SUBBRUTE=true
 SUBSCRAPING=true
 SUBPERMUTE=true
 SUBTAKEOVER=true
 SUBRECURSIVE=true
+SUB_RECURSIVE_PASSIVE=false # Uses a lot of API keys queries
 ZONETRANSFER=true
 S3BUCKETS=true
-
+
 # Web detection
 WEBPROBESIMPLE=true
 WEBPROBEFULL=true
 WEBSCREENSHOT=true
 UNCOMMON_PORTS_WEB="81,300,591,593,832,981,1010,1311,1099,2082,2095,2096,2480,3000,3128,3333,4243,4567,4711,4712,4993,5000,5104,5108,5280,5281,5601,5800,6543,7000,7001,7396,7474,8000,8001,8008,8014,8042,8060,8069,8080,8081,8083,8088,8090,8091,8095,8118,8123,8172,8181,8222,8243,8280,8281,8333,8337,8443,8500,8834,8880,8888,8983,9000,9001,9043,9060,9080,9090,9091,9200,9443,9502,9800,9981,10000,10250,11371,12443,15672,16080,17778,18091,18092,20720,32000,55440,55672"
-# You can change to aquatone if gowitness fails, comment the one you don't want
-AXIOM_SCREENSHOT_MODULE=gowitness
-#AXIOM_SCREENSHOT_MODULE=aquatone
+AXIOM_SCREENSHOT_MODULE=webscreenshot # Choose between aquatone,gowitness,webscreenshot
 
 # Host
 FAVICON=true
@@ -202,7 +194,7 @@ PORTSCANNER=true
 PORTSCAN_PASSIVE=true
 PORTSCAN_ACTIVE=true
 CLOUD_IP=true
-
+
 # Web analysis
 WAF_DETECTION=true
 NUCLEICHECK=true
@@ -214,7 +206,7 @@ PARAMS=true
 FUZZ=true
 CMS_SCANNER=true
 WORDLIST=true
-
+
 # Vulns
 XSS=true
 CORS=true
@@ -228,7 +220,8 @@ SQLI=true
 BROKENLINKS=true
 SPRAY=true
 BYPASSER4XX=true
-
+COMM_INJ=true
+
 # Extra features
 NOTIFICATION=false
 DEEP=false
@@ -237,10 +230,11 @@ REMOVETMP=false
 REMOVELOG=false
 PROXY=false
 SENDZIPNOTIFY=false
+PRESERVE=false # set to true to avoid deleting the .called_fn files on really large scans
 
 # HTTP options
 HEADER="User-Agent: Mozilla/5.0 (X11; Linux x86_64; rv:72.0) Gecko/20100101 Firefox/72.0"
-
+
 # Threads
 FFUF_THREADS=40
 HTTPX_THREADS=50
@@ -255,16 +249,19 @@ DALFOX_THREADS=200
 PUREDNS_PUBLIC_LIMIT=0 # Set between 2000 - 10000 if your router blows up, 0 is unlimited
 PUREDNS_TRUSTED_LIMIT=400
 DIRDAR_THREADS=200
+WEBSCREENSHOT_THREADS=200
+RESOLVE_DOMAINS_THREADS=150
 
 # Timeouts
 CMSSCAN_TIMEOUT=3600
 FFUF_MAXTIME=900 # Seconds
-HTTPX_TIMEOUT=15 # Seconds
+HTTPX_TIMEOUT=10 # Seconds
 HTTPX_UNCOMMONPORTS_TIMEOUT=10 # Seconds
 
 # lists
 fuzz_wordlist=${tools}/fuzz_wordlist.txt
 lfi_wordlist=${tools}/lfi_wordlist.txt
+ssti_wordlist=${tools}/ssti_wordlist.txt
 subs_wordlist=${tools}/subdomains.txt
 subs_wordlist_big=${tools}/subdomains_big.txt
 resolvers=${tools}/resolvers.txt
@@ -272,7 +269,7 @@ resolvers_trusted=${tools}/resolvers_trusted.txt
 
 # Axiom Fleet
 # Will not start a new fleet if one exists w/ same name and size (or larger)
-AXIOM_FLEET_LAUNCH=true
+AXIOM_FLEET_LAUNCH=false
 AXIOM_FLEET_NAME="reconFTW"
 AXIOM_FLEET_COUNT=5
 AXIOM_FLEET_REGIONS=""
@@ -280,6 +277,16 @@ AXIOM_FLEET_SHUTDOWN=true
 # This is a script on your reconftw host that might prep things your way...
 #AXIOM_POST_START="$HOME/bin/yourScript"
 
+# TERM COLORS
+bred='\033[1;31m'
+bblue='\033[1;34m'
+bgreen='\033[1;32m'
+yellow='\033[0;33m'
+red='\033[0;31m'
+blue='\033[0;34m'
+green='\033[0;32m'
+reset='\033[0m'
+
 ```
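A note on the config hunk above: despite the README's yaml fence, every line of this file is plain bash, which is presumably why reconFTW can load it by sourcing. A minimal sketch of that pattern, including the new `-f` flag this patch documents (the loader below is illustrative, not reconFTW's actual code):

```bash
#!/usr/bin/env bash
# Minimal sketch of config loading (assumption: the cfg is sourced as bash).
CONFIG_FILE="reconftw.cfg"            # defaults shipped with the repo
while getopts ":f:" opt; do
    case $opt in
        f) CONFIG_FILE="$OPTARG" ;;   # -f /path/to/custom.cfg overrides it
    esac
done
. "$CONFIG_FILE"                      # variables like HTTPX_TIMEOUT land in scope
echo "httpx timeout: ${HTTPX_TIMEOUT}s, axiom fleet launch: ${AXIOM_FLEET_LAUNCH}"
```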
@@ -295,6 +302,7 @@ AXIOM_FLEET_SHUTDOWN=true
 | -l | List of targets *(one per line)* |
 | -m | Multiple domain target *(companyName)* |
 | -x | Exclude subdomains list *(Out Of Scope)* |
+| -i | Include subdomains list *(In Scope)* |
 
 **MODE OPTIONS**
 
@@ -313,6 +321,7 @@ AXIOM_FLEET_SHUTDOWN=true
 | Flag | Description |
 |------|-------------|
 | --deep | Deep scan (Enable some slow options for deeper scan, _vps intended mode_) |
+| -f | Custom config file path |
 | -o | Output directory |
 
 # Example Usage:
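For orientation, here are hypothetical invocations assembled from the option tables above. The `-d` (single domain) and `-r` (recon mode) spellings come from reconFTW's full usage text, which these hunks do not show, so treat them as assumptions:

```bash
# Hypothetical example runs; only -l, -m, -x, -i, --deep, -f and -o are
# documented in the hunks above, the rest is assumed from upstream usage.
./reconftw.sh -d example.com -r                              # recon a single target
./reconftw.sh -l targets.txt -r -x out_of_scope.txt          # target list minus OOS subdomains
./reconftw.sh -m companyName -l domains.txt -r --deep -o /data/recon  # multi-domain, deep scan
./reconftw.sh -d example.com -r -f custom.cfg                # the new -f flag: custom config file
```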
@@ -378,33 +387,33 @@
 - Metadata finder ([MetaFinder](https://github.com/Josue87/MetaFinder))
 - Google Dorks ([degoogle_hunter](https://github.com/six2dez/degoogle_hunter))
 - Github Dorks ([GitDorker](https://github.com/obheda12/GitDorker))
-- Multiple subdomain enumeration techniques (passive, bruteforce, permutations and scraping)
-  - Passive ([subfinder](https://github.com/projectdiscovery/subfinder), [assetfinder](https://github.com/tomnomnom/assetfinder), [amass](https://github.com/OWASP/Amass), [findomain](https://github.com/Findomain/Findomain), [crobat](https://github.com/cgboal/sonarsearch), [waybackurls](https://github.com/tomnomnom/waybackurls), [github-subdomains](https://github.com/gwen001/github-subdomains), [Anubis](https://jldc.me) and [mildew](https://github.com/daehee/mildew))
+- Multiple subdomain enumeration techniques (passive, bruteforce, permutations, DNS records, scraping)
+  - Passive ([subfinder](https://github.com/projectdiscovery/subfinder), [assetfinder](https://github.com/tomnomnom/assetfinder), [amass](https://github.com/OWASP/Amass), [findomain](https://github.com/Findomain/Findomain), [crobat](https://github.com/cgboal/sonarsearch), [waybackurls](https://github.com/tomnomnom/waybackurls), [github-subdomains](https://github.com/gwen001/github-subdomains), [Anubis](https://jldc.me), [gauplus](https://github.com/bp0lr/gauplus) and [mildew](https://github.com/daehee/mildew))
   - Certificate transparency ([ctfr](https://github.com/UnaPibaGeek/ctfr), [tls.bufferover](tls.bufferover.run) and [dns.bufferover](dns.bufferover.run))
   - Bruteforce ([puredns](https://github.com/d3mondev/puredns))
   - Permutations ([DNScewl](https://github.com/codingo/DNSCewl))
   - JS files & Source Code Scraping ([gospider](https://github.com/jaeles-project/gospider), [analyticsRelationship](https://github.com/Josue87/analyticsRelationship))
-  - CNAME Records ([dnsx](https://github.com/projectdiscovery/dnsx))
+  - DNS Records ([dnsx](https://github.com/projectdiscovery/dnsx))
   - Nuclei Sub TKO templates ([nuclei](https://github.com/projectdiscovery/nuclei))
-- Web Prober ([httpx](https://github.com/projectdiscovery/httpx) and [naabu](https://github.com/projectdiscovery/naabu))
-- Web screenshot ([gowitness](https://github.com/sensepost/gowitness))
-- Web templates scanner ([nuclei](https://github.com/projectdiscovery/nuclei))
+- Web Prober ([httpx](https://github.com/projectdiscovery/httpx) and [unimap](https://github.com/Edu4rdSHL/unimap))
+- Web screenshot ([webscreenshot](https://github.com/maaaaz/webscreenshot))
+- Web templates scanner ([nuclei](https://github.com/projectdiscovery/nuclei) and [nuclei geeknik](https://github.com/geeknik/the-nuclei-templates.git))
 - IP and subdomains WAF checker ([cf-check](https://github.com/dwisiswant0/cf-check) and [wafw00f](https://github.com/EnableSecurity/wafw00f))
-- Port Scanner (Active with [nmap](https://github.com/nmap/nmap) and passive with [shodan-cli](https://cli.shodan.io/))
-- Url extraction ([waybackurls](https://github.com/tomnomnom/waybackurls), [gauplus](https://github.com/bp0lr/gauplus), [gospider](https://github.com/jaeles-project/gospider), [github-endpoints](https://gist.github.com/six2dez/d1d516b606557526e9a78d7dd49cacd3))
+- Port Scanner (Active with [nmap](https://github.com/nmap/nmap) and passive with [shodan-cli](https://cli.shodan.io/), Subdomains IP resolution with [resolveDomains](https://github.com/Josue87/resolveDomains))
+- Url extraction ([waybackurls](https://github.com/tomnomnom/waybackurls), [gauplus](https://github.com/bp0lr/gauplus), [gospider](https://github.com/jaeles-project/gospider), [github-endpoints](https://gist.github.com/six2dez/d1d516b606557526e9a78d7dd49cacd3) and [JSA](https://github.com/w9w/JSA))
 - Pattern Search ([gf](https://github.com/tomnomnom/gf) and [gf-patterns](https://github.com/1ndianl33t/Gf-Patterns))
 - Param discovery ([paramspider](https://github.com/devanshbatham/ParamSpider) and [arjun](https://github.com/s0md3v/Arjun))
 - XSS ([dalfox](https://github.com/hahwul/dalfox))
 - Open redirect ([Openredirex](https://github.com/devanshbatham/OpenRedireX))
-- SSRF (headers [asyncio_ssrf.py](https://gist.github.com/h4ms1k/adcc340495d418fcd72ec727a116fea2) and param values with [ffuf](https://github.com/ffuf/ffuf))
+- SSRF (headers [interactsh](https://github.com/projectdiscovery/interactsh) and param values with [ffuf](https://github.com/ffuf/ffuf))
 - CRLF ([crlfuzz](https://github.com/dwisiswant0/crlfuzz))
 - Favicon Real IP ([fav-up](https://github.com/pielco11/fav-up))
 - Javascript analysis ([LinkFinder](https://github.com/GerbenJavado/LinkFinder), scripts from [JSFScan](https://github.com/KathanP19/JSFScan.sh))
 - Fuzzing ([ffuf](https://github.com/ffuf/ffuf))
 - Cors ([Corsy](https://github.com/s0md3v/Corsy))
-- LFI Checks (manual/[ffuf](https://github.com/ffuf/ffuf))
+- LFI Checks ([ffuf](https://github.com/ffuf/ffuf))
 - SQLi Check ([SQLMap](https://github.com/sqlmapproject/sqlmap))
-- SSTI (manual/[ffuf](https://github.com/ffuf/ffuf))
+- SSTI ([ffuf](https://github.com/ffuf/ffuf))
 - CMS Scanner ([CMSeeK](https://github.com/Tuhinshubhra/CMSeeK))
 - SSL tests ([testssl](https://github.com/drwetter/testssl.sh))
 - Multithread in some steps ([Interlace](https://github.com/codingo/Interlace))
@@ -416,12 +425,15 @@ AXIOM_FLEET_SHUTDOWN=true
 - DNS Zone Transfer ([dnsrecon](https://github.com/darkoperator/dnsrecon))
 - Docker container included and [DockerHub](https://hub.docker.com/r/six2dez/reconftw) integration
 - Cloud providers check ([ip2provider](https://github.com/oldrho/ip2provider))
+- URL sorting by extension
+- Wordlist generation
+- Allows IP/CIDR as target
 - Resume the scan from last performed step
 - Custom output folder option
 - All in one installer/updater script compatible with most distros
 - Diff support for continuous running (cron mode)
 - Support for targets with multiple domains
-- RaspberryPi/ARM support
+- Raspberry Pi/ARM support
 - Send scan results zipped over Slack, Discord and Telegram
 - 6 modes (recon, passive, subdomains, web, osint and all)
 - Out of Scope Support
 
@@ -429,7 +441,7 @@
 
 # Mindmap/Workflow
 
-![Mindmap](images/mindmap_0321.png)
+![Mindmap](images/mindmap.png)
 
 ## Data Keep
 
diff --git a/images/mindmap.png b/images/mindmap.png
new file mode 100755
index 00000000..68c56fe2
Binary files /dev/null and b/images/mindmap.png differ
diff --git a/images/mindmap_0321.png b/images/mindmap_0321.png
deleted file mode 100644
index 7517f371..00000000
Binary files a/images/mindmap_0321.png and /dev/null differ
diff --git a/install.sh b/install.sh
index 02ff5bda..94adf259 100755
--- a/install.sh
+++ b/install.sh
@@ -93,16 +93,17 @@ install_apt(){
     eval $SUDO apt update -y $DEBUG_STD
     eval $SUDO DEBIAN_FRONTEND="noninteractive" apt install chromium-browser -y $DEBUG_STD
     eval $SUDO DEBIAN_FRONTEND="noninteractive" apt install chromium -y $DEBUG_STD
-    eval $SUDO DEBIAN_FRONTEND="noninteractive" apt install python3 python3-pip gcc build-essential cmake ruby git curl libpcap-dev wget zip python3-dev pv dnsutils libssl-dev libffi-dev libxml2-dev libxslt1-dev zlib1g-dev nmap jq apt-transport-https lynx tor medusa xvfb -y $DEBUG_STD
+    eval $SUDO DEBIAN_FRONTEND="noninteractive" apt install python3 python3-pip build-essential gcc cmake ruby git curl libpcap-dev wget zip python3-dev pv dnsutils libssl-dev libffi-dev libxml2-dev libxslt1-dev zlib1g-dev nmap jq apt-transport-https lynx tor medusa xvfb -y $DEBUG_STD
     eval $SUDO systemctl enable tor $DEBUG_STD
 }
 
 install_yum(){
-    eval $SUDO yum install python3 python3-pip ruby git curl libpcap-devel chromium wget zip openssl-devel bind-utils python3-devel lynx libxslt-devel libffi-devel libxml2-devel nmap pv zlib-devel jq -y $DEBUG_STD
+    eval $SUDO yum groupinstall "Development Tools" -y $DEBUG_STD
+    eval $SUDO yum install python3 python3-pip gcc cmake ruby git curl libpcap-devel wget zip python3-devel pv bind-utils openssl-devel libffi-devel libxml2-devel libxslt-devel zlib-devel nmap jq lynx tor medusa xorg-x11-server-xvfb -y $DEBUG_STD
 }
 
 install_pacman(){
-    eval $SUDO pacman -Sy install python python-pip dnsutils ruby curl zip git libpcap nmap chromium wget jq medusa tor lynx -y $DEBUG_STD
+    eval $SUDO pacman -Sy --noconfirm python python-pip base-devel gcc cmake ruby git curl libpcap wget zip pv bind openssl libffi libxml2 libxslt zlib nmap jq lynx tor medusa xorg-server-xvfb $DEBUG_STD
     eval $SUDO systemctl enable --now tor.service $DEBUG_STD
 }
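install.sh exposes one function per package manager; a dispatch along these lines (illustrative only, the real detection logic lives outside this hunk) is what would tie them together:

```bash
# Hypothetical dispatch for the three installers above. install_apt,
# install_yum and install_pacman are real functions in install.sh; the
# command -v detection here is an assumption, not the script's own code.
if command -v apt >/dev/null 2>&1; then
    install_apt
elif command -v yum >/dev/null 2>&1; then
    install_yum
elif command -v pacman >/dev/null 2>&1; then
    install_pacman
else
    echo "Unsupported distro: install the dependencies manually" >&2
fi
```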
-f "$called_fn_dir/.${FUNCNAME[0]}" ] || [ "$DIFF" = true ]; } && [ "$PARAMS" = true ]; then start_func "Parameter Discovery" - printf "${yellow}\n\n Running : Searching params with paramspider${reset}\n" - if [ -s "webs/webs.txt" ]; then - cat webs/webs.txt | sed -r "s/https?:\/\///" | anew -q .tmp/probed_nohttp.txt - [ -s ".tmp/probed_nohttp.txt" ] && interlace -tL .tmp/probed_nohttp.txt -threads 10 -c "python3 $tools/ParamSpider/paramspider.py -d _target_ -l high -q --exclude eot,jpg,jpeg,gif,css,tif,tiff,png,ttf,otf,woff,woff2,ico,pdf,svg,txt,js" &>/dev/null - cat output/*.txt 2>>"$LOGFILE" | anew -q .tmp/param_tmp.txt - sed '/^FUZZ/d' -i .tmp/param_tmp.txt - rm -rf output/ 2>>"$LOGFILE" + if [ -s ".tmp/url_extract_uddup.txt" ]; then if [ "$DEEP" = true ]; then - printf "${yellow}\n\n Running : Checking ${domain} with Arjun${reset}\n" - [ -s ".tmp/param_tmp.txt" ] && arjun -i .tmp/param_tmp.txt -t $ARJUN_THREADS -oT webs/param.txt 2>>"$LOGFILE" &>/dev/null + arjun -i .tmp/url_extract_uddup.txt -t $ARJUN_THREADS -oT webs/param.txt 2>>"$LOGFILE" &>/dev/null + elif [[ $(cat .tmp/url_extract_uddup.txt | wc -l) -le 50 ]]; then + arjun -i .tmp/url_extract_uddup.txt -t $ARJUN_THREADS -oT webs/param.txt 2>>"$LOGFILE" &>/dev/null else - if [[ $(cat .tmp/param_tmp.txt | wc -l) -le 50 ]]; then - printf "${yellow}\n\n Running : Checking ${domain} with Arjun${reset}\n" - [ -s ".tmp/param_tmp.txt" ] && arjun -i .tmp/param_tmp.txt -t $ARJUN_THREADS -oT webs/param.txt 2>>"$LOGFILE" &>/dev/null - else - [ -s ".tmp/param_tmp.txt" ] && cp .tmp/param_tmp.txt webs/param.txt - fi + end_func "Skipping Param discovery: Too many URLs to test, try with --deep flag" ${FUNCNAME[0]} fi + [ -s "webs/param.txt" ] && cat webs/param.txt | anew -q webs/url_extract.txt fi end_func "Results are saved in $domain/webs/param.txt" ${FUNCNAME[0]} else @@ -887,6 +878,10 @@ function urlchecks(){ gospider -S webs/webs.txt --js -t $GOSPIDER_THREADS -d 2 --sitemap --robots -w -r > .tmp/gospider.txt fi fi + interlace -tL webs/webs.txt -threads 10 -c "python3 $tools/ParamSpider/paramspider.py -d _target_ -l high -q -o _output_/_cleantarget_" -o output &>/dev/null + find output/ -type f -exec cat {} \; | sed '/^FUZZ/d' | anew -q .tmp/param_tmp.txt + rm -rf output/ 2>>"$LOGFILE" + [ -s ".tmp/param_tmp.txt" ] && cat .tmp/param_tmp.txt | anew -q .tmp/gospider.txt sed -i '/^.\{2048\}./d' .tmp/gospider.txt [ -s ".tmp/gospider.txt" ] && cat .tmp/gospider.txt | grep -Eo 'https?://[^ ]+' | sed 's/]$//' | grep ".$domain" | anew -q .tmp/url_extract_tmp.txt if [ -s "${GITHUB_TOKENS}" ]; then @@ -897,7 +892,7 @@ function urlchecks(){ if [ "$DEEP" = true ]; then [ -s "js/url_extract_js.txt" ] && cat js/url_extract_js.txt | python3 $tools/JSA/jsa.py | anew -q .tmp/url_extract_tmp.txt fi - cat .tmp/url_extract_tmp.txt webs/param.txt 2>>"$LOGFILE" | grep "${domain}" | grep "=" | qsreplace -a 2>>"$LOGFILE" | grep -Eiv "\.(eot|jpg|jpeg|gif|css|tif|tiff|png|ttf|otf|woff|woff2|ico|pdf|svg|txt|js)$" | anew -q .tmp/url_extract_tmp2.txt + [ -s ".tmp/url_extract_tmp.txt" ] && cat .tmp/url_extract_tmp.txt | grep "${domain}" | grep "=" | qsreplace -a 2>>"$LOGFILE" | grep -Eiv "\.(eot|jpg|jpeg|gif|css|tif|tiff|png|ttf|otf|woff|woff2|ico|pdf|svg|txt|js)$" | anew -q .tmp/url_extract_tmp2.txt [ -s ".tmp/url_extract_tmp2.txt" ] && cat .tmp/url_extract_tmp2.txt | urldedupe -s -qs | anew -q .tmp/url_extract_uddup.txt 2>>"$LOGFILE" &>/dev/null NUMOFLINES=$(cat .tmp/url_extract_uddup.txt 2>>"$LOGFILE" | anew webs/url_extract.txt | wc -l) notification "${NUMOFLINES} 
@@ -1138,8 +1133,8 @@ function ssrf_checks(){
         cat gf/ssrf.txt | qsreplace ${COLLAB_SERVER_FIX} | anew -q .tmp/tmp_ssrf.txt
         cat gf/ssrf.txt | qsreplace ${COLLAB_SERVER_URL} | anew -q .tmp/tmp_ssrf.txt
         ffuf -v -H "${HEADER}" -t $FFUF_THREADS -w .tmp/tmp_ssrf.txt -u FUZZ 2>>"$LOGFILE" | grep "URL" | sed 's/| URL | //' | anew -q vulns/ssrf_requests_url.txt
-        ffuf -v -w .tmp/tmp_ssrf.txt:W1,.tmp/headers_inject.txt:W2 -H "${HEADER}" -H "W2: ${COLLAB_SERVER_FIX}" -t $FFUF_THREADS -u W1 2>>"$LOGFILE" | anew -q vulns/ssrf_requests_headers.txt
-        ffuf -v -w .tmp/tmp_ssrf.txt:W1,.tmp/headers_inject.txt:W2 -H "${HEADER}" -H "W2: ${COLLAB_SERVER_URL}" -t $FFUF_THREADS -u W1 2>>"$LOGFILE" | anew -q vulns/ssrf_requests_headers.txt
+        ffuf -v -w .tmp/tmp_ssrf.txt:W1,$tools/headers_inject.txt:W2 -H "${HEADER}" -H "W2: ${COLLAB_SERVER_FIX}" -t $FFUF_THREADS -u W1 2>>"$LOGFILE" | anew -q vulns/ssrf_requests_headers.txt
+        ffuf -v -w .tmp/tmp_ssrf.txt:W1,$tools/headers_inject.txt:W2 -H "${HEADER}" -H "W2: ${COLLAB_SERVER_URL}" -t $FFUF_THREADS -u W1 2>>"$LOGFILE" | anew -q vulns/ssrf_requests_headers.txt
         sleep 5
         [ -s ".tmp/ssrf_callback.txt" ] && cat .tmp/ssrf_callback.txt | tail -n+11 | anew -q vulns/ssrf_callback.txt && NUMOFLINES=$(cat .tmp/ssrf_callback.txt | tail -n+12 | wc -l)
         notification "SSRF: ${NUMOFLINES} callbacks received" info
@@ -1149,8 +1144,8 @@ function ssrf_checks(){
         cat gf/ssrf.txt | qsreplace ${COLLAB_SERVER_FIX} | anew -q .tmp/tmp_ssrf.txt
         cat gf/ssrf.txt | qsreplace ${COLLAB_SERVER_URL} | anew -q .tmp/tmp_ssrf.txt
         ffuf -v -H "${HEADER}" -t $FFUF_THREADS -w .tmp/tmp_ssrf.txt -u FUZZ 2>>"$LOGFILE" | grep "URL" | sed 's/| URL | //' | anew -q vulns/ssrf_requests_url.txt
-        ffuf -v -w .tmp/tmp_ssrf.txt:W1,.tmp/headers_inject.txt:W2 -H "${HEADER}" -H "W2: ${COLLAB_SERVER_FIX}" -t $FFUF_THREADS -u W1 2>>"$LOGFILE" | anew -q vulns/ssrf_requests_headers.txt
-        ffuf -v -w .tmp/tmp_ssrf.txt:W1,.tmp/headers_inject.txt:W2 -H "${HEADER}" -H "W2: ${COLLAB_SERVER_URL}" -t $FFUF_THREADS -u W1 2>>"$LOGFILE" | anew -q vulns/ssrf_requests_headers.txt
+        ffuf -v -w .tmp/tmp_ssrf.txt:W1,$tools/headers_inject.txt:W2 -H "${HEADER}" -H "W2: ${COLLAB_SERVER_FIX}" -t $FFUF_THREADS -u W1 2>>"$LOGFILE" | anew -q vulns/ssrf_requests_headers.txt
+        ffuf -v -w .tmp/tmp_ssrf.txt:W1,$tools/headers_inject.txt:W2 -H "${HEADER}" -H "W2: ${COLLAB_SERVER_URL}" -t $FFUF_THREADS -u W1 2>>"$LOGFILE" | anew -q vulns/ssrf_requests_headers.txt
         sleep 5
         [ -s ".tmp/ssrf_callback.txt" ] && cat .tmp/ssrf_callback.txt | tail -n+11 | anew -q vulns/ssrf_callback.txt && NUMOFLINES=$(cat .tmp/ssrf_callback.txt | tail -n+12 | wc -l)
         notification "SSRF: ${NUMOFLINES} callbacks received" info
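Both ffuf invocations above use two aliased wordlists in ffuf's default clusterbomb mode: W1 supplies the URL and W2 supplies a header name whose value points at the collaborator server, so hits surface in the callback log rather than in ffuf's own output. The only change in these hunks is reading the header list from `$tools/headers_inject.txt` instead of a temp file. A stripped-down equivalent (urls.txt and headers.txt are stand-ins for `.tmp/tmp_ssrf.txt` and `$tools/headers_inject.txt`, and the collaborator URL is a placeholder):

```bash
# Two aliased wordlists; clusterbomb mode tries every URL x header combination.
ffuf -v -w urls.txt:W1 -w headers.txt:W2 \
    -H "W2: https://your-collab-server.example" \
    -u W1
# Successful SSRFs show up as requests in the collaborator's callback log,
# not as matches in ffuf's output.
```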
@@ -1212,8 +1207,8 @@ function ssti(){
         if [ -s "gf/ssti.txt" ]; then
             cat gf/ssti.txt | qsreplace FUZZ | anew -q .tmp/tmp_ssti.txt
             for url in $(cat .tmp/tmp_ssti.txt); do
-                ffuf -v -t $FFUF_THREADS -H "${HEADER}" -w $ssti_wordlist -u $url -mr "ssti49" 2>>"$LOGFILE" | grep "URL" | sed 's/| URL | //' | anew -q vulns/ssti.txt
-            done
+                ffuf -v -t $FFUF_THREADS -H "${HEADER}" -w $ssti_wordlist -u $url -mr "ssti49" 2>>"$LOGFILE" | grep "URL" | sed 's/| URL | //' | anew -q vulns/ssti.txt
+            done
         fi
         end_func "Results are saved in vulns/ssti.txt" ${FUNCNAME[0]}
     else
@@ -1232,7 +1227,7 @@ function sqli(){
         start_func "SQLi checks"
         if [ -s "gf/sqli.txt" ]; then
             cat gf/sqli.txt | qsreplace FUZZ | anew -q .tmp/tmp_sqli.txt
-            interlace -tL .tmp/tmp_sqli.txt -threads 10 -c "python3 $tools/sqlmap/sqlmap.py -u _target_ -b --batch --disable-coloring --random-agent --output-dir=vulns/sqlmap" &>/dev/null
+            interlace -tL .tmp/tmp_sqli.txt -threads 10 -c "python3 $tools/sqlmap/sqlmap.py -u _target_ -b --batch --disable-coloring --random-agent --output-dir=_output_" -o vulns/sqlmap &>/dev/null
         fi
         end_func "Results are saved in vulns/sqlmap folder" ${FUNCNAME[0]}
     else
@@ -1690,8 +1685,8 @@ function recon(){
     waf_checks
     nuclei_check
     fuzz
-    params
     urlchecks
+    params
     jschecks
     cloudprovider
     cms_scanner
@@ -1817,8 +1812,8 @@ function multi_recon(){
         cd "$dir" || { echo "Failed to cd directory '$dir' in ${FUNCNAME[0]} @ line ${LINENO}"; exit 1; }
         loopstart=$(date +%s)
         fuzz
-        params
         urlchecks
+        params
         jschecks
         currently=$(date +"%H:%M:%S")
         loopend=$(date +%s)
@@ -1877,8 +1872,8 @@ function webs_menu(){
     fuzz
     4xxbypass
     cors
-    params
     urlchecks
+    params
     url_gf
     jschecks
     wordlist_gen
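The params/urlchecks swap in the three menus above is a dependency fix: params() now reads `.tmp/url_extract_uddup.txt`, which urlchecks() produces, so urlchecks() has to run first. A toy reproduction of that ordering constraint (the function bodies below are stand-ins, not reconFTW's code):

```bash
# Toy version of the producer/consumer relationship; only the file path
# comes from the hunks above.
mkdir -p .tmp
urlchecks() { printf 'https://a.example.com/page?q=1\n' > .tmp/url_extract_uddup.txt; }
params()    { [ -s ".tmp/url_extract_uddup.txt" ] && echo "params has input"; }
urlchecks   # must run first: it produces .tmp/url_extract_uddup.txt
params      # consumes it; with the old order this check failed silently
```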
".tmp/github-endpoints.txt" ] && cat .tmp/github-endpoints.txt | anew -q .tmp/url_extract_tmp.txt @@ -927,7 +922,7 @@ function urlchecks(){ if [ "$DEEP" = true ]; then [ -s "js/url_extract_js.txt" ] && cat js/url_extract_js.txt | python3 $tools/JSA/jsa.py | anew -q .tmp/url_extract_tmp.txt fi - cat .tmp/url_extract_tmp.txt webs/param.txt 2>>"$LOGFILE" | grep "${domain}" | grep "=" | qsreplace -a 2>>"$LOGFILE" | grep -Eiv "\.(eot|jpg|jpeg|gif|css|tif|tiff|png|ttf|otf|woff|woff2|ico|pdf|svg|txt|js)$" | anew -q .tmp/url_extract_tmp2.txt + [ -s ".tmp/url_extract_tmp.txt" ] && cat .tmp/url_extract_tmp.txt | grep "${domain}" | grep "=" | qsreplace -a 2>>"$LOGFILE" | grep -Eiv "\.(eot|jpg|jpeg|gif|css|tif|tiff|png|ttf|otf|woff|woff2|ico|pdf|svg|txt|js)$" | anew -q .tmp/url_extract_tmp2.txt [ -s ".tmp/url_extract_tmp2.txt" ] && cat .tmp/url_extract_tmp2.txt | urldedupe -s -qs | anew -q .tmp/url_extract_uddup.txt 2>>"$LOGFILE" &>/dev/null NUMOFLINES=$(cat .tmp/url_extract_uddup.txt 2>>"$LOGFILE" | anew webs/url_extract.txt | wc -l) notification "${NUMOFLINES} new urls with params" info @@ -1167,8 +1162,8 @@ function ssrf_checks(){ cat gf/ssrf.txt | qsreplace ${COLLAB_SERVER_FIX} | anew -q .tmp/tmp_ssrf.txt cat gf/ssrf.txt | qsreplace ${COLLAB_SERVER_URL} | anew -q .tmp/tmp_ssrf.txt ffuf -v -H "${HEADER}" -t $FFUF_THREADS -w .tmp/tmp_ssrf.txt -u FUZZ 2>>"$LOGFILE" | grep "URL" | sed 's/| URL | //' | anew -q vulns/ssrf_requests_url.txt - ffuf -v -w .tmp/tmp_ssrf.txt:W1,.tmp/headers_inject.txt:W2 -H "${HEADER}" -H "W2: ${COLLAB_SERVER_FIX}" -t $FFUF_THREADS -u W1 2>>"$LOGFILE" | anew -q vulns/ssrf_requests_headers.txt - ffuf -v -w .tmp/tmp_ssrf.txt:W1,.tmp/headers_inject.txt:W2 -H "${HEADER}" -H "W2: ${COLLAB_SERVER_URL}" -t $FFUF_THREADS -u W1 2>>"$LOGFILE" | anew -q vulns/ssrf_requests_headers.txt + ffuf -v -w .tmp/tmp_ssrf.txt:W1,$tools/headers_inject.txt:W2 -H "${HEADER}" -H "W2: ${COLLAB_SERVER_FIX}" -t $FFUF_THREADS -u W1 2>>"$LOGFILE" | anew -q vulns/ssrf_requests_headers.txt + ffuf -v -w .tmp/tmp_ssrf.txt:W1,$tools/headers_inject.txt:W2 -H "${HEADER}" -H "W2: ${COLLAB_SERVER_URL}" -t $FFUF_THREADS -u W1 2>>"$LOGFILE" | anew -q vulns/ssrf_requests_headers.txt sleep 5 [ -s ".tmp/ssrf_callback.txt" ] && cat .tmp/ssrf_callback.txt | tail -n+11 | anew -q vulns/ssrf_callback.txt && NUMOFLINES=$(cat .tmp/ssrf_callback.txt | tail -n+12 | wc -l) notification "SSRF: ${NUMOFLINES} callbacks received" info @@ -1178,8 +1173,8 @@ function ssrf_checks(){ cat gf/ssrf.txt | qsreplace ${COLLAB_SERVER_FIX} | anew -q .tmp/tmp_ssrf.txt cat gf/ssrf.txt | qsreplace ${COLLAB_SERVER_URL} | anew -q .tmp/tmp_ssrf.txt ffuf -v -H "${HEADER}" -t $FFUF_THREADS -w .tmp/tmp_ssrf.txt -u FUZZ 2>>"$LOGFILE" | grep "URL" | sed 's/| URL | //' | anew -q vulns/ssrf_requests_url.txt - ffuf -v -w .tmp/tmp_ssrf.txt:W1,.tmp/headers_inject.txt:W2 -H "${HEADER}" -H "W2: ${COLLAB_SERVER_FIX}" -t $FFUF_THREADS -u W1 2>>"$LOGFILE" | anew -q vulns/ssrf_requests_headers.txt - ffuf -v -w .tmp/tmp_ssrf.txt:W1,.tmp/headers_inject.txt:W2 -H "${HEADER}" -H "W2: ${COLLAB_SERVER_URL}" -t $FFUF_THREADS -u W1 2>>"$LOGFILE" | anew -q vulns/ssrf_requests_headers.txt + ffuf -v -w .tmp/tmp_ssrf.txt:W1,$tools/headers_inject.txt:W2 -H "${HEADER}" -H "W2: ${COLLAB_SERVER_FIX}" -t $FFUF_THREADS -u W1 2>>"$LOGFILE" | anew -q vulns/ssrf_requests_headers.txt + ffuf -v -w .tmp/tmp_ssrf.txt:W1,$tools/headers_inject.txt:W2 -H "${HEADER}" -H "W2: ${COLLAB_SERVER_URL}" -t $FFUF_THREADS -u W1 2>>"$LOGFILE" | anew -q vulns/ssrf_requests_headers.txt sleep 5 [ -s 
".tmp/ssrf_callback.txt" ] && cat .tmp/ssrf_callback.txt | tail -n+11 | anew -q vulns/ssrf_callback.txt && NUMOFLINES=$(cat .tmp/ssrf_callback.txt | tail -n+12 | wc -l) notification "SSRF: ${NUMOFLINES} callbacks received" info @@ -1261,7 +1256,7 @@ function sqli(){ start_func "SQLi checks" if [ -s "gf/sqli.txt" ]; then cat gf/sqli.txt | qsreplace FUZZ | anew -q .tmp/tmp_sqli.txt - interlace -tL .tmp/tmp_sqli.txt -threads 10 -c "python3 $tools/sqlmap/sqlmap.py -u _target_ -b --batch --disable-coloring --random-agent --output-dir=vulns/sqlmap" &>/dev/null + interlace -tL .tmp/tmp_sqli.txt -threads 10 -c "python3 $tools/sqlmap/sqlmap.py -u _target_ -b --batch --disable-coloring --random-agent --output-dir=_output_" -o vulns/sqlmap &>/dev/null fi end_func "Results are saved in vulns/sqlmap folder" ${FUNCNAME[0]} else @@ -1802,8 +1797,8 @@ function recon(){ waf_checks nuclei_check fuzz - params urlchecks + params jschecks axiom_shutdown @@ -1935,8 +1930,8 @@ function multi_recon(){ cd "$dir" || { echo "Failed to cd directory '$dir' in ${FUNCNAME[0]} @ line ${LINENO}"; exit 1; } loopstart=$(date +%s) fuzz - params urlchecks + params jschecks currently=$(date +"%H:%M:%S") loopend=$(date +%s) @@ -2003,8 +1998,8 @@ function webs_menu(){ fuzz 4xxbypass cors - params urlchecks + params url_gf jschecks wordlist_gen