diff --git a/README.md b/README.md
index c31c0555..5b3b1843 100644
--- a/README.md
+++ b/README.md
@@ -7,8 +7,8 @@
-
-
+
+
@@ -144,9 +144,9 @@ Yes! reconFTW can also be easily deployed with Terraform and Ansible to AWS, if
:point_right: Click here to view default config file :point_left:
```yaml
-#################################################################
+#############################################
# reconFTW config file #
-#################################################################
+#############################################
# General values
tools=~/Tools # Path installed tools
@@ -162,8 +162,6 @@ proxy_url="http://127.0.0.1:8080/" # Proxy url
install_golang=true # Set it to false if you already have Golang configured and ready
upgrade_tools=true
upgrade_before_running=false # Upgrade tools before running
-#SPINNY_FRAMES=(. .. ... .... ..... " ...." " ..." " .." " ." " " )
-SPINNY_DELAY=0.1
#dir_output=/custom/output/path
# Golang Vars (Comment or change on your own)
@@ -201,6 +199,8 @@ DOMAIN_INFO=true # whois info
REVERSE_WHOIS=true # amass intel reverse whois info, takes some time
IP_INFO=true # Reverse IP search, geolocation and whois
API_LEAKS=true # Check for API leaks
+THIRD_PARTIES=true # Check for third-party misconfigurations
+SPOOF=true # Check for spoofable domains
METAFINDER_LIMIT=20 # Max 250
# Subdomains
@@ -238,8 +238,10 @@ UNCOMMON_PORTS_WEB="81,300,591,593,832,981,1010,1311,1099,2082,2095,2096,2480,30
# Host
FAVICON=true # Check Favicon domain discovery
PORTSCANNER=true # Enable or disable the whole Port scanner module
+GEO_INFO=true # Fetch geolocation info
PORTSCAN_PASSIVE=true # Port scanner with Shodan
PORTSCAN_ACTIVE=true # Port scanner with nmap
+PORTSCAN_ACTIVE_OPTIONS="--top-ports 200 -sV -n -Pn --open --max-retries 2 --script vulners"
CDN_IP=true # Check which IPs belongs to CDN
# Web analysis
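For orientation, a standalone sketch of what the new `PORTSCAN_ACTIVE_OPTIONS` amounts to, assuming reconFTW expands the string verbatim into its nmap command line; the `-iL`/`-oA` paths are placeholders, only the option string comes from the config:

```bash
# Hypothetical equivalent of the active scan: top 200 ports, service/version
# detection, no DNS resolution or ping, open ports only, vulners NSE script.
nmap --top-ports 200 -sV -n -Pn --open --max-retries 2 --script vulners \
	-iL hosts/ips.txt -oA hosts/portscan_active
```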
@@ -256,6 +258,7 @@ URL_GF=true # Url patterns classification
URL_EXT=true # Returns a list of files divided by extension
JSCHECKS=true # JS analysis
FUZZ=true # Web fuzzing
+IIS_SHORTNAME=true # Check for IIS shortname vulnerability
CMS_SCANNER=true # CMS scanner
WORDLIST=true # Wordlist generation
ROBOTSWORDLIST=true # Check historic disallow entries on waybackMachine
@@ -292,12 +295,12 @@ DEEP=false # DEEP mode, really slow and don't care about the number of results
DEEP_LIMIT=500 # First limit to not run unless you run DEEP
DEEP_LIMIT2=1500 # Second limit to not run unless you run DEEP
DIFF=false # Diff function, run every module over an already scanned target, printing only new findings (but save everything)
-REMOVETMP=false # Delete temporary files after execution (to free up space)
+REMOVETMP=true # Delete temporary files after execution (to free up space)
REMOVELOG=false # Delete logs after execution
PROXY=false # Send to proxy the websites found
SENDZIPNOTIFY=false # Send to zip the results (over notify)
PRESERVE=true # set to true to avoid deleting the .called_fn files on really large scans
-FFUF_FLAGS=" -mc all -fc 404 -ach -sf -of json" # Ffuf flags
+FFUF_FLAGS=" -mc all -fc 404 -sf -noninteractive -of json" # Ffuf flags
HTTPX_FLAGS=" -follow-redirects -random-agent -status-code -silent -title -web-server -tech-detect -location -content-length" # Httpx flags for simple web probing
# HTTP options
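As a sketch, assuming reconFTW splices `FFUF_FLAGS` directly into its ffuf invocation, the updated flags correspond to a call like the following; wordlist and target URL are placeholders:

```bash
# Illustrative only: match all codes except 404, stop when most responses are
# 403 (-sf), disable the interactive console, and write JSON output.
ffuf -mc all -fc 404 -sf -noninteractive -of json \
	-w /path/to/wordlist.txt -u https://target.tld/FUZZ -o ffuf_output.json
```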
@@ -318,7 +321,6 @@ PUREDNS_TRUSTED_LIMIT=400
PUREDNS_WILDCARDTEST_LIMIT=30
PUREDNS_WILDCARDBATCH_LIMIT=1500000
RESOLVE_DOMAINS_THREADS=150
-PPFUZZ_THREADS=30
DNSVALIDATOR_THREADS=200
INTERLACE_THREADS=10
TLSX_THREADS=1000
@@ -482,6 +484,8 @@ reset='\033[0m'
- Google Dorks ([dorks_hunter](https://github.com/six2dez/dorks_hunter))
- Github Dorks ([gitdorks_go](https://github.com/damit5/gitdorks_go))
- GitHub org analysis ([enumerepo](https://github.com/trickest/enumerepo), [trufflehog](https://github.com/trufflesecurity/trufflehog) and [gitleaks](https://github.com/gitleaks/gitleaks))
+- Third-party misconfigurations ([misconfig-mapper](https://github.com/intigriti/misconfig-mapper))
+- Spoofable domains ([Spoofy](https://github.com/MattKeeley/Spoofy))
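The third-party check boils down to a single misconfig-mapper call, mirroring the one added to `reconftw.sh` further down in this diff; a standalone sketch (the company name is a placeholder):

```bash
# Run every misconfig-mapper service template against a company name and keep
# only the findings (grep -v drops the "[-]" not-vulnerable lines).
cd ~/Tools/misconfig-mapper
./misconfig-mapper -target "company" -service "*" | grep -v "\[-\]"
```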
## Subdomains
@@ -508,6 +512,7 @@ reset='\033[0m'
- Port Scanner (Active with [nmap](https://github.com/nmap/nmap) and passive with [smap](https://github.com/s0md3v/Smap))
- Port services vulnerability checks ([vulners](https://github.com/vulnersCom/nmap-vulners))
- Password spraying ([brutespray](https://github.com/x90skysn3k/brutespray))
+- Geolocation info (ipapi.co)
## Webs
@@ -518,7 +523,8 @@ reset='\033[0m'
- Url extraction ([gau](https://github.com/lc/gau),[waymore](https://github.com/xnl-h4ck3r/waymore), [katana](https://github.com/projectdiscovery/katana), [github-endpoints](https://gist.github.com/six2dez/d1d516b606557526e9a78d7dd49cacd3) and [JSA](https://github.com/w9w/JSA))
- URL patterns Search and filtering ([urless](https://github.com/xnl-h4ck3r/urless), [gf](https://github.com/tomnomnom/gf) and [gf-patterns](https://github.com/1ndianl33t/Gf-Patterns))
- Favicon Real IP ([fav-up](https://github.com/pielco11/fav-up))
-- Javascript analysis ([subjs](https://github.com/lc/subjs), [JSA](https://github.com/w9w/JSA), [xnLinkFinder](https://github.com/xnl-h4ck3r/xnLinkFinder), [getjswords](https://github.com/m4ll0k/BBTz), [mantra](https://github.com/MrEmpy/mantra))
+- Javascript analysis ([subjs](https://github.com/lc/subjs), [JSA](https://github.com/w9w/JSA), [xnLinkFinder](https://github.com/xnl-h4ck3r/xnLinkFinder), [getjswords](https://github.com/m4ll0k/BBTz), [mantra](https://github.com/MrEmpy/mantra), [jsluice](https://github.com/BishopFox/jsluice))
+- Sourcemap JS extraction ([sourcemapper](https://github.com/denandz/sourcemapper))
- Fuzzing ([ffuf](https://github.com/ffuf/ffuf))
- URL sorting by extension
- Wordlist generation
diff --git a/images/mindmap_obsidian.png b/images/mindmap_obsidian.png
index e78364fe..46dd3855 100644
Binary files a/images/mindmap_obsidian.png and b/images/mindmap_obsidian.png differ
diff --git a/install.sh b/install.sh
index e7b98376..a4bca791 100755
--- a/install.sh
+++ b/install.sh
@@ -75,6 +75,8 @@ gotools["nmapurls"]="go install -v github.com/sdcampbell/nmapurls@latest"
gotools["shortscan"]="go install -v github.com/bitquark/shortscan/cmd/shortscan@latest"
gotools["sns"]="go install github.com/sw33tLie/sns@latest"
gotools["ppmap"]="go install -v github.com/kleiton0x00/ppmap@latest"
+gotools["sourcemapper"]="go install -v github.com/denandz/sourcemapper@latest"
+gotools["jsluice"]="go install -v github.com/BishopFox/jsluice/cmd/jsluice@latest"
# Declaring repositories and their paths
declare -A repos
@@ -108,6 +110,7 @@ repos["SwaggerSpy"]="UndeadSec/SwaggerSpy"
repos["LeakSearch"]="JoelGMSec/LeakSearch"
repos["ffufPostprocessing"]="Damian89/ffufPostprocessing"
repos["misconfig-mapper"]="intigriti/misconfig-mapper"
+repos["Spoofy"]="MattKeeley/Spoofy"
function banner() {
tput clear
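For reference, a simplified sketch of how entries in the `repos` map are typically consumed later in `install.sh`; the real loop also handles per-repo requirements, so this is not the verbatim code:

```bash
# Clone each declared repo into the tools directory; blob:none keeps clones
# lightweight. Error handling omitted for brevity.
for repo in "${!repos[@]}"; do
	git clone --filter="blob:none" "https://github.com/${repos[$repo]}" "${dir}/${repo}"
done
```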
diff --git a/reconftw.cfg b/reconftw.cfg
index a659f3b1..a0319579 100644
--- a/reconftw.cfg
+++ b/reconftw.cfg
@@ -54,6 +54,7 @@ REVERSE_WHOIS=true # amass intel reverse whois info, takes some time
IP_INFO=true # Reverse IP search, geolocation and whois
API_LEAKS=true # Check for API leaks
THIRD_PARTIES=true # Check for third-party misconfigurations
+SPOOF=true # Check for spoofable domains
METAFINDER_LIMIT=20 # Max 250
# Subdomains
@@ -148,7 +149,7 @@ DEEP=false # DEEP mode, really slow and don't care about the number of results
DEEP_LIMIT=500 # First limit to not run unless you run DEEP
DEEP_LIMIT2=1500 # Second limit to not run unless you run DEEP
DIFF=false # Diff function, run every module over an already scanned target, printing only new findings (but save everything)
-REMOVETMP=false # Delete temporary files after execution (to free up space)
+REMOVETMP=true # Delete temporary files after execution (to free up space)
REMOVELOG=false # Delete logs after execution
PROXY=false # Send to proxy the websites found
SENDZIPNOTIFY=false # Send to zip the results (over notify)
diff --git a/reconftw.sh b/reconftw.sh
index 7cbe6ae3..f744b5b9 100755
--- a/reconftw.sh
+++ b/reconftw.sh
@@ -189,6 +189,10 @@ function tools_installed() {
printf "${bred} [*] misconfig-mapper [NO]${reset}\n"
allinstalled=false
}
+ [ -f "${tools}/Spoofy/spoofy.py" ] || {
+ printf "${bred} [*] spoofy [NO]${reset}\n"
+ allinstalled=false
+ }
[ -f "${tools}/SwaggerSpy/swaggerspy.py" ] || {
printf "${bred} [*] swaggerspy [NO]${reset}\n"
allinstalled=false
@@ -413,6 +417,14 @@ function tools_installed() {
printf "${bred} [*] sns [NO]${reset}\n"
allinstalled=false
}
+ command -v sourcemapper &>/dev/null || {
+ printf "${bred} [*] sourcemapper [NO]${reset}\n"
+ allinstalled=false
+ }
+ command -v jsluice &>/dev/null || {
+ printf "${bred} [*] jsluice [NO]${reset}\n"
+ allinstalled=false
+ }
if [[ ${allinstalled} == true ]]; then
printf "${bgreen} Good! All installed! ${reset}\n\n"
else
@@ -652,7 +664,7 @@ function domain_info() {
function third_party_misconfigs() {
- mkdir -p 3rdparties
+ mkdir -p osint
if { [[ ! -f "$called_fn_dir/.${FUNCNAME[0]}" ]] || [[ $DIFF == true ]]; } && [[ $THIRD_PARTIES == true ]] && [[ $OSINT == true ]] && ! [[ $domain =~ ^[0-9]+\.[0-9]+\.[0-9]+\.[0-9] ]]; then
start_func ${FUNCNAME[0]} "Searching for third parties misconfigurations"
company_name=$(echo $domain | unfurl format %r)
@@ -660,13 +672,13 @@ function third_party_misconfigs() {
pushd "${tools}/misconfig-mapper" >/dev/null || {
echo "Failed to cd directory in ${FUNCNAME[0]} @ line ${LINENO}"
}
- ./misconfig-mapper -target $company_name -service "*" | grep "\[-\]" > ${dir}/3rdparties/visma_misconfigurations.txt
+	./misconfig-mapper -target $company_name -service "*" | grep -v "\[-\]" > ${dir}/osint/3rdparties_misconfigurations.txt
popd >/dev/null || {
echo "Failed to popd in ${FUNCNAME[0]} @ line ${LINENO}"
}
- end_func "Results are saved in $domain/3rdparties" ${FUNCNAME[0]}
+	end_func "Results are saved in $domain/osint/3rdparties_misconfigurations.txt" ${FUNCNAME[0]}
else
if [[ $THIRD_PARTIES == false ]] || [[ $OSINT == false ]]; then
@@ -684,6 +696,39 @@ function third_party_misconfigs() {
}
+function spoof() {
+
+ mkdir -p osint
+ if { [[ ! -f "$called_fn_dir/.${FUNCNAME[0]}" ]] || [[ $DIFF == true ]]; } && [[ $SPOOF == true ]] && [[ $OSINT == true ]] && ! [[ $domain =~ ^[0-9]+\.[0-9]+\.[0-9]+\.[0-9] ]]; then
+ start_func ${FUNCNAME[0]} "Searching for spoofable domains"
+
+ pushd "${tools}/Spoofy" >/dev/null || {
+ echo "Failed to cd directory in ${FUNCNAME[0]} @ line ${LINENO}"
+ }
+ ./spoofy.py -d $domain > ${dir}/osint/spoof.txt
+
+ popd >/dev/null || {
+ echo "Failed to popd in ${FUNCNAME[0]} @ line ${LINENO}"
+ }
+
+ end_func "Results are saved in $domain/osint/spoof.txt" ${FUNCNAME[0]}
+
+ else
+ if [[ $SPOOF == false ]] || [[ $OSINT == false ]]; then
+ printf "\n${yellow}[$(date +'%Y-%m-%d %H:%M:%S')] ${FUNCNAME[0]} skipped in this mode or defined in reconftw.cfg ${reset}\n"
+ elif [[ $domain =~ ^[0-9]+\.[0-9]+\.[0-9]+\.[0-9] ]]; then
+ return
+		else
+			printf "${yellow}[$(date +'%Y-%m-%d %H:%M:%S')] ${FUNCNAME[0]} is already processed, to force executing ${FUNCNAME[0]} delete\n $called_fn_dir/.${FUNCNAME[0]} ${reset}\n\n"
+		fi
+ fi
+
+}
+
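Outside of reconFTW, the same check can be reproduced directly; a minimal sketch assuming the default `~/Tools` install path from the config:

```bash
# Query SPF/DMARC records for a domain and report whether it is spoofable;
# domain and output path are placeholders.
cd ~/Tools/Spoofy
python3 spoofy.py -d example.com > spoof.txt
```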
function ip_info() {
mkdir -p osint
@@ -1365,7 +1410,13 @@ function geo_info() {
start_func ${FUNCNAME[0]} "Running: ipinfo and geoinfo"
ips_file="${dir}/hosts/ips.txt"
if [ ! -f $ips_file ]; then
- echo "File ${dir}/hosts/ips.txt does not exist."
+ if ! [[ $domain =~ ^[0-9]+\.[0-9]+\.[0-9]+\.[0-9] ]]; then
+ [ -s "subdomains/subdomains_dnsregs.json" ] && cat subdomains/subdomains_dnsregs.json | jq -r 'try . | "\(.host) \(.a[0])"' | anew -q .tmp/subs_ips.txt
+ [ -s ".tmp/subs_ips.txt" ] && awk '{ print $2 " " $1}' .tmp/subs_ips.txt | sort -k2 -n | anew -q hosts/subs_ips_vhosts.txt
+			[ -s "hosts/subs_ips_vhosts.txt" ] && cat hosts/subs_ips_vhosts.txt | cut -d ' ' -f1 | grep -aEiv "^(127|10|169\.254|172\.1[6789]|172\.2[0-9]|172\.3[01]|192\.168)\." | grep -oE "\b([0-9]{1,3}\.){3}[0-9]{1,3}\b" | anew -q hosts/ips.txt
+		else
+			echo $domain | grep -aEiv "^(127|10|169\.254|172\.1[6789]|172\.2[0-9]|172\.3[01]|192\.168)\." | grep -oE "\b([0-9]{1,3}\.){3}[0-9]{1,3}\b" | anew -q hosts/ips.txt
+ fi
else
for ip in $(cat "$ips_file"); do
json_output=$(curl -s https://ipapi.co/$ip/json)
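The per-IP lookup above hits ipapi.co's JSON endpoint; a hedged standalone equivalent (the field names come from ipapi.co's public docs and may change):

```bash
# Fetch geolocation info for one IP and extract a few common fields.
curl -s "https://ipapi.co/8.8.8.8/json" |
	jq -r '"\(.ip) \(.city) \(.region) \(.country_name) \(.org)"'
```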
@@ -1932,6 +1983,7 @@ function urlchecks() {
[ -s ".tmp/katana.txt" ] && sed -i '/^.\{2048\}./d' .tmp/katana.txt
[ -s ".tmp/katana.txt" ] && cat .tmp/katana.txt | anew -q .tmp/url_extract_tmp.txt
[ -s ".tmp/url_extract_tmp.txt" ] && cat .tmp/url_extract_tmp.txt | grep "${domain}" | grep -E '^((http|https):\/\/)?([a-zA-Z0-9]([a-zA-Z0-9\-]*[a-zA-Z0-9])?\.)+[a-zA-Z]{1,}(\/.*)?$' | grep -aEi "\.(js)" | anew -q .tmp/url_extract_js.txt
+ [ -s ".tmp/url_extract_tmp.txt" ] && cat .tmp/url_extract_tmp.txt | grep "${domain}" | grep -E '^((http|https):\/\/)?([a-zA-Z0-9]([a-zA-Z0-9\-]*[a-zA-Z0-9])?\.)+[a-zA-Z]{1,}(\/.*)?$' | grep -aEi "\.(js\.map)" | anew -q .tmp/url_extract_jsmap.txt
if [[ $DEEP == true ]]; then
[ -s ".tmp/url_extract_js.txt" ] && interlace -tL .tmp/url_extract_js.txt -threads 10 -c "python3 ${tools}/JSA/jsa.py -f target | anew -q .tmp/url_extract_tmp.txt" &>/dev/null
fi
@@ -2017,7 +2069,8 @@ function jschecks() {
if { [[ ! -f "$called_fn_dir/.${FUNCNAME[0]}" ]] || [[ $DIFF == true ]]; } && [[ $JSCHECKS == true ]]; then
start_func ${FUNCNAME[0]} "Javascript Scan"
if [[ -s ".tmp/url_extract_js.txt" ]]; then
- printf "${yellow} Running : Fetching Urls 1/5${reset}\n"
+
+			printf "${yellow}[$(date +'%Y-%m-%d %H:%M:%S')] Running : Fetching Urls 1/6${reset}\n"
if [[ $AXIOM != true ]]; then
cat .tmp/url_extract_js.txt | subjs -ua "Mozilla/5.0 (X11; Linux x86_64; rv:72.0) Gecko/20100101 Firefox/72.0" -c 40 | grep "$domain" | grep -E '^((http|https):\/\/)?([a-zA-Z0-9]([a-zA-Z0-9\-]*[a-zA-Z0-9])?\.)+[a-zA-Z]{1,}(\/.*)?$' | anew -q .tmp/subjslinks.txt
else
@@ -2026,31 +2079,36 @@ function jschecks() {
[ -s ".tmp/subjslinks.txt" ] && cat .tmp/subjslinks.txt | egrep -iv "\.(eot|jpg|jpeg|gif|css|tif|tiff|png|ttf|otf|woff|woff2|ico|pdf|svg|txt|js)" | anew -q js/nojs_links.txt
[ -s ".tmp/subjslinks.txt" ] && cat .tmp/subjslinks.txt | grep -iE "\.js($|\?)" | anew -q .tmp/url_extract_js.txt
cat .tmp/url_extract_js.txt | python3 ${tools}/urless/urless/urless.py | anew -q js/url_extract_js.txt 2>>"$LOGFILE" >/dev/null
- printf "${yellow}[$(date +'%Y-%m-%d %H:%M:%S')] Running : Resolving JS Urls 2/5${reset}\n"
+
+ printf "${yellow}[$(date +'%Y-%m-%d %H:%M:%S')] Running : Resolving JS Urls 2/6${reset}\n"
if [[ $AXIOM != true ]]; then
[ -s "js/url_extract_js.txt" ] && cat js/url_extract_js.txt | httpx -follow-redirects -random-agent -silent -timeout $HTTPX_TIMEOUT -threads $HTTPX_THREADS -rl $HTTPX_RATELIMIT -status-code -content-type -retries 2 -no-color | grep "[200]" | grep "javascript" | cut -d ' ' -f1 | anew -q js/js_livelinks.txt
else
[ -s "js/url_extract_js.txt" ] && axiom-scan js/url_extract_js.txt -m httpx -follow-host-redirects -H \"${HEADER}\" -status-code -threads $HTTPX_THREADS -rl $HTTPX_RATELIMIT -timeout $HTTPX_TIMEOUT -silent -content-type -retries 2 -no-color -o .tmp/js_livelinks.txt $AXIOM_EXTRA_ARGS 2>>"$LOGFILE" >/dev/null
[ -s ".tmp/js_livelinks.txt" ] && cat .tmp/js_livelinks.txt | anew .tmp/web_full_info.txt | grep "[200]" | grep "javascript" | cut -d ' ' -f1 | anew -q js/js_livelinks.txt
fi
- printf "${yellow}[$(date +'%Y-%m-%d %H:%M:%S')] Running : Gathering endpoints 3/5${reset}\n"
+
+			printf "${yellow}[$(date +'%Y-%m-%d %H:%M:%S')] Running : Extracting JS from sourcemaps 3/6${reset}\n"
+ mkdir -p .tmp/sourcemapper
+ [ -s "js/js_livelinks.txt" ] && interlace -tL js/js_livelinks.txt -threads ${INTERLACE_THREADS} -c "sourcemapper -jsurl '_target_' -output _output_/_cleantarget_" -o .tmp/sourcemapper 2>>"$LOGFILE" >/dev/null
+			[ -s ".tmp/url_extract_jsmap.txt" ] && interlace -tL .tmp/url_extract_jsmap.txt -threads ${INTERLACE_THREADS} -c "sourcemapper -url '_target_' -output _output_/_cleantarget_" -o .tmp/sourcemapper 2>>"$LOGFILE" >/dev/null
+
+ printf "${yellow}[$(date +'%Y-%m-%d %H:%M:%S')] Running : Gathering endpoints 4/6${reset}\n"
[ -s "js/js_livelinks.txt" ] && xnLinkFinder -i js/js_livelinks.txt -sf subdomains/subdomains.txt -d $XNLINKFINDER_DEPTH -o .tmp/js_endpoints.txt 2>>"$LOGFILE" >/dev/null
+ find .tmp/sourcemapper/ \( -name "*.js" -o -name "*.ts" \) -type f | jsluice urls | jq -r .url | anew -q .tmp/js_endpoints.txt
[ -s "parameters.txt" ] && rm -f parameters.txt 2>>"$LOGFILE" >/dev/null
if [[ -s ".tmp/js_endpoints.txt" ]]; then
sed -i '/^\//!d' .tmp/js_endpoints.txt
cat .tmp/js_endpoints.txt | anew -q js/js_endpoints.txt
fi
- printf "${yellow}[$(date +'%Y-%m-%d %H:%M:%S')] Running : Gathering secrets 4/5${reset}\n"
- if [[ $AXIOM != true ]]; then
- [ -s "js/js_livelinks.txt" ] && cat js/js_livelinks.txt | mantra -ua ${HEADER} -s | anew -q js/js_secrets.txt
- [ -s "js/js_secrets.txt" ] && trufflehog filesystem js/js_secrets.txt --only-verified -j 2>/dev/null | jq -c | anew -q js/js_secrets_trufflehog.txt
- else
- [ -s "js/js_livelinks.txt" ] && axiom-scan js/js_livelinks.txt -m mantra -ua \"${HEADER}\" -s -o js/js_secrets.txt $AXIOM_EXTRA_ARGS &>/dev/null
- [ -s "js/js_secrets.txt" ] && trufflehog filesystem js/js_secrets.txt --only-verified -j 2>/dev/null | jq -c | anew -q js/js_secrets_trufflehog.txt
- fi
+ printf "${yellow}[$(date +'%Y-%m-%d %H:%M:%S')] Running : Gathering secrets 5/6${reset}\n"
+			if [[ $AXIOM != true ]]; then
+				[ -s "js/js_livelinks.txt" ] && cat js/js_livelinks.txt | mantra -ua ${HEADER} -s | anew -q js/js_secrets.txt
+			else
+				[ -s "js/js_livelinks.txt" ] && axiom-scan js/js_livelinks.txt -m mantra -ua \"${HEADER}\" -s -o js/js_secrets.txt $AXIOM_EXTRA_ARGS &>/dev/null
+			fi
+			[ -s "js/js_secrets.txt" ] && trufflehog filesystem js/js_secrets.txt -j 2>/dev/null | jq -c | anew -q js/js_secrets_trufflehog.txt
+			[ -d ".tmp/sourcemapper" ] && trufflehog filesystem .tmp/sourcemapper/ -j 2>/dev/null | jq -c | anew -q js/js_secrets_trufflehog.txt
[ -s "js/js_secrets.txt" ] && sed -r "s/\x1B\[([0-9]{1,3}(;[0-9]{1,2};?)?)?[mGK]//g" -i js/js_secrets.txt
- printf "${yellow}[$(date +'%Y-%m-%d %H:%M:%S')] Running : Building wordlist 5/5${reset}\n"
+
+ printf "${yellow}[$(date +'%Y-%m-%d %H:%M:%S')] Running : Building wordlist 6/6${reset}\n"
[ -s "js/js_livelinks.txt" ] && interlace -tL js/js_livelinks.txt -threads ${INTERLACE_THREADS} -c "python3 ${tools}/getjswords.py '_target_' | anew -q webs/dict_words.txt" 2>>"$LOGFILE" >/dev/null
end_func "Results are saved in $domain/js folder" ${FUNCNAME[0]}
else
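Taken together, the new sourcemap steps are easiest to follow standalone; a sketch assuming the target exposes a sourcemap next to its bundle (URLs and paths are placeholders):

```bash
# 1) Recover original sources from a JS bundle's sourcemap; with -jsurl,
#    sourcemapper derives the .map location from the JS URL itself.
sourcemapper -jsurl "https://target.tld/static/app.js" -output ./srcdump

# 2) Mine the recovered sources for endpoints, as in the jsluice step above:
#    file paths on stdin, keep only the URL field from the JSON output.
find ./srcdump \( -name "*.js" -o -name "*.ts" \) -type f |
	jsluice urls | jq -r .url
```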