From f05bb592e5bc7cc7999ee19406dba908d62eea78 Mon Sep 17 00:00:00 2001 From: six2dez Date: Tue, 11 May 2021 09:07:14 +0200 Subject: [PATCH 01/32] Axiom init control && smal fixes --- install.sh | 2 +- reconftw.cfg | 20 ++++++++++---------- reconftw_axiom.sh | 19 ++++++++++++++++--- 3 files changed, 27 insertions(+), 14 deletions(-) diff --git a/install.sh b/install.sh index 00d58846..f7b5859d 100755 --- a/install.sh +++ b/install.sh @@ -289,6 +289,6 @@ eval h8mail -g $DEBUG_STD ## Stripping all Go binaries eval strip -s $HOME/go/bin/* $DEBUG_STD -printf "${yellow} Remember set your api keys:\n - amass (~/.config/amass/config.ini)\n - subfinder (~/.config/subfinder/config.yaml)\n - GitHub (~/Tools/.github_tokens)\n - SHODAN (SHODAN_API_KEY in reconftw.cfg)\n - SSRF Server (COLLAB_SERVER in reconftw.cfg) \n - Blind XSS Server (XSS_SERVER in reconftw.cfg) \n - theHarvester (~/Tools/theHarvester/api-keys.yml)\n - H8mail (~/Tools/h8mail_config.ini)\n\n${reset}" +printf "${yellow} Remember set your api keys:\n - amass (~/.config/amass/config.ini)\n - subfinder (~/.config/subfinder/config.yaml)\n - GitHub (~/Tools/.github_tokens)\n - SHODAN (SHODAN_API_KEY in reconftw.cfg)\n - SSRF Server (COLLAB_SERVER in reconftw.cfg) \n - Blind XSS Server (XSS_SERVER in reconftw.cfg) \n - notify (~/.config/notify/notify.conf) \n - theHarvester (~/Tools/theHarvester/api-keys.yml)\n - H8mail (~/Tools/h8mail_config.ini)\n\n${reset}" printf "${bgreen} Finished!${reset}\n\n" printf "\n\n${bgreen}#######################################################################${reset}\n" diff --git a/reconftw.cfg b/reconftw.cfg index 32c7ac06..19a023c5 100644 --- a/reconftw.cfg +++ b/reconftw.cfg @@ -2,16 +2,6 @@ # reconFTW config file # ################################################################# -# TERM COLORS -bred='\033[1;31m' -bblue='\033[1;34m' -bgreen='\033[1;32m' -yellow='\033[0;33m' -red='\033[0;31m' -blue='\033[0;34m' -green='\033[0;32m' -reset='\033[0m' - # General values tools=~/Tools SCRIPTPATH="$( cd "$(dirname "$0")" >/dev/null 2>&1 ; pwd -P )" @@ -157,3 +147,13 @@ AXIOM_FLEET_REGIONS="" AXIOM_FLEET_SHUTDOWN=true # This is a script on your reconftw host that might prep things your way... #AXIOM_POST_START="$HOME/bin/yourScript" + +# TERM COLORS +bred='\033[1;31m' +bblue='\033[1;34m' +bgreen='\033[1;32m' +yellow='\033[0;33m' +red='\033[0;31m' +blue='\033[0;34m' +green='\033[0;32m' +reset='\033[0m' \ No newline at end of file diff --git a/reconftw_axiom.sh b/reconftw_axiom.sh index a87b9338..9e524f99 100755 --- a/reconftw_axiom.sh +++ b/reconftw_axiom.sh @@ -1426,7 +1426,7 @@ function axiom_lauch(){ # if [ -n "$AXIOM_POST_START" ]; then # eval "$AXIOM_POST_START" # fi - end_func "Axiom fleet $AXIOM_FLEET_NAME already has $NUMOFNODES instances" + end_func "Axiom fleet $AXIOM_FLEET_NAME already has $NUMOFNODES instances" # elif [[ $NUMOFNODES -eq 0 ]]; then else if [[ $NUMOFNODES -eq 0 ]]; then @@ -1436,8 +1436,8 @@ function axiom_lauch(){ fi axiom_args=" -i=$startcount " # Temporarily disabled multiple axiom regions - # [ -n "$AXIOM_FLEET_REGIONS" ] && axiom_args="$axiom_args --regions=\"$AXIOM_FLEET_REGIONS\" " - + # [ -n "$AXIOM_FLEET_REGIONS" ] && axiom_args="$axiom_args --regions=\"$AXIOM_FLEET_REGIONS\" " + echo "axiom-fleet $AXIOM_FLEET_NAME $axiom_args" axiom-fleet $AXIOM_FLEET_NAME "$axiom_args" axiom-select "$AXIOM_FLEET_NAME*" @@ -1465,6 +1465,12 @@ function axiom_shutdown(){ } function axiom_selected(){ + + if [[ ! 
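
Annotation: PATCH 01's "Axiom init control" boils down to counting the instances axiom already knows about, reusing the fleet if any exist, and otherwise launching a fresh one. A condensed sketch of that flow, assuming (as the axiom_selected hunk below does) that axiom-ls prints one header row first and one summary row last, which is why both are stripped before counting:

# Condensed sketch of PATCH 01's fleet init control, not the verbatim code.
AXIOM_FLEET_NAME="reconFTW"      # both values normally come from reconftw.cfg
AXIOM_FLEET_COUNT=5
NUMOFNODES=$(axiom-ls | tail -n +2 | sed '$ d' | wc -l)
if [ "$NUMOFNODES" -gt 0 ]; then
    echo "Axiom fleet $AXIOM_FLEET_NAME already has $NUMOFNODES instances"
else
    axiom-fleet "$AXIOM_FLEET_NAME" -i="$AXIOM_FLEET_COUNT"
fi
axiom-select "${AXIOM_FLEET_NAME}*"
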
$(axiom-ls | tail -n +2 | sed '$ d' | wc -l) -gt 0 ]]; then + notification "\n\n${bred} No axiom instances running ${reset}\n\n" error + exit + fi + if [[ ! $(cat ~/.axiom/selected.conf | sed '/^\s*$/d' | wc -l) -gt 0 ]]; then notification "\n\n${bred} No axiom instances selected ${reset}\n\n" error exit @@ -1853,12 +1859,19 @@ function multi_recon(){ function subs_menu(){ start + + axiom_lauch + axiom_selected + subdomains_full webprobe_full screenshot subtakeover zonetransfer s3buckets + + axiom_shutdown + end } From 78a9b5e7e2a76e0ef33f088005728edda284b7a8 Mon Sep 17 00:00:00 2001 From: ItsGudEnuf Date: Wed, 12 May 2021 12:08:08 -0500 Subject: [PATCH 02/32] Added long command options & alt config file option --- reconftw_axiom.sh | 312 ++++++++++++++++++++++++++++++---------------- 1 file changed, 207 insertions(+), 105 deletions(-) diff --git a/reconftw_axiom.sh b/reconftw_axiom.sh index 9e524f99..c6744166 100755 --- a/reconftw_axiom.sh +++ b/reconftw_axiom.sh @@ -1,6 +1,5 @@ #!/usr/bin/env bash -. ./reconftw.cfg function banner(){ printf "\n${bgreen}" @@ -1904,26 +1903,27 @@ function webs_menu(){ function help(){ printf "\n Usage: $0 [-d domain.tld] [-m name] [-l list.txt] [-x oos.txt] [-i in.txt] " - printf "\n [-r] [-s] [-p] [-a] [-w] [-i] [-h] [--deep] [--fs] [-o OUTPUT]\n\n" + printf "\n [-r] [-s] [-p] [-a] [-w] [-n] [-i] [-h] [--deep] [--fs] [-o OUTPUT]\n\n" printf " ${bblue}TARGET OPTIONS${reset}\n" - printf " -d domain.tld Target domain\n" - printf " -m company Target company name\n" - printf " -l list.txt Targets list, one per line\n" - printf " -x oos.txt Exclude subdomains list (Out Of Scope)\n" - printf " -i in.txt Include subdomains list\n" + printf " -d domain.tld Target domain\n" + printf " -m company Target company name\n" + printf " -l list.txt Targets list, one per line\n" + printf " -x oos.txt Exclude subdomains list (Out Of Scope)\n" + printf " -i in.txt Include subdomains list\n" printf " \n" printf " ${bblue}MODE OPTIONS${reset}\n" - printf " -r Recon - Full recon process (only recon without attacks)\n" - printf " -s Subdomains - Search subdomains, check tko and web probe\n" - printf " -p Passive - Performs only passive steps \n" - printf " -a All - Perform all checks and exploitations\n" - printf " -w Web - Just web checks from list provided\n" - printf " -n OSINT - Just checks public intel info\n" - printf " -h Help - Show this help\n" + printf " -r, --recon Recon - Full recon process (only recon without attacks)\n" + printf " -s, --subdomains Subdomains - Search subdomains, check tko and web probe\n" + printf " -p, --passive Passive - Performs only passive steps \n" + printf " -a, --all All - Perform all checks and exploitations\n" + printf " -w, --web Web - Just web checks from list provided\n" + printf " -n, --osint OSINT - Just checks public intel info\n" + printf " -h Help - Show this help\n" printf " \n" printf " ${bblue}GENERAL OPTIONS${reset}\n" - printf " --deep Deep scan (Enable some slow options for deeper scan)\n" - printf " -o output/path Define output folder\n" + printf " -f confile_file Alternate reconftw.cfg file\n" + printf " --deep Deep scan (Enable some slow options for deeper scan)\n" + printf " -o output/path Define output folder\n" printf " \n" printf " ${bblue}USAGE EXAMPLES${reset}\n" printf " Recon:\n" @@ -1942,121 +1942,216 @@ function help(){ printf " ./reconftw.sh -d example.com -x out.txt -a -o custom/path\n" } + ############################################################################################################### 
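
Annotation: the parser rewrite in PATCH 02 exists because bash's builtin getopts cannot handle long options, so the script moves to GNU getopt(1), as the parsing loop that follows shows. The non-obvious part of the pattern is eval set -- "$PROGARGS", which re-installs getopt's normalized output as the positional parameters with quoting intact. A minimal self-contained sketch with hypothetical option names:

#!/usr/bin/env bash
# Minimal GNU getopt(1) pattern as adopted in PATCH 02.
# getopt validates and reorders the arguments; 'eval set --' re-installs
# them as $1, $2, ... with quoting preserved.
PROGARGS=$(getopt -o 'd:h' --long 'domain:,help' -n 'demo' -- "$@") || exit 1
eval set -- "$PROGARGS"
while true; do
    case "$1" in
        '-d'|'--domain') domain=$2; shift 2 ;;
        '-h'|'--help')   echo "usage: demo [-d|--domain domain.tld]"; exit 0 ;;
        '--')            shift; break ;;            # end of options
        *)               echo "parse error" >&2; exit 1 ;;
    esac
done
echo "domain=${domain:-unset}"
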
########################################### START SCRIPT ##################################################### ############################################################################################################### -banner +PROGARGS=$(getopt -o 'd:m:l:x:i:o:f:rspawvh::' --long 'domain:,list:,recon,subdomains,passive,all,web,osint,deep,help' -n 'reconFTW' -- "$@") + + +# Note the quotes around "$PROGARGS": they are essential! +eval set -- "$PROGARGS" +unset PROGARGS + +while true; do + case "$1" in + '-d'|'--domain') + domain=$2 + shift 2 + continue + ;; + '-m') + multi=$2 + shift 2 + continue + ;; + '-l'|'--list') + list=$2 + shift 2 + continue + ;; + '-x') + outOfScope_file=$2 + shift 2 + continue + ;; + '-i') + inScope_file=$2 + shift 2 + continue + ;; + + # modes + '-r'|'--recon') + opt_mode='r' + shift + continue + ;; + '-s'|'--subdomains') + opt_mode='s' + shift + continue + ;; + '-p'|'--passive') + opt_mode='p' + shift + continue + ;; + '-a'|'--all') + opt_mode='a' + shift + continue + ;; + '-w'|'--web') + opt_mode='w' + shift + continue + ;; + '-n'|'--osint') + opt_mode='i' + shift + continue + ;; + + # extra stuff + '-o') + dir_output=$2 + output + + shift 2 + continue + ;; + '-f') + config_file=$2 + shift 2 + continue + ;; + '--deep') + opt_deep=true + shift + continue + ;; + + '--') + shift + break + ;; + '--help'| '-h'| *) + # echo "Unknown argument: $1" + banner + help + exit 1 + ;; + + esac +done -check_version -startdir=${PWD} +# This is the first thing to do to read in alternate config +if [ -s "$config_file" ]; then + . "${config_file}" +else + . ./reconftw.cfg +fi -if [ -z "$1" ]; then - help - tools_installed - exit +if [ $opt_deep ]; then + DEEP=true fi -while getopts ":hd:-:l:m:x:i:varnspxwo:" opt; do - general=$@ - if [[ $general == *"--deep"* ]]; then - DEEP=true - fi - case ${opt} in +if [ -n "$outOfScope_file" ]; then + isAsciiText $outOfScope_file + if [ "False" = "$IS_ASCII" ] + then + printf "\n\n${bred} Out of Scope file is not a text file${reset}\n\n" + exit + fi +fi - ## TARGETS +if [ -n "$inScope_file" ]; then + isAsciiText $inScope_file + if [ "False" = "$IS_ASCII" ] + then + printf "\n\n${bred} In Scope file is not a text file${reset}\n\n" + exit + fi +fi - m ) multi=$OPTARG - ;; - d ) domain=$OPTARG - ;; - l ) list=$OPTARG - if [ -n "$list" ]; then - if [[ "$list" = ./* ]]; then - flist="${startdir}/${list:2}" - elif [[ "$list" = ~* ]]; then - flist="${HOME}/${list:2}" - elif [[ "$list" = /* ]]; then - flist=$list - else - flist="$startdir/$list" - fi - else - flist='' - fi - ;; - x ) outOfScope_file=$OPTARG - isAsciiText $outOfScope_file - if [ "False" = "$IS_ASCII" ]; then - printf "\n\n${bred} Out of Scope file is not a text file${reset}\n\n" - exit - fi - ;; - i ) inScope_file=$OPTARG - isAsciiText $inScope_file - if [ "False" = "$IS_ASCII" ]; then - printf "\n\n${bred} In Scope file is not a text file${reset}\n\n" - exit - fi - ;; +startdir=${PWD} - ## MODES +banner + +check_version - r ) if [ -n "$multi" ]; then +startdir=${PWD} +if [ -n "$list" ]; then + if [[ "$list" = ./* ]]; then + flist="${startdir}/${list:2}" + elif [[ "$list" = ~* ]]; then + flist="${HOME}/${list:2}" + elif [[ "$list" = /* ]]; then + flist=$list + else + flist="$startdir/$list" + fi +else + flist='' +fi + +case $opt_mode in + 'r') + if [ -n "$multi" ]; then + mode="multi_recon" multi_recon exit fi if [ -n "$list" ]; then + + mode="list_recon" for domain in $(cat $list); do start recon end done else + mode="recon" start recon end fi - exit - ;; - n ) PRESERVE=true - if 
[ -n "$multi" ]; then - multi_osint - exit - fi - if [ -n "$list" ]; then + ;; + 's') + if [ -n "$list" ]; then + mode="subs_menu" for domain in $(cat $list); do - start - osint - end + subs_menu done else - start - osint - end + subs_menu fi - exit - ;; - s ) if [ -n "$list" ]; then + ;; + 'p') + if [ -n "$list" ]; then + mode="passive" for domain in $(cat $list); do - subs_menu + passive done else - subs_menu + passive fi - exit - ;; - a ) if [ -n "$list" ]; then + ;; + 'a') + if [ -n "$list" ]; then + mode="all" for domain in $(cat $list); do all done else all fi - exit - ;; - w ) start + ;; + 'w') + start if [ -n "$list" ]; then if [[ "$list" = /* ]]; then cp $list $dir/webs/webs.txt @@ -2066,22 +2161,29 @@ while getopts ":hd:-:l:m:x:i:varnspxwo:" opt; do fi webs_menu exit - ;; - p ) if [ -n "$list" ]; then + ;; + 'i') + PRESERVE=true + if [ -n "$multi" ]; then + multi_osint + exit + fi + if [ -n "$list" ]; then for domain in $(cat $list); do - passive + start + osint + end done else - passive + start + osint + end fi - exit - ;; - o ) dir_output=$OPTARG - output - ;; - \? | h | : | - | * ) - help - ;; - esac -done -shift $((OPTIND -1)) + + ;; + # No mode selected. EXIT! + *) + help + exit 1 + ;; +esac From 4691dbdd7bb53eab7c453010eb7aac6378d7f6db Mon Sep 17 00:00:00 2001 From: ItsGudEnuf Date: Wed, 12 May 2021 12:28:18 -0500 Subject: [PATCH 03/32] Added long command options & alt config file option --- reconftw.sh | 374 ++++++++++++++++++++++++++++++++++++---------------- 1 file changed, 260 insertions(+), 114 deletions(-) diff --git a/reconftw.sh b/reconftw.sh index 17c8e07e..aafc95e0 100755 --- a/reconftw.sh +++ b/reconftw.sh @@ -1,6 +1,5 @@ #!/usr/bin/env bash -. ./reconftw.cfg function banner(){ printf "\n${bgreen}" @@ -1660,6 +1659,7 @@ function multi_recon(){ LOGFILE="${dir}/.log/${NOW}_${NOWT}.txt" touch .log/${NOW}_${NOWT}.txt + [ -n "$flist" ] && LISTTOTAL=$(cat "$flist" | wc -l ) for domain in $targets; do dir=$workdir/targets/$domain called_fn_dir=$dir/.called_fn @@ -1683,7 +1683,13 @@ function multi_recon(){ currently=$(date +"%H:%M:%S") loopend=$(date +%s) getElapsedTime $loopstart $loopend + printf "\n\n${reset}#######################################################################\n" printf "${bgreen} $domain finished 1st loop in ${runtime} $currently ${reset}\n" + if [ -n "$flist" ]; then + POSINLIST=$(eval grep -nrE "^$domain$" "$flist" | cut -f1 -d':') + printf "\n${yellow} $domain is $POSINLIST of $LISTTOTAL${reset}\n" + fi + printf "${reset}#######################################################################\n" done cd "$workdir" || { echo "Failed to cd directory '$workdir' in ${FUNCNAME[0]} @ line ${LINENO}"; exit 1; } @@ -1699,7 +1705,13 @@ function multi_recon(){ currently=$(date +"%H:%M:%S") loopend=$(date +%s) getElapsedTime $loopstart $loopend + printf "\n\n${reset}#######################################################################\n" printf "${bgreen} $domain finished 2nd loop in ${runtime} $currently ${reset}\n" + if [ -n "$flist" ]; then + POSINLIST=$(eval grep -nrE "^$domain$" "$flist" | cut -f1 -d':') + printf "\n${yellow} $domain is $POSINLIST of $LISTTOTAL${reset}\n" + fi + printf "${reset}#######################################################################\n\n" done cd "$workdir" || { echo "Failed to cd directory '$workdir' in ${FUNCNAME[0]} @ line ${LINENO}"; exit 1; } @@ -1737,18 +1749,33 @@ function multi_recon(){ currently=$(date +"%H:%M:%S") loopend=$(date +%s) getElapsedTime $loopstart $loopend + printf 
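
Annotation: the progress banners PATCH 03 adds to multi_recon (here and in the loops that follow) compute the current target's 1-based position by grepping the original list with exact-match anchors, so example.com does not also count sub.example.com. A standalone sketch of that arithmetic (the eval and -r flag in the patch appear unnecessary for a regular file):

# Sketch of the position-in-list reporting added to multi_recon.
flist=targets.txt                                  # one target per line
printf 'example.com\nexample.org\n' > "$flist"
LISTTOTAL=$(wc -l < "$flist")
domain=example.org
POSINLIST=$(grep -nE "^${domain}$" "$flist" | cut -f1 -d':')
printf '%s is %s of %s\n' "$domain" "$POSINLIST" "$LISTTOTAL"
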
"\n\n${reset}#######################################################################\n" printf "${bgreen} $domain finished 3rd loop in ${runtime} $currently ${reset}\n" + if [ -n "$flist" ]; then + POSINLIST=$(eval grep -nrE "^$domain$" "$flist" | cut -f1 -d':') + printf "\n${yellow} $domain is $POSINLIST of $LISTTOTAL${reset}\n" + fi + printf "${reset}#######################################################################\n\n" done cloudprovider for domain in $targets; do loopstart=$(date +%s) + dir=$workdir/targets/$domain + called_fn_dir=$dir/.called_fn + cd "$dir" || { echo "Failed to cd directory '$dir' in ${FUNCNAME[0]} @ line ${LINENO}"; exit 1; } cms_scanner url_gf wordlist_gen currently=$(date +"%H:%M:%S") loopend=$(date +%s) getElapsedTime $loopstart $loopend + printf "\n\n${reset}#######################################################################\n" printf "${bgreen} $domain finished final loop in ${runtime} $currently ${reset}\n" + if [ -n "$flist" ]; then + POSINLIST=$(eval grep -nrE "^$domain$" "$flist" | cut -f1 -d':') + printf "\n${yellow} $domain is $POSINLIST of $LISTTOTAL${reset}\n" + fi + printf "${reset}#######################################################################\n\n" done cd "$workdir" || { echo "Failed to cd directory '$workdir' in ${FUNCNAME[0]} @ line ${LINENO}"; exit 1; } dir=$workdir @@ -1768,54 +1795,55 @@ function subs_menu(){ } function webs_menu(){ - subtakeover - s3buckets - waf_checks - nuclei_check - cms_scanner - fuzz - 4xxbypass - cors - params - urlchecks - url_gf - jschecks - wordlist_gen - open_redirect - ssrf_checks - crlf_checks - lfi - ssti - sqli - xss - spraying - brokenLinks - test_ssl - end + subtakeover + s3buckets + waf_checks + nuclei_check + cms_scanner + fuzz + 4xxbypass + cors + params + urlchecks + url_gf + jschecks + wordlist_gen + open_redirect + ssrf_checks + crlf_checks + lfi + ssti + sqli + xss + spraying + brokenLinks + test_ssl + end } function help(){ printf "\n Usage: $0 [-d domain.tld] [-m name] [-l list.txt] [-x oos.txt] [-i in.txt] " - printf "\n [-r] [-s] [-p] [-a] [-w] [-i] [-h] [--deep] [--fs] [-o OUTPUT]\n\n" + printf "\n [-r] [-s] [-p] [-a] [-w] [-n] [-i] [-h] [--deep] [--fs] [-o OUTPUT]\n\n" printf " ${bblue}TARGET OPTIONS${reset}\n" - printf " -d domain.tld Target domain\n" - printf " -m company Target company name\n" - printf " -l list.txt Targets list, one per line\n" - printf " -x oos.txt Exclude subdomains list (Out Of Scope)\n" - printf " -i in.txt Include subdomains list\n" + printf " -d domain.tld Target domain\n" + printf " -m company Target company name\n" + printf " -l list.txt Targets list, one per line\n" + printf " -x oos.txt Exclude subdomains list (Out Of Scope)\n" + printf " -i in.txt Include subdomains list\n" printf " \n" printf " ${bblue}MODE OPTIONS${reset}\n" - printf " -r Recon - Full recon process (only recon without attacks)\n" - printf " -s Subdomains - Search subdomains, check tko and web probe\n" - printf " -p Passive - Performs only passive steps \n" - printf " -a All - Perform all checks and exploitations\n" - printf " -w Web - Just web checks from list provided\n" - printf " -n OSINT - Just checks public intel info\n" - printf " -h Help - Show this help\n" + printf " -r, --recon Recon - Full recon process (only recon without attacks)\n" + printf " -s, --subdomains Subdomains - Search subdomains, check tko and web probe\n" + printf " -p, --passive Passive - Performs only passive steps \n" + printf " -a, --all All - Perform all checks and exploitations\n" + printf " -w, --web 
Web - Just web checks from list provided\n" + printf " -n, --osint OSINT - Just checks public intel info\n" + printf " -h Help - Show this help\n" printf " \n" printf " ${bblue}GENERAL OPTIONS${reset}\n" - printf " --deep Deep scan (Enable some slow options for deeper scan)\n" - printf " -o output/path Define output folder\n" + printf " -f confile_file Alternate reconftw.cfg file\n" + printf " --deep Deep scan (Enable some slow options for deeper scan)\n" + printf " -o output/path Define output folder\n" printf " \n" printf " ${bblue}USAGE EXAMPLES${reset}\n" printf " Recon:\n" @@ -1834,105 +1862,216 @@ function help(){ printf " ./reconftw.sh -d example.com -x out.txt -a -o custom/path\n" } + ############################################################################################################### ########################################### START SCRIPT ##################################################### ############################################################################################################### -banner +PROGARGS=$(getopt -o 'd:m:l:x:i:o:f:rspawvh::' --long 'domain:,list:,recon,subdomains,passive,all,web,osint,deep,help' -n 'reconFTW' -- "$@") + + +# Note the quotes around "$PROGARGS": they are essential! +eval set -- "$PROGARGS" +unset PROGARGS + +while true; do + case "$1" in + '-d'|'--domain') + domain=$2 + shift 2 + continue + ;; + '-m') + multi=$2 + shift 2 + continue + ;; + '-l'|'--list') + list=$2 + shift 2 + continue + ;; + '-x') + outOfScope_file=$2 + shift 2 + continue + ;; + '-i') + inScope_file=$2 + shift 2 + continue + ;; + + # modes + '-r'|'--recon') + opt_mode='r' + shift + continue + ;; + '-s'|'--subdomains') + opt_mode='s' + shift + continue + ;; + '-p'|'--passive') + opt_mode='p' + shift + continue + ;; + '-a'|'--all') + opt_mode='a' + shift + continue + ;; + '-w'|'--web') + opt_mode='w' + shift + continue + ;; + '-n'|'--osint') + opt_mode='i' + shift + continue + ;; + + # extra stuff + '-o') + dir_output=$2 + output + + shift 2 + continue + ;; + '-f') + config_file=$2 + shift 2 + continue + ;; + '--deep') + opt_deep=true + shift + continue + ;; + + '--') + shift + break + ;; + '--help'| '-h'| *) + # echo "Unknown argument: $1" + banner + help + exit 1 + ;; + + esac +done -check_version -if [ -z "$1" ]; then - help - tools_installed - exit +# This is the first thing to do to read in alternate config +if [ -s "$config_file" ]; then + . "${config_file}" +else + . 
./reconftw.cfg fi -while getopts ":hd:-:l:m:x:i:varnspxwo:" opt; do - general=$@ - if [[ $general == *"--deep"* ]]; then - DEEP=true - fi - case ${opt} in +if [ $opt_deep ]; then + DEEP=true +fi - ## TARGETS +if [ -n "$outOfScope_file" ]; then + isAsciiText $outOfScope_file + if [ "False" = "$IS_ASCII" ] + then + printf "\n\n${bred} Out of Scope file is not a text file${reset}\n\n" + exit + fi +fi - m ) multi=$OPTARG - ;; - d ) domain=$OPTARG - ;; - l ) list=$OPTARG - ;; - x ) outOfScope_file=$OPTARG - isAsciiText $outOfScope_file - if [ "False" = "$IS_ASCII" ]; then - printf "\n\n${bred} Out of Scope file is not a text file${reset}\n\n" - exit - fi - ;; - i ) inScope_file=$OPTARG - isAsciiText $inScope_file - if [ "False" = "$IS_ASCII" ]; then - printf "\n\n${bred} In Scope file is not a text file${reset}\n\n" - exit - fi - ;; +if [ -n "$inScope_file" ]; then + isAsciiText $inScope_file + if [ "False" = "$IS_ASCII" ] + then + printf "\n\n${bred} In Scope file is not a text file${reset}\n\n" + exit + fi +fi + +startdir=${PWD} - ## MODES +banner + +check_version + +startdir=${PWD} +if [ -n "$list" ]; then + if [[ "$list" = ./* ]]; then + flist="${startdir}/${list:2}" + elif [[ "$list" = ~* ]]; then + flist="${HOME}/${list:2}" + elif [[ "$list" = /* ]]; then + flist=$list + else + flist="$startdir/$list" + fi +else + flist='' +fi - r ) if [ -n "$multi" ]; then +case $opt_mode in + 'r') + if [ -n "$multi" ]; then + #mode="multi_recon" multi_recon exit fi if [ -n "$list" ]; then + + #mode="list_recon" for domain in $(cat $list); do start recon end done else + #mode="recon" start recon end fi - exit - ;; - n ) if [ -n "$multi" ]; then - multi_osint - exit - fi - if [ -n "$list" ]; then + ;; + 's') + if [ -n "$list" ]; then + #mode="subs_menu" for domain in $(cat $list); do - start - osint - end + subs_menu done else - start - osint - end + subs_menu fi - exit - ;; - s ) if [ -n "$list" ]; then + ;; + 'p') + if [ -n "$list" ]; then + #mode="passive" for domain in $(cat $list); do - subs_menu + passive done else - subs_menu + passive fi - exit - ;; - a ) if [ -n "$list" ]; then + ;; + 'a') + if [ -n "$list" ]; then + #mode="all" for domain in $(cat $list); do all done else all fi - exit - ;; - w ) start + ;; + 'w') + start if [ -n "$list" ]; then if [[ "$list" = /* ]]; then cp $list $dir/webs/webs.txt @@ -1942,22 +2081,29 @@ while getopts ":hd:-:l:m:x:i:varnspxwo:" opt; do fi webs_menu exit - ;; - p ) if [ -n "$list" ]; then + ;; + 'i') + PRESERVE=true + if [ -n "$multi" ]; then + multi_osint + exit + fi + if [ -n "$list" ]; then for domain in $(cat $list); do - passive + start + osint + end done else - passive + start + osint + end fi - exit - ;; - o ) dir_output=$OPTARG - output - ;; - \? | h | : | - | * ) - help - ;; - esac -done -shift $((OPTIND -1)) + + ;; + # No mode selected. EXIT! 
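
Annotation: the other structural change in PATCHes 02/03 is visible just above: reconftw.cfg is no longer sourced at the top of the script. Sourcing is deferred until after argument parsing so that -f can substitute an alternate config, with [ -s ... ] guarding against a missing or empty file:

# Deferred-config pattern from PATCHes 02/03.
if [ -s "$config_file" ]; then
    . "$config_file"          # alternate config given via -f
else
    . ./reconftw.cfg          # fall back to the shipped default
fi

One side effect worth knowing: the -h/--help arm runs before this point, so the color variables (moved to the bottom of reconftw.cfg by PATCH 01) are still unset there and the banner appears to print uncolored.
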
+ *) + help + exit 1 + ;; +esac From fb0682fbb74d065dd889a49aafd9b1f7647126cd Mon Sep 17 00:00:00 2001 From: six2dez Date: Thu, 13 May 2021 09:30:42 +0200 Subject: [PATCH 04/32] Unimap added, sudoers advice and small fixes --- install.sh | 32 ++++++++++++++++++++--------- reconftw.sh | 52 +++++++++++++++++++++++++---------------------- reconftw_axiom.sh | 2 +- 3 files changed, 51 insertions(+), 35 deletions(-) diff --git a/install.sh b/install.sh index f7b5859d..54c26606 100755 --- a/install.sh +++ b/install.sh @@ -65,15 +65,22 @@ else IS_ARM="False"; fi +printf "\n\n${bgreen}#######################################################################${reset}\n" +printf "${bgreen} reconFTW installer/updater script ${reset}\n\n" +printf "${yellow} This may take time. So, go grab a coffee! ${reset}\n\n" + if [[ $(id -u | grep -o '^0$') == "0" ]]; then SUDO=" " else + if sudo -n false 2>/dev/null; then + printf "${bred} Is strongly recommended to add your user to sudoers${reset}\n" + printf "${bred} This will avoid prompts for sudo password in the middle of the installation${reset}\n" + printf "${bred} And more important, in the middle of the scan (needed for nmap SYN scan)${reset}\n\n" + printf "${bred} echo \"${USERNAME} ALL=(ALL:ALL) NOPASSWD: ALL\" > /etc/sudoers.d/reconFTW${reset}\n\n" + fi SUDO="sudo" fi -printf "\n\n${bgreen}#######################################################################${reset}\n" -printf "${bgreen} reconFTW installer/updater script ${reset}\n\n" -printf "${yellow} This may take time. So, go grab a coffee! ${reset}\n\n" install_apt(){ eval $SUDO apt update -y $DEBUG_STD eval $SUDO apt install chromium-browser -y $DEBUG_STD @@ -222,18 +229,21 @@ done if [ "True" = "$IS_ARM" ] then eval wget -N -c https://github.com/Findomain/Findomain/releases/latest/download/findomain-rpi $DEBUG_STD - eval $SUDO mv findomain-rpi /usr/local/bin/findomain + eval $SUDO mv findomain-rpi /usr/bin/findomain else eval wget -N -c https://github.com/Findomain/Findomain/releases/latest/download/findomain-linux $DEBUG_STD eval wget -N -c https://github.com/sensepost/gowitness/releases/download/2.3.4/gowitness-2.3.4-linux-amd64 $DEBUG_STD eval wget -N -c https://github.com/codingo/DNSCewl/raw/master/DNScewl $DEBUG_STD - eval $SUDO mv DNScewl /usr/local/bin/DNScewl - eval $SUDO mv gowitness-2.3.4-linux-amd64 /usr/local/bin/gowitness - eval $SUDO mv findomain-linux /usr/local/bin/findomain + eval wget -N -c https://github.com/Edu4rdSHL/unimap/releases/download/0.4.0/unimap-linux $DEBUG_STD + eval $SUDO mv DNScewl /usr/bin/DNScewl + eval $SUDO mv gowitness-2.3.4-linux-amd64 /usr/bin/gowitness + eval $SUDO mv findomain-linux /usr/bin/findomain + eval $SUDO mv unimap-linux /usr/bin/unimap fi -eval $SUDO chmod 755 /usr/local/bin/findomain -eval $SUDO chmod 755 /usr/local/bin/gowitness -eval $SUDO chmod 755 /usr/local/bin/DNScewl +eval $SUDO chmod 755 /usr/bin/findomain +eval $SUDO chmod 755 /usr/bin/gowitness +eval $SUDO chmod 755 /usr/bin/DNScewl +eval $SUDO chmod 755 /usr/bin/unimap eval subfinder $DEBUG_STD eval subfinder $DEBUG_STD @@ -289,6 +299,8 @@ eval h8mail -g $DEBUG_STD ## Stripping all Go binaries eval strip -s $HOME/go/bin/* $DEBUG_STD +eval $SUDO cp $HOME/go/bin/* /usr/bin $DEBUG_STD + printf "${yellow} Remember set your api keys:\n - amass (~/.config/amass/config.ini)\n - subfinder (~/.config/subfinder/config.yaml)\n - GitHub (~/Tools/.github_tokens)\n - SHODAN (SHODAN_API_KEY in reconftw.cfg)\n - SSRF Server (COLLAB_SERVER in reconftw.cfg) \n - Blind XSS Server (XSS_SERVER in 
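
Annotation on the sudoers advice added to install.sh above: the guard `if sudo -n false 2>/dev/null` seems unable to fire, since `sudo -n false` exits nonzero both when a password would be required and when it successfully runs false. A sketch of a probe that distinguishes the two cases, offered as a hypothetical fix rather than as the patch's behavior:

# Hypothetical probe: 'sudo -n true' exits 0 only when sudo works
# without prompting for a password.
if sudo -n true 2>/dev/null; then
    :   # passwordless sudo already available, no advice needed
else
    printf 'Consider a NOPASSWD sudoers entry so unattended scans are not interrupted\n'
fi
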
reconftw.cfg) \n - notify (~/.config/notify/notify.conf) \n - theHarvester (~/Tools/theHarvester/api-keys.yml)\n - H8mail (~/Tools/h8mail_config.ini)\n\n${reset}" printf "${bgreen} Finished!${reset}\n\n" printf "\n\n${bgreen}#######################################################################${reset}\n" diff --git a/reconftw.sh b/reconftw.sh index aafc95e0..c14a95bc 100755 --- a/reconftw.sh +++ b/reconftw.sh @@ -325,7 +325,7 @@ function sub_crt(){ if { [ ! -f "$called_fn_dir/.${FUNCNAME[0]}" ] || [ "$DIFF" = true ]; } && [ "$SUBCRT" = true ]; then start_subfunc "Running : Crtsh Subdomain Enumeration" python3 $tools/ctfr/ctfr.py -d $domain -o .tmp/crtsh_subs_tmp.txt &>>"$LOGFILE" - curl "https://tls.bufferover.run/dns?q=.${domain}" 2>>"$LOGFILE" | jq -r .Results[] 2>>"$LOGFILE" | cut -d ',' -f3 | grep -F ".$domain" | anew -q .tmp/crtsh_subs.txt + curl "https://tls.bufferover.run/dns?q=.${domain}" 2>>"$LOGFILE" | jq -r .Results[] 2>>"$LOGFILE" | cut -d ',' -f3 | grep -F ".$domain" | anew -q .tmp/crtsh_subs_tmp.txt curl "https://dns.bufferover.run/dns?q=.${domain}" 2>>"$LOGFILE" | jq -r '.FDNS_A'[],'.RDNS'[] 2>>"$LOGFILE" | cut -d ',' -f2 | grep -F ".$domain" | anew -q .tmp/crtsh_subs_tmp.txt NUMOFLINES=$(cat .tmp/crtsh_subs_tmp.txt 2>>"$LOGFILE" | anew .tmp/crtsh_subs.txt | wc -l) end_subfunc "${NUMOFLINES} new subs (cert transparency)" ${FUNCNAME[0]} @@ -599,12 +599,14 @@ function webprobe_full(){ if ([ ! -f "$called_fn_dir/.${FUNCNAME[0]}" ] || [ "$DIFF" = true ]) && [ "$WEBPROBEFULL" = true ]; then start_func "Http probing non standard ports" - timeout_secs=$(($(cat subdomains/subdomains.txt | wc -l)*5+10)) + sudo unimap --fast-scan -f subdomains/subdomains.txt --ports $UNCOMMON_PORTS_WEB -q -k --url-output | anew -q .tmp/nmap_uncommonweb.txt + cat .tmp/nmap_uncommonweb.txt | httpx -follow-host-redirects -random-agent -status-code -threads $HTTPX_UNCOMMONPORTS_THREADS -timeout 10 -silent -retries 2 -no-color | cut -d ' ' -f1 | grep ".$domain" | anew -q .tmp/probed_uncommon_ports_tmp.txt - cat subdomains/subdomains.txt | timeout $timeout_secs naabu -p $UNCOMMON_PORTS_WEB -o .tmp/nmap_uncommonweb.txt &>>"$LOGFILE" && uncommon_ports_checked=$(cat .tmp/nmap_uncommonweb.txt | cut -d ':' -f2 | sort -u | sed -e 'H;${x;s/\n/,/g;s/^,//;p;};d') - if [ -n "$uncommon_ports_checked" ]; then - cat subdomains/subdomains.txt | httpx -ports $uncommon_ports_checked -follow-host-redirects -random-agent -status-code -threads $HTTPX_UNCOMMONPORTS_THREADS -timeout 10 -silent -retries 2 -no-color | cut -d ' ' -f1 | grep ".$domain" | anew -q .tmp/probed_uncommon_ports_tmp.txt - fi + #timeout_secs=$(($(cat subdomains/subdomains.txt | wc -l)*5+10)) + #cat subdomains/subdomains.txt | timeout $timeout_secs naabu -p $UNCOMMON_PORTS_WEB -o .tmp/nmap_uncommonweb.txt &>>"$LOGFILE" && uncommon_ports_checked=$(cat .tmp/nmap_uncommonweb.txt | cut -d ':' -f2 | sort -u | sed -e 'H;${x;s/\n/,/g;s/^,//;p;};d') + #if [ -n "$uncommon_ports_checked" ]; then + #cat subdomains/subdomains.txt | httpx -ports $uncommon_ports_checked -follow-host-redirects -random-agent -status-code -threads $HTTPX_UNCOMMONPORTS_THREADS -timeout 10 -silent -retries 2 -no-color | cut -d ' ' -f1 | grep ".$domain" | anew -q .tmp/probed_uncommon_ports_tmp.txt + #fi NUMOFLINES=$(cat .tmp/probed_uncommon_ports_tmp.txt 2>>"$LOGFILE" | anew webs/webs_uncommon_ports.txt | wc -l) notification "Uncommon web ports: ${NUMOFLINES} new websites" good cat webs/webs_uncommon_ports.txt 2>>"$LOGFILE" @@ -682,7 +684,7 @@ function portscan(){ done fi if [ 
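
Annotation: the webprobe_full rewrite above replaces the per-host naabu+httpx combination with a single unimap pass over the uncommon web ports, then probes the discovered host:port URLs with httpx. Condensed below, with flags exactly as in the patch and an illustrative port subset (the real UNCOMMON_PORTS_WEB list lives in reconftw.cfg):

# PATCH 04's uncommon-web-port pipeline, condensed. unimap runs via sudo
# in the patch, which is what the new sudoers advice in install.sh supports.
UNCOMMON_PORTS_WEB="81,591,2082,2087,3000,8000,8080,8443"   # illustrative subset
HTTPX_UNCOMMONPORTS_THREADS=100                             # from reconftw.cfg
sudo unimap --fast-scan -f subdomains/subdomains.txt \
    --ports "$UNCOMMON_PORTS_WEB" -q -k --url-output | anew -q .tmp/nmap_uncommonweb.txt
cat .tmp/nmap_uncommonweb.txt | httpx -follow-host-redirects -random-agent \
    -status-code -threads "$HTTPX_UNCOMMONPORTS_THREADS" -timeout 10 -silent \
    -retries 2 -no-color | cut -d ' ' -f1 | anew -q .tmp/probed_uncommon_ports_tmp.txt
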
"$PORTSCAN_ACTIVE" = true ]; then - nmap --top-ports 1000 -sV -n --max-retries 2 -Pn -iL .tmp/ips_nowaf.txt -oN hosts/portscan_active.txt -oG .tmp/nmap_grep.gnmap &>>"$LOGFILE" + sudo nmap --top-ports 1000 -sV -n --max-retries 2 -Pn -iL .tmp/ips_nowaf.txt -oN hosts/portscan_active.txt -oG .tmp/nmap_grep.gnmap &>>"$LOGFILE" fi end_func "Results are saved in hosts/portscan_[passive|active].txt" ${FUNCNAME[0]} else @@ -953,7 +955,7 @@ function jschecks(){ fi if [ -s ".tmp/js_endpoints.txt" ]; then sed -i '/^\//!d' .tmp/js_endpoints.txt - cat .tmp/js_endpoints.txt | anew -q js/js_endpoints.txt.txt + cat .tmp/js_endpoints.txt | anew -q js/js_endpoints.txt fi printf "${yellow} Running : Gathering secrets 4/5${reset}\n" if [ -s "js/js_livelinks.txt" ]; then @@ -1572,6 +1574,7 @@ function multi_osint(){ fi if [ -s "$list" ]; then + sed -i 's/\r$//' $list targets=$(cat $list) else notification "Target list not provided" error @@ -1642,6 +1645,7 @@ function multi_recon(){ fi if [ -s "$list" ]; then + sed -i 's/\r$//' $list targets=$(cat $list) else notification "Target list not provided" error @@ -1823,7 +1827,7 @@ function webs_menu(){ function help(){ printf "\n Usage: $0 [-d domain.tld] [-m name] [-l list.txt] [-x oos.txt] [-i in.txt] " - printf "\n [-r] [-s] [-p] [-a] [-w] [-n] [-i] [-h] [--deep] [--fs] [-o OUTPUT]\n\n" + printf "\n [-r] [-s] [-p] [-a] [-w] [-n] [-i] [-h] [-f] [--deep] [-o OUTPUT]\n\n" printf " ${bblue}TARGET OPTIONS${reset}\n" printf " -d domain.tld Target domain\n" printf " -m company Target company name\n" @@ -1841,7 +1845,7 @@ function help(){ printf " -h Help - Show this help\n" printf " \n" printf " ${bblue}GENERAL OPTIONS${reset}\n" - printf " -f confile_file Alternate reconftw.cfg file\n" + printf " -f confile_file Alternate reconftw.cfg file\n" printf " --deep Deep scan (Enable some slow options for deeper scan)\n" printf " -o output/path Define output folder\n" printf " \n" @@ -1895,7 +1899,7 @@ while true; do outOfScope_file=$2 shift 2 continue - ;; + ;; '-i') inScope_file=$2 shift 2 @@ -1929,10 +1933,10 @@ while true; do continue ;; '-n'|'--osint') - opt_mode='i' + opt_mode='n' shift continue - ;; + ;; # extra stuff '-o') @@ -1941,18 +1945,17 @@ while true; do shift 2 continue - ;; + ;; '-f') config_file=$2 shift 2 continue - ;; + ;; '--deep') opt_deep=true shift continue ;; - '--') shift break @@ -1963,12 +1966,10 @@ while true; do help exit 1 ;; - esac done - -# This is the first thing to do to read in alternate config +# This is the first thing to do to read in alternate config if [ -s "$config_file" ]; then . "${config_file}" else @@ -2026,8 +2027,8 @@ case $opt_mode in exit fi if [ -n "$list" ]; then - #mode="list_recon" + sed -i 's/\r$//' $list for domain in $(cat $list); do start recon @@ -2043,6 +2044,7 @@ case $opt_mode in 's') if [ -n "$list" ]; then #mode="subs_menu" + sed -i 's/\r$//' $list for domain in $(cat $list); do subs_menu done @@ -2053,6 +2055,7 @@ case $opt_mode in 'p') if [ -n "$list" ]; then #mode="passive" + sed -i 's/\r$//' $list for domain in $(cat $list); do passive done @@ -2063,6 +2066,7 @@ case $opt_mode in 'a') if [ -n "$list" ]; then #mode="all" + sed -i 's/\r$//' $list for domain in $(cat $list); do all done @@ -2085,10 +2089,11 @@ case $opt_mode in 'i') PRESERVE=true if [ -n "$multi" ]; then - multi_osint + multi_osint exit fi if [ -n "$list" ]; then + sed -i 's/\r$//' $list for domain in $(cat $list); do start osint @@ -2099,11 +2104,10 @@ case $opt_mode in osint end fi - - ;; + ;; # No mode selected. EXIT! 
*) help exit 1 ;; -esac +esac \ No newline at end of file diff --git a/reconftw_axiom.sh b/reconftw_axiom.sh index c6744166..7568c1e9 100755 --- a/reconftw_axiom.sh +++ b/reconftw_axiom.sh @@ -954,7 +954,7 @@ function jschecks(){ fi if [ -s ".tmp/js_endpoints.txt" ]; then sed -i '/^\//!d' .tmp/js_endpoints.txt - cat .tmp/js_endpoints.txt | anew -q js/js_endpoints.txt.txt + cat .tmp/js_endpoints.txt | anew -q js/js_endpoints.txt fi printf "${yellow} Running : Gathering secrets 4/5${reset}\n" if [ -s "js/js_livelinks.txt" ]; then From 47047caf639bcf5bd97dda2a832fa9f849ca29f0 Mon Sep 17 00:00:00 2001 From: six2dez Date: Thu, 13 May 2021 10:16:07 +0200 Subject: [PATCH 05/32] Sync changes, unimap_axiom and s3scanner_axiom --- reconftw.sh | 111 +++++++++++++++++++++++---------------------- reconftw_axiom.sh | 112 +++++++++++++++++++++++++--------------------- 2 files changed, 118 insertions(+), 105 deletions(-) diff --git a/reconftw.sh b/reconftw.sh index c14a95bc..54178fc4 100755 --- a/reconftw.sh +++ b/reconftw.sh @@ -95,7 +95,7 @@ function tools_installed(){ type -P notify &>/dev/null || { printf "${bred} [*] notify [NO]${reset}\n${reset}"; allinstalled=false;} type -P dalfox &>/dev/null || { printf "${bred} [*] dalfox [NO]${reset}\n${reset}"; allinstalled=false;} type -P puredns &>/dev/null || { printf "${bred} [*] puredns [NO]${reset}\n${reset}"; allinstalled=false;} - type -P naabu &>/dev/null || { printf "${bred} [*] naabu [NO]${reset}\n${reset}"; allinstalled=false;} + type -P unimap &>/dev/null || { printf "${bred} [*] unimap [NO]${reset}\n${reset}"; allinstalled=false;} if [ "${allinstalled}" = true ]; then printf "${bgreen} Good! All installed! ${reset}\n\n" @@ -124,7 +124,7 @@ function google_dorks(){ if [ "$GOOGLE_DORKS" = false ] || [ "$OSINT" = false ]; then printf "\n${yellow} ${FUNCNAME[0]} skipped in this mode or defined in reconftw.cfg ${reset}\n" else - printf "${yellow} ${FUNCNAME[0]} are already processed, to force executing ${FUNCNAME[0]} delete $called_fn_dir/.${FUNCNAME[0]} ${reset}\n\n" + printf "${yellow} ${FUNCNAME[0]} are already processed, to force executing ${FUNCNAME[0]} delete\n $called_fn_dir/.${FUNCNAME[0]} ${reset}\n\n" fi fi } @@ -147,7 +147,7 @@ function github_dorks(){ if [ "$GITHUB_DORKS" = false ] || [ "$OSINT" = false ]; then printf "\n${yellow} ${FUNCNAME[0]} skipped in this mode or defined in reconftw.cfg ${reset}\n" else - printf "${yellow} ${FUNCNAME[0]} is already processed, to force executing ${FUNCNAME[0]} delete $called_fn_dir/.${FUNCNAME[0]} ${reset}\n\n" + printf "${yellow} ${FUNCNAME[0]} is already processed, to force executing ${FUNCNAME[0]} delete\n $called_fn_dir/.${FUNCNAME[0]} ${reset}\n\n" fi fi } @@ -163,7 +163,7 @@ function metadata(){ if [ "$METADATA" = false ] || [ "$OSINT" = false ]; then printf "\n${yellow} ${FUNCNAME[0]} skipped in this mode or defined in reconftw.cfg ${reset}\n" else - printf "${yellow} ${FUNCNAME[0]} is already processed, to force executing ${FUNCNAME[0]} delete $called_fn_dir/.${FUNCNAME[0]} ${reset}\n\n" + printf "${yellow} ${FUNCNAME[0]} is already processed, to force executing ${FUNCNAME[0]} delete\n $called_fn_dir/.${FUNCNAME[0]} ${reset}\n\n" fi fi } @@ -199,7 +199,7 @@ function emails(){ if [ "$EMAILS" = false ] || [ "$OSINT" = false ]; then printf "\n${yellow} ${FUNCNAME[0]} skipped in this mode or defined in reconftw.cfg ${reset}\n" else - printf "${yellow} ${FUNCNAME[0]} is already processed, to force executing ${FUNCNAME[0]} delete $called_fn_dir/.${FUNCNAME[0]} ${reset}\n\n" + printf "${yellow} 
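
Annotation: a small but recurring addition in PATCH 04 above is `sed -i 's/\r$//' $list` before every list iteration. Target lists edited on Windows carry CRLF endings, and without the normalization each loop variable keeps an invisible trailing carriage return that breaks resolution and file matching. A self-contained illustration:

# Why the CR-stripping matters: simulate a CRLF-infected list and clean it.
printf 'example.com\r\nexample.org\r\n' > targets.txt
sed -i 's/\r$//' targets.txt                  # in-place normalization (GNU sed)
while read -r domain; do
    printf 'clean target: [%s]\n' "$domain"   # no stray \r inside the brackets
done < targets.txt
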
${FUNCNAME[0]} is already processed, to force executing ${FUNCNAME[0]} delete\n $called_fn_dir/.${FUNCNAME[0]} ${reset}\n\n" fi fi @@ -238,7 +238,7 @@ function domain_info(){ if [ "$DOMAIN_INFO" = false ] || [ "$OSINT" = false ]; then printf "\n${yellow} ${FUNCNAME[0]} skipped in this mode or defined in reconftw.cfg ${reset}\n" else - printf "${yellow} ${FUNCNAME[0]} is already processed, to force executing ${FUNCNAME[0]} delete $called_fn_dir/.${FUNCNAME[0]} ${reset}\n\n" + printf "${yellow} ${FUNCNAME[0]} is already processed, to force executing ${FUNCNAME[0]} delete\n $called_fn_dir/.${FUNCNAME[0]} ${reset}\n\n" fi fi } @@ -317,7 +317,7 @@ function sub_passive(){ NUMOFLINES=$(cat .tmp/*_psub.txt 2>>"$LOGFILE" | sed "s/*.//" | anew .tmp/passive_subs.txt | wc -l) end_subfunc "${NUMOFLINES} new subs (passive)" ${FUNCNAME[0]} else - printf "${yellow} ${FUNCNAME[0]} is already processed, to force executing ${FUNCNAME[0]} delete $called_fn_dir/.${FUNCNAME[0]} ${reset}\n\n" + printf "${yellow} ${FUNCNAME[0]} is already processed, to force executing ${FUNCNAME[0]} delete\n $called_fn_dir/.${FUNCNAME[0]} ${reset}\n\n" fi } @@ -333,7 +333,7 @@ function sub_crt(){ if [ "$SUBCRT" = false ]; then printf "\n${yellow} ${FUNCNAME[0]} skipped in this mode or defined in reconftw.cfg ${reset}\n" else - printf "${yellow} ${FUNCNAME[0]} is already processed, to force executing ${FUNCNAME[0]} delete $called_fn_dir/.${FUNCNAME[0]} ${reset}\n\n" + printf "${yellow} ${FUNCNAME[0]} is already processed, to force executing ${FUNCNAME[0]} delete\n $called_fn_dir/.${FUNCNAME[0]} ${reset}\n\n" fi fi } @@ -351,7 +351,7 @@ function sub_active(){ NUMOFLINES=$(cat .tmp/subdomains_tmp.txt 2>>"$LOGFILE" | grep "\.$domain$\|^$domain$" | anew subdomains/subdomains.txt | wc -l) end_subfunc "${NUMOFLINES} new subs (active resolution)" ${FUNCNAME[0]} else - printf "${yellow} ${FUNCNAME[0]} is already processed, to force executing ${FUNCNAME[0]} delete $called_fn_dir/.${FUNCNAME[0]} ${reset}\n\n" + printf "${yellow} ${FUNCNAME[0]} is already processed, to force executing ${FUNCNAME[0]} delete\n $called_fn_dir/.${FUNCNAME[0]} ${reset}\n\n" fi } @@ -364,7 +364,7 @@ function sub_dns(){ NUMOFLINES=$(cat .tmp/subdomains_dns_resolved.txt 2>>"$LOGFILE" | grep "\.$domain$\|^$domain$" | anew subdomains/subdomains.txt | wc -l) end_subfunc "${NUMOFLINES} new subs (dns resolution)" ${FUNCNAME[0]} else - printf "${yellow} ${FUNCNAME[0]} is already processed, to force executing ${FUNCNAME[0]} delete $called_fn_dir/.${FUNCNAME[0]} ${reset}\n\n" + printf "${yellow} ${FUNCNAME[0]} is already processed, to force executing ${FUNCNAME[0]} delete\n $called_fn_dir/.${FUNCNAME[0]} ${reset}\n\n" fi } @@ -385,7 +385,7 @@ function sub_brute(){ if [ "$SUBBRUTE" = false ]; then printf "\n${yellow} ${FUNCNAME[0]} skipped in this mode or defined in reconftw.cfg ${reset}\n" else - printf "${yellow} ${FUNCNAME[0]} is already processed, to force executing ${FUNCNAME[0]} delete $called_fn_dir/.${FUNCNAME[0]} ${reset}\n\n" + printf "${yellow} ${FUNCNAME[0]} is already processed, to force executing ${FUNCNAME[0]} delete\n $called_fn_dir/.${FUNCNAME[0]} ${reset}\n\n" fi fi } @@ -416,7 +416,7 @@ function sub_scraping(){ if [ "$SUBSCRAPING" = false ]; then printf "\n${yellow} ${FUNCNAME[0]} skipped in this mode or defined in reconftw.cfg ${reset}\n" else - printf "${yellow} ${FUNCNAME[0]} is already processed, to force executing ${FUNCNAME[0]} delete $called_fn_dir/.${FUNCNAME[0]} ${reset}\n\n" + printf "${yellow} ${FUNCNAME[0]} is already processed, to force 
executing ${FUNCNAME[0]} delete\n $called_fn_dir/.${FUNCNAME[0]} ${reset}\n\n" fi fi } @@ -474,7 +474,7 @@ function sub_permut(){ if [ "$SUBPERMUTE" = false ]; then printf "\n${yellow} ${FUNCNAME[0]} skipped in this mode or defined in reconftw.cfg ${reset}\n" else - printf "${yellow} ${FUNCNAME[0]} is already processed, to force executing ${FUNCNAME[0]} delete $called_fn_dir/.${FUNCNAME[0]} ${reset}\n\n" + printf "${yellow} ${FUNCNAME[0]} is already processed, to force executing ${FUNCNAME[0]} delete\n $called_fn_dir/.${FUNCNAME[0]} ${reset}\n\n" fi fi } @@ -504,7 +504,7 @@ function sub_recursive(){ if [ "$SUBRECURSIVE" = false ]; then printf "\n${yellow} ${FUNCNAME[0]} skipped in this mode or defined in reconftw.cfg ${reset}\n" else - printf "${yellow} ${FUNCNAME[0]} is already processed, to force executing ${FUNCNAME[0]} delete $called_fn_dir/.${FUNCNAME[0]} ${reset}\n\n" + printf "${yellow} ${FUNCNAME[0]} is already processed, to force executing ${FUNCNAME[0]} delete\n $called_fn_dir/.${FUNCNAME[0]} ${reset}\n\n" fi fi } @@ -523,7 +523,7 @@ function subtakeover(){ if [ "$SUBTAKEOVER" = false ]; then printf "\n${yellow} ${FUNCNAME[0]} skipped in this mode or defined in reconftw.cfg ${reset}\n" else - printf "${yellow} ${FUNCNAME[0]} is already processed, to force executing ${FUNCNAME[0]} delete $called_fn_dir/.${FUNCNAME[0]} ${reset}\n\n" + printf "${yellow} ${FUNCNAME[0]} is already processed, to force executing ${FUNCNAME[0]} delete\n $called_fn_dir/.${FUNCNAME[0]} ${reset}\n\n" fi fi } @@ -540,7 +540,7 @@ function zonetransfer(){ if [ "$ZONETRANSFER" = false ]; then printf "\n${yellow} ${FUNCNAME[0]} skipped in this mode or defined in reconftw.cfg ${reset}\n" else - printf "${yellow} ${FUNCNAME[0]} is already processed, to force executing ${FUNCNAME[0]} delete $called_fn_dir/.${FUNCNAME[0]} ${reset}\n\n" + printf "${yellow} ${FUNCNAME[0]} is already processed, to force executing ${FUNCNAME[0]} delete\n $called_fn_dir/.${FUNCNAME[0]} ${reset}\n\n" fi fi } @@ -548,7 +548,7 @@ function zonetransfer(){ function s3buckets(){ if { [ ! 
-f "$called_fn_dir/.${FUNCNAME[0]}" ] || [ "$DIFF" = true ]; } && [ "$S3BUCKETS" = true ]; then start_func "AWS S3 buckets search" - s3scanner scan -f subdomains/subdomains.txt 2>>"$LOGFILE" | grep -iv "not_exist" | anew -q .tmp/s3buckets.txt + s3scanner scan -f subdomains/subdomains.txt 2>>"$LOGFILE" | grep -iv "not_exist" | grep -iv "Warning:" | anew -q .tmp/s3buckets.txt NUMOFLINES=$(cat .tmp/s3buckets.txt 2>>"$LOGFILE" | anew subdomains/s3buckets.txt | wc -l) if [ "$NUMOFLINES" -gt 0 ]; then notification "${NUMOFLINES} new S3 buckets found" info @@ -558,7 +558,7 @@ function s3buckets(){ if [ "$S3BUCKETS" = false ]; then printf "\n${yellow} ${FUNCNAME[0]} skipped in this mode or defined in reconftw.cfg ${reset}\n" else - printf "${yellow} ${FUNCNAME[0]} is already processed, to force executing ${FUNCNAME[0]} delete $called_fn_dir/.${FUNCNAME[0]} ${reset}\n\n" + printf "${yellow} ${FUNCNAME[0]} is already processed, to force executing ${FUNCNAME[0]} delete\n $called_fn_dir/.${FUNCNAME[0]} ${reset}\n\n" fi fi } @@ -590,17 +590,17 @@ function webprobe_simple(){ if [ "$WEBPROBESIMPLE" = false ]; then printf "\n${yellow} ${FUNCNAME[0]} skipped in this mode or defined in reconftw.cfg ${reset}\n" else - printf "${yellow} ${FUNCNAME[0]} is already processed, to force executing ${FUNCNAME[0]} delete $called_fn_dir/.${FUNCNAME[0]} ${reset}\n\n" + printf "${yellow} ${FUNCNAME[0]} is already processed, to force executing ${FUNCNAME[0]} delete\n $called_fn_dir/.${FUNCNAME[0]} ${reset}\n\n" fi fi } function webprobe_full(){ - if ([ ! -f "$called_fn_dir/.${FUNCNAME[0]}" ] || [ "$DIFF" = true ]) && [ "$WEBPROBEFULL" = true ]; then + if { [ ! -f "$called_fn_dir/.${FUNCNAME[0]}" ] || [ "$DIFF" = true ]; } && [ "$WEBPROBEFULL" = true ]; then start_func "Http probing non standard ports" sudo unimap --fast-scan -f subdomains/subdomains.txt --ports $UNCOMMON_PORTS_WEB -q -k --url-output | anew -q .tmp/nmap_uncommonweb.txt - cat .tmp/nmap_uncommonweb.txt | httpx -follow-host-redirects -random-agent -status-code -threads $HTTPX_UNCOMMONPORTS_THREADS -timeout 10 -silent -retries 2 -no-color | cut -d ' ' -f1 | grep ".$domain" | anew -q .tmp/probed_uncommon_ports_tmp.txt + cat .tmp/nmap_uncommonweb.txt | httpx -follow-host-redirects -random-agent -status-code -threads $HTTPX_UNCOMMONPORTS_THREADS -timeout $HTTPX_UNCOMMONPORTS_TIMEOUT -silent -retries 2 -no-color | cut -d ' ' -f1 | grep ".$domain" | anew -q .tmp/probed_uncommon_ports_tmp.txt #timeout_secs=$(($(cat subdomains/subdomains.txt | wc -l)*5+10)) #cat subdomains/subdomains.txt | timeout $timeout_secs naabu -p $UNCOMMON_PORTS_WEB -o .tmp/nmap_uncommonweb.txt &>>"$LOGFILE" && uncommon_ports_checked=$(cat .tmp/nmap_uncommonweb.txt | cut -d ':' -f2 | sort -u | sed -e 'H;${x;s/\n/,/g;s/^,//;p;};d') @@ -619,7 +619,7 @@ function webprobe_full(){ if [ "$WEBPROBEFULL" = false ]; then printf "\n${yellow} ${FUNCNAME[0]} skipped in this mode or defined in reconftw.cfg ${reset}\n" else - printf "${yellow} ${FUNCNAME[0]} is already processed, to force executing ${FUNCNAME[0]} delete $called_fn_dir/.${FUNCNAME[0]} ${reset}\n\n" + printf "${yellow} ${FUNCNAME[0]} is already processed, to force executing ${FUNCNAME[0]} delete\n $called_fn_dir/.${FUNCNAME[0]} ${reset}\n\n" fi fi } @@ -634,7 +634,7 @@ function screenshot(){ if [ "$WEBSCREENSHOT" = false ]; then printf "\n${yellow} ${FUNCNAME[0]} skipped in this mode or defined in reconftw.cfg ${reset}\n" else - printf "${yellow} ${FUNCNAME[0]} is already processed, to force executing ${FUNCNAME[0]} delete 
$called_fn_dir/.${FUNCNAME[0]} ${reset}\n\n" + printf "${yellow} ${FUNCNAME[0]} is already processed, to force executing ${FUNCNAME[0]} delete\n $called_fn_dir/.${FUNCNAME[0]} ${reset}\n\n" fi fi } @@ -661,7 +661,7 @@ function favicon(){ if [ "$FAVICON" = false ]; then printf "\n${yellow} ${FUNCNAME[0]} skipped in this mode or defined in reconftw.cfg ${reset}\n" else - printf "${yellow} ${FUNCNAME[0]} is already processed, to force executing ${FUNCNAME[0]} delete $called_fn_dir/.${FUNCNAME[0]} ${reset}\n\n" + printf "${yellow} ${FUNCNAME[0]} is already processed, to force executing ${FUNCNAME[0]} delete\n $called_fn_dir/.${FUNCNAME[0]} ${reset}\n\n" fi fi } @@ -691,7 +691,7 @@ function portscan(){ if [ "$PORTSCANNER" = false ]; then printf "\n${yellow} ${FUNCNAME[0]} skipped in this mode or defined in reconftw.cfg ${reset}\n" else - printf "${yellow} ${FUNCNAME[0]} is already processed, to force executing ${FUNCNAME[0]} delete $called_fn_dir/.${FUNCNAME[0]} ${reset}\n\n" + printf "${yellow} ${FUNCNAME[0]} is already processed, to force executing ${FUNCNAME[0]} delete\n $called_fn_dir/.${FUNCNAME[0]} ${reset}\n\n" fi fi } @@ -707,7 +707,7 @@ function cloudprovider(){ if [ "$CLOUD_IP" = false ]; then printf "\n${yellow} ${FUNCNAME[0]} skipped in this mode or defined in reconftw.cfg ${reset}\n" else - printf "${yellow} ${FUNCNAME[0]} is already processed, to force executing ${FUNCNAME[0]} delete $called_fn_dir/.${FUNCNAME[0]} ${reset}\n\n" + printf "${yellow} ${FUNCNAME[0]} is already processed, to force executing ${FUNCNAME[0]} delete\n $called_fn_dir/.${FUNCNAME[0]} ${reset}\n\n" fi fi } @@ -736,7 +736,7 @@ function waf_checks(){ if [ "$WAF" = false ]; then printf "\n${yellow} ${FUNCNAME[0]} skipped in this mode or defined in reconftw.cfg ${reset}\n" else - printf "${yellow} ${FUNCNAME[0]} is already processed, to force executing ${FUNCNAME[0]} delete $called_fn_dir/.${FUNCNAME[0]} ${reset}\n\n" + printf "${yellow} ${FUNCNAME[0]} is already processed, to force executing ${FUNCNAME[0]} delete\n $called_fn_dir/.${FUNCNAME[0]} ${reset}\n\n" fi fi } @@ -762,7 +762,7 @@ function nuclei_check(){ if [ "$NUCLEICHECK" = false ]; then printf "\n${yellow} ${FUNCNAME[0]} skipped in this mode or defined in reconftw.cfg ${reset}\n" else - printf "${yellow} ${FUNCNAME[0]} is already processed, to force executing ${FUNCNAME[0]} delete $called_fn_dir/.${FUNCNAME[0]} ${reset}\n\n" + printf "${yellow} ${FUNCNAME[0]} is already processed, to force executing ${FUNCNAME[0]} delete\n $called_fn_dir/.${FUNCNAME[0]} ${reset}\n\n" fi fi } @@ -787,7 +787,7 @@ function fuzz(){ if [ "$FUZZ" = false ]; then printf "\n${yellow} ${FUNCNAME[0]} skipped in this mode or defined in reconftw.cfg ${reset}\n" else - printf "${yellow} ${FUNCNAME[0]} is already processed, to force executing ${FUNCNAME[0]} delete $called_fn_dir/.${FUNCNAME[0]} ${reset}\n\n" + printf "${yellow} ${FUNCNAME[0]} is already processed, to force executing ${FUNCNAME[0]} delete\n $called_fn_dir/.${FUNCNAME[0]} ${reset}\n\n" fi fi } @@ -826,7 +826,7 @@ function cms_scanner(){ if [ "$CMS_SCANNER" = false ]; then printf "\n${yellow} ${FUNCNAME[0]} skipped in this mode or defined in reconftw.cfg ${reset}\n" else - printf "${yellow} ${FUNCNAME[0]} is already processed, to force executing ${FUNCNAME[0]} delete $called_fn_dir/.${FUNCNAME[0]} ${reset}\n\n" + printf "${yellow} ${FUNCNAME[0]} is already processed, to force executing ${FUNCNAME[0]} delete\n $called_fn_dir/.${FUNCNAME[0]} ${reset}\n\n" fi fi } @@ -856,7 +856,7 @@ function params(){ if [ "$PARAMS" = 
false ]; then printf "\n${yellow} ${FUNCNAME[0]} skipped in this mode or defined in reconftw.cfg ${reset}\n" else - printf "${yellow} ${FUNCNAME[0]} is already processed, to force executing ${FUNCNAME[0]} delete $called_fn_dir/.${FUNCNAME[0]} ${reset}\n\n" + printf "${yellow} ${FUNCNAME[0]} is already processed, to force executing ${FUNCNAME[0]} delete\n $called_fn_dir/.${FUNCNAME[0]} ${reset}\n\n" fi fi } @@ -892,7 +892,7 @@ function urlchecks(){ ffuf -mc all -w webs/url_extract.txt -u FUZZ -replay-proxy $proxy_url &>>"$LOGFILE" fi else - printf "${yellow} ${FUNCNAME[0]} is already processed, to force executing ${FUNCNAME[0]} delete $called_fn_dir/.${FUNCNAME[0]} ${reset}\n\n" + printf "${yellow} ${FUNCNAME[0]} is already processed, to force executing ${FUNCNAME[0]} delete\n $called_fn_dir/.${FUNCNAME[0]} ${reset}\n\n" fi } @@ -914,7 +914,7 @@ function url_gf(){ if [ "$URL_GF" = false ]; then printf "\n${yellow} ${FUNCNAME[0]} skipped in this mode or defined in reconftw.cfg ${reset}\n" else - printf "${yellow} ${FUNCNAME[0]} is already processed, to force executing ${FUNCNAME[0]} delete $called_fn_dir/.${FUNCNAME[0]} ${reset}\n\n" + printf "${yellow} ${FUNCNAME[0]} is already processed, to force executing ${FUNCNAME[0]} delete\n $called_fn_dir/.${FUNCNAME[0]} ${reset}\n\n" fi fi } @@ -935,7 +935,7 @@ function url_ext(){ if [ "$URL_EXT" = false ]; then printf "\n${yellow} ${FUNCNAME[0]} skipped in this mode or defined in reconftw.cfg ${reset}\n" else - printf "${yellow} ${FUNCNAME[0]} is already processed, to force executing ${FUNCNAME[0]} delete $called_fn_dir/.${FUNCNAME[0]} ${reset}\n\n" + printf "${yellow} ${FUNCNAME[0]} is already processed, to force executing ${FUNCNAME[0]} delete\n $called_fn_dir/.${FUNCNAME[0]} ${reset}\n\n" fi fi } @@ -959,7 +959,7 @@ function jschecks(){ fi printf "${yellow} Running : Gathering secrets 4/5${reset}\n" if [ -s "js/js_livelinks.txt" ]; then - cat js/js_livelinks.txt | nuclei -silent -t ~/nuclei-templates/exposures/ -r $resolvers_trusted -o js/js_secrets.txt &>>"$LOGFILE" + cat js/js_livelinks.txt | nuclei -silent -t ~/nuclei-templates/exposures/tokens/ -r $resolvers_trusted -o js/js_secrets.txt &>>"$LOGFILE" fi printf "${yellow} Running : Building wordlist 5/5${reset}\n" if [ -s "js/js_livelinks.txt" ]; then @@ -973,7 +973,7 @@ function jschecks(){ if [ "$JSCHECKS" = false ]; then printf "\n${yellow} ${FUNCNAME[0]} skipped in this mode or defined in reconftw.cfg ${reset}\n" else - printf "${yellow} ${FUNCNAME[0]} is already processed, to force executing ${FUNCNAME[0]} delete $called_fn_dir/.${FUNCNAME[0]} ${reset}\n\n" + printf "${yellow} ${FUNCNAME[0]} is already processed, to force executing ${FUNCNAME[0]} delete\n $called_fn_dir/.${FUNCNAME[0]} ${reset}\n\n" fi fi } @@ -993,7 +993,7 @@ function wordlist_gen(){ if [ "$WORDLIST" = false ]; then printf "\n${yellow} ${FUNCNAME[0]} skipped in this mode or defined in reconftw.cfg ${reset}\n" else - printf "${yellow} ${FUNCNAME[0]} is already processed, to force executing ${FUNCNAME[0]} delete $called_fn_dir/.${FUNCNAME[0]} ${reset}\n\n" + printf "${yellow} ${FUNCNAME[0]} is already processed, to force executing ${FUNCNAME[0]} delete\n $called_fn_dir/.${FUNCNAME[0]} ${reset}\n\n" fi fi } @@ -1021,7 +1021,7 @@ function brokenLinks(){ if [ "$BROKENLINKS" = false ]; then printf "\n${yellow} ${FUNCNAME[0]} skipped in this mode or defined in reconftw.cfg ${reset}\n" else - printf "${yellow} ${FUNCNAME[0]} is already processed, to force executing ${FUNCNAME[0]} delete $called_fn_dir/.${FUNCNAME[0]} 
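
Annotation: among the message-formatting fixes running through this patch there is one behavioral change: jschecks now points nuclei at ~/nuclei-templates/exposures/tokens/ instead of the whole exposures/ directory, trading some coverage for a faster, lower-noise secrets pass over live JS files. The step after the change, isolated:

# JS secrets step after PATCH 05: token-exposure templates only.
# js/js_livelinks.txt and $resolvers_trusted come from earlier steps/reconftw.cfg.
cat js/js_livelinks.txt | nuclei -silent -t ~/nuclei-templates/exposures/tokens/ \
    -r "$resolvers_trusted" -o js/js_secrets.txt
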
${reset}\n\n" + printf "${yellow} ${FUNCNAME[0]} is already processed, to force executing ${FUNCNAME[0]} delete\n $called_fn_dir/.${FUNCNAME[0]} ${reset}\n\n" fi fi } @@ -1056,7 +1056,7 @@ function xss(){ elif [ ! -s "gf/xss.txt" ]; then printf "\n${yellow} ${FUNCNAME[0]} No URLs potentially vulnerables to XSS ${reset}\n\n" else - printf "${yellow} ${FUNCNAME[0]} is already processed, to force executing ${FUNCNAME[0]} delete $called_fn_dir/.${FUNCNAME[0]} ${reset}\n\n" + printf "${yellow} ${FUNCNAME[0]} is already processed, to force executing ${FUNCNAME[0]} delete\n $called_fn_dir/.${FUNCNAME[0]} ${reset}\n\n" fi fi } @@ -1071,7 +1071,7 @@ function cors(){ if [ "$CORS" = false ]; then printf "\n${yellow} ${FUNCNAME[0]} skipped in this mode or defined in reconftw.cfg ${reset}\n" else - printf "${yellow} ${FUNCNAME[0]} is already processed, to force executing ${FUNCNAME[0]} delete $called_fn_dir/.${FUNCNAME[0]} ${reset}\n\n" + printf "${yellow} ${FUNCNAME[0]} is already processed, to force executing ${FUNCNAME[0]} delete\n $called_fn_dir/.${FUNCNAME[0]} ${reset}\n\n" fi fi } @@ -1101,7 +1101,7 @@ function open_redirect(){ elif [ ! -s "gf/redirect.txt" ]; then printf "\n${yellow} ${FUNCNAME[0]} No URLs potentially vulnerables to Open Redirect ${reset}\n\n" else - printf "${yellow} ${FUNCNAME[0]} is already processed, to force executing ${FUNCNAME[0]} delete $called_fn_dir/.${FUNCNAME[0]} ${reset}\n\n" + printf "${yellow} ${FUNCNAME[0]} is already processed, to force executing ${FUNCNAME[0]} delete\n $called_fn_dir/.${FUNCNAME[0]} ${reset}\n\n" fi fi } @@ -1146,7 +1146,7 @@ function ssrf_checks(){ elif [ ! -s "gf/ssrf.txt" ]; then printf "\n${yellow} ${FUNCNAME[0]} No URLs potentially vulnerables to SSRF ${reset}\n\n" else - printf "${yellow} ${FUNCNAME[0]} is already processed, to force executing ${FUNCNAME[0]} delete $called_fn_dir/.${FUNCNAME[0]} ${reset}\n\n" + printf "${yellow} ${FUNCNAME[0]} is already processed, to force executing ${FUNCNAME[0]} delete\n $called_fn_dir/.${FUNCNAME[0]} ${reset}\n\n" fi fi } @@ -1160,7 +1160,7 @@ function crlf_checks(){ if [ "$CRLF_CHECKS" = false ]; then printf "\n${yellow} ${FUNCNAME[0]} skipped in this mode or defined in reconftw.cfg ${reset}\n" else - printf "${yellow} ${FUNCNAME[0]} is already processed, to force executing ${FUNCNAME[0]} delete $called_fn_dir/.${FUNCNAME[0]} ${reset}\n\n" + printf "${yellow} ${FUNCNAME[0]} is already processed, to force executing ${FUNCNAME[0]} delete\n $called_fn_dir/.${FUNCNAME[0]} ${reset}\n\n" fi fi } @@ -1179,7 +1179,7 @@ function lfi(){ elif [ ! -s "gf/lfi.txt" ]; then printf "\n${yellow} ${FUNCNAME[0]} No URLs potentially vulnerables to LFI ${reset}\n\n" else - printf "${yellow} ${FUNCNAME[0]} is already processed, to force executing ${FUNCNAME[0]} delete $called_fn_dir/.${FUNCNAME[0]} ${reset}\n\n" + printf "${yellow} ${FUNCNAME[0]} is already processed, to force executing ${FUNCNAME[0]} delete\n $called_fn_dir/.${FUNCNAME[0]} ${reset}\n\n" fi fi } @@ -1198,7 +1198,7 @@ function ssti(){ elif [ ! -s "gf/ssti.txt" ]; then printf "\n${yellow} ${FUNCNAME[0]} No URLs potentially vulnerables to SSTI ${reset}\n\n" else - printf "${yellow} ${FUNCNAME[0]} is already processed, to force executing ${FUNCNAME[0]} delete $called_fn_dir/.${FUNCNAME[0]} ${reset}\n\n" + printf "${yellow} ${FUNCNAME[0]} is already processed, to force executing ${FUNCNAME[0]} delete\n $called_fn_dir/.${FUNCNAME[0]} ${reset}\n\n" fi fi } @@ -1215,7 +1215,7 @@ function sqli(){ elif [ ! 
-s "gf/sqli.txt" ]; then printf "\n${yellow} ${FUNCNAME[0]} No URLs potentially vulnerables to SQLi ${reset}\n\n" else - printf "${yellow} ${FUNCNAME[0]} is already processed, to force executing ${FUNCNAME[0]} delete $called_fn_dir/.${FUNCNAME[0]} ${reset}\n\n" + printf "${yellow} ${FUNCNAME[0]} is already processed, to force executing ${FUNCNAME[0]} delete\n $called_fn_dir/.${FUNCNAME[0]} ${reset}\n\n" fi fi } @@ -1229,7 +1229,7 @@ function test_ssl(){ if [ "$TEST_SSL" = false ]; then printf "\n${yellow} ${FUNCNAME[0]} skipped in this mode or defined in reconftw.cfg ${reset}\n" else - printf "${yellow} ${FUNCNAME[0]} is already processed, to force executing ${FUNCNAME[0]} delete $called_fn_dir/.${FUNCNAME[0]} ${reset}\n\n" + printf "${yellow} ${FUNCNAME[0]} is already processed, to force executing ${FUNCNAME[0]} delete\n $called_fn_dir/.${FUNCNAME[0]} ${reset}\n\n" fi fi } @@ -1245,7 +1245,7 @@ function spraying(){ if [ "$SPRAY" = false ]; then printf "\n${yellow} ${FUNCNAME[0]} skipped in this mode or defined in reconftw.cfg ${reset}\n" else - printf "${yellow} ${FUNCNAME[0]} is already processed, to force executing ${FUNCNAME[0]} delete $called_fn_dir/.${FUNCNAME[0]} ${reset}\n\n" + printf "${yellow} ${FUNCNAME[0]} is already processed, to force executing ${FUNCNAME[0]} delete\n $called_fn_dir/.${FUNCNAME[0]} ${reset}\n\n" fi fi } @@ -1264,7 +1264,7 @@ function 4xxbypass(){ if [ "$BYPASSER4XX" = false ]; then printf "\n${yellow} ${FUNCNAME[0]} skipped in this mode or defined in reconftw.cfg ${reset}\n" else - printf "${yellow} ${FUNCNAME[0]} is already processed, to force executing ${FUNCNAME[0]} delete $called_fn_dir/.${FUNCNAME[0]} ${reset}\n\n" + printf "${yellow} ${FUNCNAME[0]} is already processed, to force executing ${FUNCNAME[0]} delete\n $called_fn_dir/.${FUNCNAME[0]} ${reset}\n\n" fi fi } @@ -1487,8 +1487,10 @@ function start(){ } function end(){ - find $dir -type f -empty | grep -v "called_fn" | xargs rm -f &>/dev/null - find $dir -type d -empty | grep -v "called_fn" | xargs rm -rf &>/dev/null + if [ ! 
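
Annotation on the sub_crt fix from PATCH 04 (mirrored for reconftw_axiom.sh in PATCH 05 below): the tls.bufferover.run results used to be written straight into .tmp/crtsh_subs.txt, bypassing the temp file every other source feeds, so they pre-seeded the file that the final new-line count deduplicates against. The collect-then-count pattern both scripts converge on, sketched with hypothetical stand-in fetchers:

# Collect-then-count with anew: every source appends (deduplicated) to one
# temp file; a single final anew pass updates the canonical list and
# reports only genuinely new lines.
# fetch_ctfr/fetch_bufferover are hypothetical stand-ins for the
# ctfr and curl/jq pipelines in the patch.
mkdir -p .tmp
fetch_ctfr()       { printf 'a.example.com\nb.example.com\n'; }
fetch_bufferover() { printf 'b.example.com\nc.example.com\n'; }
fetch_ctfr       | anew -q .tmp/crtsh_subs_tmp.txt
fetch_bufferover | anew -q .tmp/crtsh_subs_tmp.txt
NUMOFLINES=$(cat .tmp/crtsh_subs_tmp.txt | anew .tmp/crtsh_subs.txt | wc -l)
echo "${NUMOFLINES} new subs (cert transparency)"
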
"$PRESERVE" = true ]; then + find $dir -type f -empty | grep -v "called_fn" | xargs rm -f &>/dev/null + find $dir -type d -empty | grep -v "called_fn" | xargs rm -rf &>/dev/null + fi if [ "$REMOVETMP" = true ]; then rm -rf $dir/.tmp @@ -1664,6 +1666,7 @@ function multi_recon(){ touch .log/${NOW}_${NOWT}.txt [ -n "$flist" ] && LISTTOTAL=$(cat "$flist" | wc -l ) + for domain in $targets; do dir=$workdir/targets/$domain called_fn_dir=$dir/.called_fn @@ -1742,6 +1745,7 @@ function multi_recon(){ waf_checks nuclei_check for domain in $targets; do + loopstart=$(date +%s) dir=$workdir/targets/$domain called_fn_dir=$dir/.called_fn cd "$dir" || { echo "Failed to cd directory '$dir' in ${FUNCNAME[0]} @ line ${LINENO}"; exit 1; } @@ -1956,6 +1960,7 @@ while true; do shift continue ;; + '--') shift break @@ -2086,7 +2091,7 @@ case $opt_mode in webs_menu exit ;; - 'i') + 'n') PRESERVE=true if [ -n "$multi" ]; then multi_osint @@ -2110,4 +2115,4 @@ case $opt_mode in help exit 1 ;; -esac \ No newline at end of file +esac diff --git a/reconftw_axiom.sh b/reconftw_axiom.sh index 7568c1e9..ae60d5ee 100755 --- a/reconftw_axiom.sh +++ b/reconftw_axiom.sh @@ -95,7 +95,7 @@ function tools_installed(){ type -P notify &>/dev/null || { printf "${bred} [*] notify [NO]${reset}\n${reset}"; allinstalled=false;} type -P dalfox &>/dev/null || { printf "${bred} [*] dalfox [NO]${reset}\n${reset}"; allinstalled=false;} type -P puredns &>/dev/null || { printf "${bred} [*] puredns [NO]${reset}\n${reset}"; allinstalled=false;} - type -P naabu &>/dev/null || { printf "${bred} [*] naabu [NO]${reset}\n${reset}"; allinstalled=false;} + type -P unimap &>/dev/null || { printf "${bred} [*] unimap [NO]${reset}\n${reset}"; allinstalled=false;} type -P axiom-ls &>/dev/null || { printf "${bred} [*] axiom [NO]${reset}\n${reset}"; allinstalled=false;} if [ "${allinstalled}" = true ]; then @@ -135,9 +135,9 @@ function github_dorks(){ start_func "Github Dorks in process" if [ -s "${GITHUB_TOKENS}" ]; then if [ "$DEEP" = true ]; then - python3 "$tools/GitDorker/GitDorker.py" -tf "${GITHUB_TOKENS}" -e "$GITDORKER_THREADS" -q "$domain" -p -d "$tools/GitDorker/Dorks/alldorksv3" | grep "\[+\]" | grep "git" | anew -q osint/gitdorks.txt &>>"$LOGFILE" + python3 "$tools/GitDorker/GitDorker.py" -tf "${GITHUB_TOKENS}" -e "$GITDORKER_THREADS" -q "$domain" -p -ri -d "$tools/GitDorker/Dorks/alldorksv3" | grep "\[+\]" | grep "git" | anew -q osint/gitdorks.txt &>>"$LOGFILE" else - python3 "$tools/GitDorker/GitDorker.py" -tf "${GITHUB_TOKENS}" -e "$GITDORKER_THREADS" -q "$domain" -p -d "$tools/GitDorker/Dorks/medium_dorks.txt" | grep "\[+\]" | grep "git" | anew -q osint/gitdorks.txt &>>"$LOGFILE" + python3 "$tools/GitDorker/GitDorker.py" -tf "${GITHUB_TOKENS}" -e "$GITDORKER_THREADS" -q "$domain" -p -ri -d "$tools/GitDorker/Dorks/medium_dorks.txt" | grep "\[+\]" | grep "git" | anew -q osint/gitdorks.txt &>>"$LOGFILE" fi sed -r -i "s/\x1B\[([0-9]{1,3}(;[0-9]{1,2})?)?[mGK]//g" osint/gitdorks.txt else @@ -327,7 +327,7 @@ function sub_crt(){ start_subfunc "Running : Crtsh Subdomain Enumeration" echo "python3 -u /home/op/recon/ctfr/ctfr.py -d ${domain} -o ${domain}_ctfr.txt; cat ${domain}_ctfr.txt" > .tmp/sub_ctrf_commands.txt axiom-scan .tmp/sub_ctrf_commands.txt -m exec -o .tmp/crtsh_subs_tmp.txt &>>"$LOGFILE" - curl "https://tls.bufferover.run/dns?q=.${domain}" 2>>"$LOGFILE" | jq -r .Results[] 2>>"$LOGFILE" | cut -d ',' -f3 | grep -F ".$domain" | anew -q .tmp/crtsh_subs.txt + curl "https://tls.bufferover.run/dns?q=.${domain}" 2>>"$LOGFILE" | jq -r .Results[] 
2>>"$LOGFILE" | cut -d ',' -f3 | grep -F ".$domain" | anew -q .tmp/crtsh_subs_tmp.txt curl "https://dns.bufferover.run/dns?q=.${domain}" 2>>"$LOGFILE" | jq -r '.FDNS_A'[],'.RDNS'[] 2>>"$LOGFILE" | cut -d ',' -f2 | grep -F ".$domain" | anew -q .tmp/crtsh_subs_tmp.txt NUMOFLINES=$(cat .tmp/crtsh_subs_tmp.txt 2>>"$LOGFILE" | anew .tmp/crtsh_subs.txt | wc -l) end_subfunc "${NUMOFLINES} new subs (cert transparency)" ${FUNCNAME[0]} @@ -551,8 +551,8 @@ function zonetransfer(){ function s3buckets(){ if { [ ! -f "$called_fn_dir/.${FUNCNAME[0]}" ] || [ "$DIFF" = true ]; } && [ "$S3BUCKETS" = true ]; then start_func "AWS S3 buckets search" - s3scanner scan -f subdomains/subdomains.txt 2>>"$LOGFILE" | grep -iv "not_exist" | anew -q .tmp/s3buckets.txt - #axiom-scan subdomains/subdomains.txt -m s3scanner -o .tmp/s3buckets.txt &>>"$LOGFILE" + axiom-scan subdomains/subdomains.txt -m s3scanner -o .tmp/s3buckets_tmp.txt &>>"$LOGFILE" + cat .tmp/s3buckets_tmp.txt | grep -iv "not_exist" | grep -iv "Warning:" | anew -q .tmp/s3buckets.txt NUMOFLINES=$(cat .tmp/s3buckets.txt 2>>"$LOGFILE" | anew subdomains/s3buckets.txt | wc -l) if [ "$NUMOFLINES" -gt 0 ]; then notification "${NUMOFLINES} new S3 buckets found" info @@ -602,10 +602,14 @@ function webprobe_simple(){ function webprobe_full(){ if { [ ! -f "$called_fn_dir/.${FUNCNAME[0]}" ] || [ "$DIFF" = true ]; } && [ "$WEBPROBEFULL" = true ]; then start_func "Http probing non standard ports" - axiom-scan subdomains/subdomains.txt -m naabu -p $UNCOMMON_PORTS_WEB -o .tmp/nmap_uncommonweb.txt &>>"$LOGFILE" && uncommon_ports_checked=$(cat .tmp/nmap_uncommonweb.txt | cut -d ':' -f2 | sort -u | sed -e 'H;${x;s/\n/,/g;s/^,//;p;};d') - if [ -n "$uncommon_ports_checked" ]; then - axiom-scan subdomains/subdomains.txt -m httpx -ports $uncommon_ports_checked -follow-host-redirects -random-agent -status-code -threads $HTTPX_UNCOMMONPORTS_THREADS -timeout 10 -silent -retries 2 -no-color -o .tmp/probed_uncommon_ports_tmp_.txt &>>"$LOGFILE" && cat .tmp/probed_uncommon_ports_tmp_.txt | cut -d ' ' -f1 | grep ".$domain$" | anew -q .tmp/probed_uncommon_ports_tmp.txt - fi + + axiom-scan subdomains/subdomains.txt -m unimap --fast-scan --ports $UNCOMMON_PORTS_WEB -q -k --url-output -o .tmp/nmap_uncommonweb.txt + axiom-scan .tmp/nmap_uncommonweb.txt -m httpx -follow-host-redirects -random-agent -status-code -threads $HTTPX_UNCOMMONPORTS_THREADS -timeout $HTTPX_UNCOMMONPORTS_TIMEOUT -silent -retries 2 -no-color -o .tmp/probed_uncommon_ports_tmp_.txt &>>"$LOGFILE" && cat .tmp/probed_uncommon_ports_tmp_.txt | cut -d ' ' -f1 | grep ".$domain$" | anew -q .tmp/probed_uncommon_ports_tmp.txt + + #axiom-scan subdomains/subdomains.txt -m naabu -p $UNCOMMON_PORTS_WEB -o .tmp/nmap_uncommonweb.txt &>>"$LOGFILE" && uncommon_ports_checked=$(cat .tmp/nmap_uncommonweb.txt | cut -d ':' -f2 | sort -u | sed -e 'H;${x;s/\n/,/g;s/^,//;p;};d') + #if [ -n "$uncommon_ports_checked" ]; then + #axiom-scan subdomains/subdomains.txt -m httpx -ports $uncommon_ports_checked -follow-host-redirects -random-agent -status-code -threads $HTTPX_UNCOMMONPORTS_THREADS -timeout 10 -silent -retries 2 -no-color -o .tmp/probed_uncommon_ports_tmp_.txt &>>"$LOGFILE" && cat .tmp/probed_uncommon_ports_tmp_.txt | cut -d ' ' -f1 | grep ".$domain$" | anew -q .tmp/probed_uncommon_ports_tmp.txt + #fi NUMOFLINES=$(cat .tmp/probed_uncommon_ports_tmp.txt 2>>"$LOGFILE" | anew webs/webs_uncommon_ports.txt | wc -l) notification "Uncommon web ports: ${NUMOFLINES} new websites" good cat webs/webs_uncommon_ports.txt 2>>"$LOGFILE" @@ -958,7 +962,7 
@@ function jschecks(){ fi printf "${yellow} Running : Gathering secrets 4/5${reset}\n" if [ -s "js/js_livelinks.txt" ]; then - axiom-scan js/js_livelinks.txt -m nuclei -w /home/op/recon/nuclei/exposures/ -r /home/op/lists/resolvers_trusted.txt -o js/js_secrets.txt &>>"$LOGFILE" + axiom-scan js/js_livelinks.txt -m nuclei -w /home/op/recon/nuclei/exposures/tokens/ -r /home/op/lists/resolvers_trusted.txt -o js/js_secrets.txt &>>"$LOGFILE" fi printf "${yellow} Running : Building wordlist 5/5${reset}\n" if [ -s "js/js_livelinks.txt" ]; then @@ -1411,7 +1415,6 @@ function resolvers_update(){ axiom-exec 'wget -O /home/op/lists/resolvers_trusted.txt https://gist.githubusercontent.com/six2dez/ae9ed7e5c786461868abd3f2344401b6/raw' &>/dev/null update_resolvers=false fi - } function axiom_lauch(){ @@ -1633,6 +1636,7 @@ function multi_osint(){ fi if [ -s "$list" ]; then + sed -i 's/\r$//' $list targets=$(cat $list) else notification "Target list not provided" error @@ -1711,6 +1715,7 @@ function multi_recon(){ fi if [ -s "$list" ]; then + sed -i 's/\r$//' $list targets=$(cat $list) else notification "Target list not provided" error @@ -1752,6 +1757,7 @@ function multi_recon(){ favicon currently=$(date +"%H:%M:%S") loopend=$(date +%s) + getElapsedTime $loopstart $loopend printf "\n\n${reset}#######################################################################\n" printf "${bgreen} $domain finished 1st loop in ${runtime} $currently ${reset}\n" if [ -n "$flist" ]; then @@ -1812,8 +1818,9 @@ function multi_recon(){ for domain in $targets; do loopstart=$(date +%s) dir=$workdir/targets/$domain - called_fn_dir=$dir/.called_fn + called_fn_dir=$dir/.called_fn cd "$dir" || { echo "Failed to cd directory '$dir' in ${FUNCNAME[0]} @ line ${LINENO}"; exit 1; } + loopstart=$(date +%s) fuzz params urlchecks @@ -1875,35 +1882,35 @@ function subs_menu(){ } function webs_menu(){ - subtakeover - s3buckets - waf_checks - nuclei_check - cms_scanner - fuzz - 4xxbypass - cors - params - urlchecks - url_gf - jschecks - wordlist_gen - open_redirect - ssrf_checks - crlf_checks - lfi - ssti - sqli - xss - spraying - brokenLinks - test_ssl - end + subtakeover + s3buckets + waf_checks + nuclei_check + cms_scanner + fuzz + 4xxbypass + cors + params + urlchecks + url_gf + jschecks + wordlist_gen + open_redirect + ssrf_checks + crlf_checks + lfi + ssti + sqli + xss + spraying + brokenLinks + test_ssl + end } function help(){ printf "\n Usage: $0 [-d domain.tld] [-m name] [-l list.txt] [-x oos.txt] [-i in.txt] " - printf "\n [-r] [-s] [-p] [-a] [-w] [-n] [-i] [-h] [--deep] [--fs] [-o OUTPUT]\n\n" + printf "\n [-r] [-s] [-p] [-a] [-w] [-n] [-i] [-h] [-f] [--deep] [-o OUTPUT]\n\n" printf " ${bblue}TARGET OPTIONS${reset}\n" printf " -d domain.tld Target domain\n" printf " -m company Target company name\n" @@ -1921,7 +1928,7 @@ function help(){ printf " -h Help - Show this help\n" printf " \n" printf " ${bblue}GENERAL OPTIONS${reset}\n" - printf " -f confile_file Alternate reconftw.cfg file\n" + printf " -f confile_file Alternate reconftw.cfg file\n" printf " --deep Deep scan (Enable some slow options for deeper scan)\n" printf " -o output/path Define output folder\n" printf " \n" @@ -1975,7 +1982,7 @@ while true; do outOfScope_file=$2 shift 2 continue - ;; + ;; '-i') inScope_file=$2 shift 2 @@ -2009,10 +2016,10 @@ while true; do continue ;; '-n'|'--osint') - opt_mode='i' + opt_mode='n' shift continue - ;; + ;; # extra stuff '-o') @@ -2021,12 +2028,12 @@ while true; do shift 2 continue - ;; + ;; '-f') config_file=$2 shift 2 continue - ;; 
+ ;; '--deep') opt_deep=true shift @@ -2043,12 +2050,10 @@ while true; do help exit 1 ;; - esac done - -# This is the first thing to do to read in alternate config +# This is the first thing to do to read in alternate config if [ -s "$config_file" ]; then . "${config_file}" else @@ -2106,8 +2111,8 @@ case $opt_mode in exit fi if [ -n "$list" ]; then - mode="list_recon" + sed -i 's/\r$//' $list for domain in $(cat $list); do start recon @@ -2123,6 +2128,7 @@ case $opt_mode in 's') if [ -n "$list" ]; then mode="subs_menu" + sed -i 's/\r$//' $list for domain in $(cat $list); do subs_menu done @@ -2133,6 +2139,7 @@ case $opt_mode in 'p') if [ -n "$list" ]; then mode="passive" + sed -i 's/\r$//' $list for domain in $(cat $list); do passive done @@ -2143,6 +2150,7 @@ case $opt_mode in 'a') if [ -n "$list" ]; then mode="all" + sed -i 's/\r$//' $list for domain in $(cat $list); do all done @@ -2162,13 +2170,14 @@ case $opt_mode in webs_menu exit ;; - 'i') + 'n') PRESERVE=true if [ -n "$multi" ]; then - multi_osint + multi_osint exit fi if [ -n "$list" ]; then + sed -i 's/\r$//' $list for domain in $(cat $list); do start osint @@ -2179,8 +2188,7 @@ case $opt_mode in osint end fi - - ;; + ;; # No mode selected. EXIT! *) help From eaf385cd3fc9448f615bec8832b4deaf2d9a7894 Mon Sep 17 00:00:00 2001 From: six2dez Date: Thu, 13 May 2021 10:31:07 +0200 Subject: [PATCH 06/32] Load default config in help --- reconftw.sh | 1 + reconftw_axiom.sh | 1 + 2 files changed, 2 insertions(+) diff --git a/reconftw.sh b/reconftw.sh index 54178fc4..1ba8c8fa 100755 --- a/reconftw.sh +++ b/reconftw.sh @@ -1967,6 +1967,7 @@ while true; do ;; '--help'| '-h'| *) # echo "Unknown argument: $1" + . ./reconftw.cfg banner help exit 1 diff --git a/reconftw_axiom.sh b/reconftw_axiom.sh index ae60d5ee..7bb0217b 100755 --- a/reconftw_axiom.sh +++ b/reconftw_axiom.sh @@ -2046,6 +2046,7 @@ while true; do ;; '--help'| '-h'| *) # echo "Unknown argument: $1" + . ./reconftw.cfg banner help exit 1 From eadfcb86ee6c4d3a607cd82429eed151e28c3ebb Mon Sep 17 00:00:00 2001 From: d3mondev <55468528+d3mondev@users.noreply.github.com> Date: Fri, 14 May 2021 05:13:59 +0000 Subject: [PATCH 07/32] Fix possible hang during install_apt() The install.sh script could hang during the install_apt() function when apt-get expects user interaction, even though the -y argument is provided. I added DEBIAN_FRONTEND="noninteractive" in front of the calls to apt install to help prevent this. 
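A minimal sketch of the pattern this commit applies, with illustrative package names rather than the full list install.sh handles. Note that the variable is placed after $SUDO so it reaches apt's environment; placed before sudo it would typically be stripped by sudo's default env_reset:

```bash
#!/usr/bin/env bash
# Sketch only: SUDO and DEBUG_STD mirror the variables install.sh defines.
SUDO="sudo"
DEBUG_STD="&>/dev/null"

# DEBIAN_FRONTEND=noninteractive tells debconf to take package defaults
# instead of prompting, so apt cannot stall on user input even with -y.
eval $SUDO DEBIAN_FRONTEND="noninteractive" apt install curl jq -y $DEBUG_STD
```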
--- install.sh | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/install.sh b/install.sh index 54c26606..d91167cc 100755 --- a/install.sh +++ b/install.sh @@ -83,9 +83,9 @@ fi install_apt(){ eval $SUDO apt update -y $DEBUG_STD - eval $SUDO apt install chromium-browser -y $DEBUG_STD - eval $SUDO apt install chromium -y $DEBUG_STD - eval $SUDO apt install python3 python3-pip gcc build-essential ruby git curl libpcap-dev wget zip python3-dev pv dnsutils libssl-dev libffi-dev libxml2-dev libxslt1-dev zlib1g-dev nmap jq apt-transport-https lynx tor medusa -y $DEBUG_STD + eval $SUDO DEBIAN_FRONTEND="noninteractive" apt install chromium-browser -y $DEBUG_STD + eval $SUDO DEBIAN_FRONTEND="noninteractive" apt install chromium -y $DEBUG_STD + eval $SUDO DEBIAN_FRONTEND="noninteractive" apt install python3 python3-pip gcc build-essential ruby git curl libpcap-dev wget zip python3-dev pv dnsutils libssl-dev libffi-dev libxml2-dev libxslt1-dev zlib1g-dev nmap jq apt-transport-https lynx tor medusa -y $DEBUG_STD eval $SUDO systemctl enable tor $DEBUG_STD } From e1d62444faa21e24fc9dae10e7dea8a0e68b7789 Mon Sep 17 00:00:00 2001 From: six2dez Date: Fri, 14 May 2021 07:59:19 +0200 Subject: [PATCH 08/32] Error checks on gospider --- reconftw.sh | 10 +++++----- reconftw_axiom.sh | 23 +++++++++++++---------- 2 files changed, 18 insertions(+), 15 deletions(-) diff --git a/reconftw.sh b/reconftw.sh index 1ba8c8fa..53444e88 100755 --- a/reconftw.sh +++ b/reconftw.sh @@ -399,9 +399,9 @@ function sub_scraping(){ cat .tmp/probed_tmp_scrap.txt | httpx -csp-probe -random-agent -status-code -threads $HTTPX_THREADS -timeout $HTTPX_TIMEOUT -silent -retries 2 -no-color | cut -d ' ' -f1 | grep ".$domain$" | anew .tmp/probed_tmp_scrap.txt | unfurl -u domains | anew -q .tmp/scrap_subs.txt cat .tmp/probed_tmp_scrap.txt | httpx -tls-probe -random-agent -status-code -threads $HTTPX_THREADS -timeout $HTTPX_TIMEOUT -silent -retries 2 -no-color | cut -d ' ' -f1 | grep ".$domain$" | anew .tmp/probed_tmp_scrap.txt | unfurl -u domains | anew -q .tmp/scrap_subs.txt if [ "$DEEP" = true ]; then - gospider -S .tmp/probed_tmp_scrap.txt --js -t $GOSPIDER_THREADS -d 3 --sitemap --robots -w -r > .tmp/gospider.txt + [ -f ".tmp/probed_tmp_scrap.txt" ] && gospider -S .tmp/probed_tmp_scrap.txt --js -t $GOSPIDER_THREADS -d 3 --sitemap --robots -w -r > .tmp/gospider.txt else - gospider -S .tmp/probed_tmp_scrap.txt --js -t $GOSPIDER_THREADS -d 2 --sitemap --robots -w -r > .tmp/gospider.txt + [ -f ".tmp/probed_tmp_scrap.txt" ] && gospider -S .tmp/probed_tmp_scrap.txt --js -t $GOSPIDER_THREADS -d 2 --sitemap --robots -w -r > .tmp/gospider.txt fi sed -i '/^.\{2048\}./d' .tmp/gospider.txt cat .tmp/gospider.txt | grep -Eo 'https?://[^ ]+' | sed 's/]$//' | unfurl --unique domains | grep ".$domain$" | anew -q .tmp/scrap_subs.txt @@ -870,13 +870,13 @@ function urlchecks(){ diff_webs=$(diff <(sort -u .tmp/probed_tmp.txt) <(sort -u webs/webs.txt) | wc -l) if [ $diff_webs != "0" ] || [ ! 
-s ".tmp/gospider.txt" ]; then if [ "$DEEP" = true ]; then - gospider -S webs/webs.txt --js -t $GOSPIDER_THREADS -d 3 --sitemap --robots -w -r > .tmp/gospider.txt + [ -f "webs/webs.txt" ] && gospider -S webs/webs.txt --js -t $GOSPIDER_THREADS -d 3 --sitemap --robots -w -r > .tmp/gospider.txt else - gospider -S webs/webs.txt --js -t $GOSPIDER_THREADS -d 2 --sitemap --robots -w -r > .tmp/gospider.txt + [ -f "webs/webs.txt" ] && gospider -S webs/webs.txt --js -t $GOSPIDER_THREADS -d 2 --sitemap --robots -w -r > .tmp/gospider.txt fi fi sed -i '/^.\{2048\}./d' .tmp/gospider.txt - cat .tmp/gospider.txt | grep -Eo 'https?://[^ ]+' | sed 's/]$//' | grep ".$domain$" | anew -q .tmp/url_extract_tmp.txt + [ -f ".tmp/gospider.txt" ] && cat .tmp/gospider.txt | grep -Eo 'https?://[^ ]+' | sed 's/]$//' | grep ".$domain$" | anew -q .tmp/url_extract_tmp.txt if [ -s "${GITHUB_TOKENS}" ]; then github-endpoints -q -k -d $domain -t ${GITHUB_TOKENS} -o .tmp/github-endpoints.txt &>>"$LOGFILE" cat .tmp/github-endpoints.txt 2>>"$LOGFILE" | anew -q .tmp/url_extract_tmp.txt diff --git a/reconftw_axiom.sh b/reconftw_axiom.sh index 7bb0217b..943eafe4 100755 --- a/reconftw_axiom.sh +++ b/reconftw_axiom.sh @@ -397,15 +397,17 @@ function sub_scraping(){ start_subfunc "Running : Source code scraping subdomain search" touch .tmp/scrap_subs.txt if [ -s "$dir/subdomains/subdomains.txt" ]; then - axiom-scan subdomains/subdomains.txt -m httpx -follow-host-redirects -random-agent -status-code -threads $HTTPX_THREADS -timeout $HTTPX_TIMEOUT -silent -retries 2 -no-color -o .tmp/probed_tmp_scrap1.txt &>>"$LOGFILE" && cat .tmp/probed_tmp_scrap1.txt | cut -d ' ' -f1 | grep ".$domain$" | anew -q .tmp/probed_tmp_scrap.txt - axiom-scan .tmp/probed_tmp_scrap.txt -m httpx -csp-probe -random-agent -status-code -threads $HTTPX_THREADS -timeout $HTTPX_TIMEOUT -silent -retries 2 -no-color -o .tmp/probed_tmp_scrap2.txt &>>"$LOGFILE" && cat .tmp/probed_tmp_scrap2.txt | cut -d ' ' -f1 | grep ".$domain$" | anew .tmp/probed_tmp_scrap.txt | unfurl -u domains | anew -q .tmp/scrap_subs.txt - axiom-scan .tmp/probed_tmp_scrap.txt -m httpx -tls-probe -random-agent -status-code -threads $HTTPX_THREADS -timeout $HTTPX_TIMEOUT -silent -retries 2 -no-color -o .tmp/probed_tmp_scrap3.txt &>>"$LOGFILE" && cat .tmp/probed_tmp_scrap3.txt | cut -d ' ' -f1 | grep ".$domain$" | anew .tmp/probed_tmp_scrap.txt | unfurl -u domains | anew -q .tmp/scrap_subs.txt + axiom-scan subdomains/subdomains.txt -m httpx -follow-host-redirects -random-agent -status-code -threads $HTTPX_THREADS -timeout $HTTPX_TIMEOUT -silent -retries 2 -no-color -o .tmp/probed_tmp_scrap1.txt &>>"$LOGFILE" && cat .tmp/probed_tmp_scrap1.txt 2>>"$LOGFILE" | cut -d ' ' -f1 | grep ".$domain$" | anew -q .tmp/probed_tmp_scrap.txt + axiom-scan .tmp/probed_tmp_scrap.txt -m httpx -csp-probe -random-agent -status-code -threads $HTTPX_THREADS -timeout $HTTPX_TIMEOUT -silent -retries 2 -no-color -o .tmp/probed_tmp_scrap2.txt &>>"$LOGFILE" && cat .tmp/probed_tmp_scrap2.txt 2>>"$LOGFILE" | cut -d ' ' -f1 | grep ".$domain$" | anew .tmp/probed_tmp_scrap.txt | unfurl -u domains | anew -q .tmp/scrap_subs.txt + axiom-scan .tmp/probed_tmp_scrap.txt -m httpx -tls-probe -random-agent -status-code -threads $HTTPX_THREADS -timeout $HTTPX_TIMEOUT -silent -retries 2 -no-color -o .tmp/probed_tmp_scrap3.txt &>>"$LOGFILE" && cat .tmp/probed_tmp_scrap3.txt 2>>"$LOGFILE" | cut -d ' ' -f1 | grep ".$domain$" | anew .tmp/probed_tmp_scrap.txt | unfurl -u domains | anew -q .tmp/scrap_subs.txt if [ "$DEEP" = true ]; then - axiom-scan 
.tmp/probed_tmp_scrap.txt -m gospider --js -d 3 --sitemap --robots -w -r -o .tmp/gospider &>>"$LOGFILE" + [ -f ".tmp/probed_tmp_scrap.txt" ] && axiom-scan .tmp/probed_tmp_scrap.txt -m gospider --js -d 3 --sitemap --robots -w -r -o .tmp/gospider &>>"$LOGFILE" else - axiom-scan .tmp/probed_tmp_scrap.txt -m gospider --js -d 2 --sitemap --robots -w -r -o .tmp/gospider &>>"$LOGFILE" + [ -f ".tmp/probed_tmp_scrap.txt" ] && axiom-scan .tmp/probed_tmp_scrap.txt -m gospider --js -d 2 --sitemap --robots -w -r -o .tmp/gospider &>>"$LOGFILE" fi - NUMFILES=$(find .tmp/gospider/ -type f | wc -l) + NUMFILES=0 + touch .tmp/gospider.txt + [[ -d .tmp/gospider/ ]] && NUMFILES=$(find .tmp/gospider/ -type f | wc -l) [[ $NUMFILES -gt 0 ]] && cat .tmp/gospider/* | sed '/^.\{2048\}./d' | anew -q .tmp/gospider.txt grep -Eo 'https?://[^ ]+' .tmp/gospider.txt | sed 's/]$//' | unfurl --unique domains | grep ".$domain$" | anew -q .tmp/scrap_subs.txt axiom-scan .tmp/scrap_subs.txt -m puredns-resolve -r /home/op/lists/resolvers.txt -o .tmp/scrap_subs_resolved.txt &>>"$LOGFILE" @@ -873,13 +875,14 @@ function urlchecks(){ diff_webs=$(diff <(sort -u .tmp/probed_tmp.txt) <(sort -u webs/webs.txt) | wc -l) if [ $diff_webs != "0" ] || [ ! -s ".tmp/gospider.txt" ]; then if [ "$DEEP" = true ]; then - axiom-scan .tmp/probed_tmp_scrap.txt -m gospider --js -d 3 --sitemap --robots -w -r -o .tmp/gospider &>>"$LOGFILE" + [ -f ".tmp/probed_tmp_scrap.txt" ] && axiom-scan .tmp/probed_tmp_scrap.txt -m gospider --js -d 3 --sitemap --robots -w -r -o .tmp/gospider &>>"$LOGFILE" else - axiom-scan .tmp/probed_tmp_scrap.txt -m gospider --js -d 2 --sitemap --robots -w -r -o .tmp/gospider &>>"$LOGFILE" + [ -f ".tmp/probed_tmp_scrap.txt" ] && axiom-scan .tmp/probed_tmp_scrap.txt -m gospider --js -d 2 --sitemap --robots -w -r -o .tmp/gospider &>>"$LOGFILE" fi - cat .tmp/gospider/* | sed '/^.\{2048\}./d' | anew -q .tmp/gospider.txt + [[ -d .tmp/gospider/ ]] && cat .tmp/gospider/* | sed '/^.\{2048\}./d' | anew -q .tmp/gospider.txt fi - cat .tmp/gospider.txt | grep -Eo 'https?://[^ ]+' | sed 's/]$//' | grep ".$domain$" | anew -q .tmp/url_extract_tmp.txt + [[ -d .tmp/gospider/ ]] && NUMFILES=$(find .tmp/gospider/ -type f | wc -l) + [[ $NUMFILES -gt 0 ]] && cat .tmp/gospider.txt | grep -Eo 'https?://[^ ]+' | sed 's/]$//' | grep ".$domain$" | anew -q .tmp/url_extract_tmp.txt if [ -s "${GITHUB_TOKENS}" ]; then github-endpoints -q -k -d $domain -t ${GITHUB_TOKENS} -o .tmp/github-endpoints.txt &>>"$LOGFILE" cat .tmp/github-endpoints.txt 2>>"$LOGFILE" | anew -q .tmp/url_extract_tmp.txt From 48fdc5078a58be32e569d0dfc8772484739e63cd Mon Sep 17 00:00:00 2001 From: six2dez Date: Fri, 14 May 2021 08:07:01 +0200 Subject: [PATCH 09/32] Fixes #291 --- requirements.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/requirements.txt b/requirements.txt index 30aeb329..0c115ccf 100644 --- a/requirements.txt +++ b/requirements.txt @@ -34,7 +34,7 @@ requests retrying==1.3.3 texttable==1.6.3 lxml -uvloop==0.15.2 +uvloop PySocks==1.6.8 h8mail argcomplete==1.10.0 From 2d038028eb9efb2d3d511bf97066da556661b8e0 Mon Sep 17 00:00:00 2001 From: six2dez Date: Fri, 14 May 2021 09:18:42 +0200 Subject: [PATCH 10/32] Added ip/cidr as target --- reconftw.sh | 17 ++++++++++++++--- reconftw_axiom.sh | 27 +++++++++++++++++++++++++++ 2 files changed, 41 insertions(+), 3 deletions(-) diff --git a/reconftw.sh b/reconftw.sh index 53444e88..9798914f 100755 --- a/reconftw.sh +++ b/reconftw.sh @@ -1419,15 +1419,22 @@ function ipcidr_detection(){ if [[ $1 =~ /[0-9]+$ ]]; then 
prips $1 | hakrevdns prips $1 | gdn - # ./test.sh 199.120.48.0/24 | cut -d' ' -f3 | unfurl -u domains | sed 's/\.$//' | awk -F\. '{print $(NF-1) FS $NF}' | sort -u else echo $1 | hakrevdns echo $1 | gdn - # ./test.sh 199.120.48.0/24 | cut -d' ' -f3 | unfurl -u domains | sed 's/\.$//' | awk -F\. '{print $(NF-1) FS $NF}' | sort -u fi fi } +function ipcidr_target(){ + ipcidr_detection $1 | cut -d' ' -f3 | unfurl -u domains | sed 's/\.$//' | sort -u > ./target_reconftw_ipcidr.txt + if [[ $(cat ./target_reconftw_ipcidr.txt | wc -l) -eq 1 ]]; then + domain=$(cat ./target_reconftw_ipcidr.txt) + elif [[ $(cat ./target_reconftw_ipcidr.txt | wc -l) -gt 1 ]]; then + list=${PWD}/target_reconftw_ipcidr.txt + fi +} + function start(){ global_start=$(date +%s) @@ -1441,7 +1448,7 @@ function start(){ echo "Recon succesfully started on $domain" | $NOTIFY tools_installed -# ipcidr_detection $domain + [[ -n "$domain" ]] && ipcidr_target $domain if [ -z "$domain" ]; then if [ -n "$list" ]; then @@ -1575,6 +1582,8 @@ function multi_osint(){ NOTIFY="" fi + [[ -n "$domain" ]] && ipcidr_target $domain + if [ -s "$list" ]; then sed -i 's/\r$//' $list targets=$(cat $list) @@ -1646,6 +1655,8 @@ function multi_recon(){ NOTIFY="" fi + [[ -n "$domain" ]] && ipcidr_target $domain + if [ -s "$list" ]; then sed -i 's/\r$//' $list targets=$(cat $list) diff --git a/reconftw_axiom.sh b/reconftw_axiom.sh index 943eafe4..b3ee0fad 100755 --- a/reconftw_axiom.sh +++ b/reconftw_axiom.sh @@ -1420,6 +1420,27 @@ function resolvers_update(){ fi } +function ipcidr_detection(){ + if [[ $1 =~ ^[0-9]+\.[0-9]+\.[0-9]+\.[0-9] ]]; then + if [[ $1 =~ /[0-9]+$ ]]; then + prips $1 | hakrevdns + prips $1 | gdn + else + echo $1 | hakrevdns + echo $1 | gdn + fi + fi +} + +function ipcidr_target(){ + ipcidr_detection $1 | cut -d' ' -f3 | unfurl -u domains | sed 's/\.$//' | sort -u > ./target_reconftw_ipcidr.txt + if [[ $(cat ./target_reconftw_ipcidr.txt | wc -l) -eq 1 ]]; then + domain=$(cat ./target_reconftw_ipcidr.txt) + elif [[ $(cat ./target_reconftw_ipcidr.txt | wc -l) -gt 1 ]]; then + list=${PWD}/target_reconftw_ipcidr.txt + fi +} + function axiom_lauch(){ # let's fire up a FLEET! 
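# Note on the guard below (shown as context): a new fleet is only spun up when
# AXIOM_FLEET_LAUNCH is enabled and both AXIOM_FLEET_NAME and AXIOM_FLEET_COUNT
# are set in reconftw.cfg; per the config notes, an existing fleet with the
# same name and size (or larger) is reused rather than relaunched.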
if [ "$AXIOM_FLEET_LAUNCH" = true ] && [ -n "$AXIOM_FLEET_NAME" ] && [ -n "$AXIOM_FLEET_COUNT" ]; then @@ -1495,6 +1516,8 @@ function start(){ echo "Recon succesfully started on $domain" | $NOTIFY tools_installed + [[ -n "$domain" ]] && ipcidr_target $domain + if [ -z "$domain" ]; then if [ -n "$list" ]; then if [ -z "$domain" ]; then @@ -1638,6 +1661,8 @@ function multi_osint(){ NOTIFY="" fi + [[ -n "$domain" ]] && ipcidr_target $domain + if [ -s "$list" ]; then sed -i 's/\r$//' $list targets=$(cat $list) @@ -1717,6 +1742,8 @@ function multi_recon(){ NOTIFY="" fi + [[ -n "$domain" ]] && ipcidr_target $domain + if [ -s "$list" ]; then sed -i 's/\r$//' $list targets=$(cat $list) From ded592f3d8a9106e84b86b9f6ce7fd081cb6d82c Mon Sep 17 00:00:00 2001 From: six2dez Date: Fri, 14 May 2021 09:26:53 +0200 Subject: [PATCH 11/32] ip/cidr tools install --- install.sh | 3 ++- reconftw.sh | 5 ++++- reconftw_axiom.sh | 5 ++++- 3 files changed, 10 insertions(+), 3 deletions(-) diff --git a/install.sh b/install.sh index d91167cc..35cc8921 100755 --- a/install.sh +++ b/install.sh @@ -29,7 +29,8 @@ gotools["crobat"]="go get -v github.com/cgboal/sonarsearch/crobat" gotools["crlfuzz"]="GO111MODULE=on go get -v github.com/dwisiswant0/crlfuzz/cmd/crlfuzz" gotools["dalfox"]="GO111MODULE=on go get -v github.com/hahwul/dalfox/v2" gotools["puredns"]="GO111MODULE=on go get github.com/d3mondev/puredns/v2" -gotools["naabu"]="GO111MODULE=on go get -v github.com/projectdiscovery/naabu/v2/cmd/naabu" +gotools["hakrevdns"]="go get github.com/hakluke/hakrevdns" +gotools["gdn"]="GO111MODULE=on go get -v github.com/kmskrishna/gdn" declare -A repos repos["degoogle_hunter"]="six2dez/degoogle_hunter" diff --git a/reconftw.sh b/reconftw.sh index 9798914f..25d8abd7 100755 --- a/reconftw.sh +++ b/reconftw.sh @@ -79,7 +79,7 @@ function tools_installed(){ type -P DNScewl &>/dev/null || { printf "${bred} [*] DNScewl [NO]${reset}\n"; allinstalled=false;} type -P cf-check &>/dev/null || { printf "${bred} [*] Cf-check [NO]${reset}\n"; allinstalled=false;} type -P nuclei &>/dev/null || { printf "${bred} [*] Nuclei [NO]${reset}\n"; allinstalled=false;} - [ -d ~/nuclei-templates ] || { printf "${bred} [*] Nuclei templates [NO]${reset}\n"; allinstalled=false;} + [ -d ~/nuclei-templates ] || { printf "${bred} [*] Nuclei templates [NO]${reset}\n"; allinstalled=false;} type -P gf &>/dev/null || { printf "${bred} [*] Gf [NO]${reset}\n"; allinstalled=false;} type -P Gxss &>/dev/null || { printf "${bred} [*] Gxss [NO]${reset}\n"; allinstalled=false;} type -P subjs &>/dev/null || { printf "${bred} [*] subjs [NO]${reset}\n"; allinstalled=false;} @@ -96,6 +96,8 @@ function tools_installed(){ type -P dalfox &>/dev/null || { printf "${bred} [*] dalfox [NO]${reset}\n${reset}"; allinstalled=false;} type -P puredns &>/dev/null || { printf "${bred} [*] puredns [NO]${reset}\n${reset}"; allinstalled=false;} type -P unimap &>/dev/null || { printf "${bred} [*] unimap [NO]${reset}\n${reset}"; allinstalled=false;} + type -P hakrevdns &>/dev/null || { printf "${bred} [*] hakrevdns [NO]${reset}\n${reset}"; allinstalled=false;} + type -P gdn &>/dev/null || { printf "${bred} [*] gdn [NO]${reset}\n"; allinstalled=false;} if [ "${allinstalled}" = true ]; then printf "${bgreen} Good! All installed! ${reset}\n\n" @@ -2125,6 +2127,7 @@ case $opt_mode in # No mode selected. EXIT! 
*) help + tools_installed exit 1 ;; esac diff --git a/reconftw_axiom.sh b/reconftw_axiom.sh index b3ee0fad..cb006812 100755 --- a/reconftw_axiom.sh +++ b/reconftw_axiom.sh @@ -79,7 +79,7 @@ function tools_installed(){ type -P DNScewl &>/dev/null || { printf "${bred} [*] DNScewl [NO]${reset}\n"; allinstalled=false;} type -P cf-check &>/dev/null || { printf "${bred} [*] Cf-check [NO]${reset}\n"; allinstalled=false;} type -P nuclei &>/dev/null || { printf "${bred} [*] Nuclei [NO]${reset}\n"; allinstalled=false;} - [ -d ~/nuclei-templates ] || { printf "${bred} [*] Nuclei templates [NO]${reset}\n"; allinstalled=false;} + [ -d ~/nuclei-templates ] || { printf "${bred} [*] Nuclei templates [NO]${reset}\n"; allinstalled=false;} type -P gf &>/dev/null || { printf "${bred} [*] Gf [NO]${reset}\n"; allinstalled=false;} type -P Gxss &>/dev/null || { printf "${bred} [*] Gxss [NO]${reset}\n"; allinstalled=false;} type -P subjs &>/dev/null || { printf "${bred} [*] subjs [NO]${reset}\n"; allinstalled=false;} @@ -96,6 +96,8 @@ function tools_installed(){ type -P dalfox &>/dev/null || { printf "${bred} [*] dalfox [NO]${reset}\n${reset}"; allinstalled=false;} type -P puredns &>/dev/null || { printf "${bred} [*] puredns [NO]${reset}\n${reset}"; allinstalled=false;} type -P unimap &>/dev/null || { printf "${bred} [*] unimap [NO]${reset}\n${reset}"; allinstalled=false;} + type -P hakrevdns &>/dev/null || { printf "${bred} [*] hakrevdns [NO]${reset}\n${reset}"; allinstalled=false;} + type -P gdn &>/dev/null || { printf "${bred} [*] gdn [NO]${reset}\n"; allinstalled=false;} type -P axiom-ls &>/dev/null || { printf "${bred} [*] axiom [NO]${reset}\n${reset}"; allinstalled=false;} if [ "${allinstalled}" = true ]; then @@ -2223,6 +2225,7 @@ case $opt_mode in # No mode selected. EXIT! 
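# Same fallback as in reconftw.sh above: show the help and run the
# tools_installed dependency check before exiting.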
*) help + tools_installed exit 1 ;; esac From 7c215bdb372941b487687e51fcb3837b89bfff86 Mon Sep 17 00:00:00 2001 From: six2dez Date: Fri, 14 May 2021 11:14:17 +0200 Subject: [PATCH 12/32] Command injection added --- install.sh | 1 + reconftw.sh | 28 ++++++++++++++++++++++++---- reconftw_axiom.sh | 20 ++++++++++++++++++++ 3 files changed, 45 insertions(+), 4 deletions(-) diff --git a/install.sh b/install.sh index 35cc8921..43f40e16 100755 --- a/install.sh +++ b/install.sh @@ -56,6 +56,7 @@ repos["OpenRedireX"]="devanshbatham/OpenRedireX" repos["GitDorker"]="obheda12/GitDorker" repos["testssl"]="drwetter/testssl.sh" repos["ip2provider"]="oldrho/ip2provider" +repos["commix"]="commixproject/commix" dir=${tools} diff --git a/reconftw.sh b/reconftw.sh index 25d8abd7..46273d2b 100755 --- a/reconftw.sh +++ b/reconftw.sh @@ -55,8 +55,9 @@ function tools_installed(){ [ -f "$tools/CMSeeK/cmseek.py" ] || { printf "${bred} [*] CMSeeK [NO]${reset}\n"; allinstalled=false;} [ -f "$tools/ctfr/ctfr.py" ] || { printf "${bred} [*] ctfr [NO]${reset}\n"; allinstalled=false;} [ -f "$tools/fuzz_wordlist.txt" ] || { printf "${bred} [*] OneListForAll [NO]${reset}\n"; allinstalled=false;} - [ -f "$tools/LinkFinder/linkfinder.py" ] || { printf "${bred} [*] LinkFinder [NO]${reset}\n"; allinstalled=false;} - [ -f "$tools/GitDorker/GitDorker.py" ] || { printf "${bred} [*] GitDorker [NO]${reset}\n"; allinstalled=false;} + [ -f "$tools/LinkFinder/linkfinder.py" ] || { printf "${bred} [*] LinkFinder [NO]${reset}\n"; allinstalled=false;} + [ -f "$tools/GitDorker/GitDorker.py" ] || { printf "${bred} [*] GitDorker [NO]${reset}\n"; allinstalled=false;} + [ -f "$tools/commix/commix.py" ] || { printf "${bred} [*] commix [NO]${reset}\n"; allinstalled=false;} [ -f "$tools/degoogle_hunter/degoogle_hunter.sh" ] || { printf "${bred} [*] degoogle_hunter [NO]${reset}\n"; allinstalled=false;} [ -f "$tools/getjswords.py" ] || { printf "${bred} [*] getjswords [NO]${reset}\n"; allinstalled=false;} type -P arjun &>/dev/null || { printf "${bred} [*] Arjun [NO]${reset}\n"; allinstalled=false;} @@ -1209,8 +1210,8 @@ function sqli(){ if { [ ! -f "$called_fn_dir/.${FUNCNAME[0]}" ] || [ "$DIFF" = true ]; } && [ "$SQLI" = true ] && [ -s "gf/sqli.txt" ]; then start_func "SQLi checks" cat gf/sqli.txt | qsreplace FUZZ | anew -q .tmp/tmp_sqli.txt - interlace -tL .tmp/tmp_sqli.txt -threads 10 -c "python3 $tools/sqlmap/sqlmap.py -u _target_ -b --batch --disable-coloring --random-agent --output-dir=sqlmap" &>/dev/null - end_func "Results are saved in sqlmap folder" ${FUNCNAME[0]} + interlace -tL .tmp/tmp_sqli.txt -threads 10 -c "python3 $tools/sqlmap/sqlmap.py -u _target_ -b --batch --disable-coloring --random-agent --output-dir=vulns/sqlmap" &>/dev/null + end_func "Results are saved in vulns/sqlmap folder" ${FUNCNAME[0]} else if [ "$SQLI" = false ]; then printf "\n${yellow} ${FUNCNAME[0]} skipped in this mode or defined in reconftw.cfg ${reset}\n" @@ -1271,6 +1272,23 @@ function 4xxbypass(){ fi } +function command_injection(){ + if { [ ! 
-f "$called_fn_dir/.${FUNCNAME[0]}" ] || [ "$DIFF" = true ]; } && [ "$COMM_INJ" = true ] && [ -s "gf/rce.txt" ]; then + start_func "Command Injection checks" + cat gf/rce.txt | qsreplace FUZZ | anew -q .tmp/tmp_rce.txt + python3 $tools/commix/commix.py --batch -m .tmp/tmp_rce.txt --output-dir vulns/command_injection + end_func "Results are saved in vulns/command_injection folder" ${FUNCNAME[0]} + else + if [ "$COMM_INJ" = false ]; then + printf "\n${yellow} ${FUNCNAME[0]} skipped in this mode or defined in reconftw.cfg ${reset}\n" + elif [ ! -s "gf/rce.txt" ]; then + printf "\n${yellow} ${FUNCNAME[0]} No URLs potentially vulnerables to Command Injection ${reset}\n\n" + else + printf "${yellow} ${FUNCNAME[0]} is already processed, to force executing ${FUNCNAME[0]} delete\n $called_fn_dir/.${FUNCNAME[0]} ${reset}\n\n" + fi + fi +} + ############################################################################################################### ########################################## OPTIONS & MGMT ##################################################### ############################################################################################################### @@ -1558,6 +1576,7 @@ function all(){ ssti sqli xss + command_injection spraying brokenLinks test_ssl @@ -1836,6 +1855,7 @@ function webs_menu(){ ssti sqli xss + command_injection spraying brokenLinks test_ssl diff --git a/reconftw_axiom.sh b/reconftw_axiom.sh index cb006812..c78cc1a7 100755 --- a/reconftw_axiom.sh +++ b/reconftw_axiom.sh @@ -1277,6 +1277,24 @@ function 4xxbypass(){ fi } +function command_injection(){ + if { [ ! -f "$called_fn_dir/.${FUNCNAME[0]}" ] || [ "$DIFF" = true ]; } && [ "$COMM_INJ" = true ] && [ -s "gf/rce.txt" ]; then + start_func "Command Injection checks" + cat gf/rce.txt | qsreplace FUZZ | anew -q .tmp/tmp_rce.txt + python3 $tools/commix/commix.py --batch -m .tmp/tmp_rce.txt --output-dir vulns/command_injection + #axiom_scan .tmp/tmp_rce.txt -m commix -o vulns/command_injection + end_func "Results are saved in vulns/command_injection folder" ${FUNCNAME[0]} + else + if [ "$COMM_INJ" = false ]; then + printf "\n${yellow} ${FUNCNAME[0]} skipped in this mode or defined in reconftw.cfg ${reset}\n" + elif [ ! 
-s "gf/rce.txt" ]; then + printf "\n${yellow} ${FUNCNAME[0]} No URLs potentially vulnerables to Command Injection ${reset}\n\n" + else + printf "${yellow} ${FUNCNAME[0]} is already processed, to force executing ${FUNCNAME[0]} delete\n $called_fn_dir/.${FUNCNAME[0]} ${reset}\n\n" + fi + fi +} + ############################################################################################################### ########################################## OPTIONS & MGMT ##################################################### ############################################################################################################### @@ -1637,6 +1655,7 @@ function all(){ ssti sqli xss + command_injection spraying brokenLinks test_ssl @@ -1934,6 +1953,7 @@ function webs_menu(){ ssti sqli xss + command_injection spraying brokenLinks test_ssl From fac3d95fec6b6729d49f24003534c94d1582672c Mon Sep 17 00:00:00 2001 From: six2dez Date: Fri, 14 May 2021 12:30:42 +0200 Subject: [PATCH 13/32] Added JSA and didar_axiom --- install.sh | 3 ++- reconftw.sh | 6 +++++- reconftw_axiom.sh | 9 +++++++-- 3 files changed, 14 insertions(+), 4 deletions(-) diff --git a/install.sh b/install.sh index 43f40e16..01ae8fed 100755 --- a/install.sh +++ b/install.sh @@ -57,6 +57,7 @@ repos["GitDorker"]="obheda12/GitDorker" repos["testssl"]="drwetter/testssl.sh" repos["ip2provider"]="oldrho/ip2provider" repos["commix"]="commixproject/commix" +repos["JSA"]="w9w/JSA" dir=${tools} @@ -261,7 +262,7 @@ eval wget -O subdomains.txt https://gist.github.com/six2dez/a307a04a222fab5a5746 eval wget -O permutations_list.txt https://gist.github.com/six2dez/ffc2b14d283e8f8eff6ac83e20a3c4b4/raw $DEBUG_STD eval wget -nc -O ssrf.py https://gist.github.com/h4ms1k/adcc340495d418fcd72ec727a116fea2/raw $DEBUG_STD eval wget -nc -O fuzz_wordlist.txt https://raw.githubusercontent.com/six2dez/OneListForAll/main/onelistforallmicro.txt $DEBUG_STD -eval wget -O lfi_wordlist.txt https://gist.githubusercontent.com/detonxx/a885ce7dd64a7139cb6f5b6860499ba8/raw $DEBUG_STD +eval wget -O lfi_wordlist.txt https://gist.githubusercontent.com/six2dez/a89a0c7861d49bb61a09822d272d5395/raw $DEBUG_STD ## Last check printf "${bblue} Running: Double check for installed tools ${reset}\n\n" diff --git a/reconftw.sh b/reconftw.sh index 46273d2b..255d2b8f 100755 --- a/reconftw.sh +++ b/reconftw.sh @@ -60,6 +60,7 @@ function tools_installed(){ [ -f "$tools/commix/commix.py" ] || { printf "${bred} [*] commix [NO]${reset}\n"; allinstalled=false;} [ -f "$tools/degoogle_hunter/degoogle_hunter.sh" ] || { printf "${bred} [*] degoogle_hunter [NO]${reset}\n"; allinstalled=false;} [ -f "$tools/getjswords.py" ] || { printf "${bred} [*] getjswords [NO]${reset}\n"; allinstalled=false;} + [ -f "$tools/JSA/jsa.py" ] || { printf "${bred} [*] JSA [NO]${reset}\n"; allinstalled=false;} type -P arjun &>/dev/null || { printf "${bred} [*] Arjun [NO]${reset}\n"; allinstalled=false;} type -P dirdar &>/dev/null || { printf "${bred} [*] dirdar [NO]${reset}\n"; allinstalled=false;} type -P github-endpoints &>/dev/null || { printf "${bred} [*] github-endpoints [NO]${reset}\n"; allinstalled=false;} @@ -884,8 +885,11 @@ function urlchecks(){ github-endpoints -q -k -d $domain -t ${GITHUB_TOKENS} -o .tmp/github-endpoints.txt &>>"$LOGFILE" cat .tmp/github-endpoints.txt 2>>"$LOGFILE" | anew -q .tmp/url_extract_tmp.txt fi - cat .tmp/url_extract_tmp.txt webs/param.txt 2>>"$LOGFILE" | grep "${domain}" | grep "=" | qsreplace -a 2>>"$LOGFILE" | grep -Eiv 
"\.(eot|jpg|jpeg|gif|css|tif|tiff|png|ttf|otf|woff|woff2|ico|pdf|svg|txt|js)$" | anew -q .tmp/url_extract_tmp2.txt cat .tmp/url_extract_tmp.txt | grep "${domain}" | grep -Ei "\.(js)" | anew -q js/url_extract_js.txt + if [ "$DEEP" = true ]; then + [ -f "js/url_extract_js.txt" ] && cat js/url_extract_js.txt | python3 $tools/JSA/jsa.py | anew -q .tmp/url_extract_tmp.txt + fi + cat .tmp/url_extract_tmp.txt webs/param.txt 2>>"$LOGFILE" | grep "${domain}" | grep "=" | qsreplace -a 2>>"$LOGFILE" | grep -Eiv "\.(eot|jpg|jpeg|gif|css|tif|tiff|png|ttf|otf|woff|woff2|ico|pdf|svg|txt|js)$" | anew -q .tmp/url_extract_tmp2.txt uddup -u .tmp/url_extract_tmp2.txt -o .tmp/url_extract_uddup.txt &>>"$LOGFILE" NUMOFLINES=$(cat .tmp/url_extract_uddup.txt 2>>"$LOGFILE" | anew webs/url_extract.txt | wc -l) notification "${NUMOFLINES} new urls with params" info diff --git a/reconftw_axiom.sh b/reconftw_axiom.sh index c78cc1a7..bee3f1fa 100755 --- a/reconftw_axiom.sh +++ b/reconftw_axiom.sh @@ -59,6 +59,7 @@ function tools_installed(){ [ -f "$tools/GitDorker/GitDorker.py" ] || { printf "${bred} [*] GitDorker [NO]${reset}\n"; allinstalled=false;} [ -f "$tools/degoogle_hunter/degoogle_hunter.sh" ] || { printf "${bred} [*] degoogle_hunter [NO]${reset}\n"; allinstalled=false;} [ -f "$tools/getjswords.py" ] || { printf "${bred} [*] getjswords [NO]${reset}\n"; allinstalled=false;} + [ -f "$tools/JSA/jsa.py" ] || { printf "${bred} [*] JSA [NO]${reset}\n"; allinstalled=false;} type -P arjun &>/dev/null || { printf "${bred} [*] Arjun [NO]${reset}\n"; allinstalled=false;} type -P dirdar &>/dev/null || { printf "${bred} [*] dirdar [NO]${reset}\n"; allinstalled=false;} type -P github-endpoints &>/dev/null || { printf "${bred} [*] github-endpoints [NO]${reset}\n"; allinstalled=false;} @@ -889,8 +890,11 @@ function urlchecks(){ github-endpoints -q -k -d $domain -t ${GITHUB_TOKENS} -o .tmp/github-endpoints.txt &>>"$LOGFILE" cat .tmp/github-endpoints.txt 2>>"$LOGFILE" | anew -q .tmp/url_extract_tmp.txt fi - cat .tmp/url_extract_tmp.txt webs/param.txt 2>>"$LOGFILE" | grep "${domain}" | grep "=" | qsreplace -a 2>>"$LOGFILE" | grep -Eiv "\.(eot|jpg|jpeg|gif|css|tif|tiff|png|ttf|otf|woff|woff2|ico|pdf|svg|txt|js)$" | anew -q .tmp/url_extract_tmp2.txt cat .tmp/url_extract_tmp.txt | grep "${domain}" | grep -Ei "\.(js)" | anew -q js/url_extract_js.txt + if [ "$DEEP" = true ]; then + [ -f "js/url_extract_js.txt" ] && cat js/url_extract_js.txt | python3 $tools/JSA/jsa.py | anew -q .tmp/url_extract_tmp.txt + fi + cat .tmp/url_extract_tmp.txt webs/param.txt 2>>"$LOGFILE" | grep "${domain}" | grep "=" | qsreplace -a 2>>"$LOGFILE" | grep -Eiv "\.(eot|jpg|jpeg|gif|css|tif|tiff|png|ttf|otf|woff|woff2|ico|pdf|svg|txt|js)$" | anew -q .tmp/url_extract_tmp2.txt uddup -u .tmp/url_extract_tmp2.txt -o .tmp/url_extract_uddup.txt &>>"$LOGFILE" NUMOFLINES=$(cat .tmp/url_extract_uddup.txt 2>>"$LOGFILE" | anew webs/url_extract.txt | wc -l) notification "${NUMOFLINES} new urls with params" info @@ -1262,7 +1266,8 @@ function 4xxbypass(){ if { [ ! 
-f "$called_fn_dir/.${FUNCNAME[0]}" ] || [ "$DIFF" = true ]; } && [ "$BYPASSER4XX" = true ]; then if [[ $(cat fuzzing/*.txt 2>/dev/null | grep -E '^4' | grep -Ev '^404' | cut -d ' ' -f3 | wc -l) -le 1000 ]] || [ "$DEEP" = true ]; then start_func "403 bypass" - cat fuzzing/*.txt 2>>"$LOGFILE" | grep -E '^4' | grep -Ev '^404' | cut -d ' ' -f3 | dirdar -threads $DIRDAR_THREADS -only-ok > .tmp/dirdar.txt + cat fuzzing/*.txt 2>>"$LOGFILE" | grep -E '^4' | grep -Ev '^404' | cut -d ' ' -f3 > .tmp/dirdar_test.txt + axiom-scan .tmp/dirdar_test.txt -m dirdar -threads $DIRDAR_THREADS -only-ok > .tmp/dirdar.txt cat .tmp/dirdar.txt 2>>"$LOGFILE" | sed -e '1,12d' | sed '/^$/d' | anew -q vulns/4xxbypass.txt end_func "Results are saved in vulns/4xxbypass.txt" ${FUNCNAME[0]} else From ff6a4acf904430231354ebfb8a5265f57cac8333 Mon Sep 17 00:00:00 2001 From: Jinay Patel <50541295+0-0eth0@users.noreply.github.com> Date: Sun, 16 May 2021 19:10:44 +0530 Subject: [PATCH 14/32] Update README.md --- README.md | 267 +++++++++++++++++++++++++++++++----------------------- 1 file changed, 156 insertions(+), 111 deletions(-) diff --git a/README.md b/README.md index 1f4a2078..8aa39b22 100644 --- a/README.md +++ b/README.md @@ -5,12 +5,10 @@ reconFTW
- -

A simple bash script for full recon

- +

- - + + @@ -32,86 +30,97 @@ Docker Cloud Build Status

- + +

Summary

+
+**ReconFTW** automates the entire process of reconnaissance for you. It performs thorough subdomain enumeration along with various vulnerability checks, obtaining the maximum information about your target.
+
+ReconFTW uses around 5 techniques (passive, bruteforce, permutations, certificate transparency, JS scraping) for subdomain enumeration, which helps you get the maximum number of the most interesting subdomains so that you stay ahead of the competition.
+
+
+It also performs various vulnerability checks like XSS, Open Redirects, SSRF, CRLF, LFI, SQLi, SSL tests, SSTI, DNS zone transfers, and much more. Along with these, it performs OSINT techniques, directory fuzzing, dorking, port scanning, screenshots and nuclei scans on your target.
+
+So, what are you waiting for? Go! Go! Go! :boom:
+
 📔 Table of Contents
-----------------
-- [💿 Installation:](#-installation)
+- [💿 Installation](#-installation)
  - [a) In your PC/VPS/VM](#a-in-your-pcvpsvm)
  - [b) Docker container 🐳 (2 options)](#b-docker-container--2-options)
    - [1) From DockerHub](#1-from-dockerhub)
    - [2) From repository](#2-from-repository)
-- [⚙️ Config file:](#️-config-file)
-- [Usage:](#usage)
-- [Example Usage:](#example-usage)
-- [Axiom Support: :cloud:](#axiom-support-cloud)
-- [Sample video:](#sample-video)
+- [⚙️ Config file](#️-config-file)
+- [Usage](#usage)
+  - [Example Usage](#example-usage)
+- [Axiom Support :cloud:](#axiom-support-cloud)
+- [Sample video](#sample-video)
- [:fire: Features :fire:](#fire-features-fire)
- [Mindmap/Workflow](#mindmapworkflow)
  - [Data Keep](#data-keep)
-  - [Main commands:](#main-commands)
-  - [How to contribute:](#how-to-contribute)
-  - [Need help?](#need-help)
-  - [You can support this work buying me a coffee:](#you-can-support-this-work-buying-me-a-coffee)
+  - [Main commands](#main-commands)
+  - [How to contribute](#how-to-contribute)
+- [Need help?](#need-help)
+- [Support this project](#you-can-support-this-work-buying-me-a-coffee)
- [Thanks :pray:](#thanks-pray)
-
+
---
-
+
# 💿 Installation:
-
+
## a) In your PC/VPS/VM
-
+
> You can check out our wiki for the installation guide [Installation Guide](https://github.com/six2dez/reconftw/wiki/0.-Installation-Guide) :book:
-
+
- Requires [Golang](https://golang.org/dl/) > **1.15.0+** installed and paths correctly set (**$GOPATH**, **$GOROOT**)
-
+
```bash
▶ git clone https://github.com/six2dez/reconftw
▶ cd reconftw/
▶ ./install.sh
▶ ./reconftw.sh -d target.com -r
```
-
+
## b) Docker container 🐳 (2 options)
-
+
### 1) From [DockerHub](https://hub.docker.com/r/six2dez/reconftw)
-
+
```bash
▶ docker pull six2dez/reconftw:main
▶ docker run -it six2dez/reconftw:main /bin/bash
-
+
# Exit the container and run these commands additionally if you want to gain persistence:
-
+
▶ docker start $(docker ps -a|grep six2dez/reconftw:main|cut -d' ' -f1)
▶ docker exec -it $(docker ps -a|grep six2dez/reconftw:main|cut -d' ' -f1) /bin/bash
-
+
# Now you can exit the container and run this command again without losing files:
▶ docker exec -it $(docker ps -a|grep six2dez/reconftw:main|cut -d' ' -f1) /bin/bash
```
-
+
### 2) From repository
-
+
```bash
▶ git clone https://github.com/six2dez/reconftw
▶ cd reconftw/Docker
▶ docker build -t reconftw .
▶ docker run -it reconftw /bin/bash
```
-
+
# ⚙️ Config file:
> A detailed explanation of the config file can be found here: [Configuration file](https://github.com/six2dez/reconftw/wiki/3.-Configuration-file) :book:
-
+
- Through the ```reconftw.cfg``` file the whole execution of the tool can be controlled.
- Hunters can set various scanning modes, execution preferences, tools, config files, APIs/TOKENS, personalized wordlists and much more. - +
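For instance, a hunter might flip a few of the defaults before a long VPS run. This is a hypothetical tweak, with variable names taken from the default file below and values chosen purely for illustration:

```bash
# Illustrative reconftw.cfg overrides (examples, not recommendations)
DEEP=true           # enable the slower, deeper checks
REMOVETMP=true      # delete the .tmp folder when the run ends
NOTIFICATION=true   # push status messages via notify
HTTPX_THREADS=100   # raise web-probing concurrency
```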


:point_right: Click here to view default config file :point_left: - + ```yaml ################################################################# -# reconFTW config file # +# reconFTW config file # ################################################################# - + # TERM COLORS bred='\033[1;31m' bblue='\033[1;34m' @@ -121,7 +130,7 @@ red='\033[0;31m' blue='\033[0;34m' green='\033[0;32m' reset='\033[0m' - + # General values tools=~/Tools SCRIPTPATH="$( cd "$(dirname "$0")" >/dev/null 2>&1 ; pwd -P )" @@ -130,31 +139,33 @@ reconftw_version=$(git branch --show-current)-$(git describe --tags) update_resolvers=true proxy_url="http://127.0.0.1:8080/" #dir_output=/custom/output/path - + # Golang Vars (Comment or change on your own) export GOROOT=/usr/local/go export GOPATH=$HOME/go export PATH=$GOPATH/bin:$GOROOT/bin:$HOME/.local/bin:$PATH - + # Tools config files #NOTIFY_CONFIG=~/.config/notify/notify.conf # No need to define #SUBFINDER_CONFIG=~/.config/subfinder/config.yaml # No need to define AMASS_CONFIG=~/.config/amass/config.ini GITHUB_TOKENS=${tools}/.github_tokens - + # APIs/TOKENS - Uncomment the lines you set removing the '#' at the beginning of the line -#SHODAN_API_KEY=XXXXXXXXXXXXX -#XSS_SERVER=six2dez.xss.ht -#COLLAB_SERVER=XXXXXXXXXXXXXXXXX -#findomain_virustotal_token=XXXXXXXXXXXXXXXXX -#findomain_spyse_token=XXXXXXXXXXXXXXXXX -#findomain_securitytrails_token=XXXXXXXXXXXXXXXXX -#findomain_fb_token=XXXXXXXXXXXXXXXXX - +#SHODAN_API_KEY="XXXXXXXXXXXXX" +#XSS_SERVER="XXXXXXXXXXXXXXXXX" +#COLLAB_SERVER="XXXXXXXXXXXXXXXXX" +#findomain_virustotal_token="XXXXXXXXXXXXXXXXX" +#findomain_spyse_token="XXXXXXXXXXXXXXXXX" +#findomain_securitytrails_token="XXXXXXXXXXXXXXXXX" +#findomain_fb_token="XXXXXXXXXXXXXXXXX" +slack_channel="XXXXXXXX" +slack_auth="xoXX-XXX-XXX-XXX" + # File descriptors DEBUG_STD="&>/dev/null" DEBUG_ERROR="2>/dev/null" - + # Osint OSINT=true GOOGLE_DORKS=true @@ -162,7 +173,7 @@ GITHUB_DORKS=true METADATA=true EMAILS=true DOMAIN_INFO=true - + # Subdomains SUBCRT=true SUBBRUTE=true @@ -172,20 +183,23 @@ SUBTAKEOVER=true SUBRECURSIVE=true ZONETRANSFER=true S3BUCKETS=true - + # Web detection WEBPROBESIMPLE=true WEBPROBEFULL=true WEBSCREENSHOT=true UNCOMMON_PORTS_WEB="81,300,591,593,832,981,1010,1311,1099,2082,2095,2096,2480,3000,3128,3333,4243,4567,4711,4712,4993,5000,5104,5108,5280,5281,5601,5800,6543,7000,7001,7396,7474,8000,8001,8008,8014,8042,8060,8069,8080,8081,8083,8088,8090,8091,8095,8118,8123,8172,8181,8222,8243,8280,8281,8333,8337,8443,8500,8834,8880,8888,8983,9000,9001,9043,9060,9080,9090,9091,9200,9443,9502,9800,9981,10000,10250,11371,12443,15672,16080,17778,18091,18092,20720,32000,55440,55672" - +# You can change to aquatone if gowitness fails, comment the one you don't want +AXIOM_SCREENSHOT_MODULE=gowitness +#AXIOM_SCREENSHOT_MODULE=aquatone + # Host FAVICON=true PORTSCANNER=true PORTSCAN_PASSIVE=true PORTSCAN_ACTIVE=true CLOUD_IP=true - + # Web analysis WAF_DETECTION=true NUCLEICHECK=true @@ -197,7 +211,7 @@ PARAMS=true FUZZ=true CMS_SCANNER=true WORDLIST=true - + # Vulns XSS=true CORS=true @@ -211,17 +225,18 @@ SQLI=true BROKENLINKS=true SPRAY=true BYPASSER4XX=true - + # Extra features NOTIFICATION=false DEEP=false DIFF=false REMOVETMP=false PROXY=false - +SENDZIPNOTIFY=false + # HTTP options HEADER="User-Agent: Mozilla/5.0 (X11; Linux x86_64; rv:72.0) Gecko/20100101 Firefox/72.0" - + # Threads FFUF_THREADS=40 HTTPX_THREADS=50 @@ -233,8 +248,16 @@ BRUTESPRAY_CONCURRENCE=10 ARJUN_THREADS=20 GAUPLUS_THREADS=10 DALFOX_THREADS=200 +PUREDNS_PUBLIC_LIMIT=0 # Set 
between 2000 - 10000 if your router blows up, 0 is unlimited PUREDNS_TRUSTED_LIMIT=400 - +DIRDAR_THREADS=200 + +# Timeouts +CMSSCAN_TIMEOUT=3600 +FFUF_MAXTIME=900 # Seconds +HTTPX_TIMEOUT=15 # Seconds +HTTPX_UNCOMMONPORTS_TIMEOUT=10 # Seconds + # lists fuzz_wordlist=${tools}/fuzz_wordlist.txt lfi_wordlist=${tools}/lfi_wordlist.txt @@ -242,95 +265,109 @@ subs_wordlist=${tools}/subdomains.txt subs_wordlist_big=${tools}/subdomains_big.txt resolvers=${tools}/resolvers.txt resolvers_trusted=${tools}/resolvers_trusted.txt + +# Axiom Fleet +# Will not start a new fleet if one exist w/ same name and size (or larger) +AXIOM_FLEET_LAUNCH=true +AXIOM_FLEET_NAME="reconFTW" +AXIOM_FLEET_COUNT=5 +AXIOM_FLEET_REGIONS="" +AXIOM_FLEET_SHUTDOWN=true +# This is a script on your reconftw host that might prep things your way... +#AXIOM_POST_START="$HOME/bin/yourScript" + ```
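Since this patch series also adds the `-f` flag, a copy of this file can be loaded per engagement instead of editing the default in place. One possible workflow, with an illustrative filename:

```bash
▶ cp reconftw.cfg acme.cfg      # tune API keys, threads and modes per target
▶ ./reconftw.sh -d target.com -r -f acme.cfg
```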
-
+
# Usage:
-
> Check out the wiki section to know which steps/attacks each flag performs [Usage Guide](https://github.com/six2dez/reconftw/wiki/2.-Usage-Guide) :book:
-
**TARGET OPTIONS**
-
| Flag | Description |
|------|-------------|
-| -d | Target domain *(example.com)* |
+| -d | Single target domain *(example.com)* |
+| -l | List of targets *(one per line)* |
| -m | Multiple domain target *(companyName)* |
-| -l | Target list *(one per line)* |
| -x | Exclude subdomains list *(Out Of Scope)* |
-
**MODE OPTIONS**
-
| Flag | Description |
|------|-------------|
| -r | Recon - Full recon process (without attacks like sqli,ssrf,xss,ssti,lfi etc.) |
| -s | Subdomains - Perform only subdomain enumeration, web probing, subdomain takeovers |
| -p | Passive - Perform only passive steps |
| -a | All - Perform whole recon and all active attacks |
-| -w | Web - Just web checks on the list provided |
-| -v | Verbose - Prints everything including errors, for debug purposes |
+| -w | Web - Perform only vulnerability checks/attacks on a particular target |
+| -n | OSINT - Performs an OSINT scan (no subdomain enumeration and attacks) |
| -h | Help - Show this help menu |
-
**GENERAL OPTIONS**
-
| Flag | Description |
|------|-------------|
| --deep | Deep scan (Enable some slow options for deeper scan, _vps intended mode_) |
| -o | Output directory |
-
# Example Usage:
-
**To perform a full recon on a single target**
-
```bash
▶ ./reconftw.sh -d target.com -r
```
-
**To perform a full recon on a list of targets**
-
```bash
▶ ./reconftw.sh -l sites.txt -r -o /output/directory/
```
-
**Perform all steps (whole recon + all attacks)**
-
```bash
▶ ./reconftw.sh -d target.com -a
```
-
**Perform full recon with more time-intense tasks** *(VPS intended only)*
-
```bash
▶ ./reconftw.sh -d target.com -r --deep -o /output/directory/
```
-
**Perform recon on a multi domain target**
-
```bash
▶ ./reconftw.sh -m company -l domains_list.txt -r
```
-
**Show help section**
-
```bash
▶ ./reconftw.sh -h
```
-
# Axiom Support: :cloud:
![](https://i.ibb.co/Jzrgkqt/axiom-readme.png)
> Check out the wiki section for more info [Axiom Support](https://github.com/six2dez/reconftw/wiki/5.-Axiom-version)
-* Using ```reconftw_axiom.sh``` script you can take advantage of running **reconFTW** with [Axiom](https://github.com/pry0cc/axiom).
* As reconFTW actively hits the target with a lot of web traffic, there was a need to move to Axiom, distributing the workload among various instances and reducing execution time.
-* Currently except the ```-a``` flag, all flags are supported when running with Axiom.
+* During the configuration of axiom you need to select `reconftw` as the provisioner.
+* Using the ```reconftw_axiom.sh``` script you can take advantage of running **reconFTW** with [Axiom](https://github.com/pry0cc/axiom).
+* It's also necessary to create your fleet beforehand, as shown below:
+ ```bash +▶ axiom-fleet testy -i=10 # Initialize a fleet named 'testy' ▶ ./reconftw_axiom.sh -d target.com -r ``` - + # Sample video: - + ![Video](images/reconFTW.gif) - + # :fire: Features :fire: - + - Domain information parser ([domainbigdata](https://domainbigdata.com/)) - Emails addresses and users ([theHarvester](https://github.com/laramies/theHarvester)) - Password leaks ([pwndb](https://github.com/davidtavarez/pwndb) and [H8mail](https://github.com/khast3x/h8mail)) @@ -345,7 +382,7 @@ resolvers_trusted=${tools}/resolvers_trusted.txt - JS files & Source Code Scraping ([gospider](https://github.com/jaeles-project/gospider)) - CNAME Records ([dnsx](https://github.com/projectdiscovery/dnsx)) - Nuclei Sub TKO templates ([nuclei](https://github.com/projectdiscovery/nuclei)) -- Web Prober ([httpx](https://github.com/projectdiscovery/httpx)) +- Web Prober ([httpx](https://github.com/projectdiscovery/httpx) and [naabu](https://github.com/projectdiscovery/naabu)) - Web screenshot ([gowitness](https://github.com/sensepost/gowitness)) - Web templates scanner ([nuclei](https://github.com/projectdiscovery/nuclei)) - IP and subdomains WAF checker ([cf-check](https://github.com/dwisiswant0/cf-check) and [wafw00f](https://github.com/EnableSecurity/wafw00f)) @@ -353,7 +390,7 @@ resolvers_trusted=${tools}/resolvers_trusted.txt - Url extraction ([waybackurls](https://github.com/tomnomnom/waybackurls), [gauplus](https://github.com/bp0lr/gauplus), [gospider](https://github.com/jaeles-project/gospider), [github-endpoints](https://gist.github.com/six2dez/d1d516b606557526e9a78d7dd49cacd3)) - Pattern Search ([gf](https://github.com/tomnomnom/gf) and [gf-patterns](https://github.com/1ndianl33t/Gf-Patterns)) - Param discovery ([paramspider](https://github.com/devanshbatham/ParamSpider) and [arjun](https://github.com/s0md3v/Arjun)) -- XSS ([XSStrike](https://github.com/s0md3v/XSStrike)) +- XSS ([dalfox](https://github.com/hahwul/dalfox)) - Open redirect ([Openredirex](https://github.com/devanshbatham/OpenRedireX)) - SSRF (headers [asyncio_ssrf.py](https://gist.github.com/h4ms1k/adcc340495d418fcd72ec727a116fea2) and param values with [ffuf](https://github.com/ffuf/ffuf)) - CRLF ([crlfuzz](https://github.com/dwisiswant0/crlfuzz)) @@ -376,23 +413,24 @@ resolvers_trusted=${tools}/resolvers_trusted.txt - Docker container included and [DockerHub](https://hub.docker.com/r/six2dez/reconftw) integration - Cloud providers check ([ip2provider](https://github.com/oldrho/ip2provider)) - Resume the scan from last performed step -- Custom output folder +- Custom output folder option - All in one installer/updater script compatible with most distros - Diff support for continuous running (cron mode) - Support for targets with multiple domains - RaspberryPi/ARM support -- 5 modes (recon, passive, subdomains, web and all) +- Send scan results zipped over Slack, Discord and Telegram +- 6 modes (recon, passive, subdomains, web, osint and all) - Out of Scope Support - Notification support for Slack, Discord and Telegram ([notify](https://github.com/projectdiscovery/notify)) - + # Mindmap/Workflow - + ![Mindmap](images/mindmap_0321.png) - + ## Data Keep - + Follow these simple steps to end up having a private repository with your `API Keys` and `/Recon` data. 
-
+
 * Create a private __blank__ repository on `Git(Hub|Lab)` (Take into account size limits regarding Recon data upload)
 * Clone your project: `git clone https://gitlab.com/example/reconftw-data`
 * Get inside the cloned repository: `cd reconftw-data`
 * Add official repo as a new remote: `git remote add upstream https://github.com/six2dez/reconftw` (`upstream` is an example)
 * Update upstream's repo: `git fetch upstream`
 * Rebase current branch with the official one: `git rebase upstream/main master`
-
+
 ### Main commands:
-
+
 * Upload changes to your personal repo: `git add . && git commit -m "Data upload" && git push origin master`
 * Update tool anytime: `git fetch upstream && git rebase upstream/main master`
-
+
 ## How to contribute:
-
+
 If you want to contribute to this project you can do it in multiple ways:
 - Submitting an [issue](https://github.com/six2dez/reconftw/issues/new/choose) because you have found a bug or you have any suggestion or request.
 - Making a Pull Request from the [dev](https://github.com/six2dez/reconftw/tree/dev) branch because you want to improve the code or add something to the script.
-
-## Need help?
-
-- Take a look in the [wiki](https://github.com/six2dez/reconftw/wiki)
+
+## Need help? :information_source:
+
+- Take a look at the [wiki](https://github.com/six2dez/reconftw/wiki) section.
+- Check [FAQ](https://github.com/six2dez/reconftw/wiki/7.-FAQs) for commonly asked questions.
 - Ask for help in the [Telegram group](https://t.me/joinchat/TO_R8NYFhhbmI5co)
-
+
 ## You can support this work by buying me a coffee:
-
+
 [](https://www.buymeacoffee.com/six2dez)
-
+
+# Sponsors ❤️
+**This section shows the current financial sponsors of this project**
+
+
+[](https://github.com/0xtavian)
+
 # Thanks :pray:
 * Thank you for lending a helping hand towards the development of the project!
- + - [Spyse](https://spyse.com/) - [Networksdb](https://networksdb.io/) - [Intelx](https://intelx.io/) From bce9906866b0e98d01e54203eba487d4cec30419 Mon Sep 17 00:00:00 2001 From: six2dez Date: Mon, 17 May 2021 00:30:55 +0200 Subject: [PATCH 15/32] Added webscreenshot, recursive passive subdomains scan, fuzz axiom fix --- install.sh | 2 +- reconftw.cfg | 4 ++-- reconftw.sh | 17 +++++++++++++++-- reconftw_axiom.sh | 22 +++++++++++++++++++--- requirements.txt | 3 ++- 5 files changed, 39 insertions(+), 9 deletions(-) diff --git a/install.sh b/install.sh index 01ae8fed..268b5728 100755 --- a/install.sh +++ b/install.sh @@ -88,7 +88,7 @@ install_apt(){ eval $SUDO apt update -y $DEBUG_STD eval $SUDO DEBIAN_FRONTEND="noninteractive" apt install chromium-browser -y $DEBUG_STD eval $SUDO DEBIAN_FRONTEND="noninteractive" apt install chromium -y $DEBUG_STD - eval $SUDO DEBIAN_FRONTEND="noninteractive" apt install python3 python3-pip gcc build-essential ruby git curl libpcap-dev wget zip python3-dev pv dnsutils libssl-dev libffi-dev libxml2-dev libxslt1-dev zlib1g-dev nmap jq apt-transport-https lynx tor medusa -y $DEBUG_STD + eval $SUDO DEBIAN_FRONTEND="noninteractive" apt install python3 python3-pip gcc build-essential ruby git curl libpcap-dev wget zip python3-dev pv dnsutils libssl-dev libffi-dev libxml2-dev libxslt1-dev zlib1g-dev nmap jq apt-transport-https lynx tor medusa xvfb -y $DEBUG_STD eval $SUDO systemctl enable tor $DEBUG_STD } diff --git a/reconftw.cfg b/reconftw.cfg index 19a023c5..f0bf0b7f 100644 --- a/reconftw.cfg +++ b/reconftw.cfg @@ -61,8 +61,7 @@ WEBPROBEFULL=true WEBSCREENSHOT=true UNCOMMON_PORTS_WEB="81,300,591,593,832,981,1010,1311,1099,2082,2095,2096,2480,3000,3128,3333,4243,4567,4711,4712,4993,5000,5104,5108,5280,5281,5601,5800,6543,7000,7001,7396,7474,8000,8001,8008,8014,8042,8060,8069,8080,8081,8083,8088,8090,8091,8095,8118,8123,8172,8181,8222,8243,8280,8281,8333,8337,8443,8500,8834,8880,8888,8983,9000,9001,9043,9060,9080,9090,9091,9200,9443,9502,9800,9981,10000,10250,11371,12443,15672,16080,17778,18091,18092,20720,32000,55440,55672" # You can change to aquatone if gowitness fails, comment the one you don't want -AXIOM_SCREENSHOT_MODULE=gowitness -#AXIOM_SCREENSHOT_MODULE=aquatone +AXIOM_SCREENSHOT_MODULE=webscreenshot # Choose between aquatone,gowitness,webscreenshot # Host FAVICON=true @@ -123,6 +122,7 @@ DALFOX_THREADS=200 PUREDNS_PUBLIC_LIMIT=0 # Set between 2000 - 10000 if your router blows up, 0 is unlimited PUREDNS_TRUSTED_LIMIT=400 DIRDAR_THREADS=200 +WEBSCREENSHOT_THREADS=200 # Timeouts CMSSCAN_TIMEOUT=3600 diff --git a/reconftw.sh b/reconftw.sh index 255d2b8f..4c7d846d 100755 --- a/reconftw.sh +++ b/reconftw.sh @@ -485,7 +485,19 @@ function sub_permut(){ function sub_recursive(){ if { [ ! -f "$called_fn_dir/.${FUNCNAME[0]}" ] || [ "$DIFF" = true ]; } && [ "$SUBRECURSIVE" = true ]; then - if [[ $(cat .tmp/subs_no_resolved.txt | wc -l) -le 1000 ]]; then + + # Passive recursive + for sub in $(cat subdomains/subdomains.txt | rev | cut -d '.' 
-f 3 | rev | sort | uniq -c | sort -nr | grep -v '1 ' | sed -e 's/^[[:space:]]*//' | cut -d ' ' -f 2); do + subfinder -d $sub.$domain -all -silent &>>"$LOGFILE" | anew -q .tmp/passive_recursive.txt + assetfinder --subs-only $sub.$domain 2>>"$LOGFILE" | anew -q .tmp/passive_recursive.txt + amass enum -passive -d $sub.$domain -config $AMASS_CONFIG &>>"$LOGFILE" + findomain --quiet -t $sub.$domain &>>"$LOGFILE" | anew -q .tmp/passive_recursive.txt + done + puredns resolve .tmp/passive_recursive.txt -w .tmp/passive_recurs_tmp.txt -r $resolvers --resolvers-trusted $resolvers_trusted -l $PUREDNS_PUBLIC_LIMIT --rate-limit-trusted $PUREDNS_TRUSTED_LIMIT &>>"$LOGFILE" + [ -f ".tmp/passive_recurs_tmp.txt" ] && cat .tmp/passive_recurs_tmp.txt | anew -q subdomains/subdomains.txt + + # Bruteforce recursive + if [[ $(cat subdomains/subdomains.txt | wc -l) -le 1000 ]]; then start_subfunc "Running : Subdomains recursive search" echo "" > .tmp/brute_recursive_wordlist.txt for sub in $(cat subdomains/subdomains.txt); do @@ -632,7 +644,8 @@ function screenshot(){ if { [ ! -f "$called_fn_dir/.${FUNCNAME[0]}" ] || [ "$DIFF" = true ]; } && [ "$WEBSCREENSHOT" = true ]; then start_func "Web Screenshots" cat webs/webs.txt webs/webs_uncommon_ports.txt 2>>"$LOGFILE" | anew -q .tmp/webs_screenshots.txt - gowitness file -f .tmp/webs_screenshots.txt --disable-logging 2>>"$LOGFILE" + webscreenshot --no-xserver -r chrome -i .tmp/webs_screenshots.txt -w $WEBSCREENSHOT_THREADS -o screenshots + #gowitness file -f .tmp/webs_screenshots.txt --disable-logging 2>>"$LOGFILE" end_func "Results are saved in $domain/screenshots folder" ${FUNCNAME[0]} else if [ "$WEBSCREENSHOT" = false ]; then diff --git a/reconftw_axiom.sh b/reconftw_axiom.sh index bee3f1fa..62537d79 100755 --- a/reconftw_axiom.sh +++ b/reconftw_axiom.sh @@ -489,7 +489,21 @@ function sub_permut(){ function sub_recursive(){ if { [ ! -f "$called_fn_dir/.${FUNCNAME[0]}" ] || [ "$DIFF" = true ]; } && [ "$SUBRECURSIVE" = true ]; then - if [[ $(cat .tmp/subs_no_resolved.txt | wc -l) -le 1000 ]]; then + + # Passive recursive + for sub in $(cat subdomains/subdomains.txt | rev | cut -d '.' -f 3 | rev | sort | uniq -c | sort -nr | grep -v '1 ' | sed -e 's/^[[:space:]]*//' | cut -d ' ' -f 2); do + echo $sub.$domain | anew -q .tmp/sub_pass_recur_target.com + done + axiom-scan .tmp/sub_pass_recur_target.com -m subfinder -all -o .tmp/subfinder_prec.txt &>>"$LOGFILE" + axiom-scan .tmp/sub_pass_recur_target.com -m assetfinder -o .tmp/assetfinder_prec.txt &>>"$LOGFILE" + axiom-scan .tmp/sub_pass_recur_target.com -m amass -passive -o .tmp/amass_prec.txt &>>"$LOGFILE" + axiom-scan .tmp/sub_pass_recur_target.com -m findomain -o .tmp/findomain_prec.txt &>>"$LOGFILE" + cat .tmp/*_prec.txt | anew -q .tmp/passive_recursive.txt + axiom-scan .tmp/passive_recursive.txt -m puredns-resolve -r /home/op/lists/resolvers.txt -o .tmp/passive_recurs_tmp.txt &>>"$LOGFILE" + [ -f ".tmp/passive_recurs_tmp.txt" ] && cat .tmp/passive_recurs_tmp.txt | anew -q subdomains/subdomains.txt + + #Bruteforce recursive + if [[ $(cat subdomains/subdomains.txt | wc -l) -le 1000 ]]; then start_subfunc "Running : Subdomains recursive search" echo "" > .tmp/brute_recursive_wordlist.txt for sub in $(cat subdomains/subdomains.txt); do @@ -636,7 +650,8 @@ function screenshot(){ if { [ ! 
-f "$called_fn_dir/.${FUNCNAME[0]}" ] || [ "$DIFF" = true ]; } && [ "$WEBSCREENSHOT" = true ]; then start_func "Web Screenshots" cat webs/webs.txt webs/webs_uncommon_ports.txt 2>>"$LOGFILE" | anew -q .tmp/webs_screenshots.txt - axiom-scan .tmp/webs_screenshots.txt -m "$AXIOM_SCREENSHOT_MODULE" -o screenshots &>>"$LOGFILE" + axiom-scan .tmp/webs_screenshots.txt -m webscreenshot -w $WEBSCREENSHOT_THREADS -o screenshots &>>"$LOGFILE" +# axiom-scan .tmp/webs_screenshots.txt -m "$AXIOM_SCREENSHOT_MODULE" -o screenshots &>>"$LOGFILE" end_func "Results are saved in $domain/screenshots folder" ${FUNCNAME[0]} else if [ "$WEBSCREENSHOT" = false ]; then @@ -780,7 +795,7 @@ function fuzz(){ start_func "Web directory fuzzing" if [ -s "./webs/webs.txt" ]; then mkdir -p $dir/fuzzing - axiom-scan webs/webs.txt -m ffuf -H \'\"${HEADER}\"\' -wL $fuzz_wordlist -mc all -fc 404 -sf -s -maxtime $FFUF_MAXTIME -o $dir/fuzzing/ffuf-content.csv &>>"$LOGFILE" + axiom-scan webs/webs.txt -m ffuf -w /home/op/lists/onelistforallmicro.txt -H \"${HEADER}\" -mc all -fc 404 -sf -s -maxtime $FFUF_MAXTIME -o $dir/fuzzing/ffuf-content.csv &>>"$LOGFILE" grep -v "FUZZ,url,redirectlocation" $dir/fuzzing/ffuf-content.csv | awk -F "," '{print $2" "$5" "$6}' | sort > $dir/fuzzing/ffuf-content.tmp for sub in $(cat webs/webs.txt); do sub_out=$(echo $sub | sed -e 's|^[^/]*//||' -e 's|/.*$||') @@ -1441,6 +1456,7 @@ function resolvers_update(){ axiom-exec 'if [ \$(find "/home/op/lists/resolvers.txt" -mtime +1 -print) ] || [ \$(cat /home/op/lists/resolvers.txt | wc -l) -le 40 ] ; then dnsvalidator -tL https://public-dns.info/nameservers.txt -threads 200 -o /home/op/lists/resolvers.txt ; fi' &>/dev/null notification "Updated\n" good axiom-exec 'wget -O /home/op/lists/resolvers_trusted.txt https://gist.githubusercontent.com/six2dez/ae9ed7e5c786461868abd3f2344401b6/raw' &>/dev/null + axiom-exec 'wget -O /home/op/lists/onelistforallmicro.txt https://raw.githubusercontent.com/six2dez/OneListForAll/main/onelistforallmicro.txt' &>/dev/null update_resolvers=false fi } diff --git a/requirements.txt b/requirements.txt index 0c115ccf..e71a9331 100644 --- a/requirements.txt +++ b/requirements.txt @@ -45,4 +45,5 @@ boto3 s3scanner shodan dnspython -pytest-runner \ No newline at end of file +pytest-runner +webscreenshot \ No newline at end of file From 6da095ebb76fcc8578ae0980cf29a74ee37298ca Mon Sep 17 00:00:00 2001 From: six2dez Date: Mon, 17 May 2021 00:33:24 +0200 Subject: [PATCH 16/32] Small improvements --- reconftw.sh | 4 ++-- reconftw_axiom.sh | 4 ++-- 2 files changed, 4 insertions(+), 4 deletions(-) diff --git a/reconftw.sh b/reconftw.sh index 4c7d846d..dad206f9 100755 --- a/reconftw.sh +++ b/reconftw.sh @@ -487,8 +487,8 @@ function sub_recursive(){ if { [ ! -f "$called_fn_dir/.${FUNCNAME[0]}" ] || [ "$DIFF" = true ]; } && [ "$SUBRECURSIVE" = true ]; then # Passive recursive - for sub in $(cat subdomains/subdomains.txt | rev | cut -d '.' -f 3 | rev | sort | uniq -c | sort -nr | grep -v '1 ' | sed -e 's/^[[:space:]]*//' | cut -d ' ' -f 2); do - subfinder -d $sub.$domain -all -silent &>>"$LOGFILE" | anew -q .tmp/passive_recursive.txt + for sub in $(cat subdomains/subdomains.txt | rev | cut -d '.' 
-f 3,2,1 | rev | sort | uniq -c | sort -nr | grep -v '1 ' | sed -e 's/^[[:space:]]*//' | cut -d ' ' -f 2); do + subfinder -d $sub -all -silent &>>"$LOGFILE" | anew -q .tmp/passive_recursive.txt assetfinder --subs-only $sub.$domain 2>>"$LOGFILE" | anew -q .tmp/passive_recursive.txt amass enum -passive -d $sub.$domain -config $AMASS_CONFIG &>>"$LOGFILE" findomain --quiet -t $sub.$domain &>>"$LOGFILE" | anew -q .tmp/passive_recursive.txt diff --git a/reconftw_axiom.sh b/reconftw_axiom.sh index 62537d79..1cb6dfb9 100755 --- a/reconftw_axiom.sh +++ b/reconftw_axiom.sh @@ -491,8 +491,8 @@ function sub_recursive(){ if { [ ! -f "$called_fn_dir/.${FUNCNAME[0]}" ] || [ "$DIFF" = true ]; } && [ "$SUBRECURSIVE" = true ]; then # Passive recursive - for sub in $(cat subdomains/subdomains.txt | rev | cut -d '.' -f 3 | rev | sort | uniq -c | sort -nr | grep -v '1 ' | sed -e 's/^[[:space:]]*//' | cut -d ' ' -f 2); do - echo $sub.$domain | anew -q .tmp/sub_pass_recur_target.com + for sub in $(cat subdomains/subdomains.txt | rev | cut -d '.' -f 3,2,1 | rev | sort | uniq -c | sort -nr | grep -v '1 ' | sed -e 's/^[[:space:]]*//' | cut -d ' ' -f 2); do + echo $sub | anew -q .tmp/sub_pass_recur_target.com done axiom-scan .tmp/sub_pass_recur_target.com -m subfinder -all -o .tmp/subfinder_prec.txt &>>"$LOGFILE" axiom-scan .tmp/sub_pass_recur_target.com -m assetfinder -o .tmp/assetfinder_prec.txt &>>"$LOGFILE" From b58376f18706dd6d716587ced1d0651c511166e2 Mon Sep 17 00:00:00 2001 From: six2dez Date: Mon, 17 May 2021 13:57:04 +0200 Subject: [PATCH 17/32] Added ssti wordlist, improved installer double check, fix passive recursive, lfi and ssti improvements --- install.sh | 56 ++++++++++++++++++++++++++--------------------- reconftw.cfg | 1 + reconftw.sh | 18 +++++++-------- reconftw_axiom.sh | 24 ++++++++++---------- 4 files changed, 52 insertions(+), 47 deletions(-) diff --git a/install.sh b/install.sh index 268b5728..dfe1a82f 100755 --- a/install.sh +++ b/install.sh @@ -60,6 +60,7 @@ repos["commix"]="commixproject/commix" repos["JSA"]="w9w/JSA" dir=${tools} +double_check=false if grep -q "ARMv" /proc/cpuinfo then @@ -189,6 +190,7 @@ for gotool in "${!gotools[@]}"; do printf "${yellow} $gotool installed (${go_step}/${#gotools[@]})${reset}\n" else printf "${red} Unable to install $gotool, try manually (${go_step}/${#gotools[@]})${reset}\n" + double_check=true fi done @@ -215,6 +217,7 @@ for repo in "${!repos[@]}"; do printf "${yellow} $repo installed (${repos_step}/${#repos[@]})${reset}\n" else printf "${red} Unable to install $repo, try manually (${repos_step}/${#repos[@]})${reset}\n" + double_check=true fi if [ -s "setup.py" ]; then eval $SUDO python3 setup.py install $DEBUG_STD @@ -263,33 +266,36 @@ eval wget -O permutations_list.txt https://gist.github.com/six2dez/ffc2b14d283e8 eval wget -nc -O ssrf.py https://gist.github.com/h4ms1k/adcc340495d418fcd72ec727a116fea2/raw $DEBUG_STD eval wget -nc -O fuzz_wordlist.txt https://raw.githubusercontent.com/six2dez/OneListForAll/main/onelistforallmicro.txt $DEBUG_STD eval wget -O lfi_wordlist.txt https://gist.githubusercontent.com/six2dez/a89a0c7861d49bb61a09822d272d5395/raw $DEBUG_STD +eval wget -O ssti_wordlist.txt https://gist.githubusercontent.com/six2dez/ab5277b11da7369bf4e9db72b49ad3c1/raw $DEBUG_STD ## Last check -printf "${bblue} Running: Double check for installed tools ${reset}\n\n" -go_step=0 -for gotool in "${!gotools[@]}"; do - go_step=$((go_step + 1)) - eval type -P $gotool $DEBUG_STD || { eval ${gotools[$gotool]} $DEBUG_STD; } - exit_status=$? 
-done -repos_step=0 -for repo in "${!repos[@]}"; do - repos_step=$((repos_step + 1)) - eval cd $dir/$repo $DEBUG_STD || { eval git clone https://github.com/${repos[$repo]} $dir/$repo $DEBUG_STD && cd $dir/$repo; } - eval git pull $DEBUG_STD - exit_status=$? - if [ -s "setup.py" ]; then - eval $SUDO python3 setup.py install $DEBUG_STD - fi - if [ "massdns" = "$repo" ]; then - eval make $DEBUG_STD && strip -s bin/massdns && eval $SUDO cp bin/massdns /usr/bin/ $DEBUG_ERROR - elif [ "gf" = "$repo" ]; then - eval cp -r examples ~/.gf $DEBUG_ERROR - elif [ "Gf-Patterns" = "$repo" ]; then - eval mv *.json ~/.gf $DEBUG_ERROR - fi - cd $dir -done +if [ "$double_check" = "true" ]; then + printf "${bblue} Running: Double check for installed tools ${reset}\n\n" + go_step=0 + for gotool in "${!gotools[@]}"; do + go_step=$((go_step + 1)) + eval type -P $gotool $DEBUG_STD || { eval ${gotools[$gotool]} $DEBUG_STD; } + exit_status=$? + done + repos_step=0 + for repo in "${!repos[@]}"; do + repos_step=$((repos_step + 1)) + eval cd $dir/$repo $DEBUG_STD || { eval git clone https://github.com/${repos[$repo]} $dir/$repo $DEBUG_STD && cd $dir/$repo; } + eval git pull $DEBUG_STD + exit_status=$? + if [ -s "setup.py" ]; then + eval $SUDO python3 setup.py install $DEBUG_STD + fi + if [ "massdns" = "$repo" ]; then + eval make $DEBUG_STD && strip -s bin/massdns && eval $SUDO cp bin/massdns /usr/bin/ $DEBUG_ERROR + elif [ "gf" = "$repo" ]; then + eval cp -r examples ~/.gf $DEBUG_ERROR + elif [ "Gf-Patterns" = "$repo" ]; then + eval mv *.json ~/.gf $DEBUG_ERROR + fi + cd $dir + done +fi printf "${bblue} Running: Performing last configurations ${reset}\n\n" ## Last steps diff --git a/reconftw.cfg b/reconftw.cfg index f0bf0b7f..021ad6b1 100644 --- a/reconftw.cfg +++ b/reconftw.cfg @@ -133,6 +133,7 @@ HTTPX_UNCOMMONPORTS_TIMEOUT=10 # Seconds # lists fuzz_wordlist=${tools}/fuzz_wordlist.txt lfi_wordlist=${tools}/lfi_wordlist.txt +ssti_wordlist=${tools}/ssti_wordlist.txt subs_wordlist=${tools}/subdomains.txt subs_wordlist_big=${tools}/subdomains_big.txt resolvers=${tools}/resolvers.txt diff --git a/reconftw.sh b/reconftw.sh index dad206f9..74d3ab9f 100755 --- a/reconftw.sh +++ b/reconftw.sh @@ -484,8 +484,9 @@ function sub_permut(){ } function sub_recursive(){ - if { [ ! -f "$called_fn_dir/.${FUNCNAME[0]}" ] || [ "$DIFF" = true ]; } && [ "$SUBRECURSIVE" = true ]; then + if { [ ! -f "$called_fn_dir/.${FUNCNAME[0]}" ] || [ "$DIFF" = true ]; } && { [ "$SUBRECURSIVE" = true ] || [ "$DEEP" = true ]; } ; then + start_subfunc "Running : Subdomains recursive search" # Passive recursive for sub in $(cat subdomains/subdomains.txt | rev | cut -d '.' 
-f 3,2,1 | rev | sort | uniq -c | sort -nr | grep -v '1 ' | sed -e 's/^[[:space:]]*//' | cut -d ' ' -f 2); do subfinder -d $sub -all -silent &>>"$LOGFILE" | anew -q .tmp/passive_recursive.txt @@ -498,7 +499,6 @@ function sub_recursive(){ # Bruteforce recursive if [[ $(cat subdomains/subdomains.txt | wc -l) -le 1000 ]]; then - start_subfunc "Running : Subdomains recursive search" echo "" > .tmp/brute_recursive_wordlist.txt for sub in $(cat subdomains/subdomains.txt); do sed "s/$/.$sub/" $subs_wordlist >> .tmp/brute_recursive_wordlist.txt @@ -514,7 +514,7 @@ function sub_recursive(){ NUMOFLINES=$(cat .tmp/permute_recursive.txt .tmp/brute_recursive.txt 2>>"$LOGFILE" | grep "\.$domain$\|^$domain$" | anew subdomains/subdomains.txt | wc -l) end_subfunc "${NUMOFLINES} new subs (recursive)" ${FUNCNAME[0]} else - notification "Skipping Recursive: Too Many Subdomains" warn + notification "Skipping Recursive BF: Too Many Subdomains" warn fi else if [ "$SUBRECURSIVE" = false ]; then @@ -1190,7 +1190,7 @@ function lfi(){ start_func "LFI checks" cat gf/lfi.txt | qsreplace FUZZ | anew -q .tmp/tmp_lfi.txt for url in $(cat .tmp/tmp_lfi.txt); do - ffuf -v -mc 200 -t $FFUF_THREADS -H "${HEADER}" -w $lfi_wordlist -u $url -mr "root:" &>/dev/null | grep "URL" | sed 's/| URL | //' | anew -q vulns/lfi.txt + ffuf -v -t $FFUF_THREADS -H "${HEADER}" -w $lfi_wordlist -u $url -mr "root:" &>/dev/null | grep "URL" | sed 's/| URL | //' | anew -q vulns/lfi.txt done end_func "Results are saved in vulns/lfi.txt" ${FUNCNAME[0]} else @@ -1207,10 +1207,10 @@ function lfi(){ function ssti(){ if { [ ! -f "$called_fn_dir/.${FUNCNAME[0]}" ] || [ "$DIFF" = true ]; } && [ "$SSTI" = true ] && [ -s "gf/ssti.txt" ]; then start_func "SSTI checks" - cat gf/ssti.txt | qsreplace "ssti{{7*7}}" | anew -q .tmp/ssti_fuzz.txt - ffuf -v -mc 200 -t $FFUF_THREADS -H "${HEADER}" -w .tmp/ssti_fuzz.txt -u FUZZ -mr "ssti49" &>/dev/null | grep "URL" | sed 's/| URL | //' | anew -q vulns/ssti.txt - cat gf/ssti.txt | qsreplace "{{''.class.mro[2].subclasses()[40]('/etc/passwd').read()}}" | anew -q .tmp/ssti_fuzz2.txt - ffuf -v -mc 200 -t $FFUF_THREADS -H "${HEADER}" -w .tmp/ssti_fuzz.txt -u FUZZ -mr "root:" &>/dev/null | grep "URL" | sed 's/| URL | //' | anew -q vulns/ssti.txt + cat gf/ssti.txt | qsreplace FUZZ | anew -q .tmp/tmp_ssti.txt + for url in $(cat .tmp/tmp_ssti.txt); do + ffuf -v -t $FFUF_THREADS -H "${HEADER}" -w $ssti_wordlist -u $url -mr "ssti49" &>/dev/null | grep "URL" | sed 's/| URL | //' | anew -q vulns/ssti.txt + done end_func "Results are saved in vulns/ssti.txt" ${FUNCNAME[0]} else if [ "$SSTI" = false ]; then @@ -1925,7 +1925,7 @@ function help(){ ########################################### START SCRIPT ##################################################### ############################################################################################################### -PROGARGS=$(getopt -o 'd:m:l:x:i:o:f:rspawvh::' --long 'domain:,list:,recon,subdomains,passive,all,web,osint,deep,help' -n 'reconFTW' -- "$@") +PROGARGS=$(getopt -o 'd:m:l:x:i:o:f:rspanwvh::' --long 'domain:,list:,recon,subdomains,passive,all,web,osint,deep,help' -n 'reconFTW' -- "$@") # Note the quotes around "$PROGARGS": they are essential! 
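The `getopt` change above is what wires the new `-n`/OSINT mode and the long-form flags (`--recon`, `--subdomains`, `--passive`, `--all`, `--web`, `--osint`) into the script. As a minimal sketch of the pattern involved (an illustrative, reduced option set, not reconFTW's literal parsing loop, which continues beyond this hunk), the normalized output of `getopt` is re-assigned with `eval set --` and then consumed in a `case` loop:

```bash
#!/usr/bin/env bash
# Minimal sketch of GNU getopt parsing with long options (illustrative
# subset, not reconFTW's literal loop). The quotes around "$PROGARGS"
# are essential: without them, arguments containing spaces are re-split.
PROGARGS=$(getopt -o 'd:rn' --long 'domain:,recon,osint' -n 'example' -- "$@") || exit 1
eval set -- "$PROGARGS"
while true; do
    case "$1" in
        -d|--domain) domain="$2"; shift 2 ;;   # option that takes a value
        -r|--recon)  mode="recon"; shift ;;    # boolean mode flag
        -n|--osint)  mode="osint"; shift ;;
        --) shift; break ;;                    # end-of-options marker
    esac
done
echo "mode=${mode:-none} domain=${domain:-unset}"
```

With this pattern, `--osint` parses identically to `-n`, which is why the help text, the short option string, and the long option list all change together in this patch.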
diff --git a/reconftw_axiom.sh b/reconftw_axiom.sh index 1cb6dfb9..e1b6ccbb 100755 --- a/reconftw_axiom.sh +++ b/reconftw_axiom.sh @@ -271,9 +271,7 @@ function subdomains_full(){ sub_active sub_brute sub_permut - if [ "$DEEP" = true ]; then - sub_recursive - fi + sub_recursive sub_dns sub_scraping webprobe_simple @@ -488,8 +486,9 @@ function sub_permut(){ } function sub_recursive(){ - if { [ ! -f "$called_fn_dir/.${FUNCNAME[0]}" ] || [ "$DIFF" = true ]; } && [ "$SUBRECURSIVE" = true ]; then + if { [ ! -f "$called_fn_dir/.${FUNCNAME[0]}" ] || [ "$DIFF" = true ]; } && { [ "$SUBRECURSIVE" = true ] || [ "$DEEP" = true ]; } ; then + start_subfunc "Running : Subdomains recursive search" # Passive recursive for sub in $(cat subdomains/subdomains.txt | rev | cut -d '.' -f 3,2,1 | rev | sort | uniq -c | sort -nr | grep -v '1 ' | sed -e 's/^[[:space:]]*//' | cut -d ' ' -f 2); do echo $sub | anew -q .tmp/sub_pass_recur_target.com @@ -498,13 +497,12 @@ function sub_recursive(){ axiom-scan .tmp/sub_pass_recur_target.com -m assetfinder -o .tmp/assetfinder_prec.txt &>>"$LOGFILE" axiom-scan .tmp/sub_pass_recur_target.com -m amass -passive -o .tmp/amass_prec.txt &>>"$LOGFILE" axiom-scan .tmp/sub_pass_recur_target.com -m findomain -o .tmp/findomain_prec.txt &>>"$LOGFILE" - cat .tmp/*_prec.txt | anew -q .tmp/passive_recursive.txt + eval cat .tmp/*_prec.txt 2>>"$LOGFILE" | anew -q .tmp/passive_recursive.txt axiom-scan .tmp/passive_recursive.txt -m puredns-resolve -r /home/op/lists/resolvers.txt -o .tmp/passive_recurs_tmp.txt &>>"$LOGFILE" [ -f ".tmp/passive_recurs_tmp.txt" ] && cat .tmp/passive_recurs_tmp.txt | anew -q subdomains/subdomains.txt #Bruteforce recursive if [[ $(cat subdomains/subdomains.txt | wc -l) -le 1000 ]]; then - start_subfunc "Running : Subdomains recursive search" echo "" > .tmp/brute_recursive_wordlist.txt for sub in $(cat subdomains/subdomains.txt); do sed "s/$/.$sub/" $subs_wordlist >> .tmp/brute_recursive_wordlist.txt @@ -520,7 +518,7 @@ function sub_recursive(){ NUMOFLINES=$(cat .tmp/permute_recursive.txt .tmp/brute_recursive.txt 2>>"$LOGFILE" | grep "\.$domain$\|^$domain$" | anew subdomains/subdomains.txt | wc -l) end_subfunc "${NUMOFLINES} new subs (recursive)" ${FUNCNAME[0]} else - notification "Skipping Recursive: Too Many Subdomains" warn + notification "Skipping Recursive BF: Too Many Subdomains" warn fi else if [ "$SUBRECURSIVE" = false ]; then @@ -1197,7 +1195,7 @@ function lfi(){ start_func "LFI checks" cat gf/lfi.txt | qsreplace FUZZ | anew -q .tmp/tmp_lfi.txt for url in $(cat .tmp/tmp_lfi.txt); do - ffuf -v -mc 200 -t $FFUF_THREADS -H "${HEADER}" -w $lfi_wordlist -u $url -mr "root:" &>/dev/null | grep "URL" | sed 's/| URL | //' | anew -q vulns/lfi.txt + ffuf -v -t $FFUF_THREADS -H "${HEADER}" -w $lfi_wordlist -u $url -mr "root:" &>/dev/null | grep "URL" | sed 's/| URL | //' | anew -q vulns/lfi.txt done end_func "Results are saved in vulns/lfi.txt" ${FUNCNAME[0]} else @@ -1214,10 +1212,10 @@ function lfi(){ function ssti(){ if { [ ! 
-f "$called_fn_dir/.${FUNCNAME[0]}" ] || [ "$DIFF" = true ]; } && [ "$SSTI" = true ] && [ -s "gf/ssti.txt" ]; then start_func "SSTI checks" - cat gf/ssti.txt | qsreplace "ssti{{7*7}}" | anew -q .tmp/ssti_fuzz.txt - ffuf -v -mc 200 -t $FFUF_THREADS -H "${HEADER}" -w .tmp/ssti_fuzz.txt -u FUZZ -mr "ssti49" &>/dev/null | grep "URL" | sed 's/| URL | //' | anew -q vulns/ssti.txt - cat gf/ssti.txt | qsreplace "{{''.class.mro[2].subclasses()[40]('/etc/passwd').read()}}" | anew -q .tmp/ssti_fuzz2.txt - ffuf -v -mc 200 -t $FFUF_THREADS -H "${HEADER}" -w .tmp/ssti_fuzz.txt -u FUZZ -mr "root:" &>/dev/null | grep "URL" | sed 's/| URL | //' | anew -q vulns/ssti.txt + cat gf/ssti.txt | qsreplace FUZZ | anew -q .tmp/tmp_ssti.txt + for url in $(cat .tmp/tmp_ssti.txt); do + ffuf -v -t $FFUF_THREADS -H "${HEADER}" -w $ssti_wordlist -u $url -mr "ssti49" &>/dev/null | grep "URL" | sed 's/| URL | //' | anew -q vulns/ssti.txt + done end_func "Results are saved in vulns/ssti.txt" ${FUNCNAME[0]} else if [ "$SSTI" = false ]; then @@ -2027,7 +2025,7 @@ function help(){ ########################################### START SCRIPT ##################################################### ############################################################################################################### -PROGARGS=$(getopt -o 'd:m:l:x:i:o:f:rspawvh::' --long 'domain:,list:,recon,subdomains,passive,all,web,osint,deep,help' -n 'reconFTW' -- "$@") +PROGARGS=$(getopt -o 'd:m:l:x:i:o:f:rspanwvh::' --long 'domain:,list:,recon,subdomains,passive,all,web,osint,deep,help' -n 'reconFTW' -- "$@") # Note the quotes around "$PROGARGS": they are essential! From 923ce48ffb35450d24cc40e77f13822ee8d34422 Mon Sep 17 00:00:00 2001 From: six2dez Date: Mon, 17 May 2021 16:54:42 +0200 Subject: [PATCH 18/32] Small fix --- reconftw.sh | 2 +- reconftw_axiom.sh | 3 ++- 2 files changed, 3 insertions(+), 2 deletions(-) diff --git a/reconftw.sh b/reconftw.sh index 74d3ab9f..2cedc7fb 100755 --- a/reconftw.sh +++ b/reconftw.sh @@ -529,7 +529,7 @@ function subtakeover(){ if { [ ! 
-f "$called_fn_dir/.${FUNCNAME[0]}" ] || [ "$DIFF" = true ]; } && [ "$SUBTAKEOVER" = true ]; then start_func "Looking for possible subdomain takeover" touch .tmp/tko.txt - cat webs/webs.txt | nuclei -silent -t ~/nuclei-templates/takeovers/ -r $resolvers_trusted -o .tmp/tko.txt + [ -f "webs/webs.txt" ] && cat webs/webs.txt | nuclei -silent -t ~/nuclei-templates/takeovers/ -r $resolvers_trusted -o .tmp/tko.txt NUMOFLINES=$(cat .tmp/tko.txt 2>>"$LOGFILE" | anew webs/takeover.txt | wc -l) if [ "$NUMOFLINES" -gt 0 ]; then notification "${NUMOFLINES} new possible takeovers found" info diff --git a/reconftw_axiom.sh b/reconftw_axiom.sh index e1b6ccbb..01eb248d 100755 --- a/reconftw_axiom.sh +++ b/reconftw_axiom.sh @@ -328,6 +328,7 @@ function sub_crt(){ start_subfunc "Running : Crtsh Subdomain Enumeration" echo "python3 -u /home/op/recon/ctfr/ctfr.py -d ${domain} -o ${domain}_ctfr.txt; cat ${domain}_ctfr.txt" > .tmp/sub_ctrf_commands.txt axiom-scan .tmp/sub_ctrf_commands.txt -m exec -o .tmp/crtsh_subs_tmp.txt &>>"$LOGFILE" + sed -i '1,11d' .tmp/crtsh_subs_tmp.txt curl "https://tls.bufferover.run/dns?q=.${domain}" 2>>"$LOGFILE" | jq -r .Results[] 2>>"$LOGFILE" | cut -d ',' -f3 | grep -F ".$domain" | anew -q .tmp/crtsh_subs_tmp.txt curl "https://dns.bufferover.run/dns?q=.${domain}" 2>>"$LOGFILE" | jq -r '.FDNS_A'[],'.RDNS'[] 2>>"$LOGFILE" | cut -d ',' -f2 | grep -F ".$domain" | anew -q .tmp/crtsh_subs_tmp.txt NUMOFLINES=$(cat .tmp/crtsh_subs_tmp.txt 2>>"$LOGFILE" | anew .tmp/crtsh_subs.txt | wc -l) @@ -533,7 +534,7 @@ function subtakeover(){ if { [ ! -f "$called_fn_dir/.${FUNCNAME[0]}" ] || [ "$DIFF" = true ]; } && [ "$SUBTAKEOVER" = true ]; then start_func "Looking for possible subdomain takeover" touch .tmp/tko.txt - axiom-scan webs/webs.txt -m nuclei -w /home/op/recon/nuclei/takeovers/ -o .tmp/tko.txt &>>"$LOGFILE" + [ -f "webs/webs.txt" ] && axiom-scan webs/webs.txt -m nuclei -w /home/op/recon/nuclei/takeovers/ -o .tmp/tko.txt &>>"$LOGFILE" NUMOFLINES=$(cat .tmp/tko.txt 2>>"$LOGFILE" | anew webs/takeover.txt | wc -l) if [ "$NUMOFLINES" -gt 0 ]; then notification "${NUMOFLINES} new possible takeovers found" info From 5a307e14278e31d8e1bac58d890c8e8f38fad025 Mon Sep 17 00:00:00 2001 From: Jinay Patel <50541295+0-0eth0@users.noreply.github.com> Date: Tue, 18 May 2021 01:47:08 +0530 Subject: [PATCH 19/32] Update reconftw.cfg Added "REMOVELOG" if set it to true .log file will get deleted after recon completes. --- reconftw.cfg | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/reconftw.cfg b/reconftw.cfg index 021ad6b1..8a60c967 100644 --- a/reconftw.cfg +++ b/reconftw.cfg @@ -101,6 +101,7 @@ NOTIFICATION=false DEEP=false DIFF=false REMOVETMP=false +REMOVELOG=false PROXY=false SENDZIPNOTIFY=false PRESERVE=false # set to true to avoid deleting the .called_fn files on really large scans @@ -157,4 +158,4 @@ yellow='\033[0;33m' red='\033[0;31m' blue='\033[0;34m' green='\033[0;32m' -reset='\033[0m' \ No newline at end of file +reset='\033[0m' From a4b090ea7ebb584b436af84c24b9cd9307040cba Mon Sep 17 00:00:00 2001 From: Jinay Patel <50541295+0-0eth0@users.noreply.github.com> Date: Tue, 18 May 2021 01:50:22 +0530 Subject: [PATCH 20/32] Update reconftw_axiom.sh Added "REMOVELOG" function if you set it to true in reconftw.cfg file it will remove the .log file at the end of the recon process. 
--- reconftw_axiom.sh | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/reconftw_axiom.sh b/reconftw_axiom.sh index 01eb248d..195431bc 100755 --- a/reconftw_axiom.sh +++ b/reconftw_axiom.sh @@ -1614,6 +1614,10 @@ function end(){ if [ "$REMOVETMP" = true ]; then rm -rf $dir/.tmp fi + + if [ "$REMOVELOG" = true ]; then + rm -rf $dir/.log + fi if [ -n "$dir_output" ]; then output From cfb65bcd8601e49761667beb6ba82ce4da75cf58 Mon Sep 17 00:00:00 2001 From: Jinay Patel <50541295+0-0eth0@users.noreply.github.com> Date: Tue, 18 May 2021 01:51:14 +0530 Subject: [PATCH 21/32] Update reconftw.sh Added REMOVELOG function --- reconftw.sh | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/reconftw.sh b/reconftw.sh index 2cedc7fb..eeb84e3f 100755 --- a/reconftw.sh +++ b/reconftw.sh @@ -1540,6 +1540,10 @@ function end(){ rm -rf $dir/.tmp fi + if [ "$REMOVELOG" = true ]; then + rm -rf $dir/.log + fi + if [ -n "$dir_output" ]; then output finaldir=$dir_output From 48b1d47db4330e2250eb8ef12b029d314fa1e4dd Mon Sep 17 00:00:00 2001 From: Jinay Patel <50541295+0-0eth0@users.noreply.github.com> Date: Tue, 18 May 2021 01:52:30 +0530 Subject: [PATCH 22/32] Update README.md EDITED CONFIG FILE --- README.md | 1 + 1 file changed, 1 insertion(+) diff --git a/README.md b/README.md index 8aa39b22..64226b60 100644 --- a/README.md +++ b/README.md @@ -231,6 +231,7 @@ NOTIFICATION=false DEEP=false DIFF=false REMOVETMP=false +REMOVELOG=false PROXY=false SENDZIPNOTIFY=false From 4e0128810ca1b288925a3fdb2d8bddfad341a321 Mon Sep 17 00:00:00 2001 From: six2dez Date: Tue, 18 May 2021 09:58:51 +0200 Subject: [PATCH 23/32] Added error controls --- reconftw.sh | 270 ++++++++++++++++++++------------------- reconftw_axiom.sh | 319 +++++++++++++++++++++++++--------------------- 2 files changed, 315 insertions(+), 274 deletions(-) diff --git a/reconftw.sh b/reconftw.sh index eeb84e3f..79d7abc3 100755 --- a/reconftw.sh +++ b/reconftw.sh @@ -178,14 +178,13 @@ function emails(){ cd "$tools/theHarvester" || { echo "Failed to cd directory in ${FUNCNAME[0]} @ line ${LINENO}"; exit 1; } python3 theHarvester.py -d $domain -b all 2>>"$LOGFILE" > $dir/.tmp/harvester.txt cd "$dir" || { echo "Failed to cd to $dir in ${FUNCNAME[0]} @ line ${LINENO}"; exit 1; } - cat .tmp/harvester.txt | awk '/Emails/,/Hosts/' | sed -e '1,2d' | head -n -2 | sed -e '/Searching /d' -e '/exception has occurred/d' -e '/found:/Q' | anew -q osint/emails.txt - cat .tmp/harvester.txt | awk '/Users/,/IPs/' | sed -e '1,2d' | head -n -2 | sed -e '/Searching /d' -e '/exception has occurred/d' -e '/found:/Q' | anew -q osint/users.txt - cat .tmp/harvester.txt | awk '/Links/,/Users/' | sed -e '1,2d' | head -n -2 | sed -e '/Searching /d' -e '/exception has occurred/d' -e '/found:/Q' | anew -q osint/linkedin.txt - - h8mail -t $domain -q domain --loose -c $tools/h8mail_config.ini -j .tmp/h8_results.json &>>"$LOGFILE" - if [ -s ".tmp/h8_results.json" ]; then - cat .tmp/h8_results.json | jq -r '.targets[0] | .data[] | .[]' | cut -d '-' -f2 | anew -q osint/h8mail.txt + if [ -s ".tmp/harvester.txt" ]; then + cat .tmp/harvester.txt | awk '/Emails/,/Hosts/' | sed -e '1,2d' | head -n -2 | sed -e '/Searching /d' -e '/exception has occurred/d' -e '/found:/Q' | anew -q osint/emails.txt + cat .tmp/harvester.txt | awk '/Users/,/IPs/' | sed -e '1,2d' | head -n -2 | sed -e '/Searching /d' -e '/exception has occurred/d' -e '/found:/Q' | anew -q osint/users.txt + cat .tmp/harvester.txt | awk '/Links/,/Users/' | sed -e '1,2d' | head -n -2 | sed -e '/Searching /d' -e '/exception has occurred/d' 
-e '/found:/Q' | anew -q osint/linkedin.txt fi + h8mail -t $domain -q domain --loose -c $tools/h8mail_config.ini -j .tmp/h8_results.json &>>"$LOGFILE" + [ -s ".tmp/h8_results.json" ] && cat .tmp/h8_results.json | jq -r '.targets[0] | .data[] | .[]' | cut -d '-' -f2 | anew -q osint/h8mail.txt PWNDB_STATUS=$(timeout 15s curl -Is --socks5-hostname localhost:9050 http://pwndb2am4tzkvold.onion | grep HTTP | cut -d ' ' -f2) @@ -213,11 +212,11 @@ function domain_info(){ if { [ ! -f "$called_fn_dir/.${FUNCNAME[0]}" ] || [ "$DIFF" = true ]; } && [ "$DOMAIN_INFO" = true ] && [ "$OSINT" = true ]; then start_func "Searching domain info (whois, registrant name/email domains)" lynx -dump "https://domainbigdata.com/${domain}" | tail -n +19 > osint/domain_info_general.txt - - cat osint/domain_info_general.txt | grep '/nj/' | tr -s ' ' ',' | cut -d ',' -f3 > .tmp/domain_registrant_name.txt - cat osint/domain_info_general.txt | grep '/mj/' | tr -s ' ' ',' | cut -d ',' -f3 > .tmp/domain_registrant_email.txt - cat osint/domain_info_general.txt | grep -E "\b([0-9]{1,3}\.){3}[0-9]{1,3}\b" | grep "https://domainbigdata.com" | tr -s ' ' ',' | cut -d ',' -f3 > .tmp/domain_registrant_ip.txt - + if [ -s "osint/domain_info_general.txt" ]; then + cat osint/domain_info_general.txt | grep '/nj/' | tr -s ' ' ',' | cut -d ',' -f3 > .tmp/domain_registrant_name.txt + cat osint/domain_info_general.txt | grep '/mj/' | tr -s ' ' ',' | cut -d ',' -f3 > .tmp/domain_registrant_email.txt + cat osint/domain_info_general.txt | grep -E "\b([0-9]{1,3}\.){3}[0-9]{1,3}\b" | grep "https://domainbigdata.com" | tr -s ' ' ',' | cut -d ',' -f3 > .tmp/domain_registrant_ip.txt + fi sed -i -n '/Copyright/q;p' osint/domain_info_general.txt if [ -s ".tmp/domain_registrant_name.txt" ]; then @@ -271,9 +270,7 @@ function subdomains_full(){ sub_active sub_brute sub_permut - if [ "$DEEP" = true ]; then - sub_recursive - fi + sub_recursive sub_dns sub_scraping webprobe_simple @@ -316,7 +313,7 @@ function sub_passive(){ if echo $domain | grep -q ".mil$"; then mildew mv mildew.out .tmp/mildew.out - cat .tmp/mildew.out | grep ".$domain$" | anew -q .tmp/mil_psub.txt + [ -s ".tmp/mildew.out" ] && cat .tmp/mildew.out | grep ".$domain$" | anew -q .tmp/mil_psub.txt fi NUMOFLINES=$(cat .tmp/*_psub.txt 2>>"$LOGFILE" | sed "s/*.//" | anew .tmp/passive_subs.txt | wc -l) end_subfunc "${NUMOFLINES} new subs (passive)" ${FUNCNAME[0]} @@ -345,9 +342,7 @@ function sub_crt(){ function sub_active(){ if [ ! -f "$called_fn_dir/.${FUNCNAME[0]}" ] || [ "$DIFF" = true ]; then start_subfunc "Running : Active Subdomain Enumeration" - if [ -s "${inScope_file}" ]; then - cat ${inScope_file} .tmp/inscope_subs.txt - fi + [ -s "${inScope_file}" ] && cat ${inScope_file} .tmp/inscope_subs.txt cat .tmp/*_subs.txt | anew -q .tmp/subs_no_resolved.txt deleteOutScoped $outOfScope_file .tmp/subs_no_resolved.txt puredns resolve .tmp/subs_no_resolved.txt -w .tmp/subdomains_tmp.txt -r $resolvers --resolvers-trusted $resolvers_trusted -l $PUREDNS_PUBLIC_LIMIT --rate-limit-trusted $PUREDNS_TRUSTED_LIMIT &>>"$LOGFILE" @@ -363,7 +358,7 @@ function sub_dns(){ if [ ! 
-f "$called_fn_dir/.${FUNCNAME[0]}" ] || [ "$DIFF" = true ]; then start_subfunc "Running : DNS Subdomain Enumeration" dnsx -retry 3 -a -aaaa -cname -ns -ptr -mx -soa -resp -silent -l subdomains/subdomains.txt -o subdomains/subdomains_cname.txt -r $resolvers_trusted &>>"$LOGFILE" - cat subdomains/subdomains_cname.txt | cut -d '[' -f2 | sed 's/.$//' | grep ".$domain$" | anew -q .tmp/subdomains_dns.txt + [ -s "subdomains/subdomains_cname.txt" ] && cat subdomains/subdomains_cname.txt | cut -d '[' -f2 | sed 's/.$//' | grep ".$domain$" | anew -q .tmp/subdomains_dns.txt puredns resolve .tmp/subdomains_dns.txt -w .tmp/subdomains_dns_resolved.txt -r $resolvers --resolvers-trusted $resolvers_trusted -l $PUREDNS_PUBLIC_LIMIT --rate-limit-trusted $PUREDNS_TRUSTED_LIMIT &>>"$LOGFILE" NUMOFLINES=$(cat .tmp/subdomains_dns_resolved.txt 2>>"$LOGFILE" | grep "\.$domain$\|^$domain$" | anew subdomains/subdomains.txt | wc -l) end_subfunc "${NUMOFLINES} new subs (dns resolution)" ${FUNCNAME[0]} @@ -400,18 +395,18 @@ function sub_scraping(){ touch .tmp/scrap_subs.txt if [ -s "$dir/subdomains/subdomains.txt" ]; then cat subdomains/subdomains.txt | httpx -follow-host-redirects -random-agent -status-code -threads $HTTPX_THREADS -timeout $HTTPX_TIMEOUT -silent -retries 2 -no-color | cut -d ' ' -f1 | grep ".$domain$" | anew -q .tmp/probed_tmp_scrap.txt - cat .tmp/probed_tmp_scrap.txt | httpx -csp-probe -random-agent -status-code -threads $HTTPX_THREADS -timeout $HTTPX_TIMEOUT -silent -retries 2 -no-color | cut -d ' ' -f1 | grep ".$domain$" | anew .tmp/probed_tmp_scrap.txt | unfurl -u domains | anew -q .tmp/scrap_subs.txt - cat .tmp/probed_tmp_scrap.txt | httpx -tls-probe -random-agent -status-code -threads $HTTPX_THREADS -timeout $HTTPX_TIMEOUT -silent -retries 2 -no-color | cut -d ' ' -f1 | grep ".$domain$" | anew .tmp/probed_tmp_scrap.txt | unfurl -u domains | anew -q .tmp/scrap_subs.txt + [ -s ".tmp/probed_tmp_scrap.txt" ] && cat .tmp/probed_tmp_scrap.txt | httpx -csp-probe -random-agent -status-code -threads $HTTPX_THREADS -timeout $HTTPX_TIMEOUT -silent -retries 2 -no-color | cut -d ' ' -f1 | grep ".$domain$" | anew .tmp/probed_tmp_scrap.txt | unfurl -u domains | anew -q .tmp/scrap_subs.txt + [ -s ".tmp/probed_tmp_scrap.txt" ] && cat .tmp/probed_tmp_scrap.txt | httpx -tls-probe -random-agent -status-code -threads $HTTPX_THREADS -timeout $HTTPX_TIMEOUT -silent -retries 2 -no-color | cut -d ' ' -f1 | grep ".$domain$" | anew .tmp/probed_tmp_scrap.txt | unfurl -u domains | anew -q .tmp/scrap_subs.txt if [ "$DEEP" = true ]; then [ -f ".tmp/probed_tmp_scrap.txt" ] && gospider -S .tmp/probed_tmp_scrap.txt --js -t $GOSPIDER_THREADS -d 3 --sitemap --robots -w -r > .tmp/gospider.txt else [ -f ".tmp/probed_tmp_scrap.txt" ] && gospider -S .tmp/probed_tmp_scrap.txt --js -t $GOSPIDER_THREADS -d 2 --sitemap --robots -w -r > .tmp/gospider.txt fi sed -i '/^.\{2048\}./d' .tmp/gospider.txt - cat .tmp/gospider.txt | grep -Eo 'https?://[^ ]+' | sed 's/]$//' | unfurl --unique domains | grep ".$domain$" | anew -q .tmp/scrap_subs.txt + [ -s ".tmp/gospider.txt" ] && cat .tmp/gospider.txt | grep -Eo 'https?://[^ ]+' | sed 's/]$//' | unfurl --unique domains | grep ".$domain$" | anew -q .tmp/scrap_subs.txt puredns resolve .tmp/scrap_subs.txt -w .tmp/scrap_subs_resolved.txt -r $resolvers --resolvers-trusted $resolvers_trusted -l $PUREDNS_PUBLIC_LIMIT --rate-limit-trusted $PUREDNS_TRUSTED_LIMIT &>>"$LOGFILE" NUMOFLINES=$(cat .tmp/scrap_subs_resolved.txt 2>>"$LOGFILE" | grep "\.$domain$\|^$domain$" | anew subdomains/subdomains.txt | tee 
.tmp/diff_scrap.txt | wc -l) - cat .tmp/diff_scrap.txt | httpx -follow-host-redirects -random-agent -status-code -threads $HTTPX_THREADS -timeout $HTTPX_TIMEOUT -silent -retries 2 -no-color | cut -d ' ' -f1 | grep ".$domain$" | anew -q .tmp/probed_tmp_scrap.txt + [ -s ".tmp/diff_scrap.txt" ] && cat .tmp/diff_scrap.txt | httpx -follow-host-redirects -random-agent -status-code -threads $HTTPX_THREADS -timeout $HTTPX_TIMEOUT -silent -retries 2 -no-color | cut -d ' ' -f1 | grep ".$domain$" | anew -q .tmp/probed_tmp_scrap.txt end_subfunc "${NUMOFLINES} new subs (code scraping)" ${FUNCNAME[0]} else end_subfunc "No subdomains to search (code scraping)" ${FUNCNAME[0]} @@ -484,7 +479,7 @@ function sub_permut(){ } function sub_recursive(){ - if { [ ! -f "$called_fn_dir/.${FUNCNAME[0]}" ] || [ "$DIFF" = true ]; } && { [ "$SUBRECURSIVE" = true ] || [ "$DEEP" = true ]; } ; then + if { [ ! -f "$called_fn_dir/.${FUNCNAME[0]}" ] || [ "$DIFF" = true ]; } && { [ "$SUBRECURSIVE" = true ] || [ "$DEEP" = true ]; } ; then start_subfunc "Running : Subdomains recursive search" # Passive recursive @@ -529,7 +524,7 @@ function subtakeover(){ if { [ ! -f "$called_fn_dir/.${FUNCNAME[0]}" ] || [ "$DIFF" = true ]; } && [ "$SUBTAKEOVER" = true ]; then start_func "Looking for possible subdomain takeover" touch .tmp/tko.txt - [ -f "webs/webs.txt" ] && cat webs/webs.txt | nuclei -silent -t ~/nuclei-templates/takeovers/ -r $resolvers_trusted -o .tmp/tko.txt + [ -s "webs/webs.txt" ] && cat webs/webs.txt | nuclei -silent -t ~/nuclei-templates/takeovers/ -r $resolvers_trusted -o .tmp/tko.txt NUMOFLINES=$(cat .tmp/tko.txt 2>>"$LOGFILE" | anew webs/takeover.txt | wc -l) if [ "$NUMOFLINES" -gt 0 ]; then notification "${NUMOFLINES} new possible takeovers found" info @@ -616,7 +611,7 @@ function webprobe_full(){ start_func "Http probing non standard ports" sudo unimap --fast-scan -f subdomains/subdomains.txt --ports $UNCOMMON_PORTS_WEB -q -k --url-output | anew -q .tmp/nmap_uncommonweb.txt - cat .tmp/nmap_uncommonweb.txt | httpx -follow-host-redirects -random-agent -status-code -threads $HTTPX_UNCOMMONPORTS_THREADS -timeout $HTTPX_UNCOMMONPORTS_TIMEOUT -silent -retries 2 -no-color | cut -d ' ' -f1 | grep ".$domain" | anew -q .tmp/probed_uncommon_ports_tmp.txt + [ -s ".tmp/nmap_uncommonweb.txt" ] && cat .tmp/nmap_uncommonweb.txt | httpx -follow-host-redirects -random-agent -status-code -threads $HTTPX_UNCOMMONPORTS_THREADS -timeout $HTTPX_UNCOMMONPORTS_TIMEOUT -silent -retries 2 -no-color | cut -d ' ' -f1 | grep ".$domain" | anew -q .tmp/probed_uncommon_ports_tmp.txt #timeout_secs=$(($(cat subdomains/subdomains.txt | wc -l)*5+10)) #cat subdomains/subdomains.txt | timeout $timeout_secs naabu -p $UNCOMMON_PORTS_WEB -o .tmp/nmap_uncommonweb.txt &>>"$LOGFILE" && uncommon_ports_checked=$(cat .tmp/nmap_uncommonweb.txt | cut -d ':' -f2 | sort -u | sed -e 'H;${x;s/\n/,/g;s/^,//;p;};d') @@ -625,7 +620,7 @@ function webprobe_full(){ #fi NUMOFLINES=$(cat .tmp/probed_uncommon_ports_tmp.txt 2>>"$LOGFILE" | anew webs/webs_uncommon_ports.txt | wc -l) notification "Uncommon web ports: ${NUMOFLINES} new websites" good - cat webs/webs_uncommon_ports.txt 2>>"$LOGFILE" + [ -s "webs/webs_uncommon_ports.txt" ] && cat webs/webs_uncommon_ports.txt end_func "Results are saved in $domain/webs/webs_uncommon_ports.txt" ${FUNCNAME[0]} if [ "$PROXY" = true ] && [ -n "$proxy_url" ] && [[ $(cat webs/webs_uncommon_ports.txt| wc -l) -le 1500 ]]; then notification "Sending websites uncommon ports to proxy" info @@ -665,7 +660,7 @@ function favicon(){ start_func 
"Favicon Ip Lookup" cd "$tools/fav-up" || { echo "Failed to cd to $dir in ${FUNCNAME[0]} @ line ${LINENO}"; exit 1; } python3 favUp.py -w "$domain" -sc -o favicontest.json &>>"$LOGFILE" - if [ -f "favicontest.json" ]; then + if [ -s "favicontest.json" ]; then cat favicontest.json | jq -r '.found_ips' 2>>"$LOGFILE" | grep -v "not-found" > favicontest.txt sed -i "s/|/\n/g" favicontest.txt cat favicontest.txt 2>>"$LOGFILE" @@ -690,10 +685,10 @@ function portscan(){ echo "$sub $(dig +short a $sub | tail -n1)" | anew -q .tmp/subs_ips.txt done awk '{ print $2 " " $1}' .tmp/subs_ips.txt | sort -k2 -n | anew -q hosts/subs_ips_vhosts.txt - cat hosts/subs_ips_vhosts.txt 2>>"$LOGFILE" | cut -d ' ' -f1 | grep -Eiv "^(127|10|169\.154|172\.1[6789]|172\.2[0-9]|172\.3[01]|192\.168)\." | grep -oE "\b([0-9]{1,3}\.){3}[0-9]{1,3}\b" | anew -q hosts/ips.txt - cat hosts/ips.txt 2>>"$LOGFILE" | cf-check | grep -Eiv "^(127|10|169\.154|172\.1[6789]|172\.2[0-9]|172\.3[01]|192\.168)\." | grep -oE "\b([0-9]{1,3}\.){3}[0-9]{1,3}\b" | anew -q .tmp/ips_nowaf.txt + [ -s "hosts/subs_ips_vhosts.txt" ] && cat hosts/subs_ips_vhosts.txt | cut -d ' ' -f1 | grep -Eiv "^(127|10|169\.154|172\.1[6789]|172\.2[0-9]|172\.3[01]|192\.168)\." | grep -oE "\b([0-9]{1,3}\.){3}[0-9]{1,3}\b" | anew -q hosts/ips.txt + [ -s "hosts/ips.txt" ] && cat hosts/ips.txt | cf-check | grep -Eiv "^(127|10|169\.154|172\.1[6789]|172\.2[0-9]|172\.3[01]|192\.168)\." | grep -oE "\b([0-9]{1,3}\.){3}[0-9]{1,3}\b" | anew -q .tmp/ips_nowaf.txt printf "${bblue}\n Resolved IP addresses (No WAF) ${reset}\n\n"; - cat .tmp/ips_nowaf.txt 2>>"$LOGFILE" | sort + [ -s ".tmp/ips_nowaf.txt" ] && cat .tmp/ips_nowaf.txt | sort printf "${bblue}\n Scanning ports... ${reset}\n\n"; if [ "$PORTSCAN_PASSIVE" = true ] && [ ! -f "hosts/portscan_passive.txt" ]; then for sub in $(cat hosts/ips.txt); do @@ -717,7 +712,7 @@ function cloudprovider(){ if { [ ! 
-f "$called_fn_dir/.${FUNCNAME[0]}" ] || [ "$DIFF" = true ]; } && [ "$CLOUD_IP" = true ]; then start_func "Cloud provider check" cd "$tools/ip2provider" || { echo "Failed to cd directory in ${FUNCNAME[0]} @ line ${LINENO}"; exit 1; } - cat $dir/hosts/ips.txt | grep -oE "\b([0-9]{1,3}\.){3}[0-9]{1,3}\b" | ./ip2provider.py | anew -q $dir/hosts/cloud_providers.txt &>>"$LOGFILE" + [ -s "$dir/hosts/ips.txt" ] && cat $dir/hosts/ips.txt | grep -oE "\b([0-9]{1,3}\.){3}[0-9]{1,3}\b" | ./ip2provider.py | anew -q $dir/hosts/cloud_providers.txt &>>"$LOGFILE" cd "$dir" || { echo "Failed to cd directory in ${FUNCNAME[0]} @ line ${LINENO}"; exit 1; } end_func "Results are saved in hosts/cloud_providers.txt" ${FUNCNAME[0]} else @@ -738,7 +733,7 @@ function waf_checks(){ start_func "Website's WAF detection" if [ -s "./webs/webs.txt" ]; then wafw00f -i webs/webs.txt -o .tmp/wafs.txt &>>"$LOGFILE" - if [ -f ".tmp/wafs.txt" ]; then + if [ -s ".tmp/wafs.txt" ]; then cat .tmp/wafs.txt | sed -e 's/^[ \t]*//' -e 's/ \+ /\t/g' -e '/(None)/d' | tr -s "\t" ";" > webs/webs_wafs.txt NUMOFLINES=$(cat webs/webs_wafs.txt 2>>"$LOGFILE" | wc -l) notification "${NUMOFLINES} websites protected by waf" info @@ -763,17 +758,19 @@ function nuclei_check(){ start_func "Templates based web scanner" nuclei -update-templates &>>"$LOGFILE" mkdir -p nuclei_output - printf "${yellow}\n Running : Nuclei Info${reset}\n\n" - cat webs/webs.txt | nuclei -silent -t ~/nuclei-templates/ -severity info -r $resolvers_trusted -o nuclei_output/info.txt - printf "${yellow}\n\n Running : Nuclei Low${reset}\n\n" - cat webs/webs.txt | nuclei -silent -t ~/nuclei-templates/ -severity low -r $resolvers_trusted -o nuclei_output/low.txt - printf "${yellow}\n\n Running : Nuclei Medium${reset}\n\n" - cat webs/webs.txt | nuclei -silent -t ~/nuclei-templates/ -severity medium -r $resolvers_trusted -o nuclei_output/medium.txt - printf "${yellow}\n\n Running : Nuclei High${reset}\n\n" - cat webs/webs.txt | nuclei -silent -t ~/nuclei-templates/ -severity high -r $resolvers_trusted -o nuclei_output/high.txt - printf "${yellow}\n\n Running : Nuclei Critical${reset}\n\n" - cat webs/webs.txt | nuclei -silent -t ~/nuclei-templates/ -severity critical -r $resolvers_trusted -o nuclei_output/critical.txt - printf "\n\n" + if [ -s "webs/webs.txt" ]; then + printf "${yellow}\n Running : Nuclei Info${reset}\n\n" + cat webs/webs.txt | nuclei -silent -t ~/nuclei-templates/ -severity info -r $resolvers_trusted -o nuclei_output/info.txt + printf "${yellow}\n\n Running : Nuclei Low${reset}\n\n" + cat webs/webs.txt | nuclei -silent -t ~/nuclei-templates/ -severity low -r $resolvers_trusted -o nuclei_output/low.txt + printf "${yellow}\n\n Running : Nuclei Medium${reset}\n\n" + cat webs/webs.txt | nuclei -silent -t ~/nuclei-templates/ -severity medium -r $resolvers_trusted -o nuclei_output/medium.txt + printf "${yellow}\n\n Running : Nuclei High${reset}\n\n" + cat webs/webs.txt | nuclei -silent -t ~/nuclei-templates/ -severity high -r $resolvers_trusted -o nuclei_output/high.txt + printf "${yellow}\n\n Running : Nuclei Critical${reset}\n\n" + cat webs/webs.txt | nuclei -silent -t ~/nuclei-templates/ -severity critical -r $resolvers_trusted -o nuclei_output/critical.txt + printf "\n\n" + fi end_func "Results are saved in $domain/nuclei_output folder" ${FUNCNAME[0]} else if [ "$NUCLEICHECK" = false ]; then @@ -793,7 +790,7 @@ function fuzz(){ printf "${yellow}\n\n Running: Fuzzing in ${sub}${reset}\n" sub_out=$(echo $sub | sed -e 's|^[^/]*//||' -e 's|/.*$||') ffuf -mc all -fc 404 -ac 
-t $FFUF_THREADS -sf -s -H "${HEADER}" -w $fuzz_wordlist -maxtime $FFUF_MAXTIME -u $sub/FUZZ -or -of csv -o $dir/fuzzing/${sub_out}.csv &>/dev/null - [ -f "$dir/fuzzing/${sub_out}.csv" ] && cat $dir/fuzzing/${sub_out}.csv | cut -d ',' -f2,5,6 | tr ',' ' ' | awk '{ print $2 " " $3 " " $1}' | tail -n +2 | sort -k1 | anew -q $dir/fuzzing/${sub_out}.txt + [ -s "$dir/fuzzing/${sub_out}.csv" ] && cat $dir/fuzzing/${sub_out}.csv | cut -d ',' -f2,5,6 | tr ',' ' ' | awk '{ print $2 " " $3 " " $1}' | tail -n +2 | sort -k1 | anew -q $dir/fuzzing/${sub_out}.txt rm $dir/fuzzing/${sub_out}.csv 2>>"$LOGFILE" done end_func "Results are saved in $domain/fuzzing/*subdomain*.txt" ${FUNCNAME[0]} @@ -852,20 +849,22 @@ function params(){ if { [ ! -f "$called_fn_dir/.${FUNCNAME[0]}" ] || [ "$DIFF" = true ]; } && [ "$PARAMS" = true ]; then start_func "Parameter Discovery" printf "${yellow}\n\n Running : Searching params with paramspider${reset}\n" - cat webs/webs.txt | sed -r "s/https?:\/\///" | anew -q .tmp/probed_nohttp.txt - interlace -tL .tmp/probed_nohttp.txt -threads 10 -c "python3 $tools/ParamSpider/paramspider.py -d _target_ -l high -q --exclude eot,jpg,jpeg,gif,css,tif,tiff,png,ttf,otf,woff,woff2,ico,pdf,svg,txt,js" &>/dev/null - cat output/*.txt 2>>"$LOGFILE" | anew -q .tmp/param_tmp.txt - sed '/^FUZZ/d' -i .tmp/param_tmp.txt - rm -rf output/ 2>>"$LOGFILE" - if [ "$DEEP" = true ]; then - printf "${yellow}\n\n Running : Checking ${domain} with Arjun${reset}\n" - arjun -i .tmp/param_tmp.txt -t $ARJUN_THREADS -oT webs/param.txt &>>"$LOGFILE" - else - if [[ $(cat .tmp/param_tmp.txt | wc -l) -le 50 ]]; then + if [ -s "webs/webs.txt" ]; then + cat webs/webs.txt | sed -r "s/https?:\/\///" | anew -q .tmp/probed_nohttp.txt + interlace -tL .tmp/probed_nohttp.txt -threads 10 -c "python3 $tools/ParamSpider/paramspider.py -d _target_ -l high -q --exclude eot,jpg,jpeg,gif,css,tif,tiff,png,ttf,otf,woff,woff2,ico,pdf,svg,txt,js" &>/dev/null + cat output/*.txt 2>>"$LOGFILE" | anew -q .tmp/param_tmp.txt + sed '/^FUZZ/d' -i .tmp/param_tmp.txt + rm -rf output/ 2>>"$LOGFILE" + if [ "$DEEP" = true ]; then printf "${yellow}\n\n Running : Checking ${domain} with Arjun${reset}\n" arjun -i .tmp/param_tmp.txt -t $ARJUN_THREADS -oT webs/param.txt &>>"$LOGFILE" else - cp .tmp/param_tmp.txt webs/param.txt + if [[ $(cat .tmp/param_tmp.txt | wc -l) -le 50 ]]; then + printf "${yellow}\n\n Running : Checking ${domain} with Arjun${reset}\n" + arjun -i .tmp/param_tmp.txt -t $ARJUN_THREADS -oT webs/param.txt &>>"$LOGFILE" + else + cp .tmp/param_tmp.txt webs/param.txt + fi fi fi end_func "Results are saved in $domain/webs/param.txt" ${FUNCNAME[0]} @@ -882,34 +881,36 @@ function urlchecks(){ if { [ ! -f "$called_fn_dir/.${FUNCNAME[0]}" ] || [ "$DIFF" = true ]; } && [ "$URL_CHECK" = true ]; then start_func "URL Extraction" mkdir -p js - cat webs/webs.txt | waybackurls | anew -q .tmp/url_extract_tmp.txt - cat webs/webs.txt | gauplus -t $GAUPLUS_THREADS -subs | anew -q .tmp/url_extract_tmp.txt - diff_webs=$(diff <(sort -u .tmp/probed_tmp.txt) <(sort -u webs/webs.txt) | wc -l) - if [ $diff_webs != "0" ] || [ ! -s ".tmp/gospider.txt" ]; then + if [ -s "webs/webs.txt" ]; then + cat webs/webs.txt | waybackurls | anew -q .tmp/url_extract_tmp.txt + cat webs/webs.txt | gauplus -t $GAUPLUS_THREADS -subs | anew -q .tmp/url_extract_tmp.txt + diff_webs=$(diff <(sort -u .tmp/probed_tmp.txt) <(sort -u webs/webs.txt) | wc -l) + if [ $diff_webs != "0" ] || [ ! 
-s ".tmp/gospider.txt" ]; then + if [ "$DEEP" = true ]; then + gospider -S webs/webs.txt --js -t $GOSPIDER_THREADS -d 3 --sitemap --robots -w -r > .tmp/gospider.txt + else + gospider -S webs/webs.txt --js -t $GOSPIDER_THREADS -d 2 --sitemap --robots -w -r > .tmp/gospider.txt + fi + fi + sed -i '/^.\{2048\}./d' .tmp/gospider.txt + [ -s ".tmp/gospider.txt" ] && cat .tmp/gospider.txt | grep -Eo 'https?://[^ ]+' | sed 's/]$//' | grep ".$domain$" | anew -q .tmp/url_extract_tmp.txt + if [ -s "${GITHUB_TOKENS}" ]; then + github-endpoints -q -k -d $domain -t ${GITHUB_TOKENS} -o .tmp/github-endpoints.txt &>>"$LOGFILE" + [ -s ".tmp/github-endpoints.txt" ] && cat .tmp/github-endpoints.txt | anew -q .tmp/url_extract_tmp.txt + fi + [ -s ".tmp/url_extract_tmp.txt" ] && cat .tmp/url_extract_tmp.txt | grep "${domain}" | grep -Ei "\.(js)" | anew -q js/url_extract_js.txt if [ "$DEEP" = true ]; then - [ -f "webs/webs.txt" ] && gospider -S webs/webs.txt --js -t $GOSPIDER_THREADS -d 3 --sitemap --robots -w -r > .tmp/gospider.txt - else - [ -f "webs/webs.txt" ] && gospider -S webs/webs.txt --js -t $GOSPIDER_THREADS -d 2 --sitemap --robots -w -r > .tmp/gospider.txt + [ -s "js/url_extract_js.txt" ] && cat js/url_extract_js.txt | python3 $tools/JSA/jsa.py | anew -q .tmp/url_extract_tmp.txt + fi + cat .tmp/url_extract_tmp.txt webs/param.txt 2>>"$LOGFILE" | grep "${domain}" | grep "=" | qsreplace -a 2>>"$LOGFILE" | grep -Eiv "\.(eot|jpg|jpeg|gif|css|tif|tiff|png|ttf|otf|woff|woff2|ico|pdf|svg|txt|js)$" | anew -q .tmp/url_extract_tmp2.txt + uddup -u .tmp/url_extract_tmp2.txt -o .tmp/url_extract_uddup.txt &>>"$LOGFILE" + NUMOFLINES=$(cat .tmp/url_extract_uddup.txt 2>>"$LOGFILE" | anew webs/url_extract.txt | wc -l) + notification "${NUMOFLINES} new urls with params" info + end_func "Results are saved in $domain/webs/url_extract.txt" ${FUNCNAME[0]} + if [ "$PROXY" = true ] && [ -n "$proxy_url" ] && [[ $(cat webs/url_extract.txt | wc -l) -le 1500 ]]; then + notification "Sending urls to proxy" info + ffuf -mc all -w webs/url_extract.txt -u FUZZ -replay-proxy $proxy_url &>>"$LOGFILE" fi - fi - sed -i '/^.\{2048\}./d' .tmp/gospider.txt - [ -f ".tmp/gospider.txt" ] && cat .tmp/gospider.txt | grep -Eo 'https?://[^ ]+' | sed 's/]$//' | grep ".$domain$" | anew -q .tmp/url_extract_tmp.txt - if [ -s "${GITHUB_TOKENS}" ]; then - github-endpoints -q -k -d $domain -t ${GITHUB_TOKENS} -o .tmp/github-endpoints.txt &>>"$LOGFILE" - cat .tmp/github-endpoints.txt 2>>"$LOGFILE" | anew -q .tmp/url_extract_tmp.txt - fi - cat .tmp/url_extract_tmp.txt | grep "${domain}" | grep -Ei "\.(js)" | anew -q js/url_extract_js.txt - if [ "$DEEP" = true ]; then - [ -f "js/url_extract_js.txt" ] && cat js/url_extract_js.txt | python3 $tools/JSA/jsa.py | anew -q .tmp/url_extract_tmp.txt - fi - cat .tmp/url_extract_tmp.txt webs/param.txt 2>>"$LOGFILE" | grep "${domain}" | grep "=" | qsreplace -a 2>>"$LOGFILE" | grep -Eiv "\.(eot|jpg|jpeg|gif|css|tif|tiff|png|ttf|otf|woff|woff2|ico|pdf|svg|txt|js)$" | anew -q .tmp/url_extract_tmp2.txt - uddup -u .tmp/url_extract_tmp2.txt -o .tmp/url_extract_uddup.txt &>>"$LOGFILE" - NUMOFLINES=$(cat .tmp/url_extract_uddup.txt 2>>"$LOGFILE" | anew webs/url_extract.txt | wc -l) - notification "${NUMOFLINES} new urls with params" info - end_func "Results are saved in $domain/webs/url_extract.txt" ${FUNCNAME[0]} - if [ "$PROXY" = true ] && [ -n "$proxy_url" ] && [[ $(cat webs/url_extract.txt | wc -l) -le 1500 ]]; then - notification "Sending urls to proxy" info - ffuf -mc all -w webs/url_extract.txt -u FUZZ -replay-proxy 
$proxy_url &>>"$LOGFILE" fi else printf "${yellow} ${FUNCNAME[0]} is already processed, to force executing ${FUNCNAME[0]} delete\n $called_fn_dir/.${FUNCNAME[0]} ${reset}\n\n" @@ -924,10 +925,11 @@ function url_gf(){ gf ssti webs/url_extract.txt | anew -q gf/ssti.txt gf ssrf webs/url_extract.txt | anew -q gf/ssrf.txt gf sqli webs/url_extract.txt | anew -q gf/sqli.txt - gf redirect webs/url_extract.txt | anew -q gf/redirect.txt && cat gf/ssrf.txt | anew -q gf/redirect.txt + gf redirect webs/url_extract.txt | anew -q gf/redirect.txt + [ -f "gf/ssrf.txt" ] && cat gf/ssrf.txt | anew -q gf/redirect.txt gf rce webs/url_extract.txt | anew -q gf/rce.txt gf potential webs/url_extract.txt | cut -d ':' -f3-5 |anew -q gf/potential.txt - cat .tmp/url_extract_tmp.txt | grep -Eiv "\.(eot|jpg|jpeg|gif|css|tif|tiff|png|ttf|otf|woff|woff2|ico|pdf|svg|txt|js)$" | unfurl -u format %s://%d%p | anew -q gf/endpoints.txt + [ -s ".tmp/url_extract_tmp.txt" ] && cat .tmp/url_extract_tmp.txt | grep -Eiv "\.(eot|jpg|jpeg|gif|css|tif|tiff|png|ttf|otf|woff|woff2|ico|pdf|svg|txt|js)$" | unfurl -u format %s://%d%p | anew -q gf/endpoints.txt gf lfi webs/url_extract.txt | anew -q gf/lfi.txt end_func "Results are saved in $domain/gf folder" ${FUNCNAME[0]} else @@ -947,7 +949,7 @@ function url_ext(){ NUMOFLINES=$(cat .tmp/url_extract_tmp.txt | grep -Ei "\.(${t})($|\/|\?)" | sort -u | wc -l) if [[ ${NUMOFLINES} -gt 0 ]]; then echo -e "\n############################\n + ${t} + \n############################\n" >> webs/urls_by_ext.txt - cat .tmp/url_extract_tmp.txt | grep -Ei "\.(${t})($|\/|\?)" | sort -u >> webs/urls_by_ext.txt + [ -s ".tmp/url_extract_tmp.txt" ] && cat .tmp/url_extract_tmp.txt | grep -Ei "\.(${t})($|\/|\?)" | sort -u >> webs/urls_by_ext.txt fi done end_func "Results are saved in $domain/webs/urls_by_ext.txt" ${FUNCNAME[0]} @@ -968,7 +970,7 @@ function jschecks(){ cat js/url_extract_js.txt | cut -d '?' 
-f 1 | grep -iE "\.js$" | grep "$domain$" | anew -q js/jsfile_links.txt cat js/url_extract_js.txt | subjs | grep "$domain$" | anew -q js/jsfile_links.txt printf "${yellow} Running : Resolving JS Urls 2/5${reset}\n" - cat js/jsfile_links.txt | httpx -follow-redirects -random-agent -silent -timeout $HTTPX_TIMEOUT -threads $HTTPX_THREADS -status-code -retries 2 -no-color | grep "[200]" | cut -d ' ' -f1 | anew -q js/js_livelinks.txt + [ -s "js/jsfile_links.txt" ] && cat js/jsfile_links.txt | httpx -follow-redirects -random-agent -silent -timeout $HTTPX_TIMEOUT -threads $HTTPX_THREADS -status-code -retries 2 -no-color | grep "[200]" | cut -d ' ' -f1 | anew -q js/js_livelinks.txt printf "${yellow} Running : Gathering endpoints 3/5${reset}\n" if [ -s "js/js_livelinks.txt" ]; then interlace -tL js/js_livelinks.txt -threads 10 -c "python3 $tools/LinkFinder/linkfinder.py -d -i _target_ -o cli >> .tmp/js_endpoints.txt" &>/dev/null @@ -978,13 +980,9 @@ function jschecks(){ cat .tmp/js_endpoints.txt | anew -q js/js_endpoints.txt fi printf "${yellow} Running : Gathering secrets 4/5${reset}\n" - if [ -s "js/js_livelinks.txt" ]; then - cat js/js_livelinks.txt | nuclei -silent -t ~/nuclei-templates/exposures/tokens/ -r $resolvers_trusted -o js/js_secrets.txt &>>"$LOGFILE" - fi + [ -s "js/js_livelinks.txt" ] && cat js/js_livelinks.txt | nuclei -silent -t ~/nuclei-templates/exposures/tokens/ -r $resolvers_trusted -o js/js_secrets.txt &>>"$LOGFILE" printf "${yellow} Running : Building wordlist 5/5${reset}\n" - if [ -s "js/js_livelinks.txt" ]; then - cat js/js_livelinks.txt | python3 $tools/getjswords.py 2>>"$LOGFILE" | anew -q webs/dict_words.txt - fi + [ -s "js/js_livelinks.txt" ] && cat js/js_livelinks.txt | python3 $tools/getjswords.py 2>>"$LOGFILE" | anew -q webs/dict_words.txt end_func "Results are saved in $domain/js folder" ${FUNCNAME[0]} else end_func "No JS urls found for $domain, function skipped" ${FUNCNAME[0]} @@ -1001,13 +999,13 @@ function jschecks(){ function wordlist_gen(){ if { [ ! -f "$called_fn_dir/.${FUNCNAME[0]}" ] || [ "$DIFF" = true ]; } && [ "$WORDLIST" = true ]; then start_func "Wordlist generation" - cat .tmp/url_extract_tmp.txt | unfurl -u keys | sed 's/[][]//g' | sed 's/[#]//g' | sed 's/[}{]//g' | anew -q webs/dict_words.txt - cat .tmp/url_extract_tmp.txt | unfurl -u values | sed 's/[][]//g' | sed 's/[#]//g' | sed 's/[}{]//g' | anew -q webs/dict_words.txt - cat .tmp/url_extract_tmp.txt | tr "[:punct:]" "\n" | anew -q webs/dict_words.txt - if [ -s ".tmp/js_endpoints.txt" ]; then - cat .tmp/js_endpoints.txt | unfurl -u path | anew -q webs/dict_paths.txt + if [ -s ".tmp/url_extract_tmp.txt" ]; then + cat .tmp/url_extract_tmp.txt | unfurl -u keys | sed 's/[][]//g' | sed 's/[#]//g' | sed 's/[}{]//g' | anew -q webs/dict_words.txt + cat .tmp/url_extract_tmp.txt | unfurl -u values | sed 's/[][]//g' | sed 's/[#]//g' | sed 's/[}{]//g' | anew -q webs/dict_words.txt + cat .tmp/url_extract_tmp.txt | tr "[:punct:]" "\n" | anew -q webs/dict_words.txt fi - cat .tmp/url_extract_tmp.txt | unfurl -u path | anew -q webs/dict_paths.txt + [ -s ".tmp/js_endpoints.txt" ] && cat .tmp/js_endpoints.txt | unfurl -u path | anew -q webs/dict_paths.txt + [ -s ".tmp/url_extract_tmp.txt" ] && cat .tmp/url_extract_tmp.txt | unfurl -u path | anew -q webs/dict_paths.txt end_func "Results are saved in $domain/webs/dict_[words|paths].txt" ${FUNCNAME[0]} else if [ "$WORDLIST" = false ]; then @@ -1027,13 +1025,13 @@ function brokenLinks(){ start_func "Broken links checks" if [ ! 
-s ".tmp/gospider.txt" ]; then if [ "$DEEP" = true ]; then - gospider -S webs/webs.txt --js -t $GOSPIDER_THREADS -d 3 --sitemap --robots -w -r > .tmp/gospider.txt + [ -s "webs/webs.txt" ] && gospider -S webs/webs.txt --js -t $GOSPIDER_THREADS -d 3 --sitemap --robots -w -r > .tmp/gospider.txt else - gospider -S webs/webs.txt --js -t $GOSPIDER_THREADS -d 2 --sitemap --robots -w -r > .tmp/gospider.txt + [ -s "webs/webs.txt" ] && gospider -S webs/webs.txt --js -t $GOSPIDER_THREADS -d 2 --sitemap --robots -w -r > .tmp/gospider.txt fi fi - sed -i '/^.\{2048\}./d' .tmp/gospider.txt - cat .tmp/gospider.txt | grep -Eo 'https?://[^ ]+' | sed 's/]$//' | sort -u | httpx -follow-redirects -random-agent -status-code -threads $HTTPX_THREADS -timeout $HTTPX_TIMEOUT -silent -retries 2 -no-color | grep "\[4" | cut -d ' ' -f1 | anew -q .tmp/brokenLinks_total.txt + [ -s ".tmp/gospider.txt" ] && sed -i '/^.\{2048\}./d' .tmp/gospider.txt + [ -s ".tmp/gospider.txt" ] && cat .tmp/gospider.txt | grep -Eo 'https?://[^ ]+' | sed 's/]$//' | sort -u | httpx -follow-redirects -random-agent -status-code -threads $HTTPX_THREADS -timeout $HTTPX_TIMEOUT -silent -retries 2 -no-color | grep "\[4" | cut -d ' ' -f1 | anew -q .tmp/brokenLinks_total.txt NUMOFLINES=$(cat .tmp/brokenLinks_total.txt 2>>"$LOGFILE" | anew webs/brokenLinks.txt | wc -l) notification "${NUMOFLINES} new broken links found" info end_func "Results are saved in webs/brokenLinks.txt" ${FUNCNAME[0]} @@ -1049,13 +1047,13 @@ function brokenLinks(){ function xss(){ if { [ ! -f "$called_fn_dir/.${FUNCNAME[0]}" ] || [ "$DIFF" = true ]; } && [ "$XSS" = true ] && [ -s "gf/xss.txt" ]; then start_func "XSS Analysis" - cat gf/xss.txt | qsreplace FUZZ | Gxss -c 100 -p Xss | anew -q .tmp/xss_reflected.txt + [ -s "gf/xss.txt" ] && cat gf/xss.txt | qsreplace FUZZ | Gxss -c 100 -p Xss | anew -q .tmp/xss_reflected.txt if [ "$DEEP" = true ]; then if [ -n "$XSS_SERVER" ]; then - cat .tmp/xss_reflected.txt | dalfox pipe --silence --no-color --no-spinner --mass --mass-worker 100 --multicast --skip-bav -b ${XSS_SERVER} -w $DALFOX_THREADS 2>>"$LOGFILE" | anew -q vulns/xss.txt + [ -s ".tmp/xss_reflected.txt" ] && cat .tmp/xss_reflected.txt | dalfox pipe --silence --no-color --no-spinner --mass --mass-worker 100 --multicast --skip-bav -b ${XSS_SERVER} -w $DALFOX_THREADS 2>>"$LOGFILE" | anew -q vulns/xss.txt else printf "${yellow}\n No XSS_SERVER defined, blind xss skipped\n\n" - cat .tmp/xss_reflected.txt | dalfox pipe --silence --no-color --no-spinner --mass --mass-worker 100 --multicast --skip-bav -w $DALFOX_THREADS 2>>"$LOGFILE" | anew -q vulns/xss.txt + [ -s ".tmp/xss_reflected.txt" ] && cat .tmp/xss_reflected.txt | dalfox pipe --silence --no-color --no-spinner --mass --mass-worker 100 --multicast --skip-bav -w $DALFOX_THREADS 2>>"$LOGFILE" | anew -q vulns/xss.txt fi else if [[ $(cat .tmp/xss_reflected.txt | wc -l) -le 500 ]]; then @@ -1085,7 +1083,7 @@ function cors(){ if { [ ! -f "$called_fn_dir/.${FUNCNAME[0]}" ] || [ "$DIFF" = true ]; } && [ "$CORS" = true ]; then start_func "CORS Scan" python3 $tools/Corsy/corsy.py -i webs/webs.txt > webs/cors.txt &>>"$LOGFILE" - cat webs/cors.txt 2>>"$LOGFILE" + [ -s "webs/cors.txt" ] && cat webs/cors.txt end_func "Results are saved in webs/cors.txt" ${FUNCNAME[0]} else if [ "$CORS" = false ]; then @@ -1100,9 +1098,11 @@ function open_redirect(){ if { [ ! 
-f "$called_fn_dir/.${FUNCNAME[0]}" ] || [ "$DIFF" = true ]; } && [ "$OPEN_REDIRECT" = true ] && [ -s "gf/redirect.txt" ]; then start_func "Open redirects checks" if [ "$DEEP" = true ]; then - cat gf/redirect.txt | qsreplace FUZZ | anew -q .tmp/tmp_redirect.txt - python3 $tools/OpenRedireX/openredirex.py -l .tmp/tmp_redirect.txt --keyword FUZZ -p $tools/OpenRedireX/payloads.txt 2>>"$LOGFILE" | grep "^http" > vulns/redirect.txt - sed -r -i "s/\x1B\[([0-9]{1,3}(;[0-9]{1,2})?)?[mGK]//g" vulns/redirect.txt + if [ -s "webs/cors.txt" ]; then + cat gf/redirect.txt | qsreplace FUZZ | anew -q .tmp/tmp_redirect.txt + python3 $tools/OpenRedireX/openredirex.py -l .tmp/tmp_redirect.txt --keyword FUZZ -p $tools/OpenRedireX/payloads.txt 2>>"$LOGFILE" | grep "^http" > vulns/redirect.txt + sed -r -i "s/\x1B\[([0-9]{1,3}(;[0-9]{1,2})?)?[mGK]//g" vulns/redirect.txt + fi end_func "Results are saved in vulns/redirect.txt" ${FUNCNAME[0]} else if [[ $(cat gf/redirect.txt | wc -l) -le 1000 ]]; then @@ -1131,14 +1131,16 @@ function ssrf_checks(){ if [ -n "$COLLAB_SERVER" ]; then start_func "SSRF checks" if [ "$DEEP" = true ]; then - cat gf/ssrf.txt | qsreplace FUZZ | anew -q .tmp/tmp_ssrf.txt - COLLAB_SERVER_FIX=$(echo $COLLAB_SERVER | sed -r "s/https?:\/\///") - echo $COLLAB_SERVER_FIX | anew -q .tmp/ssrf_server.txt - echo $COLLAB_SERVER | anew -q .tmp/ssrf_server.txt - for url in $(cat .tmp/tmp_ssrf.txt); do - ffuf -v -H "${HEADER}" -t $FFUF_THREADS -w .tmp/ssrf_server.txt -u $url &>/dev/null | grep "URL" | sed 's/| URL | //' | anew -q vulns/ssrf.txt - done - python3 $tools/ssrf.py $dir/gf/ssrf.txt $COLLAB_SERVER_FIX 2>>"$LOGFILE" | anew -q vulns/ssrf.txt + if [ -s "gf/ssrf.txt" ]; then + cat gf/ssrf.txt | qsreplace FUZZ | anew -q .tmp/tmp_ssrf.txt + COLLAB_SERVER_FIX=$(echo $COLLAB_SERVER | sed -r "s/https?:\/\///") + echo $COLLAB_SERVER_FIX | anew -q .tmp/ssrf_server.txt + echo $COLLAB_SERVER | anew -q .tmp/ssrf_server.txt + for url in $(cat .tmp/tmp_ssrf.txt); do + ffuf -v -H "${HEADER}" -t $FFUF_THREADS -w .tmp/ssrf_server.txt -u $url &>/dev/null | grep "URL" | sed 's/| URL | //' | anew -q vulns/ssrf.txt + done + python3 $tools/ssrf.py $dir/gf/ssrf.txt $COLLAB_SERVER_FIX 2>>"$LOGFILE" | anew -q vulns/ssrf.txt + fi end_func "Results are saved in vulns/ssrf.txt" ${FUNCNAME[0]} else if [[ $(cat gf/ssrf.txt | wc -l) -le 1000 ]]; then @@ -1188,10 +1190,12 @@ function crlf_checks(){ function lfi(){ if { [ ! -f "$called_fn_dir/.${FUNCNAME[0]}" ] || [ "$DIFF" = true ]; } && [ "$LFI" = true ] && [ -s "gf/lfi.txt" ]; then start_func "LFI checks" - cat gf/lfi.txt | qsreplace FUZZ | anew -q .tmp/tmp_lfi.txt - for url in $(cat .tmp/tmp_lfi.txt); do - ffuf -v -t $FFUF_THREADS -H "${HEADER}" -w $lfi_wordlist -u $url -mr "root:" &>/dev/null | grep "URL" | sed 's/| URL | //' | anew -q vulns/lfi.txt - done + if [ -s "gf/lfi.txt" ]; then + cat gf/lfi.txt | qsreplace FUZZ | anew -q .tmp/tmp_lfi.txt + for url in $(cat .tmp/tmp_lfi.txt); do + ffuf -v -t $FFUF_THREADS -H "${HEADER}" -w $lfi_wordlist -u $url -mr "root:" &>/dev/null | grep "URL" | sed 's/| URL | //' | anew -q vulns/lfi.txt + done + fi end_func "Results are saved in vulns/lfi.txt" ${FUNCNAME[0]} else if [ "$LFI" = false ]; then @@ -1207,10 +1211,12 @@ function lfi(){ function ssti(){ if { [ ! 
-f "$called_fn_dir/.${FUNCNAME[0]}" ] || [ "$DIFF" = true ]; } && [ "$SSTI" = true ] && [ -s "gf/ssti.txt" ]; then start_func "SSTI checks" - cat gf/ssti.txt | qsreplace FUZZ | anew -q .tmp/tmp_ssti.txt - for url in $(cat .tmp/tmp_ssti.txt); do - ffuf -v -t $FFUF_THREADS -H "${HEADER}" -w $ssti_wordlist -u $url -mr "ssti49" &>/dev/null | grep "URL" | sed 's/| URL | //' | anew -q vulns/ssti.txt - done + if [ -s "gf/ssti.txt" ]; then + cat gf/ssti.txt | qsreplace FUZZ | anew -q .tmp/tmp_ssti.txt + for url in $(cat .tmp/tmp_ssti.txt); do + ffuf -v -t $FFUF_THREADS -H "${HEADER}" -w $ssti_wordlist -u $url -mr "ssti49" &>/dev/null | grep "URL" | sed 's/| URL | //' | anew -q vulns/ssti.txt + done + fi end_func "Results are saved in vulns/ssti.txt" ${FUNCNAME[0]} else if [ "$SSTI" = false ]; then @@ -1226,8 +1232,10 @@ function ssti(){ function sqli(){ if { [ ! -f "$called_fn_dir/.${FUNCNAME[0]}" ] || [ "$DIFF" = true ]; } && [ "$SQLI" = true ] && [ -s "gf/sqli.txt" ]; then start_func "SQLi checks" - cat gf/sqli.txt | qsreplace FUZZ | anew -q .tmp/tmp_sqli.txt - interlace -tL .tmp/tmp_sqli.txt -threads 10 -c "python3 $tools/sqlmap/sqlmap.py -u _target_ -b --batch --disable-coloring --random-agent --output-dir=vulns/sqlmap" &>/dev/null + if [ -s "gf/sqli.txt" ]; then + cat gf/sqli.txt | qsreplace FUZZ | anew -q .tmp/tmp_sqli.txt + interlace -tL .tmp/tmp_sqli.txt -threads 10 -c "python3 $tools/sqlmap/sqlmap.py -u _target_ -b --batch --disable-coloring --random-agent --output-dir=vulns/sqlmap" &>/dev/null + fi end_func "Results are saved in vulns/sqlmap folder" ${FUNCNAME[0]} else if [ "$SQLI" = false ]; then @@ -1275,7 +1283,7 @@ function 4xxbypass(){ if [[ $(cat fuzzing/*.txt 2>/dev/null | grep -E '^4' | grep -Ev '^404' | cut -d ' ' -f3 | wc -l) -le 1000 ]] || [ "$DEEP" = true ]; then start_func "403 bypass" cat fuzzing/*.txt 2>>"$LOGFILE" | grep -E '^4' | grep -Ev '^404' | cut -d ' ' -f3 | dirdar -threads $DIRDAR_THREADS -only-ok > .tmp/dirdar.txt - cat .tmp/dirdar.txt 2>>"$LOGFILE" | sed -e '1,12d' | sed '/^$/d' | anew -q vulns/4xxbypass.txt + [ -s ".tmp/dirdar.txt" ] && cat .tmp/dirdar.txt | sed -e '1,12d' | sed '/^$/d' | anew -q vulns/4xxbypass.txt end_func "Results are saved in vulns/4xxbypass.txt" ${FUNCNAME[0]} else notification "Too many urls to bypass, skipping" warn @@ -1292,8 +1300,8 @@ function 4xxbypass(){ function command_injection(){ if { [ ! 
-f "$called_fn_dir/.${FUNCNAME[0]}" ] || [ "$DIFF" = true ]; } && [ "$COMM_INJ" = true ] && [ -s "gf/rce.txt" ]; then start_func "Command Injection checks" - cat gf/rce.txt | qsreplace FUZZ | anew -q .tmp/tmp_rce.txt - python3 $tools/commix/commix.py --batch -m .tmp/tmp_rce.txt --output-dir vulns/command_injection + [ -s "gf/rce.txt" ] && cat gf/rce.txt | qsreplace FUZZ | anew -q .tmp/tmp_rce.txt + [ -s ".tmp/tmp_rce.txt" ] && python3 $tools/commix/commix.py --batch -m .tmp/tmp_rce.txt --output-dir vulns/command_injection end_func "Results are saved in vulns/command_injection folder" ${FUNCNAME[0]} else if [ "$COMM_INJ" = false ]; then diff --git a/reconftw_axiom.sh b/reconftw_axiom.sh index 195431bc..c0ad1a7f 100755 --- a/reconftw_axiom.sh +++ b/reconftw_axiom.sh @@ -55,8 +55,9 @@ function tools_installed(){ [ -f "$tools/CMSeeK/cmseek.py" ] || { printf "${bred} [*] CMSeeK [NO]${reset}\n"; allinstalled=false;} [ -f "$tools/ctfr/ctfr.py" ] || { printf "${bred} [*] ctfr [NO]${reset}\n"; allinstalled=false;} [ -f "$tools/fuzz_wordlist.txt" ] || { printf "${bred} [*] OneListForAll [NO]${reset}\n"; allinstalled=false;} - [ -f "$tools/LinkFinder/linkfinder.py" ] || { printf "${bred} [*] LinkFinder [NO]${reset}\n"; allinstalled=false;} - [ -f "$tools/GitDorker/GitDorker.py" ] || { printf "${bred} [*] GitDorker [NO]${reset}\n"; allinstalled=false;} + [ -f "$tools/LinkFinder/linkfinder.py" ] || { printf "${bred} [*] LinkFinder [NO]${reset}\n"; allinstalled=false;} + [ -f "$tools/GitDorker/GitDorker.py" ] || { printf "${bred} [*] GitDorker [NO]${reset}\n"; allinstalled=false;} + [ -f "$tools/commix/commix.py" ] || { printf "${bred} [*] commix [NO]${reset}\n"; allinstalled=false;} [ -f "$tools/degoogle_hunter/degoogle_hunter.sh" ] || { printf "${bred} [*] degoogle_hunter [NO]${reset}\n"; allinstalled=false;} [ -f "$tools/getjswords.py" ] || { printf "${bred} [*] getjswords [NO]${reset}\n"; allinstalled=false;} [ -f "$tools/JSA/jsa.py" ] || { printf "${bred} [*] JSA [NO]${reset}\n"; allinstalled=false;} @@ -178,14 +179,13 @@ function emails(){ cd "$tools/theHarvester" || { echo "Failed to cd directory in ${FUNCNAME[0]} @ line ${LINENO}"; exit 1; } python3 theHarvester.py -d $domain -b all 2>>"$LOGFILE" > $dir/.tmp/harvester.txt cd "$dir" || { echo "Failed to cd to $dir in ${FUNCNAME[0]} @ line ${LINENO}"; exit 1; } - cat .tmp/harvester.txt | awk '/Emails/,/Hosts/' | sed -e '1,2d' | head -n -2 | sed -e '/Searching /d' -e '/exception has occurred/d' -e '/found:/Q' | anew -q osint/emails.txt - cat .tmp/harvester.txt | awk '/Users/,/IPs/' | sed -e '1,2d' | head -n -2 | sed -e '/Searching /d' -e '/exception has occurred/d' -e '/found:/Q' | anew -q osint/users.txt - cat .tmp/harvester.txt | awk '/Links/,/Users/' | sed -e '1,2d' | head -n -2 | sed -e '/Searching /d' -e '/exception has occurred/d' -e '/found:/Q' | anew -q osint/linkedin.txt - - h8mail -t $domain -q domain --loose -c $tools/h8mail_config.ini -j .tmp/h8_results.json &>>"$LOGFILE" - if [ -s ".tmp/h8_results.json" ]; then - cat .tmp/h8_results.json | jq -r '.targets[0] | .data[] | .[]' | cut -d '-' -f2 | anew -q osint/h8mail.txt + if [ -s ".tmp/harvester.txt" ]; then + cat .tmp/harvester.txt | awk '/Emails/,/Hosts/' | sed -e '1,2d' | head -n -2 | sed -e '/Searching /d' -e '/exception has occurred/d' -e '/found:/Q' | anew -q osint/emails.txt + cat .tmp/harvester.txt | awk '/Users/,/IPs/' | sed -e '1,2d' | head -n -2 | sed -e '/Searching /d' -e '/exception has occurred/d' -e '/found:/Q' | anew -q osint/users.txt + cat .tmp/harvester.txt | awk 
'/Links/,/Users/' | sed -e '1,2d' | head -n -2 | sed -e '/Searching /d' -e '/exception has occurred/d' -e '/found:/Q' | anew -q osint/linkedin.txt fi + h8mail -t $domain -q domain --loose -c $tools/h8mail_config.ini -j .tmp/h8_results.json &>>"$LOGFILE" + [ -s ".tmp/h8_results.json" ] && cat .tmp/h8_results.json | jq -r '.targets[0] | .data[] | .[]' | cut -d '-' -f2 | anew -q osint/h8mail.txt PWNDB_STATUS=$(timeout 15s curl -Is --socks5-hostname localhost:9050 http://pwndb2am4tzkvold.onion | grep HTTP | cut -d ' ' -f2) @@ -213,11 +213,11 @@ function domain_info(){ if { [ ! -f "$called_fn_dir/.${FUNCNAME[0]}" ] || [ "$DIFF" = true ]; } && [ "$DOMAIN_INFO" = true ] && [ "$OSINT" = true ]; then start_func "Searching domain info (whois, registrant name/email domains)" lynx -dump "https://domainbigdata.com/${domain}" | tail -n +19 > osint/domain_info_general.txt - - cat osint/domain_info_general.txt | grep '/nj/' | tr -s ' ' ',' | cut -d ',' -f3 > .tmp/domain_registrant_name.txt - cat osint/domain_info_general.txt | grep '/mj/' | tr -s ' ' ',' | cut -d ',' -f3 > .tmp/domain_registrant_email.txt - cat osint/domain_info_general.txt | grep -E "\b([0-9]{1,3}\.){3}[0-9]{1,3}\b" | grep "https://domainbigdata.com" | tr -s ' ' ',' | cut -d ',' -f3 > .tmp/domain_registrant_ip.txt - + if [ -s "osint/domain_info_general.txt" ]; then + cat osint/domain_info_general.txt | grep '/nj/' | tr -s ' ' ',' | cut -d ',' -f3 > .tmp/domain_registrant_name.txt + cat osint/domain_info_general.txt | grep '/mj/' | tr -s ' ' ',' | cut -d ',' -f3 > .tmp/domain_registrant_email.txt + cat osint/domain_info_general.txt | grep -E "\b([0-9]{1,3}\.){3}[0-9]{1,3}\b" | grep "https://domainbigdata.com" | tr -s ' ' ',' | cut -d ',' -f3 > .tmp/domain_registrant_ip.txt + fi sed -i -n '/Copyright/q;p' osint/domain_info_general.txt if [ -s ".tmp/domain_registrant_name.txt" ]; then @@ -300,8 +300,10 @@ function sub_passive(){ axiom-scan $list -m assetfinder -o .tmp/assetfinder_psub.txt &>>"$LOGFILE" axiom-scan $list -m amass -passive -o .tmp/amass_psub.txt &>>"$LOGFILE" axiom-scan $list -m findomain -o .tmp/findomain_psub.txt &>>"$LOGFILE" - axiom-scan $list -m waybackurls -o .tmp/waybackurls_psub_tmp.txt &>>"$LOGFILE" && cat .tmp/waybackurls_psub_tmp.txt 2>>"$LOGFILE" | unfurl --unique domains | anew -q .tmp/waybackurls_psub.txt - axiom-scan $list -m gau -o .tmp/gau_psub_tmp.txt &>>"$LOGFILE" && cat .tmp/gau_psub_tmp.txt 2>>"$LOGFILE" | unfurl --unique domains | anew -q .tmp/gau_psub.txt + axiom-scan $list -m waybackurls -o .tmp/waybackurls_psub_tmp.txt &>>"$LOGFILE" + [ -s ".tmp/waybackurls_psub_tmp.txt" ] && cat .tmp/waybackurls_psub_tmp.txt | unfurl --unique domains | anew -q .tmp/waybackurls_psub.txt + axiom-scan $list -m gau -o .tmp/gau_psub_tmp.txt &>>"$LOGFILE" + [ -s ".tmp/gau_psub_tmp.txt" ] && cat .tmp/gau_psub_tmp.txt | unfurl --unique domains | anew -q .tmp/gau_psub.txt crobat -s $domain 2>>"$LOGFILE" | anew -q .tmp/crobat_psub.txt if [ -s "${GITHUB_TOKENS}" ]; then if [ "$DEEP" = true ]; then @@ -314,7 +316,7 @@ function sub_passive(){ if echo $domain | grep -q ".mil$"; then mildew mv mildew.out .tmp/mildew.out - cat .tmp/mildew.out | grep ".$domain$" | anew -q .tmp/mil_psub.txt + [ -s ".tmp/mildew.out" ] && cat .tmp/mildew.out | grep ".$domain$" | anew -q .tmp/mil_psub.txt fi NUMOFLINES=$(cat .tmp/*_psub.txt 2>>"$LOGFILE" | sed "s/*.//" | anew .tmp/passive_subs.txt | wc -l) end_subfunc "${NUMOFLINES} new subs (passive)" ${FUNCNAME[0]} @@ -345,9 +347,7 @@ function sub_crt(){ function sub_active(){ if [ ! 
-f "$called_fn_dir/.${FUNCNAME[0]}" ] || [ "$DIFF" = true ]; then start_subfunc "Running : Active Subdomain Enumeration" - if [ -s "${inScope_file}" ]; then - cat ${inScope_file} .tmp/inscope_subs.txt - fi + [ -s "${inScope_file}" ] && cat ${inScope_file} .tmp/inscope_subs.txt cat .tmp/*_subs.txt | anew -q .tmp/subs_no_resolved.txt deleteOutScoped $outOfScope_file .tmp/subs_no_resolved.txt axiom-scan .tmp/subs_no_resolved.txt -m puredns-resolve -r /home/op/lists/resolvers.txt -o .tmp/subdomains_tmp.txt &>>"$LOGFILE" @@ -363,7 +363,7 @@ function sub_dns(){ if [ ! -f "$called_fn_dir/.${FUNCNAME[0]}" ] || [ "$DIFF" = true ]; then start_subfunc "Running : DNS Subdomain Enumeration" axiom-scan subdomains/subdomains.txt -m dnsx -retry 3 -a -aaaa -cname -ns -ptr -mx -soa -resp -o subdomains/subdomains_cname.txt &>>"$LOGFILE" - cat subdomains/subdomains_cname.txt | cut -d '[' -f2 | sed 's/.$//' | grep ".$domain$" | anew -q .tmp/subdomains_dns.txt + [ -s "subdomains/subdomains_cname.txt" ] && cat subdomains/subdomains_cname.txt | cut -d '[' -f2 | sed 's/.$//' | grep ".$domain$" | anew -q .tmp/subdomains_dns.txt axiom-scan .tmp/subdomains_dns.txt -m puredns-resolve -r /home/op/lists/resolvers.txt -o .tmp/subdomains_dns_resolved.txt &>>"$LOGFILE" NUMOFLINES=$(cat .tmp/subdomains_dns_resolved.txt 2>>"$LOGFILE" | grep "\.$domain$\|^$domain$" | anew subdomains/subdomains.txt | wc -l) end_subfunc "${NUMOFLINES} new subs (dns resolution)" ${FUNCNAME[0]} @@ -399,9 +399,12 @@ function sub_scraping(){ start_subfunc "Running : Source code scraping subdomain search" touch .tmp/scrap_subs.txt if [ -s "$dir/subdomains/subdomains.txt" ]; then - axiom-scan subdomains/subdomains.txt -m httpx -follow-host-redirects -random-agent -status-code -threads $HTTPX_THREADS -timeout $HTTPX_TIMEOUT -silent -retries 2 -no-color -o .tmp/probed_tmp_scrap1.txt &>>"$LOGFILE" && cat .tmp/probed_tmp_scrap1.txt 2>>"$LOGFILE" | cut -d ' ' -f1 | grep ".$domain$" | anew -q .tmp/probed_tmp_scrap.txt - axiom-scan .tmp/probed_tmp_scrap.txt -m httpx -csp-probe -random-agent -status-code -threads $HTTPX_THREADS -timeout $HTTPX_TIMEOUT -silent -retries 2 -no-color -o .tmp/probed_tmp_scrap2.txt &>>"$LOGFILE" && cat .tmp/probed_tmp_scrap2.txt 2>>"$LOGFILE" | cut -d ' ' -f1 | grep ".$domain$" | anew .tmp/probed_tmp_scrap.txt | unfurl -u domains | anew -q .tmp/scrap_subs.txt - axiom-scan .tmp/probed_tmp_scrap.txt -m httpx -tls-probe -random-agent -status-code -threads $HTTPX_THREADS -timeout $HTTPX_TIMEOUT -silent -retries 2 -no-color -o .tmp/probed_tmp_scrap3.txt &>>"$LOGFILE" && cat .tmp/probed_tmp_scrap3.txt 2>>"$LOGFILE" | cut -d ' ' -f1 | grep ".$domain$" | anew .tmp/probed_tmp_scrap.txt | unfurl -u domains | anew -q .tmp/scrap_subs.txt + axiom-scan subdomains/subdomains.txt -m httpx -follow-host-redirects -random-agent -status-code -threads $HTTPX_THREADS -timeout $HTTPX_TIMEOUT -silent -retries 2 -no-color -o .tmp/probed_tmp_scrap1.txt &>>"$LOGFILE" + [ -s ".tmp/probed_tmp_scrap1.txt" ] && cat .tmp/probed_tmp_scrap1.txt | cut -d ' ' -f1 | grep ".$domain$" | anew -q .tmp/probed_tmp_scrap.txt + axiom-scan .tmp/probed_tmp_scrap.txt -m httpx -csp-probe -random-agent -status-code -threads $HTTPX_THREADS -timeout $HTTPX_TIMEOUT -silent -retries 2 -no-color -o .tmp/probed_tmp_scrap2.txt &>>"$LOGFILE" + [ -s ".tmp/probed_tmp_scrap2.txt" ] && cat .tmp/probed_tmp_scrap2.txt | cut -d ' ' -f1 | grep ".$domain$" | anew .tmp/probed_tmp_scrap.txt | unfurl -u domains | anew -q .tmp/scrap_subs.txt + axiom-scan .tmp/probed_tmp_scrap.txt -m httpx -tls-probe 
-random-agent -status-code -threads $HTTPX_THREADS -timeout $HTTPX_TIMEOUT -silent -retries 2 -no-color -o .tmp/probed_tmp_scrap3.txt &>>"$LOGFILE" + [ -s ".tmp/probed_tmp_scrap3.txt" ] && cat .tmp/probed_tmp_scrap3.txt | cut -d ' ' -f1 | grep ".$domain$" | anew .tmp/probed_tmp_scrap.txt | unfurl -u domains | anew -q .tmp/scrap_subs.txt if [ "$DEEP" = true ]; then [ -f ".tmp/probed_tmp_scrap.txt" ] && axiom-scan .tmp/probed_tmp_scrap.txt -m gospider --js -d 3 --sitemap --robots -w -r -o .tmp/gospider &>>"$LOGFILE" else @@ -414,7 +417,8 @@ function sub_scraping(){ grep -Eo 'https?://[^ ]+' .tmp/gospider.txt | sed 's/]$//' | unfurl --unique domains | grep ".$domain$" | anew -q .tmp/scrap_subs.txt axiom-scan .tmp/scrap_subs.txt -m puredns-resolve -r /home/op/lists/resolvers.txt -o .tmp/scrap_subs_resolved.txt &>>"$LOGFILE" NUMOFLINES=$(cat .tmp/scrap_subs_resolved.txt 2>>"$LOGFILE" | grep "\.$domain$\|^$domain$" | anew subdomains/subdomains.txt | tee .tmp/diff_scrap.txt | wc -l) - axiom-scan .tmp/diff_scrap.txt -m httpx -follow-host-redirects -random-agent -status-code -threads $HTTPX_THREADS -timeout $HTTPX_TIMEOUT -silent -retries 2 -no-color -o .tmp/probed_tmp_scrap4.txt &>>"$LOGFILE" && cat .tmp/probed_tmp_scrap4.txt 2>>"$LOGFILE" | cut -d ' ' -f1 | grep ".$domain$" | anew -q .tmp/probed_tmp_scrap.txt + axiom-scan .tmp/diff_scrap.txt -m httpx -follow-host-redirects -random-agent -status-code -threads $HTTPX_THREADS -timeout $HTTPX_TIMEOUT -silent -retries 2 -no-color -o .tmp/probed_tmp_scrap4.txt &>>"$LOGFILE" + [ -s ".tmp/probed_tmp_scrap4.txt" ] && cat .tmp/probed_tmp_scrap4.txt | cut -d ' ' -f1 | grep ".$domain$" | anew -q .tmp/probed_tmp_scrap.txt end_subfunc "${NUMOFLINES} new subs (code scraping)" ${FUNCNAME[0]} else end_subfunc "No subdomains to search (code scraping)" ${FUNCNAME[0]} @@ -432,37 +436,45 @@ function sub_permut(){ if { [ ! 
-f "$called_fn_dir/.${FUNCNAME[0]}" ] || [ "$DIFF" = true ]; } && [ "$SUBPERMUTE" = true ]; then start_subfunc "Running : Permutations Subdomain Enumeration" if [ "$DEEP" = true ]; then - axiom-scan subdomains/subdomains.txt -m dnscewl -o .tmp/DNScewl1_.txt &>>"$LOGFILE" && cat .tmp/DNScewl1_.txt | grep ".$domain$" > .tmp/DNScewl1.txt + axiom-scan subdomains/subdomains.txt -m dnscewl -o .tmp/DNScewl1_.txt &>>"$LOGFILE" + [ -s ".tmp/DNScewl1_.txt" ] && cat .tmp/DNScewl1_.txt | grep ".$domain$" > .tmp/DNScewl1.txt axiom-scan .tmp/DNScewl1.txt -m puredns-resolve -r /home/op/lists/resolvers.txt -o .tmp/permute1_tmp.txt &>>"$LOGFILE" cat .tmp/permute1_tmp.txt 2>>"$LOGFILE" | anew -q .tmp/permute1.txt - axiom-scan .tmp/permute1.txt -m dnscewl -o .tmp/DNScewl2_.txt &>>"$LOGFILE" && cat .tmp/DNScewl2_.txt 2>>"$LOGFILE" | grep ".$domain$" > .tmp/DNScewl2.txt + axiom-scan .tmp/permute1.txt -m dnscewl -o .tmp/DNScewl2_.txt &>>"$LOGFILE" + [ -s ".tmp/DNScewl2_.txt" ] && cat .tmp/DNScewl2_.txt 2>>"$LOGFILE" | grep ".$domain$" > .tmp/DNScewl2.txt axiom-scan .tmp/DNScewl2.txt -m puredns-resolve -r /home/op/lists/resolvers.txt -o .tmp/permute2_tmp.txt &>>"$LOGFILE" cat .tmp/permute2_tmp.txt 2>>"$LOGFILE" | anew -q .tmp/permute2.txt cat .tmp/permute1.txt .tmp/permute2.txt 2>>"$LOGFILE" | anew -q .tmp/permute_subs.txt else if [[ $(cat .tmp/subs_no_resolved.txt | wc -l) -le 100 ]]; then - axiom-scan .tmp/subs_no_resolved.txt -m dnscewl -o .tmp/DNScewl1_.txt &>>"$LOGFILE" && cat .tmp/DNScewl1_.txt | grep ".$domain$" > .tmp/DNScewl1.txt + axiom-scan .tmp/subs_no_resolved.txt -m dnscewl -o .tmp/DNScewl1_.txt &>>"$LOGFILE" + [ -s ".tmp/DNScewl1_.txt" ] && cat .tmp/DNScewl1_.txt | grep ".$domain$" > .tmp/DNScewl1.txt axiom-scan .tmp/DNScewl1.txt -m puredns-resolve -r /home/op/lists/resolvers.txt -o .tmp/permute1_tmp.txt &>>"$LOGFILE" cat .tmp/permute1_tmp.txt 2>>"$LOGFILE" | anew -q .tmp/permute1.txt - axiom-scan .tmp/permute1.txt -m dnscewl -o .tmp/DNScewl2_.txt &>>"$LOGFILE" && cat .tmp/DNScewl2_.txt 2>>"$LOGFILE" | grep ".$domain$" > .tmp/DNScewl2.txt + axiom-scan .tmp/permute1.txt -m dnscewl -o .tmp/DNScewl2_.txt &>>"$LOGFILE" + [ -s ".tmp/DNScewl2_.txt" ] && cat .tmp/DNScewl2_.txt 2>>"$LOGFILE" | grep ".$domain$" > .tmp/DNScewl2.txt axiom-scan .tmp/DNScewl2.txt -m puredns-resolve -r /home/op/lists/resolvers.txt -o .tmp/permute2_tmp.txt &>>"$LOGFILE" cat .tmp/permute2_tmp.txt 2>>"$LOGFILE" | anew -q .tmp/permute2.txt cat .tmp/permute1.txt .tmp/permute2.txt 2>>"$LOGFILE" | anew -q .tmp/permute_subs.txt elif [[ $(cat .tmp/subs_no_resolved.txt | wc -l) -le 200 ]]; then - axiom-scan .tmp/subs_no_resolved.txt -m dnscewl -o .tmp/DNScewl1_.txt &>>"$LOGFILE" && cat .tmp/DNScewl1_.txt | grep ".$domain$" > .tmp/DNScewl1.txt + axiom-scan .tmp/subs_no_resolved.txt -m dnscewl -o .tmp/DNScewl1_.txt &>>"$LOGFILE" + [ -s ".tmp/DNScewl1_.txt" ] && cat .tmp/DNScewl1_.txt | grep ".$domain$" > .tmp/DNScewl1.txt axiom-scan .tmp/DNScewl1.txt -m puredns-resolve -r /home/op/lists/resolvers.txt -o .tmp/permute_tmp.txt &>>"$LOGFILE" cat .tmp/permute_tmp.txt 2>>"$LOGFILE" | anew -q .tmp/permute_subs.txt else if [[ $(cat subdomains/subdomains.txt | wc -l) -le 100 ]]; then - axiom-scan subdomains/subdomains.txt -m dnscewl -o .tmp/DNScewl1_.txt &>>"$LOGFILE" && cat .tmp/DNScewl1_.txt | grep ".$domain$" > .tmp/DNScewl1.txt + axiom-scan subdomains/subdomains.txt -m dnscewl -o .tmp/DNScewl1_.txt &>>"$LOGFILE" + [ -s ".tmp/DNScewl1_.txt" ] && cat .tmp/DNScewl1_.txt | grep ".$domain$" > .tmp/DNScewl1.txt axiom-scan .tmp/DNScewl1.txt -m 
puredns-resolve -r /home/op/lists/resolvers.txt -o .tmp/permute1_tmp.txt &>>"$LOGFILE" cat .tmp/permute1_tmp.txt 2>>"$LOGFILE" | anew -q .tmp/permute1.txt - axiom-scan .tmp/permute1.txt -m dnscewl -o .tmp/DNScewl2_.txt &>>"$LOGFILE" && cat .tmp/DNScewl2_.txt 2>>"$LOGFILE" | grep ".$domain$" > .tmp/DNScewl2.txt + axiom-scan .tmp/permute1.txt -m dnscewl -o .tmp/DNScewl2_.txt &>>"$LOGFILE" + [ -s ".tmp/DNScewl2_.txt" ] && cat .tmp/DNScewl2_.txt 2>>"$LOGFILE" | grep ".$domain$" > .tmp/DNScewl2.txt axiom-scan .tmp/DNScewl2.txt -m puredns-resolve -r /home/op/lists/resolvers.txt -o .tmp/permute2_tmp.txt &>>"$LOGFILE" cat .tmp/permute2_tmp.txt 2>>"$LOGFILE" | anew -q .tmp/permute2.txt cat .tmp/permute1.txt .tmp/permute2.txt 2>>"$LOGFILE" | anew -q .tmp/permute_subs.txt elif [[ $(cat subdomains/subdomains.txt | wc -l) -le 200 ]]; then - axiom-scan subdomains/subdomains.txt -m dnscewl -o .tmp/DNScewl1_.txt &>>"$LOGFILE" && cat .tmp/DNScewl1_.txt | grep ".$domain$" > .tmp/DNScewl1.txt + axiom-scan subdomains/subdomains.txt -m dnscewl -o .tmp/DNScewl1_.txt &>>"$LOGFILE" + [ -s ".tmp/DNScewl1_.txt" ] && cat .tmp/DNScewl1_.txt | grep ".$domain$" > .tmp/DNScewl1.txt axiom-scan .tmp/DNScewl1.txt -m puredns-resolve -r /home/op/lists/resolvers.txt -o .tmp/permute_tmp.txt &>>"$LOGFILE" cat .tmp/permute_tmp.txt 2>>"$LOGFILE" | anew -q .tmp/permute_subs.txt else @@ -510,10 +522,12 @@ function sub_recursive(){ done axiom-scan .tmp/brute_recursive_wordlist.txt -m puredns-resolve -r /home/op/lists/resolvers.txt -o .tmp/brute_recursive_result.txt &>>"$LOGFILE" cat .tmp/brute_recursive_result.txt | anew -q .tmp/brute_recursive.txt - axiom-scan .tmp/brute_recursive.txt -m dnscewl -o .tmp/DNScewl1_recursive_.txt &>>"$LOGFILE" && cat .tmp/DNScewl1_recursive_.txt 2>>"$LOGFILE" | grep ".$domain$" > .tmp/DNScewl1_recursive.txt + axiom-scan .tmp/brute_recursive.txt -m dnscewl -o .tmp/DNScewl1_recursive_.txt &>>"$LOGFILE" + [ -s ".tmp/DNScewl1_recursive_.txt" ] && cat .tmp/DNScewl1_recursive_.txt | grep ".$domain$" > .tmp/DNScewl1_recursive.txt axiom-scan .tmp/DNScewl1_recursive.txt -m puredns-resolve -r /home/op/lists/resolvers.txt -o .tmp/permute1_recursive_tmp.txt &>>"$LOGFILE" cat .tmp/permute1_recursive_tmp.txt 2>>"$LOGFILE" | anew -q .tmp/permute1_recursive.txt - axiom-scan .tmp/permute1_recursive.txt -m dnscewl -o .tmp/DNScewl2_recursive_.txt &>>"$LOGFILE" && cat .tmp/DNScewl2_recursive_.txt 2>>"$LOGFILE" | grep ".$domain$" > .tmp/DNScewl2_recursive.txt + axiom-scan .tmp/permute1_recursive.txt -m dnscewl -o .tmp/DNScewl2_recursive_.txt &>>"$LOGFILE" + [ -s ".tmp/DNScewl2_recursive_.txt" ] && cat .tmp/DNScewl2_recursive_.txt | grep ".$domain$" > .tmp/DNScewl2_recursive.txt axiom-scan .tmp/DNScewl2_recursive.txt -m puredns-resolve -r /home/op/lists/resolvers.txt -o .tmp/permute2_recursive_tmp.txt &>>"$LOGFILE" cat .tmp/permute1_recursive.txt .tmp/permute2_recursive_tmp.txt 2>>"$LOGFILE" | anew -q .tmp/permute_recursive.txt NUMOFLINES=$(cat .tmp/permute_recursive.txt .tmp/brute_recursive.txt 2>>"$LOGFILE" | grep "\.$domain$\|^$domain$" | anew subdomains/subdomains.txt | wc -l) @@ -534,7 +548,7 @@ function subtakeover(){ if { [ ! 
-f "$called_fn_dir/.${FUNCNAME[0]}" ] || [ "$DIFF" = true ]; } && [ "$SUBTAKEOVER" = true ]; then start_func "Looking for possible subdomain takeover" touch .tmp/tko.txt - [ -f "webs/webs.txt" ] && axiom-scan webs/webs.txt -m nuclei -w /home/op/recon/nuclei/takeovers/ -o .tmp/tko.txt &>>"$LOGFILE" + [ -s "webs/webs.txt" ] && axiom-scan webs/webs.txt -m nuclei -w /home/op/recon/nuclei/takeovers/ -o .tmp/tko.txt &>>"$LOGFILE" NUMOFLINES=$(cat .tmp/tko.txt 2>>"$LOGFILE" | anew webs/takeover.txt | wc -l) if [ "$NUMOFLINES" -gt 0 ]; then notification "${NUMOFLINES} new possible takeovers found" info @@ -595,7 +609,8 @@ function webprobe_simple(){ if [ -s ".tmp/probed_tmp_scrap.txt" ]; then mv .tmp/probed_tmp_scrap.txt .tmp/probed_tmp.txt else - axiom-scan subdomains/subdomains.txt -m httpx -follow-host-redirects -random-agent -threads $HTTPX_THREADS -status-code -timeout $HTTPX_TIMEOUT -silent -retries 2 -no-color -o .tmp/probed_tmp_.txt &>>"$LOGFILE" && cat .tmp/probed_tmp_.txt | cut -d ' ' -f1 | grep ".$domain$" | anew -q .tmp/probed_tmp.txt + axiom-scan subdomains/subdomains.txt -m httpx -follow-host-redirects -random-agent -threads $HTTPX_THREADS -status-code -timeout $HTTPX_TIMEOUT -silent -retries 2 -no-color -o .tmp/probed_tmp_.txt &>>"$LOGFILE" + [ -s ".tmp/probed_tmp_.txt" ] && cat .tmp/probed_tmp_.txt | cut -d ' ' -f1 | grep ".$domain$" | anew -q .tmp/probed_tmp.txt fi if [ -s ".tmp/probed_tmp.txt" ]; then deleteOutScoped $outOfScope_file .tmp/probed_tmp.txt @@ -622,7 +637,8 @@ function webprobe_full(){ start_func "Http probing non standard ports" axiom-scan subdomains/subdomains.txt -m unimap --fast-scan --ports $UNCOMMON_PORTS_WEB -q -k --url-output -o .tmp/nmap_uncommonweb.txt - axiom-scan .tmp/nmap_uncommonweb.txt -m httpx -follow-host-redirects -random-agent -status-code -threads $HTTPX_UNCOMMONPORTS_THREADS -timeout $HTTPX_UNCOMMONPORTS_TIMEOUT -silent -retries 2 -no-color -o .tmp/probed_uncommon_ports_tmp_.txt &>>"$LOGFILE" && cat .tmp/probed_uncommon_ports_tmp_.txt | cut -d ' ' -f1 | grep ".$domain$" | anew -q .tmp/probed_uncommon_ports_tmp.txt + [ -s ".tmp/nmap_uncommonweb.txt" ] && axiom-scan .tmp/nmap_uncommonweb.txt -m httpx -follow-host-redirects -random-agent -status-code -threads $HTTPX_UNCOMMONPORTS_THREADS -timeout $HTTPX_UNCOMMONPORTS_TIMEOUT -silent -retries 2 -no-color -o .tmp/probed_uncommon_ports_tmp_.txt &>>"$LOGFILE" + [ -s ".tmp/probed_uncommon_ports_tmp_.txt" ] && cat .tmp/probed_uncommon_ports_tmp_.txt | cut -d ' ' -f1 | grep ".$domain$" | anew -q .tmp/probed_uncommon_ports_tmp.txt #axiom-scan subdomains/subdomains.txt -m naabu -p $UNCOMMON_PORTS_WEB -o .tmp/nmap_uncommonweb.txt &>>"$LOGFILE" && uncommon_ports_checked=$(cat .tmp/nmap_uncommonweb.txt | cut -d ':' -f2 | sort -u | sed -e 'H;${x;s/\n/,/g;s/^,//;p;};d') #if [ -n "$uncommon_ports_checked" ]; then @@ -630,7 +646,7 @@ function webprobe_full(){ #fi NUMOFLINES=$(cat .tmp/probed_uncommon_ports_tmp.txt 2>>"$LOGFILE" | anew webs/webs_uncommon_ports.txt | wc -l) notification "Uncommon web ports: ${NUMOFLINES} new websites" good - cat webs/webs_uncommon_ports.txt 2>>"$LOGFILE" + [ -s "webs/webs_uncommon_ports.txt" ] && cat webs/webs_uncommon_ports.txt end_func "Results are saved in $domain/webs/webs_uncommon_ports.txt" ${FUNCNAME[0]} if [ "$PROXY" = true ] && [ -n "$proxy_url" ] && [[ $(cat webs/webs_uncommon_ports.txt| wc -l) -le 1500 ]]; then notification "Sending websites uncommon ports to proxy" info @@ -670,7 +686,7 @@ function favicon(){ start_func "Favicon Ip Lookup" cd "$tools/fav-up" || { echo 
"Failed to cd to $dir in ${FUNCNAME[0]} @ line ${LINENO}"; exit 1; } python3 favUp.py -w "$domain" -sc -o favicontest.json &>>"$LOGFILE" - if [ -f "favicontest.json" ]; then + if [ -s "favicontest.json" ]; then cat favicontest.json | jq -r '.found_ips' 2>>"$LOGFILE" | grep -v "not-found" > favicontest.txt sed -i "s/|/\n/g" favicontest.txt cat favicontest.txt 2>>"$LOGFILE" @@ -695,10 +711,11 @@ function portscan(){ echo "$sub $(dig +short a $sub | tail -n1)" | anew -q .tmp/subs_ips.txt done awk '{ print $2 " " $1}' .tmp/subs_ips.txt | sort -k2 -n | anew -q hosts/subs_ips_vhosts.txt - cat hosts/subs_ips_vhosts.txt 2>>"$LOGFILE" | cut -d ' ' -f1 | grep -Eiv "^(127|10|169\.154|172\.1[6789]|172\.2[0-9]|172\.3[01]|192\.168)\." | grep -oE "\b([0-9]{1,3}\.){3}[0-9]{1,3}\b" | anew -q hosts/ips.txt - axiom-scan webs/webs.txt -m cf-check -o .tmp/ips_nowaf_.txt &>>"$LOGFILE" && cat .tmp/ips_nowaf_.txt | grep -Eiv "^(127|10|169\.154|172\.1[6789]|172\.2[0-9]|172\.3[01]|192\.168)\." | grep -oE "\b([0-9]{1,3}\.){3}[0-9]{1,3}\b" | anew -q .tmp/ips_nowaf.txt + [ -s "hosts/subs_ips_vhosts.txt" ] && cat hosts/subs_ips_vhosts.txt | cut -d ' ' -f1 | grep -Eiv "^(127|10|169\.154|172\.1[6789]|172\.2[0-9]|172\.3[01]|192\.168)\." | grep -oE "\b([0-9]{1,3}\.){3}[0-9]{1,3}\b" | anew -q hosts/ips.txt + [ -s "hosts/ips.txt" ] && axiom-scan hosts/ips.txt -m cf-check -o .tmp/ips_nowaf_.txt &>>"$LOGFILE" + [ -s ".tmp/ips_nowaf_.txt" ] && cat .tmp/ips_nowaf_.txt | grep -Eiv "^(127|10|169\.154|172\.1[6789]|172\.2[0-9]|172\.3[01]|192\.168)\." | grep -oE "\b([0-9]{1,3}\.){3}[0-9]{1,3}\b" | anew -q .tmp/ips_nowaf.txt printf "${bblue}\n Resolved IP addresses (No WAF) ${reset}\n\n"; - cat .tmp/ips_nowaf.txt 2>>"$LOGFILE" | sort + [ -s ".tmp/ips_nowaf.txt" ] && cat .tmp/ips_nowaf.txt | sort printf "${bblue}\n Scanning ports... ${reset}\n\n"; if [ "$PORTSCAN_PASSIVE" = true ] && [ ! -f "hosts/portscan_passive.txt" ]; then for sub in $(cat hosts/ips.txt); do @@ -722,7 +739,7 @@ function cloudprovider(){ if { [ ! 
-f "$called_fn_dir/.${FUNCNAME[0]}" ] || [ "$DIFF" = true ]; } && [ "$CLOUD_IP" = true ]; then start_func "Cloud provider check" cd "$tools/ip2provider" || { echo "Failed to cd directory in ${FUNCNAME[0]} @ line ${LINENO}"; exit 1; } - cat $dir/hosts/ips.txt | grep -oE "\b([0-9]{1,3}\.){3}[0-9]{1,3}\b" | ./ip2provider.py | anew -q $dir/hosts/cloud_providers.txt &>>"$LOGFILE" + [ -s "$dir/hosts/ips.txt" ] && cat $dir/hosts/ips.txt | grep -oE "\b([0-9]{1,3}\.){3}[0-9]{1,3}\b" | ./ip2provider.py | anew -q $dir/hosts/cloud_providers.txt &>>"$LOGFILE" cd "$dir" || { echo "Failed to cd directory in ${FUNCNAME[0]} @ line ${LINENO}"; exit 1; } end_func "Results are saved in hosts/cloud_providers.txt" ${FUNCNAME[0]} else @@ -743,7 +760,7 @@ function waf_checks(){ start_func "Website's WAF detection" if [ -s "./webs/webs.txt" ]; then axiom-scan webs/webs.txt -m wafw00f -o .tmp/wafs.txt &>>"$LOGFILE" - if [ -f ".tmp/wafs.txt" ]; then + if [ -s ".tmp/wafs.txt" ]; then cat .tmp/wafs.txt | sed -e 's/^[ \t]*//' -e 's/ \+ /\t/g' -e '/(None)/d' | tr -s "\t" ";" > webs/webs_wafs.txt NUMOFLINES=$(cat webs/webs_wafs.txt 2>>"$LOGFILE" | wc -l) notification "${NUMOFLINES} websites protected by waf" info @@ -768,17 +785,19 @@ function nuclei_check(){ start_func "Templates based web scanner" nuclei -update-templates &>>"$LOGFILE" mkdir -p nuclei_output - printf "${yellow}\n Running : Nuclei Info${reset}\n\n" - axiom-scan webs/webs.txt -m nuclei -severity info -r /home/op/lists/resolvers_trusted.txt -o nuclei_output/info.txt &>>"$LOGFILE" - printf "${yellow}\n\n Running : Nuclei Low${reset}\n\n" - axiom-scan webs/webs.txt -m nuclei -severity low -r /home/op/lists/resolvers_trusted.txt -o nuclei_output/low.txt &>>"$LOGFILE" - printf "${yellow}\n\n Running : Nuclei Medium${reset}\n\n" - axiom-scan webs/webs.txt -m nuclei -severity medium -r /home/op/lists/resolvers_trusted.txt -o nuclei_output/medium.txt &>>"$LOGFILE" - printf "${yellow}\n\n Running : Nuclei High${reset}\n\n" - axiom-scan webs/webs.txt -m nuclei -severity high -r /home/op/lists/resolvers_trusted.txt -o nuclei_output/high.txt &>>"$LOGFILE" - printf "${yellow}\n\n Running : Nuclei Critical${reset}\n\n" - axiom-scan webs/webs.txt -m nuclei -severity critical -r /home/op/lists/resolvers_trusted.txt -o nuclei_output/critical.txt &>>"$LOGFILE" - printf "\n\n" + if [ -s "webs/webs.txt" ]; then + printf "${yellow}\n Running : Nuclei Info${reset}\n\n" + axiom-scan webs/webs.txt -m nuclei -severity info -r /home/op/lists/resolvers_trusted.txt -o nuclei_output/info.txt &>>"$LOGFILE" + printf "${yellow}\n\n Running : Nuclei Low${reset}\n\n" + axiom-scan webs/webs.txt -m nuclei -severity low -r /home/op/lists/resolvers_trusted.txt -o nuclei_output/low.txt &>>"$LOGFILE" + printf "${yellow}\n\n Running : Nuclei Medium${reset}\n\n" + axiom-scan webs/webs.txt -m nuclei -severity medium -r /home/op/lists/resolvers_trusted.txt -o nuclei_output/medium.txt &>>"$LOGFILE" + printf "${yellow}\n\n Running : Nuclei High${reset}\n\n" + axiom-scan webs/webs.txt -m nuclei -severity high -r /home/op/lists/resolvers_trusted.txt -o nuclei_output/high.txt &>>"$LOGFILE" + printf "${yellow}\n\n Running : Nuclei Critical${reset}\n\n" + axiom-scan webs/webs.txt -m nuclei -severity critical -r /home/op/lists/resolvers_trusted.txt -o nuclei_output/critical.txt &>>"$LOGFILE" + printf "\n\n" + fi end_func "Results are saved in $domain/nuclei_output folder" ${FUNCNAME[0]} else if [ "$NUCLEICHECK" = false ]; then @@ -857,20 +876,22 @@ function params(){ if { [ ! 
-f "$called_fn_dir/.${FUNCNAME[0]}" ] || [ "$DIFF" = true ]; } && [ "$PARAMS" = true ]; then start_func "Parameter Discovery" printf "${yellow}\n\n Running : Searching params with paramspider${reset}\n" - cat webs/webs.txt | sed -r "s/https?:\/\///" | anew -q .tmp/probed_nohttp.txt - axiom-scan .tmp/probed_nohttp.txt -m paramspider -l high -q --exclude eot,jpg,jpeg,gif,css,tif,tiff,png,ttf,otf,woff,woff2,ico,pdf,svg,txt,js -o output_paramspider &>>"$LOGFILE" - cat output_paramspider/*.txt 2>>"$LOGFILE" | anew -q .tmp/param_tmp.txt - sed '/^FUZZ/d' -i .tmp/param_tmp.txt - rm -rf output_paramspider/ 2>>"$LOGFILE" - if [ "$DEEP" = true ]; then - printf "${yellow}\n\n Running : Checking ${domain} with Arjun${reset}\n" - axiom-scan .tmp/param_tmp.txt -m arjun -t $ARJUN_THREADS -o webs/param.txt &>>"$LOGFILE" - else - if [[ $(cat .tmp/param_tmp.txt | wc -l) -le 50 ]]; then + if [ -s "webs/webs.txt" ]; then + cat webs/webs.txt | sed -r "s/https?:\/\///" | anew -q .tmp/probed_nohttp.txt + axiom-scan .tmp/probed_nohttp.txt -m paramspider -l high -q --exclude eot,jpg,jpeg,gif,css,tif,tiff,png,ttf,otf,woff,woff2,ico,pdf,svg,txt,js -o output_paramspider &>>"$LOGFILE" + cat output_paramspider/*.txt 2>>"$LOGFILE" | anew -q .tmp/param_tmp.txt + sed '/^FUZZ/d' -i .tmp/param_tmp.txt + rm -rf output_paramspider/ 2>>"$LOGFILE" + if [ "$DEEP" = true ]; then printf "${yellow}\n\n Running : Checking ${domain} with Arjun${reset}\n" axiom-scan .tmp/param_tmp.txt -m arjun -t $ARJUN_THREADS -o webs/param.txt &>>"$LOGFILE" else - cp .tmp/param_tmp.txt webs/param.txt + if [[ $(cat .tmp/param_tmp.txt | wc -l) -le 50 ]]; then + printf "${yellow}\n\n Running : Checking ${domain} with Arjun${reset}\n" + axiom-scan .tmp/param_tmp.txt -m arjun -t $ARJUN_THREADS -o webs/param.txt &>>"$LOGFILE" + else + cp .tmp/param_tmp.txt webs/param.txt + fi fi fi end_func "Results are saved in $domain/webs/param.txt" ${FUNCNAME[0]} @@ -887,35 +908,39 @@ function urlchecks(){ if { [ ! -f "$called_fn_dir/.${FUNCNAME[0]}" ] || [ "$DIFF" = true ]; } && [ "$URL_CHECK" = true ]; then start_func "URL Extraction" mkdir -p js - axiom-scan webs/webs.txt -m waybackurls -o .tmp/url_extract_way_tmp.txt &>>"$LOGFILE" && cat .tmp/url_extract_way_tmp.txt 2>>"$LOGFILE" | anew -q .tmp/url_extract_tmp.txt - axiom-scan webs/webs.txt -m gau -o .tmp/url_extract_gau_tmp.txt &>>"$LOGFILE" && cat .tmp/url_extract_gau_tmp.txt 2>>"$LOGFILE" | anew -q .tmp/url_extract_tmp.txt - diff_webs=$(diff <(sort -u .tmp/probed_tmp.txt) <(sort -u webs/webs.txt) | wc -l) - if [ $diff_webs != "0" ] || [ ! -s ".tmp/gospider.txt" ]; then + if [ -s "webs/webs.txt" ]; then + axiom-scan webs/webs.txt -m waybackurls -o .tmp/url_extract_way_tmp.txt &>>"$LOGFILE" + [ -f ".tmp/url_extract_way_tmp.txt" ] && cat .tmp/url_extract_way_tmp.txt | anew -q .tmp/url_extract_tmp.txt + axiom-scan webs/webs.txt -m gau -o .tmp/url_extract_gau_tmp.txt &>>"$LOGFILE" + [ -f ".tmp/url_extract_gau_tmp.txt" ] && cat .tmp/url_extract_gau_tmp.txt | anew -q .tmp/url_extract_tmp.txt + diff_webs=$(diff <(sort -u .tmp/probed_tmp.txt) <(sort -u webs/webs.txt) | wc -l) + if [ $diff_webs != "0" ] || [ ! 
-s ".tmp/gospider.txt" ]; then + if [ "$DEEP" = true ]; then + axiom-scan webs/webs.txt -m gospider --js -d 3 --sitemap --robots -w -r -o .tmp/gospider &>>"$LOGFILE" + else + axiom-scan webs/webs.txt -m gospider --js -d 2 --sitemap --robots -w -r -o .tmp/gospider &>>"$LOGFILE" + fi + [[ -d .tmp/gospider/ ]] && cat .tmp/gospider/* 2>>"$LOGFILE" | sed '/^.\{2048\}./d' | anew -q .tmp/gospider.txt + fi + [[ -d .tmp/gospider/ ]] && NUMFILES=$(find .tmp/gospider/ -type f | wc -l) + [[ $NUMFILES -gt 0 ]] && cat .tmp/gospider.txt | grep -Eo 'https?://[^ ]+' | sed 's/]$//' | grep ".$domain$" | anew -q .tmp/url_extract_tmp.txt + if [ -s "${GITHUB_TOKENS}" ]; then + github-endpoints -q -k -d $domain -t ${GITHUB_TOKENS} -o .tmp/github-endpoints.txt &>>"$LOGFILE" + [ -s ".tmp/github-endpoints.txt" ] && cat .tmp/github-endpoints.txt | anew -q .tmp/url_extract_tmp.txt + fi + [ -s ".tmp/url_extract_tmp.txt" ] && cat .tmp/url_extract_tmp.txt | grep "${domain}" | grep -Ei "\.(js)" | anew -q js/url_extract_js.txt if [ "$DEEP" = true ]; then - [ -f ".tmp/probed_tmp_scrap.txt" ] && axiom-scan .tmp/probed_tmp_scrap.txt -m gospider --js -d 3 --sitemap --robots -w -r -o .tmp/gospider &>>"$LOGFILE" - else - [ -f ".tmp/probed_tmp_scrap.txt" ] && axiom-scan .tmp/probed_tmp_scrap.txt -m gospider --js -d 2 --sitemap --robots -w -r -o .tmp/gospider &>>"$LOGFILE" + [ -s "js/url_extract_js.txt" ] && cat js/url_extract_js.txt | python3 $tools/JSA/jsa.py | anew -q .tmp/url_extract_tmp.txt + fi + cat .tmp/url_extract_tmp.txt webs/param.txt 2>>"$LOGFILE" | grep "${domain}" | grep "=" | qsreplace -a 2>>"$LOGFILE" | grep -Eiv "\.(eot|jpg|jpeg|gif|css|tif|tiff|png|ttf|otf|woff|woff2|ico|pdf|svg|txt|js)$" | anew -q .tmp/url_extract_tmp2.txt + uddup -u .tmp/url_extract_tmp2.txt -o .tmp/url_extract_uddup.txt &>>"$LOGFILE" + NUMOFLINES=$(cat .tmp/url_extract_uddup.txt 2>>"$LOGFILE" | anew webs/url_extract.txt | wc -l) + notification "${NUMOFLINES} new urls with params" info + end_func "Results are saved in $domain/webs/url_extract.txt" ${FUNCNAME[0]} + if [ "$PROXY" = true ] && [ -n "$proxy_url" ] && [[ $(cat webs/url_extract.txt | wc -l) -le 1500 ]]; then + notification "Sending urls to proxy" info + ffuf -mc all -w webs/url_extract.txt -u FUZZ -replay-proxy $proxy_url &>>"$LOGFILE" fi - [[ -d .tmp/gospider/ ]] && cat .tmp/gospider/* | sed '/^.\{2048\}./d' | anew -q .tmp/gospider.txt - fi - [[ -d .tmp/gospider/ ]] && NUMFILES=$(find .tmp/gospider/ -type f | wc -l) - [[ $NUMFILES -gt 0 ]] && cat .tmp/gospider.txt | grep -Eo 'https?://[^ ]+' | sed 's/]$//' | grep ".$domain$" | anew -q .tmp/url_extract_tmp.txt - if [ -s "${GITHUB_TOKENS}" ]; then - github-endpoints -q -k -d $domain -t ${GITHUB_TOKENS} -o .tmp/github-endpoints.txt &>>"$LOGFILE" - cat .tmp/github-endpoints.txt 2>>"$LOGFILE" | anew -q .tmp/url_extract_tmp.txt - fi - cat .tmp/url_extract_tmp.txt | grep "${domain}" | grep -Ei "\.(js)" | anew -q js/url_extract_js.txt - if [ "$DEEP" = true ]; then - [ -f "js/url_extract_js.txt" ] && cat js/url_extract_js.txt | python3 $tools/JSA/jsa.py | anew -q .tmp/url_extract_tmp.txt - fi - cat .tmp/url_extract_tmp.txt webs/param.txt 2>>"$LOGFILE" | grep "${domain}" | grep "=" | qsreplace -a 2>>"$LOGFILE" | grep -Eiv "\.(eot|jpg|jpeg|gif|css|tif|tiff|png|ttf|otf|woff|woff2|ico|pdf|svg|txt|js)$" | anew -q .tmp/url_extract_tmp2.txt - uddup -u .tmp/url_extract_tmp2.txt -o .tmp/url_extract_uddup.txt &>>"$LOGFILE" - NUMOFLINES=$(cat .tmp/url_extract_uddup.txt 2>>"$LOGFILE" | anew webs/url_extract.txt | wc -l) - notification "${NUMOFLINES} new 
urls with params" info - end_func "Results are saved in $domain/webs/url_extract.txt" ${FUNCNAME[0]} - if [ "$PROXY" = true ] && [ -n "$proxy_url" ] && [[ $(cat webs/url_extract.txt | wc -l) -le 1500 ]]; then - notification "Sending urls to proxy" info - ffuf -mc all -w webs/url_extract.txt -u FUZZ -replay-proxy $proxy_url &>>"$LOGFILE" fi else printf "${yellow} ${FUNCNAME[0]} is already processed, to force executing ${FUNCNAME[0]} delete\n $called_fn_dir/.${FUNCNAME[0]} ${reset}\n\n" @@ -930,10 +955,11 @@ function url_gf(){ gf ssti webs/url_extract.txt | anew -q gf/ssti.txt gf ssrf webs/url_extract.txt | anew -q gf/ssrf.txt gf sqli webs/url_extract.txt | anew -q gf/sqli.txt - gf redirect webs/url_extract.txt | anew -q gf/redirect.txt && cat gf/ssrf.txt | anew -q gf/redirect.txt + gf redirect webs/url_extract.txt | anew -q gf/redirect.txt + [ -f "gf/ssrf.txt" ] && cat gf/ssrf.txt | anew -q gf/redirect.txt gf rce webs/url_extract.txt | anew -q gf/rce.txt gf potential webs/url_extract.txt | cut -d ':' -f3-5 |anew -q gf/potential.txt - cat .tmp/url_extract_tmp.txt | grep -Eiv "\.(eot|jpg|jpeg|gif|css|tif|tiff|png|ttf|otf|woff|woff2|ico|pdf|svg|txt|js)$" | unfurl -u format %s://%d%p | anew -q gf/endpoints.txt + [ -s ".tmp/url_extract_tmp.txt" ] && cat .tmp/url_extract_tmp.txt | grep -Eiv "\.(eot|jpg|jpeg|gif|css|tif|tiff|png|ttf|otf|woff|woff2|ico|pdf|svg|txt|js)$" | unfurl -u format %s://%d%p | anew -q gf/endpoints.txt gf lfi webs/url_extract.txt | anew -q gf/lfi.txt end_func "Results are saved in $domain/gf folder" ${FUNCNAME[0]} else @@ -953,7 +979,7 @@ function url_ext(){ NUMOFLINES=$(cat .tmp/url_extract_tmp.txt | grep -Ei "\.(${t})($|\/|\?)" | sort -u | wc -l) if [[ ${NUMOFLINES} -gt 0 ]]; then echo -e "\n############################\n + ${t} + \n############################\n" >> webs/urls_by_ext.txt - cat .tmp/url_extract_tmp.txt | grep -Ei "\.(${t})($|\/|\?)" | sort -u >> webs/urls_by_ext.txt + [ -s ".tmp/url_extract_tmp.txt" ] && cat .tmp/url_extract_tmp.txt | grep -Ei "\.(${t})($|\/|\?)" | sort -u >> webs/urls_by_ext.txt fi done end_func "Results are saved in $domain/webs/urls_by_ext.txt" ${FUNCNAME[0]} @@ -974,7 +1000,8 @@ function jschecks(){ cat js/url_extract_js.txt | cut -d '?' 
-f 1 | grep -iE "\.js$" | grep "$domain$" | anew -q js/jsfile_links.txt cat js/url_extract_js.txt | subjs | grep "$domain$" | anew -q js/jsfile_links.txt printf "${yellow} Running : Resolving JS Urls 2/5${reset}\n" - axiom-scan js/jsfile_links.txt -m httpx -follow-redirects -random-agent -silent -timeout $HTTPX_TIMEOUT -threads $HTTPX_THREADS -status-code -retries 2 -no-color -o .tmp/js_livelinks.txt &>>"$LOGFILE" && cat .tmp/js_livelinks.txt | grep "[200]" | cut -d ' ' -f1 | anew -q js/js_livelinks.txt + axiom-scan js/jsfile_links.txt -m httpx -follow-redirects -random-agent -silent -timeout $HTTPX_TIMEOUT -threads $HTTPX_THREADS -status-code -retries 2 -no-color -o .tmp/js_livelinks.txt &>>"$LOGFILE" + [ -s ".tmp/js_livelinks.txt" ] && cat .tmp/js_livelinks.txt | grep "[200]" | cut -d ' ' -f1 | anew -q js/js_livelinks.txt printf "${yellow} Running : Gathering endpoints 3/5${reset}\n" if [ -s "js/js_livelinks.txt" ]; then interlace -tL js/js_livelinks.txt -threads 10 -c "python3 $tools/LinkFinder/linkfinder.py -d -i _target_ -o cli >> .tmp/js_endpoints.txt" &>/dev/null @@ -984,13 +1011,9 @@ function jschecks(){ cat .tmp/js_endpoints.txt | anew -q js/js_endpoints.txt fi printf "${yellow} Running : Gathering secrets 4/5${reset}\n" - if [ -s "js/js_livelinks.txt" ]; then - axiom-scan js/js_livelinks.txt -m nuclei -w /home/op/recon/nuclei/exposures/tokens/ -r /home/op/lists/resolvers_trusted.txt -o js/js_secrets.txt &>>"$LOGFILE" - fi + [ -s "js/js_livelinks.txt" ] && axiom-scan js/js_livelinks.txt -m nuclei -w /home/op/recon/nuclei/exposures/tokens/ -r /home/op/lists/resolvers_trusted.txt -o js/js_secrets.txt &>>"$LOGFILE" printf "${yellow} Running : Building wordlist 5/5${reset}\n" - if [ -s "js/js_livelinks.txt" ]; then - cat js/js_livelinks.txt | python3 $tools/getjswords.py 2>>"$LOGFILE" | anew -q webs/dict_words.txt - fi + [ -s "js/js_livelinks.txt" ] && cat js/js_livelinks.txt | python3 $tools/getjswords.py 2>>"$LOGFILE" | anew -q webs/dict_words.txt end_func "Results are saved in $domain/js folder" ${FUNCNAME[0]} else end_func "No JS urls found for $domain, function skipped" ${FUNCNAME[0]} @@ -1007,13 +1030,13 @@ function jschecks(){ function wordlist_gen(){ if { [ ! 
-f "$called_fn_dir/.${FUNCNAME[0]}" ] || [ "$DIFF" = true ]; } && [ "$WORDLIST" = true ]; then start_func "Wordlist generation" - cat .tmp/url_extract_tmp.txt | unfurl -u keys | sed 's/[][]//g' | sed 's/[#]//g' | sed 's/[}{]//g' | anew -q webs/dict_words.txt - cat .tmp/url_extract_tmp.txt | unfurl -u values | sed 's/[][]//g' | sed 's/[#]//g' | sed 's/[}{]//g' | anew -q webs/dict_words.txt - cat .tmp/url_extract_tmp.txt | tr "[:punct:]" "\n" | anew -q webs/dict_words.txt - if [ -s ".tmp/js_endpoints.txt" ]; then - cat .tmp/js_endpoints.txt | unfurl -u path | anew -q webs/dict_paths.txt + if [ -s ".tmp/url_extract_tmp.txt" ]; then + cat .tmp/url_extract_tmp.txt | unfurl -u keys | sed 's/[][]//g' | sed 's/[#]//g' | sed 's/[}{]//g' | anew -q webs/dict_words.txt + cat .tmp/url_extract_tmp.txt | unfurl -u values | sed 's/[][]//g' | sed 's/[#]//g' | sed 's/[}{]//g' | anew -q webs/dict_words.txt + cat .tmp/url_extract_tmp.txt | tr "[:punct:]" "\n" | anew -q webs/dict_words.txt fi - cat .tmp/url_extract_tmp.txt | unfurl -u path | anew -q webs/dict_paths.txt + [ -s ".tmp/js_endpoints.txt" ] && cat .tmp/js_endpoints.txt | unfurl -u path | anew -q webs/dict_paths.txt + [ -s ".tmp/url_extract_tmp.txt" ] && cat .tmp/url_extract_tmp.txt | unfurl -u path | anew -q webs/dict_paths.txt end_func "Results are saved in $domain/webs/dict_[words|paths].txt" ${FUNCNAME[0]} else if [ "$WORDLIST" = false ]; then @@ -1033,13 +1056,13 @@ function brokenLinks(){ start_func "Broken links checks" if [ ! -s ".tmp/gospider.txt" ]; then if [ "$DEEP" = true ]; then - axiom-scan .tmp/probed_tmp_scrap.txt -m gospider --js -d 3 --sitemap --robots -w -r -o .tmp/gospider &>>"$LOGFILE" + [ -s "webs/webs.txt" ] && axiom-scan webs/webs.txt -m gospider --js -d 3 --sitemap --robots -w -r -o .tmp/gospider &>>"$LOGFILE" else - axiom-scan .tmp/probed_tmp_scrap.txt -m gospider --js -d 2 --sitemap --robots -w -r -o .tmp/gospider &>>"$LOGFILE" + [ -s "webs/webs.txt" ] && axiom-scan webs/webs.txt -m gospider --js -d 2 --sitemap --robots -w -r -o .tmp/gospider &>>"$LOGFILE" fi cat .tmp/gospider/* | sed '/^.\{2048\}./d' | anew -q .tmp/gospider.txt fi - cat .tmp/gospider.txt | grep -Eo 'https?://[^ ]+' | sed 's/]$//' | sort -u | httpx -follow-redirects -random-agent -status-code -threads $HTTPX_THREADS -timeout $HTTPX_TIMEOUT -silent -retries 2 -no-color | grep "\[4" | cut -d ' ' -f1 | anew -q .tmp/brokenLinks_total.txt + [ -s ".tmp/gospider.txt" ] && cat .tmp/gospider.txt | grep -Eo 'https?://[^ ]+' | sed 's/]$//' | sort -u | httpx -follow-redirects -random-agent -status-code -threads $HTTPX_THREADS -timeout $HTTPX_TIMEOUT -silent -retries 2 -no-color | grep "\[4" | cut -d ' ' -f1 | anew -q .tmp/brokenLinks_total.txt NUMOFLINES=$(cat .tmp/brokenLinks_total.txt 2>>"$LOGFILE" | anew webs/brokenLinks.txt | wc -l) notification "${NUMOFLINES} new broken links found" info end_func "Results are saved in webs/brokenLinks.txt" ${FUNCNAME[0]} @@ -1055,13 +1078,13 @@ function brokenLinks(){ function xss(){ if { [ ! 
-f "$called_fn_dir/.${FUNCNAME[0]}" ] || [ "$DIFF" = true ]; } && [ "$XSS" = true ] && [ -s "gf/xss.txt" ]; then start_func "XSS Analysis" - cat gf/xss.txt | qsreplace FUZZ | Gxss -c 100 -p Xss | anew -q .tmp/xss_reflected.txt + [ -s "gf/xss.txt" ] && cat gf/xss.txt | qsreplace FUZZ | Gxss -c 100 -p Xss | anew -q .tmp/xss_reflected.txt if [ "$DEEP" = true ]; then if [ -n "$XSS_SERVER" ]; then - axiom-scan .tmp/xss_reflected.txt -m dalfox --mass --mass-worker 100 --multicast --skip-bav -b ${XSS_SERVER} -w $DALFOX_THREADS -o vulns/xss.txt &>>"$LOGFILE" + [ -s ".tmp/xss_reflected.txt" ] && axiom-scan .tmp/xss_reflected.txt -m dalfox --mass --mass-worker 100 --multicast --skip-bav -b ${XSS_SERVER} -w $DALFOX_THREADS -o vulns/xss.txt &>>"$LOGFILE" else printf "${yellow}\n No XSS_SERVER defined, blind xss skipped\n\n" - axiom-scan .tmp/xss_reflected.txt -m dalfox --mass --mass-worker 100 --multicast --skip-bav -w $DALFOX_THREADS -o vulns/xss.txt &>>"$LOGFILE" + [ -s ".tmp/xss_reflected.txt" ] && axiom-scan .tmp/xss_reflected.txt -m dalfox --mass --mass-worker 100 --multicast --skip-bav -w $DALFOX_THREADS -o vulns/xss.txt &>>"$LOGFILE" fi else if [[ $(cat .tmp/xss_reflected.txt | wc -l) -le 500 ]]; then @@ -1091,7 +1114,7 @@ function cors(){ if { [ ! -f "$called_fn_dir/.${FUNCNAME[0]}" ] || [ "$DIFF" = true ]; } && [ "$CORS" = true ]; then start_func "CORS Scan" python3 $tools/Corsy/corsy.py -i webs/webs.txt > webs/cors.txt &>>"$LOGFILE" - cat webs/cors.txt 2>>"$LOGFILE" + [ -s "webs/cors.txt" ] && cat webs/cors.txt end_func "Results are saved in webs/cors.txt" ${FUNCNAME[0]} else if [ "$CORS" = false ]; then @@ -1106,9 +1129,11 @@ function open_redirect(){ if { [ ! -f "$called_fn_dir/.${FUNCNAME[0]}" ] || [ "$DIFF" = true ]; } && [ "$OPEN_REDIRECT" = true ] && [ -s "gf/redirect.txt" ]; then start_func "Open redirects checks" if [ "$DEEP" = true ]; then - cat gf/redirect.txt | qsreplace FUZZ | anew -q .tmp/tmp_redirect.txt - python3 $tools/OpenRedireX/openredirex.py -l .tmp/tmp_redirect.txt --keyword FUZZ -p $tools/OpenRedireX/payloads.txt 2>>"$LOGFILE" | grep "^http" > vulns/redirect.txt - sed -r -i "s/\x1B\[([0-9]{1,3}(;[0-9]{1,2})?)?[mGK]//g" vulns/redirect.txt + if [ -s "webs/cors.txt" ]; then + cat gf/redirect.txt | qsreplace FUZZ | anew -q .tmp/tmp_redirect.txt + python3 $tools/OpenRedireX/openredirex.py -l .tmp/tmp_redirect.txt --keyword FUZZ -p $tools/OpenRedireX/payloads.txt 2>>"$LOGFILE" | grep "^http" > vulns/redirect.txt + sed -r -i "s/\x1B\[([0-9]{1,3}(;[0-9]{1,2})?)?[mGK]//g" vulns/redirect.txt + fi end_func "Results are saved in vulns/redirect.txt" ${FUNCNAME[0]} else if [[ $(cat gf/redirect.txt | wc -l) -le 1000 ]]; then @@ -1137,14 +1162,16 @@ function ssrf_checks(){ if [ -n "$COLLAB_SERVER" ]; then start_func "SSRF checks" if [ "$DEEP" = true ]; then - cat gf/ssrf.txt | qsreplace FUZZ | anew -q .tmp/tmp_ssrf.txt - COLLAB_SERVER_FIX=$(echo $COLLAB_SERVER | sed -r "s/https?:\/\///") - echo $COLLAB_SERVER_FIX | anew -q .tmp/ssrf_server.txt - echo $COLLAB_SERVER | anew -q .tmp/ssrf_server.txt - for url in $(cat .tmp/tmp_ssrf.txt); do - ffuf -v -H "${HEADER}" -t $FFUF_THREADS -w .tmp/ssrf_server.txt -u $url &>/dev/null | grep "URL" | sed 's/| URL | //' | anew -q vulns/ssrf.txt - done - python3 $tools/ssrf.py $dir/gf/ssrf.txt $COLLAB_SERVER_FIX 2>>"$LOGFILE" | anew -q vulns/ssrf.txt + if [ -s "gf/ssrf.txt" ]; then + cat gf/ssrf.txt | qsreplace FUZZ | anew -q .tmp/tmp_ssrf.txt + COLLAB_SERVER_FIX=$(echo $COLLAB_SERVER | sed -r "s/https?:\/\///") + echo $COLLAB_SERVER_FIX | anew -q 
+				echo $COLLAB_SERVER | anew -q .tmp/ssrf_server.txt
+				for url in $(cat .tmp/tmp_ssrf.txt); do
+					ffuf -v -H "${HEADER}" -t $FFUF_THREADS -w .tmp/ssrf_server.txt -u $url &>/dev/null | grep "URL" | sed 's/| URL | //' | anew -q vulns/ssrf.txt
+				done
+				python3 $tools/ssrf.py $dir/gf/ssrf.txt $COLLAB_SERVER_FIX 2>>"$LOGFILE" | anew -q vulns/ssrf.txt
+			fi
 			end_func "Results are saved in vulns/ssrf.txt" ${FUNCNAME[0]}
 		else
 			if [[ $(cat gf/ssrf.txt | wc -l) -le 1000 ]]; then
@@ -1194,10 +1221,12 @@ function crlf_checks(){
 
 function lfi(){
 	if { [ ! -f "$called_fn_dir/.${FUNCNAME[0]}" ] || [ "$DIFF" = true ]; } && [ "$LFI" = true ] && [ -s "gf/lfi.txt" ]; then
 		start_func "LFI checks"
-		cat gf/lfi.txt | qsreplace FUZZ | anew -q .tmp/tmp_lfi.txt
-		for url in $(cat .tmp/tmp_lfi.txt); do
-			ffuf -v -t $FFUF_THREADS -H "${HEADER}" -w $lfi_wordlist -u $url -mr "root:" &>/dev/null | grep "URL" | sed 's/| URL | //' | anew -q vulns/lfi.txt
-		done
+		if [ -s "gf/lfi.txt" ]; then
+			cat gf/lfi.txt | qsreplace FUZZ | anew -q .tmp/tmp_lfi.txt
+			for url in $(cat .tmp/tmp_lfi.txt); do
+				ffuf -v -t $FFUF_THREADS -H "${HEADER}" -w $lfi_wordlist -u $url -mr "root:" &>/dev/null | grep "URL" | sed 's/| URL | //' | anew -q vulns/lfi.txt
+			done
+		fi
 		end_func "Results are saved in vulns/lfi.txt" ${FUNCNAME[0]}
 	else
 		if [ "$LFI" = false ]; then
@@ -1213,10 +1242,12 @@ function lfi(){
 
 function ssti(){
 	if { [ ! -f "$called_fn_dir/.${FUNCNAME[0]}" ] || [ "$DIFF" = true ]; } && [ "$SSTI" = true ] && [ -s "gf/ssti.txt" ]; then
 		start_func "SSTI checks"
-		cat gf/ssti.txt | qsreplace FUZZ | anew -q .tmp/tmp_ssti.txt
-		for url in $(cat .tmp/tmp_ssti.txt); do
-			ffuf -v -t $FFUF_THREADS -H "${HEADER}" -w $ssti_wordlist -u $url -mr "ssti49" &>/dev/null | grep "URL" | sed 's/| URL | //' | anew -q vulns/ssti.txt
-		done
+		if [ -s "gf/ssti.txt" ]; then
+			cat gf/ssti.txt | qsreplace FUZZ | anew -q .tmp/tmp_ssti.txt
+			for url in $(cat .tmp/tmp_ssti.txt); do
+				ffuf -v -t $FFUF_THREADS -H "${HEADER}" -w $ssti_wordlist -u $url -mr "ssti49" &>/dev/null | grep "URL" | sed 's/| URL | //' | anew -q vulns/ssti.txt
+			done
+		fi
 		end_func "Results are saved in vulns/ssti.txt" ${FUNCNAME[0]}
 	else
 		if [ "$SSTI" = false ]; then
@@ -1232,9 +1263,11 @@ function ssti(){
 
 function sqli(){
 	if { [ ! -f "$called_fn_dir/.${FUNCNAME[0]}" ] || [ "$DIFF" = true ]; } && [ "$SQLI" = true ] && [ -s "gf/sqli.txt" ]; then
-f "$called_fn_dir/.${FUNCNAME[0]}" ] || [ "$DIFF" = true ]; } && [ "$SQLI" = true ] && [ -s "gf/sqli.txt" ]; then start_func "SQLi checks" - cat gf/sqli.txt | qsreplace FUZZ | anew -q .tmp/tmp_sqli.txt - interlace -tL .tmp/tmp_sqli.txt -threads 10 -c "python3 $tools/sqlmap/sqlmap.py -u _target_ -b --batch --disable-coloring --random-agent --output-dir=sqlmap" &>/dev/null - end_func "Results are saved in sqlmap folder" ${FUNCNAME[0]} + if [ -s "gf/sqli.txt" ]; then + cat gf/sqli.txt | qsreplace FUZZ | anew -q .tmp/tmp_sqli.txt + interlace -tL .tmp/tmp_sqli.txt -threads 10 -c "python3 $tools/sqlmap/sqlmap.py -u _target_ -b --batch --disable-coloring --random-agent --output-dir=vulns/sqlmap" &>/dev/null + fi + end_func "Results are saved in vulns/sqlmap folder" ${FUNCNAME[0]} else if [ "$SQLI" = false ]; then printf "\n${yellow} ${FUNCNAME[0]} skipped in this mode or defined in reconftw.cfg ${reset}\n" @@ -1282,7 +1315,7 @@ function 4xxbypass(){ start_func "403 bypass" cat fuzzing/*.txt 2>>"$LOGFILE" | grep -E '^4' | grep -Ev '^404' | cut -d ' ' -f3 > .tmp/dirdar_test.txt axiom-scan .tmp/dirdar_test.txt -m dirdar -threads $DIRDAR_THREADS -only-ok > .tmp/dirdar.txt - cat .tmp/dirdar.txt 2>>"$LOGFILE" | sed -e '1,12d' | sed '/^$/d' | anew -q vulns/4xxbypass.txt + [ -s ".tmp/dirdar.txt" ] && cat .tmp/dirdar.txt | sed -e '1,12d' | sed '/^$/d' | anew -q vulns/4xxbypass.txt end_func "Results are saved in vulns/4xxbypass.txt" ${FUNCNAME[0]} else notification "Too many urls to bypass, skipping" warn @@ -1299,8 +1332,8 @@ function 4xxbypass(){ function command_injection(){ if { [ ! -f "$called_fn_dir/.${FUNCNAME[0]}" ] || [ "$DIFF" = true ]; } && [ "$COMM_INJ" = true ] && [ -s "gf/rce.txt" ]; then start_func "Command Injection checks" - cat gf/rce.txt | qsreplace FUZZ | anew -q .tmp/tmp_rce.txt - python3 $tools/commix/commix.py --batch -m .tmp/tmp_rce.txt --output-dir vulns/command_injection + [ -s "gf/rce.txt" ] && cat gf/rce.txt | qsreplace FUZZ | anew -q .tmp/tmp_rce.txt + [ -s ".tmp/tmp_rce.txt" ] && python3 $tools/commix/commix.py --batch -m .tmp/tmp_rce.txt --output-dir vulns/command_injection #axiom_scan .tmp/tmp_rce.txt -m commix -o vulns/command_injection end_func "Results are saved in vulns/command_injection folder" ${FUNCNAME[0]} else @@ -1614,7 +1647,7 @@ function end(){ if [ "$REMOVETMP" = true ]; then rm -rf $dir/.tmp fi - + if [ "$REMOVELOG" = true ]; then rm -rf $dir/.log fi From 872ad31d78c2e8b04c8d29f650b963979813f212 Mon Sep 17 00:00:00 2001 From: six2dez Date: Tue, 18 May 2021 16:58:29 +0200 Subject: [PATCH 24/32] Some code refactoring && more error checks --- reconftw.sh | 150 +++++++++++++++++++--------------------------- reconftw_axiom.sh | 135 ++++++++++++++++------------------------- 2 files changed, 111 insertions(+), 174 deletions(-) diff --git a/reconftw.sh b/reconftw.sh index 79d7abc3..a8f71920 100755 --- a/reconftw.sh +++ b/reconftw.sh @@ -192,7 +192,8 @@ function emails(){ cd "$tools/pwndb" || { echo "Failed to cd directory in ${FUNCNAME[0]} @ line ${LINENO}"; exit 1; } python3 pwndb.py --target "@${domain}" | sed '/^[-]/d' | anew -q $dir/osint/passwords.txt cd "$dir" || { echo "Failed to cd directory in ${FUNCNAME[0]} @ line ${LINENO}"; exit 1; } - sed -r -i "s/\x1B\[([0-9]{1,3}(;[0-9]{1,2})?)?[mGK]//g" osint/passwords.txt + [ -f "osint/passwords.txt" ] && sed -r -i "s/\x1B\[([0-9]{1,3}(;[0-9]{1,2})?)?[mGK]//g" osint/passwords.txt + [ -f "osint/passwords.txt" ] && sed -i '1,2d' osint/passwords.txt else text="${yellow}\n pwndb is currently down :(\n\n Check 
diff --git a/reconftw.sh b/reconftw.sh
index 79d7abc3..a8f71920 100755
--- a/reconftw.sh
+++ b/reconftw.sh
@@ -192,7 +192,8 @@ function emails(){
 		cd "$tools/pwndb" || { echo "Failed to cd directory in ${FUNCNAME[0]} @ line ${LINENO}"; exit 1; }
 		python3 pwndb.py --target "@${domain}" | sed '/^[-]/d' | anew -q $dir/osint/passwords.txt
 		cd "$dir" || { echo "Failed to cd directory in ${FUNCNAME[0]} @ line ${LINENO}"; exit 1; }
-		sed -r -i "s/\x1B\[([0-9]{1,3}(;[0-9]{1,2})?)?[mGK]//g" osint/passwords.txt
+		[ -f "osint/passwords.txt" ] && sed -r -i "s/\x1B\[([0-9]{1,3}(;[0-9]{1,2})?)?[mGK]//g" osint/passwords.txt
+		[ -f "osint/passwords.txt" ] && sed -i '1,2d' osint/passwords.txt
 	else
 		text="${yellow}\n pwndb is currently down :(\n\n Check xjypo5vzgmo7jca6b322dnqbsdnp3amd24ybx26x5nxbusccjkm4pwid.onion${reset}\n"
 		printf "${text}" && printf "${text}" | $NOTIFY
@@ -256,12 +257,8 @@ function subdomains_full(){
 	NUMOFLINES_probed="0"
 	printf "${bgreen}#######################################################################\n\n"
 	printf "${bblue} Subdomain Enumeration $domain\n\n"
-	if [ -f "subdomains/subdomains.txt" ]; then
-		cp subdomains/subdomains.txt .tmp/subdomains_old.txt 2>>"$LOGFILE"
-	fi
-	if [ -f "webs/webs.txt" ]; then
-		cp webs/webs.txt .tmp/probed_old.txt 2>>"$LOGFILE"
-	fi
+	[ -s "subdomains/subdomains.txt" ] && cp subdomains/subdomains.txt .tmp/subdomains_old.txt
+	[ -s "webs/webs.txt" ] && cp webs/webs.txt .tmp/probed_old.txt
 
 	resolvers_update
 
@@ -284,9 +281,9 @@ function subdomains_full(){
 	fi
 	printf "${bblue}\n Total subdomains: ${reset}\n\n"
 	notification "- ${NUMOFLINES_subs} new alive subdomains" good
-	[ -f "subdomains/subdomains.txt" ] && cat subdomains/subdomains.txt 2>>"$LOGFILE" | sort
+	[ -s "subdomains/subdomains.txt" ] && cat subdomains/subdomains.txt | sort
 	notification "- ${NUMOFLINES_probed} new web probed" good
-	[ -f "webs/webs.txt" ] && cat webs/webs.txt 2>>"$LOGFILE" | sort
+	[ -s "webs/webs.txt" ] && cat webs/webs.txt | sort
 	notification "Subdomain Enumeration Finished" good
 	printf "${bblue} Results are saved in $domain/subdomains/subdomains.txt and webs/webs.txt${reset}\n"
 	printf "${bgreen}#######################################################################\n\n"
@@ -345,7 +342,7 @@ function sub_active(){
 		[ -s "${inScope_file}" ] && cat ${inScope_file} .tmp/inscope_subs.txt
 		cat .tmp/*_subs.txt | anew -q .tmp/subs_no_resolved.txt
 		deleteOutScoped $outOfScope_file .tmp/subs_no_resolved.txt
-		puredns resolve .tmp/subs_no_resolved.txt -w .tmp/subdomains_tmp.txt -r $resolvers --resolvers-trusted $resolvers_trusted -l $PUREDNS_PUBLIC_LIMIT --rate-limit-trusted $PUREDNS_TRUSTED_LIMIT &>>"$LOGFILE"
+		[ -s ".tmp/subs_no_resolved.txt" ] && puredns resolve .tmp/subs_no_resolved.txt -w .tmp/subdomains_tmp.txt -r $resolvers --resolvers-trusted $resolvers_trusted -l $PUREDNS_PUBLIC_LIMIT --rate-limit-trusted $PUREDNS_TRUSTED_LIMIT &>>"$LOGFILE"
 		echo $domain | dnsx -retry 3 -silent -r $resolvers_trusted 2>>"$LOGFILE" | anew -q .tmp/subdomains_tmp.txt
 		NUMOFLINES=$(cat .tmp/subdomains_tmp.txt 2>>"$LOGFILE" | grep "\.$domain$\|^$domain$" | anew subdomains/subdomains.txt | wc -l)
 		end_subfunc "${NUMOFLINES} new subs (active resolution)" ${FUNCNAME[0]}
 	else
@@ -357,9 +354,9 @@ function sub_dns(){
 	if [ ! -f "$called_fn_dir/.${FUNCNAME[0]}" ] || [ "$DIFF" = true ]; then
-f "$called_fn_dir/.${FUNCNAME[0]}" ] || [ "$DIFF" = true ]; then start_subfunc "Running : DNS Subdomain Enumeration" - dnsx -retry 3 -a -aaaa -cname -ns -ptr -mx -soa -resp -silent -l subdomains/subdomains.txt -o subdomains/subdomains_cname.txt -r $resolvers_trusted &>>"$LOGFILE" + [ -s "subdomains/subdomains.txt" ] && dnsx -retry 3 -a -aaaa -cname -ns -ptr -mx -soa -resp -silent -l subdomains/subdomains.txt -o subdomains/subdomains_cname.txt -r $resolvers_trusted &>>"$LOGFILE" [ -s "subdomains/subdomains_cname.txt" ] && cat subdomains/subdomains_cname.txt | cut -d '[' -f2 | sed 's/.$//' | grep ".$domain$" | anew -q .tmp/subdomains_dns.txt - puredns resolve .tmp/subdomains_dns.txt -w .tmp/subdomains_dns_resolved.txt -r $resolvers --resolvers-trusted $resolvers_trusted -l $PUREDNS_PUBLIC_LIMIT --rate-limit-trusted $PUREDNS_TRUSTED_LIMIT &>>"$LOGFILE" + [ -s ".tmp/subdomains_dns.txt" ] && puredns resolve .tmp/subdomains_dns.txt -w .tmp/subdomains_dns_resolved.txt -r $resolvers --resolvers-trusted $resolvers_trusted -l $PUREDNS_PUBLIC_LIMIT --rate-limit-trusted $PUREDNS_TRUSTED_LIMIT &>>"$LOGFILE" NUMOFLINES=$(cat .tmp/subdomains_dns_resolved.txt 2>>"$LOGFILE" | grep "\.$domain$\|^$domain$" | anew subdomains/subdomains.txt | wc -l) end_subfunc "${NUMOFLINES} new subs (dns resolution)" ${FUNCNAME[0]} else @@ -375,9 +372,7 @@ function sub_brute(){ else puredns bruteforce $subs_wordlist $domain -w .tmp/subs_brute.txt -r $resolvers --resolvers-trusted $resolvers_trusted -l $PUREDNS_PUBLIC_LIMIT --rate-limit-trusted $PUREDNS_TRUSTED_LIMIT &>>"$LOGFILE" fi - if [[ -s ".tmp/subs_brute.txt" ]]; then - puredns resolve .tmp/subs_brute.txt -w .tmp/subs_brute_valid.txt -r $resolvers --resolvers-trusted $resolvers_trusted -l $PUREDNS_PUBLIC_LIMIT --rate-limit-trusted $PUREDNS_TRUSTED_LIMIT &>>"$LOGFILE" - fi + [ -s ".tmp/subs_brute.txt" ] && puredns resolve .tmp/subs_brute.txt -w .tmp/subs_brute_valid.txt -r $resolvers --resolvers-trusted $resolvers_trusted -l $PUREDNS_PUBLIC_LIMIT --rate-limit-trusted $PUREDNS_TRUSTED_LIMIT &>>"$LOGFILE" NUMOFLINES=$(cat .tmp/subs_brute_valid.txt 2>>"$LOGFILE" | sed "s/*.//" | grep ".$domain$" | anew subdomains/subdomains.txt | wc -l) end_subfunc "${NUMOFLINES} new subs (bruteforce)" ${FUNCNAME[0]} else @@ -398,13 +393,13 @@ function sub_scraping(){ [ -s ".tmp/probed_tmp_scrap.txt" ] && cat .tmp/probed_tmp_scrap.txt | httpx -csp-probe -random-agent -status-code -threads $HTTPX_THREADS -timeout $HTTPX_TIMEOUT -silent -retries 2 -no-color | cut -d ' ' -f1 | grep ".$domain$" | anew .tmp/probed_tmp_scrap.txt | unfurl -u domains | anew -q .tmp/scrap_subs.txt [ -s ".tmp/probed_tmp_scrap.txt" ] && cat .tmp/probed_tmp_scrap.txt | httpx -tls-probe -random-agent -status-code -threads $HTTPX_THREADS -timeout $HTTPX_TIMEOUT -silent -retries 2 -no-color | cut -d ' ' -f1 | grep ".$domain$" | anew .tmp/probed_tmp_scrap.txt | unfurl -u domains | anew -q .tmp/scrap_subs.txt if [ "$DEEP" = true ]; then - [ -f ".tmp/probed_tmp_scrap.txt" ] && gospider -S .tmp/probed_tmp_scrap.txt --js -t $GOSPIDER_THREADS -d 3 --sitemap --robots -w -r > .tmp/gospider.txt + [ -s ".tmp/probed_tmp_scrap.txt" ] && gospider -S .tmp/probed_tmp_scrap.txt --js -t $GOSPIDER_THREADS -d 3 --sitemap --robots -w -r > .tmp/gospider.txt else - [ -f ".tmp/probed_tmp_scrap.txt" ] && gospider -S .tmp/probed_tmp_scrap.txt --js -t $GOSPIDER_THREADS -d 2 --sitemap --robots -w -r > .tmp/gospider.txt + [ -s ".tmp/probed_tmp_scrap.txt" ] && gospider -S .tmp/probed_tmp_scrap.txt --js -t $GOSPIDER_THREADS -d 2 --sitemap --robots 
+				[ -s ".tmp/probed_tmp_scrap.txt" ] && gospider -S .tmp/probed_tmp_scrap.txt --js -t $GOSPIDER_THREADS -d 2 --sitemap --robots -w -r > .tmp/gospider.txt
 			fi
 			sed -i '/^.\{2048\}./d' .tmp/gospider.txt
 			[ -s ".tmp/gospider.txt" ] && cat .tmp/gospider.txt | grep -Eo 'https?://[^ ]+' | sed 's/]$//' | unfurl --unique domains | grep ".$domain$" | anew -q .tmp/scrap_subs.txt
-			puredns resolve .tmp/scrap_subs.txt -w .tmp/scrap_subs_resolved.txt -r $resolvers --resolvers-trusted $resolvers_trusted -l $PUREDNS_PUBLIC_LIMIT --rate-limit-trusted $PUREDNS_TRUSTED_LIMIT &>>"$LOGFILE"
+			[ -s ".tmp/scrap_subs.txt" ] && puredns resolve .tmp/scrap_subs.txt -w .tmp/scrap_subs_resolved.txt -r $resolvers --resolvers-trusted $resolvers_trusted -l $PUREDNS_PUBLIC_LIMIT --rate-limit-trusted $PUREDNS_TRUSTED_LIMIT &>>"$LOGFILE"
 			NUMOFLINES=$(cat .tmp/scrap_subs_resolved.txt 2>>"$LOGFILE" | grep "\.$domain$\|^$domain$" | anew subdomains/subdomains.txt | tee .tmp/diff_scrap.txt | wc -l)
 			[ -s ".tmp/diff_scrap.txt" ] && cat .tmp/diff_scrap.txt | httpx -follow-host-redirects -random-agent -status-code -threads $HTTPX_THREADS -timeout $HTTPX_TIMEOUT -silent -retries 2 -no-color | cut -d ' ' -f1 | grep ".$domain$" | anew -q .tmp/probed_tmp_scrap.txt
 			end_subfunc "${NUMOFLINES} new subs (code scraping)" ${FUNCNAME[0]}
@@ -423,45 +418,19 @@ function sub_permut(){
 	if { [ ! -f "$called_fn_dir/.${FUNCNAME[0]}" ] || [ "$DIFF" = true ]; } && [ "$SUBPERMUTE" = true ]; then
 		start_subfunc "Running : Permutations Subdomain Enumeration"
-		if [ "$DEEP" = true ]; then
-			DNScewl --tL subdomains/subdomains.txt -p $tools/permutations_list.txt --level=0 --subs --no-color 2>>"$LOGFILE" | tail -n +14 | grep ".$domain$" > .tmp/DNScewl1.txt
-			puredns resolve .tmp/DNScewl1.txt -w .tmp/permute1_tmp.txt -r $resolvers --resolvers-trusted $resolvers_trusted -l $PUREDNS_PUBLIC_LIMIT --rate-limit-trusted $PUREDNS_TRUSTED_LIMIT &>>"$LOGFILE"
-			cat .tmp/permute1_tmp.txt 2>>"$LOGFILE" | anew -q .tmp/permute1.txt
-			DNScewl --tL .tmp/permute1.txt -p $tools/permutations_list.txt --level=0 --subs --no-color 2>>"$LOGFILE" | tail -n +14 | grep ".$domain$" > .tmp/DNScewl2.txt
-			puredns resolve .tmp/DNScewl2.txt -w .tmp/permute2_tmp.txt -r $resolvers --resolvers-trusted $resolvers_trusted -l $PUREDNS_PUBLIC_LIMIT --rate-limit-trusted $PUREDNS_TRUSTED_LIMIT &>>"$LOGFILE"
-			cat .tmp/permute2_tmp.txt 2>>"$LOGFILE" | anew -q .tmp/permute2.txt
-			cat .tmp/permute1.txt .tmp/permute2.txt 2>>"$LOGFILE" | anew -q .tmp/permute_subs.txt
-		else
-			if [[ $(cat .tmp/subs_no_resolved.txt | wc -l) -le 100 ]]; then
-				DNScewl --tL .tmp/subs_no_resolved.txt -p $tools/permutations_list.txt --level=0 --subs --no-color 2>>"$LOGFILE" | tail -n +14 | grep ".$domain$" > .tmp/DNScewl1.txt
-				puredns resolve .tmp/DNScewl1.txt -w .tmp/permute1_tmp.txt -r $resolvers --resolvers-trusted $resolvers_trusted -l $PUREDNS_PUBLIC_LIMIT --rate-limit-trusted $PUREDNS_TRUSTED_LIMIT &>>"$LOGFILE"
-				cat .tmp/permute1_tmp.txt 2>>"$LOGFILE" | anew -q .tmp/permute1.txt
-				DNScewl --tL .tmp/permute1.txt -p $tools/permutations_list.txt --level=0 --subs --no-color 2>>"$LOGFILE" | tail -n +14 | grep ".$domain$" > .tmp/DNScewl2.txt
-				puredns resolve .tmp/DNScewl2.txt -w .tmp/permute2_tmp.txt -r $resolvers --resolvers-trusted $resolvers_trusted -l $PUREDNS_PUBLIC_LIMIT --rate-limit-trusted $PUREDNS_TRUSTED_LIMIT &>>"$LOGFILE"
-				cat .tmp/permute2_tmp.txt 2>>"$LOGFILE" | anew -q .tmp/permute2.txt
-				cat .tmp/permute1.txt .tmp/permute2.txt 2>>"$LOGFILE" | anew -q .tmp/permute_subs.txt
-			elif [[ $(cat .tmp/subs_no_resolved.txt | wc -l) -le 200 ]]; then
-				DNScewl --tL .tmp/subs_no_resolved.txt -p $tools/permutations_list.txt --level=0 --subs --no-color 2>>"$LOGFILE" | tail -n +14 | grep ".$domain$" > .tmp/DNScewl1.txt
-				puredns resolve .tmp/DNScewl1.txt -w .tmp/permute_tmp.txt -r $resolvers --resolvers-trusted $resolvers_trusted -l $PUREDNS_PUBLIC_LIMIT --rate-limit-trusted $PUREDNS_TRUSTED_LIMIT &>>"$LOGFILE"
-				cat .tmp/permute_tmp.txt 2>>"$LOGFILE" | anew -q .tmp/permute_subs.txt
-			else
-				if [[ $(cat subdomains/subdomains.txt | wc -l) -le 100 ]]; then
-					DNScewl --tL subdomains/subdomains.txt -p $tools/permutations_list.txt --level=0 --subs --no-color 2>>"$LOGFILE" | tail -n +14 | grep ".$domain$" > .tmp/DNScewl1.txt
-					puredns resolve .tmp/DNScewl1.txt -w .tmp/permute1_tmp.txt -r $resolvers --resolvers-trusted $resolvers_trusted -l $PUREDNS_PUBLIC_LIMIT --rate-limit-trusted $PUREDNS_TRUSTED_LIMIT &>>"$LOGFILE"
-					cat .tmp/permute1_tmp.txt 2>>"$LOGFILE" | anew -q .tmp/permute1.txt
-					DNScewl --tL .tmp/permute1.txt -p $tools/permutations_list.txt --level=0 --subs --no-color 2>>"$LOGFILE" | tail -n +14 | grep ".$domain$" > .tmp/DNScewl2.txt
-					puredns resolve .tmp/DNScewl2.txt -w .tmp/permute2_tmp.txt -r $resolvers --resolvers-trusted $resolvers_trusted -l $PUREDNS_PUBLIC_LIMIT --rate-limit-trusted $PUREDNS_TRUSTED_LIMIT &>>"$LOGFILE"
-					cat .tmp/permute2_tmp.txt 2>>"$LOGFILE" | anew -q .tmp/permute2.txt
-					cat .tmp/permute1.txt .tmp/permute2.txt 2>>"$LOGFILE" | anew -q .tmp/permute_subs.txt
-				elif [[ $(cat subdomains/subdomains.txt | wc -l) -le 200 ]]; then
-					DNScewl --tL subdomains/subdomains.txt -p $tools/permutations_list.txt --level=0 --subs --no-color 2>>"$LOGFILE" | tail -n +14 | grep ".$domain$" > .tmp/DNScewl1.txt
-					puredns resolve .tmp/DNScewl1.txt -w .tmp/permute_tmp.txt -r $resolvers --resolvers-trusted $resolvers_trusted -l $PUREDNS_PUBLIC_LIMIT --rate-limit-trusted $PUREDNS_TRUSTED_LIMIT &>>"$LOGFILE"
-					cat .tmp/permute_tmp.txt 2>>"$LOGFILE" | anew -q .tmp/permute_subs.txt
-				else
-					printf "\n${bred} Skipping Permutations: Too Many Subdomains${reset}\n\n"
-				fi
-			fi
-		fi
+
+		[ "$DEEP" = true ] && [ -s "subdomains/subdomains.txt" ] && DNScewl --tL subdomains/subdomains.txt -p $tools/permutations_list.txt --level=0 --subs --no-color 2>>"$LOGFILE" | tail -n +14 | grep ".$domain$" > .tmp/DNScewl1.txt
+		[ "$DEEP" = false ] && [ $(cat .tmp/subs_no_resolved.txt | wc -l) -le 100 ] && DNScewl --tL .tmp/subs_no_resolved.txt -p $tools/permutations_list.txt --level=0 --subs --no-color 2>>"$LOGFILE" | tail -n +14 | grep ".$domain$" > .tmp/DNScewl1.txt
+		[ "$DEEP" = false ] && [ $(cat .tmp/subs_no_resolved.txt | wc -l) -gt 100 ] && [ $(cat .tmp/subs_no_resolved.txt | wc -l) -le 200 ] && DNScewl --tL .tmp/subs_no_resolved.txt -p $tools/permutations_list.txt --level=0 --subs --no-color 2>>"$LOGFILE" | tail -n +14 | grep ".$domain$" > .tmp/DNScewl1.txt
+		[ "$DEEP" = false ] && [ $(cat .tmp/subs_no_resolved.txt | wc -l) -gt 200 ] && [ $(cat subdomains/subdomains.txt | wc -l) -le 100 ] && DNScewl --tL subdomains/subdomains.txt -p $tools/permutations_list.txt --level=0 --subs --no-color 2>>"$LOGFILE" | tail -n +14 | grep ".$domain$" > .tmp/DNScewl1.txt
+		[ -s ".tmp/DNScewl1.txt" ] && puredns resolve .tmp/DNScewl1.txt -w .tmp/permute1_tmp.txt -r $resolvers --resolvers-trusted $resolvers_trusted -l $PUREDNS_PUBLIC_LIMIT --rate-limit-trusted $PUREDNS_TRUSTED_LIMIT &>>"$LOGFILE"
+		[ -s ".tmp/permute1_tmp.txt" ] && cat .tmp/permute1_tmp.txt | anew -q .tmp/permute1.txt
+		[ -s ".tmp/permute1.txt" ] && DNScewl --tL .tmp/permute1.txt -p $tools/permutations_list.txt --level=0 --subs --no-color 2>>"$LOGFILE" | tail -n +14 | grep ".$domain$" > .tmp/DNScewl2.txt
+		[ -s ".tmp/DNScewl2.txt" ] && puredns resolve .tmp/DNScewl2.txt -w .tmp/permute2_tmp.txt -r $resolvers --resolvers-trusted $resolvers_trusted -l $PUREDNS_PUBLIC_LIMIT --rate-limit-trusted $PUREDNS_TRUSTED_LIMIT &>>"$LOGFILE"
+		[ -s ".tmp/permute2_tmp.txt" ] && cat .tmp/permute2_tmp.txt | anew -q .tmp/permute2.txt
+
+		cat .tmp/permute1.txt .tmp/permute2.txt 2>>"$LOGFILE" | anew -q .tmp/permute_subs.txt
+
 		if [ -f ".tmp/permute_subs.txt" ]; then
 			deleteOutScoped $outOfScope_file .tmp/permute_subs.txt
 			NUMOFLINES=$(cat .tmp/permute_subs.txt 2>>"$LOGFILE" | grep ".$domain$" | anew subdomains/subdomains.txt | wc -l)
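The refactor above replaces the nested DEEP/size branches with a flat chain of mutually exclusive one-line guards: exactly one `DNScewl` invocation fires, chosen by `$DEEP` and by how many lines the candidate lists hold, and everything downstream keys off whether `.tmp/DNScewl1.txt` ended up non-empty. A reduced sketch of the selection logic, with generic names used purely for illustration:

    lines=$(wc -l < candidates.txt)
    # exactly one of these assignments can succeed
    [ "$DEEP" = true ] && src=all_subs.txt
    [ "$DEEP" = false ] && [ "$lines" -le 100 ] && src=candidates.txt
    [ "$DEEP" = false ] && [ "$lines" -gt 100 ] && [ "$lines" -le 200 ] && src=candidates.txt
    # above the threshold, src stays unset and the stage is skipped
    [ -n "$src" ] && generate_permutations "$src" > permutations.txt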
2>>"$LOGFILE" | tail -n +14 | grep ".$domain$" > .tmp/DNScewl2.txt + [ -s ".tmp/DNScewl2.txt" ] && puredns resolve .tmp/DNScewl2.txt -w .tmp/permute2_tmp.txt -r $resolvers --resolvers-trusted $resolvers_trusted -l $PUREDNS_PUBLIC_LIMIT --rate-limit-trusted $PUREDNS_TRUSTED_LIMIT &>>"$LOGFILE" + [ -s ".tmp/permute2_tmp.txt" ] && cat .tmp/permute2_tmp.txt | anew -q .tmp/permute2.txt + + cat .tmp/permute1.txt .tmp/permute2.txt 2>>"$LOGFILE" | anew -q .tmp/permute_subs.txt + if [ -f ".tmp/permute_subs.txt" ]; then deleteOutScoped $outOfScope_file .tmp/permute_subs.txt NUMOFLINES=$(cat .tmp/permute_subs.txt 2>>"$LOGFILE" | grep ".$domain$" | anew subdomains/subdomains.txt | wc -l) @@ -489,8 +458,8 @@ function sub_recursive(){ amass enum -passive -d $sub.$domain -config $AMASS_CONFIG &>>"$LOGFILE" findomain --quiet -t $sub.$domain &>>"$LOGFILE" | anew -q .tmp/passive_recursive.txt done - puredns resolve .tmp/passive_recursive.txt -w .tmp/passive_recurs_tmp.txt -r $resolvers --resolvers-trusted $resolvers_trusted -l $PUREDNS_PUBLIC_LIMIT --rate-limit-trusted $PUREDNS_TRUSTED_LIMIT &>>"$LOGFILE" - [ -f ".tmp/passive_recurs_tmp.txt" ] && cat .tmp/passive_recurs_tmp.txt | anew -q subdomains/subdomains.txt + [ -s ".tmp/passive_recursive.txt" ] && puredns resolve .tmp/passive_recursive.txt -w .tmp/passive_recurs_tmp.txt -r $resolvers --resolvers-trusted $resolvers_trusted -l $PUREDNS_PUBLIC_LIMIT --rate-limit-trusted $PUREDNS_TRUSTED_LIMIT &>>"$LOGFILE" + [ -s ".tmp/passive_recurs_tmp.txt" ] && cat .tmp/passive_recurs_tmp.txt | anew -q subdomains/subdomains.txt # Bruteforce recursive if [[ $(cat subdomains/subdomains.txt | wc -l) -le 1000 ]]; then @@ -498,13 +467,13 @@ function sub_recursive(){ for sub in $(cat subdomains/subdomains.txt); do sed "s/$/.$sub/" $subs_wordlist >> .tmp/brute_recursive_wordlist.txt done - puredns resolve .tmp/brute_recursive_wordlist.txt -r $resolvers --resolvers-trusted $resolvers_trusted -l $PUREDNS_PUBLIC_LIMIT --rate-limit-trusted $PUREDNS_TRUSTED_LIMIT -w .tmp/brute_recursive_result.txt &>>"$LOGFILE" - cat .tmp/brute_recursive_result.txt | anew -q .tmp/brute_recursive.txt - DNScewl --tL .tmp/brute_recursive.txt -p $tools/permutations_list.txt --level=0 --subs --no-color 2>>"$LOGFILE" | tail -n +14 | grep ".$domain$" > .tmp/DNScewl1_recursive.txt - puredns resolve .tmp/DNScewl1_recursive.txt -w .tmp/permute1_recursive_tmp.txt -r $resolvers --resolvers-trusted $resolvers_trusted -l $PUREDNS_PUBLIC_LIMIT --rate-limit-trusted $PUREDNS_TRUSTED_LIMIT &>>"$LOGFILE" - cat .tmp/permute1_recursive_tmp.txt 2>>"$LOGFILE" | anew -q .tmp/permute1_recursive.txt - DNScewl --tL .tmp/permute1_recursive.txt -p $tools/permutations_list.txt --level=0 --subs --no-color 2>>"$LOGFILE" | tail -n +14 | grep ".$domain$" > .tmp/DNScewl2_recursive.txt - puredns resolve .tmp/DNScewl2_recursive.txt -w .tmp/permute2_recursive_tmp.txt -r $resolvers --resolvers-trusted $resolvers_trusted -l $PUREDNS_PUBLIC_LIMIT --rate-limit-trusted $PUREDNS_TRUSTED_LIMIT &>>"$LOGFILE" + [ -s ".tmp/brute_recursive_wordlist.txt" ] && puredns resolve .tmp/brute_recursive_wordlist.txt -r $resolvers --resolvers-trusted $resolvers_trusted -l $PUREDNS_PUBLIC_LIMIT --rate-limit-trusted $PUREDNS_TRUSTED_LIMIT -w .tmp/brute_recursive_result.txt &>>"$LOGFILE" + [ -s ".tmp/brute_recursive_result.txt" ] && cat .tmp/brute_recursive_result.txt | anew -q .tmp/brute_recursive.txt + [ -s ".tmp/brute_recursive.txt" ] && DNScewl --tL .tmp/brute_recursive.txt -p $tools/permutations_list.txt --level=0 --subs --no-color 2>>"$LOGFILE" | tail 
+			[ -s ".tmp/DNScewl1_recursive.txt" ] && puredns resolve .tmp/DNScewl1_recursive.txt -w .tmp/permute1_recursive_tmp.txt -r $resolvers --resolvers-trusted $resolvers_trusted -l $PUREDNS_PUBLIC_LIMIT --rate-limit-trusted $PUREDNS_TRUSTED_LIMIT &>>"$LOGFILE"
+			[ -s ".tmp/permute1_recursive_tmp.txt" ] && cat .tmp/permute1_recursive_tmp.txt 2>>"$LOGFILE" | anew -q .tmp/permute1_recursive.txt
+			[ -s ".tmp/permute1_recursive.txt" ] && DNScewl --tL .tmp/permute1_recursive.txt -p $tools/permutations_list.txt --level=0 --subs --no-color 2>>"$LOGFILE" | tail -n +14 | grep ".$domain$" > .tmp/DNScewl2_recursive.txt
+			[ -s ".tmp/DNScewl2_recursive.txt" ] && puredns resolve .tmp/DNScewl2_recursive.txt -w .tmp/permute2_recursive_tmp.txt -r $resolvers --resolvers-trusted $resolvers_trusted -l $PUREDNS_PUBLIC_LIMIT --rate-limit-trusted $PUREDNS_TRUSTED_LIMIT &>>"$LOGFILE"
 			cat .tmp/permute1_recursive.txt .tmp/permute2_recursive_tmp.txt 2>>"$LOGFILE" | anew -q .tmp/permute_recursive.txt
 			NUMOFLINES=$(cat .tmp/permute_recursive.txt .tmp/brute_recursive.txt 2>>"$LOGFILE" | grep "\.$domain$\|^$domain$" | anew subdomains/subdomains.txt | wc -l)
 			end_subfunc "${NUMOFLINES} new subs (recursive)" ${FUNCNAME[0]}
@@ -559,7 +528,7 @@ function zonetransfer(){
 
 function s3buckets(){
 	if { [ ! -f "$called_fn_dir/.${FUNCNAME[0]}" ] || [ "$DIFF" = true ]; } && [ "$S3BUCKETS" = true ]; then
 		start_func "AWS S3 buckets search"
-		s3scanner scan -f subdomains/subdomains.txt 2>>"$LOGFILE" | grep -iv "not_exist" | grep -iv "Warning:" | anew -q .tmp/s3buckets.txt
+		[ -s "subdomains/subdomains.txt" ] && s3scanner scan -f subdomains/subdomains.txt 2>>"$LOGFILE" | grep -iv "not_exist" | grep -iv "Warning:" | anew -q .tmp/s3buckets.txt
 		NUMOFLINES=$(cat .tmp/s3buckets.txt 2>>"$LOGFILE" | anew subdomains/s3buckets.txt | wc -l)
 		if [ "$NUMOFLINES" -gt 0 ]; then
 			notification "${NUMOFLINES} new S3 buckets found" info
@@ -610,7 +579,7 @@ function webprobe_full(){
 	if { [ ! -f "$called_fn_dir/.${FUNCNAME[0]}" ] || [ "$DIFF" = true ]; } && [ "$WEBPROBEFULL" = true ]; then
 		start_func "Http probing non standard ports"
-		sudo unimap --fast-scan -f subdomains/subdomains.txt --ports $UNCOMMON_PORTS_WEB -q -k --url-output | anew -q .tmp/nmap_uncommonweb.txt
+		[ -s "subdomains/subdomains.txt" ] && sudo unimap --fast-scan -f subdomains/subdomains.txt --ports $UNCOMMON_PORTS_WEB -q -k --url-output | anew -q .tmp/nmap_uncommonweb.txt
 		[ -s ".tmp/nmap_uncommonweb.txt" ] && cat .tmp/nmap_uncommonweb.txt | httpx -follow-host-redirects -random-agent -status-code -threads $HTTPX_UNCOMMONPORTS_THREADS -timeout $HTTPX_UNCOMMONPORTS_TIMEOUT -silent -retries 2 -no-color | cut -d ' ' -f1 | grep ".$domain" | anew -q .tmp/probed_uncommon_ports_tmp.txt
 
 		#timeout_secs=$(($(cat subdomains/subdomains.txt | wc -l)*5+10))
@@ -639,7 +608,7 @@ function screenshot(){
 	if { [ ! -f "$called_fn_dir/.${FUNCNAME[0]}" ] || [ "$DIFF" = true ]; } && [ "$WEBSCREENSHOT" = true ]; then
-f "$called_fn_dir/.${FUNCNAME[0]}" ] || [ "$DIFF" = true ]; } && [ "$WEBSCREENSHOT" = true ]; then start_func "Web Screenshots" cat webs/webs.txt webs/webs_uncommon_ports.txt 2>>"$LOGFILE" | anew -q .tmp/webs_screenshots.txt - webscreenshot --no-xserver -r chrome -i .tmp/webs_screenshots.txt -w $WEBSCREENSHOT_THREADS -o screenshots + [ -s ".tmp/webs_screenshots.txt" ] && webscreenshot --no-xserver -r chrome -i .tmp/webs_screenshots.txt -w $WEBSCREENSHOT_THREADS -o screenshots #gowitness file -f .tmp/webs_screenshots.txt --disable-logging 2>>"$LOGFILE" end_func "Results are saved in $domain/screenshots folder" ${FUNCNAME[0]} else @@ -665,7 +634,7 @@ function favicon(){ sed -i "s/|/\n/g" favicontest.txt cat favicontest.txt 2>>"$LOGFILE" mv favicontest.txt $dir/hosts/favicontest.txt 2>>"$LOGFILE" - rm favicontest.json 2>>"$LOGFILE" + rm -f favicontest.json 2>>"$LOGFILE" fi cd "$dir" || { echo "Failed to cd to $dir in ${FUNCNAME[0]} @ line ${LINENO}"; exit 1; } end_func "Results are saved in hosts/favicontest.txt" ${FUNCNAME[0]} @@ -696,7 +665,7 @@ function portscan(){ done fi if [ "$PORTSCAN_ACTIVE" = true ]; then - sudo nmap --top-ports 1000 -sV -n --max-retries 2 -Pn -iL .tmp/ips_nowaf.txt -oN hosts/portscan_active.txt -oG .tmp/nmap_grep.gnmap &>>"$LOGFILE" + [ -s ".tmp/ips_nowaf.txt" ] && sudo nmap --top-ports 1000 -sV -n --max-retries 2 -Pn -iL .tmp/ips_nowaf.txt -oN hosts/portscan_active.txt -oG .tmp/nmap_grep.gnmap &>>"$LOGFILE" fi end_func "Results are saved in hosts/portscan_[passive|active].txt" ${FUNCNAME[0]} else @@ -786,13 +755,14 @@ function fuzz(){ start_func "Web directory fuzzing" if [ -s "./webs/webs.txt" ]; then mkdir -p $dir/fuzzing + interlace -tL webs/webs.txt -threads 10 -c "ffuf -mc all -fc 404 -ac -t ${FFUF_THREADS} -sf -s -H \"${HEADER}\" -w ${fuzz_wordlist} -maxtime ${FFUF_MAXTIME} -u _target_/FUZZ -of csv -o _output_/_cleantarget_.csv -ac" -o fuzzing + for sub in $(cat webs/webs.txt); do - printf "${yellow}\n\n Running: Fuzzing in ${sub}${reset}\n" sub_out=$(echo $sub | sed -e 's|^[^/]*//||' -e 's|/.*$||') - ffuf -mc all -fc 404 -ac -t $FFUF_THREADS -sf -s -H "${HEADER}" -w $fuzz_wordlist -maxtime $FFUF_MAXTIME -u $sub/FUZZ -or -of csv -o $dir/fuzzing/${sub_out}.csv &>/dev/null [ -s "$dir/fuzzing/${sub_out}.csv" ] && cat $dir/fuzzing/${sub_out}.csv | cut -d ',' -f2,5,6 | tr ',' ' ' | awk '{ print $2 " " $3 " " $1}' | tail -n +2 | sort -k1 | anew -q $dir/fuzzing/${sub_out}.txt - rm $dir/fuzzing/${sub_out}.csv 2>>"$LOGFILE" + rm -f $dir/fuzzing/${sub_out}.csv 2>>"$LOGFILE" done + end_func "Results are saved in $domain/fuzzing/*subdomain*.txt" ${FUNCNAME[0]} else end_func "No $domain/web/webs.txts file found, fuzzing skipped " ${FUNCNAME[0]} @@ -851,19 +821,19 @@ function params(){ printf "${yellow}\n\n Running : Searching params with paramspider${reset}\n" if [ -s "webs/webs.txt" ]; then cat webs/webs.txt | sed -r "s/https?:\/\///" | anew -q .tmp/probed_nohttp.txt - interlace -tL .tmp/probed_nohttp.txt -threads 10 -c "python3 $tools/ParamSpider/paramspider.py -d _target_ -l high -q --exclude eot,jpg,jpeg,gif,css,tif,tiff,png,ttf,otf,woff,woff2,ico,pdf,svg,txt,js" &>/dev/null + [ -s ".tmp/probed_nohttp.txt" ] && interlace -tL .tmp/probed_nohttp.txt -threads 10 -c "python3 $tools/ParamSpider/paramspider.py -d _target_ -l high -q --exclude eot,jpg,jpeg,gif,css,tif,tiff,png,ttf,otf,woff,woff2,ico,pdf,svg,txt,js" &>/dev/null cat output/*.txt 2>>"$LOGFILE" | anew -q .tmp/param_tmp.txt sed '/^FUZZ/d' -i .tmp/param_tmp.txt rm -rf output/ 2>>"$LOGFILE" if [ "$DEEP" = true ]; 
then printf "${yellow}\n\n Running : Checking ${domain} with Arjun${reset}\n" - arjun -i .tmp/param_tmp.txt -t $ARJUN_THREADS -oT webs/param.txt &>>"$LOGFILE" + [ -s ".tmp/param_tmp.txt" ] && arjun -i .tmp/param_tmp.txt -t $ARJUN_THREADS -oT webs/param.txt &>>"$LOGFILE" else if [[ $(cat .tmp/param_tmp.txt | wc -l) -le 50 ]]; then printf "${yellow}\n\n Running : Checking ${domain} with Arjun${reset}\n" - arjun -i .tmp/param_tmp.txt -t $ARJUN_THREADS -oT webs/param.txt &>>"$LOGFILE" + [ -s ".tmp/param_tmp.txt" ] && arjun -i .tmp/param_tmp.txt -t $ARJUN_THREADS -oT webs/param.txt &>>"$LOGFILE" else - cp .tmp/param_tmp.txt webs/param.txt + [ -s ".tmp/param_tmp.txt" ] && cp .tmp/param_tmp.txt webs/param.txt fi fi fi @@ -903,7 +873,7 @@ function urlchecks(){ [ -s "js/url_extract_js.txt" ] && cat js/url_extract_js.txt | python3 $tools/JSA/jsa.py | anew -q .tmp/url_extract_tmp.txt fi cat .tmp/url_extract_tmp.txt webs/param.txt 2>>"$LOGFILE" | grep "${domain}" | grep "=" | qsreplace -a 2>>"$LOGFILE" | grep -Eiv "\.(eot|jpg|jpeg|gif|css|tif|tiff|png|ttf|otf|woff|woff2|ico|pdf|svg|txt|js)$" | anew -q .tmp/url_extract_tmp2.txt - uddup -u .tmp/url_extract_tmp2.txt -o .tmp/url_extract_uddup.txt &>>"$LOGFILE" + [ -s ".tmp/url_extract_tmp2.txt" ] && uddup -u .tmp/url_extract_tmp2.txt -o .tmp/url_extract_uddup.txt &>>"$LOGFILE" NUMOFLINES=$(cat .tmp/url_extract_uddup.txt 2>>"$LOGFILE" | anew webs/url_extract.txt | wc -l) notification "${NUMOFLINES} new urls with params" info end_func "Results are saved in $domain/webs/url_extract.txt" ${FUNCNAME[0]} @@ -921,16 +891,18 @@ function url_gf(){ if { [ ! -f "$called_fn_dir/.${FUNCNAME[0]}" ] || [ "$DIFF" = true ]; } && [ "$URL_GF" = true ]; then start_func "Vulnerable Pattern Search" mkdir -p gf - gf xss webs/url_extract.txt | anew -q gf/xss.txt - gf ssti webs/url_extract.txt | anew -q gf/ssti.txt - gf ssrf webs/url_extract.txt | anew -q gf/ssrf.txt - gf sqli webs/url_extract.txt | anew -q gf/sqli.txt - gf redirect webs/url_extract.txt | anew -q gf/redirect.txt - [ -f "gf/ssrf.txt" ] && cat gf/ssrf.txt | anew -q gf/redirect.txt - gf rce webs/url_extract.txt | anew -q gf/rce.txt - gf potential webs/url_extract.txt | cut -d ':' -f3-5 |anew -q gf/potential.txt - [ -s ".tmp/url_extract_tmp.txt" ] && cat .tmp/url_extract_tmp.txt | grep -Eiv "\.(eot|jpg|jpeg|gif|css|tif|tiff|png|ttf|otf|woff|woff2|ico|pdf|svg|txt|js)$" | unfurl -u format %s://%d%p | anew -q gf/endpoints.txt - gf lfi webs/url_extract.txt | anew -q gf/lfi.txt + if [ -s "webs/url_extract.txt" ]; then + gf xss webs/url_extract.txt | anew -q gf/xss.txt + gf ssti webs/url_extract.txt | anew -q gf/ssti.txt + gf ssrf webs/url_extract.txt | anew -q gf/ssrf.txt + gf sqli webs/url_extract.txt | anew -q gf/sqli.txt + gf redirect webs/url_extract.txt | anew -q gf/redirect.txt + [ -f "gf/ssrf.txt" ] && cat gf/ssrf.txt | anew -q gf/redirect.txt + gf rce webs/url_extract.txt | anew -q gf/rce.txt + gf potential webs/url_extract.txt | cut -d ':' -f3-5 |anew -q gf/potential.txt + [ -s ".tmp/url_extract_tmp.txt" ] && cat .tmp/url_extract_tmp.txt | grep -Eiv "\.(eot|jpg|jpeg|gif|css|tif|tiff|png|ttf|otf|woff|woff2|ico|pdf|svg|txt|js)$" | unfurl -u format %s://%d%p | anew -q gf/endpoints.txt + gf lfi webs/url_extract.txt | anew -q gf/lfi.txt + fi end_func "Results are saved in $domain/gf folder" ${FUNCNAME[0]} else if [ "$URL_GF" = false ]; then diff --git a/reconftw_axiom.sh b/reconftw_axiom.sh index c0ad1a7f..c8327fe7 100755 --- a/reconftw_axiom.sh +++ b/reconftw_axiom.sh @@ -193,7 +193,8 @@ function emails(){ cd 
"$tools/pwndb" || { echo "Failed to cd directory in ${FUNCNAME[0]} @ line ${LINENO}"; exit 1; } python3 pwndb.py --target "@${domain}" | sed '/^[-]/d' | anew -q $dir/osint/passwords.txt cd "$dir" || { echo "Failed to cd directory in ${FUNCNAME[0]} @ line ${LINENO}"; exit 1; } - sed -r -i "s/\x1B\[([0-9]{1,3}(;[0-9]{1,2})?)?[mGK]//g" osint/passwords.txt + [ -f "osint/passwords.txt" ] && sed -r -i "s/\x1B\[([0-9]{1,3}(;[0-9]{1,2})?)?[mGK]//g" osint/passwords.txt + [ -f "osint/passwords.txt" ] && sed -i '1,2d' osint/passwords.txt else text="${yellow}\n pwndb is currently down :(\n\n Check xjypo5vzgmo7jca6b322dnqbsdnp3amd24ybx26x5nxbusccjkm4pwid.onion${reset}\n" printf "${text}" && printf "${text}" | $NOTIFY @@ -257,12 +258,8 @@ function subdomains_full(){ NUMOFLINES_probed="0" printf "${bgreen}#######################################################################\n\n" printf "${bblue} Subdomain Enumeration $domain\n\n" - if [ -f "subdomains/subdomains.txt" ]; then - cp subdomains/subdomains.txt .tmp/subdomains_old.txt 2>>"$LOGFILE" - fi - if [ -f "webs/webs.txt" ]; then - cp webs/webs.txt .tmp/probed_old.txt 2>>"$LOGFILE" - fi + [ -s "subdomains/subdomains.txt" ] && cp subdomains/subdomains.txt .tmp/subdomains_old.txt + [ -s "webs/webs.txt" ] && cp webs/webs.txt .tmp/probed_old.txt resolvers_update @@ -285,9 +282,9 @@ function subdomains_full(){ fi printf "${bblue}\n Total subdomains: ${reset}\n\n" notification "- ${NUMOFLINES_subs} new alive subdomains" good - [ -f "subdomains/subdomains.txt" ] && cat subdomains/subdomains.txt 2>>"$LOGFILE" | sort + [ -s "subdomains/subdomains.txt" ] && cat subdomains/subdomains.txt | sort notification "- ${NUMOFLINES_probed} new web probed" good - [ -f "webs/webs.txt" ] && cat webs/webs.txt 2>>"$LOGFILE" | sort + [ -s "webs/webs.txt" ] && cat webs/webs.txt | sort notification "Subdomain Enumeration Finished" good printf "${bblue} Results are saved in $domain/subdomains/subdomains.txt and webs/webs.txt${reset}\n" printf "${bgreen}#######################################################################\n\n" @@ -350,8 +347,8 @@ function sub_active(){ [ -s "${inScope_file}" ] && cat ${inScope_file} .tmp/inscope_subs.txt cat .tmp/*_subs.txt | anew -q .tmp/subs_no_resolved.txt deleteOutScoped $outOfScope_file .tmp/subs_no_resolved.txt - axiom-scan .tmp/subs_no_resolved.txt -m puredns-resolve -r /home/op/lists/resolvers.txt -o .tmp/subdomains_tmp.txt &>>"$LOGFILE" - echo $domain | dnsx -retry 3 -silent -r /home/op/lists/resolvers_trusted.txt 2>>"$LOGFILE" | anew -q .tmp/subdomains_tmp.txt + [ -s ".tmp/subs_no_resolved.txt" ] && axiom-scan .tmp/subs_no_resolved.txt -m puredns-resolve -r /home/op/lists/resolvers.txt -o .tmp/subdomains_tmp.txt &>>"$LOGFILE" + echo $domain | dnsx -retry 3 -silent -r $resolvers_trusted 2>>"$LOGFILE" | anew -q .tmp/subdomains_tmp.txt NUMOFLINES=$(cat .tmp/subdomains_tmp.txt 2>>"$LOGFILE" | grep "\.$domain$\|^$domain$" | anew subdomains/subdomains.txt | wc -l) end_subfunc "${NUMOFLINES} new subs (active resolution)" ${FUNCNAME[0]} else @@ -362,9 +359,9 @@ function sub_active(){ function sub_dns(){ if [ ! 
-f "$called_fn_dir/.${FUNCNAME[0]}" ] || [ "$DIFF" = true ]; then start_subfunc "Running : DNS Subdomain Enumeration" - axiom-scan subdomains/subdomains.txt -m dnsx -retry 3 -a -aaaa -cname -ns -ptr -mx -soa -resp -o subdomains/subdomains_cname.txt &>>"$LOGFILE" + [ -s "subdomains/subdomains.txt" ] && axiom-scan subdomains/subdomains.txt -m dnsx -retry 3 -a -aaaa -cname -ns -ptr -mx -soa -resp -o subdomains/subdomains_cname.txt &>>"$LOGFILE" [ -s "subdomains/subdomains_cname.txt" ] && cat subdomains/subdomains_cname.txt | cut -d '[' -f2 | sed 's/.$//' | grep ".$domain$" | anew -q .tmp/subdomains_dns.txt - axiom-scan .tmp/subdomains_dns.txt -m puredns-resolve -r /home/op/lists/resolvers.txt -o .tmp/subdomains_dns_resolved.txt &>>"$LOGFILE" + [ -s ".tmp/subdomains_dns.txt" ] && axiom-scan .tmp/subdomains_dns.txt -m puredns-resolve -r /home/op/lists/resolvers.txt -o .tmp/subdomains_dns_resolved.txt &>>"$LOGFILE" NUMOFLINES=$(cat .tmp/subdomains_dns_resolved.txt 2>>"$LOGFILE" | grep "\.$domain$\|^$domain$" | anew subdomains/subdomains.txt | wc -l) end_subfunc "${NUMOFLINES} new subs (dns resolution)" ${FUNCNAME[0]} else @@ -380,9 +377,7 @@ function sub_brute(){ else axiom-scan $subs_wordlist -m puredns-single $domain -r /home/op/lists/resolvers.txt -o .tmp/subs_brute.txt &>>"$LOGFILE" fi - if [[ -s ".tmp/subs_brute.txt" ]]; then - axiom-scan .tmp/subs_brute.txt -m puredns-resolve -r /home/op/lists/resolvers.txt -o .tmp/subs_brute_valid.txt &>>"$LOGFILE" - fi + [ -s ".tmp/subs_brute.txt" ] && axiom-scan .tmp/subs_brute.txt -m puredns-resolve -r /home/op/lists/resolvers.txt -o .tmp/subs_brute_valid.txt &>>"$LOGFILE" NUMOFLINES=$(cat .tmp/subs_brute_valid.txt 2>>"$LOGFILE" | sed "s/*.//" | grep ".$domain$" | anew subdomains/subdomains.txt | wc -l) end_subfunc "${NUMOFLINES} new subs (bruteforce)" ${FUNCNAME[0]} else @@ -406,16 +401,16 @@ function sub_scraping(){ axiom-scan .tmp/probed_tmp_scrap.txt -m httpx -tls-probe -random-agent -status-code -threads $HTTPX_THREADS -timeout $HTTPX_TIMEOUT -silent -retries 2 -no-color -o .tmp/probed_tmp_scrap3.txt &>>"$LOGFILE" [ -s ".tmp/probed_tmp_scrap3.txt" ] && cat .tmp/probed_tmp_scrap3.txt | cut -d ' ' -f1 | grep ".$domain$" | anew .tmp/probed_tmp_scrap.txt | unfurl -u domains | anew -q .tmp/scrap_subs.txt if [ "$DEEP" = true ]; then - [ -f ".tmp/probed_tmp_scrap.txt" ] && axiom-scan .tmp/probed_tmp_scrap.txt -m gospider --js -d 3 --sitemap --robots -w -r -o .tmp/gospider &>>"$LOGFILE" + [ -s ".tmp/probed_tmp_scrap.txt" ] && axiom-scan .tmp/probed_tmp_scrap.txt -m gospider --js -d 3 --sitemap --robots -w -r -o .tmp/gospider &>>"$LOGFILE" else - [ -f ".tmp/probed_tmp_scrap.txt" ] && axiom-scan .tmp/probed_tmp_scrap.txt -m gospider --js -d 2 --sitemap --robots -w -r -o .tmp/gospider &>>"$LOGFILE" + [ -s ".tmp/probed_tmp_scrap.txt" ] && axiom-scan .tmp/probed_tmp_scrap.txt -m gospider --js -d 2 --sitemap --robots -w -r -o .tmp/gospider &>>"$LOGFILE" fi NUMFILES=0 touch .tmp/gospider.txt [[ -d .tmp/gospider/ ]] && NUMFILES=$(find .tmp/gospider/ -type f | wc -l) [[ $NUMFILES -gt 0 ]] && cat .tmp/gospider/* | sed '/^.\{2048\}./d' | anew -q .tmp/gospider.txt grep -Eo 'https?://[^ ]+' .tmp/gospider.txt | sed 's/]$//' | unfurl --unique domains | grep ".$domain$" | anew -q .tmp/scrap_subs.txt - axiom-scan .tmp/scrap_subs.txt -m puredns-resolve -r /home/op/lists/resolvers.txt -o .tmp/scrap_subs_resolved.txt &>>"$LOGFILE" + [ -s ".tmp/scrap_subs.txt" ] && axiom-scan .tmp/scrap_subs.txt -m puredns-resolve -r /home/op/lists/resolvers.txt -o 
 			NUMOFLINES=$(cat .tmp/scrap_subs_resolved.txt 2>>"$LOGFILE" | grep "\.$domain$\|^$domain$" | anew subdomains/subdomains.txt | tee .tmp/diff_scrap.txt | wc -l)
 			axiom-scan .tmp/diff_scrap.txt -m httpx -follow-host-redirects -random-agent -status-code -threads $HTTPX_THREADS -timeout $HTTPX_TIMEOUT -silent -retries 2 -no-color -o .tmp/probed_tmp_scrap4.txt &>>"$LOGFILE"
 			[ -s ".tmp/probed_tmp_scrap4.txt" ] && cat .tmp/probed_tmp_scrap4.txt | cut -d ' ' -f1 | grep ".$domain$" | anew -q .tmp/probed_tmp_scrap.txt
@@ -435,53 +430,21 @@ function sub_permut(){
 	if { [ ! -f "$called_fn_dir/.${FUNCNAME[0]}" ] || [ "$DIFF" = true ]; } && [ "$SUBPERMUTE" = true ]; then
 		start_subfunc "Running : Permutations Subdomain Enumeration"
-		if [ "$DEEP" = true ]; then
-			axiom-scan subdomains/subdomains.txt -m dnscewl -o .tmp/DNScewl1_.txt &>>"$LOGFILE"
-			[ -s ".tmp/DNScewl1_.txt" ] && cat .tmp/DNScewl1_.txt | grep ".$domain$" > .tmp/DNScewl1.txt
-			axiom-scan .tmp/DNScewl1.txt -m puredns-resolve -r /home/op/lists/resolvers.txt -o .tmp/permute1_tmp.txt &>>"$LOGFILE"
-			cat .tmp/permute1_tmp.txt 2>>"$LOGFILE" | anew -q .tmp/permute1.txt
-			axiom-scan .tmp/permute1.txt -m dnscewl -o .tmp/DNScewl2_.txt &>>"$LOGFILE"
-			[ -s ".tmp/DNScewl2_.txt" ] && cat .tmp/DNScewl2_.txt 2>>"$LOGFILE" | grep ".$domain$" > .tmp/DNScewl2.txt
-			axiom-scan .tmp/DNScewl2.txt -m puredns-resolve -r /home/op/lists/resolvers.txt -o .tmp/permute2_tmp.txt &>>"$LOGFILE"
-			cat .tmp/permute2_tmp.txt 2>>"$LOGFILE" | anew -q .tmp/permute2.txt
-			cat .tmp/permute1.txt .tmp/permute2.txt 2>>"$LOGFILE" | anew -q .tmp/permute_subs.txt
-		else
-			if [[ $(cat .tmp/subs_no_resolved.txt | wc -l) -le 100 ]]; then
-				axiom-scan .tmp/subs_no_resolved.txt -m dnscewl -o .tmp/DNScewl1_.txt &>>"$LOGFILE"
-				[ -s ".tmp/DNScewl1_.txt" ] && cat .tmp/DNScewl1_.txt | grep ".$domain$" > .tmp/DNScewl1.txt
-				axiom-scan .tmp/DNScewl1.txt -m puredns-resolve -r /home/op/lists/resolvers.txt -o .tmp/permute1_tmp.txt &>>"$LOGFILE"
-				cat .tmp/permute1_tmp.txt 2>>"$LOGFILE" | anew -q .tmp/permute1.txt
-				axiom-scan .tmp/permute1.txt -m dnscewl -o .tmp/DNScewl2_.txt &>>"$LOGFILE"
-				[ -s ".tmp/DNScewl2_.txt" ] && cat .tmp/DNScewl2_.txt 2>>"$LOGFILE" | grep ".$domain$" > .tmp/DNScewl2.txt
-				axiom-scan .tmp/DNScewl2.txt -m puredns-resolve -r /home/op/lists/resolvers.txt -o .tmp/permute2_tmp.txt &>>"$LOGFILE"
-				cat .tmp/permute2_tmp.txt 2>>"$LOGFILE" | anew -q .tmp/permute2.txt
-				cat .tmp/permute1.txt .tmp/permute2.txt 2>>"$LOGFILE" | anew -q .tmp/permute_subs.txt
-			elif [[ $(cat .tmp/subs_no_resolved.txt | wc -l) -le 200 ]]; then
-				axiom-scan .tmp/subs_no_resolved.txt -m dnscewl -o .tmp/DNScewl1_.txt &>>"$LOGFILE"
-				[ -s ".tmp/DNScewl1_.txt" ] && cat .tmp/DNScewl1_.txt | grep ".$domain$" > .tmp/DNScewl1.txt
-				axiom-scan .tmp/DNScewl1.txt -m puredns-resolve -r /home/op/lists/resolvers.txt -o .tmp/permute_tmp.txt &>>"$LOGFILE"
-				cat .tmp/permute_tmp.txt 2>>"$LOGFILE" | anew -q .tmp/permute_subs.txt
-			else
-				if [[ $(cat subdomains/subdomains.txt | wc -l) -le 100 ]]; then
-					axiom-scan subdomains/subdomains.txt -m dnscewl -o .tmp/DNScewl1_.txt &>>"$LOGFILE"
-					[ -s ".tmp/DNScewl1_.txt" ] && cat .tmp/DNScewl1_.txt | grep ".$domain$" > .tmp/DNScewl1.txt
-					axiom-scan .tmp/DNScewl1.txt -m puredns-resolve -r /home/op/lists/resolvers.txt -o .tmp/permute1_tmp.txt &>>"$LOGFILE"
-					cat .tmp/permute1_tmp.txt 2>>"$LOGFILE" | anew -q .tmp/permute1.txt
-					axiom-scan .tmp/permute1.txt -m dnscewl -o .tmp/DNScewl2_.txt &>>"$LOGFILE"
-					[ -s ".tmp/DNScewl2_.txt" ] && cat .tmp/DNScewl2_.txt 2>>"$LOGFILE" | grep ".$domain$" > .tmp/DNScewl2.txt
".tmp/DNScewl2_.txt" ] && cat .tmp/DNScewl2_.txt 2>>"$LOGFILE" | grep ".$domain$" > .tmp/DNScewl2.txt - axiom-scan .tmp/DNScewl2.txt -m puredns-resolve -r /home/op/lists/resolvers.txt -o .tmp/permute2_tmp.txt &>>"$LOGFILE" - cat .tmp/permute2_tmp.txt 2>>"$LOGFILE" | anew -q .tmp/permute2.txt - cat .tmp/permute1.txt .tmp/permute2.txt 2>>"$LOGFILE" | anew -q .tmp/permute_subs.txt - elif [[ $(cat subdomains/subdomains.txt | wc -l) -le 200 ]]; then - axiom-scan subdomains/subdomains.txt -m dnscewl -o .tmp/DNScewl1_.txt &>>"$LOGFILE" - [ -s ".tmp/DNScewl1_.txt" ] && cat .tmp/DNScewl1_.txt | grep ".$domain$" > .tmp/DNScewl1.txt - axiom-scan .tmp/DNScewl1.txt -m puredns-resolve -r /home/op/lists/resolvers.txt -o .tmp/permute_tmp.txt &>>"$LOGFILE" - cat .tmp/permute_tmp.txt 2>>"$LOGFILE" | anew -q .tmp/permute_subs.txt - else - printf "\n${bred} Skipping Permutations: Too Many Subdomains${reset}\n\n" - fi - fi - fi + + [ "$DEEP" = true ] && [ -s "subdomains/subdomains.txt" ] && axiom-scan subdomains/subdomains.txt -m dnscewl -o .tmp/DNScewl1_.txt &>>"$LOGFILE" + [ "$DEEP" = false ] && [ $(cat .tmp/subs_no_resolved.txt | wc -l) -le 100 ] && axiom-scan .tmp/subs_no_resolved.txt -m dnscewl -o .tmp/DNScewl1_.txt &>>"$LOGFILE" + [ "$DEEP" = false ] && [ $(cat .tmp/subs_no_resolved.txt | wc -l) -gt 100 ] && [ $(cat .tmp/subs_no_resolved.txt | wc -l) -le 200 ] && axiom-scan .tmp/subs_no_resolved.txt -m dnscewl -o .tmp/DNScewl1_.txt &>>"$LOGFILE" + [ "$DEEP" = false ] && [ $(cat .tmp/subs_no_resolved.txt | wc -l) -gt 200 ] && [ $(cat subdomains/subdomains.txt | wc -l) -le 100 ] && axiom-scan subdomains/subdomains.txt -m dnscewl -o .tmp/DNScewl1_.txt &>>"$LOGFILE" + [ -s ".tmp/DNScewl1_.txt" ] && cat .tmp/DNScewl1_.txt | grep ".$domain$" > .tmp/DNScewl1.txt + [ -s ".tmp/DNScewl1.txt" ] && axiom-scan .tmp/DNScewl1.txt -m puredns-resolve -r /home/op/lists/resolvers.txt -o .tmp/permute1_tmp.txt &>>"$LOGFILE" + [ -s ".tmp/permute1_tmp.txt" ] && cat .tmp/permute1_tmp.txt | anew -q .tmp/permute1.txt + [ -s ".tmp/permute1.txt" ] && axiom-scan .tmp/permute1.txt -m dnscewl -o .tmp/DNScewl2_.txt &>>"$LOGFILE" + [ -s ".tmp/DNScewl2_.txt" ] && cat .tmp/DNScewl2_.txt | grep ".$domain$" > .tmp/DNScewl2.txt + [ -s ".tmp/DNScewl2.txt" ] && axiom-scan .tmp/DNScewl2.txt -m puredns-resolve -r /home/op/lists/resolvers.txt -o .tmp/permute2_tmp.txt &>>"$LOGFILE" + [ -s ".tmp/permute2_tmp.txt" ] && cat .tmp/permute2_tmp.txt | anew -q .tmp/permute2.txt + + cat .tmp/permute1.txt .tmp/permute2.txt 2>>"$LOGFILE" | anew -q .tmp/permute_subs.txt + if [ -f ".tmp/permute_subs.txt" ]; then deleteOutScoped $outOfScope_file .tmp/permute_subs.txt NUMOFLINES=$(cat .tmp/permute_subs.txt 2>>"$LOGFILE" | grep ".$domain$" | anew subdomains/subdomains.txt | wc -l) @@ -506,29 +469,31 @@ function sub_recursive(){ for sub in $(cat subdomains/subdomains.txt | rev | cut -d '.' 
@@ -506,29 +469,31 @@ function sub_recursive(){
 		for sub in $(cat subdomains/subdomains.txt | rev | cut -d '.' -f 3,2,1 | rev | sort | uniq -c | sort -nr | grep -v '1 ' | sed -e 's/^[[:space:]]*//' | cut -d ' ' -f 2); do
 			echo $sub | anew -q .tmp/sub_pass_recur_target.com
 		done
-		axiom-scan .tmp/sub_pass_recur_target.com -m subfinder -all -o .tmp/subfinder_prec.txt &>>"$LOGFILE"
-		axiom-scan .tmp/sub_pass_recur_target.com -m assetfinder -o .tmp/assetfinder_prec.txt &>>"$LOGFILE"
-		axiom-scan .tmp/sub_pass_recur_target.com -m amass -passive -o .tmp/amass_prec.txt &>>"$LOGFILE"
-		axiom-scan .tmp/sub_pass_recur_target.com -m findomain -o .tmp/findomain_prec.txt &>>"$LOGFILE"
-		eval cat .tmp/*_prec.txt 2>>"$LOGFILE" | anew -q .tmp/passive_recursive.txt
-		axiom-scan .tmp/passive_recursive.txt -m puredns-resolve -r /home/op/lists/resolvers.txt -o .tmp/passive_recurs_tmp.txt &>>"$LOGFILE"
-		[ -f ".tmp/passive_recurs_tmp.txt" ] && cat .tmp/passive_recurs_tmp.txt | anew -q subdomains/subdomains.txt
-
-		#Bruteforce recursive
+		if [ -s ".tmp/sub_pass_recur_target.com" ]; then
+			axiom-scan .tmp/sub_pass_recur_target.com -m subfinder -all -o .tmp/subfinder_prec.txt &>>"$LOGFILE"
+			axiom-scan .tmp/sub_pass_recur_target.com -m assetfinder -o .tmp/assetfinder_prec.txt &>>"$LOGFILE"
+			axiom-scan .tmp/sub_pass_recur_target.com -m amass -passive -o .tmp/amass_prec.txt &>>"$LOGFILE"
+			axiom-scan .tmp/sub_pass_recur_target.com -m findomain -o .tmp/findomain_prec.txt &>>"$LOGFILE"
+		fi
+		cat .tmp/*_prec.txt 2>>"$LOGFILE" | anew -q .tmp/passive_recursive.txt
+		[ -s ".tmp/passive_recursive.txt" ] && axiom-scan .tmp/passive_recursive.txt -m puredns-resolve -r /home/op/lists/resolvers.txt -o .tmp/passive_recurs_tmp.txt &>>"$LOGFILE"
+		[ -s ".tmp/passive_recurs_tmp.txt" ] && cat .tmp/passive_recurs_tmp.txt | anew -q subdomains/subdomains.txt
+
+		# Bruteforce recursive
 		if [[ $(cat subdomains/subdomains.txt | wc -l) -le 1000 ]]; then
 			echo "" > .tmp/brute_recursive_wordlist.txt
 			for sub in $(cat subdomains/subdomains.txt); do
 				sed "s/$/.$sub/" $subs_wordlist >> .tmp/brute_recursive_wordlist.txt
 			done
-			axiom-scan .tmp/brute_recursive_wordlist.txt -m puredns-resolve -r /home/op/lists/resolvers.txt -o .tmp/brute_recursive_result.txt &>>"$LOGFILE"
-			cat .tmp/brute_recursive_result.txt | anew -q .tmp/brute_recursive.txt
-			axiom-scan .tmp/brute_recursive.txt -m dnscewl -o .tmp/DNScewl1_recursive_.txt &>>"$LOGFILE"
+			[ -s ".tmp/brute_recursive_wordlist.txt" ] && axiom-scan .tmp/brute_recursive_wordlist.txt -m puredns-resolve -r /home/op/lists/resolvers.txt -o .tmp/brute_recursive_result.txt &>>"$LOGFILE"
+			[ -s ".tmp/brute_recursive_result.txt" ] && cat .tmp/brute_recursive_result.txt | anew -q .tmp/brute_recursive.txt
+			[ -s ".tmp/brute_recursive.txt" ] && axiom-scan .tmp/brute_recursive.txt -m dnscewl -o .tmp/DNScewl1_recursive_.txt &>>"$LOGFILE"
 			[ -s ".tmp/DNScewl1_recursive_.txt" ] && cat .tmp/DNScewl1_recursive_.txt | grep ".$domain$" > .tmp/DNScewl1_recursive.txt
-			axiom-scan .tmp/DNScewl1_recursive.txt -m puredns-resolve -r /home/op/lists/resolvers.txt -o .tmp/permute1_recursive_tmp.txt &>>"$LOGFILE"
-			cat .tmp/permute1_recursive_tmp.txt 2>>"$LOGFILE" | anew -q .tmp/permute1_recursive.txt
-			axiom-scan .tmp/permute1_recursive.txt -m dnscewl -o .tmp/DNScewl2_recursive_.txt &>>"$LOGFILE"
+			[ -s ".tmp/DNScewl1_recursive.txt" ] && axiom-scan .tmp/DNScewl1_recursive.txt -m puredns-resolve -r /home/op/lists/resolvers.txt -o .tmp/permute1_recursive_tmp.txt &>>"$LOGFILE"
+			[ -s ".tmp/permute1_recursive_tmp.txt" ] && cat .tmp/permute1_recursive_tmp.txt | anew -q .tmp/permute1_recursive.txt
+			[ -s ".tmp/permute1_recursive.txt" ] && axiom-scan .tmp/permute1_recursive.txt -m dnscewl -o .tmp/DNScewl2_recursive_.txt &>>"$LOGFILE"
 			[ -s ".tmp/DNScewl2_recursive_.txt" ] && cat .tmp/DNScewl2_recursive_.txt | grep ".$domain$" > .tmp/DNScewl2_recursive.txt
-			axiom-scan .tmp/DNScewl2_recursive.txt -m puredns-resolve -r /home/op/lists/resolvers.txt -o .tmp/permute2_recursive_tmp.txt &>>"$LOGFILE"
+			[ -s ".tmp/DNScewl2_recursive.txt" ] && axiom-scan .tmp/DNScewl2_recursive.txt -m puredns-resolve -r /home/op/lists/resolvers.txt -o .tmp/permute2_recursive_tmp.txt &>>"$LOGFILE"
 			cat .tmp/permute1_recursive.txt .tmp/permute2_recursive_tmp.txt 2>>"$LOGFILE" | anew -q .tmp/permute_recursive.txt
 			NUMOFLINES=$(cat .tmp/permute_recursive.txt .tmp/brute_recursive.txt 2>>"$LOGFILE" | grep "\.$domain$\|^$domain$" | anew subdomains/subdomains.txt | wc -l)
 			end_subfunc "${NUMOFLINES} new subs (recursive)" ${FUNCNAME[0]}
@@ -583,7 +548,7 @@ function zonetransfer(){
 
 function s3buckets(){
 	if { [ ! -f "$called_fn_dir/.${FUNCNAME[0]}" ] || [ "$DIFF" = true ]; } && [ "$S3BUCKETS" = true ]; then
 		start_func "AWS S3 buckets search"
-		axiom-scan subdomains/subdomains.txt -m s3scanner -o .tmp/s3buckets_tmp.txt &>>"$LOGFILE"
+		axiom-scan webs/webs.txt -m s3scanner -o .tmp/s3buckets_tmp.txt &>>"$LOGFILE"
 		cat .tmp/s3buckets_tmp.txt | grep -iv "not_exist" | grep -iv "Warning:" | anew -q .tmp/s3buckets.txt
 		NUMOFLINES=$(cat .tmp/s3buckets.txt 2>>"$LOGFILE" | anew subdomains/s3buckets.txt | wc -l)
 		if [ "$NUMOFLINES" -gt 0 ]; then
 			notification "${NUMOFLINES} new S3 buckets found" info
@@ -723,7 +688,7 @@ function portscan(){
 			done
 		fi
 		if [ "$PORTSCAN_ACTIVE" = true ]; then
-			axiom-scan .tmp/ips_nowaf.txt -m nmapx --top-ports 1000 -sV -n -Pn --max-retries 2 -o hosts/portscan_active.txt &>>"$LOGFILE"
+			[ -s ".tmp/ips_nowaf.txt" ] && axiom-scan .tmp/ips_nowaf.txt -m nmapx --top-ports 1000 -sV -n -Pn --max-retries 2 -o hosts/portscan_active.txt &>>"$LOGFILE"
 		fi
 		end_func "Results are saved in hosts/portscan_[passive|active].txt" ${FUNCNAME[0]}
 	else
@@ -811,7 +776,7 @@ function nuclei_check(){
 
 function fuzz(){
 	if { [ ! -f "$called_fn_dir/.${FUNCNAME[0]}" ] || [ "$DIFF" = true ]; } && [ "$FUZZ" = true ]; then
-f "$called_fn_dir/.${FUNCNAME[0]}" ] || [ "$DIFF" = true ]; } && [ "$FUZZ" = true ]; then start_func "Web directory fuzzing" - if [ -s "./webs/webs.txt" ]; then + if [ -s "webs/webs.txt" ]; then mkdir -p $dir/fuzzing axiom-scan webs/webs.txt -m ffuf -w /home/op/lists/onelistforallmicro.txt -H \"${HEADER}\" -mc all -fc 404 -sf -s -maxtime $FFUF_MAXTIME -o $dir/fuzzing/ffuf-content.csv &>>"$LOGFILE" grep -v "FUZZ,url,redirectlocation" $dir/fuzzing/ffuf-content.csv | awk -F "," '{print $2" "$5" "$6}' | sort > $dir/fuzzing/ffuf-content.tmp From 5299cda577cebdaed5aa44c5b96a004b05e0e85e Mon Sep 17 00:00:00 2001 From: six2dez Date: Thu, 20 May 2021 08:00:29 +0200 Subject: [PATCH 25/32] Fix ffuf interlace --- reconftw.sh | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/reconftw.sh b/reconftw.sh index a8f71920..2635cd86 100755 --- a/reconftw.sh +++ b/reconftw.sh @@ -755,7 +755,7 @@ function fuzz(){ start_func "Web directory fuzzing" if [ -s "./webs/webs.txt" ]; then mkdir -p $dir/fuzzing - interlace -tL webs/webs.txt -threads 10 -c "ffuf -mc all -fc 404 -ac -t ${FFUF_THREADS} -sf -s -H \"${HEADER}\" -w ${fuzz_wordlist} -maxtime ${FFUF_MAXTIME} -u _target_/FUZZ -of csv -o _output_/_cleantarget_.csv -ac" -o fuzzing + interlace -tL webs/webs.txt -threads 10 -c "ffuf -mc all -fc 404 -ac -t ${FFUF_THREADS} -sf -s -H \"${HEADER}\" -w ${fuzz_wordlist} -maxtime ${FFUF_MAXTIME} -u _target_/FUZZ -of csv -o _output_/_cleantarget_.csv -ac" -o fuzzing &>>"$LOGFILE" for sub in $(cat webs/webs.txt); do sub_out=$(echo $sub | sed -e 's|^[^/]*//||' -e 's|/.*$||') From ea03faa23b0339daa68b65c199a5af14e79923c7 Mon Sep 17 00:00:00 2001 From: six2dez Date: Thu, 20 May 2021 15:25:55 +0200 Subject: [PATCH 26/32] Fix osint folder and removed webprobe verbose output --- reconftw.sh | 2 +- reconftw_axiom.sh | 4 ++-- 2 files changed, 3 insertions(+), 3 deletions(-) diff --git a/reconftw.sh b/reconftw.sh index 2635cd86..50847532 100755 --- a/reconftw.sh +++ b/reconftw.sh @@ -160,7 +160,7 @@ function metadata(){ if { [ ! -f "$called_fn_dir/.${FUNCNAME[0]}" ] || [ "$DIFF" = true ]; } && [ "$METADATA" = true ] && [ "$OSINT" = true ]; then start_func "Scanning metadata in public files" metafinder -d "$domain" -l 20 -o osint -go -bi -ba &>>"$LOGFILE" - mv "osint/${domain}/*" "osint/" 2>>"$LOGFILE" + mv "osint/${domain}/"*".txt" "osint/" 2>>"$LOGFILE" rmdir "osint/${domain}" 2>>"$LOGFILE" end_func "Results are saved in $domain/osint/[software/authors/metadata_results].txt" ${FUNCNAME[0]} else diff --git a/reconftw_axiom.sh b/reconftw_axiom.sh index c8327fe7..fcf96155 100755 --- a/reconftw_axiom.sh +++ b/reconftw_axiom.sh @@ -161,7 +161,7 @@ function metadata(){ if { [ ! -f "$called_fn_dir/.${FUNCNAME[0]}" ] || [ "$DIFF" = true ]; } && [ "$METADATA" = true ] && [ "$OSINT" = true ]; then start_func "Scanning metadata in public files" metafinder -d "$domain" -l 20 -o osint -go -bi -ba &>>"$LOGFILE" - mv "osint/${domain}/*" "osint/" 2>>"$LOGFILE" + mv "osint/${domain}/"*".txt" "osint/" 2>>"$LOGFILE" rmdir "osint/${domain}" 2>>"$LOGFILE" end_func "Results are saved in $domain/osint/[software/authors/metadata_results].txt" ${FUNCNAME[0]} else @@ -601,7 +601,7 @@ function webprobe_full(){ if { [ ! 
-f "$called_fn_dir/.${FUNCNAME[0]}" ] || [ "$DIFF" = true ]; } && [ "$WEBPROBEFULL" = true ]; then start_func "Http probing non standard ports" - axiom-scan subdomains/subdomains.txt -m unimap --fast-scan --ports $UNCOMMON_PORTS_WEB -q -k --url-output -o .tmp/nmap_uncommonweb.txt + axiom-scan subdomains/subdomains.txt -m unimap --fast-scan --ports $UNCOMMON_PORTS_WEB -q -k --url-output -o .tmp/nmap_uncommonweb.txt &>>"$LOGFILE" [ -s ".tmp/nmap_uncommonweb.txt" ] && axiom-scan .tmp/nmap_uncommonweb.txt -m httpx -follow-host-redirects -random-agent -status-code -threads $HTTPX_UNCOMMONPORTS_THREADS -timeout $HTTPX_UNCOMMONPORTS_TIMEOUT -silent -retries 2 -no-color -o .tmp/probed_uncommon_ports_tmp_.txt &>>"$LOGFILE" [ -s ".tmp/probed_uncommon_ports_tmp_.txt" ] && cat .tmp/probed_uncommon_ports_tmp_.txt | cut -d ' ' -f1 | grep ".$domain$" | anew -q .tmp/probed_uncommon_ports_tmp.txt From 278da2a432086b358a340c262c013868f51ef0f3 Mon Sep 17 00:00:00 2001 From: six2dez Date: Thu, 20 May 2021 16:27:44 +0200 Subject: [PATCH 27/32] Fix LFI and paramspider output --- reconftw.sh | 2 +- reconftw_axiom.sh | 4 ++-- 2 files changed, 3 insertions(+), 3 deletions(-) diff --git a/reconftw.sh b/reconftw.sh index 50847532..1fec2c53 100755 --- a/reconftw.sh +++ b/reconftw.sh @@ -1165,7 +1165,7 @@ function lfi(){ if [ -s "gf/lfi.txt" ]; then cat gf/lfi.txt | qsreplace FUZZ | anew -q .tmp/tmp_lfi.txt for url in $(cat .tmp/tmp_lfi.txt); do - ffuf -v -t $FFUF_THREADS -H "${HEADER}" -w $lfi_wordlist -u $url -mr "root:" &>/dev/null | grep "URL" | sed 's/| URL | //' | anew -q vulns/lfi.txt + ffuf -v -t $FFUF_THREADS -H "${HEADER}" -w $lfi_wordlist -u $url -mr "root:" 2>/dev/null | grep "URL" | sed 's/| URL | //' | anew -q vulns/lfi.txt done fi end_func "Results are saved in vulns/lfi.txt" ${FUNCNAME[0]} diff --git a/reconftw_axiom.sh b/reconftw_axiom.sh index fcf96155..5140f3a0 100755 --- a/reconftw_axiom.sh +++ b/reconftw_axiom.sh @@ -844,7 +844,7 @@ function params(){ if [ -s "webs/webs.txt" ]; then cat webs/webs.txt | sed -r "s/https?:\/\///" | anew -q .tmp/probed_nohttp.txt axiom-scan .tmp/probed_nohttp.txt -m paramspider -l high -q --exclude eot,jpg,jpeg,gif,css,tif,tiff,png,ttf,otf,woff,woff2,ico,pdf,svg,txt,js -o output_paramspider &>>"$LOGFILE" - cat output_paramspider/*.txt 2>>"$LOGFILE" | anew -q .tmp/param_tmp.txt + find output_paramspider/ -name '*.txt' -exec cat {} \; | anew -q .tmp/param_tmp.txt sed '/^FUZZ/d' -i .tmp/param_tmp.txt rm -rf output_paramspider/ 2>>"$LOGFILE" if [ "$DEEP" = true ]; then @@ -1189,7 +1189,7 @@ function lfi(){ if [ -s "gf/lfi.txt" ]; then cat gf/lfi.txt | qsreplace FUZZ | anew -q .tmp/tmp_lfi.txt for url in $(cat .tmp/tmp_lfi.txt); do - ffuf -v -t $FFUF_THREADS -H "${HEADER}" -w $lfi_wordlist -u $url -mr "root:" &>/dev/null | grep "URL" | sed 's/| URL | //' | anew -q vulns/lfi.txt + ffuf -v -t $FFUF_THREADS -H "${HEADER}" -w $lfi_wordlist -u $url -mr "root:" 2>/dev/null | grep "URL" | sed 's/| URL | //' | anew -q vulns/lfi.txt done fi end_func "Results are saved in vulns/lfi.txt" ${FUNCNAME[0]} From 1895491cf96d622fd0ef5ad84788ce6e5a0baafe Mon Sep 17 00:00:00 2001 From: six2dez Date: Fri, 21 May 2021 10:04:54 +0200 Subject: [PATCH 28/32] Metafinder limit && better log --- reconftw.cfg | 1 + reconftw.sh | 118 +++++++++++++-------------- reconftw_axiom.sh | 200 +++++++++++++++++++++++----------------------- 3 files changed, 160 insertions(+), 159 deletions(-) diff --git a/reconftw.cfg b/reconftw.cfg index 8a60c967..a883f2c0 100644 --- a/reconftw.cfg +++ b/reconftw.cfg @@ -44,6 
+44,7 @@ GITHUB_DORKS=true METADATA=true EMAILS=true DOMAIN_INFO=true +METAFINDER_LIMIT=20 # Max 250 # Subdomains SUBCRT=true diff --git a/reconftw.sh b/reconftw.sh index 1fec2c53..4cd9cdf2 100755 --- a/reconftw.sh +++ b/reconftw.sh @@ -138,9 +138,9 @@ function github_dorks(){ start_func "Github Dorks in process" if [ -s "${GITHUB_TOKENS}" ]; then if [ "$DEEP" = true ]; then - python3 "$tools/GitDorker/GitDorker.py" -tf "${GITHUB_TOKENS}" -e "$GITDORKER_THREADS" -q "$domain" -p -ri -d "$tools/GitDorker/Dorks/alldorksv3" | grep "\[+\]" | grep "git" | anew -q osint/gitdorks.txt &>>"$LOGFILE" + python3 "$tools/GitDorker/GitDorker.py" -tf "${GITHUB_TOKENS}" -e "$GITDORKER_THREADS" -q "$domain" -p -ri -d "$tools/GitDorker/Dorks/alldorksv3" 2>>"$LOGFILE" | grep "\[+\]" | grep "git" | anew -q osint/gitdorks.txt else - python3 "$tools/GitDorker/GitDorker.py" -tf "${GITHUB_TOKENS}" -e "$GITDORKER_THREADS" -q "$domain" -p -ri -d "$tools/GitDorker/Dorks/medium_dorks.txt" | grep "\[+\]" | grep "git" | anew -q osint/gitdorks.txt &>>"$LOGFILE" + python3 "$tools/GitDorker/GitDorker.py" -tf "${GITHUB_TOKENS}" -e "$GITDORKER_THREADS" -q "$domain" -p -ri -d "$tools/GitDorker/Dorks/medium_dorks.txt" 2>>"$LOGFILE" | grep "\[+\]" | grep "git" | anew -q osint/gitdorks.txt fi sed -r -i "s/\x1B\[([0-9]{1,3}(;[0-9]{1,2})?)?[mGK]//g" osint/gitdorks.txt else @@ -159,9 +159,9 @@ function github_dorks(){ function metadata(){ if { [ ! -f "$called_fn_dir/.${FUNCNAME[0]}" ] || [ "$DIFF" = true ]; } && [ "$METADATA" = true ] && [ "$OSINT" = true ]; then start_func "Scanning metadata in public files" - metafinder -d "$domain" -l 20 -o osint -go -bi -ba &>>"$LOGFILE" + metafinder -d "$domain" -l $METAFINDER_LIMIT -o osint -go -bi -ba 2>>"$LOGFILE" &>/dev/null mv "osint/${domain}/"*".txt" "osint/" 2>>"$LOGFILE" - rmdir "osint/${domain}" 2>>"$LOGFILE" + rm -rf "osint/${domain}" 2>>"$LOGFILE" end_func "Results are saved in $domain/osint/[software/authors/metadata_results].txt" ${FUNCNAME[0]} else if [ "$METADATA" = false ] || [ "$OSINT" = false ]; then @@ -183,7 +183,7 @@ function emails(){ cat .tmp/harvester.txt | awk '/Users/,/IPs/' | sed -e '1,2d' | head -n -2 | sed -e '/Searching /d' -e '/exception has occurred/d' -e '/found:/Q' | anew -q osint/users.txt cat .tmp/harvester.txt | awk '/Links/,/Users/' | sed -e '1,2d' | head -n -2 | sed -e '/Searching /d' -e '/exception has occurred/d' -e '/found:/Q' | anew -q osint/linkedin.txt fi - h8mail -t $domain -q domain --loose -c $tools/h8mail_config.ini -j .tmp/h8_results.json &>>"$LOGFILE" + h8mail -t $domain -q domain --loose -c $tools/h8mail_config.ini -j .tmp/h8_results.json 2>>"$LOGFILE" &>/dev/null [ -s ".tmp/h8_results.json" ] && cat .tmp/h8_results.json | jq -r '.targets[0] | .data[] | .[]' | cut -d '-' -f2 | anew -q osint/h8mail.txt PWNDB_STATUS=$(timeout 15s curl -Is --socks5-hostname localhost:9050 http://pwndb2am4tzkvold.onion | grep HTTP | cut -d ' ' -f2) @@ -292,18 +292,18 @@ function subdomains_full(){ function sub_passive(){ if [ ! 
-f "$called_fn_dir/.${FUNCNAME[0]}" ] || [ "$DIFF" = true ]; then start_subfunc "Running : Passive Subdomain Enumeration" - subfinder -d $domain -all -o .tmp/subfinder_psub.txt &>>"$LOGFILE" + subfinder -d $domain -all -o .tmp/subfinder_psub.txt 2>>"$LOGFILE" &>/dev/null assetfinder --subs-only $domain 2>>"$LOGFILE" | anew -q .tmp/assetfinder_psub.txt - amass enum -passive -d $domain -config $AMASS_CONFIG -o .tmp/amass_psub.txt &>>"$LOGFILE" - findomain --quiet -t $domain -u .tmp/findomain_psub.txt &>>"$LOGFILE" - timeout 10m waybackurls $domain | unfurl --unique domains | anew -q .tmp/waybackurls_psub.txt - timeout 10m gauplus -t $GAUPLUS_THREADS -random-agent -subs $domain | unfurl --unique domains | anew -q .tmp/gau_psub.txt + amass enum -passive -d $domain -config $AMASS_CONFIG -o .tmp/amass_psub.txt 2>>"$LOGFILE" &>/dev/null + findomain --quiet -t $domain -u .tmp/findomain_psub.txt 2>>"$LOGFILE" &>/dev/null + timeout 10m waybackurls $domain | unfurl -u domains 2>>"$LOGFILE" | anew -q .tmp/waybackurls_psub.txt + timeout 10m gauplus -t $GAUPLUS_THREADS -random-agent -subs $domain | unfurl -u domains 2>>"$LOGFILE" | anew -q .tmp/gau_psub.txt crobat -s $domain 2>>"$LOGFILE" | anew -q .tmp/crobat_psub.txt if [ -s "${GITHUB_TOKENS}" ]; then if [ "$DEEP" = true ]; then - github-subdomains -d $domain -t $GITHUB_TOKENS -o .tmp/github_subdomains_psub.txt &>>"$LOGFILE" + github-subdomains -d $domain -t $GITHUB_TOKENS -o .tmp/github_subdomains_psub.txt 2>>"$LOGFILE" &>/dev/null else - github-subdomains -d $domain -k -q -t $GITHUB_TOKENS -o .tmp/github_subdomains_psub.txt &>>"$LOGFILE" + github-subdomains -d $domain -k -q -t $GITHUB_TOKENS -o .tmp/github_subdomains_psub.txt 2>>"$LOGFILE" &>/dev/null fi fi curl -s "https://jldc.me/anubis/subdomains/${domain}" 2>>"$LOGFILE" | grep -Po "((http|https):\/\/)?(([\w.-]*)\.([\w]*)\.([A-z]))\w+" | sed '/^\./d' | anew -q .tmp/jldc_psub.txt @@ -322,7 +322,7 @@ function sub_passive(){ function sub_crt(){ if { [ ! 
-f "$called_fn_dir/.${FUNCNAME[0]}" ] || [ "$DIFF" = true ]; } && [ "$SUBCRT" = true ]; then start_subfunc "Running : Crtsh Subdomain Enumeration" - python3 $tools/ctfr/ctfr.py -d $domain -o .tmp/crtsh_subs_tmp.txt &>>"$LOGFILE" + python3 $tools/ctfr/ctfr.py -d $domain -o .tmp/crtsh_subs_tmp.txt 2>>"$LOGFILE" &>/dev/null curl "https://tls.bufferover.run/dns?q=.${domain}" 2>>"$LOGFILE" | jq -r .Results[] 2>>"$LOGFILE" | cut -d ',' -f3 | grep -F ".$domain" | anew -q .tmp/crtsh_subs_tmp.txt curl "https://dns.bufferover.run/dns?q=.${domain}" 2>>"$LOGFILE" | jq -r '.FDNS_A'[],'.RDNS'[] 2>>"$LOGFILE" | cut -d ',' -f2 | grep -F ".$domain" | anew -q .tmp/crtsh_subs_tmp.txt NUMOFLINES=$(cat .tmp/crtsh_subs_tmp.txt 2>>"$LOGFILE" | anew .tmp/crtsh_subs.txt | wc -l) @@ -342,7 +342,7 @@ function sub_active(){ [ -s "${inScope_file}" ] && cat ${inScope_file} .tmp/inscope_subs.txt cat .tmp/*_subs.txt | anew -q .tmp/subs_no_resolved.txt deleteOutScoped $outOfScope_file .tmp/subs_no_resolved.txt - [ -s ".tmp/subs_no_resolved.txt" ] && puredns resolve .tmp/subs_no_resolved.txt -w .tmp/subdomains_tmp.txt -r $resolvers --resolvers-trusted $resolvers_trusted -l $PUREDNS_PUBLIC_LIMIT --rate-limit-trusted $PUREDNS_TRUSTED_LIMIT &>>"$LOGFILE" + [ -s ".tmp/subs_no_resolved.txt" ] && puredns resolve .tmp/subs_no_resolved.txt -w .tmp/subdomains_tmp.txt -r $resolvers --resolvers-trusted $resolvers_trusted -l $PUREDNS_PUBLIC_LIMIT --rate-limit-trusted $PUREDNS_TRUSTED_LIMIT 2>>"$LOGFILE" &>/dev/null echo $domain | dnsx -retry 3 -silent -r $resolvers_trusted 2>>"$LOGFILE" | anew -q .tmp/subdomains_tmp.txt NUMOFLINES=$(cat .tmp/subdomains_tmp.txt 2>>"$LOGFILE" | grep "\.$domain$\|^$domain$" | anew subdomains/subdomains.txt | wc -l) end_subfunc "${NUMOFLINES} new subs (active resolution)" ${FUNCNAME[0]} @@ -354,9 +354,9 @@ function sub_active(){ function sub_dns(){ if [ ! -f "$called_fn_dir/.${FUNCNAME[0]}" ] || [ "$DIFF" = true ]; then start_subfunc "Running : DNS Subdomain Enumeration" - [ -s "subdomains/subdomains.txt" ] && dnsx -retry 3 -a -aaaa -cname -ns -ptr -mx -soa -resp -silent -l subdomains/subdomains.txt -o subdomains/subdomains_cname.txt -r $resolvers_trusted &>>"$LOGFILE" + [ -s "subdomains/subdomains.txt" ] && dnsx -retry 3 -a -aaaa -cname -ns -ptr -mx -soa -resp -silent -l subdomains/subdomains.txt -o subdomains/subdomains_cname.txt -r $resolvers_trusted 2>>"$LOGFILE" &>/dev/null [ -s "subdomains/subdomains_cname.txt" ] && cat subdomains/subdomains_cname.txt | cut -d '[' -f2 | sed 's/.$//' | grep ".$domain$" | anew -q .tmp/subdomains_dns.txt - [ -s ".tmp/subdomains_dns.txt" ] && puredns resolve .tmp/subdomains_dns.txt -w .tmp/subdomains_dns_resolved.txt -r $resolvers --resolvers-trusted $resolvers_trusted -l $PUREDNS_PUBLIC_LIMIT --rate-limit-trusted $PUREDNS_TRUSTED_LIMIT &>>"$LOGFILE" + [ -s ".tmp/subdomains_dns.txt" ] && puredns resolve .tmp/subdomains_dns.txt -w .tmp/subdomains_dns_resolved.txt -r $resolvers --resolvers-trusted $resolvers_trusted -l $PUREDNS_PUBLIC_LIMIT --rate-limit-trusted $PUREDNS_TRUSTED_LIMIT 2>>"$LOGFILE" &>/dev/null NUMOFLINES=$(cat .tmp/subdomains_dns_resolved.txt 2>>"$LOGFILE" | grep "\.$domain$\|^$domain$" | anew subdomains/subdomains.txt | wc -l) end_subfunc "${NUMOFLINES} new subs (dns resolution)" ${FUNCNAME[0]} else @@ -368,11 +368,11 @@ function sub_brute(){ if { [ ! 
-f "$called_fn_dir/.${FUNCNAME[0]}" ] || [ "$DIFF" = true ]; } && [ "$SUBBRUTE" = true ]; then start_subfunc "Running : Bruteforce Subdomain Enumeration" if [ "$DEEP" = true ]; then - puredns bruteforce $subs_wordlist_big $domain -w .tmp/subs_brute.txt -r $resolvers --resolvers-trusted $resolvers_trusted -l $PUREDNS_PUBLIC_LIMIT --rate-limit-trusted $PUREDNS_TRUSTED_LIMIT &>>"$LOGFILE" + puredns bruteforce $subs_wordlist_big $domain -w .tmp/subs_brute.txt -r $resolvers --resolvers-trusted $resolvers_trusted -l $PUREDNS_PUBLIC_LIMIT --rate-limit-trusted $PUREDNS_TRUSTED_LIMIT 2>>"$LOGFILE" &>/dev/null else - puredns bruteforce $subs_wordlist $domain -w .tmp/subs_brute.txt -r $resolvers --resolvers-trusted $resolvers_trusted -l $PUREDNS_PUBLIC_LIMIT --rate-limit-trusted $PUREDNS_TRUSTED_LIMIT &>>"$LOGFILE" + puredns bruteforce $subs_wordlist $domain -w .tmp/subs_brute.txt -r $resolvers --resolvers-trusted $resolvers_trusted -l $PUREDNS_PUBLIC_LIMIT --rate-limit-trusted $PUREDNS_TRUSTED_LIMIT 2>>"$LOGFILE" &>/dev/null fi - [ -s ".tmp/subs_brute.txt" ] && puredns resolve .tmp/subs_brute.txt -w .tmp/subs_brute_valid.txt -r $resolvers --resolvers-trusted $resolvers_trusted -l $PUREDNS_PUBLIC_LIMIT --rate-limit-trusted $PUREDNS_TRUSTED_LIMIT &>>"$LOGFILE" + [ -s ".tmp/subs_brute.txt" ] && puredns resolve .tmp/subs_brute.txt -w .tmp/subs_brute_valid.txt -r $resolvers --resolvers-trusted $resolvers_trusted -l $PUREDNS_PUBLIC_LIMIT --rate-limit-trusted $PUREDNS_TRUSTED_LIMIT 2>>"$LOGFILE" &>/dev/null NUMOFLINES=$(cat .tmp/subs_brute_valid.txt 2>>"$LOGFILE" | sed "s/*.//" | grep ".$domain$" | anew subdomains/subdomains.txt | wc -l) end_subfunc "${NUMOFLINES} new subs (bruteforce)" ${FUNCNAME[0]} else @@ -390,16 +390,16 @@ function sub_scraping(){ touch .tmp/scrap_subs.txt if [ -s "$dir/subdomains/subdomains.txt" ]; then cat subdomains/subdomains.txt | httpx -follow-host-redirects -random-agent -status-code -threads $HTTPX_THREADS -timeout $HTTPX_TIMEOUT -silent -retries 2 -no-color | cut -d ' ' -f1 | grep ".$domain$" | anew -q .tmp/probed_tmp_scrap.txt - [ -s ".tmp/probed_tmp_scrap.txt" ] && cat .tmp/probed_tmp_scrap.txt | httpx -csp-probe -random-agent -status-code -threads $HTTPX_THREADS -timeout $HTTPX_TIMEOUT -silent -retries 2 -no-color | cut -d ' ' -f1 | grep ".$domain$" | anew .tmp/probed_tmp_scrap.txt | unfurl -u domains | anew -q .tmp/scrap_subs.txt - [ -s ".tmp/probed_tmp_scrap.txt" ] && cat .tmp/probed_tmp_scrap.txt | httpx -tls-probe -random-agent -status-code -threads $HTTPX_THREADS -timeout $HTTPX_TIMEOUT -silent -retries 2 -no-color | cut -d ' ' -f1 | grep ".$domain$" | anew .tmp/probed_tmp_scrap.txt | unfurl -u domains | anew -q .tmp/scrap_subs.txt + [ -s ".tmp/probed_tmp_scrap.txt" ] && cat .tmp/probed_tmp_scrap.txt | httpx -csp-probe -random-agent -status-code -threads $HTTPX_THREADS -timeout $HTTPX_TIMEOUT -silent -retries 2 -no-color | cut -d ' ' -f1 | grep ".$domain$" | anew .tmp/probed_tmp_scrap.txt | unfurl -u domains 2>>"$LOGFILE" | anew -q .tmp/scrap_subs.txt + [ -s ".tmp/probed_tmp_scrap.txt" ] && cat .tmp/probed_tmp_scrap.txt | httpx -tls-probe -random-agent -status-code -threads $HTTPX_THREADS -timeout $HTTPX_TIMEOUT -silent -retries 2 -no-color | cut -d ' ' -f1 | grep ".$domain$" | anew .tmp/probed_tmp_scrap.txt | unfurl -u domains 2>>"$LOGFILE" | anew -q .tmp/scrap_subs.txt if [ "$DEEP" = true ]; then [ -s ".tmp/probed_tmp_scrap.txt" ] && gospider -S .tmp/probed_tmp_scrap.txt --js -t $GOSPIDER_THREADS -d 3 --sitemap --robots -w -r > .tmp/gospider.txt else [ -s 
".tmp/probed_tmp_scrap.txt" ] && gospider -S .tmp/probed_tmp_scrap.txt --js -t $GOSPIDER_THREADS -d 2 --sitemap --robots -w -r > .tmp/gospider.txt fi sed -i '/^.\{2048\}./d' .tmp/gospider.txt - [ -s ".tmp/gospider.txt" ] && cat .tmp/gospider.txt | grep -Eo 'https?://[^ ]+' | sed 's/]$//' | unfurl --unique domains | grep ".$domain$" | anew -q .tmp/scrap_subs.txt - [ -s ".tmp/scrap_subs.txt" ] && puredns resolve .tmp/scrap_subs.txt -w .tmp/scrap_subs_resolved.txt -r $resolvers --resolvers-trusted $resolvers_trusted -l $PUREDNS_PUBLIC_LIMIT --rate-limit-trusted $PUREDNS_TRUSTED_LIMIT &>>"$LOGFILE" + [ -s ".tmp/gospider.txt" ] && cat .tmp/gospider.txt | grep -Eo 'https?://[^ ]+' | sed 's/]$//' | unfurl -u domains 2>>"$LOGFILE" | grep ".$domain$" | anew -q .tmp/scrap_subs.txt + [ -s ".tmp/scrap_subs.txt" ] && puredns resolve .tmp/scrap_subs.txt -w .tmp/scrap_subs_resolved.txt -r $resolvers --resolvers-trusted $resolvers_trusted -l $PUREDNS_PUBLIC_LIMIT --rate-limit-trusted $PUREDNS_TRUSTED_LIMIT 2>>"$LOGFILE" &>/dev/null NUMOFLINES=$(cat .tmp/scrap_subs_resolved.txt 2>>"$LOGFILE" | grep "\.$domain$\|^$domain$" | anew subdomains/subdomains.txt | tee .tmp/diff_scrap.txt | wc -l) [ -s ".tmp/diff_scrap.txt" ] && cat .tmp/diff_scrap.txt | httpx -follow-host-redirects -random-agent -status-code -threads $HTTPX_THREADS -timeout $HTTPX_TIMEOUT -silent -retries 2 -no-color | cut -d ' ' -f1 | grep ".$domain$" | anew -q .tmp/probed_tmp_scrap.txt end_subfunc "${NUMOFLINES} new subs (code scraping)" ${FUNCNAME[0]} @@ -423,10 +423,10 @@ function sub_permut(){ [ "$DEEP" = false ] && [ $(cat .tmp/subs_no_resolved.txt | wc -l) -le 100 ] && DNScewl --tL .tmp/subs_no_resolved.txt -p $tools/permutations_list.txt --level=0 --subs --no-color 2>>"$LOGFILE" | tail -n +14 | grep ".$domain$" > .tmp/DNScewl1.txt [ "$DEEP" = false ] && [ $(cat .tmp/subs_no_resolved.txt | wc -l) -gt 100 ] && [ $(cat .tmp/subs_no_resolved.txt | wc -l) -le 200 ] && DNScewl --tL .tmp/subs_no_resolved.txt -p $tools/permutations_list.txt --level=0 --subs --no-color 2>>"$LOGFILE" | tail -n +14 | grep ".$domain$" > .tmp/DNScewl1.txt [ "$DEEP" = false ] && [ $(cat .tmp/subs_no_resolved.txt | wc -l) -gt 200 ] && [ $(cat subdomains/subdomains.txt | wc -l) -le 100 ] && DNScewl --tL subdomains/subdomains.txt -p $tools/permutations_list.txt --level=0 --subs --no-color 2>>"$LOGFILE" | tail -n +14 | grep ".$domain$" > .tmp/DNScewl1.txt - [ -s ".tmp/DNScewl1.txt" ] && puredns resolve .tmp/DNScewl1.txt -w .tmp/permute1_tmp.txt -r $resolvers --resolvers-trusted $resolvers_trusted -l $PUREDNS_PUBLIC_LIMIT --rate-limit-trusted $PUREDNS_TRUSTED_LIMIT &>>"$LOGFILE" + [ -s ".tmp/DNScewl1.txt" ] && puredns resolve .tmp/DNScewl1.txt -w .tmp/permute1_tmp.txt -r $resolvers --resolvers-trusted $resolvers_trusted -l $PUREDNS_PUBLIC_LIMIT --rate-limit-trusted $PUREDNS_TRUSTED_LIMIT 2>>"$LOGFILE" &>/dev/null [ -s ".tmp/permute1_tmp.txt" ] && cat .tmp/permute1_tmp.txt | anew -q .tmp/permute1.txt [ -s ".tmp/permute1.txt" ] && DNScewl --tL .tmp/permute1.txt -p $tools/permutations_list.txt --level=0 --subs --no-color 2>>"$LOGFILE" | tail -n +14 | grep ".$domain$" > .tmp/DNScewl2.txt - [ -s ".tmp/DNScewl2.txt" ] && puredns resolve .tmp/DNScewl2.txt -w .tmp/permute2_tmp.txt -r $resolvers --resolvers-trusted $resolvers_trusted -l $PUREDNS_PUBLIC_LIMIT --rate-limit-trusted $PUREDNS_TRUSTED_LIMIT &>>"$LOGFILE" + [ -s ".tmp/DNScewl2.txt" ] && puredns resolve .tmp/DNScewl2.txt -w .tmp/permute2_tmp.txt -r $resolvers --resolvers-trusted $resolvers_trusted -l $PUREDNS_PUBLIC_LIMIT 
--rate-limit-trusted $PUREDNS_TRUSTED_LIMIT 2>>"$LOGFILE" &>/dev/null [ -s ".tmp/permute2_tmp.txt" ] && cat .tmp/permute2_tmp.txt | anew -q .tmp/permute2.txt cat .tmp/permute1.txt .tmp/permute2.txt 2>>"$LOGFILE" | anew -q .tmp/permute_subs.txt @@ -453,12 +453,12 @@ function sub_recursive(){ start_subfunc "Running : Subdomains recursive search" # Passive recursive for sub in $(cat subdomains/subdomains.txt | rev | cut -d '.' -f 3,2,1 | rev | sort | uniq -c | sort -nr | grep -v '1 ' | sed -e 's/^[[:space:]]*//' | cut -d ' ' -f 2); do - subfinder -d $sub -all -silent &>>"$LOGFILE" | anew -q .tmp/passive_recursive.txt + subfinder -d $sub -all -silent 2>>"$LOGFILE" | anew -q .tmp/passive_recursive.txt assetfinder --subs-only $sub.$domain 2>>"$LOGFILE" | anew -q .tmp/passive_recursive.txt - amass enum -passive -d $sub.$domain -config $AMASS_CONFIG &>>"$LOGFILE" - findomain --quiet -t $sub.$domain &>>"$LOGFILE" | anew -q .tmp/passive_recursive.txt + amass enum -passive -d $sub.$domain -config $AMASS_CONFIG 2>>"$LOGFILE" | anew -q .tmp/passive_recursive.txt + findomain --quiet -t $sub.$domain 2>>"$LOGFILE" | anew -q .tmp/passive_recursive.txt done - [ -s ".tmp/passive_recursive.txt" ] && puredns resolve .tmp/passive_recursive.txt -w .tmp/passive_recurs_tmp.txt -r $resolvers --resolvers-trusted $resolvers_trusted -l $PUREDNS_PUBLIC_LIMIT --rate-limit-trusted $PUREDNS_TRUSTED_LIMIT &>>"$LOGFILE" + [ -s ".tmp/passive_recursive.txt" ] && puredns resolve .tmp/passive_recursive.txt -w .tmp/passive_recurs_tmp.txt -r $resolvers --resolvers-trusted $resolvers_trusted -l $PUREDNS_PUBLIC_LIMIT --rate-limit-trusted $PUREDNS_TRUSTED_LIMIT 2>>"$LOGFILE" &>/dev/null [ -s ".tmp/passive_recurs_tmp.txt" ] && cat .tmp/passive_recurs_tmp.txt | anew -q subdomains/subdomains.txt # Bruteforce recursive @@ -467,13 +467,13 @@ function sub_recursive(){ for sub in $(cat subdomains/subdomains.txt); do sed "s/$/.$sub/" $subs_wordlist >> .tmp/brute_recursive_wordlist.txt done - [ -s ".tmp/brute_recursive_wordlist.txt" ] && puredns resolve .tmp/brute_recursive_wordlist.txt -r $resolvers --resolvers-trusted $resolvers_trusted -l $PUREDNS_PUBLIC_LIMIT --rate-limit-trusted $PUREDNS_TRUSTED_LIMIT -w .tmp/brute_recursive_result.txt &>>"$LOGFILE" + [ -s ".tmp/brute_recursive_wordlist.txt" ] && puredns resolve .tmp/brute_recursive_wordlist.txt -r $resolvers --resolvers-trusted $resolvers_trusted -l $PUREDNS_PUBLIC_LIMIT --rate-limit-trusted $PUREDNS_TRUSTED_LIMIT -w .tmp/brute_recursive_result.txt 2>>"$LOGFILE" &>/dev/null [ -s ".tmp/brute_recursive_result.txt" ] && cat .tmp/brute_recursive_result.txt | anew -q .tmp/brute_recursive.txt [ -s ".tmp/brute_recursive.txt" ] && DNScewl --tL .tmp/brute_recursive.txt -p $tools/permutations_list.txt --level=0 --subs --no-color 2>>"$LOGFILE" | tail -n +14 | grep ".$domain$" > .tmp/DNScewl1_recursive.txt - [ -s ".tmp/DNScewl1_recursive.txt" ] && puredns resolve .tmp/DNScewl1_recursive.txt -w .tmp/permute1_recursive_tmp.txt -r $resolvers --resolvers-trusted $resolvers_trusted -l $PUREDNS_PUBLIC_LIMIT --rate-limit-trusted $PUREDNS_TRUSTED_LIMIT &>>"$LOGFILE" + [ -s ".tmp/DNScewl1_recursive.txt" ] && puredns resolve .tmp/DNScewl1_recursive.txt -w .tmp/permute1_recursive_tmp.txt -r $resolvers --resolvers-trusted $resolvers_trusted -l $PUREDNS_PUBLIC_LIMIT --rate-limit-trusted $PUREDNS_TRUSTED_LIMIT 2>>"$LOGFILE" &>/dev/null [ -s ".tmp/permute1_recursive_tmp.txt" ] && cat .tmp/permute1_recursive_tmp.txt 2>>"$LOGFILE" | anew -q .tmp/permute1_recursive.txt [ -s ".tmp/permute1_recursive.txt" ] && 
DNScewl --tL .tmp/permute1_recursive.txt -p $tools/permutations_list.txt --level=0 --subs --no-color 2>>"$LOGFILE" | tail -n +14 | grep ".$domain$" > .tmp/DNScewl2_recursive.txt - [ -s ".tmp/DNScewl2_recursive.txt" ] && puredns resolve .tmp/DNScewl2_recursive.txt -w .tmp/permute2_recursive_tmp.txt -r $resolvers --resolvers-trusted $resolvers_trusted -l $PUREDNS_PUBLIC_LIMIT --rate-limit-trusted $PUREDNS_TRUSTED_LIMIT &>>"$LOGFILE" + [ -s ".tmp/DNScewl2_recursive.txt" ] && puredns resolve .tmp/DNScewl2_recursive.txt -w .tmp/permute2_recursive_tmp.txt -r $resolvers --resolvers-trusted $resolvers_trusted -l $PUREDNS_PUBLIC_LIMIT --rate-limit-trusted $PUREDNS_TRUSTED_LIMIT 2>>"$LOGFILE" &>/dev/null cat .tmp/permute1_recursive.txt .tmp/permute2_recursive_tmp.txt 2>>"$LOGFILE" | anew -q .tmp/permute_recursive.txt NUMOFLINES=$(cat .tmp/permute_recursive.txt .tmp/brute_recursive.txt 2>>"$LOGFILE" | grep "\.$domain$\|^$domain$" | anew subdomains/subdomains.txt | wc -l) end_subfunc "${NUMOFLINES} new subs (recursive)" ${FUNCNAME[0]} @@ -511,7 +511,7 @@ function subtakeover(){ function zonetransfer(){ if { [ ! -f "$called_fn_dir/.${FUNCNAME[0]}" ] || [ "$DIFF" = true ]; } && [ "$ZONETRANSFER" = true ]; then start_func "Zone transfer check" - python3 $tools/dnsrecon/dnsrecon.py -d $domain -a -j subdomains/zonetransfer.json &>>"$LOGFILE" + python3 $tools/dnsrecon/dnsrecon.py -d $domain -a -j subdomains/zonetransfer.json 2>>"$LOGFILE" &>/dev/null if [ -s "subdomains/zonetransfer.json" ]; then if grep -q "\"zone_transfer\"\: \"success\"" subdomains/zonetransfer.json ; then notification "Zone transfer found on ${domain}!" info; fi fi @@ -561,7 +561,7 @@ function webprobe_simple(){ end_subfunc "${NUMOFLINES} new websites resolved" ${FUNCNAME[0]} if [ "$PROXY" = true ] && [ -n "$proxy_url" ] && [[ $(cat webs/webs.txt| wc -l) -le 1500 ]]; then notification "Sending websites to proxy" info - ffuf -mc all -w webs/webs.txt -u FUZZ -replay-proxy $proxy_url &>>"$LOGFILE" + ffuf -mc all -w webs/webs.txt -u FUZZ -replay-proxy $proxy_url 2>>"$LOGFILE" &>/dev/null fi else end_subfunc "No new websites to probe" ${FUNCNAME[0]} @@ -579,8 +579,8 @@ function webprobe_full(){ if { [ ! 
-f "$called_fn_dir/.${FUNCNAME[0]}" ] || [ "$DIFF" = true ]; } && [ "$WEBPROBEFULL" = true ]; then start_func "Http probing non standard ports" - [ -s "subdomains/subdomains.txt" ] && sudo unimap --fast-scan -f subdomains/subdomains.txt --ports $UNCOMMON_PORTS_WEB -q -k --url-output | anew -q .tmp/nmap_uncommonweb.txt - [ -s ".tmp/nmap_uncommonweb.txt" ] && cat .tmp/nmap_uncommonweb.txt | httpx -follow-host-redirects -random-agent -status-code -threads $HTTPX_UNCOMMONPORTS_THREADS -timeout $HTTPX_UNCOMMONPORTS_TIMEOUT -silent -retries 2 -no-color | cut -d ' ' -f1 | grep ".$domain" | anew -q .tmp/probed_uncommon_ports_tmp.txt + [ -s "subdomains/subdomains.txt" ] && sudo unimap --fast-scan -f subdomains/subdomains.txt --ports $UNCOMMON_PORTS_WEB -q -k --url-output 2>>"$LOGFILE" | anew -q .tmp/nmap_uncommonweb.txt + [ -s ".tmp/nmap_uncommonweb.txt" ] && cat .tmp/nmap_uncommonweb.txt | httpx -follow-host-redirects -random-agent -status-code -threads $HTTPX_UNCOMMONPORTS_THREADS -timeout $HTTPX_UNCOMMONPORTS_TIMEOUT -silent -retries 2 -no-color 2>>"$LOGFILE" | cut -d ' ' -f1 | grep ".$domain" | anew -q .tmp/probed_uncommon_ports_tmp.txt #timeout_secs=$(($(cat subdomains/subdomains.txt | wc -l)*5+10)) #cat subdomains/subdomains.txt | timeout $timeout_secs naabu -p $UNCOMMON_PORTS_WEB -o .tmp/nmap_uncommonweb.txt &>>"$LOGFILE" && uncommon_ports_checked=$(cat .tmp/nmap_uncommonweb.txt | cut -d ':' -f2 | sort -u | sed -e 'H;${x;s/\n/,/g;s/^,//;p;};d') @@ -593,7 +593,7 @@ function webprobe_full(){ end_func "Results are saved in $domain/webs/webs_uncommon_ports.txt" ${FUNCNAME[0]} if [ "$PROXY" = true ] && [ -n "$proxy_url" ] && [[ $(cat webs/webs_uncommon_ports.txt| wc -l) -le 1500 ]]; then notification "Sending websites uncommon ports to proxy" info - ffuf -mc all -w webs/webs_uncommon_ports.txt -u FUZZ -replay-proxy $proxy_url &>>"$LOGFILE" + ffuf -mc all -w webs/webs_uncommon_ports.txt -u FUZZ -replay-proxy $proxy_url 2>>"$LOGFILE" &>/dev/null fi else if [ "$WEBPROBEFULL" = false ]; then @@ -628,7 +628,7 @@ function favicon(){ if { [ ! -f "$called_fn_dir/.${FUNCNAME[0]}" ] || [ "$DIFF" = true ]; } && [ "$FAVICON" = true ]; then start_func "Favicon Ip Lookup" cd "$tools/fav-up" || { echo "Failed to cd to $dir in ${FUNCNAME[0]} @ line ${LINENO}"; exit 1; } - python3 favUp.py -w "$domain" -sc -o favicontest.json &>>"$LOGFILE" + python3 favUp.py -w "$domain" -sc -o favicontest.json 2>>"$LOGFILE" &>/dev/null if [ -s "favicontest.json" ]; then cat favicontest.json | jq -r '.found_ips' 2>>"$LOGFILE" | grep -v "not-found" > favicontest.txt sed -i "s/|/\n/g" favicontest.txt @@ -665,7 +665,7 @@ function portscan(){ done fi if [ "$PORTSCAN_ACTIVE" = true ]; then - [ -s ".tmp/ips_nowaf.txt" ] && sudo nmap --top-ports 1000 -sV -n --max-retries 2 -Pn -iL .tmp/ips_nowaf.txt -oN hosts/portscan_active.txt -oG .tmp/nmap_grep.gnmap &>>"$LOGFILE" + [ -s ".tmp/ips_nowaf.txt" ] && sudo nmap --top-ports 1000 -sV -n --max-retries 2 -Pn -iL .tmp/ips_nowaf.txt -oN hosts/portscan_active.txt -oG .tmp/nmap_grep.gnmap 2>>"$LOGFILE" &>/dev/null fi end_func "Results are saved in hosts/portscan_[passive|active].txt" ${FUNCNAME[0]} else @@ -681,7 +681,7 @@ function cloudprovider(){ if { [ ! 
-f "$called_fn_dir/.${FUNCNAME[0]}" ] || [ "$DIFF" = true ]; } && [ "$CLOUD_IP" = true ]; then start_func "Cloud provider check" cd "$tools/ip2provider" || { echo "Failed to cd directory in ${FUNCNAME[0]} @ line ${LINENO}"; exit 1; } - [ -s "$dir/hosts/ips.txt" ] && cat $dir/hosts/ips.txt | grep -oE "\b([0-9]{1,3}\.){3}[0-9]{1,3}\b" | ./ip2provider.py | anew -q $dir/hosts/cloud_providers.txt &>>"$LOGFILE" + [ -s "$dir/hosts/ips.txt" ] && cat $dir/hosts/ips.txt | grep -oE "\b([0-9]{1,3}\.){3}[0-9]{1,3}\b" | ./ip2provider.py 2>>"$LOGFILE" | anew -q $dir/hosts/cloud_providers.txt cd "$dir" || { echo "Failed to cd directory in ${FUNCNAME[0]} @ line ${LINENO}"; exit 1; } end_func "Results are saved in hosts/cloud_providers.txt" ${FUNCNAME[0]} else @@ -701,7 +701,7 @@ function waf_checks(){ if { [ ! -f "$called_fn_dir/.${FUNCNAME[0]}" ] || [ "$DIFF" = true ]; } && [ "$WAF_DETECTION" = true ]; then start_func "Website's WAF detection" if [ -s "./webs/webs.txt" ]; then - wafw00f -i webs/webs.txt -o .tmp/wafs.txt &>>"$LOGFILE" + wafw00f -i webs/webs.txt -o .tmp/wafs.txt 2>>"$LOGFILE" &>/dev/null if [ -s ".tmp/wafs.txt" ]; then cat .tmp/wafs.txt | sed -e 's/^[ \t]*//' -e 's/ \+ /\t/g' -e '/(None)/d' | tr -s "\t" ";" > webs/webs_wafs.txt NUMOFLINES=$(cat webs/webs_wafs.txt 2>>"$LOGFILE" | wc -l) @@ -725,7 +725,7 @@ function waf_checks(){ function nuclei_check(){ if { [ ! -f "$called_fn_dir/.${FUNCNAME[0]}" ] || [ "$DIFF" = true ]; } && [ "$NUCLEICHECK" = true ]; then start_func "Templates based web scanner" - nuclei -update-templates &>>"$LOGFILE" + nuclei -update-templates 2>>"$LOGFILE" &>/dev/null mkdir -p nuclei_output if [ -s "webs/webs.txt" ]; then printf "${yellow}\n Running : Nuclei Info${reset}\n\n" @@ -755,7 +755,7 @@ function fuzz(){ start_func "Web directory fuzzing" if [ -s "./webs/webs.txt" ]; then mkdir -p $dir/fuzzing - interlace -tL webs/webs.txt -threads 10 -c "ffuf -mc all -fc 404 -ac -t ${FFUF_THREADS} -sf -s -H \"${HEADER}\" -w ${fuzz_wordlist} -maxtime ${FFUF_MAXTIME} -u _target_/FUZZ -of csv -o _output_/_cleantarget_.csv -ac" -o fuzzing &>>"$LOGFILE" + interlace -tL webs/webs.txt -threads 10 -c "ffuf -mc all -fc 404 -ac -t ${FFUF_THREADS} -sf -s -H \"${HEADER}\" -w ${fuzz_wordlist} -maxtime ${FFUF_MAXTIME} -u _target_/FUZZ -of csv -o _output_/_cleantarget_.csv -ac" -o fuzzing 2>>"$LOGFILE" &>/dev/null for sub in $(cat webs/webs.txt); do sub_out=$(echo $sub | sed -e 's|^[^/]*//||' -e 's|/.*$||') @@ -782,7 +782,7 @@ function cms_scanner(){ mkdir -p $dir/cms && rm -rf $dir/cms/* if [ -s "./webs/webs.txt" ]; then tr '\n' ',' < webs/webs.txt > .tmp/cms.txt - timeout -k 30 $CMSSCAN_TIMEOUT python3 $tools/CMSeeK/cmseek.py -l .tmp/cms.txt --batch -r &>>"$LOGFILE" + timeout -k 30 $CMSSCAN_TIMEOUT python3 $tools/CMSeeK/cmseek.py -l .tmp/cms.txt --batch -r 2>>"$LOGFILE" &>/dev/null exit_status=$? 
if [[ $exit_status -eq 125 ]]; then echo "TIMEOUT cmseek.py - investigate manually for $dir" &>>"$LOGFILE" @@ -827,11 +827,11 @@ function params(){ rm -rf output/ 2>>"$LOGFILE" if [ "$DEEP" = true ]; then printf "${yellow}\n\n Running : Checking ${domain} with Arjun${reset}\n" - [ -s ".tmp/param_tmp.txt" ] && arjun -i .tmp/param_tmp.txt -t $ARJUN_THREADS -oT webs/param.txt &>>"$LOGFILE" + [ -s ".tmp/param_tmp.txt" ] && arjun -i .tmp/param_tmp.txt -t $ARJUN_THREADS -oT webs/param.txt 2>>"$LOGFILE" &>/dev/null else if [[ $(cat .tmp/param_tmp.txt | wc -l) -le 50 ]]; then printf "${yellow}\n\n Running : Checking ${domain} with Arjun${reset}\n" - [ -s ".tmp/param_tmp.txt" ] && arjun -i .tmp/param_tmp.txt -t $ARJUN_THREADS -oT webs/param.txt &>>"$LOGFILE" + [ -s ".tmp/param_tmp.txt" ] && arjun -i .tmp/param_tmp.txt -t $ARJUN_THREADS -oT webs/param.txt 2>>"$LOGFILE" &>/dev/null else [ -s ".tmp/param_tmp.txt" ] && cp .tmp/param_tmp.txt webs/param.txt fi @@ -865,7 +865,7 @@ function urlchecks(){ sed -i '/^.\{2048\}./d' .tmp/gospider.txt [ -s ".tmp/gospider.txt" ] && cat .tmp/gospider.txt | grep -Eo 'https?://[^ ]+' | sed 's/]$//' | grep ".$domain$" | anew -q .tmp/url_extract_tmp.txt if [ -s "${GITHUB_TOKENS}" ]; then - github-endpoints -q -k -d $domain -t ${GITHUB_TOKENS} -o .tmp/github-endpoints.txt &>>"$LOGFILE" + github-endpoints -q -k -d $domain -t ${GITHUB_TOKENS} -o .tmp/github-endpoints.txt 2>>"$LOGFILE" &>/dev/null [ -s ".tmp/github-endpoints.txt" ] && cat .tmp/github-endpoints.txt | anew -q .tmp/url_extract_tmp.txt fi [ -s ".tmp/url_extract_tmp.txt" ] && cat .tmp/url_extract_tmp.txt | grep "${domain}" | grep -Ei "\.(js)" | anew -q js/url_extract_js.txt @@ -873,13 +873,13 @@ function urlchecks(){ [ -s "js/url_extract_js.txt" ] && cat js/url_extract_js.txt | python3 $tools/JSA/jsa.py | anew -q .tmp/url_extract_tmp.txt fi cat .tmp/url_extract_tmp.txt webs/param.txt 2>>"$LOGFILE" | grep "${domain}" | grep "=" | qsreplace -a 2>>"$LOGFILE" | grep -Eiv "\.(eot|jpg|jpeg|gif|css|tif|tiff|png|ttf|otf|woff|woff2|ico|pdf|svg|txt|js)$" | anew -q .tmp/url_extract_tmp2.txt - [ -s ".tmp/url_extract_tmp2.txt" ] && uddup -u .tmp/url_extract_tmp2.txt -o .tmp/url_extract_uddup.txt &>>"$LOGFILE" + [ -s ".tmp/url_extract_tmp2.txt" ] && uddup -u .tmp/url_extract_tmp2.txt -o .tmp/url_extract_uddup.txt 2>>"$LOGFILE" &>/dev/null NUMOFLINES=$(cat .tmp/url_extract_uddup.txt 2>>"$LOGFILE" | anew webs/url_extract.txt | wc -l) notification "${NUMOFLINES} new urls with params" info end_func "Results are saved in $domain/webs/url_extract.txt" ${FUNCNAME[0]} if [ "$PROXY" = true ] && [ -n "$proxy_url" ] && [[ $(cat webs/url_extract.txt | wc -l) -le 1500 ]]; then notification "Sending urls to proxy" info - ffuf -mc all -w webs/url_extract.txt -u FUZZ -replay-proxy $proxy_url &>>"$LOGFILE" + ffuf -mc all -w webs/url_extract.txt -u FUZZ -replay-proxy $proxy_url 2>>"$LOGFILE" &>/dev/null fi fi else @@ -900,7 +900,7 @@ function url_gf(){ [ -f "gf/ssrf.txt" ] && cat gf/ssrf.txt | anew -q gf/redirect.txt gf rce webs/url_extract.txt | anew -q gf/rce.txt gf potential webs/url_extract.txt | cut -d ':' -f3-5 |anew -q gf/potential.txt - [ -s ".tmp/url_extract_tmp.txt" ] && cat .tmp/url_extract_tmp.txt | grep -Eiv "\.(eot|jpg|jpeg|gif|css|tif|tiff|png|ttf|otf|woff|woff2|ico|pdf|svg|txt|js)$" | unfurl -u format %s://%d%p | anew -q gf/endpoints.txt + [ -s ".tmp/url_extract_tmp.txt" ] && cat .tmp/url_extract_tmp.txt | grep -Eiv "\.(eot|jpg|jpeg|gif|css|tif|tiff|png|ttf|otf|woff|woff2|ico|pdf|svg|txt|js)$" | unfurl -u format %s://%d%p 
2>>"$LOGFILE" | anew -q gf/endpoints.txt gf lfi webs/url_extract.txt | anew -q gf/lfi.txt fi end_func "Results are saved in $domain/gf folder" ${FUNCNAME[0]} @@ -952,7 +952,7 @@ function jschecks(){ cat .tmp/js_endpoints.txt | anew -q js/js_endpoints.txt fi printf "${yellow} Running : Gathering secrets 4/5${reset}\n" - [ -s "js/js_livelinks.txt" ] && cat js/js_livelinks.txt | nuclei -silent -t ~/nuclei-templates/exposures/tokens/ -r $resolvers_trusted -o js/js_secrets.txt &>>"$LOGFILE" + [ -s "js/js_livelinks.txt" ] && cat js/js_livelinks.txt | nuclei -silent -t ~/nuclei-templates/exposures/tokens/ -r $resolvers_trusted -o js/js_secrets.txt 2>>"$LOGFILE" &>/dev/null printf "${yellow} Running : Building wordlist 5/5${reset}\n" [ -s "js/js_livelinks.txt" ] && cat js/js_livelinks.txt | python3 $tools/getjswords.py 2>>"$LOGFILE" | anew -q webs/dict_words.txt end_func "Results are saved in $domain/js folder" ${FUNCNAME[0]} @@ -972,12 +972,12 @@ function wordlist_gen(){ if { [ ! -f "$called_fn_dir/.${FUNCNAME[0]}" ] || [ "$DIFF" = true ]; } && [ "$WORDLIST" = true ]; then start_func "Wordlist generation" if [ -s ".tmp/url_extract_tmp.txt" ]; then - cat .tmp/url_extract_tmp.txt | unfurl -u keys | sed 's/[][]//g' | sed 's/[#]//g' | sed 's/[}{]//g' | anew -q webs/dict_words.txt - cat .tmp/url_extract_tmp.txt | unfurl -u values | sed 's/[][]//g' | sed 's/[#]//g' | sed 's/[}{]//g' | anew -q webs/dict_words.txt + cat .tmp/url_extract_tmp.txt | unfurl -u keys 2>>"$LOGFILE" | sed 's/[][]//g' | sed 's/[#]//g' | sed 's/[}{]//g' | anew -q webs/dict_words.txt + cat .tmp/url_extract_tmp.txt | unfurl -u values 2>>"$LOGFILE" | sed 's/[][]//g' | sed 's/[#]//g' | sed 's/[}{]//g' | anew -q webs/dict_words.txt cat .tmp/url_extract_tmp.txt | tr "[:punct:]" "\n" | anew -q webs/dict_words.txt fi - [ -s ".tmp/js_endpoints.txt" ] && cat .tmp/js_endpoints.txt | unfurl -u path | anew -q webs/dict_paths.txt - [ -s ".tmp/url_extract_tmp.txt" ] && cat .tmp/url_extract_tmp.txt | unfurl -u path | anew -q webs/dict_paths.txt + [ -s ".tmp/js_endpoints.txt" ] && cat .tmp/js_endpoints.txt | unfurl -u path 2>>"$LOGFILE" | anew -q webs/dict_paths.txt + [ -s ".tmp/url_extract_tmp.txt" ] && cat .tmp/url_extract_tmp.txt | unfurl -u path 2>>"$LOGFILE" | anew -q webs/dict_paths.txt end_func "Results are saved in $domain/webs/dict_[words|paths].txt" ${FUNCNAME[0]} else if [ "$WORDLIST" = false ]; then @@ -1054,7 +1054,7 @@ function xss(){ function cors(){ if { [ ! -f "$called_fn_dir/.${FUNCNAME[0]}" ] || [ "$DIFF" = true ]; } && [ "$CORS" = true ]; then start_func "CORS Scan" - python3 $tools/Corsy/corsy.py -i webs/webs.txt > webs/cors.txt &>>"$LOGFILE" + python3 $tools/Corsy/corsy.py -i webs/webs.txt > webs/cors.txt 2>>"$LOGFILE" &>/dev/null [ -s "webs/cors.txt" ] && cat webs/cors.txt end_func "Results are saved in webs/cors.txt" ${FUNCNAME[0]} else @@ -1148,7 +1148,7 @@ function ssrf_checks(){ function crlf_checks(){ if { [ ! -f "$called_fn_dir/.${FUNCNAME[0]}" ] || [ "$DIFF" = true ]; } && [ "$CRLF_CHECKS" = true ]; then start_func "CRLF checks" - crlfuzz -l webs/webs.txt -o vulns/crlf.txt &>>"$LOGFILE" + crlfuzz -l webs/webs.txt -o vulns/crlf.txt 2>>"$LOGFILE" &>/dev/null end_func "Results are saved in vulns/crlf.txt" ${FUNCNAME[0]} else if [ "$CRLF_CHECKS" = false ]; then @@ -1238,7 +1238,7 @@ function spraying(){ if { [ ! 
-f "$called_fn_dir/.${FUNCNAME[0]}" ] || [ "$DIFF" = true ]; } && [ "$SPRAY" = true ]; then start_func "Password spraying" cd "$tools/brutespray" || { echo "Failed to cd directory in ${FUNCNAME[0]} @ line ${LINENO}"; exit 1; } - python3 brutespray.py --file $dir/.tmp/nmap_grep.gnmap --threads $BRUTESPRAY_THREADS --hosts $BRUTESPRAY_CONCURRENCE -o $dir/hosts/brutespray.txt &>>"$LOGFILE" + python3 brutespray.py --file $dir/.tmp/nmap_grep.gnmap --threads $BRUTESPRAY_THREADS --hosts $BRUTESPRAY_CONCURRENCE -o $dir/hosts/brutespray.txt 2>>"$LOGFILE" &>/dev/null cd "$dir" || { echo "Failed to cd directory in ${FUNCNAME[0]} @ line ${LINENO}"; exit 1; } end_func "Results are saved in hosts/brutespray.txt" ${FUNCNAME[0]} else @@ -1444,7 +1444,7 @@ function ipcidr_detection(){ } function ipcidr_target(){ - ipcidr_detection $1 | cut -d' ' -f3 | unfurl -u domains | sed 's/\.$//' | sort -u > ./target_reconftw_ipcidr.txt + ipcidr_detection $1 | cut -d' ' -f3 | unfurl -u domains 2>>"$LOGFILE" | sed 's/\.$//' | sort -u > ./target_reconftw_ipcidr.txt if [[ $(cat ./target_reconftw_ipcidr.txt | wc -l) -eq 1 ]]; then domain=$(cat ./target_reconftw_ipcidr.txt) elif [[ $(cat ./target_reconftw_ipcidr.txt | wc -l) -gt 1 ]]; then diff --git a/reconftw_axiom.sh b/reconftw_axiom.sh index 5140f3a0..10b7f463 100755 --- a/reconftw_axiom.sh +++ b/reconftw_axiom.sh @@ -139,9 +139,9 @@ function github_dorks(){ start_func "Github Dorks in process" if [ -s "${GITHUB_TOKENS}" ]; then if [ "$DEEP" = true ]; then - python3 "$tools/GitDorker/GitDorker.py" -tf "${GITHUB_TOKENS}" -e "$GITDORKER_THREADS" -q "$domain" -p -ri -d "$tools/GitDorker/Dorks/alldorksv3" | grep "\[+\]" | grep "git" | anew -q osint/gitdorks.txt &>>"$LOGFILE" + python3 "$tools/GitDorker/GitDorker.py" -tf "${GITHUB_TOKENS}" -e "$GITDORKER_THREADS" -q "$domain" -p -ri -d "$tools/GitDorker/Dorks/alldorksv3" 2>>"$LOGFILE" | grep "\[+\]" | grep "git" | anew -q osint/gitdorks.txt else - python3 "$tools/GitDorker/GitDorker.py" -tf "${GITHUB_TOKENS}" -e "$GITDORKER_THREADS" -q "$domain" -p -ri -d "$tools/GitDorker/Dorks/medium_dorks.txt" | grep "\[+\]" | grep "git" | anew -q osint/gitdorks.txt &>>"$LOGFILE" + python3 "$tools/GitDorker/GitDorker.py" -tf "${GITHUB_TOKENS}" -e "$GITDORKER_THREADS" -q "$domain" -p -ri -d "$tools/GitDorker/Dorks/medium_dorks.txt" 2>>"$LOGFILE" | grep "\[+\]" | grep "git" | anew -q osint/gitdorks.txt fi sed -r -i "s/\x1B\[([0-9]{1,3}(;[0-9]{1,2})?)?[mGK]//g" osint/gitdorks.txt else @@ -160,9 +160,9 @@ function github_dorks(){ function metadata(){ if { [ ! 
-f "$called_fn_dir/.${FUNCNAME[0]}" ] || [ "$DIFF" = true ]; } && [ "$METADATA" = true ] && [ "$OSINT" = true ]; then start_func "Scanning metadata in public files" - metafinder -d "$domain" -l 20 -o osint -go -bi -ba &>>"$LOGFILE" + metafinder -d "$domain" -l $METAFINDER_LIMIT -o osint -go -bi -ba 2>>"$LOGFILE" &>/dev/null mv "osint/${domain}/"*".txt" "osint/" 2>>"$LOGFILE" - rmdir "osint/${domain}" 2>>"$LOGFILE" + rm -rf "osint/${domain}" 2>>"$LOGFILE" end_func "Results are saved in $domain/osint/[software/authors/metadata_results].txt" ${FUNCNAME[0]} else if [ "$METADATA" = false ] || [ "$OSINT" = false ]; then @@ -184,7 +184,7 @@ function emails(){ cat .tmp/harvester.txt | awk '/Users/,/IPs/' | sed -e '1,2d' | head -n -2 | sed -e '/Searching /d' -e '/exception has occurred/d' -e '/found:/Q' | anew -q osint/users.txt cat .tmp/harvester.txt | awk '/Links/,/Users/' | sed -e '1,2d' | head -n -2 | sed -e '/Searching /d' -e '/exception has occurred/d' -e '/found:/Q' | anew -q osint/linkedin.txt fi - h8mail -t $domain -q domain --loose -c $tools/h8mail_config.ini -j .tmp/h8_results.json &>>"$LOGFILE" + h8mail -t $domain -q domain --loose -c $tools/h8mail_config.ini -j .tmp/h8_results.json 2>>"$LOGFILE" &>/dev/null [ -s ".tmp/h8_results.json" ] && cat .tmp/h8_results.json | jq -r '.targets[0] | .data[] | .[]' | cut -d '-' -f2 | anew -q osint/h8mail.txt PWNDB_STATUS=$(timeout 15s curl -Is --socks5-hostname localhost:9050 http://pwndb2am4tzkvold.onion | grep HTTP | cut -d ' ' -f2) @@ -293,20 +293,20 @@ function subdomains_full(){ function sub_passive(){ if [ ! -f "$called_fn_dir/.${FUNCNAME[0]}" ] || [ "$DIFF" = true ]; then start_subfunc "Running : Passive Subdomain Enumeration" - axiom-scan $list -m subfinder -all -o .tmp/subfinder_psub.txt &>>"$LOGFILE" - axiom-scan $list -m assetfinder -o .tmp/assetfinder_psub.txt &>>"$LOGFILE" - axiom-scan $list -m amass -passive -o .tmp/amass_psub.txt &>>"$LOGFILE" - axiom-scan $list -m findomain -o .tmp/findomain_psub.txt &>>"$LOGFILE" - axiom-scan $list -m waybackurls -o .tmp/waybackurls_psub_tmp.txt &>>"$LOGFILE" - [ -s ".tmp/waybackurls_psub_tmp.txt" ] && cat .tmp/waybackurls_psub_tmp.txt | unfurl --unique domains | anew -q .tmp/waybackurls_psub.txt - axiom-scan $list -m gau -o .tmp/gau_psub_tmp.txt &>>"$LOGFILE" - [ -s ".tmp/gau_psub_tmp.txt" ] && cat .tmp/gau_psub_tmp.txt | unfurl --unique domains | anew -q .tmp/gau_psub.txt + axiom-scan $list -m subfinder -all -o .tmp/subfinder_psub.txt 2>>"$LOGFILE" &>/dev/null + axiom-scan $list -m assetfinder -o .tmp/assetfinder_psub.txt 2>>"$LOGFILE" &>/dev/null + axiom-scan $list -m amass -passive -o .tmp/amass_psub.txt 2>>"$LOGFILE" &>/dev/null + axiom-scan $list -m findomain -o .tmp/findomain_psub.txt 2>>"$LOGFILE" &>/dev/null + axiom-scan $list -m waybackurls -o .tmp/waybackurls_psub_tmp.txt 2>>"$LOGFILE" &>/dev/null + [ -s ".tmp/waybackurls_psub_tmp.txt" ] && cat .tmp/waybackurls_psub_tmp.txt | unfurl -u domains 2>>"$LOGFILE" | anew -q .tmp/waybackurls_psub.txt + axiom-scan $list -m gau -o .tmp/gau_psub_tmp.txt 2>>"$LOGFILE" &>/dev/null + [ -s ".tmp/gau_psub_tmp.txt" ] && cat .tmp/gau_psub_tmp.txt | unfurl -u domains 2>>"$LOGFILE" | anew -q .tmp/gau_psub.txt crobat -s $domain 2>>"$LOGFILE" | anew -q .tmp/crobat_psub.txt if [ -s "${GITHUB_TOKENS}" ]; then if [ "$DEEP" = true ]; then - github-subdomains -d $domain -t $GITHUB_TOKENS -o .tmp/github_subdomains_psub.txt &>>"$LOGFILE" + github-subdomains -d $domain -t $GITHUB_TOKENS -o .tmp/github_subdomains_psub.txt 2>>"$LOGFILE" &>/dev/null else - 
github-subdomains -d $domain -k -q -t $GITHUB_TOKENS -o .tmp/github_subdomains_psub.txt &>>"$LOGFILE" + github-subdomains -d $domain -k -q -t $GITHUB_TOKENS -o .tmp/github_subdomains_psub.txt 2>>"$LOGFILE" &>/dev/null fi fi curl -s "https://jldc.me/anubis/subdomains/${domain}" 2>>"$LOGFILE" | grep -Po "((http|https):\/\/)?(([\w.-]*)\.([\w]*)\.([A-z]))\w+" | sed '/^\./d' | anew -q .tmp/jldc_psub.txt @@ -326,7 +326,7 @@ function sub_crt(){ if { [ ! -f "$called_fn_dir/.${FUNCNAME[0]}" ] || [ "$DIFF" = true ]; } && [ "$SUBCRT" = true ]; then start_subfunc "Running : Crtsh Subdomain Enumeration" echo "python3 -u /home/op/recon/ctfr/ctfr.py -d ${domain} -o ${domain}_ctfr.txt; cat ${domain}_ctfr.txt" > .tmp/sub_ctrf_commands.txt - axiom-scan .tmp/sub_ctrf_commands.txt -m exec -o .tmp/crtsh_subs_tmp.txt &>>"$LOGFILE" + axiom-scan .tmp/sub_ctrf_commands.txt -m exec -o .tmp/crtsh_subs_tmp.txt 2>>"$LOGFILE" &>/dev/null sed -i '1,11d' .tmp/crtsh_subs_tmp.txt curl "https://tls.bufferover.run/dns?q=.${domain}" 2>>"$LOGFILE" | jq -r .Results[] 2>>"$LOGFILE" | cut -d ',' -f3 | grep -F ".$domain" | anew -q .tmp/crtsh_subs_tmp.txt curl "https://dns.bufferover.run/dns?q=.${domain}" 2>>"$LOGFILE" | jq -r '.FDNS_A'[],'.RDNS'[] 2>>"$LOGFILE" | cut -d ',' -f2 | grep -F ".$domain" | anew -q .tmp/crtsh_subs_tmp.txt @@ -347,7 +347,7 @@ function sub_active(){ [ -s "${inScope_file}" ] && cat ${inScope_file} .tmp/inscope_subs.txt cat .tmp/*_subs.txt | anew -q .tmp/subs_no_resolved.txt deleteOutScoped $outOfScope_file .tmp/subs_no_resolved.txt - [ -s ".tmp/subs_no_resolved.txt" ] && axiom-scan .tmp/subs_no_resolved.txt -m puredns-resolve -r /home/op/lists/resolvers.txt -o .tmp/subdomains_tmp.txt &>>"$LOGFILE" + [ -s ".tmp/subs_no_resolved.txt" ] && axiom-scan .tmp/subs_no_resolved.txt -m puredns-resolve -r /home/op/lists/resolvers.txt -o .tmp/subdomains_tmp.txt 2>>"$LOGFILE" &>/dev/null echo $domain | dnsx -retry 3 -silent -r $resolvers_trusted 2>>"$LOGFILE" | anew -q .tmp/subdomains_tmp.txt NUMOFLINES=$(cat .tmp/subdomains_tmp.txt 2>>"$LOGFILE" | grep "\.$domain$\|^$domain$" | anew subdomains/subdomains.txt | wc -l) end_subfunc "${NUMOFLINES} new subs (active resolution)" ${FUNCNAME[0]} @@ -359,9 +359,9 @@ function sub_active(){ function sub_dns(){ if [ ! -f "$called_fn_dir/.${FUNCNAME[0]}" ] || [ "$DIFF" = true ]; then start_subfunc "Running : DNS Subdomain Enumeration" - [ -s "subdomains/subdomains.txt" ] && axiom-scan subdomains/subdomains.txt -m dnsx -retry 3 -a -aaaa -cname -ns -ptr -mx -soa -resp -o subdomains/subdomains_cname.txt &>>"$LOGFILE" + [ -s "subdomains/subdomains.txt" ] && axiom-scan subdomains/subdomains.txt -m dnsx -retry 3 -a -aaaa -cname -ns -ptr -mx -soa -resp -o subdomains/subdomains_cname.txt 2>>"$LOGFILE" &>/dev/null [ -s "subdomains/subdomains_cname.txt" ] && cat subdomains/subdomains_cname.txt | cut -d '[' -f2 | sed 's/.$//' | grep ".$domain$" | anew -q .tmp/subdomains_dns.txt - [ -s ".tmp/subdomains_dns.txt" ] && axiom-scan .tmp/subdomains_dns.txt -m puredns-resolve -r /home/op/lists/resolvers.txt -o .tmp/subdomains_dns_resolved.txt &>>"$LOGFILE" + [ -s ".tmp/subdomains_dns.txt" ] && axiom-scan .tmp/subdomains_dns.txt -m puredns-resolve -r /home/op/lists/resolvers.txt -o .tmp/subdomains_dns_resolved.txt 2>>"$LOGFILE" &>/dev/null NUMOFLINES=$(cat .tmp/subdomains_dns_resolved.txt 2>>"$LOGFILE" | grep "\.$domain$\|^$domain$" | anew subdomains/subdomains.txt | wc -l) end_subfunc "${NUMOFLINES} new subs (dns resolution)" ${FUNCNAME[0]} else @@ -373,11 +373,11 @@ function sub_brute(){ if { [ ! 
-f "$called_fn_dir/.${FUNCNAME[0]}" ] || [ "$DIFF" = true ]; } && [ "$SUBBRUTE" = true ]; then start_subfunc "Running : Bruteforce Subdomain Enumeration" if [ "$DEEP" = true ]; then - axiom-scan $subs_wordlist_big -m puredns-single $domain -r /home/op/lists/resolvers.txt -o .tmp/subs_brute.txt &>>"$LOGFILE" + axiom-scan $subs_wordlist_big -m puredns-single $domain -r /home/op/lists/resolvers.txt -o .tmp/subs_brute.txt 2>>"$LOGFILE" &>/dev/null else - axiom-scan $subs_wordlist -m puredns-single $domain -r /home/op/lists/resolvers.txt -o .tmp/subs_brute.txt &>>"$LOGFILE" + axiom-scan $subs_wordlist -m puredns-single $domain -r /home/op/lists/resolvers.txt -o .tmp/subs_brute.txt 2>>"$LOGFILE" &>/dev/null fi - [ -s ".tmp/subs_brute.txt" ] && axiom-scan .tmp/subs_brute.txt -m puredns-resolve -r /home/op/lists/resolvers.txt -o .tmp/subs_brute_valid.txt &>>"$LOGFILE" + [ -s ".tmp/subs_brute.txt" ] && axiom-scan .tmp/subs_brute.txt -m puredns-resolve -r /home/op/lists/resolvers.txt -o .tmp/subs_brute_valid.txt 2>>"$LOGFILE" &>/dev/null NUMOFLINES=$(cat .tmp/subs_brute_valid.txt 2>>"$LOGFILE" | sed "s/*.//" | grep ".$domain$" | anew subdomains/subdomains.txt | wc -l) end_subfunc "${NUMOFLINES} new subs (bruteforce)" ${FUNCNAME[0]} else @@ -394,25 +394,25 @@ function sub_scraping(){ start_subfunc "Running : Source code scraping subdomain search" touch .tmp/scrap_subs.txt if [ -s "$dir/subdomains/subdomains.txt" ]; then - axiom-scan subdomains/subdomains.txt -m httpx -follow-host-redirects -random-agent -status-code -threads $HTTPX_THREADS -timeout $HTTPX_TIMEOUT -silent -retries 2 -no-color -o .tmp/probed_tmp_scrap1.txt &>>"$LOGFILE" + axiom-scan subdomains/subdomains.txt -m httpx -follow-host-redirects -random-agent -status-code -threads $HTTPX_THREADS -timeout $HTTPX_TIMEOUT -silent -retries 2 -no-color -o .tmp/probed_tmp_scrap1.txt 2>>"$LOGFILE" &>/dev/null [ -s ".tmp/probed_tmp_scrap1.txt" ] && cat .tmp/probed_tmp_scrap1.txt | cut -d ' ' -f1 | grep ".$domain$" | anew -q .tmp/probed_tmp_scrap.txt - axiom-scan .tmp/probed_tmp_scrap.txt -m httpx -csp-probe -random-agent -status-code -threads $HTTPX_THREADS -timeout $HTTPX_TIMEOUT -silent -retries 2 -no-color -o .tmp/probed_tmp_scrap2.txt &>>"$LOGFILE" - [ -s ".tmp/probed_tmp_scrap2.txt" ] && cat .tmp/probed_tmp_scrap2.txt | cut -d ' ' -f1 | grep ".$domain$" | anew .tmp/probed_tmp_scrap.txt | unfurl -u domains | anew -q .tmp/scrap_subs.txt - axiom-scan .tmp/probed_tmp_scrap.txt -m httpx -tls-probe -random-agent -status-code -threads $HTTPX_THREADS -timeout $HTTPX_TIMEOUT -silent -retries 2 -no-color -o .tmp/probed_tmp_scrap3.txt &>>"$LOGFILE" - [ -s ".tmp/probed_tmp_scrap3.txt" ] && cat .tmp/probed_tmp_scrap3.txt | cut -d ' ' -f1 | grep ".$domain$" | anew .tmp/probed_tmp_scrap.txt | unfurl -u domains | anew -q .tmp/scrap_subs.txt + axiom-scan .tmp/probed_tmp_scrap.txt -m httpx -csp-probe -random-agent -status-code -threads $HTTPX_THREADS -timeout $HTTPX_TIMEOUT -silent -retries 2 -no-color -o .tmp/probed_tmp_scrap2.txt 2>>"$LOGFILE" &>/dev/null + [ -s ".tmp/probed_tmp_scrap2.txt" ] && cat .tmp/probed_tmp_scrap2.txt | cut -d ' ' -f1 | grep ".$domain$" | anew .tmp/probed_tmp_scrap.txt | unfurl -u domains 2>>"$LOGFILE" | anew -q .tmp/scrap_subs.txt + axiom-scan .tmp/probed_tmp_scrap.txt -m httpx -tls-probe -random-agent -status-code -threads $HTTPX_THREADS -timeout $HTTPX_TIMEOUT -silent -retries 2 -no-color -o .tmp/probed_tmp_scrap3.txt 2>>"$LOGFILE" &>/dev/null + [ -s ".tmp/probed_tmp_scrap3.txt" ] && cat .tmp/probed_tmp_scrap3.txt | cut -d ' ' -f1 | 
grep ".$domain$" | anew .tmp/probed_tmp_scrap.txt | unfurl -u domains 2>>"$LOGFILE" | anew -q .tmp/scrap_subs.txt if [ "$DEEP" = true ]; then - [ -s ".tmp/probed_tmp_scrap.txt" ] && axiom-scan .tmp/probed_tmp_scrap.txt -m gospider --js -d 3 --sitemap --robots -w -r -o .tmp/gospider &>>"$LOGFILE" + [ -s ".tmp/probed_tmp_scrap.txt" ] && axiom-scan .tmp/probed_tmp_scrap.txt -m gospider --js -d 3 --sitemap --robots -w -r -o .tmp/gospider 2>>"$LOGFILE" &>/dev/null else - [ -s ".tmp/probed_tmp_scrap.txt" ] && axiom-scan .tmp/probed_tmp_scrap.txt -m gospider --js -d 2 --sitemap --robots -w -r -o .tmp/gospider &>>"$LOGFILE" + [ -s ".tmp/probed_tmp_scrap.txt" ] && axiom-scan .tmp/probed_tmp_scrap.txt -m gospider --js -d 2 --sitemap --robots -w -r -o .tmp/gospider 2>>"$LOGFILE" &>/dev/null fi NUMFILES=0 touch .tmp/gospider.txt [[ -d .tmp/gospider/ ]] && NUMFILES=$(find .tmp/gospider/ -type f | wc -l) [[ $NUMFILES -gt 0 ]] && cat .tmp/gospider/* | sed '/^.\{2048\}./d' | anew -q .tmp/gospider.txt - grep -Eo 'https?://[^ ]+' .tmp/gospider.txt | sed 's/]$//' | unfurl --unique domains | grep ".$domain$" | anew -q .tmp/scrap_subs.txt - [ -s ".tmp/scrap_subs.txt" ] && axiom-scan .tmp/scrap_subs.txt -m puredns-resolve -r /home/op/lists/resolvers.txt -o .tmp/scrap_subs_resolved.txt &>>"$LOGFILE" + grep -Eo 'https?://[^ ]+' .tmp/gospider.txt | sed 's/]$//' | unfurl -u domains 2>>"$LOGFILE" | grep ".$domain$" | anew -q .tmp/scrap_subs.txt + [ -s ".tmp/scrap_subs.txt" ] && axiom-scan .tmp/scrap_subs.txt -m puredns-resolve -r /home/op/lists/resolvers.txt -o .tmp/scrap_subs_resolved.txt 2>>"$LOGFILE" &>/dev/null NUMOFLINES=$(cat .tmp/scrap_subs_resolved.txt 2>>"$LOGFILE" | grep "\.$domain$\|^$domain$" | anew subdomains/subdomains.txt | tee .tmp/diff_scrap.txt | wc -l) - axiom-scan .tmp/diff_scrap.txt -m httpx -follow-host-redirects -random-agent -status-code -threads $HTTPX_THREADS -timeout $HTTPX_TIMEOUT -silent -retries 2 -no-color -o .tmp/probed_tmp_scrap4.txt &>>"$LOGFILE" + axiom-scan .tmp/diff_scrap.txt -m httpx -follow-host-redirects -random-agent -status-code -threads $HTTPX_THREADS -timeout $HTTPX_TIMEOUT -silent -retries 2 -no-color -o .tmp/probed_tmp_scrap4.txt 2>>"$LOGFILE" &>/dev/null [ -s ".tmp/probed_tmp_scrap4.txt" ] && cat .tmp/probed_tmp_scrap4.txt | cut -d ' ' -f1 | grep ".$domain$" | anew -q .tmp/probed_tmp_scrap.txt end_subfunc "${NUMOFLINES} new subs (code scraping)" ${FUNCNAME[0]} else @@ -431,16 +431,16 @@ function sub_permut(){ if { [ ! 
-f "$called_fn_dir/.${FUNCNAME[0]}" ] || [ "$DIFF" = true ]; } && [ "$SUBPERMUTE" = true ]; then start_subfunc "Running : Permutations Subdomain Enumeration" - [ "$DEEP" = true ] && [ -s "subdomains/subdomains.txt" ] && axiom-scan subdomains/subdomains.txt -m dnscewl -o .tmp/DNScewl1_.txt &>>"$LOGFILE" - [ "$DEEP" = false ] && [ $(cat .tmp/subs_no_resolved.txt | wc -l) -le 100 ] && axiom-scan .tmp/subs_no_resolved.txt -m dnscewl -o .tmp/DNScewl1_.txt &>>"$LOGFILE" - [ "$DEEP" = false ] && [ $(cat .tmp/subs_no_resolved.txt | wc -l) -gt 100 ] && [ $(cat .tmp/subs_no_resolved.txt | wc -l) -le 200 ] && axiom-scan .tmp/subs_no_resolved.txt -m dnscewl -o .tmp/DNScewl1_.txt &>>"$LOGFILE" - [ "$DEEP" = false ] && [ $(cat .tmp/subs_no_resolved.txt | wc -l) -gt 200 ] && [ $(cat subdomains/subdomains.txt | wc -l) -le 100 ] && axiom-scan subdomains/subdomains.txt -m dnscewl -o .tmp/DNScewl1_.txt &>>"$LOGFILE" + [ "$DEEP" = true ] && [ -s "subdomains/subdomains.txt" ] && axiom-scan subdomains/subdomains.txt -m dnscewl -o .tmp/DNScewl1_.txt 2>>"$LOGFILE" &>/dev/null + [ "$DEEP" = false ] && [ $(cat .tmp/subs_no_resolved.txt | wc -l) -le 100 ] && axiom-scan .tmp/subs_no_resolved.txt -m dnscewl -o .tmp/DNScewl1_.txt 2>>"$LOGFILE" &>/dev/null + [ "$DEEP" = false ] && [ $(cat .tmp/subs_no_resolved.txt | wc -l) -gt 100 ] && [ $(cat .tmp/subs_no_resolved.txt | wc -l) -le 200 ] && axiom-scan .tmp/subs_no_resolved.txt -m dnscewl -o .tmp/DNScewl1_.txt 2>>"$LOGFILE" &>/dev/null + [ "$DEEP" = false ] && [ $(cat .tmp/subs_no_resolved.txt | wc -l) -gt 200 ] && [ $(cat subdomains/subdomains.txt | wc -l) -le 100 ] && axiom-scan subdomains/subdomains.txt -m dnscewl -o .tmp/DNScewl1_.txt 2>>"$LOGFILE" &>/dev/null [ -s ".tmp/DNScewl1_.txt" ] && cat .tmp/DNScewl1_.txt | grep ".$domain$" > .tmp/DNScewl1.txt - [ -s ".tmp/DNScewl1.txt" ] && axiom-scan .tmp/DNScewl1.txt -m puredns-resolve -r /home/op/lists/resolvers.txt -o .tmp/permute1_tmp.txt &>>"$LOGFILE" + [ -s ".tmp/DNScewl1.txt" ] && axiom-scan .tmp/DNScewl1.txt -m puredns-resolve -r /home/op/lists/resolvers.txt -o .tmp/permute1_tmp.txt 2>>"$LOGFILE" &>/dev/null [ -s ".tmp/permute1_tmp.txt" ] && cat .tmp/permute1_tmp.txt | anew -q .tmp/permute1.txt - [ -s ".tmp/permute1.txt" ] && axiom-scan .tmp/permute1.txt -m dnscewl -o .tmp/DNScewl2_.txt &>>"$LOGFILE" + [ -s ".tmp/permute1.txt" ] && axiom-scan .tmp/permute1.txt -m dnscewl -o .tmp/DNScewl2_.txt 2>>"$LOGFILE" &>/dev/null [ -s ".tmp/DNScewl2_.txt" ] && cat .tmp/DNScewl2_.txt | grep ".$domain$" > .tmp/DNScewl2.txt - [ -s ".tmp/DNScewl2.txt" ] && axiom-scan .tmp/DNScewl2.txt -m puredns-resolve -r /home/op/lists/resolvers.txt -o .tmp/permute2_tmp.txt &>>"$LOGFILE" + [ -s ".tmp/DNScewl2.txt" ] && axiom-scan .tmp/DNScewl2.txt -m puredns-resolve -r /home/op/lists/resolvers.txt -o .tmp/permute2_tmp.txt 2>>"$LOGFILE" &>/dev/null [ -s ".tmp/permute2_tmp.txt" ] && cat .tmp/permute2_tmp.txt | anew -q .tmp/permute2.txt cat .tmp/permute1.txt .tmp/permute2.txt 2>>"$LOGFILE" | anew -q .tmp/permute_subs.txt @@ -470,13 +470,13 @@ function sub_recursive(){ echo $sub | anew -q .tmp/sub_pass_recur_target.com done if [ -s ".tmp/sub_pass_recur_target.com" ]; then - axiom-scan .tmp/sub_pass_recur_target.com -m subfinder -all -o .tmp/subfinder_prec.txt &>>"$LOGFILE" - axiom-scan .tmp/sub_pass_recur_target.com -m assetfinder -o .tmp/assetfinder_prec.txt &>>"$LOGFILE" - axiom-scan .tmp/sub_pass_recur_target.com -m amass -passive -o .tmp/amass_prec.txt &>>"$LOGFILE" - axiom-scan .tmp/sub_pass_recur_target.com -m findomain -o 
.tmp/findomain_prec.txt &>>"$LOGFILE" + axiom-scan .tmp/sub_pass_recur_target.com -m subfinder -all -o .tmp/subfinder_prec.txt 2>>"$LOGFILE" &>/dev/null + axiom-scan .tmp/sub_pass_recur_target.com -m assetfinder -o .tmp/assetfinder_prec.txt 2>>"$LOGFILE" &>/dev/null + axiom-scan .tmp/sub_pass_recur_target.com -m amass -passive -o .tmp/amass_prec.txt 2>>"$LOGFILE" &>/dev/null + axiom-scan .tmp/sub_pass_recur_target.com -m findomain -o .tmp/findomain_prec.txt 2>>"$LOGFILE" &>/dev/null fi cat .tmp/*_prec.txt 2>>"$LOGFILE" | anew -q .tmp/passive_recursive.txt - [ -s ".tmp/passive_recursive.txt" ] && axiom-scan .tmp/passive_recursive.txt -m puredns-resolve -r /home/op/lists/resolvers.txt -o .tmp/passive_recurs_tmp.txt &>>"$LOGFILE" + [ -s ".tmp/passive_recursive.txt" ] && axiom-scan .tmp/passive_recursive.txt -m puredns-resolve -r /home/op/lists/resolvers.txt -o .tmp/passive_recurs_tmp.txt 2>>"$LOGFILE" &>/dev/null [ -s ".tmp/passive_recurs_tmp.txt" ] && cat .tmp/passive_recurs_tmp.txt | anew -q subdomains/subdomains.txt # Bruteforce recursive @@ -485,15 +485,15 @@ function sub_recursive(){ for sub in $(cat subdomains/subdomains.txt); do sed "s/$/.$sub/" $subs_wordlist >> .tmp/brute_recursive_wordlist.txt done - [ -s ".tmp/brute_recursive_wordlist.txt" ] && axiom-scan .tmp/brute_recursive_wordlist.txt -m puredns-resolve -r /home/op/lists/resolvers.txt -o .tmp/brute_recursive_result.txt &>>"$LOGFILE" + [ -s ".tmp/brute_recursive_wordlist.txt" ] && axiom-scan .tmp/brute_recursive_wordlist.txt -m puredns-resolve -r /home/op/lists/resolvers.txt -o .tmp/brute_recursive_result.txt 2>>"$LOGFILE" &>/dev/null [ -s ".tmp/brute_recursive_result.txt" ] && cat .tmp/brute_recursive_result.txt | anew -q .tmp/brute_recursive.txt - [ -s ".tmp/brute_recursive.txt" ] && axiom-scan .tmp/brute_recursive.txt -m dnscewl -o .tmp/DNScewl1_recursive_.txt &>>"$LOGFILE" + [ -s ".tmp/brute_recursive.txt" ] && axiom-scan .tmp/brute_recursive.txt -m dnscewl -o .tmp/DNScewl1_recursive_.txt 2>>"$LOGFILE" &>/dev/null [ -s ".tmp/DNScewl1_recursive_.txt" ] && cat .tmp/DNScewl1_recursive_.txt | grep ".$domain$" > .tmp/DNScewl1_recursive.txt - [ -s ".tmp/DNScewl1_recursive.txt" ] && axiom-scan .tmp/DNScewl1_recursive.txt -m puredns-resolve -r /home/op/lists/resolvers.txt -o .tmp/permute1_recursive_tmp.txt &>>"$LOGFILE" + [ -s ".tmp/DNScewl1_recursive.txt" ] && axiom-scan .tmp/DNScewl1_recursive.txt -m puredns-resolve -r /home/op/lists/resolvers.txt -o .tmp/permute1_recursive_tmp.txt 2>>"$LOGFILE" &>/dev/null [ -s ".tmp/permute1_recursive_tmp.txt" ] && cat .tmp/permute1_recursive_tmp.txt | anew -q .tmp/permute1_recursive.txt - [ -s ".tmp/permute1_recursive.txt" ] && axiom-scan .tmp/permute1_recursive.txt -m dnscewl -o .tmp/DNScewl2_recursive_.txt &>>"$LOGFILE" + [ -s ".tmp/permute1_recursive.txt" ] && axiom-scan .tmp/permute1_recursive.txt -m dnscewl -o .tmp/DNScewl2_recursive_.txt 2>>"$LOGFILE" &>/dev/null [ -s ".tmp/DNScewl2_recursive_.txt" ] && cat .tmp/DNScewl2_recursive_.txt | grep ".$domain$" > .tmp/DNScewl2_recursive.txt - [ -s ".tmp/DNScewl2_recursive.txt" ] && axiom-scan .tmp/DNScewl2_recursive.txt -m puredns-resolve -r /home/op/lists/resolvers.txt -o .tmp/permute2_recursive_tmp.txt &>>"$LOGFILE" + [ -s ".tmp/DNScewl2_recursive.txt" ] && axiom-scan .tmp/DNScewl2_recursive.txt -m puredns-resolve -r /home/op/lists/resolvers.txt -o .tmp/permute2_recursive_tmp.txt 2>>"$LOGFILE" &>/dev/null cat .tmp/permute1_recursive.txt .tmp/permute2_recursive_tmp.txt 2>>"$LOGFILE" | anew -q .tmp/permute_recursive.txt NUMOFLINES=$(cat 
.tmp/permute_recursive.txt .tmp/brute_recursive.txt 2>>"$LOGFILE" | grep "\.$domain$\|^$domain$" | anew subdomains/subdomains.txt | wc -l) end_subfunc "${NUMOFLINES} new subs (recursive)" ${FUNCNAME[0]} @@ -513,7 +513,7 @@ function subtakeover(){ if { [ ! -f "$called_fn_dir/.${FUNCNAME[0]}" ] || [ "$DIFF" = true ]; } && [ "$SUBTAKEOVER" = true ]; then start_func "Looking for possible subdomain takeover" touch .tmp/tko.txt - [ -s "webs/webs.txt" ] && axiom-scan webs/webs.txt -m nuclei -w /home/op/recon/nuclei/takeovers/ -o .tmp/tko.txt &>>"$LOGFILE" + [ -s "webs/webs.txt" ] && axiom-scan webs/webs.txt -m nuclei -w /home/op/recon/nuclei/takeovers/ -o .tmp/tko.txt 2>>"$LOGFILE" &>/dev/null NUMOFLINES=$(cat .tmp/tko.txt 2>>"$LOGFILE" | anew webs/takeover.txt | wc -l) if [ "$NUMOFLINES" -gt 0 ]; then notification "${NUMOFLINES} new possible takeovers found" info @@ -531,7 +531,7 @@ function subtakeover(){ function zonetransfer(){ if { [ ! -f "$called_fn_dir/.${FUNCNAME[0]}" ] || [ "$DIFF" = true ]; } && [ "$ZONETRANSFER" = true ]; then start_func "Zone transfer check" - python3 $tools/dnsrecon/dnsrecon.py -d $domain -a -j subdomains/zonetransfer.json &>>"$LOGFILE" + python3 $tools/dnsrecon/dnsrecon.py -d $domain -a -j subdomains/zonetransfer.json 2>>"$LOGFILE" &>/dev/null if [ -s "subdomains/zonetransfer.json" ]; then if grep -q "\"zone_transfer\"\: \"success\"" subdomains/zonetransfer.json ; then notification "Zone transfer found on ${domain}!" info; fi fi @@ -548,7 +548,7 @@ function zonetransfer(){ function s3buckets(){ if { [ ! -f "$called_fn_dir/.${FUNCNAME[0]}" ] || [ "$DIFF" = true ]; } && [ "$S3BUCKETS" = true ]; then start_func "AWS S3 buckets search" - axiom-scan webs/webs.txt -m s3scanner -o .tmp/s3buckets_tmp.txt &>>"$LOGFILE" + axiom-scan webs/webs.txt -m s3scanner -o .tmp/s3buckets_tmp.txt 2>>"$LOGFILE" &>/dev/null cat .tmp/s3buckets_tmp.txt | grep -iv "not_exist" | grep -iv "Warning:" | anew -q .tmp/s3buckets.txt NUMOFLINES=$(cat .tmp/s3buckets.txt 2>>"$LOGFILE" | anew subdomains/s3buckets.txt | wc -l) if [ "$NUMOFLINES" -gt 0 ]; then @@ -574,7 +574,7 @@ function webprobe_simple(){ if [ -s ".tmp/probed_tmp_scrap.txt" ]; then mv .tmp/probed_tmp_scrap.txt .tmp/probed_tmp.txt else - axiom-scan subdomains/subdomains.txt -m httpx -follow-host-redirects -random-agent -threads $HTTPX_THREADS -status-code -timeout $HTTPX_TIMEOUT -silent -retries 2 -no-color -o .tmp/probed_tmp_.txt &>>"$LOGFILE" + axiom-scan subdomains/subdomains.txt -m httpx -follow-host-redirects -random-agent -threads $HTTPX_THREADS -status-code -timeout $HTTPX_TIMEOUT -silent -retries 2 -no-color -o .tmp/probed_tmp_.txt 2>>"$LOGFILE" &>/dev/null [ -s ".tmp/probed_tmp_.txt" ] && cat .tmp/probed_tmp_.txt | cut -d ' ' -f1 | grep ".$domain$" | anew -q .tmp/probed_tmp.txt fi if [ -s ".tmp/probed_tmp.txt" ]; then @@ -583,7 +583,7 @@ function webprobe_simple(){ end_subfunc "${NUMOFLINES} new websites resolved" ${FUNCNAME[0]} if [ "$PROXY" = true ] && [ -n "$proxy_url" ] && [[ $(cat webs/webs.txt| wc -l) -le 1500 ]]; then notification "Sending websites to proxy" info - ffuf -mc all -w webs/webs.txt -u FUZZ -replay-proxy $proxy_url &>>"$LOGFILE" + ffuf -mc all -w webs/webs.txt -u FUZZ -replay-proxy $proxy_url 2>>"$LOGFILE" &>/dev/null fi else end_subfunc "No new websites to probe" ${FUNCNAME[0]} @@ -601,8 +601,8 @@ function webprobe_full(){ if { [ ! 
-f "$called_fn_dir/.${FUNCNAME[0]}" ] || [ "$DIFF" = true ]; } && [ "$WEBPROBEFULL" = true ]; then start_func "Http probing non standard ports" - axiom-scan subdomains/subdomains.txt -m unimap --fast-scan --ports $UNCOMMON_PORTS_WEB -q -k --url-output -o .tmp/nmap_uncommonweb.txt &>>"$LOGFILE" - [ -s ".tmp/nmap_uncommonweb.txt" ] && axiom-scan .tmp/nmap_uncommonweb.txt -m httpx -follow-host-redirects -random-agent -status-code -threads $HTTPX_UNCOMMONPORTS_THREADS -timeout $HTTPX_UNCOMMONPORTS_TIMEOUT -silent -retries 2 -no-color -o .tmp/probed_uncommon_ports_tmp_.txt &>>"$LOGFILE" + axiom-scan subdomains/subdomains.txt -m unimap --fast-scan --ports $UNCOMMON_PORTS_WEB -q -k --url-output -o .tmp/nmap_uncommonweb.txt 2>>"$LOGFILE" &>/dev/null + [ -s ".tmp/nmap_uncommonweb.txt" ] && axiom-scan .tmp/nmap_uncommonweb.txt -m httpx -follow-host-redirects -random-agent -status-code -threads $HTTPX_UNCOMMONPORTS_THREADS -timeout $HTTPX_UNCOMMONPORTS_TIMEOUT -silent -retries 2 -no-color -o .tmp/probed_uncommon_ports_tmp_.txt 2>>"$LOGFILE" &>/dev/null [ -s ".tmp/probed_uncommon_ports_tmp_.txt" ] && cat .tmp/probed_uncommon_ports_tmp_.txt | cut -d ' ' -f1 | grep ".$domain$" | anew -q .tmp/probed_uncommon_ports_tmp.txt #axiom-scan subdomains/subdomains.txt -m naabu -p $UNCOMMON_PORTS_WEB -o .tmp/nmap_uncommonweb.txt &>>"$LOGFILE" && uncommon_ports_checked=$(cat .tmp/nmap_uncommonweb.txt | cut -d ':' -f2 | sort -u | sed -e 'H;${x;s/\n/,/g;s/^,//;p;};d') @@ -615,7 +615,7 @@ function webprobe_full(){ end_func "Results are saved in $domain/webs/webs_uncommon_ports.txt" ${FUNCNAME[0]} if [ "$PROXY" = true ] && [ -n "$proxy_url" ] && [[ $(cat webs/webs_uncommon_ports.txt| wc -l) -le 1500 ]]; then notification "Sending websites uncommon ports to proxy" info - ffuf -mc all -w webs/webs_uncommon_ports.txt -u FUZZ -replay-proxy $proxy_url &>>"$LOGFILE" + ffuf -mc all -w webs/webs_uncommon_ports.txt -u FUZZ -replay-proxy $proxy_url 2>>"$LOGFILE" &>/dev/null fi else if [ "$WEBPROBEFULL" = false ]; then @@ -630,7 +630,7 @@ function screenshot(){ if { [ ! -f "$called_fn_dir/.${FUNCNAME[0]}" ] || [ "$DIFF" = true ]; } && [ "$WEBSCREENSHOT" = true ]; then start_func "Web Screenshots" cat webs/webs.txt webs/webs_uncommon_ports.txt 2>>"$LOGFILE" | anew -q .tmp/webs_screenshots.txt - axiom-scan .tmp/webs_screenshots.txt -m webscreenshot -w $WEBSCREENSHOT_THREADS -o screenshots &>>"$LOGFILE" + axiom-scan .tmp/webs_screenshots.txt -m webscreenshot -w $WEBSCREENSHOT_THREADS -o screenshots 2>>"$LOGFILE" &>/dev/null # axiom-scan .tmp/webs_screenshots.txt -m "$AXIOM_SCREENSHOT_MODULE" -o screenshots &>>"$LOGFILE" end_func "Results are saved in $domain/screenshots folder" ${FUNCNAME[0]} else @@ -650,7 +650,7 @@ function favicon(){ if { [ ! 
-f "$called_fn_dir/.${FUNCNAME[0]}" ] || [ "$DIFF" = true ]; } && [ "$FAVICON" = true ]; then start_func "Favicon Ip Lookup" cd "$tools/fav-up" || { echo "Failed to cd to $dir in ${FUNCNAME[0]} @ line ${LINENO}"; exit 1; } - python3 favUp.py -w "$domain" -sc -o favicontest.json &>>"$LOGFILE" + python3 favUp.py -w "$domain" -sc -o favicontest.json 2>>"$LOGFILE" &>/dev/null if [ -s "favicontest.json" ]; then cat favicontest.json | jq -r '.found_ips' 2>>"$LOGFILE" | grep -v "not-found" > favicontest.txt sed -i "s/|/\n/g" favicontest.txt @@ -677,7 +677,7 @@ function portscan(){ done awk '{ print $2 " " $1}' .tmp/subs_ips.txt | sort -k2 -n | anew -q hosts/subs_ips_vhosts.txt [ -s "hosts/subs_ips_vhosts.txt" ] && cat hosts/subs_ips_vhosts.txt | cut -d ' ' -f1 | grep -Eiv "^(127|10|169\.154|172\.1[6789]|172\.2[0-9]|172\.3[01]|192\.168)\." | grep -oE "\b([0-9]{1,3}\.){3}[0-9]{1,3}\b" | anew -q hosts/ips.txt - [ -s "hosts/ips.txt" ] && axiom-scan hosts/ips.txt -m cf-check -o .tmp/ips_nowaf_.txt &>>"$LOGFILE" + [ -s "hosts/ips.txt" ] && axiom-scan hosts/ips.txt -m cf-check -o .tmp/ips_nowaf_.txt 2>>"$LOGFILE" &>/dev/null [ -s ".tmp/ips_nowaf_.txt" ] && cat .tmp/ips_nowaf_.txt | grep -Eiv "^(127|10|169\.154|172\.1[6789]|172\.2[0-9]|172\.3[01]|192\.168)\." | grep -oE "\b([0-9]{1,3}\.){3}[0-9]{1,3}\b" | anew -q .tmp/ips_nowaf.txt printf "${bblue}\n Resolved IP addresses (No WAF) ${reset}\n\n"; [ -s ".tmp/ips_nowaf.txt" ] && cat .tmp/ips_nowaf.txt | sort @@ -688,7 +688,7 @@ function portscan(){ done fi if [ "$PORTSCAN_ACTIVE" = true ]; then - [ -s ".tmp/ips_nowaf.txt" ] && axiom-scan .tmp/ips_nowaf.txt -m nmapx --top-ports 1000 -sV -n -Pn --max-retries 2 -o hosts/portscan_active.txt &>>"$LOGFILE" + [ -s ".tmp/ips_nowaf.txt" ] && axiom-scan .tmp/ips_nowaf.txt -m nmapx --top-ports 1000 -sV -n -Pn --max-retries 2 -o hosts/portscan_active.txt 2>>"$LOGFILE" &>/dev/null fi end_func "Results are saved in hosts/portscan_[passive|active].txt" ${FUNCNAME[0]} else @@ -704,7 +704,7 @@ function cloudprovider(){ if { [ ! -f "$called_fn_dir/.${FUNCNAME[0]}" ] || [ "$DIFF" = true ]; } && [ "$CLOUD_IP" = true ]; then start_func "Cloud provider check" cd "$tools/ip2provider" || { echo "Failed to cd directory in ${FUNCNAME[0]} @ line ${LINENO}"; exit 1; } - [ -s "$dir/hosts/ips.txt" ] && cat $dir/hosts/ips.txt | grep -oE "\b([0-9]{1,3}\.){3}[0-9]{1,3}\b" | ./ip2provider.py | anew -q $dir/hosts/cloud_providers.txt &>>"$LOGFILE" + [ -s "$dir/hosts/ips.txt" ] && cat $dir/hosts/ips.txt | grep -oE "\b([0-9]{1,3}\.){3}[0-9]{1,3}\b" | ./ip2provider.py | anew -q $dir/hosts/cloud_providers.txt 2>>"$LOGFILE" &>/dev/null cd "$dir" || { echo "Failed to cd directory in ${FUNCNAME[0]} @ line ${LINENO}"; exit 1; } end_func "Results are saved in hosts/cloud_providers.txt" ${FUNCNAME[0]} else @@ -724,7 +724,7 @@ function waf_checks(){ if { [ ! -f "$called_fn_dir/.${FUNCNAME[0]}" ] || [ "$DIFF" = true ]; } && [ "$WAF_DETECTION" = true ]; then start_func "Website's WAF detection" if [ -s "./webs/webs.txt" ]; then - axiom-scan webs/webs.txt -m wafw00f -o .tmp/wafs.txt &>>"$LOGFILE" + axiom-scan webs/webs.txt -m wafw00f -o .tmp/wafs.txt 2>>"$LOGFILE" &>/dev/null if [ -s ".tmp/wafs.txt" ]; then cat .tmp/wafs.txt | sed -e 's/^[ \t]*//' -e 's/ \+ /\t/g' -e '/(None)/d' | tr -s "\t" ";" > webs/webs_wafs.txt NUMOFLINES=$(cat webs/webs_wafs.txt 2>>"$LOGFILE" | wc -l) @@ -748,19 +748,19 @@ function waf_checks(){ function nuclei_check(){ if { [ ! 
-f "$called_fn_dir/.${FUNCNAME[0]}" ] || [ "$DIFF" = true ]; } && [ "$NUCLEICHECK" = true ]; then start_func "Templates based web scanner" - nuclei -update-templates &>>"$LOGFILE" + nuclei -update-templates 2>>"$LOGFILE" &>/dev/null mkdir -p nuclei_output if [ -s "webs/webs.txt" ]; then printf "${yellow}\n Running : Nuclei Info${reset}\n\n" - axiom-scan webs/webs.txt -m nuclei -severity info -r /home/op/lists/resolvers_trusted.txt -o nuclei_output/info.txt &>>"$LOGFILE" + axiom-scan webs/webs.txt -m nuclei -severity info -r /home/op/lists/resolvers_trusted.txt -o nuclei_output/info.txt 2>>"$LOGFILE" &>/dev/null printf "${yellow}\n\n Running : Nuclei Low${reset}\n\n" - axiom-scan webs/webs.txt -m nuclei -severity low -r /home/op/lists/resolvers_trusted.txt -o nuclei_output/low.txt &>>"$LOGFILE" + axiom-scan webs/webs.txt -m nuclei -severity low -r /home/op/lists/resolvers_trusted.txt -o nuclei_output/low.txt 2>>"$LOGFILE" &>/dev/null printf "${yellow}\n\n Running : Nuclei Medium${reset}\n\n" - axiom-scan webs/webs.txt -m nuclei -severity medium -r /home/op/lists/resolvers_trusted.txt -o nuclei_output/medium.txt &>>"$LOGFILE" + axiom-scan webs/webs.txt -m nuclei -severity medium -r /home/op/lists/resolvers_trusted.txt -o nuclei_output/medium.txt 2>>"$LOGFILE" &>/dev/null printf "${yellow}\n\n Running : Nuclei High${reset}\n\n" - axiom-scan webs/webs.txt -m nuclei -severity high -r /home/op/lists/resolvers_trusted.txt -o nuclei_output/high.txt &>>"$LOGFILE" + axiom-scan webs/webs.txt -m nuclei -severity high -r /home/op/lists/resolvers_trusted.txt -o nuclei_output/high.txt 2>>"$LOGFILE" &>/dev/null printf "${yellow}\n\n Running : Nuclei Critical${reset}\n\n" - axiom-scan webs/webs.txt -m nuclei -severity critical -r /home/op/lists/resolvers_trusted.txt -o nuclei_output/critical.txt &>>"$LOGFILE" + axiom-scan webs/webs.txt -m nuclei -severity critical -r /home/op/lists/resolvers_trusted.txt -o nuclei_output/critical.txt 2>>"$LOGFILE" &>/dev/null printf "\n\n" fi end_func "Results are saved in $domain/nuclei_output folder" ${FUNCNAME[0]} @@ -778,7 +778,7 @@ function fuzz(){ start_func "Web directory fuzzing" if [ -s "webs/webs.txt" ]; then mkdir -p $dir/fuzzing - axiom-scan webs/webs.txt -m ffuf -w /home/op/lists/onelistforallmicro.txt -H \"${HEADER}\" -mc all -fc 404 -sf -s -maxtime $FFUF_MAXTIME -o $dir/fuzzing/ffuf-content.csv &>>"$LOGFILE" + axiom-scan webs/webs.txt -m ffuf -w /home/op/lists/onelistforallmicro.txt -H \"${HEADER}\" -mc all -fc 404 -sf -s -maxtime $FFUF_MAXTIME -o $dir/fuzzing/ffuf-content.csv 2>>"$LOGFILE" &>/dev/null grep -v "FUZZ,url,redirectlocation" $dir/fuzzing/ffuf-content.csv | awk -F "," '{print $2" "$5" "$6}' | sort > $dir/fuzzing/ffuf-content.tmp for sub in $(cat webs/webs.txt); do sub_out=$(echo $sub | sed -e 's|^[^/]*//||' -e 's|/.*$||') @@ -804,14 +804,14 @@ function cms_scanner(){ mkdir -p $dir/cms && rm -rf $dir/cms/* if [ -s "./webs/webs.txt" ]; then tr '\n' ',' < webs/webs.txt > .tmp/cms.txt - timeout -k 30 $CMSSCAN_TIMEOUT python3 $tools/CMSeeK/cmseek.py -l .tmp/cms.txt --batch -r &>>"$LOGFILE" + timeout -k 30 $CMSSCAN_TIMEOUT python3 $tools/CMSeeK/cmseek.py -l .tmp/cms.txt --batch -r 2>>"$LOGFILE" &>/dev/null exit_status=$? 
if [[ $exit_status -eq 125 ]]; then - echo "TIMEOUT cmseek.py - investigate manually for $dir" &>>"$LOGFILE" + echo "TIMEOUT cmseek.py - investigate manually for $dir" 2>>"$LOGFILE" &>/dev/null end_func "TIMEOUT cmseek.py - investigate manually for $dir" ${FUNCNAME[0]} return elif [[ $exit_status -ne 0 ]]; then - echo "ERROR cmseek.py - investigate manually for $dir" &>>"$LOGFILE" + echo "ERROR cmseek.py - investigate manually for $dir" 2>>"$LOGFILE" &>/dev/null end_func "ERROR cmseek.py - investigate manually for $dir" ${FUNCNAME[0]} return fi # otherwise Assume we have a successfully exited cmseek @@ -843,17 +843,17 @@ function params(){ printf "${yellow}\n\n Running : Searching params with paramspider${reset}\n" if [ -s "webs/webs.txt" ]; then cat webs/webs.txt | sed -r "s/https?:\/\///" | anew -q .tmp/probed_nohttp.txt - axiom-scan .tmp/probed_nohttp.txt -m paramspider -l high -q --exclude eot,jpg,jpeg,gif,css,tif,tiff,png,ttf,otf,woff,woff2,ico,pdf,svg,txt,js -o output_paramspider &>>"$LOGFILE" + axiom-scan .tmp/probed_nohttp.txt -m paramspider -l high -q --exclude eot,jpg,jpeg,gif,css,tif,tiff,png,ttf,otf,woff,woff2,ico,pdf,svg,txt,js -o output_paramspider 2>>"$LOGFILE" &>/dev/null find output_paramspider/ -name '*.txt' -exec cat {} \; | anew -q .tmp/param_tmp.txt sed '/^FUZZ/d' -i .tmp/param_tmp.txt rm -rf output_paramspider/ 2>>"$LOGFILE" if [ "$DEEP" = true ]; then printf "${yellow}\n\n Running : Checking ${domain} with Arjun${reset}\n" - axiom-scan .tmp/param_tmp.txt -m arjun -t $ARJUN_THREADS -o webs/param.txt &>>"$LOGFILE" + axiom-scan .tmp/param_tmp.txt -m arjun -t $ARJUN_THREADS -o webs/param.txt 2>>"$LOGFILE" &>/dev/null else if [[ $(cat .tmp/param_tmp.txt | wc -l) -le 50 ]]; then printf "${yellow}\n\n Running : Checking ${domain} with Arjun${reset}\n" - axiom-scan .tmp/param_tmp.txt -m arjun -t $ARJUN_THREADS -o webs/param.txt &>>"$LOGFILE" + axiom-scan .tmp/param_tmp.txt -m arjun -t $ARJUN_THREADS -o webs/param.txt 2>>"$LOGFILE" &>/dev/null else cp .tmp/param_tmp.txt webs/param.txt fi @@ -874,23 +874,23 @@ function urlchecks(){ start_func "URL Extraction" mkdir -p js if [ -s "webs/webs.txt" ]; then - axiom-scan webs/webs.txt -m waybackurls -o .tmp/url_extract_way_tmp.txt &>>"$LOGFILE" + axiom-scan webs/webs.txt -m waybackurls -o .tmp/url_extract_way_tmp.txt 2>>"$LOGFILE" &>/dev/null [ -f ".tmp/url_extract_way_tmp.txt" ] && cat .tmp/url_extract_way_tmp.txt | anew -q .tmp/url_extract_tmp.txt - axiom-scan webs/webs.txt -m gau -o .tmp/url_extract_gau_tmp.txt &>>"$LOGFILE" + axiom-scan webs/webs.txt -m gau -o .tmp/url_extract_gau_tmp.txt 2>>"$LOGFILE" &>/dev/null [ -f ".tmp/url_extract_gau_tmp.txt" ] && cat .tmp/url_extract_gau_tmp.txt | anew -q .tmp/url_extract_tmp.txt diff_webs=$(diff <(sort -u .tmp/probed_tmp.txt) <(sort -u webs/webs.txt) | wc -l) if [ $diff_webs != "0" ] || [ ! 
-s ".tmp/gospider.txt" ]; then if [ "$DEEP" = true ]; then - axiom-scan webs/webs.txt -m gospider --js -d 3 --sitemap --robots -w -r -o .tmp/gospider &>>"$LOGFILE" + axiom-scan webs/webs.txt -m gospider --js -d 3 --sitemap --robots -w -r -o .tmp/gospider 2>>"$LOGFILE" &>/dev/null else - axiom-scan webs/webs.txt -m gospider --js -d 2 --sitemap --robots -w -r -o .tmp/gospider &>>"$LOGFILE" + axiom-scan webs/webs.txt -m gospider --js -d 2 --sitemap --robots -w -r -o .tmp/gospider 2>>"$LOGFILE" &>/dev/null fi [[ -d .tmp/gospider/ ]] && cat .tmp/gospider/* 2>>"$LOGFILE" | sed '/^.\{2048\}./d' | anew -q .tmp/gospider.txt fi [[ -d .tmp/gospider/ ]] && NUMFILES=$(find .tmp/gospider/ -type f | wc -l) [[ $NUMFILES -gt 0 ]] && cat .tmp/gospider.txt | grep -Eo 'https?://[^ ]+' | sed 's/]$//' | grep ".$domain$" | anew -q .tmp/url_extract_tmp.txt if [ -s "${GITHUB_TOKENS}" ]; then - github-endpoints -q -k -d $domain -t ${GITHUB_TOKENS} -o .tmp/github-endpoints.txt &>>"$LOGFILE" + github-endpoints -q -k -d $domain -t ${GITHUB_TOKENS} -o .tmp/github-endpoints.txt 2>>"$LOGFILE" &>/dev/null [ -s ".tmp/github-endpoints.txt" ] && cat .tmp/github-endpoints.txt | anew -q .tmp/url_extract_tmp.txt fi [ -s ".tmp/url_extract_tmp.txt" ] && cat .tmp/url_extract_tmp.txt | grep "${domain}" | grep -Ei "\.(js)" | anew -q js/url_extract_js.txt @@ -898,13 +898,13 @@ function urlchecks(){ [ -s "js/url_extract_js.txt" ] && cat js/url_extract_js.txt | python3 $tools/JSA/jsa.py | anew -q .tmp/url_extract_tmp.txt fi cat .tmp/url_extract_tmp.txt webs/param.txt 2>>"$LOGFILE" | grep "${domain}" | grep "=" | qsreplace -a 2>>"$LOGFILE" | grep -Eiv "\.(eot|jpg|jpeg|gif|css|tif|tiff|png|ttf|otf|woff|woff2|ico|pdf|svg|txt|js)$" | anew -q .tmp/url_extract_tmp2.txt - uddup -u .tmp/url_extract_tmp2.txt -o .tmp/url_extract_uddup.txt &>>"$LOGFILE" + uddup -u .tmp/url_extract_tmp2.txt -o .tmp/url_extract_uddup.txt 2>>"$LOGFILE" &>/dev/null NUMOFLINES=$(cat .tmp/url_extract_uddup.txt 2>>"$LOGFILE" | anew webs/url_extract.txt | wc -l) notification "${NUMOFLINES} new urls with params" info end_func "Results are saved in $domain/webs/url_extract.txt" ${FUNCNAME[0]} if [ "$PROXY" = true ] && [ -n "$proxy_url" ] && [[ $(cat webs/url_extract.txt | wc -l) -le 1500 ]]; then notification "Sending urls to proxy" info - ffuf -mc all -w webs/url_extract.txt -u FUZZ -replay-proxy $proxy_url &>>"$LOGFILE" + ffuf -mc all -w webs/url_extract.txt -u FUZZ -replay-proxy $proxy_url 2>>"$LOGFILE" &>/dev/null fi fi else @@ -924,7 +924,7 @@ function url_gf(){ [ -f "gf/ssrf.txt" ] && cat gf/ssrf.txt | anew -q gf/redirect.txt gf rce webs/url_extract.txt | anew -q gf/rce.txt gf potential webs/url_extract.txt | cut -d ':' -f3-5 |anew -q gf/potential.txt - [ -s ".tmp/url_extract_tmp.txt" ] && cat .tmp/url_extract_tmp.txt | grep -Eiv "\.(eot|jpg|jpeg|gif|css|tif|tiff|png|ttf|otf|woff|woff2|ico|pdf|svg|txt|js)$" | unfurl -u format %s://%d%p | anew -q gf/endpoints.txt + [ -s ".tmp/url_extract_tmp.txt" ] && cat .tmp/url_extract_tmp.txt | grep -Eiv "\.(eot|jpg|jpeg|gif|css|tif|tiff|png|ttf|otf|woff|woff2|ico|pdf|svg|txt|js)$" | unfurl -u format %s://%d%p 2>>"$LOGFILE" | anew -q gf/endpoints.txt gf lfi webs/url_extract.txt | anew -q gf/lfi.txt end_func "Results are saved in $domain/gf folder" ${FUNCNAME[0]} else @@ -965,7 +965,7 @@ function jschecks(){ cat js/url_extract_js.txt | cut -d '?' 
-f 1 | grep -iE "\.js$" | grep "$domain$" | anew -q js/jsfile_links.txt cat js/url_extract_js.txt | subjs | grep "$domain$" | anew -q js/jsfile_links.txt printf "${yellow} Running : Resolving JS Urls 2/5${reset}\n" - axiom-scan js/jsfile_links.txt -m httpx -follow-redirects -random-agent -silent -timeout $HTTPX_TIMEOUT -threads $HTTPX_THREADS -status-code -retries 2 -no-color -o .tmp/js_livelinks.txt &>>"$LOGFILE" + axiom-scan js/jsfile_links.txt -m httpx -follow-redirects -random-agent -silent -timeout $HTTPX_TIMEOUT -threads $HTTPX_THREADS -status-code -retries 2 -no-color -o .tmp/js_livelinks.txt 2>>"$LOGFILE" &>/dev/null [ -s ".tmp/js_livelinks.txt" ] && cat .tmp/js_livelinks.txt | grep "[200]" | cut -d ' ' -f1 | anew -q js/js_livelinks.txt printf "${yellow} Running : Gathering endpoints 3/5${reset}\n" if [ -s "js/js_livelinks.txt" ]; then @@ -976,7 +976,7 @@ function jschecks(){ cat .tmp/js_endpoints.txt | anew -q js/js_endpoints.txt fi printf "${yellow} Running : Gathering secrets 4/5${reset}\n" - [ -s "js/js_livelinks.txt" ] && axiom-scan js/js_livelinks.txt -m nuclei -w /home/op/recon/nuclei/exposures/tokens/ -r /home/op/lists/resolvers_trusted.txt -o js/js_secrets.txt &>>"$LOGFILE" + [ -s "js/js_livelinks.txt" ] && axiom-scan js/js_livelinks.txt -m nuclei -w /home/op/recon/nuclei/exposures/tokens/ -r /home/op/lists/resolvers_trusted.txt -o js/js_secrets.txt 2>>"$LOGFILE" &>/dev/null printf "${yellow} Running : Building wordlist 5/5${reset}\n" [ -s "js/js_livelinks.txt" ] && cat js/js_livelinks.txt | python3 $tools/getjswords.py 2>>"$LOGFILE" | anew -q webs/dict_words.txt end_func "Results are saved in $domain/js folder" ${FUNCNAME[0]} @@ -996,12 +996,12 @@ function wordlist_gen(){ if { [ ! -f "$called_fn_dir/.${FUNCNAME[0]}" ] || [ "$DIFF" = true ]; } && [ "$WORDLIST" = true ]; then start_func "Wordlist generation" if [ -s ".tmp/url_extract_tmp.txt" ]; then - cat .tmp/url_extract_tmp.txt | unfurl -u keys | sed 's/[][]//g' | sed 's/[#]//g' | sed 's/[}{]//g' | anew -q webs/dict_words.txt - cat .tmp/url_extract_tmp.txt | unfurl -u values | sed 's/[][]//g' | sed 's/[#]//g' | sed 's/[}{]//g' | anew -q webs/dict_words.txt + cat .tmp/url_extract_tmp.txt | unfurl -u keys 2>>"$LOGFILE" | sed 's/[][]//g' | sed 's/[#]//g' | sed 's/[}{]//g' | anew -q webs/dict_words.txt + cat .tmp/url_extract_tmp.txt | unfurl -u values 2>>"$LOGFILE" | sed 's/[][]//g' | sed 's/[#]//g' | sed 's/[}{]//g' | anew -q webs/dict_words.txt cat .tmp/url_extract_tmp.txt | tr "[:punct:]" "\n" | anew -q webs/dict_words.txt fi - [ -s ".tmp/js_endpoints.txt" ] && cat .tmp/js_endpoints.txt | unfurl -u path | anew -q webs/dict_paths.txt - [ -s ".tmp/url_extract_tmp.txt" ] && cat .tmp/url_extract_tmp.txt | unfurl -u path | anew -q webs/dict_paths.txt + [ -s ".tmp/js_endpoints.txt" ] && cat .tmp/js_endpoints.txt | unfurl -u path 2>>"$LOGFILE" | anew -q webs/dict_paths.txt + [ -s ".tmp/url_extract_tmp.txt" ] && cat .tmp/url_extract_tmp.txt | unfurl -u path 2>>"$LOGFILE" | anew -q webs/dict_paths.txt end_func "Results are saved in $domain/webs/dict_[words|paths].txt" ${FUNCNAME[0]} else if [ "$WORDLIST" = false ]; then @@ -1021,9 +1021,9 @@ function brokenLinks(){ start_func "Broken links checks" if [ ! 
-s ".tmp/gospider.txt" ]; then if [ "$DEEP" = true ]; then - [ -s "webs/webs.txt" ] && axiom-scan webs/webs.txt -m gospider --js -d 3 --sitemap --robots -w -r -o .tmp/gospider &>>"$LOGFILE" + [ -s "webs/webs.txt" ] && axiom-scan webs/webs.txt -m gospider --js -d 3 --sitemap --robots -w -r -o .tmp/gospider 2>>"$LOGFILE" &>/dev/null else - [ -s "webs/webs.txt" ] && axiom-scan webs/webs.txt -m gospider --js -d 2 --sitemap --robots -w -r -o .tmp/gospider &>>"$LOGFILE" + [ -s "webs/webs.txt" ] && axiom-scan webs/webs.txt -m gospider --js -d 2 --sitemap --robots -w -r -o .tmp/gospider 2>>"$LOGFILE" &>/dev/null fi cat .tmp/gospider/* | sed '/^.\{2048\}./d' | anew -q .tmp/gospider.txt fi @@ -1046,18 +1046,18 @@ function xss(){ [ -s "gf/xss.txt" ] && cat gf/xss.txt | qsreplace FUZZ | Gxss -c 100 -p Xss | anew -q .tmp/xss_reflected.txt if [ "$DEEP" = true ]; then if [ -n "$XSS_SERVER" ]; then - [ -s ".tmp/xss_reflected.txt" ] && axiom-scan .tmp/xss_reflected.txt -m dalfox --mass --mass-worker 100 --multicast --skip-bav -b ${XSS_SERVER} -w $DALFOX_THREADS -o vulns/xss.txt &>>"$LOGFILE" + [ -s ".tmp/xss_reflected.txt" ] && axiom-scan .tmp/xss_reflected.txt -m dalfox --mass --mass-worker 100 --multicast --skip-bav -b ${XSS_SERVER} -w $DALFOX_THREADS -o vulns/xss.txt 2>>"$LOGFILE" &>/dev/null else printf "${yellow}\n No XSS_SERVER defined, blind xss skipped\n\n" - [ -s ".tmp/xss_reflected.txt" ] && axiom-scan .tmp/xss_reflected.txt -m dalfox --mass --mass-worker 100 --multicast --skip-bav -w $DALFOX_THREADS -o vulns/xss.txt &>>"$LOGFILE" + [ -s ".tmp/xss_reflected.txt" ] && axiom-scan .tmp/xss_reflected.txt -m dalfox --mass --mass-worker 100 --multicast --skip-bav -w $DALFOX_THREADS -o vulns/xss.txt 2>>"$LOGFILE" &>/dev/null fi else if [[ $(cat .tmp/xss_reflected.txt | wc -l) -le 500 ]]; then if [ -n "$XSS_SERVER" ]; then - axiom-scan .tmp/xss_reflected.txt -m dalfox --mass --mass-worker 100 --multicast --skip-bav --skip-grepping --skip-mining-all --skip-mining-dict -b ${XSS_SERVER} -w $DALFOX_THREADS -o vulns/xss.txt &>>"$LOGFILE" + axiom-scan .tmp/xss_reflected.txt -m dalfox --mass --mass-worker 100 --multicast --skip-bav --skip-grepping --skip-mining-all --skip-mining-dict -b ${XSS_SERVER} -w $DALFOX_THREADS -o vulns/xss.txt 2>>"$LOGFILE" &>/dev/null else printf "${yellow}\n No XSS_SERVER defined, blind xss skipped\n\n" - axiom-scan .tmp/xss_reflected.txt -m dalfox --mass --mass-worker 100 --multicast --skip-bav --skip-grepping --skip-mining-all --skip-mining-dict -w $DALFOX_THREADS -o vulns/xss.txt &>>"$LOGFILE" + axiom-scan .tmp/xss_reflected.txt -m dalfox --mass --mass-worker 100 --multicast --skip-bav --skip-grepping --skip-mining-all --skip-mining-dict -w $DALFOX_THREADS -o vulns/xss.txt 2>>"$LOGFILE" &>/dev/null fi else printf "${bred} Skipping XSS: Too many URLs to test, try with --deep flag${reset}\n" @@ -1078,7 +1078,7 @@ function xss(){ function cors(){ if { [ ! -f "$called_fn_dir/.${FUNCNAME[0]}" ] || [ "$DIFF" = true ]; } && [ "$CORS" = true ]; then start_func "CORS Scan" - python3 $tools/Corsy/corsy.py -i webs/webs.txt > webs/cors.txt &>>"$LOGFILE" + python3 $tools/Corsy/corsy.py -i webs/webs.txt > webs/cors.txt 2>>"$LOGFILE" &>/dev/null [ -s "webs/cors.txt" ] && cat webs/cors.txt end_func "Results are saved in webs/cors.txt" ${FUNCNAME[0]} else @@ -1172,7 +1172,7 @@ function ssrf_checks(){ function crlf_checks(){ if { [ ! 
-f "$called_fn_dir/.${FUNCNAME[0]}" ] || [ "$DIFF" = true ]; } && [ "$CRLF_CHECKS" = true ]; then start_func "CRLF checks" - crlfuzz -l webs/webs.txt -o vulns/crlf.txt &>>"$LOGFILE" + crlfuzz -l webs/webs.txt -o vulns/crlf.txt 2>>"$LOGFILE" &>/dev/null end_func "Results are saved in vulns/crlf.txt" ${FUNCNAME[0]} else if [ "$CRLF_CHECKS" = false ]; then @@ -1262,7 +1262,7 @@ function spraying(){ if { [ ! -f "$called_fn_dir/.${FUNCNAME[0]}" ] || [ "$DIFF" = true ]; } && [ "$SPRAY" = true ]; then start_func "Password spraying" cd "$tools/brutespray" || { echo "Failed to cd directory in ${FUNCNAME[0]} @ line ${LINENO}"; exit 1; } - python3 brutespray.py --file $dir/hosts/portscan_active.txt --threads $BRUTESPRAY_THREADS --hosts $BRUTESPRAY_CONCURRENCE -o $dir/hosts/brutespray.txt &>>"$LOGFILE" + python3 brutespray.py --file $dir/hosts/portscan_active.txt --threads $BRUTESPRAY_THREADS --hosts $BRUTESPRAY_CONCURRENCE -o $dir/hosts/brutespray.txt 2>>"$LOGFILE" &>/dev/null cd "$dir" || { echo "Failed to cd directory in ${FUNCNAME[0]} @ line ${LINENO}"; exit 1; } end_func "Results are saved in hosts/brutespray.txt" ${FUNCNAME[0]} else @@ -1471,7 +1471,7 @@ function ipcidr_detection(){ } function ipcidr_target(){ - ipcidr_detection $1 | cut -d' ' -f3 | unfurl -u domains | sed 's/\.$//' | sort -u > ./target_reconftw_ipcidr.txt + ipcidr_detection $1 | cut -d' ' -f3 | unfurl -u domains 2>>"$LOGFILE" | sed 's/\.$//' | sort -u > ./target_reconftw_ipcidr.txt if [[ $(cat ./target_reconftw_ipcidr.txt | wc -l) -eq 1 ]]; then domain=$(cat ./target_reconftw_ipcidr.txt) elif [[ $(cat ./target_reconftw_ipcidr.txt | wc -l) -gt 1 ]]; then From 694ff1b85b66384c179df2cddc842c4a57682d16 Mon Sep 17 00:00:00 2001 From: six2dez Date: Fri, 21 May 2021 11:08:03 +0200 Subject: [PATCH 29/32] Better IP extraction --- reconftw.sh | 4 +--- reconftw_axiom.sh | 4 +--- 2 files changed, 2 insertions(+), 6 deletions(-) diff --git a/reconftw.sh b/reconftw.sh index 4cd9cdf2..14d1c055 100755 --- a/reconftw.sh +++ b/reconftw.sh @@ -650,9 +650,7 @@ function favicon(){ function portscan(){ if { [ ! -f "$called_fn_dir/.${FUNCNAME[0]}" ] || [ "$DIFF" = true ]; } && [ "$PORTSCANNER" = true ]; then start_func "Port scan" - for sub in $(cat subdomains/subdomains.txt); do - echo "$sub $(dig +short a $sub | tail -n1)" | anew -q .tmp/subs_ips.txt - done + interlace -tL subdomains/subdomains.txt -threads 50 -c 'echo "_target_ $(dig +short a _target_ | tail -n1)" | anew -q _output_' -o .tmp/subs_ips.txt awk '{ print $2 " " $1}' .tmp/subs_ips.txt | sort -k2 -n | anew -q hosts/subs_ips_vhosts.txt [ -s "hosts/subs_ips_vhosts.txt" ] && cat hosts/subs_ips_vhosts.txt | cut -d ' ' -f1 | grep -Eiv "^(127|10|169\.154|172\.1[6789]|172\.2[0-9]|172\.3[01]|192\.168)\." | grep -oE "\b([0-9]{1,3}\.){3}[0-9]{1,3}\b" | anew -q hosts/ips.txt [ -s "hosts/ips.txt" ] && cat hosts/ips.txt | cf-check | grep -Eiv "^(127|10|169\.154|172\.1[6789]|172\.2[0-9]|172\.3[01]|192\.168)\." | grep -oE "\b([0-9]{1,3}\.){3}[0-9]{1,3}\b" | anew -q .tmp/ips_nowaf.txt diff --git a/reconftw_axiom.sh b/reconftw_axiom.sh index 10b7f463..21a8cfbc 100755 --- a/reconftw_axiom.sh +++ b/reconftw_axiom.sh @@ -672,9 +672,7 @@ function favicon(){ function portscan(){ if { [ ! 
-f "$called_fn_dir/.${FUNCNAME[0]}" ] || [ "$DIFF" = true ]; } && [ "$PORTSCANNER" = true ]; then start_func "Port scan" - for sub in $(cat subdomains/subdomains.txt); do - echo "$sub $(dig +short a $sub | tail -n1)" | anew -q .tmp/subs_ips.txt - done + interlace -tL subdomains/subdomains.txt -threads 50 -c 'echo "_target_ $(dig +retry=5 +short a _target_ | tail -n1)" | anew -q _output_' -o .tmp/subs_ips.txt awk '{ print $2 " " $1}' .tmp/subs_ips.txt | sort -k2 -n | anew -q hosts/subs_ips_vhosts.txt [ -s "hosts/subs_ips_vhosts.txt" ] && cat hosts/subs_ips_vhosts.txt | cut -d ' ' -f1 | grep -Eiv "^(127|10|169\.154|172\.1[6789]|172\.2[0-9]|172\.3[01]|192\.168)\." | grep -oE "\b([0-9]{1,3}\.){3}[0-9]{1,3}\b" | anew -q hosts/ips.txt [ -s "hosts/ips.txt" ] && axiom-scan hosts/ips.txt -m cf-check -o .tmp/ips_nowaf_.txt 2>>"$LOGFILE" &>/dev/null From e0306261beee0990f990f7c6cd3d6bb2c463214c Mon Sep 17 00:00:00 2001 From: six2dez Date: Sat, 22 May 2021 01:23:12 +0200 Subject: [PATCH 30/32] Added resolveDomains --- install.sh | 1 + reconftw.cfg | 1 + reconftw.sh | 6 ++++-- reconftw_axiom.sh | 8 +++++--- 4 files changed, 11 insertions(+), 5 deletions(-) diff --git a/install.sh b/install.sh index dfe1a82f..38ed0463 100755 --- a/install.sh +++ b/install.sh @@ -31,6 +31,7 @@ gotools["dalfox"]="GO111MODULE=on go get -v github.com/hahwul/dalfox/v2" gotools["puredns"]="GO111MODULE=on go get github.com/d3mondev/puredns/v2" gotools["hakrevdns"]="go get github.com/hakluke/hakrevdns" gotools["gdn"]="GO111MODULE=on go get -v github.com/kmskrishna/gdn" +gotools["resolveDomains"]="go get -v github.com/Josue87/resolveDomains" declare -A repos repos["degoogle_hunter"]="six2dez/degoogle_hunter" diff --git a/reconftw.cfg b/reconftw.cfg index a883f2c0..91fe8dd8 100644 --- a/reconftw.cfg +++ b/reconftw.cfg @@ -125,6 +125,7 @@ PUREDNS_PUBLIC_LIMIT=0 # Set between 2000 - 10000 if your router blows up, 0 is PUREDNS_TRUSTED_LIMIT=400 DIRDAR_THREADS=200 WEBSCREENSHOT_THREADS=200 +RESOLVE_DOMAINS_THREADS=150 # Timeouts CMSSCAN_TIMEOUT=3600 diff --git a/reconftw.sh b/reconftw.sh index 14d1c055..e0bc7fd8 100755 --- a/reconftw.sh +++ b/reconftw.sh @@ -100,6 +100,7 @@ function tools_installed(){ type -P unimap &>/dev/null || { printf "${bred} [*] unimap [NO]${reset}\n${reset}"; allinstalled=false;} type -P hakrevdns &>/dev/null || { printf "${bred} [*] hakrevdns [NO]${reset}\n${reset}"; allinstalled=false;} type -P gdn &>/dev/null || { printf "${bred} [*] gdn [NO]${reset}\n"; allinstalled=false;} + type -P resolveDomains &>/dev/null || { printf "${bred} [*] resolveDomains [NO]${reset}\n"; allinstalled=false;} if [ "${allinstalled}" = true ]; then printf "${bgreen} Good! All installed! ${reset}\n\n" @@ -650,8 +651,9 @@ function favicon(){ function portscan(){ if { [ ! 
-f "$called_fn_dir/.${FUNCNAME[0]}" ] || [ "$DIFF" = true ]; } && [ "$PORTSCANNER" = true ]; then start_func "Port scan" - interlace -tL subdomains/subdomains.txt -threads 50 -c 'echo "_target_ $(dig +short a _target_ | tail -n1)" | anew -q _output_' -o .tmp/subs_ips.txt - awk '{ print $2 " " $1}' .tmp/subs_ips.txt | sort -k2 -n | anew -q hosts/subs_ips_vhosts.txt + #interlace -tL subdomains/subdomains.txt -threads 50 -c 'echo "_target_ $(dig +short a _target_ | tail -n1)" | anew -q _output_' -o .tmp/subs_ips.txt + [ -s "subdomains/subdomains.txt" ] && resolveDomains -d subdomains/subdomains.txt -t $RESOLVE_DOMAINS_THREADS 2>>"$LOGFILE" | anew -q .tmp/subs_ips.txt + [ -s ".tmp/subs_ips.txt" ] && awk '{ print $2 " " $1}' .tmp/subs_ips.txt | sort -k2 -n | anew -q hosts/subs_ips_vhosts.txt [ -s "hosts/subs_ips_vhosts.txt" ] && cat hosts/subs_ips_vhosts.txt | cut -d ' ' -f1 | grep -Eiv "^(127|10|169\.154|172\.1[6789]|172\.2[0-9]|172\.3[01]|192\.168)\." | grep -oE "\b([0-9]{1,3}\.){3}[0-9]{1,3}\b" | anew -q hosts/ips.txt [ -s "hosts/ips.txt" ] && cat hosts/ips.txt | cf-check | grep -Eiv "^(127|10|169\.154|172\.1[6789]|172\.2[0-9]|172\.3[01]|192\.168)\." | grep -oE "\b([0-9]{1,3}\.){3}[0-9]{1,3}\b" | anew -q .tmp/ips_nowaf.txt printf "${bblue}\n Resolved IP addresses (No WAF) ${reset}\n\n"; diff --git a/reconftw_axiom.sh b/reconftw_axiom.sh index 21a8cfbc..9e8d80a2 100755 --- a/reconftw_axiom.sh +++ b/reconftw_axiom.sh @@ -100,6 +100,7 @@ function tools_installed(){ type -P unimap &>/dev/null || { printf "${bred} [*] unimap [NO]${reset}\n${reset}"; allinstalled=false;} type -P hakrevdns &>/dev/null || { printf "${bred} [*] hakrevdns [NO]${reset}\n${reset}"; allinstalled=false;} type -P gdn &>/dev/null || { printf "${bred} [*] gdn [NO]${reset}\n"; allinstalled=false;} + type -P resolveDomains &>/dev/null || { printf "${bred} [*] resolveDomains [NO]${reset}\n"; allinstalled=false;} type -P axiom-ls &>/dev/null || { printf "${bred} [*] axiom [NO]${reset}\n${reset}"; allinstalled=false;} if [ "${allinstalled}" = true ]; then @@ -656,7 +657,7 @@ function favicon(){ sed -i "s/|/\n/g" favicontest.txt cat favicontest.txt 2>>"$LOGFILE" mv favicontest.txt $dir/hosts/favicontest.txt 2>>"$LOGFILE" - rm favicontest.json 2>>"$LOGFILE" + rm -f favicontest.json 2>>"$LOGFILE" fi cd "$dir" || { echo "Failed to cd to $dir in ${FUNCNAME[0]} @ line ${LINENO}"; exit 1; } end_func "Results are saved in hosts/favicontest.txt" ${FUNCNAME[0]} @@ -672,8 +673,9 @@ function favicon(){ function portscan(){ if { [ ! -f "$called_fn_dir/.${FUNCNAME[0]}" ] || [ "$DIFF" = true ]; } && [ "$PORTSCANNER" = true ]; then start_func "Port scan" - interlace -tL subdomains/subdomains.txt -threads 50 -c 'echo "_target_ $(dig +retry=5 +short a _target_ | tail -n1)" | anew -q _output_' -o .tmp/subs_ips.txt - awk '{ print $2 " " $1}' .tmp/subs_ips.txt | sort -k2 -n | anew -q hosts/subs_ips_vhosts.txt + #interlace -tL subdomains/subdomains.txt -threads 50 -c 'echo "_target_ $(dig +short a _target_ | tail -n1)" | anew -q _output_' -o .tmp/subs_ips.txt + [ -s "subdomains/subdomains.txt" ] && resolveDomains -d subdomains/subdomains.txt -t $RESOLVE_DOMAINS_THREADS 2>>"$LOGFILE" | anew -q .tmp/subs_ips.txt + [ -s ".tmp/subs_ips.txt" ] && awk '{ print $2 " " $1}' .tmp/subs_ips.txt | sort -k2 -n | anew -q hosts/subs_ips_vhosts.txt [ -s "hosts/subs_ips_vhosts.txt" ] && cat hosts/subs_ips_vhosts.txt | cut -d ' ' -f1 | grep -Eiv "^(127|10|169\.154|172\.1[6789]|172\.2[0-9]|172\.3[01]|192\.168)\." 
| grep -oE "\b([0-9]{1,3}\.){3}[0-9]{1,3}\b" | anew -q hosts/ips.txt [ -s "hosts/ips.txt" ] && axiom-scan hosts/ips.txt -m cf-check -o .tmp/ips_nowaf_.txt 2>>"$LOGFILE" &>/dev/null [ -s ".tmp/ips_nowaf_.txt" ] && cat .tmp/ips_nowaf_.txt | grep -Eiv "^(127|10|169\.154|172\.1[6789]|172\.2[0-9]|172\.3[01]|192\.168)\." | grep -oE "\b([0-9]{1,3}\.){3}[0-9]{1,3}\b" | anew -q .tmp/ips_nowaf.txt From a932db03609385ffa1123ce910791972ebf01d7b Mon Sep 17 00:00:00 2001 From: six2dez Date: Sat, 22 May 2021 18:28:55 +0200 Subject: [PATCH 31/32] Fix IP/cidr detection --- reconftw.sh | 2 +- reconftw_axiom.sh | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/reconftw.sh b/reconftw.sh index e0bc7fd8..8b0382ef 100755 --- a/reconftw.sh +++ b/reconftw.sh @@ -1444,7 +1444,7 @@ function ipcidr_detection(){ } function ipcidr_target(){ - ipcidr_detection $1 | cut -d' ' -f3 | unfurl -u domains 2>>"$LOGFILE" | sed 's/\.$//' | sort -u > ./target_reconftw_ipcidr.txt + ipcidr_detection $1 | cut -d' ' -f3 | unfurl -u domains 2>/dev/null | sed 's/\.$//' | sort -u > ./target_reconftw_ipcidr.txt if [[ $(cat ./target_reconftw_ipcidr.txt | wc -l) -eq 1 ]]; then domain=$(cat ./target_reconftw_ipcidr.txt) elif [[ $(cat ./target_reconftw_ipcidr.txt | wc -l) -gt 1 ]]; then diff --git a/reconftw_axiom.sh b/reconftw_axiom.sh index 9e8d80a2..7792a28c 100755 --- a/reconftw_axiom.sh +++ b/reconftw_axiom.sh @@ -1471,7 +1471,7 @@ function ipcidr_detection(){ } function ipcidr_target(){ - ipcidr_detection $1 | cut -d' ' -f3 | unfurl -u domains 2>>"$LOGFILE" | sed 's/\.$//' | sort -u > ./target_reconftw_ipcidr.txt + ipcidr_detection $1 | cut -d' ' -f3 | unfurl -u domains 2>/dev/null | sed 's/\.$//' | sort -u > ./target_reconftw_ipcidr.txt if [[ $(cat ./target_reconftw_ipcidr.txt | wc -l) -eq 1 ]]; then domain=$(cat ./target_reconftw_ipcidr.txt) elif [[ $(cat ./target_reconftw_ipcidr.txt | wc -l) -gt 1 ]]; then From 5658d3547951f62176e08a8ccd8d08c002ba8b8b Mon Sep 17 00:00:00 2001 From: six2dez Date: Sun, 23 May 2021 01:56:33 +0200 Subject: [PATCH 32/32] emailfinder and analytics relationships --- install.sh | 1 + reconftw.cfg | 1 + reconftw.sh | 25 +++++++++++++++++++++++++ reconftw_axiom.sh | 29 +++++++++++++++++++++++++++-- requirements.txt | 3 ++- 5 files changed, 56 insertions(+), 3 deletions(-) diff --git a/install.sh b/install.sh index 38ed0463..4ad34f41 100755 --- a/install.sh +++ b/install.sh @@ -59,6 +59,7 @@ repos["testssl"]="drwetter/testssl.sh" repos["ip2provider"]="oldrho/ip2provider" repos["commix"]="commixproject/commix" repos["JSA"]="w9w/JSA" +repos["AnalyticsRelationships"]="Josue87/AnalyticsRelationships" dir=${tools} double_check=false diff --git a/reconftw.cfg b/reconftw.cfg index 91fe8dd8..66a9f7bb 100644 --- a/reconftw.cfg +++ b/reconftw.cfg @@ -48,6 +48,7 @@ METAFINDER_LIMIT=20 # Max 250 # Subdomains SUBCRT=true +SUBANALYTICS=true SUBBRUTE=true SUBSCRAPING=true SUBPERMUTE=true diff --git a/reconftw.sh b/reconftw.sh index 8b0382ef..372d1cde 100755 --- a/reconftw.sh +++ b/reconftw.sh @@ -101,6 +101,7 @@ function tools_installed(){ type -P hakrevdns &>/dev/null || { printf "${bred} [*] hakrevdns [NO]${reset}\n${reset}"; allinstalled=false;} type -P gdn &>/dev/null || { printf "${bred} [*] gdn [NO]${reset}\n"; allinstalled=false;} type -P resolveDomains &>/dev/null || { printf "${bred} [*] resolveDomains [NO]${reset}\n"; allinstalled=false;} + type -P emailfinder &>/dev/null || { printf "${bred} [*] emailfinder [NO]${reset}\n"; allinstalled=false;} if [ "${allinstalled}" = true ]; then printf 
"${bgreen} Good! All installed! ${reset}\n\n" @@ -176,6 +177,8 @@ function metadata(){ function emails(){ if { [ ! -f "$called_fn_dir/.${FUNCNAME[0]}" ] || [ "$DIFF" = true ]; } && [ "$EMAILS" = true ] && [ "$OSINT" = true ]; then start_func "Searching emails/users/passwords leaks" + emailfinder -d $domain | anew -q .tmp/emailfinder.txt + [ -s ".tmp/emailfinder.txt" ] && cat .tmp/emailfinder.txt | awk 'matched; /^-----------------$/ { matched = 1 }' | anew -q osint/emails.txt cd "$tools/theHarvester" || { echo "Failed to cd directory in ${FUNCNAME[0]} @ line ${LINENO}"; exit 1; } python3 theHarvester.py -d $domain -b all 2>>"$LOGFILE" > $dir/.tmp/harvester.txt cd "$dir" || { echo "Failed to cd to $dir in ${FUNCNAME[0]} @ line ${LINENO}"; exit 1; } @@ -271,6 +274,7 @@ function subdomains_full(){ sub_recursive sub_dns sub_scraping + sub_analytics webprobe_simple if [ -f "subdomains/subdomains.txt" ]; then deleteOutScoped $outOfScope_file subdomains/subdomains.txt @@ -416,6 +420,27 @@ function sub_scraping(){ fi } +function sub_analytics(){ + if { [ ! -f "$called_fn_dir/.${FUNCNAME[0]}" ] || [ "$DIFF" = true ]; } && [ "$SUBANALYTICS" = true ]; then + start_subfunc "Running : Analytics Subdomain Enumeration" + if [ -s ".tmp/probed_tmp_scrap.txt" ]; then + for sub in $(cat .tmp/probed_tmp_scrap.txt); do + python3 $tools/AnalyticsRelationships/Python/analyticsrelationships.py -u $sub | anew -q .tmp/analytics_subs_tmp.txt 2>>"$LOGFILE" &>/dev/null + done + [ -s ".tmp/analytics_subs_tmp.txt" ] && cat .tmp/analytics_subs_tmp.txt 2>>"$LOGFILE" | grep "\.$domain$\|^$domain$" | sed "s/|__ //" | anew -q .tmp/analytics_subs_clean.txt + [ -s ".tmp/analytics_subs_clean.txt" ] && puredns resolve .tmp/analytics_subs_clean.txt -w .tmp/analytics_subs_resolved.txt -r $resolvers --resolvers-trusted $resolvers_trusted -l $PUREDNS_PUBLIC_LIMIT --rate-limit-trusted $PUREDNS_TRUSTED_LIMIT 2>>"$LOGFILE" &>/dev/null + fi + NUMOFLINES=$(cat .tmp/analytics_subs_resolved.txt 2>>"$LOGFILE" | anew subdomains/subdomains.txt | wc -l) + end_subfunc "${NUMOFLINES} new subs (analytics relationship)" ${FUNCNAME[0]} + else + if [ "$SUBANALYTICS" = false ]; then + printf "\n${yellow} ${FUNCNAME[0]} skipped in this mode or defined in reconftw.cfg ${reset}\n" + else + printf "${yellow} ${FUNCNAME[0]} is already processed, to force executing ${FUNCNAME[0]} delete\n $called_fn_dir/.${FUNCNAME[0]} ${reset}\n\n" + fi + fi +} + function sub_permut(){ if { [ ! -f "$called_fn_dir/.${FUNCNAME[0]}" ] || [ "$DIFF" = true ]; } && [ "$SUBPERMUTE" = true ]; then start_subfunc "Running : Permutations Subdomain Enumeration" diff --git a/reconftw_axiom.sh b/reconftw_axiom.sh index 7792a28c..212d5bfe 100755 --- a/reconftw_axiom.sh +++ b/reconftw_axiom.sh @@ -101,6 +101,7 @@ function tools_installed(){ type -P hakrevdns &>/dev/null || { printf "${bred} [*] hakrevdns [NO]${reset}\n${reset}"; allinstalled=false;} type -P gdn &>/dev/null || { printf "${bred} [*] gdn [NO]${reset}\n"; allinstalled=false;} type -P resolveDomains &>/dev/null || { printf "${bred} [*] resolveDomains [NO]${reset}\n"; allinstalled=false;} + type -P emailfinder &>/dev/null || { printf "${bred} [*] emailfinder [NO]${reset}\n"; allinstalled=false;} type -P axiom-ls &>/dev/null || { printf "${bred} [*] axiom [NO]${reset}\n${reset}"; allinstalled=false;} if [ "${allinstalled}" = true ]; then @@ -177,6 +178,8 @@ function metadata(){ function emails(){ if { [ ! 
-f "$called_fn_dir/.${FUNCNAME[0]}" ] || [ "$DIFF" = true ]; } && [ "$EMAILS" = true ] && [ "$OSINT" = true ]; then start_func "Searching emails/users/passwords leaks" + emailfinder -d $domain | anew -q .tmp/emailfinder.txt + [ -s ".tmp/emailfinder.txt" ] && cat .tmp/emailfinder.txt | awk 'matched; /^-----------------$/ { matched = 1 }' | anew -q osint/emails.txt cd "$tools/theHarvester" || { echo "Failed to cd directory in ${FUNCNAME[0]} @ line ${LINENO}"; exit 1; } python3 theHarvester.py -d $domain -b all 2>>"$LOGFILE" > $dir/.tmp/harvester.txt cd "$dir" || { echo "Failed to cd to $dir in ${FUNCNAME[0]} @ line ${LINENO}"; exit 1; } @@ -272,6 +275,7 @@ function subdomains_full(){ sub_recursive sub_dns sub_scraping + sub_analytics webprobe_simple if [ -f "subdomains/subdomains.txt" ]; then deleteOutScoped $outOfScope_file subdomains/subdomains.txt @@ -428,6 +432,27 @@ function sub_scraping(){ fi } +function sub_analytics(){ + if { [ ! -f "$called_fn_dir/.${FUNCNAME[0]}" ] || [ "$DIFF" = true ]; } && [ "$SUBANALYTICS" = true ]; then + start_subfunc "Running : Analytics Subdomain Enumeration" + if [ -s ".tmp/probed_tmp_scrap.txt" ]; then + for sub in $(cat .tmp/probed_tmp_scrap.txt); do + python3 $tools/AnalyticsRelationships/Python/analyticsrelationships.py -u $sub | anew -q .tmp/analytics_subs_tmp.txt 2>>"$LOGFILE" &>/dev/null + done + [ -s ".tmp/analytics_subs_tmp.txt" ] && cat .tmp/analytics_subs_tmp.txt 2>>"$LOGFILE" | grep "\.$domain$\|^$domain$" | sed "s/|__ //" | anew -q .tmp/analytics_subs_clean.txt + [ -s ".tmp/analytics_subs_clean.txt" ] && axiom-scan .tmp/analytics_subs_clean.txt -m puredns-resolve -r /home/op/lists/resolvers.txt -o .tmp/analytics_subs_resolved.txt 2>>"$LOGFILE" &>/dev/null + fi + NUMOFLINES=$(cat .tmp/analytics_subs_resolved.txt 2>>"$LOGFILE" | anew subdomains/subdomains.txt | wc -l) + end_subfunc "${NUMOFLINES} new subs (analytics relationship)" ${FUNCNAME[0]} + else + if [ "$SUBANALYTICS" = false ]; then + printf "\n${yellow} ${FUNCNAME[0]} skipped in this mode or defined in reconftw.cfg ${reset}\n" + else + printf "${yellow} ${FUNCNAME[0]} is already processed, to force executing ${FUNCNAME[0]} delete\n $called_fn_dir/.${FUNCNAME[0]} ${reset}\n\n" + fi + fi +} + function sub_permut(){ if { [ ! -f "$called_fn_dir/.${FUNCNAME[0]}" ] || [ "$DIFF" = true ]; } && [ "$SUBPERMUTE" = true ]; then start_subfunc "Running : Permutations Subdomain Enumeration" @@ -631,8 +656,8 @@ function screenshot(){ if { [ ! 
-f "$called_fn_dir/.${FUNCNAME[0]}" ] || [ "$DIFF" = true ]; } && [ "$WEBSCREENSHOT" = true ]; then start_func "Web Screenshots" cat webs/webs.txt webs/webs_uncommon_ports.txt 2>>"$LOGFILE" | anew -q .tmp/webs_screenshots.txt - axiom-scan .tmp/webs_screenshots.txt -m webscreenshot -w $WEBSCREENSHOT_THREADS -o screenshots 2>>"$LOGFILE" &>/dev/null -# axiom-scan .tmp/webs_screenshots.txt -m "$AXIOM_SCREENSHOT_MODULE" -o screenshots &>>"$LOGFILE" + [ "$AXIOM_SCREENSHOT_MODULE" = "webscreenshot" ] && axiom-scan .tmp/webs_screenshots.txt -m $AXIOM_SCREENSHOT_MODULE -w $WEBSCREENSHOT_THREADS -o screenshots 2>>"$LOGFILE" &>/dev/null + [ "$AXIOM_SCREENSHOT_MODULE" != "webscreenshot" ] && axiom-scan .tmp/webs_screenshots.txt -m $AXIOM_SCREENSHOT_MODULE -o screenshots &>>"$LOGFILE" end_func "Results are saved in $domain/screenshots folder" ${FUNCNAME[0]} else if [ "$WEBSCREENSHOT" = false ]; then diff --git a/requirements.txt b/requirements.txt index e71a9331..c0ce4d7c 100644 --- a/requirements.txt +++ b/requirements.txt @@ -46,4 +46,5 @@ s3scanner shodan dnspython pytest-runner -webscreenshot \ No newline at end of file +webscreenshot +emailfinder \ No newline at end of file