
Commit

Merge pull request #417 from six2dez/dev
- gau instead of gauplus
- Added @NagliNagli nuclei templates
- Removed urldedupe from installer
- Hotfix for the axiom fleet node count
- Added ffuf flag control
- Added recursive subdomain bruteforce control
six2dez authored Nov 21, 2021
2 parents b41c06e + 649d6f0 commit 5b62669
Showing 4 changed files with 20 additions and 17 deletions.
6 changes: 3 additions & 3 deletions README.md
@@ -8,8 +8,8 @@


<p align="center">
<a href="https://github.com/six2dez/reconftw/releases/tag/v2.1.1">
<img src="https://img.shields.io/badge/release-v2.1.1-green">
<a href="https://github.com/six2dez/reconftw/releases/tag/v2.1.3">
<img src="https://img.shields.io/badge/release-v2.1.3-green">
</a>
</a>
<a href="https://www.gnu.org/licenses/gpl-3.0.en.html">
@@ -427,7 +427,7 @@ reset='\033[0m'
- Google Dorks ([uDork](https://github.com/m3n0sd0n4ld/uDork))
- Github Dorks ([GitDorker](https://github.com/obheda12/GitDorker))
## Subdomains
- Passive ([subfinder](https://github.com/projectdiscovery/subfinder), [assetfinder](https://github.com/tomnomnom/assetfinder), [amass](https://github.com/OWASP/Amass), [findomain](https://github.com/Findomain/Findomain), [crobat](https://github.com/cgboal/sonarsearch), [waybackurls](https://github.com/tomnomnom/waybackurls), [github-subdomains](https://github.com/gwen001/github-subdomains), [Anubis](https://jldc.me), [gauplus](https://github.com/bp0lr/gauplus))
- Passive ([subfinder](https://github.com/projectdiscovery/subfinder), [assetfinder](https://github.com/tomnomnom/assetfinder), [amass](https://github.com/OWASP/Amass), [findomain](https://github.com/Findomain/Findomain), [crobat](https://github.com/cgboal/sonarsearch), [waybackurls](https://github.com/tomnomnom/waybackurls), [github-subdomains](https://github.com/gwen001/github-subdomains), [Anubis](https://jldc.me), [gau](https://github.com/lc/gau))
- Certificate transparency ([ctfr](https://github.com/UnaPibaGeek/ctfr), [tls.bufferover](tls.bufferover.run) and [dns.bufferover](dns.bufferover.run))
- Bruteforce ([puredns](https://github.com/d3mondev/puredns))
- Permutations ([Gotator](https://github.com/Josue87/gotator))
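The gauplus-to-gau swap in the passive list above changes only the tool, not the workflow; a minimal sketch of the equivalent collection step, assuming `example.com` as an illustrative target and 10 threads:

```bash
# Fetch archived URLs (including subdomains) and reduce them to unique hostnames
gau --subs --threads 10 example.com | unfurl -u domains | sort -u > gau_subdomains.txt
```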
5 changes: 3 additions & 2 deletions install.sh
@@ -19,7 +19,7 @@ gotools["httpx"]="GO111MODULE=on go get -v github.com/projectdiscovery/httpx/cmd
gotools["github-endpoints"]="go get -u github.com/gwen001/github-endpoints"
gotools["dnsx"]="GO111MODULE=on go get -v github.com/projectdiscovery/dnsx/cmd/dnsx"
gotools["subfinder"]="GO111MODULE=on go get -v github.com/projectdiscovery/subfinder/v2/cmd/subfinder"
gotools["gauplus"]="GO111MODULE=on go get -v github.com/bp0lr/gauplus"
gotools["gau"]="go install github.com/lc/gau/v2/cmd/gau@latest"
gotools["subjs"]="GO111MODULE=on go get -v github.com/lc/subjs"
gotools["Gxss"]="go get -u -v github.com/KathanP19/Gxss"
gotools["gospider"]="go get -u github.com/jaeles-project/gospider"
@@ -59,7 +59,6 @@ repos["GitDorker"]="obheda12/GitDorker"
repos["testssl"]="drwetter/testssl.sh"
repos["commix"]="commixproject/commix"
repos["JSA"]="w9w/JSA"
repos["urldedupe"]="ameenmaali/urldedupe"
repos["cloud_enum"]="initstring/cloud_enum"
repos["ultimate-nmap-parser"]="shifty0g/ultimate-nmap-parser"
repos["pydictor"]="LandGrey/pydictor"
@@ -219,6 +218,8 @@ printf "${bblue}\n Running: Installing repositories (${#repos[@]})${reset}\n\n"
# Repos with special configs
eval git clone https://github.com/projectdiscovery/nuclei-templates ~/nuclei-templates $DEBUG_STD
eval git clone https://github.com/geeknik/the-nuclei-templates.git ~/nuclei-templates/extra_templates $DEBUG_STD
eval wget -nc -O ~/nuclei-templates/ssrf_nagli.yaml https://raw.githubusercontent.com/NagliNagli/BountyTricks/main/ssrf.yaml $DEBUG_STD
eval wget -nc -O ~/nuclei-templates/sap-redirect_nagli.yaml https://raw.githubusercontent.com/NagliNagli/BountyTricks/main/sap-redirect.yaml $DEBUG_STD
eval nuclei -update-templates $DEBUG_STD
cd ~/nuclei-templates/extra_templates && eval git pull $DEBUG_STD
cd "$dir" || { echo "Failed to cd to $dir in ${FUNCNAME[0]} @ line ${LINENO}"; exit 1; }
2 changes: 2 additions & 0 deletions reconftw.cfg
@@ -60,6 +60,7 @@ SUBPERMUTE=true
SUBTAKEOVER=true
SUBRECURSIVE=true
SUB_RECURSIVE_PASSIVE=false # Uses a lot of API keys queries
SUB_RECURSIVE_BRUTE=false # Needs big disk space and time to resolve
ZONETRANSFER=true
S3BUCKETS=true
REVERSE_IP=false
@@ -123,6 +124,7 @@ REMOVELOG=false
PROXY=false
SENDZIPNOTIFY=false
PRESERVE=true # set to true to avoid deleting the .called_fn files on really large scans
FFUF_FLAGS="-mc all -fc 404 -ac -sf -s"

# HTTP options
HEADER="User-Agent: Mozilla/5.0 (X11; Linux x86_64; rv:72.0) Gecko/20100101 Firefox/72.0"
24 changes: 12 additions & 12 deletions reconftw.sh
@@ -73,7 +73,7 @@ function tools_installed(){
type -P amass &>/dev/null || { printf "${bred} [*] Amass [NO]${reset}\n"; allinstalled=false;}
type -P crobat &>/dev/null || { printf "${bred} [*] Crobat [NO]${reset}\n"; allinstalled=false;}
type -P waybackurls &>/dev/null || { printf "${bred} [*] Waybackurls [NO]${reset}\n"; allinstalled=false;}
type -P gauplus &>/dev/null || { printf "${bred} [*] gauplus [NO]${reset}\n"; allinstalled=false;}
type -P gau &>/dev/null || { printf "${bred} [*] gau [NO]${reset}\n"; allinstalled=false;}
type -P dnsx &>/dev/null || { printf "${bred} [*] dnsx [NO]${reset}\n"; allinstalled=false;}
type -P gotator &>/dev/null || { printf "${bred} [*] gotator [NO]${reset}\n"; allinstalled=false;}
type -P cf-check &>/dev/null || { printf "${bred} [*] Cf-check [NO]${reset}\n"; allinstalled=false;}
@@ -350,7 +350,7 @@ function sub_passive(){
amass enum -passive -d $domain -config $AMASS_CONFIG -o .tmp/amass_psub.txt 2>>"$LOGFILE" &>/dev/null
findomain --quiet -t $domain -u .tmp/findomain_psub.txt 2>>"$LOGFILE" &>/dev/null
timeout 10m waybackurls $domain | unfurl -u domains 2>>"$LOGFILE" | anew -q .tmp/waybackurls_psub.txt
timeout 10m gauplus -t $GAUPLUS_THREADS -random-agent -subs $domain | unfurl -u domains 2>>"$LOGFILE" | anew -q .tmp/gau_psub.txt
timeout 10m gau --subs --threads $GAUPLUS_THREADS $domain | unfurl -u domains 2>>"$LOGFILE" | anew -q .tmp/gau_psub.txt
else
axiom-scan $list -m subfinder -all -o .tmp/subfinder_psub.txt 2>>"$LOGFILE" &>/dev/null
axiom-scan $list -m assetfinder -o .tmp/assetfinder_psub.txt 2>>"$LOGFILE" &>/dev/null
@@ -620,7 +620,7 @@ function sub_recursive(){
fi
fi
# Bruteforce recursive
if [[ $(cat subdomains/subdomains.txt | wc -l) -le $DEEP_LIMIT ]]; then
if [[ $(cat subdomains/subdomains.txt | wc -l) -le $DEEP_LIMIT ]] && [ "$SUB_RECURSIVE_BRUTE" = true ] ; then
echo "" > .tmp/brute_recursive_wordlist.txt
for sub in $(cat subdomains/subdomains.txt); do
sed "s/$/.$sub/" $subs_wordlist >> .tmp/brute_recursive_wordlist.txt
@@ -722,7 +722,7 @@ function s3buckets(){
[ -s "subdomains/subdomains.txt" ] && s3scanner scan -f subdomains/subdomains.txt | grep -iv "not_exist" | grep -iv "Warning:" | grep -iv "invalid_name" | anew -q .tmp/s3buckets.txt
else
axiom-scan webs/webs.txt -m s3scanner -o .tmp/s3buckets_tmp.txt 2>>"$LOGFILE" &>/dev/null
cat .tmp/s3buckets_tmp.txt | grep -iv "not_exist" | grep -iv "Warning:" | anew -q .tmp/s3buckets.txt
[ -s ".tmp/s3buckets_tmp.txt" ] && cat .tmp/s3buckets_tmp.txt | grep -iv "not_exist" | grep -iv "Warning:" | anew -q .tmp/s3buckets.txt
fi
# Cloudenum
keyword=${domain%%.*}
@@ -1065,15 +1065,15 @@ function fuzz(){
if [ -s "webs/webs.txt" ]; then
mkdir -p $dir/fuzzing
if [ ! "$AXIOM" = true ]; then
interlace -tL webs/webs.txt -threads 10 -c "ffuf -mc all -fc 404 -ac -t ${FFUF_THREADS} -sf -s -H \"${HEADER}\" -w ${fuzz_wordlist} -maxtime ${FFUF_MAXTIME} -u _target_/FUZZ -of csv -o _output_/_cleantarget_.csv" -o fuzzing 2>>"$LOGFILE" &>/dev/null
interlace -tL webs/webs.txt -threads 10 -c "ffuf ${FFUF_FLAGS} -t ${FFUF_THREADS} -H \"${HEADER}\" -w ${fuzz_wordlist} -maxtime ${FFUF_MAXTIME} -u _target_/FUZZ -of csv -o _output_/_cleantarget_.csv" -o fuzzing 2>>"$LOGFILE" &>/dev/null
for sub in $(cat webs/webs.txt); do
sub_out=$(echo $sub | sed -e 's|^[^/]*//||' -e 's|/.*$||')
[ -s "$dir/fuzzing/${sub_out}.csv" ] && cat $dir/fuzzing/${sub_out}.csv | cut -d ',' -f2,5,6 | tr ',' ' ' | awk '{ print $2 " " $3 " " $1}' | tail -n +2 | sort -k1 | anew -q $dir/fuzzing/${sub_out}.txt
rm -f $dir/fuzzing/${sub_out}.csv 2>>"$LOGFILE"
done
find $dir/fuzzing/ -type f -iname "*.txt" -exec cat {} + 2>>"$LOGFILE" | anew -q $dir/fuzzing/fuzzing_full.txt
else
axiom-scan webs/webs.txt -m ffuf -w /home/op/lists/onelistforallmicro.txt -H \"${HEADER}\" -mc all -fc 404 -sf -ac -s -maxtime $FFUF_MAXTIME -o $dir/fuzzing/ffuf-content.csv 2>>"$LOGFILE" &>/dev/null
axiom-scan webs/webs.txt -m ffuf -w /home/op/lists/onelistforallmicro.txt -H \"${HEADER}\" $FFUF_FLAGS -maxtime $FFUF_MAXTIME -o $dir/fuzzing/ffuf-content.csv 2>>"$LOGFILE" &>/dev/null
grep -v "FUZZ,url,redirectlocation" $dir/fuzzing/ffuf-content.csv | awk -F "," '{print $2" "$5" "$6}' | sort > $dir/fuzzing/ffuf-content.tmp
for sub in $(cat webs/webs.txt); do
sub_out=$(echo $sub | sed -e 's|^[^/]*//||' -e 's|/.*$||')
Expand Down Expand Up @@ -1142,7 +1142,7 @@ function urlchecks(){
if [ ! "$AXIOM" = true ]; then
if ! [[ $domain =~ ^[0-9]+\.[0-9]+\.[0-9]+\.[0-9] ]]; then
cat webs/webs.txt | waybackurls | anew -q .tmp/url_extract_tmp.txt
cat webs/webs.txt | gauplus -t $GAUPLUS_THREADS -subs | anew -q .tmp/url_extract_tmp.txt
cat webs/webs.txt | gau --subs --threads $GAUPLUS_THREADS | anew -q .tmp/url_extract_tmp.txt
fi
diff_webs=$(diff <(sort -u .tmp/probed_tmp.txt 2>>"$LOGFILE") <(sort -u webs/webs.txt 2>>"$LOGFILE") | wc -l)
if [ $diff_webs != "0" ] || [ ! -s ".tmp/gospider.txt" ]; then
Expand Down Expand Up @@ -1783,8 +1783,8 @@ function sendToNotify {
fi
if grep -q '^ telegram\|^telegram\|^ telegram' $NOTIFY_CONFIG ; then
notification "Sending ${domain} data over Telegram" info
telegram_chat_id=$(cat ${NOTIFY_CONFIG} | grep '^ telegram_chat_id\|^telegram_chat_id\|^ telegram_chat_id' | xargs | cut -d' ' -f2)
telegram_key=$(cat ${NOTIFY_CONFIG} | grep '^ telegram_apikey\|^telegram_apikey\|^ telegram_apikey' | xargs | cut -d' ' -f2 )
telegram_chat_id=$(cat ${NOTIFY_CONFIG} | grep '^ telegram_chat_id\|^telegram_chat_id\|^ telegram_chat_id' | xargs | cut -d' ' -f2)
telegram_key=$(cat ${NOTIFY_CONFIG} | grep '^ telegram_api_key\|^telegram_api_key\|^ telegram_apikey' | xargs | cut -d' ' -f2 )
curl -F document=@${1} "https://api.telegram.org/bot${telegram_key}/sendDocument?chat_id=${telegram_chat_id}" &>/dev/null
fi
if grep -q '^ discord\|^discord\|^ discord' $NOTIFY_CONFIG ; then
Expand Down Expand Up @@ -1891,12 +1891,12 @@ function axiom_lauch(){
else
startcount=$((AXIOM_FLEET_COUNT-NUMOFNODES))
fi
axiom_args=" -i=$startcount "
AXIOM_ARGS=" -i $startcount"
# Temporarily disabled multiple axiom regions
# [ -n "$AXIOM_FLEET_REGIONS" ] && axiom_args="$axiom_args --regions=\"$AXIOM_FLEET_REGIONS\" "

echo "axiom-fleet $AXIOM_FLEET_NAME $axiom_args"
axiom-fleet $AXIOM_FLEET_NAME "$axiom_args"
echo "axiom-fleet ${AXIOM_FLEET_NAME} ${AXIOM_ARGS}"
axiom-fleet ${AXIOM_FLEET_NAME} ${AXIOM_ARGS}
axiom-select "$AXIOM_FLEET_NAME*"
if [ -n "$AXIOM_POST_START" ]; then
eval "$AXIOM_POST_START" 2>>"$LOGFILE" &>/dev/null
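The fleet hotfix above drops the '=' from the -i option and passes the arguments unquoted, so axiom-fleet receives the instance count as a separate argument; the resulting call is roughly equivalent to the following sketch (fleet name and count are illustrative):

```bash
# Launch 5 instances under the 'reconftw' fleet name, then select them for scans
axiom-fleet reconftw -i 5
axiom-select "reconftw*"
```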
