From dedb585bf878990e2aa24e8927b4d34b1622d709 Mon Sep 17 00:00:00 2001 From: six2dez Date: Mon, 23 Oct 2023 09:34:11 +0200 Subject: [PATCH 01/17] First commit for v3.0 dev --- bin/rftw_ip_cdnprovider | 65 +++++++++++++++++ bin/rftw_ip_favicon | 86 ++++++++++++++++++++++ bin/rftw_ip_info | 101 ++++++++++++++++++++++++++ bin/rftw_ip_portscan | 100 ++++++++++++++++++++++++++ bin/rftw_osint_emails | 95 +++++++++++++++++++++++++ bin/rftw_osint_ghdorks | 67 +++++++++++++++++ bin/rftw_osint_ghrepos | 89 +++++++++++++++++++++++ bin/rftw_osint_googledorks | 58 +++++++++++++++ bin/rftw_osint_metadata | 40 +++++++++++ bin/rftw_osint_postleaks | 51 +++++++++++++ bin/rftw_osint_whois | 68 ++++++++++++++++++ bin/rftw_sub_active | 75 ++++++++++++++++++++ bin/rftw_sub_analytics | 96 +++++++++++++++++++++++++ bin/rftw_sub_brute | 88 +++++++++++++++++++++++ bin/rftw_sub_crt | 75 ++++++++++++++++++++ bin/rftw_sub_dns | 85 ++++++++++++++++++++++ bin/rftw_sub_full | 130 +++++++++++++++++++++++++++++++++ bin/rftw_sub_noerror | 74 +++++++++++++++++++ bin/rftw_sub_passive | 79 +++++++++++++++++++++ bin/rftw_sub_permut | 92 ++++++++++++++++++++++++ bin/rftw_sub_recbrute | 130 +++++++++++++++++++++++++++++++++ bin/rftw_sub_recpassive | 68 ++++++++++++++++++ bin/rftw_sub_regex | 67 +++++++++++++++++ bin/rftw_sub_s3buckets | 79 +++++++++++++++++++++ bin/rftw_sub_scraping | 92 ++++++++++++++++++++++++ bin/rftw_sub_takeover | 66 +++++++++++++++++ bin/rftw_sub_vhosts | 69 ++++++++++++++++++ bin/rftw_sub_zonetransfer | 64 +++++++++++++++++ bin/rftw_uti_transfer | 62 ++++++++++++++++ bin/rftw_util_ascii | 57 +++++++++++++++ bin/rftw_util_axiomoff | 60 ++++++++++++++++ bin/rftw_util_axiomon | 64 +++++++++++++++++ bin/rftw_util_axiomsel | 50 +++++++++++++ bin/rftw_util_deleteoos | 72 +++++++++++++++++++ bin/rftw_util_gettime | 65 +++++++++++++++++ bin/rftw_util_ipcidr | 61 ++++++++++++++++ bin/rftw_util_notification | 74 +++++++++++++++++++ bin/rftw_util_output | 56 +++++++++++++++ bin/rftw_util_removebig | 53 ++++++++++++++ bin/rftw_util_resolver | 63 ++++++++++++++++ bin/rftw_util_sendnotify | 78 ++++++++++++++++++++ bin/rftw_util_tools | 142 +++++++++++++++++++++++++++++++++++++ bin/rftw_util_version | 56 +++++++++++++++ bin/rftw_util_zipfolder | 60 ++++++++++++++++ bin/rftw_vuln_4xx | 77 ++++++++++++++++++++ bin/rftw_vuln_brokenlink | 68 ++++++++++++++++++ bin/rftw_vuln_comminject | 76 ++++++++++++++++++++ bin/rftw_vuln_cors | 82 +++++++++++++++++++++ bin/rftw_vuln_crlf | 83 ++++++++++++++++++++++ bin/rftw_vuln_fuzzparam | 81 +++++++++++++++++++++ bin/rftw_vuln_lfi | 83 ++++++++++++++++++++++ bin/rftw_vuln_openredir | 89 +++++++++++++++++++++++ bin/rftw_vuln_protpollut | 74 +++++++++++++++++++ bin/rftw_vuln_smuggling | 77 ++++++++++++++++++++ bin/rftw_vuln_spray | 71 +++++++++++++++++++ bin/rftw_vuln_sqli | 92 ++++++++++++++++++++++++ bin/rftw_vuln_ssrf | 108 ++++++++++++++++++++++++++++ bin/rftw_vuln_ssti | 83 ++++++++++++++++++++++ bin/rftw_vuln_testssl | 69 ++++++++++++++++++ bin/rftw_vuln_webcache | 77 ++++++++++++++++++++ bin/rftw_vuln_xss | 120 +++++++++++++++++++++++++++++++ bin/rftw_web_cms | 86 ++++++++++++++++++++++ bin/rftw_web_fuzz | 92 ++++++++++++++++++++++++ bin/rftw_web_jschecks | 98 +++++++++++++++++++++++++ bin/rftw_web_nucleicheck | 82 +++++++++++++++++++++ bin/rftw_web_passdict | 67 +++++++++++++++++ bin/rftw_web_probecommon | 70 ++++++++++++++++++ bin/rftw_web_probeuncommon | 81 +++++++++++++++++++++ bin/rftw_web_roboxtractor | 69 ++++++++++++++++++ bin/rftw_web_screenshot | 65 +++++++++++++++++ 
bin/rftw_web_urlchecks | 133 ++++++++++++++++++++++++++++++++++ bin/rftw_web_urlext | 97 +++++++++++++++++++++++++ bin/rftw_web_urlgf | 84 ++++++++++++++++++++++ bin/rftw_web_wafcheck | 82 +++++++++++++++++++++ bin/rftw_web_wordlists | 73 +++++++++++++++++++ images/banner.png | Bin images/docker.png | Bin images/reconFTW.gif | Bin install.sh | 2 +- reconftw.cfg | 2 +- reconftw.sh | 141 +++++------------------------------- 81 files changed, 5931 insertions(+), 125 deletions(-) create mode 100755 bin/rftw_ip_cdnprovider create mode 100755 bin/rftw_ip_favicon create mode 100755 bin/rftw_ip_info create mode 100755 bin/rftw_ip_portscan create mode 100755 bin/rftw_osint_emails create mode 100755 bin/rftw_osint_ghdorks create mode 100755 bin/rftw_osint_ghrepos create mode 100755 bin/rftw_osint_googledorks create mode 100755 bin/rftw_osint_metadata create mode 100755 bin/rftw_osint_postleaks create mode 100755 bin/rftw_osint_whois create mode 100755 bin/rftw_sub_active create mode 100755 bin/rftw_sub_analytics create mode 100755 bin/rftw_sub_brute create mode 100755 bin/rftw_sub_crt create mode 100755 bin/rftw_sub_dns create mode 100755 bin/rftw_sub_full create mode 100755 bin/rftw_sub_noerror create mode 100755 bin/rftw_sub_passive create mode 100755 bin/rftw_sub_permut create mode 100755 bin/rftw_sub_recbrute create mode 100755 bin/rftw_sub_recpassive create mode 100755 bin/rftw_sub_regex create mode 100755 bin/rftw_sub_s3buckets create mode 100755 bin/rftw_sub_scraping create mode 100755 bin/rftw_sub_takeover create mode 100755 bin/rftw_sub_vhosts create mode 100755 bin/rftw_sub_zonetransfer create mode 100755 bin/rftw_uti_transfer create mode 100755 bin/rftw_util_ascii create mode 100755 bin/rftw_util_axiomoff create mode 100755 bin/rftw_util_axiomon create mode 100755 bin/rftw_util_axiomsel create mode 100755 bin/rftw_util_deleteoos create mode 100755 bin/rftw_util_gettime create mode 100755 bin/rftw_util_ipcidr create mode 100755 bin/rftw_util_notification create mode 100755 bin/rftw_util_output create mode 100755 bin/rftw_util_removebig create mode 100755 bin/rftw_util_resolver create mode 100755 bin/rftw_util_sendnotify create mode 100755 bin/rftw_util_tools create mode 100755 bin/rftw_util_version create mode 100755 bin/rftw_util_zipfolder create mode 100755 bin/rftw_vuln_4xx create mode 100755 bin/rftw_vuln_brokenlink create mode 100755 bin/rftw_vuln_comminject create mode 100755 bin/rftw_vuln_cors create mode 100755 bin/rftw_vuln_crlf create mode 100755 bin/rftw_vuln_fuzzparam create mode 100755 bin/rftw_vuln_lfi create mode 100755 bin/rftw_vuln_openredir create mode 100755 bin/rftw_vuln_protpollut create mode 100755 bin/rftw_vuln_smuggling create mode 100755 bin/rftw_vuln_spray create mode 100755 bin/rftw_vuln_sqli create mode 100755 bin/rftw_vuln_ssrf create mode 100755 bin/rftw_vuln_ssti create mode 100755 bin/rftw_vuln_testssl create mode 100755 bin/rftw_vuln_webcache create mode 100755 bin/rftw_vuln_xss create mode 100755 bin/rftw_web_cms create mode 100755 bin/rftw_web_fuzz create mode 100755 bin/rftw_web_jschecks create mode 100755 bin/rftw_web_nucleicheck create mode 100755 bin/rftw_web_passdict create mode 100755 bin/rftw_web_probecommon create mode 100755 bin/rftw_web_probeuncommon create mode 100755 bin/rftw_web_roboxtractor create mode 100755 bin/rftw_web_screenshot create mode 100755 bin/rftw_web_urlchecks create mode 100755 bin/rftw_web_urlext create mode 100755 bin/rftw_web_urlgf create mode 100755 bin/rftw_web_wafcheck create mode 100755 bin/rftw_web_wordlists mode 
change 100644 => 100755 images/banner.png mode change 100644 => 100755 images/docker.png mode change 100644 => 100755 images/reconFTW.gif diff --git a/bin/rftw_ip_cdnprovider b/bin/rftw_ip_cdnprovider new file mode 100755 index 00000000..0da6e308 --- /dev/null +++ b/bin/rftw_ip_cdnprovider @@ -0,0 +1,65 @@ +#!/bin/bash +# Check manually, the logic for extracting the ips from dnsx results should be on reconftw.sh script, this script should only scan the ips + +# Default config path +CONFIG_PATH="$RECONFTW_CFG" + +# Check if the config file exists +if [ -f "$CONFIG_PATH" ]; then + source "$CONFIG_PATH" +else + echo "Error: reconftw.cfg not found at $CONFIG_PATH!" + exit 1 +fi + +# Help function +help_menu() { + cat <<- EOF +Usage: $0 [OPTIONS] + +CDN Provider Check Tool + +Options: + -h, --help Show this help menu + -i, --input Specify the input IP lists file (required) + -f, --force Force the execution even if already processed + -o, --output Specify the output file (default is print to stdout) +EOF +} + +# Default values +FORCE=false +INPUT_FILE="" +OUTPUT_FILE="" + +# Parse arguments +while [[ "$#" -gt 0 ]]; do + case $1 in + -i|--input) INPUT_FILE="$2"; shift ;; + -h|--help) help_menu; exit 0 ;; + -f|--force) FORCE=true; shift ;; + -o|--output) OUTPUT_FILE="$2"; shift ;; + *) + if [ -z "$INPUT_FILE" ]; then + INPUT_FILE=$1 + else + echo "Unknown parameter passed: $1"; exit 1; + fi + ;; + esac + shift +done + +# Validate input file +if [ ! -f "$INPUT_FILE" ] || [ ! -s "$INPUT_FILE" ]; then + echo "Invalid or empty input file!" + help_menu + exit 1 +fi + +# Handle the output of the cdncheck command based on the presence of the -o flag +if [ -z "$OUTPUT_FILE" ]; then + cdncheck -silent -resp -nc < "$INPUT_FILE" +else + cdncheck -silent -resp -nc < "$INPUT_FILE" | sort -u > "$OUTPUT_FILE" +fi diff --git a/bin/rftw_ip_favicon b/bin/rftw_ip_favicon new file mode 100755 index 00000000..ec07f8bb --- /dev/null +++ b/bin/rftw_ip_favicon @@ -0,0 +1,86 @@ +#!/bin/bash +# Looks good, needs testing + +# Load environment variables +if [ -f "reconftw.cfg" ]; then + source reconftw.cfg +else + echo "Error: reconftw.cfg not found!" + exit 1 +fi + +help_menu() { + echo "Usage: $0 [OPTIONS] DOMAIN" + echo "" + echo "Favicon Ip Lookup Tool" + echo "" + echo "Options:" + echo " -h, --help Show this help menu" + echo " -d, --domain DOMAIN Specify the domain to process" + echo " -f, --force Force the execution even if already processed" +} + +# Start function +start_func() { + echo "Starting $1..." +} + +# End function +end_func() { + echo "$1" + echo "End of $2..." +} + +# Default values +FORCE=false +DOMAIN="" + +# Parse arguments +while [[ "$#" -gt 0 ]]; do + case $1 in + -h|--help) help_menu; exit 0 ;; + -d|--domain) DOMAIN="$2"; shift ;; + -f|--force) FORCE=true ;; + *) + # Set the domain if it's not specified with -d + [ -z "$DOMAIN" ] && DOMAIN="$1" || { echo "Unknown parameter passed: $1"; exit 1; } + ;; + esac + shift +done + +# Input validation +if [ -z "$DOMAIN" ]; then + help_menu + exit 1 +fi + +# Validate domain format +if [[ ! "$DOMAIN" =~ ^[a-zA-Z0-9.-]+$ ]]; then + echo "Invalid domain format." + exit 1 +fi + +if { [ ! -f "$called_fn_dir/.favicon" ] || [ "$FORCE" = true ]; } && [ "$FAVICON" = true ] && ! 
[[ $DOMAIN =~ ^[0-9]+\.[0-9]+\.[0-9]+\.[0-9] ]]; then + start_func "Favicon Ip Lookup" + + cd "$tools/fav-up" || { echo "Failed to cd to $tools/fav-up in favicon"; exit 1; } + python3 favUp.py -w "$DOMAIN" -sc -o favicontest.json 2>>"$LOGFILE" >/dev/null + + if [ -s "favicontest.json" ]; then + jq -r 'try .found_ips' favicontest.json 2>>"$LOGFILE" | grep -v "not-found" > favicontest.txt + sed -i "s/|/\n/g" favicontest.txt + cat favicontest.txt 2>>"$LOGFILE" + mv favicontest.txt $dir/hosts/favicontest.txt 2>>"$LOGFILE" + rm -f favicontest.json 2>>"$LOGFILE" + fi + + cd "$dir" || { echo "Failed to cd to $dir in favicon"; exit 1; } + end_func "Results are saved in hosts/favicontest.txt" "favicon" +else + if [ "$FAVICON" = false ] || [[ $DOMAIN =~ ^[0-9]+\.[0-9]+\.[0-9]+\.[0-9] ]]; then + echo -e "\n${yellow} favicon skipped in this mode, defined in reconftw.cfg, or the domain is an IP address${reset}\n" + else + echo -e "${yellow} favicon is already processed. To force executing favicon, delete\n $called_fn_dir/.favicon ${reset}\n\n" + fi +fi
diff --git a/bin/rftw_ip_info b/bin/rftw_ip_info new file mode 100755 index 00000000..240e6d92 --- /dev/null +++ b/bin/rftw_ip_info @@ -0,0 +1,101 @@ +#!/bin/bash +# Looks good, needs testing + +# Load environment variables +if [ -f "reconftw.cfg" ]; then + source reconftw.cfg +else + echo "Error: reconftw.cfg not found!" + exit 1 +fi + +# Variables +called_fn_dir="./called_functions" +LOGFILE="./log.txt" +IP_INFO=true +OSINT=true +WHOISXML_API_LOCAL="YOUR_API_KEY_HERE" # Replace with your actual API key + +# If WHOISXML_API is not set in reconftw.cfg, fall back to the key defined in this script +if [ -z "$WHOISXML_API" ]; then + WHOISXML_API=$WHOISXML_API_LOCAL +fi + +# If WHOISXML_API is still not present or empty, exit the program +if [ -z "$WHOISXML_API" ]; then + echo "No WHOISXML_API var defined, exiting..." + exit 1 +fi + +# Function to display help menu +help_menu() { + cat <<- EOF +Usage: $0 -i <input_file> -o <output_file> + +Search for IP information. + +Options: + -i, --input Input file with target IP addresses (one per line) + -o, --output Output file to save results + -h, --help Display this help menu +EOF + exit 0 +} + +# Function to start the process +start_func() { + echo "[*] $1" +} + +# Function to end the process +end_func() { + echo "[+] $1" +} + +# Main function to gather IP info +ip_info_func() { + local ip="$1" + if { [ ! -f "$called_fn_dir/.ip_info" ]; } && [ "$IP_INFO" = true ] && [ "$OSINT" = true ] && [[ $ip =~ ^[0-9]+\.[0-9]+\.[0-9]+\.[0-9] ]]; then + start_func "Searching IP info for $ip" + + if [ -n "$WHOISXML_API" ]; then + # IP Relations + curl "https://reverse-ip.whoisxmlapi.com/api/v1?apiKey=${WHOISXML_API}&ip=${ip}" 2>/dev/null | jq -r '.result[].name' 2>>"$LOGFILE" >> "$OUTPUT_FILE" + # IP Whois + curl "https://www.whoisxmlapi.com/whoisserver/WhoisService?apiKey=${WHOISXML_API}&domainName=${ip}&outputFormat=json&da=2&registryRawText=1&registrarRawText=1&ignoreRawTexts=1" 2>/dev/null | jq 2>>"$LOGFILE" >> "$OUTPUT_FILE" + # IP Location + curl "https://ip-geolocation.whoisxmlapi.com/api/v1?apiKey=${WHOISXML_API}&ipAddress=${ip}" 2>/dev/null | jq -r '.ip,.location' 2>>"$LOGFILE" >> "$OUTPUT_FILE" + else + echo "${yellow} No WHOISXML_API var defined, skipping function ${reset}" + fi + else + echo "Skipped ip_info for $ip due to conditions or configuration." 
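+        # Illustrative usage of this script (the -i/-o flags are parsed below; both paths are only examples):
+        #   ./bin/rftw_ip_info -i hosts/ips.txt -o osint/ip_info.txt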
+ fi +} + +# Default values +INPUT_FILE="" +OUTPUT_FILE="" + +# Parse arguments +while [[ "$#" -gt 0 ]]; do + case $1 in + -i|--input) INPUT_FILE="$2"; shift ;; + -o|--output) OUTPUT_FILE="$2"; shift ;; + -h|--help) help_menu ;; + *) echo "Unknown parameter passed: $1"; exit 1 ;; + esac + shift +done + +# Input validation +if [[ -z "$INPUT_FILE" || -z "$OUTPUT_FILE" ]]; then + help_menu +fi + +# Process IPs +while IFS= read -r IP; do + ip_info_func "$IP" +done < "$INPUT_FILE" + +echo "Completed. Check $OUTPUT_FILE for the results." diff --git a/bin/rftw_ip_portscan b/bin/rftw_ip_portscan new file mode 100755 index 00000000..b751ebaf --- /dev/null +++ b/bin/rftw_ip_portscan @@ -0,0 +1,100 @@ +#!/bin/bash +# Check manually, the logic for extracting the ips from dnsx results should be on reconftw.sh script, this script should only scan the ips + +# Load environment variables +if [ -f "reconftw.cfg" ]; then + source reconftw.cfg +else + echo "Error: reconftw.cfg not found!" + exit 1 +fi + +# Help function +help_menu() { + echo "Usage: $0 [OPTIONS] DOMAIN" + echo "" + echo "Port Scan Tool" + echo "" + echo "Options:" + echo " -h, --help Show this help menu" + echo " -d, --domain DOMAIN Specify the domain to process" + echo " -f, --force Force the execution even if already processed" +} + +# Start function +start_func() { + echo "Starting $1..." +} + +# End function +end_func() { + echo "$1" + echo "End of $2..." +} + +# Input validation +if [ -z "$1" ]; then + help_menu + exit 1 +fi + +# Default values +FORCE=false + +# Parse arguments +while [[ "$#" -gt 0 ]]; do + case $1 in + -h|--help) help_menu; exit 0 ;; + -d|--domain) DOMAIN="$2"; shift ;; + -f|--force) FORCE=true ;; + *) echo "Unknown parameter passed: $1"; exit 1 ;; + esac + shift +done + +# Validate domain format +if [[ ! "$DOMAIN" =~ ^[a-zA-Z0-9.-]+$ ]] && ! [[ $DOMAIN =~ ^[0-9]+\.[0-9]+\.[0-9]+\.[0-9]+$ ]]; then + echo "Invalid domain format." + exit 1 +fi + +if { [ ! -f "$called_fn_dir/.portscan" ] || [ "$FORCE" = true ]; } && [ "$PORTSCANNER" = true ]; then + start_func "portscan" "Port scan" + + if ! [[ $DOMAIN =~ ^[0-9]+\.[0-9]+\.[0-9]+\.[0-9]+$ ]]; then + # The following checks and actions are based on the original function logic + [ -s "subdomains/subdomains_dnsregs.json" ] && jq -r 'try . | "\(.host) \(.a[0])"' subdomains/subdomains_dnsregs.json | anew -q .tmp/subs_ips.txt + [ -s ".tmp/subs_ips.txt" ] && awk '{ print $2 " " $1}' .tmp/subs_ips.txt | sort -k2 -n | anew -q hosts/subs_ips_vhosts.txt + [ -s "hosts/subs_ips_vhosts.txt" ] && cut -d ' ' -f1 hosts/subs_ips_vhosts.txt | grep -aEiv "^(127|10|169\.154|172\.1[6789]|172\.2[0-9]|172\.3[01]|192\.168)\." | grep -oE "\b([0-9]{1,3}\.){3}[0-9]{1,3}\b" | anew -q hosts/ips.txt + else + echo $DOMAIN | grep -aEiv "^(127|10|169\.154|172\.1[6789]|172\.2[0-9]|172\.3[01]|192\.168)\." | grep -oE "\b([0-9]{1,3}\.){3}[0-9]{1,3}\b" | anew -q hosts/ips.txt + fi + + [ ! -s "hosts/cdn_providers.txt" ] && [ -s "hosts/ips.txt" ] && cdncheck -silent -resp -nc < hosts/ips.txt 2>/dev/null > hosts/cdn_providers.txt + + [ -s "hosts/ips.txt" ] && comm -23 <(sort -u hosts/ips.txt) <(cut -d'[' -f1 hosts/cdn_providers.txt | sed 's/[[:space:]]*$//' | sort -u) | grep -aEiv "^(127|10|169\.154|172\.1[6789]|172\.2[0-9]|172\.3[01]|192\.168)\." | grep -oE "\b([0-9]{1,3}\.){3}[0-9]{1,3}\b" | sort -u | anew -q .tmp/ips_nocdn.txt + + printf "${bblue}\n Resolved IP addresses (No CDN) ${reset}\n\n" + [ -s ".tmp/ips_nocdn.txt" ] && sort .tmp/ips_nocdn.txt + printf "${bblue}\n Scanning ports... 
${reset}\n\n" + + if [ "$PORTSCAN_PASSIVE" = true ] && [ ! -f "hosts/portscan_passive.txt" ] && [ -s ".tmp/ips_nocdn.txt" ]; then + smap -iL .tmp/ips_nocdn.txt > hosts/portscan_passive.txt + fi + + if [ "$PORTSCAN_ACTIVE" = true ]; then + if [ "$AXIOM" = true ]; then + [ -s ".tmp/ips_nocdn.txt" ] && axiom-scan .tmp/ips_nocdn.txt -m nmapx --top-ports 200 -sV -n -Pn --open --max-retries 2 --script vulners -oA hosts/portscan_active $AXIOM_EXTRA_ARGS 2>>"$LOGFILE" >/dev/null + else + [ -s ".tmp/ips_nocdn.txt" ] && $SUDO nmap --top-ports 200 -sV -n --max-retries 2 -Pn --open --script vulners -iL .tmp/ips_nocdn.txt -oA hosts/portscan_active 2>>"$LOGFILE" >/dev/null + fi + fi + + end_func "Results are saved in hosts/portscan_[passive|active].txt" "portscan" +else + if [ "$PORTSCANNER" = false ]; then + printf "\n${yellow} portscan skipped in this mode or defined in reconftw.cfg ${reset}\n" + else + printf "${yellow} portscan is already processed, to force executing portscan delete\n $called_fn_dir/.portscan ${reset}\n\n" + fi +fi diff --git a/bin/rftw_osint_emails b/bin/rftw_osint_emails new file mode 100755 index 00000000..d8207b02 --- /dev/null +++ b/bin/rftw_osint_emails @@ -0,0 +1,95 @@ +#!/bin/bash + +# Looks good, needs testing + +# Load environment variables +if [ -f "reconftw.cfg" ]; then + source reconftw.cfg +else + echo "Error: reconftw.cfg not found!" + exit 1 +fi + + + +function help_menu(){ + echo "Usage: $0 [OPTIONS] [DOMAIN]" + echo "" + echo "Email Finder Tool" + echo "" + echo "Options:" + echo " -h, --help Show this help menu" + echo " -d, --domain DOMAIN Specify the domain target" + echo " -o, --output DIR Specify the output directory" + echo " -f, --force Force the execution even if already processed" +} + +function validate_domain(){ + if [[ ! "$1" =~ ^[a-zA-Z0-9.-]+$ ]]; then + echo "Error: Invalid domain format." + exit 1 + fi +} + +function emails(){ + if { [ ! -f "$called_fn_dir/.${FUNCNAME[0]}" ] || [ "$DIFF" = true ]; } && [ "$EMAILS" = true ] && [ "$OSINT" = true ] && ! [[ $domain =~ ^[0-9]+\.[0-9]+\.[0-9]+\.[0-9] ]]; then + echo "Searching emails/users/passwords leaks" + emailfinder -d $domain 2>>"$LOGFILE" | anew -q .tmp/emailfinder.txt || { echo "emailfinder command failed"; exit 1; } + [ -s ".tmp/emailfinder.txt" ] && cat .tmp/emailfinder.txt | grep "@" | grep -iv "|_" | anew -q osint/emails.txt + echo "Results are saved in $domain/osint/emails.txt" + else + if [ "$EMAILS" = false ] || [ "$OSINT" = false ]; then + echo "${FUNCNAME[0]} skipped in this mode or defined in reconftw.cfg" + else + echo "${FUNCNAME[0]} is already processed, to force executing ${FUNCNAME[0]} delete $called_fn_dir/.${FUNCNAME[0]}" + fi + fi +} + +function main(){ + local OUTPUT_DIR="./" + local FORCE=false + local DOMAIN="" + + # Parse arguments + while [[ "$#" -gt 0 ]]; do + case $1 in + -h|--help) help_menu; exit 0 ;; + -d|--domain) DOMAIN="$2"; shift ;; + -o|--output) OUTPUT_DIR="$2"; shift ;; + -f|--force) FORCE=true ;; + *) + # If a domain wasn't set using -d, then set it using the last argument + if [[ -z "$DOMAIN" ]]; then + DOMAIN="$1" + else + echo "Unknown parameter or multiple domains passed: $1"; exit 1; + fi + ;; + esac + shift + done + + if [[ -z "$DOMAIN" ]]; then + echo "No domain provided." 
+ help_menu + exit 1 + fi + + validate_domain "$DOMAIN" + + # Define other variables + called_fn_dir="./called_functions" + DIFF=true + EMAILS=true + OSINT=true + LOGFILE="./log.txt" + + echo "Processing domain: $DOMAIN" + + # Call the emails function + emails +} + +# Execute the main function +main "$@" \ No newline at end of file diff --git a/bin/rftw_osint_ghdorks b/bin/rftw_osint_ghdorks new file mode 100755 index 00000000..7d06cd19 --- /dev/null +++ b/bin/rftw_osint_ghdorks @@ -0,0 +1,67 @@ +#!/bin/bash + +# Default config path +CONFIG_PATH="$RECONFTW_CFG" + +# Check if the config file exists +if [ -f "$CONFIG_PATH" ]; then + source "$CONFIG_PATH" +else + echo "Error: reconftw.cfg not found at $CONFIG_PATH!" + exit 1 +fi + +# Help menu +function help_menu() { + echo -e "Usage: ./github_dorks_script.sh [OPTIONS] [DOMAIN]" + echo -e "Options:" + echo -e " -d, --domain DOMAIN Specify the domain" + echo -e " -g, --github-tokens FILE Specify the path to the GitHub tokens file" + echo -e " -D, --deep Use deep dorking (optional)" + echo -e " -h, --help Display this help menu" + exit 1 +} + +# Main function +function github_dorks() { + if [ -s "${github_tokens}" ]; then + local dorks_file="$tools/gitdorks_go/Dorks/smalldorks.txt" + [ "$deep" = true ] && dorks_file="$tools/gitdorks_go/Dorks/medium_dorks.txt" + gitdorks_go -gd "$dorks_file" -nws 20 -target "$domain" -tf "${github_tokens}" -ew 3 || { echo "gitdorks_go/anew command failed" >&2; exit 1; } + else + echo -e "${bred}Required file ${github_tokens} not exists or empty${reset}" >&2 + fi +} + +# Default values +domain="" +github_tokens="" +deep=false + +# Parse command-line arguments +while [[ "$#" -gt 0 ]]; do + case $1 in + -d|--domain) domain="$2"; shift ;; + -g|--github-tokens) github_tokens="$2"; shift ;; + -D|--deep) deep=true ;; + -h|--help) help_menu ;; + *) + if [ -z "$domain" ]; then + domain="$1" + else + echo -e "${bred}Unknown parameter passed: $1${reset}" >&2; help_menu + fi + ;; + esac + shift +done + +# Validate inputs +if [ -z "$domain" ] || [ -z "$github_tokens" ]; then + echo -e "${bred}Error: Missing required parameters.${reset}" >&2 + help_menu + exit 1 +fi + +# Execute the main function +github_dorks \ No newline at end of file diff --git a/bin/rftw_osint_ghrepos b/bin/rftw_osint_ghrepos new file mode 100755 index 00000000..52c138d6 --- /dev/null +++ b/bin/rftw_osint_ghrepos @@ -0,0 +1,89 @@ +#!/bin/bash + +# Default config path +CONFIG_PATH="$RECONFTW_CFG" + +# Check if the config file exists +if [ -f "$CONFIG_PATH" ]; then + source "$CONFIG_PATH" +else + echo "Error: reconftw.cfg not found at $CONFIG_PATH!" + exit 1 +fi + +# Help menu +function help_menu() { + echo -e "Usage: ./github_repos_script.sh [OPTIONS] [DOMAIN]" + echo -e "Options:" + echo -e " -d, --domain DOMAIN Specify the domain" + echo -e " -t, --tools-dir DIRECTORY Specify the tools directory path" + echo -e " -g, --github-tokens FILE Specify the path to the GitHub tokens file" + echo -e " -o, --output FILE (Optional )Specify the output file path (prints to stdout if not set)" + echo -e " -h, --help Display this help menu" + exit 1 +} + +# Main function +function github_repos() { + # Validate tools + mkdir -p .tmp >>"$LOGFILE" 2>&1 + for tool in unfurl enumerepo jq interlace gitleaks trufflehog; do + if ! 
command -v $tool &> /dev/null; then + echo -e "${bred}Error: ${tool} is not installed.${reset}" >&2 + exit 1 + fi + done + if [ -s "${github_tokens}" ]; then + GH_TOKEN=$(cat ${github_tokens} | head -1) + echo $domain | unfurl format %r > .tmp/company_name.txt + enumerepo -token-string "${GH_TOKEN}" -usernames .tmp/company_name.txt -o .tmp/company_repos.txt + [ -s ".tmp/company_repos.txt" ] && jq -r '.[].repos[]|.url' < .tmp/company_repos.txt > .tmp/company_repos_url.txt + mkdir -p .tmp/github_repos + mkdir -p .tmp/github + [ -s ".tmp/company_repos_url.txt" ] && interlace -tL .tmp/company_repos_url.txt -threads ${INTERLACE_THREADS} -c "git clone _target_ .tmp/github_repos/_cleantarget_" + [ -d ".tmp/github/" ] && ls .tmp/github_repos > .tmp/github_repos_folders.txt + [ -s ".tmp/github_repos_folders.txt" ] && interlace -tL .tmp/github_repos_folders.txt -threads ${INTERLACE_THREADS} -c "gitleaks detect --source .tmp/github_repos/_target_ --no-banner --no-color -r .tmp/github/gh_secret_cleantarget_.json" + [ -s ".tmp/company_repos_url.txt" ] && interlace -tL .tmp/company_repos_url.txt -threads ${INTERLACE_THREADS} -c "trufflehog git _target_ -j 2>&1 | jq -c > _output_/_cleantarget_" -o .tmp/github/ + if [ -d ".tmp/github/" ]; then + if [ -z "$output_file" ]; then + cat .tmp/github/* 2>/dev/null | jq -c | jq -r + else + cat .tmp/github/* 2>/dev/null | jq -c | jq -r > "$output_file" + fi + fi + else + printf "\n${bred} Required file ${GITHUB_TOKENS} not exists or empty${reset}\n" + fi +} + +# Default values +domain="" +github_tokens="" + +# Parse command-line arguments +while [[ "$#" -gt 0 ]]; do + case $1 in + -d|--domain) domain="$2"; shift ;; + -g|--github-tokens) github_tokens="$2"; shift ;; + -o|--output) output_file="$2"; shift ;; # Added this line to handle the -o flag + -h|--help) help_menu ;; + *) + if [ -z "$domain" ]; then + domain="$1" + else + echo -e "${bred}Unknown parameter passed: $1${reset}" >&2; help_menu + fi + ;; + esac + shift +done + +# Validate inputs +if [ -z "$domain" ] || [ -z "$github_tokens" ]; then + echo -e "${bred}Error: Missing required parameters.${reset}" >&2 + help_menu + exit 1 +fi + +# Execute the main function +github_repos \ No newline at end of file diff --git a/bin/rftw_osint_googledorks b/bin/rftw_osint_googledorks new file mode 100755 index 00000000..94e31137 --- /dev/null +++ b/bin/rftw_osint_googledorks @@ -0,0 +1,58 @@ +#!/bin/bash + +# Default config path +CONFIG_PATH="$RECONFTW_CFG" + +# Check if the config file exists +if [ -f "$CONFIG_PATH" ]; then + source "$CONFIG_PATH" +else + echo "Error: reconftw.cfg not found at $CONFIG_PATH!" 
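+    # The config location is taken from the RECONFTW_CFG environment variable, e.g. (path is only an example):
+    #   export RECONFTW_CFG="$HOME/Tools/reconftw/reconftw.cfg"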
+ exit 1 +fi + +# Help menu +function help_menu() { + echo -e "Usage: ./google_dorks_script.sh [OPTIONS]" + echo -e "Options:" + echo -e " -d, --domain DOMAIN Specify the domain" + echo -e " -o, --output FILE (Optional) Specify the output directory file" + echo -e " -h, --help Display this help menu" + exit 1 +} + +# Main function +function google_dorks() { + output_file_arg="" + if [ -n "$output" ]; then + output_file_arg="-o ${output}" + fi + + python3 "${tools}/dorks_hunter/dorks_hunter.py" -d "$domain" $output_file_arg || { + echo -e "${bred}Error: dorks_hunter.py failed.${reset}" >&2; + exit 1; + } +} + +# Parse command-line arguments +domain="" +output_dir="" +while [[ "$#" -gt 0 ]]; do + case $1 in + -d|--domain) domain="$2"; shift ;; + -o|--output) output="$2"; shift ;; + -h|--help) help_menu ;; + *) echo -e "${bred}Unknown parameter passed: $1${reset}" >&2; help_menu ;; + esac + shift +done + +# Validate inputs +if [ -z "$domain" ]; then + echo -e "${bred}Error: Missing required parameters.${reset}" >&2 + help_menu + exit 1 +fi + +# Execute the main function +google_dorks diff --git a/bin/rftw_osint_metadata b/bin/rftw_osint_metadata new file mode 100755 index 00000000..b9be0d0f --- /dev/null +++ b/bin/rftw_osint_metadata @@ -0,0 +1,40 @@ +#!/bin/bash + +# Load environment variables +if [ -f "reconftw.cfg" ]; then + source reconftw.cfg +else + echo "Error: reconftw.cfg not found!" + exit 1 +fi + +# Define global variables +called_fn_dir="/path/to/called_fn_dir" +LOGFILE="/path/to/logfile" +METAFINDER_LIMIT=100 +domain=$1 + +function gather_metadata() { + if { [ ! -f "$called_fn_dir/.metadata" ] || [ "$DIFF" = true ]; } && [ "$METADATA" = true ] && [ "$OSINT" = true ] && ! [[ $domain =~ ^[0-9]+\.[0-9]+\.[0-9]+\.[0-9] ]]; then + echo -e "${yellow}Scanning metadata in public files${reset}" + + metafinder -d "$domain" -l $METAFINDER_LIMIT -o osint -go -bi -ba &>> "$LOGFILE" || { echo "metafinder command failed"; exit 1; } + mv "osint/${domain}/"*".txt" "osint/" 2>>"$LOGFILE" + rm -rf "osint/${domain}" 2>>"$LOGFILE" + echo "Results are saved in $domain/osint/[software/authors/metadata_results].txt" + elif [ "$METADATA" = false ] || [ "$OSINT" = false ]; then + echo -e "${yellow}metadata skipped as defined in reconftw.cfg${reset}" + else + echo -e "${yellow}metadata is already processed or input is an IP. To force, delete\n $called_fn_dir/.metadata${reset}" + fi +} + +function main() { + if [ -z "$domain" ]; then + echo "Usage: $0 " + exit 1 + fi + gather_metadata +} + +main diff --git a/bin/rftw_osint_postleaks b/bin/rftw_osint_postleaks new file mode 100755 index 00000000..bb40b023 --- /dev/null +++ b/bin/rftw_osint_postleaks @@ -0,0 +1,51 @@ +#!/bin/bash + +# Load environment variables +if [ -f "reconftw.cfg" ]; then + source reconftw.cfg +else + echo "Error: reconftw.cfg not found!" + exit 1 +fi + +# Help menu function +function help_menu() { + echo -e "Usage: $0 [-d DOMAIN] or $0 [DOMAIN]" + echo -e "Options:" + echo -e " -d DOMAIN Specify the domain to scan for postman leaks." + echo -e " -h, --help Display this help menu" + exit 1 +} + +function postleaks() { + if { [ ! -f "$called_fn_dir/.postleaks" ] || [ "$DIFF" = true ]; } && [ "$POSTMAN_LEAKS" = true ] && [ "$OSINT" = true ] && ! 
[[ $domain =~ ^[0-9]+\.[0-9]+\.[0-9]+\.[0-9] ]]; then + echo -e "${yellow}Scanning for leaks in postman public directory${reset}" + + postleaksNg -k "$domain" > .tmp/postleaks.txt || { echo "postleaksNg command failed"; exit 1; } + echo "Results are saved in .tmp/postleaks.txt" + elif [ "$POSTMAN_LEAKS" = false ] || [ "$OSINT" = false ]; then + echo -e "${yellow}postleaks skipped as defined in reconftw.cfg${reset}" + else + echo -e "${yellow}postleaks is already processed or input is an IP. To force, delete\n $called_fn_dir/.postleaks${reset}" + fi +} + +# Main function to execute the postleaks function +function main() { + if [[ -z "$domain" ]]; then + help_menu + fi + postleaks +} + +# Parse command-line arguments +while [[ "$#" -gt 0 ]]; do + case $1 in + -d) domain="$2"; shift ;; + -h|--help) help_menu ;; + *) domain="$1" ;; + esac + shift +done + +main diff --git a/bin/rftw_osint_whois b/bin/rftw_osint_whois new file mode 100755 index 00000000..99a6e0a8 --- /dev/null +++ b/bin/rftw_osint_whois @@ -0,0 +1,68 @@ +#!/bin/bash + +# Load environment variables +if [ -f "reconftw.cfg" ]; then + source reconftw.cfg +else + echo "Error: reconftw.cfg not found!" + exit 1 +fi + +# Function to display help menu +help_menu() { + echo "Usage: $0 " + echo "Search for domain information." + echo "" + echo "Options:" + echo " -h, --help Display this help menu" + exit 0 +} + +# Parse command-line arguments +while [[ "$#" -gt 0 ]]; do + case $1 in + -h|--help) help_menu ;; + *) domain="$1" ;; + esac + shift +done + +# Input validation +if [ -z "$domain" ] || [[ "$domain" == "-h" ]] || [[ "$domain" == "--help" ]]; then + help_menu +fi + +# Function to start the process +start_func() { + echo "[*] $1" +} + +# Function to end the process +end_func() { + echo "[+] $1" +} + +# Main function to gather domain info +domain_info_func() { + if { [ ! -f "$called_fn_dir/.domain_info" ]; } && [ "$DOMAIN_INFO" = true ] && [ "$OSINT" = true ] && ! [[ $domain =~ ^[0-9]+\.[0-9]+\.[0-9]+\.[0-9] ]]; then + start_func "Searching domain info (whois, registrant name/email domains)" + + # Domain General Info + whois -H $domain > osint/domain_info_general.txt || { echo "whois command failed"; exit 1; } + + # Reverse Whois (if DEEP or REVERSE_WHOIS is true) + if [ "$DEEP" = true ] || [ "$REVERSE_WHOIS" = true ]; then + timeout -k 1m ${AMASS_INTEL_TIMEOUT}m amass intel -d ${domain} -whois -timeout $AMASS_INTEL_TIMEOUT -o osint/domain_info_reverse_whois.txt 2>>"$LOGFILE" &>/dev/null + fi + + # Azure Tenant Domains + curl -s "https://aadinternals.azurewebsites.net/api/tenantinfo?domainName=${domain}" -H "Origin: https://aadinternals.com" | jq -r .domains[].name > osint/azure_tenant_domains.txt + + end_func "Results are saved in $domain/osint/domain_info_[general/name/email/ip].txt" + else + echo "Skipped domain_info due to conditions or configuration." + fi +} + +# Call the main function +domain_info_func diff --git a/bin/rftw_sub_active b/bin/rftw_sub_active new file mode 100755 index 00000000..97e51a6b --- /dev/null +++ b/bin/rftw_sub_active @@ -0,0 +1,75 @@ +#!/bin/bash + +# Load environment variables +if [ -f "reconftw.cfg" ]; then + source reconftw.cfg +else + echo "Error: reconftw.cfg not found!" + exit 1 +fi + +# Help menu +display_help() { + echo "Usage: $0 [Options]" + echo + echo " -d, --domain Domain for which subdomain enumeration is required" + echo " -l, --log Log file location" + echo " -p, --public-limit PUREDNS Public Limit" + echo " ... Other options here ..." 
+ echo " -h, --help Display this help and exit" + echo + echo "Example: $0 -d example.com -l /path/to/log" + exit 1 +} + +# Input validation +if [ "$#" -eq 0 ]; then + display_help + exit 1 +fi + +# Parse input arguments +while [[ "$#" -gt 0 ]]; do + case $1 in + -d|--domain) domain="$2"; shift ;; + -l|--log) LOGFILE="$2"; shift ;; # Add this + -h|--help) display_help ;; + *) echo "Unknown parameter passed: $1"; exit 1 ;; + esac + shift +done + +# Check if domain has been provided +if [ -z "$domain" ]; then + echo "Error: Domain not specified." + exit 1 +fi + +sub_active() { + if [ ! -f "$called_fn_dir/.${FUNCNAME[0]}" ] || [ "$DIFF" = true ]; then + start_subfunc ${FUNCNAME[0]} "Running : Active Subdomain Enumeration" + find .tmp -type f -iname "*_subs.txt" -exec cat {} + | anew -q .tmp/subs_no_resolved.txt + [ -s "$outOfScope_file" ] && deleteOutScoped $outOfScope_file .tmp/subs_no_resolved.txt + if [ ! "$AXIOM" = true ]; then + resolvers_update_quick_local + [ -s ".tmp/subs_no_resolved.txt" ] && puredns resolve .tmp/subs_no_resolved.txt -w .tmp/subdomains_tmp.txt -r $resolvers --resolvers-trusted $resolvers_trusted -l $PUREDNS_PUBLIC_LIMIT --rate-limit-trusted $PUREDNS_TRUSTED_LIMIT --wildcard-tests $PUREDNS_WILDCARDTEST_LIMIT --wildcard-batch $PUREDNS_WILDCARDBATCH_LIMIT 2>>"$LOGFILE" >/dev/null + else + resolvers_update_quick_axiom + [ -s ".tmp/subs_no_resolved.txt" ] && axiom-scan .tmp/subs_no_resolved.txt -m puredns-resolve -r /home/op/lists/resolvers.txt --resolvers-trusted /home/op/lists/resolvers_trusted.txt --wildcard-tests $PUREDNS_WILDCARDTEST_LIMIT --wildcard-batch $PUREDNS_WILDCARDBATCH_LIMIT -o .tmp/subdomains_tmp.txt $AXIOM_EXTRA_ARGS 2>>"$LOGFILE" >/dev/null + fi + echo $domain | dnsx -retry 3 -silent -r $resolvers_trusted 2>>"$LOGFILE" | anew -q .tmp/subdomains_tmp.txt + if [ "$DEEP" = true ]; then + cat .tmp/subdomains_tmp.txt | tlsx -san -cn -silent -ro -c $TLSX_THREADS -p $TLS_PORTS | anew -q .tmp/subdomains_tmp.txt + else + cat .tmp/subdomains_tmp.txt | tlsx -san -cn -silent -ro -c $TLSX_THREADS | anew -q .tmp/subdomains_tmp.txt + fi + [[ "$INSCOPE" = true ]] && check_inscope .tmp/subdomains_tmp.txt 2>>"$LOGFILE" >/dev/null + NUMOFLINES=$(cat .tmp/subdomains_tmp.txt 2>>"$LOGFILE" | grep "\.$domain$\|^$domain$" | anew subdomains/subdomains.txt | sed '/^$/d' | wc -l) + end_subfunc "${NUMOFLINES} subs DNS resolved from passive" ${FUNCNAME[0]} + else + printf "${yellow} ${FUNCNAME[0]} is already processed, to force executing ${FUNCNAME[0]} delete\n $called_fn_dir/.${FUNCNAME[0]} ${reset}\n\n" + fi +} + +# Run the function +sub_active \ No newline at end of file diff --git a/bin/rftw_sub_analytics b/bin/rftw_sub_analytics new file mode 100755 index 00000000..f31f9025 --- /dev/null +++ b/bin/rftw_sub_analytics @@ -0,0 +1,96 @@ +#!/bin/bash + +# Load environment variables +if [ -f "reconftw.cfg" ]; then + source reconftw.cfg +else + echo "Error: reconftw.cfg not found!" + exit 1 +fi + +function usage() { + echo "Usage: $0 [-h] [-d DOMAIN] [-i INPUT_FILE] [-o OUTPUT_FILE]" + echo " -h Display this help message." + echo " -d DOMAIN Specify the target domain." + echo " -i INPUT_FILE Specify the input file." + echo " -o OUTPUT_FILE Specify the output file." + exit 1 +} + +# Validate input arguments +input_file="" +output_file="" +while getopts ":hd:i:o:" opt; do + case ${opt} in + h) + usage + ;; + d) + domain="$OPTARG" + ;; + i) + input_file="$OPTARG" + if [[ ! -f "$input_file" ]]; then + echo "Error: Input file does not exist." 
+ exit 1 + fi + ;; + o) + output_file="$OPTARG" + touch "$output_file" 2>/dev/null || { echo "Error: Cannot write to the specified output file."; exit 1; } + ;; + *) + echo "Invalid option: -$OPTARG" 1>&2 + usage + ;; + esac +done +shift $((OPTIND -1)) + +if [[ -z "$domain" ]]; then + echo "Error: No domain specified." + usage +fi + +function sub_analytics() { + if { [ ! -f "$called_fn_dir/.sub_analytics" ] || [ "$DIFF" = true ]; } && [ "$SUBANALYTICS" = true ]; then + echo "Running : Analytics Subdomain Enumeration" + + if [ -s ".tmp/probed_tmp_scrap.txt" ]; then + mkdir -p .tmp/output_analytics/ + analyticsrelationships -ch < .tmp/probed_tmp_scrap.txt >> .tmp/analytics_subs_tmp.txt 2>>"$LOGFILE" + + [ -s ".tmp/analytics_subs_tmp.txt" ] && cat .tmp/analytics_subs_tmp.txt | grep "\.$domain$\|^$domain$" | sed "s/|__ //" | anew -q .tmp/analytics_subs_clean.txt + + if [ ! "$AXIOM" = true ]; then + resolvers_update_quick_local + [ -s ".tmp/analytics_subs_clean.txt" ] && puredns resolve .tmp/analytics_subs_clean.txt -w .tmp/analytics_subs_resolved.txt -r $resolvers --resolvers-trusted $resolvers_trusted -l $PUREDNS_PUBLIC_LIMIT --rate-limit-trusted $PUREDNS_TRUSTED_LIMIT --wildcard-tests $PUREDNS_WILDCARDTEST_LIMIT --wildcard-batch $PUREDNS_WILDCARDBATCH_LIMIT 2>>"$LOGFILE" >/dev/null + else + resolvers_update_quick_axiom + [ -s ".tmp/analytics_subs_clean.txt" ] && axiom-scan .tmp/analytics_subs_clean.txt -m puredns-resolve -r /home/op/lists/resolvers.txt --resolvers-trusted /home/op/lists/resolvers_trusted.txt --wildcard-tests $PUREDNS_WILDCARDTEST_LIMIT --wildcard-batch $PUREDNS_WILDCARDBATCH_LIMIT -o .tmp/analytics_subs_resolved.txt $AXIOM_EXTRA_ARGS 2>>"$LOGFILE" >/dev/null + fi + fi + + [[ "$INSCOPE" = true ]] && check_inscope .tmp/analytics_subs_resolved.txt 2>>"$LOGFILE" >/dev/null + NUMOFLINES=$(cat .tmp/analytics_subs_resolved.txt 2>>"$LOGFILE" | anew subdomains/subdomains.txt | sed '/^$/d' | wc -l) + + echo "${NUMOFLINES} new subs (analytics relationship)" + else + if [ "$SUBANALYTICS" = false ]; then + echo -e "\n${yellow} sub_analytics skipped in this mode or defined in reconftw.cfg ${reset}\n" + else + echo -e "${yellow} sub_analytics is already processed, to force executing sub_analytics delete\n $called_fn_dir/.sub_analytics ${reset}\n\n" + fi + fi +} + +# At the end where you save the output, you can redirect the output to the specified file +if [[ ! -z "$output_file" ]]; then + # For example, if you were saving data like this: + # echo "${NUMOFLINES} new subs (analytics relationship)" + # Change it to: + echo "${NUMOFLINES} new subs (analytics relationship)" > "$output_file" +fi + +# Execute the function +sub_analytics \ No newline at end of file diff --git a/bin/rftw_sub_brute b/bin/rftw_sub_brute new file mode 100755 index 00000000..6e64bf42 --- /dev/null +++ b/bin/rftw_sub_brute @@ -0,0 +1,88 @@ +#!/bin/bash + +# Load environment variables from reconftw.cfg +if [ -f "reconftw.cfg" ]; then + source reconftw.cfg +else + echo "Error: reconftw.cfg not found!" 
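+    # This script expects reconftw.cfg (sourced from the current directory) to define, among others
+    # (the values below are only examples):
+    #   resolvers=~/Tools/resolvers.txt
+    #   resolvers_trusted=~/Tools/resolvers_trusted.txt
+    #   subs_wordlist=~/Tools/subdomains.txt
+    #   subs_wordlist_big=~/Tools/subdomains_big.txt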
+ exit 1 +fi + +# Help menu +display_help() { + echo "Usage: $0 [Options]" + echo "Options:" + echo " -h, --help Display this help and exit" + echo " -i, --input INPUT_FILE Specify the input file" + echo " -o, --output OUTPUT_FILE Specify the output file" + echo + echo "Ensure all required environment variables are set in reconftw.cfg" + exit 1 +} + +# Input validation for environment variables +if [ -z "$domain" ] || [ -z "$resolvers_trusted" ] || [ -z "$LOGFILE" ] || [ -z "$subs_wordlist" ] || [ -z "$subs_wordlist_big" ]; then + echo "Error: One or more required environment variables are missing in reconftw.cfg." + display_help +fi + +# Initialize input_file and output_file variables +input_file="" +output_file="" + +# Parse input arguments +while [[ "$#" -gt 0 ]]; do + case $1 in + -h|--help) display_help ;; + -i|--input) + shift; + input_file="$1"; + if [[ ! -f "$input_file" ]]; then + echo "Error: Input file does not exist." + exit 1 + fi + ;; + -o|--output) + shift; + output_file="$1"; + touch "$output_file" 2>/dev/null || { echo "Error: Cannot write to the specified output file."; exit 1; } + ;; + *) echo "Unknown parameter passed: $1"; display_help ;; + esac + shift +done + +# Main logic +sub_brute() { + if { [ ! -f "$called_fn_dir/.${FUNCNAME[0]}" ] || [ "$DIFF" = true ]; } && [ "$SUBBRUTE" = true ]; then + start_subfunc ${FUNCNAME[0]} "Running : Bruteforce Subdomain Enumeration" + if [ ! "$AXIOM" = true ]; then + resolvers_update_quick_local + if [ "$DEEP" = true ]; then + puredns bruteforce $subs_wordlist_big $domain -w .tmp/subs_brute.txt -r $resolvers --resolvers-trusted $resolvers_trusted -l $PUREDNS_PUBLIC_LIMIT --rate-limit-trusted $PUREDNS_TRUSTED_LIMIT --wildcard-tests $PUREDNS_WILDCARDTEST_LIMIT --wildcard-batch $PUREDNS_WILDCARDBATCH_LIMIT 2>>"$LOGFILE" >/dev/null + else + puredns bruteforce $subs_wordlist $domain -w .tmp/subs_brute.txt -r $resolvers --resolvers-trusted $resolvers_trusted -l $PUREDNS_PUBLIC_LIMIT --rate-limit-trusted $PUREDNS_TRUSTED_LIMIT --wildcard-tests $PUREDNS_WILDCARDTEST_LIMIT --wildcard-batch $PUREDNS_WILDCARDBATCH_LIMIT 2>>"$LOGFILE" >/dev/null + fi + [ -s ".tmp/subs_brute.txt" ] && puredns resolve .tmp/subs_brute.txt -w .tmp/subs_brute_valid.txt -r $resolvers --resolvers-trusted $resolvers_trusted -l $PUREDNS_PUBLIC_LIMIT --rate-limit-trusted $PUREDNS_TRUSTED_LIMIT --wildcard-tests $PUREDNS_WILDCARDTEST_LIMIT --wildcard-batch $PUREDNS_WILDCARDBATCH_LIMIT 2>>"$LOGFILE" >/dev/null + else + resolvers_update_quick_axiom + if [ "$DEEP" = true ]; then + axiom-scan $subs_wordlist_big -m puredns-single $domain -r /home/op/lists/resolvers.txt --resolvers-trusted /home/op/lists/resolvers_trusted.txt --wildcard-tests $PUREDNS_WILDCARDTEST_LIMIT --wildcard-batch $PUREDNS_WILDCARDBATCH_LIMIT -o .tmp/subs_brute.txt $AXIOM_EXTRA_ARGS 2>>"$LOGFILE" >/dev/null + else + axiom-scan $subs_wordlist -m puredns-single $domain -r /home/op/lists/resolvers.txt --resolvers-trusted /home/op/lists/resolvers_trusted.txt --wildcard-tests $PUREDNS_WILDCARDTEST_LIMIT --wildcard-batch $PUREDNS_WILDCARDBATCH_LIMIT -o .tmp/subs_brute.txt $AXIOM_EXTRA_ARGS 2>>"$LOGFILE" >/dev/null + fi + [ -s ".tmp/subs_brute.txt" ] && axiom-scan .tmp/subs_brute.txt -m puredns-resolve -r /home/op/lists/resolvers.txt --resolvers-trusted /home/op/lists/resolvers_trusted.txt --wildcard-tests $PUREDNS_WILDCARDTEST_LIMIT --wildcard-batch $PUREDNS_WILDCARDBATCH_LIMIT -o .tmp/subs_brute_valid.txt $AXIOM_EXTRA_ARGS 2>>"$LOGFILE" >/dev/null + fi + [[ "$INSCOPE" = true ]] && check_inscope 
.tmp/subs_brute_valid.txt 2>>"$LOGFILE" >/dev/null + NUMOFLINES=$(cat .tmp/subs_brute_valid.txt 2>>"$LOGFILE" | sed "s/*.//" | grep ".$domain$" | anew subdomains/subdomains.txt | sed '/^$/d' | wc -l) + end_subfunc "${NUMOFLINES} new subs (bruteforce)" ${FUNCNAME[0]} + else + if [ "$SUBBRUTE" = false ]; then + printf "\n${yellow} ${FUNCNAME[0]} skipped in this mode or defined in reconftw.cfg ${reset}\n" + else + printf "${yellow} ${FUNCNAME[0]} is already processed, to force executing ${FUNCNAME[0]} delete\n $called_fn_dir/.${FUNCNAME[0]} ${reset}\n\n" + fi + fi +} + +sub_brute \ No newline at end of file diff --git a/bin/rftw_sub_crt b/bin/rftw_sub_crt new file mode 100755 index 00000000..806e48dd --- /dev/null +++ b/bin/rftw_sub_crt @@ -0,0 +1,75 @@ +#!/bin/bash + +# Load environment variables from reconftw.cfg +if [ -f "reconftw.cfg" ]; then + source reconftw.cfg +else + echo "Error: reconftw.cfg not found!" + exit 1 +fi + +# Help Menu +function display_help() { + echo "Usage: $0 [OPTIONS]" + echo "Options:" + echo " -d, --domain Set the target domain." + echo " -l, --limit Set the crt limit (default 100)." + echo " -D, --diff Set DIFF to true." + echo " -s, --subcrt Run crt subdomain enumeration." + echo " -i, --inscope Check inscope." + echo " -o, --output Specify the output file." + echo " -h, --help Display this help menu." +} + +# Other functions +function start_subfunc() { + echo "[+] Starting $1: $2" +} + +function end_subfunc() { + echo "[+] Ending $1: $2" +} + +function check_inscope() { + echo "Checking inscope for $1" + # TODO: Add your inscope logic here +} +# Variables for input and output +output_file="" + +# Input validation and argument parsing +while [[ "$#" -gt 0 ]]; do + case $1 in + -d|--domain) domain="$2"; shift;; + -l|--limit) CTR_LIMIT="$2"; shift;; + -D|--diff) DIFF=true;; + -s|--subcrt) SUBCRT=true;; + -i|--inscope) INSCOPE=true;; + -o|--output) + shift; + output_file="$1"; + touch "$output_file" 2>/dev/null || { echo "Error: Cannot write to the specified output file."; exit 1; } + ;; + -h|--help) display_help; exit 0;; + *) echo "Unknown parameter passed: $1"; display_help; exit 1;; + esac + shift +done + +# Main Functionality +script_name=$(basename $0) +if [[ ! -f "$called_fn_dir/.$script_name" ]] || [[ "$DIFF" = true ]] && [[ "$SUBCRT" = true ]]; then + start_subfunc "$script_name" "Running : Crtsh Subdomain Enumeration" + + crt -s -json -l ${CTR_LIMIT} $domain 2>>"$LOGFILE" | jq -r '.[].subdomain' 2>>"$LOGFILE" | sed -e "s/^\\*\\.//" | anew -q .tmp/crtsh_subs_tmp.txt 2>>"$LOGFILE" >/dev/null + [[ "$INSCOPE" = true ]] && check_inscope .tmp/crtsh_subs_tmp.txt 2>>"$LOGFILE" >/dev/null + NUMOFLINES=$(cat .tmp/crtsh_subs_tmp.txt 2>>"$LOGFILE" | sed 's/\*.//g' | anew .tmp/crtsh_subs.txt | sed '/^$/d' | wc -l) + end_subfunc "${NUMOFLINES} new subs (cert transparency)" "$script_name" + +else + if [[ "$SUBCRT" = false ]]; then + echo "${script_name} skipped in this mode or defined in reconftw.cfg" + else + echo "${script_name} is already processed, to force executing ${script_name} delete $called_fn_dir/.${script_name}" + fi +fi \ No newline at end of file diff --git a/bin/rftw_sub_dns b/bin/rftw_sub_dns new file mode 100755 index 00000000..51483efb --- /dev/null +++ b/bin/rftw_sub_dns @@ -0,0 +1,85 @@ +#!/bin/bash + +# Load environment variables from reconftw.cfg +if [ -f "reconftw.cfg" ]; then + source reconftw.cfg +else + echo "Error: reconftw.cfg not found!" 
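+    # For reference, each line of subdomains/subdomains_dnsregs.json written by dnsx -json is a JSON object
+    # whose fields match the jq filters used below; roughly (illustrative values):
+    #   {"host":"www.example.com","a":["93.184.216.34"],"cname":["edge.example.net"]}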
+ exit 1 +fi + +# Color definitions for output (optional, you can adjust or remove) +yellow='\033[1;33m' +reset='\033[0m' + +# Help menu +display_help() { + echo "Usage: $0 [OPTIONS]" + echo "Options:" + echo " -d, --domain Set the target domain." + echo " -h, --help Display this help and exit." + echo " -f, --input-file Specify the input file with a list of domains." + echo " -o, --output-file Specify the output file for saving results." + echo + echo "Ensure all required environment variables are set in reconftw.cfg" +} + +# Input validation +if [ -z "$domain" ] || [ -z "$resolvers_trusted" ] || [ -z "$LOGFILE" ]; then + echo "Error: One or more required environment variables are missing in reconftw.cfg." + display_help + exit 1 +fi + +# Default values for input and output files +INPUTFILE="" +OUTPUTFILE="subdomains/subdomains.txt" + +# Parse input arguments +while [[ "$#" -gt 0 ]]; do + case $1 in + -d|--domain) domain="$2"; shift;; + -f|--input-file) INPUTFILE="$2"; shift;; + -o|--output-file) OUTPUTFILE="$2"; shift;; + -h|--help) display_help; exit 0;; + *) echo "Unknown parameter passed: $1"; display_help; exit 1;; + esac + shift +done + +# If an input file is provided, validate it +if [ -n "$INPUTFILE" ] && [ ! -f "$INPUTFILE" ]; then + echo "Error: Input file $INPUTFILE not found!" + exit 1 +fi + +# Main logic (Original function content) +sub_dns() { + if [ ! -f "$called_fn_dir/.${FUNCNAME[0]}" ] || [ "$DIFF" = true ]; then + start_subfunc ${FUNCNAME[0]} "Running : DNS Subdomain Enumeration and PTR search" + if [ ! "$AXIOM" = true ]; then + [ -s "subdomains/subdomains.txt" ] && cat subdomains/subdomains.txt | dnsx -r $resolvers_trusted -a -aaaa -cname -ns -ptr -mx -soa -silent -retry 3 -json -o subdomains/subdomains_dnsregs.json 2>>"$LOGFILE" >/dev/null + [ -s "subdomains/subdomains_dnsregs.json" ] && cat subdomains/subdomains_dnsregs.json | jq -r 'try .a[], try .aaaa[], try .cname[], try .ns[], try .ptr[], try .mx[], try .soa[]' 2>/dev/null | grep ".$domain$" | anew -q .tmp/subdomains_dns.txt + [ -s "subdomains/subdomains_dnsregs.json" ] && cat subdomains/subdomains_dnsregs.json | jq -r 'try .a[]' | sort -u | hakip2host | cut -d' ' -f 3 | unfurl -u domains | sed -e 's/*\.//' -e 's/\.$//' -e '/\./!d' | grep ".$domain$" | anew -q .tmp/subdomains_dns.txt + [ -s "subdomains/subdomains_dnsregs.json" ] && cat subdomains/subdomains_dnsregs.json | jq -r 'try "\(.host) - \(.a[])"' 2>/dev/null | sort -u -k2 | anew -q subdomains/subdomains_ips.txt + resolvers_update_quick_local + [ -s ".tmp/subdomains_dns.txt" ] && puredns resolve .tmp/subdomains_dns.txt -w .tmp/subdomains_dns_resolved.txt -r $resolvers --resolvers-trusted $resolvers_trusted -l $PUREDNS_PUBLIC_LIMIT --rate-limit-trusted $PUREDNS_TRUSTED_LIMIT --wildcard-tests $PUREDNS_WILDCARDTEST_LIMIT --wildcard-batch $PUREDNS_WILDCARDBATCH_LIMIT 2>>"$LOGFILE" >/dev/null + else + [ -s "subdomains/subdomains.txt" ] && axiom-scan subdomains/subdomains.txt -m dnsx -retry 3 -a -aaaa -cname -ns -ptr -mx -soa -json -o subdomains/subdomains_dnsregs.json $AXIOM_EXTRA_ARGS 2>>"$LOGFILE" >/dev/null + [ -s "subdomains/subdomains_dnsregs.json" ] && cat subdomains/subdomains_dnsregs.json | jq -r 'try .a[]' | sort -u | anew -q .tmp/subdomains_dns_a_records.txt + [ -s "subdomains/subdomains_dnsregs.json" ] && cat subdomains/subdomains_dnsregs.json | jq -r 'try .a[]' | sort -u | hakip2host | cut -d' ' -f 3 | unfurl -u domains | sed -e 's/*\.//' -e 's/\.$//' -e '/\./!d' | grep ".$domain$" | anew -q .tmp/subdomains_dns.txt + [ -s 
"subdomains/subdomains_dnsregs.json" ] && cat subdomains/subdomains_dnsregs.json | jq -r 'try .a[], try .aaaa[], try .cname[], try .ns[], try .ptr[], try .mx[], try .soa[]' 2>/dev/null | grep ".$domain$" | anew -q .tmp/subdomains_dns.txt + [ -s "subdomains/subdomains_dnsregs.json" ] && cat subdomains/subdomains_dnsregs.json | jq -r 'try "\(.host) - \(.a[])"' 2>/dev/null | sort -u -k2 | anew -q subdomains/subdomains_ips.txt + resolvers_update_quick_axiom + [ -s ".tmp/subdomains_dns.txt" ] && axiom-scan .tmp/subdomains_dns.txt -m puredns-resolve -r /home/op/lists/resolvers.txt --resolvers-trusted /home/op/lists/resolvers_trusted.txt --wildcard-tests $PUREDNS_WILDCARDTEST_LIMIT --wildcard-batch $PUREDNS_WILDCARDBATCH_LIMIT -o .tmp/subdomains_dns_resolved.txt $AXIOM_EXTRA_ARGS 2>>"$LOGFILE" >/dev/null + fi + [[ "$INSCOPE" = true ]] && check_inscope .tmp/subdomains_dns_resolved.txt 2>>"$LOGFILE" >/dev/null + NUMOFLINES=$(cat .tmp/subdomains_dns_resolved.txt 2>>"$LOGFILE" | grep "\.$domain$\|^$domain$" | anew subdomains/subdomains.txt | sed '/^$/d' | wc -l) + end_subfunc "${NUMOFLINES} new subs (dns resolution)" ${FUNCNAME[0]} + else + printf "${yellow} ${FUNCNAME[0]} is already processed, to force executing ${FUNCNAME[0]} delete\n $called_fn_dir/.${FUNCNAME[0]} ${reset}\n\n" + fi +} + +# Call the main logic function +sub_dns diff --git a/bin/rftw_sub_full b/bin/rftw_sub_full new file mode 100755 index 00000000..a1e7c442 --- /dev/null +++ b/bin/rftw_sub_full @@ -0,0 +1,130 @@ +#!/bin/bash + +# Load environment variables from reconftw.cfg +if [ -f "reconftw.cfg" ]; then + source reconftw.cfg +else + echo "Error: reconftw.cfg not found!" + exit 1 +fi + +# Function to display help menu +help_menu() { + echo "Usage: $0 [OPTIONS]" + echo "Options:" + echo " -d, --domain Set the target domain." + echo " -h, --help Display this help and exit." + echo " -f, --input-file Specify the input file with a list of domains." + echo " -o, --output-file Specify the output file for saving results." + echo "Ensure all required environment variables are set in reconftw.cfg" + exit 0 +} + +# Parse input arguments +while [[ "$#" -gt 0 ]]; do + case $1 in + -d|--domain) domain="$2"; shift;; + -f|--input-file) INPUTFILE="$2"; shift;; + -o|--output-file) OUTPUTFILE="$2"; shift;; + -h|--help) help_menu;; + *) echo "Unknown parameter passed: $1"; help_menu; exit 1;; + esac + shift +done + +# If an input file is provided, validate it +if [ -n "$INPUTFILE" ] && [ ! -f "$INPUTFILE" ]; then + echo "Error: Input file $INPUTFILE not found!" + exit 1 +fi + +# Input validation +if [ -z "$domain" ] || [[ "$domain" == "-h" ]] || [[ "$domain" == "--help" ]]; then + help_menu +fi + +# Function to start the process +start_func() { + echo "[*] $1" +} + +# Function to end the process +end_func() { + echo "[+] $1" +} + +# Function to notify the user +notification() { + echo "[!] $1" +} + +# Main function to gather subdomain info +subdomains_full_func() { + echo "Starting subdomain enumeration for $domain" + + NUMOFLINES_subs="0" + NUMOFLINES_probed="0" + printf "${bgreen}#######################################################################\n\n" + ! [[ $domain =~ ^[0-9]+\.[0-9]+\.[0-9]+\.[0-9] ]] && printf "${bblue} Subdomain Enumeration $domain\n\n" + [[ $domain =~ ^[0-9]+\.[0-9]+\.[0-9]+\.[0-9] ]] && printf "${bblue} Scanning IP $domain\n\n" + [ -s "subdomains/subdomains.txt" ] && cp subdomains/subdomains.txt .tmp/subdomains_old.txt + [ -s "webs/webs.txt" ] && cp webs/webs.txt .tmp/probed_old.txt + + if ( [ ! 
-f "$called_fn_dir/.sub_active" ] || [ ! -f "$called_fn_dir/.sub_brute" ] || [ ! -f "$called_fn_dir/.sub_permut" ] || [ ! -f "$called_fn_dir/.sub_recursive_brute" ] ) || [ "$DIFF" = true ] ; then + resolvers_update + fi + + [ -s "${inScope_file}" ] && cat ${inScope_file} | anew -q subdomains/subdomains.txt + + if ! [[ $domain =~ ^[0-9]+\.[0-9]+\.[0-9]+\.[0-9] ]] && [ "$SUBDOMAINS_GENERAL" = true ]; then + rftw_sub_passive + rftw_sub_crt + rftw_sub_active + rftw_sub_noerror + rftw_sub_brute + rftw_sub_permut + rftw_sub_regex_permut + rftw_sub_recursive_passive + rftw_sub_recursive_brute + rftw_sub_dns + rftw_sub_scraping + rftw_sub_analytics + else + notification "IP/CIDR detected, subdomains search skipped" + echo $domain | anew -q subdomains/subdomains.txt + fi + + webprobe_simple + if [ -s "subdomains/subdomains.txt" ]; then + [ -s "$outOfScope_file" ] && deleteOutScoped $outOfScope_file subdomains/subdomains.txt + NUMOFLINES_subs=$(cat subdomains/subdomains.txt 2>>"$LOGFILE" | anew .tmp/subdomains_old.txt | sed '/^$/d' | wc -l) + fi + if [ -s "webs/webs.txt" ]; then + [ -s "$outOfScope_file" ] && deleteOutScoped $outOfScope_file webs/webs.txt + NUMOFLINES_probed=$(cat webs/webs.txt 2>>"$LOGFILE" | anew .tmp/probed_old.txt | sed '/^$/d' | wc -l) + fi + + if [ -n "$OUTPUTFILE" ]; then + cat subdomains/subdomains.txt > "$OUTPUTFILE" + cat webs/webs.txt >> "$OUTPUTFILE" + fi + + printf "${bblue}\n Total subdomains: ${reset}\n\n" + notification "- ${NUMOFLINES_subs} alive" good + [ -s "subdomains/subdomains.txt" ] && cat subdomains/subdomains.txt | sort + notification "- ${NUMOFLINES_probed} new web probed" good + [ -s "webs/webs.txt" ] && cat webs/webs.txt | sort + notification "Subdomain Enumeration Finished" good + printf "${bblue} Results are saved in $domain/subdomains/subdomains.txt and webs/webs.txt${reset}\n" + printf "${bgreen}#######################################################################\n\n" +} + +# Call the main function +# Logic to handle multiple domains if INPUTFILE is provided +if [ -n "$INPUTFILE" ]; then + while read -r domain; do + subdomains_full_func + done < "$INPUTFILE" +else + subdomains_full_func +fi diff --git a/bin/rftw_sub_noerror b/bin/rftw_sub_noerror new file mode 100755 index 00000000..c98f3927 --- /dev/null +++ b/bin/rftw_sub_noerror @@ -0,0 +1,74 @@ +#!/bin/bash + +# Load environment variables from reconftw.cfg +if [ -f "reconftw.cfg" ]; then + source reconftw.cfg +else + echo "Error: reconftw.cfg not found!" + exit 1 +fi + +# Help menu +display_help() { + echo "Usage: $0 [Options]" + echo + echo " -d, --domain Domain to check" + echo " -i, --input-file Input file with domains" + echo " -o, --output-file File to save results" + echo " -h, --help Display this help and exit" + echo + echo "Example: $0 -d example.com" + exit 1 +} + +# Parse input arguments +while [[ "$#" -gt 0 ]]; do + case $1 in + -d|--domain) domain="$2"; shift ;; + -i|--input-file) INPUTFILE="$2"; shift ;; + -o|--output-file) OUTPUTFILE="$2"; shift ;; + -h|--help) display_help ;; + *) echo "Unknown parameter passed: $1"; display_help ;; + esac + shift +done + +# Input validation +if [ -z "$domain" ] && [ -z "$INPUTFILE" ]; then + echo "Error: Either domain or input file must be specified." + display_help +fi + +sub_noerror() { + if { [ ! 
-f "$called_fn_dir/.${FUNCNAME[0]}" ] || [ "$DIFF" = true ]; } && [ "$SUBNOERROR" = true ]; then + start_subfunc ${FUNCNAME[0]} "Running : Checking NOERROR DNS response" + if [[ $(echo "${RANDOM}thistotallynotexist${RANDOM}.$domain" | dnsx -r $resolvers -rcode noerror,nxdomain -retry 3 -silent | cut -d' ' -f2) == "[NXDOMAIN]" ]]; then + resolvers_update_quick_local + if [ "$DEEP" = true ]; then + dnsx -d $domain -r $resolvers -silent -rcode noerror -w $subs_wordlist_big | cut -d' ' -f1 | anew -q .tmp/subs_noerror.txt 2>>"$LOGFILE" >/dev/null + else + dnsx -d $domain -r $resolvers -silent -rcode noerror -w $subs_wordlist | cut -d' ' -f1 | anew -q .tmp/subs_noerror.txt 2>>"$LOGFILE" >/dev/null + fi + [[ "$INSCOPE" = true ]] && check_inscope .tmp/subs_noerror.txt 2>>"$LOGFILE" >/dev/null + NUMOFLINES=$(cat .tmp/subs_noerror.txt 2>>"$LOGFILE" | sed "s/*.//" | grep ".$domain$" | anew subdomains/subdomains.txt | sed '/^$/d' | wc -l) + end_subfunc "${NUMOFLINES} new subs (DNS noerror)" ${FUNCNAME[0]} + else + printf "\n${yellow} Detected DNSSEC black lies, skipping this technique ${reset}\n" + fi + else + if [ "$SUBBRUTE" = false ]; then + printf "\n${yellow} ${FUNCNAME[0]} skipped in this mode or defined in reconftw.cfg ${reset}\n" + else + printf "${yellow} ${FUNCNAME[0]} is already processed, to force executing ${FUNCNAME[0]} delete\n $called_fn_dir/.${FUNCNAME[0]} ${reset}\n\n" + fi + fi +} + +# Logic to handle multiple domains if INPUTFILE is provided +if [ -n "$INPUTFILE" ]; then + while read -r domain; do + sub_noerror + done < "$INPUTFILE" +else + sub_noerror +fi \ No newline at end of file diff --git a/bin/rftw_sub_passive b/bin/rftw_sub_passive new file mode 100755 index 00000000..113d2082 --- /dev/null +++ b/bin/rftw_sub_passive @@ -0,0 +1,79 @@ +#!/bin/bash + +# Variables +AMASS_ENUM_TIMEOUT=10m +DIFF=false +SUBPASSIVE=false +RUNAMASS=false +RUNSUBFINDER=false +DEEP=false +INSCOPE=false + +# Help menu +function display_help() { + echo "Usage: $0 [OPTIONS]" + echo "Options:" + echo " -d, --domain Set the target domain." + echo " -a, --amass Run amass." + echo " -s, --subfinder Run subfinder." + echo " -g, --github-tokens Specify GitHub tokens file." + echo " -l, --gitlab-tokens Specify GitLab tokens file." + echo " -D, --deep Run in deep mode." + echo " -i, --inscope Check inscope." + echo " -h, --help Display this help menu." +} + +function start_subfunc() { + echo "[+] Starting $1: $2" +} + +function end_subfunc() { + echo "[+] Ending $1: $2" +} + +function check_inscope() { + echo "Checking inscope for $1" + # TODO: Add your inscope logic here +} + +# Input validation and argument parsing +while [[ "$#" -gt 0 ]]; do + case $1 in + -d|--domain) domain="$2"; shift;; + -a|--amass) RUNAMASS=true;; + -s|--subfinder) RUNSUBFINDER=true;; + -g|--github-tokens) GITHUB_TOKENS="$2"; shift;; + -l|--gitlab-tokens) GITLAB_TOKENS="$2"; shift;; + -D|--deep) DEEP=true;; + -i|--inscope) INSCOPE=true;; + -h|--help) display_help; exit 0;; + *) echo "Unknown parameter passed: $1"; display_help; exit 1;; + esac + shift +done + +# Main functionality +if [[ ! 
-f "$called_fn_dir/.$0" ]] || [[ "$DIFF" = true ]] && [[ "$SUBPASSIVE" = true ]]; then + start_subfunc $0 "Running : Passive Subdomain Enumeration" + + [[ $RUNAMASS == true ]] && timeout -k 1m ${AMASS_ENUM_TIMEOUT} amass enum -passive -d $domain -config $AMASS_CONFIG -timeout $AMASS_ENUM_TIMEOUT -json .tmp/amass_json.json 2>>"$LOGFILE" &>/dev/null + [ -s ".tmp/amass_json.json" ] && cat .tmp/amass_json.json | jq -r '.name' | anew -q .tmp/amass_psub.txt + [[ $RUNSUBFINDER == true ]] && subfinder -all -d "$domain" -silent -o .tmp/subfinder_psub.txt 2>>"$LOGFILE" >/dev/null + if [ -s "${GITHUB_TOKENS}" ]; then + if [ "$DEEP" = true ]; then + github-subdomains -d $domain -t $GITHUB_TOKENS -o .tmp/github_subdomains_psub.txt 2>>"$LOGFILE" >/dev/null + else + github-subdomains -d $domain -k -q -t $GITHUB_TOKENS -o .tmp/github_subdomains_psub.txt 2>>"$LOGFILE" >/dev/null + fi + fi + if [ -s "${GITLAB_TOKENS}" ]; then + gitlab-subdomains -d $domain -t $GITLAB_TOKENS > .tmp/gitlab_subdomains_psub.txt 2>>"$LOGFILE" >/dev/null + fi + +else + if [[ "$SUBPASSIVE" = false ]]; then + echo "${FUNCNAME[0]} skipped in this mode or defined in reconftw.cfg" + else + echo "${FUNCNAME[0]} is already processed, to force executing ${FUNCNAME[0]} delete $called_fn_dir/.${FUNCNAME[0]}" + fi +fi diff --git a/bin/rftw_sub_permut b/bin/rftw_sub_permut new file mode 100755 index 00000000..e2077047 --- /dev/null +++ b/bin/rftw_sub_permut @@ -0,0 +1,92 @@ +#!/bin/bash + +# Configuration file loading +CONFIG_FILE="reconftw.cfg" +if [[ -f "$CONFIG_FILE" ]]; then + source "$CONFIG_FILE" +else + echo "Configuration file '$CONFIG_FILE' not found. Exiting." + exit 1 +fi + +# Helper function to display usage/help menu +usage() { + echo "Usage: $0 [OPTIONS]" + echo + echo "Options:" + echo " -h, --help Display this help and exit." + echo + echo "Make sure to set up 'reconftw.cfg' with required environment variables." + exit 1 +} + +# Input validation +if [[ "$#" -eq 1 && ("$1" == "-h" || "$1" == "--help") ]]; then + usage +fi + +# Check for mandatory variables +if [[ -z "$SUBPERMUTE" || -z "$called_fn_dir" || -z "$DIFF" ]]; then + echo "Mandatory variables not set in the configuration file. Exiting." + exit 1 +fi + +sub_permut() { + if { [ ! 
-f "$called_fn_dir/.${FUNCNAME[0]}" ] || [ "$DIFF" = true ]; } && [ "$SUBPERMUTE" = true ]; then + start_subfunc ${FUNCNAME[0]} "Running : Permutations Subdomain Enumeration" + if [ "$DEEP" = true ] || [ "$(cat subdomains/subdomains.txt | wc -l)" -le $DEEP_LIMIT ] ; then + if [ "$PERMUTATIONS_OPTION" = "gotator" ] ; then + [ -s "subdomains/subdomains.txt" ] && gotator -sub subdomains/subdomains.txt -perm $tools/permutations_list.txt $GOTATOR_FLAGS -silent 2>>"$LOGFILE" | head -c $PERMUTATIONS_LIMIT > .tmp/gotator1.txt + else + [ -s "subdomains/subdomains.txt" ] && ripgen -d subdomains/subdomains.txt -w $tools/permutations_list.txt 2>>"$LOGFILE" | head -c $PERMUTATIONS_LIMIT > .tmp/gotator1.txt + fi + elif [ "$(cat .tmp/subs_no_resolved.txt | wc -l)" -le $DEEP_LIMIT2 ]; then + if [ "$PERMUTATIONS_OPTION" = "gotator" ] ; then + [ -s ".tmp/subs_no_resolved.txt" ] && gotator -sub .tmp/subs_no_resolved.txt -perm $tools/permutations_list.txt $GOTATOR_FLAGS -silent 2>>"$LOGFILE" | head -c $PERMUTATIONS_LIMIT > .tmp/gotator1.txt + else + [ -s ".tmp/subs_no_resolved.txt" ] && ripgen -d .tmp/subs_no_resolved.txt -w $tools/permutations_list.txt 2>>"$LOGFILE" | head -c $PERMUTATIONS_LIMIT > .tmp/gotator1.txt + fi + else + end_subfunc "Skipping Permutations: Too Many Subdomains" ${FUNCNAME[0]} + return 1 + fi + if [ ! "$AXIOM" = true ]; then + resolvers_update_quick_local + [ -s ".tmp/gotator1.txt" ] && puredns resolve .tmp/gotator1.txt -w .tmp/permute1.txt -r $resolvers --resolvers-trusted $resolvers_trusted -l $PUREDNS_PUBLIC_LIMIT --rate-limit-trusted $PUREDNS_TRUSTED_LIMIT --wildcard-tests $PUREDNS_WILDCARDTEST_LIMIT --wildcard-batch $PUREDNS_WILDCARDBATCH_LIMIT 2>>"$LOGFILE" >/dev/null + else + resolvers_update_quick_axiom + [ -s ".tmp/gotator1.txt" ] && axiom-scan .tmp/gotator1.txt -m puredns-resolve -r /home/op/lists/resolvers.txt --resolvers-trusted /home/op/lists/resolvers_trusted.txt --wildcard-tests $PUREDNS_WILDCARDTEST_LIMIT --wildcard-batch $PUREDNS_WILDCARDBATCH_LIMIT -o .tmp/permute1.txt $AXIOM_EXTRA_ARGS 2>>"$LOGFILE" >/dev/null + fi + + if [ "$PERMUTATIONS_OPTION" = "gotator" ] ; then + [ -s ".tmp/permute1.txt" ] && gotator -sub .tmp/permute1.txt -perm $tools/permutations_list.txt $GOTATOR_FLAGS -silent 2>>"$LOGFILE" | head -c $PERMUTATIONS_LIMIT > .tmp/gotator2.txt + else + [ -s ".tmp/permute1.txt" ] && ripgen -d .tmp/permute1.txt -w $tools/permutations_list.txt 2>>"$LOGFILE" | head -c $PERMUTATIONS_LIMIT > .tmp/gotator2.txt + fi + + if [ ! 
"$AXIOM" = true ]; then + [ -s ".tmp/gotator2.txt" ] && puredns resolve .tmp/gotator2.txt -w .tmp/permute2.txt -r $resolvers --resolvers-trusted $resolvers_trusted -l $PUREDNS_PUBLIC_LIMIT --rate-limit-trusted $PUREDNS_TRUSTED_LIMIT --wildcard-tests $PUREDNS_WILDCARDTEST_LIMIT --wildcard-batch $PUREDNS_WILDCARDBATCH_LIMIT 2>>"$LOGFILE" >/dev/null + else + [ -s ".tmp/gotator2.txt" ] && axiom-scan .tmp/gotator2.txt -m puredns-resolve -r /home/op/lists/resolvers.txt --resolvers-trusted /home/op/lists/resolvers_trusted.txt --wildcard-tests $PUREDNS_WILDCARDTEST_LIMIT --wildcard-batch $PUREDNS_WILDCARDBATCH_LIMIT -o .tmp/permute2.txt $AXIOM_EXTRA_ARGS 2>>"$LOGFILE" >/dev/null + fi + cat .tmp/permute1.txt .tmp/permute2.txt 2>>"$LOGFILE" | anew -q .tmp/permute_subs.txt + + if [ -s ".tmp/permute_subs.txt" ]; then + [ -s "$outOfScope_file" ] && deleteOutScoped $outOfScope_file .tmp/permute_subs.txt + [[ "$INSCOPE" = true ]] && check_inscope .tmp/permute_subs.txt 2>>"$LOGFILE" >/dev/null + NUMOFLINES=$(cat .tmp/permute_subs.txt 2>>"$LOGFILE" | grep ".$domain$" | anew subdomains/subdomains.txt | sed '/^$/d' | wc -l) + else + NUMOFLINES=0 + fi + end_subfunc "${NUMOFLINES} new subs (permutations)" ${FUNCNAME[0]} + else + if [ "$SUBPERMUTE" = false ]; then + printf "\n${yellow} ${FUNCNAME[0]} skipped in this mode or defined in reconftw.cfg ${reset}\n" + else + printf "${yellow} ${FUNCNAME[0]} is already processed, to force executing ${FUNCNAME[0]} delete\n $called_fn_dir/.${FUNCNAME[0]} ${reset}\n\n" + fi + fi +} + +# Execute the main function +sub_permut \ No newline at end of file diff --git a/bin/rftw_sub_recbrute b/bin/rftw_sub_recbrute new file mode 100755 index 00000000..ff79892b --- /dev/null +++ b/bin/rftw_sub_recbrute @@ -0,0 +1,130 @@ +#!/bin/bash + +# Load environment variables from reconftw.cfg +# Load environment variables +if [ -f "reconftw.cfg" ]; then + source reconftw.cfg +else + echo "Error: reconftw.cfg not found!" + exit 1 +fi + +function print_help() { + echo "Usage: $0 [-d ] [-h]" + echo "" + echo "Options:" + echo " -d Specify target domain" + echo " -h Display this help message" + echo "" +} + +function start_subfunc() { + # This function is just a placeholder as the original function details were not provided. + echo "Starting function $1 with message: $2" +} + +function end_subfunc() { + # This function is just a placeholder as the original function details were not provided. + echo "Ending function $2 with result: $1" +} + +function check_inscope() { + # Placeholder for check_inscope function + echo "Checking inscope for file $1" +} + +function resolvers_update_quick_local() { + # Placeholder for resolvers_update_quick_local function + echo "Updating local resolvers" +} + +function resolvers_update_quick_axiom() { + # Placeholder for resolvers_update_quick_axiom function + echo "Updating axiom resolvers" +} + +domain="" +while getopts "d:h" option; do + case "${option}" in + d) domain=${OPTARG};; + h) print_help; exit 0;; + *) print_help; exit 1;; + esac +done + +if [[ -z $domain ]]; then + echo "Error: Domain is required!" + print_help + exit 1 +fi + +# Main function +sub_recursive_brute() { + if { [ ! -f "$called_fn_dir/.$FUNCNAME" ] || [ "$DIFF" = true ]; } && [ "$SUB_RECURSIVE_BRUTE" = true ] && [ -s "subdomains/subdomains.txt" ]; then + start_subfunc ${FUNCNAME[0]} "Running : Subdomains recursive search active" + if [[ $(cat subdomains/subdomains.txt | wc -l) -le $DEEP_LIMIT ]] ; then + [ ! 
-s ".tmp/subdomains_recurs_top.txt" ] && dsieve -if subdomains/subdomains.txt -f 3 -top $DEEP_RECURSIVE_PASSIVE > .tmp/subdomains_recurs_top.txt + ripgen -d .tmp/subdomains_recurs_top.txt -w $subs_wordlist > .tmp/brute_recursive_wordlist.txt + if [ ! "$AXIOM" = true ]; then + resolvers_update_quick_local + [ -s ".tmp/brute_recursive_wordlist.txt" ] && puredns resolve .tmp/brute_recursive_wordlist.txt -r $resolvers --resolvers-trusted $resolvers_trusted -l $PUREDNS_PUBLIC_LIMIT --rate-limit-trusted $PUREDNS_TRUSTED_LIMIT --wildcard-tests $PUREDNS_WILDCARDTEST_LIMIT --wildcard-batch $PUREDNS_WILDCARDBATCH_LIMIT -w .tmp/brute_recursive_result.txt 2>>"$LOGFILE" >/dev/null + else + resolvers_update_quick_axiom + [ -s ".tmp/brute_recursive_wordlist.txt" ] && axiom-scan .tmp/brute_recursive_wordlist.txt -m puredns-resolve -r /home/op/lists/resolvers.txt --resolvers-trusted /home/op/lists/resolvers_trusted.txt --wildcard-tests $PUREDNS_WILDCARDTEST_LIMIT --wildcard-batch $PUREDNS_WILDCARDBATCH_LIMIT -o .tmp/brute_recursive_result.txt $AXIOM_EXTRA_ARGS 2>>"$LOGFILE" >/dev/null + fi + [ -s ".tmp/brute_recursive_result.txt" ] && cat .tmp/brute_recursive_result.txt | anew -q .tmp/brute_recursive.txt + + if [ "$PERMUTATIONS_OPTION" = "gotator" ] ; then + [ -s ".tmp/brute_recursive.txt" ] && gotator -sub .tmp/brute_recursive.txt -perm $tools/permutations_list.txt $GOTATOR_FLAGS -silent 2>>"$LOGFILE" | head -c $PERMUTATIONS_LIMIT > .tmp/gotator1_recursive.txt + else + [ -s ".tmp/brute_recursive.txt" ] && ripgen -d .tmp/brute_recursive.txt -w $tools/permutations_list.txt 2>>"$LOGFILE" | head -c $PERMUTATIONS_LIMIT > .tmp/gotator1_recursive.txt + fi + + if [ ! "$AXIOM" = true ]; then + [ -s ".tmp/gotator1_recursive.txt" ] && puredns resolve .tmp/gotator1_recursive.txt -w .tmp/permute1_recursive.txt -r $resolvers --resolvers-trusted $resolvers_trusted -l $PUREDNS_PUBLIC_LIMIT --rate-limit-trusted $PUREDNS_TRUSTED_LIMIT --wildcard-tests $PUREDNS_WILDCARDTEST_LIMIT --wildcard-batch $PUREDNS_WILDCARDBATCH_LIMIT 2>>"$LOGFILE" >/dev/null + else + [ -s ".tmp/gotator1_recursive.txt" ] && axiom-scan .tmp/gotator1_recursive.txt -m puredns-resolve -r /home/op/lists/resolvers.txt --resolvers-trusted /home/op/lists/resolvers_trusted.txt --wildcard-tests $PUREDNS_WILDCARDTEST_LIMIT --wildcard-batch $PUREDNS_WILDCARDBATCH_LIMIT -o .tmp/permute1_recursive.txt $AXIOM_EXTRA_ARGS 2>>"$LOGFILE" >/dev/null + fi + + if [ "$PERMUTATIONS_OPTION" = "gotator" ] ; then + [ -s ".tmp/permute1_recursive.txt" ] && gotator -sub .tmp/permute1_recursive.txt -perm $tools/permutations_list.txt $GOTATOR_FLAGS -silent 2>>"$LOGFILE" | head -c $PERMUTATIONS_LIMIT > .tmp/gotator2_recursive.txt + else + [ -s ".tmp/permute1_recursive.txt" ] && ripgen -d .tmp/permute1_recursive.txt -w $tools/permutations_list.txt 2>>"$LOGFILE" | head -c $PERMUTATIONS_LIMIT > .tmp/gotator2_recursive.txt + fi + + if [ ! 
"$AXIOM" = true ]; then + [ -s ".tmp/gotator2_recursive.txt" ] && puredns resolve .tmp/gotator2_recursive.txt -w .tmp/permute2_recursive.txt -r $resolvers --resolvers-trusted $resolvers_trusted -l $PUREDNS_PUBLIC_LIMIT --rate-limit-trusted $PUREDNS_TRUSTED_LIMIT --wildcard-tests $PUREDNS_WILDCARDTEST_LIMIT --wildcard-batch $PUREDNS_WILDCARDBATCH_LIMIT 2>>"$LOGFILE" >/dev/null + else + [ -s ".tmp/gotator2_recursive.txt" ] && axiom-scan .tmp/gotator2_recursive.txt -m puredns-resolve -r /home/op/lists/resolvers.txt --resolvers-trusted /home/op/lists/resolvers_trusted.txt --wildcard-tests $PUREDNS_WILDCARDTEST_LIMIT --wildcard-batch $PUREDNS_WILDCARDBATCH_LIMIT -o .tmp/permute2_recursive.txt $AXIOM_EXTRA_ARGS 2>>"$LOGFILE" >/dev/null + fi + cat .tmp/permute1_recursive.txt .tmp/permute2_recursive.txt 2>>"$LOGFILE" | anew -q .tmp/permute_recursive.txt + else + end_subfunc "skipped in this mode or defined in reconftw.cfg" ${FUNCNAME[0]} + fi + if [ "$INSCOPE" = true ]; then + check_inscope .tmp/permute_recursive.txt 2>>"$LOGFILE" >/dev/null + check_inscope .tmp/brute_recursive.txt 2>>"$LOGFILE" >/dev/null + fi + + # Last validation + cat .tmp/permute_recursive.txt .tmp/brute_recursive.txt 2>>"$LOGFILE" | anew -q .tmp/brute_perm_recursive.txt + if [ ! "$AXIOM" = true ]; then + [ -s ".tmp/brute_recursive.txt" ] && puredns resolve .tmp/brute_perm_recursive.txt -w .tmp/brute_perm_recursive_final.txt -r $resolvers --resolvers-trusted $resolvers_trusted -l $PUREDNS_PUBLIC_LIMIT --rate-limit-trusted $PUREDNS_TRUSTED_LIMIT --wildcard-tests $PUREDNS_WILDCARDTEST_LIMIT --wildcard-batch $PUREDNS_WILDCARDBATCH_LIMIT 2>>"$LOGFILE" >/dev/null + else + [ -s ".tmp/brute_recursive.txt" ] && axiom-scan .tmp/brute_perm_recursive.txt -m puredns-resolve -r /home/op/lists/resolvers.txt --resolvers-trusted /home/op/lists/resolvers_trusted.txt --wildcard-tests $PUREDNS_WILDCARDTEST_LIMIT --wildcard-batch $PUREDNS_WILDCARDBATCH_LIMIT -o .tmp/brute_perm_recursive_final.txt $AXIOM_EXTRA_ARGS 2>>"$LOGFILE" >/dev/null + fi + + NUMOFLINES=$(cat .tmp/brute_perm_recursive_final.txt 2>>"$LOGFILE" | grep "\.$domain$\|^$domain$" | sed '/^$/d' | anew subdomains/subdomains.txt | wc -l) + end_subfunc "${NUMOFLINES} new subs (recursive active)" ${FUNCNAME[0]} + + else + if [ "$SUB_RECURSIVE_BRUTE" = false ]; then + printf "\n${yellow} ${FUNCNAME} skipped in this mode or defined in reconftw.cfg ${reset}\n" + else + printf "${yellow} ${FUNCNAME} is already processed, to force executing ${FUNCNAME} delete\n $called_fn_dir/.${FUNCNAME} ${reset}\n\n" + fi + fi +} + +# Call main function +sub_recursive_brute \ No newline at end of file diff --git a/bin/rftw_sub_recpassive b/bin/rftw_sub_recpassive new file mode 100755 index 00000000..c2f10282 --- /dev/null +++ b/bin/rftw_sub_recpassive @@ -0,0 +1,68 @@ +#!/bin/bash + +# Load the configuration +CONFIG_FILE="reconftw.cfg" +if [[ -f "$CONFIG_FILE" ]]; then + source "$CONFIG_FILE" +else + echo "Configuration file '$CONFIG_FILE' not found. Exiting." + exit 1 +fi + +# Helper function to display usage/help menu +usage() { + echo "Usage: $0 -d domain" + echo + echo "Options:" + echo " -d, --domain domain Specify the domain to perform recursive passive search." + echo " -h, --help Display this help and exit." + echo + echo "Make sure to set up 'reconftw.cfg' with required environment variables." 
+ exit 1 +} + +# Input validation and parsing +domain="" +while [[ "$#" -gt 0 ]]; do + case $1 in + -d|--domain) domain="$2"; shift ;; + -h|--help) usage ;; + *) echo "Unknown parameter: $1"; usage ;; + esac + shift +done + +if [[ -z "$domain" ]]; then + echo "Domain not specified!" + usage +fi + +sub_recursive_passive() { + if { [ ! -f "$called_fn_dir/.${FUNCNAME[0]}" ] || [ "$DIFF" = true ]; } && [ "$SUB_RECURSIVE_PASSIVE" = true ] && [ -s "subdomains/subdomains.txt" ]; then + start_subfunc ${FUNCNAME[0]} "Running : Subdomains recursive search passive" + # Passive recursive + [ -s "subdomains/subdomains.txt" ] && dsieve -if subdomains/subdomains.txt -f 3 -top $DEEP_RECURSIVE_PASSIVE > .tmp/subdomains_recurs_top.txt + if [ ! "$AXIOM" = true ]; then + resolvers_update_quick_local + [ -s ".tmp/subdomains_recurs_top.txt" ] && timeout -k 1m ${AMASS_ENUM_TIMEOUT}m amass enum -passive -df .tmp/subdomains_recurs_top.txt -nf subdomains/subdomains.txt -config $AMASS_CONFIG -timeout $AMASS_ENUM_TIMEOUT 2>>"$LOGFILE" | anew -q .tmp/passive_recursive.txt + [ -s ".tmp/passive_recursive.txt" ] && puredns resolve .tmp/passive_recursive.txt -w .tmp/passive_recurs_tmp.txt -r $resolvers --resolvers-trusted $resolvers_trusted -l $PUREDNS_PUBLIC_LIMIT --rate-limit-trusted $PUREDNS_TRUSTED_LIMIT --wildcard-tests $PUREDNS_WILDCARDTEST_LIMIT --wildcard-batch $PUREDNS_WILDCARDBATCH_LIMIT 2>>"$LOGFILE" &>/dev/null + else + resolvers_update_quick_axiom + [ -s ".tmp/subdomains_recurs_top.txt" ] && axiom-scan .tmp/subdomains_recurs_top.txt -m amass -passive -o .tmp/amass_prec.txt $AXIOM_EXTRA_ARGS 2>>"$LOGFILE" >/dev/null + [ -s ".tmp/amass_prec.txt" ] && cat .tmp/amass_prec.txt | anew -q .tmp/passive_recursive.txt + [ -s ".tmp/passive_recursive.txt" ] && axiom-scan .tmp/passive_recursive.txt -m puredns-resolve -r /home/op/lists/resolvers.txt --resolvers-trusted /home/op/lists/resolvers_trusted.txt --wildcard-tests $PUREDNS_WILDCARDTEST_LIMIT --wildcard-batch $PUREDNS_WILDCARDBATCH_LIMIT -o .tmp/passive_recurs_tmp.txt $AXIOM_EXTRA_ARGS 2>>"$LOGFILE" >/dev/null + fi + [[ "$INSCOPE" = true ]] && check_inscope .tmp/passive_recurs_tmp.txt 2>>"$LOGFILE" >/dev/null + NUMOFLINES=$(cat .tmp/passive_recurs_tmp.txt 2>>"$LOGFILE" | grep "\.$domain$\|^$domain$" | sed '/^$/d' | anew subdomains/subdomains.txt | wc -l) + end_subfunc "${NUMOFLINES} new subs (recursive)" ${FUNCNAME[0]} + else + if [ "$SUB_RECURSIVE_PASSIVE" = false ]; then + printf "\n${yellow} ${FUNCNAME[0]} skipped in this mode or defined in reconftw.cfg ${reset}\n" + else + printf "${yellow} ${FUNCNAME[0]} is already processed, to force executing ${FUNCNAME[0]} delete\n $called_fn_dir/.${FUNCNAME[0]} ${reset}\n\n" + fi + fi +} + +# Execute the main function +sub_recursive_passive \ No newline at end of file diff --git a/bin/rftw_sub_regex b/bin/rftw_sub_regex new file mode 100755 index 00000000..1258f47a --- /dev/null +++ b/bin/rftw_sub_regex @@ -0,0 +1,67 @@ +#!/bin/bash + +# Configuration file loading +CONFIG_FILE="reconftw.cfg" +if [[ -f "$CONFIG_FILE" ]]; then + source "$CONFIG_FILE" +else + echo "Configuration file '$CONFIG_FILE' not found. Exiting." + exit 1 +fi + +# Helper function to display usage/help menu +usage() { + echo "Usage: $0 [OPTIONS]" + echo + echo "Options:" + echo " -h, --help Display this help and exit." + echo + echo "Make sure to set up 'reconftw.cfg' with required environment variables." 
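# Descriptive note: unlike most sub_* wrappers, this script takes no -d flag; the target
# comes from the $domain, $dir and $tools variables that are expected to be defined via
# reconftw.cfg (or the calling environment) before sub_regex_permut below is run.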
+ exit 1 +} + +# Input validation +if [[ "$#" -eq 1 && ("$1" == "-h" || "$1" == "--help") ]]; then + usage +fi + +# Check for mandatory variables +if [[ -z "$SUBREGEXPERMUTE" || -z "$called_fn_dir" || -z "$DIFF" ]]; then + echo "Mandatory variables not set in the configuration file. Exiting." + exit 1 +fi + +sub_regex_permut() { + if { [ ! -f "$called_fn_dir/.${FUNCNAME[0]}" ] || [ "$DIFF" = true ]; } && [ "$SUBREGEXPERMUTE" = true ]; then + start_subfunc ${FUNCNAME[0]} "Running : Permutations by regex analysis" + cd "$tools/regulator" || { echo "Failed to cd directory in ${FUNCNAME[0]} @ line ${LINENO}"; exit 1; } + python3 main.py -t $domain -f ${dir}/subdomains/subdomains.txt -o ${dir}/.tmp/${domain}.brute + cd "$dir" || { echo "Failed to cd to $dir in ${FUNCNAME[0]} @ line ${LINENO}"; exit 1; } + + if [ ! "$AXIOM" = true ]; then + resolvers_update_quick_local + [ -s ".tmp/${domain}.brute" ] && puredns resolve .tmp/${domain}.brute -w .tmp/regulator.txt -r $resolvers --resolvers-trusted $resolvers_trusted -l $PUREDNS_PUBLIC_LIMIT --rate-limit-trusted $PUREDNS_TRUSTED_LIMIT --wildcard-tests $PUREDNS_WILDCARDTEST_LIMIT --wildcard-batch $PUREDNS_WILDCARDBATCH_LIMIT 2>>"$LOGFILE" >/dev/null + else + resolvers_update_quick_axiom + [ -s ".tmp/${domain}.brute" ] && axiom-scan .tmp/${domain}.brute -m puredns-resolve -r /home/op/lists/resolvers.txt --resolvers-trusted /home/op/lists/resolvers_trusted.txt --wildcard-tests $PUREDNS_WILDCARDTEST_LIMIT --wildcard-batch $PUREDNS_WILDCARDBATCH_LIMIT -o .tmp/regulator.txt $AXIOM_EXTRA_ARGS 2>>"$LOGFILE" >/dev/null + fi + + if [ -s ".tmp/regulator.txt" ]; then + [ -s "$outOfScope_file" ] && deleteOutScoped $outOfScope_file .tmp/regulator.txt + [[ "$INSCOPE" = true ]] && check_inscope .tmp/regulator.txt 2>>"$LOGFILE" >/dev/null + NUMOFLINES=$(cat .tmp/regulator.txt 2>>"$LOGFILE" | grep ".$domain$" | anew subdomains/subdomains.txt | sed '/^$/d' | wc -l) + else + NUMOFLINES=0 + fi + end_subfunc "${NUMOFLINES} new subs (permutations by regex)" ${FUNCNAME[0]} + else + if [ "$SUBREGEXPERMUTE" = false ]; then + printf "\n${yellow} ${FUNCNAME[0]} skipped in this mode or defined in reconftw.cfg ${reset}\n" + else + printf "${yellow} ${FUNCNAME[0]} is already processed, to force executing ${FUNCNAME[0]} delete\n $called_fn_dir/.${FUNCNAME[0]} ${reset}\n\n" + fi + fi +} + +# Execute the main function +sub_regex_permut \ No newline at end of file diff --git a/bin/rftw_sub_s3buckets b/bin/rftw_sub_s3buckets new file mode 100755 index 00000000..db6342ff --- /dev/null +++ b/bin/rftw_sub_s3buckets @@ -0,0 +1,79 @@ +#!/bin/bash + +# Load environment variables from reconftw.cfg +# Load environment variables +if [ -f "reconftw.cfg" ]; then + source reconftw.cfg +else + echo "Error: reconftw.cfg not found!" + exit 1 +fi + +help_menu() { + echo "Usage: $0 [DOMAIN] [OPTIONS]" + echo "AWS S3 buckets and cloud assets checker." + echo + echo "Options:" + echo " -h, --help Display this help menu and exit" + echo " -f, --force Force the execution even if it was already processed" +} + +validate_inputs() { + if [[ -z "$domain" ]]; then + echo -e "${yellow} No domain provided! 
${reset}" + exit 1 + fi + + if [[ $domain =~ ^[0-9]+\.[0-9]+\.[0-9]+\.[0-9] ]]; then + echo -e "${yellow} Invalid domain format: IP address provided instead of a domain name ${reset}" + exit 1 + fi + + if [ "$S3BUCKETS" != true ] && [ "$FORCE_EXECUTION" != true ]; then + echo -e "${yellow} s3buckets skipped in this mode or defined in reconftw.cfg ${reset}" + exit 0 + fi +} + +run_s3buckets() { + start_func "s3buckets" "AWS S3 buckets search" + # S3Scanner + if [ ! "$AXIOM" = true ]; then + [ -s "subdomains/subdomains.txt" ] && s3scanner scan -f subdomains/subdomains.txt 2>>"$LOGFILE" | anew -q .tmp/s3buckets.txt + else + axiom-scan subdomains/subdomains.txt -m s3scanner -o .tmp/s3buckets_tmp.txt $AXIOM_EXTRA_ARGS 2>>"$LOGFILE" >/dev/null + [ -s ".tmp/s3buckets_tmp.txt" ] && cat .tmp/s3buckets_tmp.txt .tmp/s3buckets_tmp2.txt 2>>"$LOGFILE" | anew -q .tmp/s3buckets.txt && sed -i '/^$/d' .tmp/s3buckets.txt + fi + # Cloudenum + keyword=${domain%%.*} + python3 ~/Tools/cloud_enum/cloud_enum.py -k $keyword -qs -l .tmp/output_cloud.txt 2>>"$LOGFILE" >/dev/null + + NUMOFLINES1=$(cat .tmp/output_cloud.txt 2>>"$LOGFILE" | sed '/^#/d' | sed '/^$/d' | anew subdomains/cloud_assets.txt | wc -l) + if [ "$NUMOFLINES1" -gt 0 ]; then + notification "${NUMOFLINES1} new cloud assets found" info + fi + + NUMOFLINES2=$(cat .tmp/s3buckets.txt 2>>"$LOGFILE" | grep -aiv "not_exist" | grep -aiv "Warning:" | grep -aiv "invalid_name" | grep -aiv "^http" | awk 'NF' | anew subdomains/s3buckets.txt | sed '/^$/d' | wc -l) + if [ "$NUMOFLINES2" -gt 0 ]; then + notification "${NUMOFLINES2} new S3 buckets found" info + fi + + end_func "Results are saved in subdomains/s3buckets.txt and subdomains/cloud_assets.txt" "s3buckets" +} + +# Main +FORCE_EXECUTION=false +domain="$1" + +shift +while [[ "$#" -gt 0 ]]; do + case $1 in + -h|--help) help_menu; exit 0 ;; + -f|--force) FORCE_EXECUTION=true; shift ;; + *) echo "Unknown parameter passed: $1"; exit 1 ;; + esac + shift +done + +validate_inputs +run_s3buckets diff --git a/bin/rftw_sub_scraping b/bin/rftw_sub_scraping new file mode 100755 index 00000000..b378b95f --- /dev/null +++ b/bin/rftw_sub_scraping @@ -0,0 +1,92 @@ +#!/bin/bash + +# Load the environment variables from the configuration file +# Load environment variables +if [ -f "reconftw.cfg" ]; then + source reconftw.cfg +else + echo "Error: reconftw.cfg not found!" + exit 1 +fi + +# Function to display the help menu +help_menu() { + echo "Usage: sub_scraping.sh [OPTIONS]" + echo "" + echo "Options:" + echo " -h, --help Display this help menu" + echo " --deep Run in deep mode" + echo " --no-axiom Disable Axiom" + echo " --diff Enable DIFF mode" +} + +# Input validation and options parsing +while [[ "$#" -gt 0 ]]; do + case $1 in + -h|--help) help_menu; exit 0 ;; + --deep) DEEP=true; shift ;; + --no-axiom) AXIOM=false; shift ;; + --diff) DIFF=true; shift ;; + *) echo "Unknown parameter: $1"; help_menu; exit 1 ;; + esac +done + +# The main sub_scraping function +sub_scraping() { + if { [ ! -f "$called_fn_dir/.${FUNCNAME[0]}" ] || [ "$DIFF" = true ]; } && [ "$SUBSCRAPING" = true ]; then + start_subfunc ${FUNCNAME[0]} "Running : Source code scraping subdomain search" + touch .tmp/scrap_subs.txt + if [ -s "$dir/subdomains/subdomains.txt" ]; then + if [[ $(cat subdomains/subdomains.txt | wc -l) -le $DEEP_LIMIT ]] || [ "$DEEP" = true ] ; then + if [ ! 
"$AXIOM" = true ]; then + resolvers_update_quick_local + cat subdomains/subdomains.txt | httpx -follow-host-redirects -status-code -threads $HTTPX_THREADS -rl $HTTPX_RATELIMIT -timeout $HTTPX_TIMEOUT -silent -retries 2 -title -web-server -tech-detect -location -no-color -json -o .tmp/web_full_info1.txt 2>>"$LOGFILE" >/dev/null + [ -s ".tmp/web_full_info1.txt" ] && cat .tmp/web_full_info1.txt | jq -r 'try .url' 2>/dev/null | grep "$domain" | sed "s/*.//" | anew .tmp/probed_tmp_scrap.txt | unfurl -u domains 2>>"$LOGFILE" | anew -q .tmp/scrap_subs.txt + [ -s ".tmp/probed_tmp_scrap.txt" ] && cat .tmp/probed_tmp_scrap.txt | httpx -tls-grab -tls-probe -csp-probe -status-code -threads $HTTPX_THREADS -rl $HTTPX_RATELIMIT -timeout $HTTPX_TIMEOUT -silent -retries 2 -title -web-server -tech-detect -location -no-color -json -o .tmp/web_full_info2.txt 2>>"$LOGFILE" >/dev/null + [ -s ".tmp/web_full_info2.txt" ] && cat .tmp/web_full_info2.txt | jq -r 'try ."tls-grab"."dns_names"[],try .csp.domains[],try .url' 2>/dev/null | grep "$domain" | sed "s/*.//" | sort -u | httpx -silent | anew .tmp/probed_tmp_scrap.txt | unfurl -u domains 2>>"$LOGFILE" | anew -q .tmp/scrap_subs.txt + + if [ "$DEEP" = true ]; then + [ -s ".tmp/probed_tmp_scrap.txt" ] && katana -silent -list .tmp/probed_tmp_scrap.txt -jc -kf all -c $KATANA_THREADS -d 3 -fs rdn -o .tmp/katana.txt 2>>"$LOGFILE" >/dev/null + else + [ -s ".tmp/probed_tmp_scrap.txt" ] && katana -silent -list .tmp/probed_tmp_scrap.txt -jc -kf all -c $KATANA_THREADS -d 2 -fs rdn -o .tmp/katana.txt 2>>"$LOGFILE" >/dev/null + fi + else + resolvers_update_quick_axiom + axiom-scan subdomains/subdomains.txt -m httpx -follow-host-redirects -random-agent -status-code -threads $HTTPX_THREADS -rl $HTTPX_RATELIMIT -timeout $HTTPX_TIMEOUT -silent -retries 2 -title -web-server -tech-detect -location -no-color -json -o .tmp/web_full_info1.txt $AXIOM_EXTRA_ARGS 2>>"$LOGFILE" >/dev/null + [ -s ".tmp/web_full_info1.txt" ] && cat .tmp/web_full_info1.txt | jq -r 'try .url' 2>/dev/null | grep "$domain" | sed "s/*.//" | anew .tmp/probed_tmp_scrap.txt | unfurl -u domains 2>>"$LOGFILE" | anew -q .tmp/scrap_subs.txt + [ -s ".tmp/probed_tmp_scrap.txt" ] && axiom-scan .tmp/probed_tmp_scrap.txt -m httpx -tls-grab -tls-probe -csp-probe -random-agent -status-code -threads $HTTPX_THREADS -rl $HTTPX_RATELIMIT -timeout $HTTPX_TIMEOUT -silent -retries 2 -title -web-server -tech-detect -location -no-color -json -o .tmp/web_full_info2.txt $AXIOM_EXTRA_ARGS 2>>"$LOGFILE" >/dev/null + [ -s ".tmp/web_full_info2.txt" ] && cat .tmp/web_full_info2.txt | jq -r 'try ."tls-grab"."dns_names"[],try .csp.domains[],try .url' 2>/dev/null | grep "$domain" | sed "s/*.//" | sort -u | httpx -silent | anew .tmp/probed_tmp_scrap.txt | unfurl -u domains 2>>"$LOGFILE" | anew -q .tmp/scrap_subs.txt + if [ "$DEEP" = true ]; then + [ -s ".tmp/probed_tmp_scrap.txt" ] && axiom-scan .tmp/probed_tmp_scrap.txt -m katana -jc -kf all -d 3 -fs rdn -o .tmp/katana.txt $AXIOM_EXTRA_ARGS 2>>"$LOGFILE" >/dev/null + else + [ -s ".tmp/probed_tmp_scrap.txt" ] && axiom-scan .tmp/probed_tmp_scrap.txt -m katana -jc -kf all -d 2 -fs rdn -o .tmp/katana.txt $AXIOM_EXTRA_ARGS 2>>"$LOGFILE" >/dev/null + fi + fi + sed -i '/^.\{2048\}./d' .tmp/katana.txt + [ -s ".tmp/katana.txt" ] && cat .tmp/katana.txt | unfurl -u domains 2>>"$LOGFILE" | grep ".$domain$" | anew -q .tmp/scrap_subs.txt + [ -s ".tmp/scrap_subs.txt" ] && puredns resolve .tmp/scrap_subs.txt -w .tmp/scrap_subs_resolved.txt -r $resolvers --resolvers-trusted $resolvers_trusted -l 
$PUREDNS_PUBLIC_LIMIT --rate-limit-trusted $PUREDNS_TRUSTED_LIMIT --wildcard-tests $PUREDNS_WILDCARDTEST_LIMIT --wildcard-batch $PUREDNS_WILDCARDBATCH_LIMIT 2>>"$LOGFILE" >/dev/null + if [ "$INSCOPE" = true ]; then + check_inscope .tmp/scrap_subs_resolved.txt 2>>"$LOGFILE" >/dev/null + fi + NUMOFLINES=$(cat .tmp/scrap_subs_resolved.txt 2>>"$LOGFILE" | grep "\.$domain$\|^$domain$" | anew subdomains/subdomains.txt | tee .tmp/diff_scrap.txt | sed '/^$/d' | wc -l) + [ -s ".tmp/diff_scrap.txt" ] && cat .tmp/diff_scrap.txt | httpx -follow-host-redirects -random-agent -status-code -threads $HTTPX_THREADS -rl $HTTPX_RATELIMIT -timeout $HTTPX_TIMEOUT -silent -retries 2 -title -web-server -tech-detect -location -no-color -json -o .tmp/web_full_info3.txt 2>>"$LOGFILE" >/dev/null + [ -s ".tmp/web_full_info3.txt" ] && cat .tmp/web_full_info3.txt | jq -r 'try .url' 2>/dev/null | grep "$domain" | sed "s/*.//" | anew .tmp/probed_tmp_scrap.txt | unfurl -u domains 2>>"$LOGFILE" | anew -q .tmp/scrap_subs.txt + cat .tmp/web_full_info1.txt .tmp/web_full_info2.txt .tmp/web_full_info3.txt 2>>"$LOGFILE" | jq -s 'try .' | jq 'try unique_by(.input)' | jq 'try .[]' 2>>"$LOGFILE" > .tmp/web_full_info.txt + end_subfunc "${NUMOFLINES} new subs (code scraping)" ${FUNCNAME[0]} + else + end_subfunc "Skipping Subdomains Web Scraping: Too Many Subdomains" ${FUNCNAME[0]} + fi + else + end_subfunc "No subdomains to search (code scraping)" ${FUNCNAME[0]} + fi + else + if [ "$SUBSCRAPING" = false ]; then + printf "\n${yellow} ${FUNCNAME[0]} skipped in this mode or defined in reconftw.cfg ${reset}\n" + else + printf "${yellow} ${FUNCNAME[0]} is already processed, to force executing ${FUNCNAME[0]} delete\n $called_fn_dir/.${FUNCNAME[0]} ${reset}\n\n" + fi + fi +} + +# Execute the function +sub_scraping \ No newline at end of file diff --git a/bin/rftw_sub_takeover b/bin/rftw_sub_takeover new file mode 100755 index 00000000..1951ac83 --- /dev/null +++ b/bin/rftw_sub_takeover @@ -0,0 +1,66 @@ +#!/bin/bash + +# Load environment variables from reconftw.cfg +# Load environment variables +if [ -f "reconftw.cfg" ]; then + source reconftw.cfg +else + echo "Error: reconftw.cfg not found!" + exit 1 +fi + +help_menu() { + echo "Usage: $0 [OPTIONS]" + echo "Subdomain and DNS takeover checker." + echo + echo "Options:" + echo " -h, --help Display this help menu and exit" + echo " -f, --force Force the execution even if it was already processed" +} + +validate_inputs() { + if [ "$SUBTAKEOVER" != true ] && [ "$FORCE_EXECUTION" != true ]; then + echo -e "${yellow} subtakeover skipped in this mode or defined in reconftw.cfg ${reset}" + exit 0 + fi +} + +run_subtakeover() { + start_func ${FUNCNAME[0]} "Looking for possible subdomain and DNS takeover" + touch .tmp/tko.txt + [ ! -s ".tmp/webs_all.txt" ] && cat webs/webs.txt webs/webs_uncommon_ports.txt 2>/dev/null | anew -q .tmp/webs_all.txt + if [ ! 
"$AXIOM" = true ]; then + nuclei -update 2>>"$LOGFILE" >/dev/null + cat subdomains/subdomains.txt .tmp/webs_all.txt 2>/dev/null | nuclei -silent -nh -tags takeover -severity info,low,medium,high,critical -retries 3 -rl $NUCLEI_RATELIMIT -t ${NUCLEI_TEMPLATES_PATH} -o .tmp/tko.txt + else + cat subdomains/subdomains.txt .tmp/webs_all.txt 2>>"$LOGFILE" | sed '/^$/d' | anew -q .tmp/webs_subs.txt + [ -s ".tmp/webs_subs.txt" ] && axiom-scan .tmp/webs_subs.txt -m nuclei --nuclei-templates ${NUCLEI_TEMPLATES_PATH} -tags takeover -nh -severity info,low,medium,high,critical -retries 3 -rl $NUCLEI_RATELIMIT -t ${NUCLEI_TEMPLATES_PATH} -o .tmp/tko.txt $AXIOM_EXTRA_ARGS 2>>"$LOGFILE" >/dev/null + fi + + # DNS_TAKEOVER + cat .tmp/subs_no_resolved.txt .tmp/subdomains_dns.txt .tmp/scrap_subs.txt .tmp/analytics_subs_clean.txt .tmp/passive_recursive.txt 2>/dev/null | anew -q .tmp/subs_dns_tko.txt + cat .tmp/subs_dns_tko.txt 2>/dev/null | dnstake -c $DNSTAKE_THREADS -s 2>>"$LOGFILE" | sed '/^$/d' | anew -q .tmp/tko.txt + + sed -i '/^$/d' .tmp/tko.txt + + NUMOFLINES=$(cat .tmp/tko.txt 2>>"$LOGFILE" | anew webs/takeover.txt | sed '/^$/d' | wc -l) + if [ "$NUMOFLINES" -gt 0 ]; then + notification "${NUMOFLINES} new possible takeovers found" info + fi + end_func "Results are saved in $domain/webs/takeover.txt" ${FUNCNAME[0]} +} + +# Main +FORCE_EXECUTION=false + +while [[ "$#" -gt 0 ]]; do + case $1 in + -h|--help) help_menu; exit 0 ;; + -f|--force) FORCE_EXECUTION=true; shift ;; + *) echo "Unknown parameter passed: $1"; exit 1 ;; + esac + shift +done + +validate_inputs +run_subtakeover \ No newline at end of file diff --git a/bin/rftw_sub_vhosts b/bin/rftw_sub_vhosts new file mode 100755 index 00000000..d6994fb6 --- /dev/null +++ b/bin/rftw_sub_vhosts @@ -0,0 +1,69 @@ +#!/bin/bash + +# Load environment variables from reconftw.cfg +# Load environment variables +if [ -f "reconftw.cfg" ]; then + source reconftw.cfg +else + echo "Error: reconftw.cfg not found!" + exit 1 +fi + +help_menu() { + echo "Usage: $0 [DOMAIN] [OPTIONS]" + echo "Virtual Hosts discovery tool for the specified domain." + echo + echo "Options:" + echo " -h, --help Display this help menu and exit" + echo " -f, --force Force the execution even if it was already processed" +} + +validate_inputs() { + if [[ -z "$domain" ]]; then + echo -e "${yellow} No domain provided! ${reset}" + exit 1 + fi + + if [ "$VIRTUALHOSTS" != true ] && [ "$FORCE_EXECUTION" != true ]; then + echo -e "${yellow} virtualhosts skipped in this mode or defined in reconftw.cfg ${reset}" + exit 0 + fi +} + +run_virtualhosts() { + start_func "virtualhosts" "Virtual Hosts discovery" + + [ ! 
-s ".tmp/webs_all.txt" ] && cat webs/webs.txt webs/webs_uncommon_ports.txt 2>/dev/null | anew -q .tmp/webs_all.txt + + if [ -s ".tmp/webs_all.txt" ]; then + mkdir -p $dir/virtualhosts $dir/.tmp/virtualhosts + interlace -tL .tmp/webs_all.txt -threads ${INTERLACE_THREADS} -c "ffuf -ac -t ${FFUF_THREADS} -rate ${FFUF_RATELIMIT} -H \"${HEADER}\" -H \"Host: FUZZ._cleantarget_\" -w ${fuzz_wordlist} -maxtime ${FFUF_MAXTIME} -u _target_ -of json -o _output_/_cleantarget_.json" -o $dir/.tmp/virtualhosts 2>>"$LOGFILE" >/dev/null + + for sub in $(cat .tmp/webs_all.txt); do + sub_out=$(echo $sub | sed -e 's|^[^/]*//||' -e 's|/.*$||') + [ -s "$dir/.tmp/virtualhosts/${sub_out}.json" ] && cat $dir/.tmp/virtualhosts/${sub_out}.json | jq -r 'try .results[] | "\(.status) \(.length) \(.url)"' | sort | anew -q $dir/virtualhosts/${sub_out}.txt + done + + find $dir/virtualhosts/ -type f -iname "*.txt" -exec cat {} + 2>>"$LOGFILE" | anew -q $dir/virtualhosts/virtualhosts_full.txt + end_func "Results are saved in $domain/virtualhosts/*subdomain*.txt" "virtualhosts" + else + end_func "No $domain/web/webs.txts file found, virtualhosts skipped " "virtualhosts" + fi +} + +# Main +FORCE_EXECUTION=false +domain="$1" + +shift +while [[ "$#" -gt 0 ]]; do + case $1 in + -h|--help) help_menu; exit 0 ;; + -f|--force) FORCE_EXECUTION=true; shift ;; + *) echo "Unknown parameter passed: $1"; exit 1 ;; + esac + shift +done + +validate_inputs +run_virtualhosts diff --git a/bin/rftw_sub_zonetransfer b/bin/rftw_sub_zonetransfer new file mode 100755 index 00000000..e6b5a5a6 --- /dev/null +++ b/bin/rftw_sub_zonetransfer @@ -0,0 +1,64 @@ +#!/bin/bash + +# Load environment variables from reconftw.cfg +# Load environment variables +if [ -f "reconftw.cfg" ]; then + source reconftw.cfg +else + echo "Error: reconftw.cfg not found!" + exit 1 +fi + +help_menu() { + echo "Usage: $0 [DOMAIN] [OPTIONS]" + echo "Zone transfer checker." + echo + echo "Options:" + echo " -h, --help Display this help menu and exit" + echo " -f, --force Force the execution even if it was already processed" +} + +validate_inputs() { + if [[ -z "$domain" ]]; then + echo -e "${yellow} No domain provided! ${reset}" + exit 1 + fi + + if [[ $domain =~ ^[0-9]+\.[0-9]+\.[0-9]+\.[0-9] ]]; then + echo -e "${yellow} Invalid domain format: IP address provided instead of a domain name ${reset}" + exit 1 + fi + + if [ "$ZONETRANSFER" != true ] && [ "$FORCE_EXECUTION" != true ]; then + echo -e "${yellow} zonetransfer skipped in this mode or defined in reconftw.cfg ${reset}" + exit 0 + fi +} + +run_zonetransfer() { + start_func "zonetransfer" "Zone transfer check" + for ns in $(dig +short ns "$domain"); do + dig axfr "$domain" @"$ns" >> subdomains/zonetransfer.txt + done + if [ -s "subdomains/zonetransfer.txt" ] && ! grep -q "Transfer failed" subdomains/zonetransfer.txt ; then + notification "Zone transfer found on ${domain}!" 
info + fi + end_func "Results are saved in $domain/subdomains/zonetransfer.txt" "zonetransfer" +} + +# Main +FORCE_EXECUTION=false +domain="$1" + +shift +while [[ "$#" -gt 0 ]]; do + case $1 in + -h|--help) help_menu; exit 0 ;; + -f|--force) FORCE_EXECUTION=true; shift ;; + *) echo "Unknown parameter passed: $1"; exit 1 ;; + esac + shift +done + +validate_inputs +run_zonetransfer diff --git a/bin/rftw_uti_transfer b/bin/rftw_uti_transfer new file mode 100755 index 00000000..4170bbe6 --- /dev/null +++ b/bin/rftw_uti_transfer @@ -0,0 +1,62 @@ +#!/bin/bash + +# Load environment variables from reconftw.cfg +# Load environment variables +if [ -f "reconftw.cfg" ]; then + source reconftw.cfg +else + echo "Error: reconftw.cfg not found!" + exit 1 +fi + +# Display help information +function display_help() { + echo "Usage: $0 " + echo " ... | $0 " + echo + echo "Uploads a specified file or directory to https://transfer.sh/" + echo + echo "Options:" + echo " -h, --help Display this help and exit" + echo +} + +# Uploads a file or directory to transfer.sh +function transfer() { + if [ $# -eq 0 ]; then + echo "Error: No arguments specified." + display_help + exit 1 + fi + + if tty -s; then + local file="$1" + local file_name=$(basename "$file") + + if [ ! -e "$file" ]; then + echo "Error: $file: No such file or directory" >&2 + exit 1 + fi + + if [ -d "$file" ]; then + # If the given input is a directory, zip and transfer it. + file_name="$file_name.zip" + (cd "$file" && zip -r -q - .) | curl --progress-bar --upload-file "-" "https://transfer.sh/$file_name" | tee /dev/null + else + # If the given input is a file, transfer it. + cat "$file" | curl --progress-bar --upload-file "-" "https://transfer.sh/$file_name" | tee /dev/null + fi + else + # Transfer data from standard input. + local file_name=$1 + curl --progress-bar --upload-file "-" "https://transfer.sh/$file_name" | tee /dev/null + fi +} + +# Main script execution +if [[ "$1" == "-h" || "$1" == "--help" ]]; then + display_help + exit 0 +fi + +transfer "$@" diff --git a/bin/rftw_util_ascii b/bin/rftw_util_ascii new file mode 100755 index 00000000..4ffd1156 --- /dev/null +++ b/bin/rftw_util_ascii @@ -0,0 +1,57 @@ +#!/bin/bash + +# Load environment variables from reconftw.cfg +# Load environment variables +if [ -f "reconftw.cfg" ]; then + source reconftw.cfg +else + echo "Error: reconftw.cfg not found!" + exit 1 +fi + +# Colors for console output +green="\033[1;32m" +red="\033[1;31m" +reset="\033[0m" + +# Help menu function +function display_help() { + echo "Usage: $0 [FILE]" + echo + echo "Checks if the provided file contains ASCII text." + echo "Options:" + echo " -h, --help Display this help and exit" + echo +} + +# Validate arguments function +function validate_args() { + if [[ -z "$1" ]]; then + echo -e "${red}Error: FILE is required.${reset}" + display_help + exit 1 + fi + if [[ ! 
-f "$1" ]]; then + echo -e "${red}Error: $1 is not a file.${reset}" + exit 2 + fi +} + +# Check ASCII text function +function is_ascii_text() { + local filepath="$1" + if [[ $(file "$filepath" | grep -o 'ASCII text$') == "ASCII text" ]]; then + echo -e "${green}$filepath contains ASCII text.${reset}" + else + echo -e "${red}$filepath does not contain ASCII text.${reset}" + fi +} + +# Main script execution +if [[ "$1" == "-h" || "$1" == "--help" ]]; then + display_help + exit 0 +fi + +validate_args "$1" +is_ascii_text "$1" diff --git a/bin/rftw_util_axiomoff b/bin/rftw_util_axiomoff new file mode 100755 index 00000000..e93754ab --- /dev/null +++ b/bin/rftw_util_axiomoff @@ -0,0 +1,60 @@ +#!/bin/bash + +# Load environment variables from reconftw.cfg +# Load environment variables +if [ -f "reconftw.cfg" ]; then + source reconftw.cfg +else + echo "Error: reconftw.cfg not found!" + exit 1 +fi + +# Help menu function +function display_help() { + echo "Usage: $0 [MODE]" + echo + echo "Shut down the Axiom fleet based on configurations in reconftw.cfg." + echo + echo "MODE: Specifies the mode for the shutdown. (e.g., 'subs_menu', 'passive', 'all')" + echo + echo "Options:" + echo " -h, --help Display this help and exit" + echo +} + +# Main Axiom shutdown function +function axiom_shutdown() { + local mode="$1" + if [ "$AXIOM_FLEET_LAUNCH" = true ] && [ "$AXIOM_FLEET_SHUTDOWN" = true ] && [ -n "$AXIOM_FLEET_NAME" ]; then + # Check mode conditions + if [[ "$mode" == "subs_menu" || "$mode" == "passive" || "$mode" == "all" ]]; then + # You might want to add a logging function here for "notification" + # notification "Automatic Axiom fleet shutdown is not enabled in this mode" info + echo "Automatic Axiom fleet shutdown is not enabled in mode: $mode" + return + fi + + # Remove the Axiom fleet + eval axiom-rm -f "$AXIOM_FLEET_NAME*" + echo "Axiom fleet $AXIOM_FLEET_NAME shutdown" | $NOTIFY + # Another potential place for a logging function + # notification "Axiom fleet $AXIOM_FLEET_NAME shutdown" info + echo "Axiom fleet $AXIOM_FLEET_NAME shutdown" + else + echo "Axiom fleet conditions not met for shutdown." + fi +} + +# Check arguments +if [[ "$1" == "-h" || "$1" == "--help" ]]; then + display_help + exit 0 +else + # Ensure mode is specified + if [ -z "$1" ]; then + echo "Error: Mode not specified." + display_help + exit 1 + fi + axiom_shutdown "$1" +fi diff --git a/bin/rftw_util_axiomon b/bin/rftw_util_axiomon new file mode 100755 index 00000000..c86754f3 --- /dev/null +++ b/bin/rftw_util_axiomon @@ -0,0 +1,64 @@ +#!/bin/bash + +# Load environment variables from reconftw.cfg +# Load environment variables +if [ -f "reconftw.cfg" ]; then + source reconftw.cfg +else + echo "Error: reconftw.cfg not found!" + exit 1 +fi + +# Help menu +function display_help() { + echo "Usage: $0" + echo + echo "Launch an Axiom fleet based on configurations in reconftw.cfg." 
+ echo + echo "Options:" + echo " -h, --help Display this help and exit" + echo +} + +# Main Axiom launch function +function axiom_launch() { + if [ "$AXIOM_FLEET_LAUNCH" = true ] && [ -n "$AXIOM_FLEET_NAME" ] && [ -n "$AXIOM_FLEET_COUNT" ]; then + # Additional logging function can be used here, example: + # start_func ${FUNCNAME[0]} "Launching our Axiom fleet" + + # Ensure the linode-cli tool is up-to-date + python3 -m pip install --upgrade linode-cli 2>>"$LOGFILE" >/dev/null + + # Check the current number of nodes + NUMOFNODES=$(timeout 30 axiom-ls | grep -c "$AXIOM_FLEET_NAME") + + if [[ $NUMOFNODES -ge $AXIOM_FLEET_COUNT ]]; then + axiom-select "$AXIOM_FLEET_NAME*" + # Logging/notification example: + # end_func "Axiom fleet $AXIOM_FLEET_NAME already has $NUMOFNODES instances" + else + [ $NUMOFNODES -eq 0 ] && startcount=$AXIOM_FLEET_COUNT || startcount=$((AXIOM_FLEET_COUNT-NUMOFNODES)) + + AXIOM_ARGS=" -i $startcount" + # Execute the axiom fleet command + axiom-fleet ${AXIOM_FLEET_NAME} ${AXIOM_ARGS} + + axiom-select "$AXIOM_FLEET_NAME*" + + [ -n "$AXIOM_POST_START" ] && eval "$AXIOM_POST_START" 2>>"$LOGFILE" >/dev/null + + NUMOFNODES=$(timeout 30 axiom-ls | grep -c "$AXIOM_FLEET_NAME") + # Notification/logging example: + # echo "Axiom fleet $AXIOM_FLEET_NAME launched w/ $NUMOFNODES instances" | $NOTIFY + # end_func "Axiom fleet $AXIOM_FLEET_NAME launched w/ $NUMOFNODES instances" + fi + fi +} + +# Main execution starts here +if [[ "$1" == "-h" || "$1" == "--help" ]]; then + display_help + exit 0 +else + axiom_launch +fi diff --git a/bin/rftw_util_axiomsel b/bin/rftw_util_axiomsel new file mode 100755 index 00000000..7b7f9f13 --- /dev/null +++ b/bin/rftw_util_axiomsel @@ -0,0 +1,50 @@ +#!/bin/bash + +# Load environment variables from reconftw.cfg +# Load environment variables +if [ -f "reconftw.cfg" ]; then + source reconftw.cfg +else + echo "Error: reconftw.cfg not found!" + exit 1 +fi + +# Help menu function +function display_help() { + echo "Usage: $0" + echo + echo "Check for running Axiom instances and whether any instances are selected." + echo + echo "Options:" + echo " -h, --help Display this help and exit" + echo +} + +# Main Axiom check function +function axiom_selected() { + # Check if there are any running axiom instances + if [[ ! $(axiom-ls | tail -n +2 | sed '$ d' | wc -l) -gt 0 ]]; then + # You might want to add a logging function here for "notification" + # notification "\n\n${bred} No axiom instances running ${reset}\n\n" error + echo -e "\n\n${bred} No axiom instances running ${reset}\n\n" + exit 1 + fi + + # Check if there are any selected axiom instances + if [[ ! $(cat ~/.axiom/selected.conf | sed '/^\s*$/d' | wc -l) -gt 0 ]]; then + # Again, consider adding a logging function here for "notification" + # notification "\n\n${bred} No axiom instances selected ${reset}\n\n" error + echo -e "\n\n${bred} No axiom instances selected ${reset}\n\n" + exit 1 + fi + + echo "Axiom instances are running and selected." +} + +# Check arguments +if [[ "$1" == "-h" || "$1" == "--help" ]]; then + display_help + exit 0 +else + axiom_selected +fi diff --git a/bin/rftw_util_deleteoos b/bin/rftw_util_deleteoos new file mode 100755 index 00000000..31a2d046 --- /dev/null +++ b/bin/rftw_util_deleteoos @@ -0,0 +1,72 @@ +#!/bin/bash + +# Load environment variables from reconftw.cfg +# Load environment variables +if [ -f "reconftw.cfg" ]; then + source reconftw.cfg +else + echo "Error: reconftw.cfg not found!" 
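# Illustrative usage of this script (arguments as checked by validate_args and consumed by
# delete_out_scoped below):
#   ./bin/rftw_util_deleteoos outofscope.txt subdomains/subdomains.txt
# Scope-file entries beginning with '*' have the asterisk stripped and are matched as suffixes.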
+ exit 1 +fi + +# Colors +yellow="\033[1;33m" +red="\033[1;31m" +reset="\033[0m" + +# Help menu function +function display_help() { + echo "Usage: $0 [OPTIONS] [SCOPE_FILE] [TARGET_FILE]" + echo + echo "Remove out-of-scope items from a target file based on a scope file." + echo "Options:" + echo " -h, --help Display this help and exit" + echo +} + +# Validate arguments function +function validate_args() { + if [[ -z "$1" || -z "$2" ]]; then + echo -e "${red}Error: Both SCOPE_FILE and TARGET_FILE are required.${reset}" + display_help + exit 1 + fi + + if [[ ! -f "$1" ]]; then + echo -e "${red}Error: SCOPE_FILE '$1' does not exist.${reset}" + exit 1 + fi + + if [[ ! -f "$2" ]]; then + echo -e "${red}Error: TARGET_FILE '$2' does not exist.${reset}" + exit 1 + fi +} + +# Delete out-of-scope items function +function delete_out_scoped() { + local scope_file="$1" + local target_file="$2" + + if [ -s "$scope_file" ]; then + while IFS= read -r outscoped + do + if grep -q "^[*]" <<< "$outscoped"; then + outscoped="${outscoped:1}" + sed -i "/$outscoped$/d" "$target_file" + else + sed -i "/$outscoped/d" "$target_file" + fi + done < "$scope_file" + fi +} + +# Main script execution +if [[ "$1" == "-h" || "$1" == "--help" ]]; then + display_help + exit 0 +fi + +validate_args "$1" "$2" +delete_out_scoped "$1" "$2" +echo -e "${yellow}Processed $2 based on out-of-scope items from $1.${reset}" diff --git a/bin/rftw_util_gettime b/bin/rftw_util_gettime new file mode 100755 index 00000000..67cc782e --- /dev/null +++ b/bin/rftw_util_gettime @@ -0,0 +1,65 @@ +#!/bin/bash + +# Load environment variables from reconftw.cfg +# Load environment variables +if [ -f "reconftw.cfg" ]; then + source reconftw.cfg +else + echo "Error: reconftw.cfg not found!" + exit 1 +fi + +# Colors +yellow="\033[1;33m" +red="\033[1;31m" +reset="\033[0m" + +# Help menu function +function display_help() { + echo "Usage: $0 [START_TIME] [END_TIME]" + echo + echo "Calculate the elapsed time between two time values in seconds." + echo "Options:" + echo " -h, --help Display this help and exit" + echo +} + +# Validate arguments function +function validate_args() { + if [[ -z "$1" || -z "$2" ]]; then + echo -e "${red}Error: Both START_TIME and END_TIME are required.${reset}" + display_help + exit 1 + fi + + if ! [[ "$1" =~ ^[0-9]+$ && "$2" =~ ^[0-9]+$ ]]; then + echo -e "${red}Error: Both START_TIME and END_TIME should be numeric.${reset}" + exit 1 + fi +} + +# Get elapsed time function +function get_elapsed_time() { + local start_time="$1" + local end_time="$2" + runtime="" + local T=$((end_time - start_time)) + local D=$((T/60/60/24)) + local H=$((T/60/60%24)) + local M=$((T/60%60)) + local S=$((T%60)) + (( D > 0 )) && runtime="$runtime$D days, " + (( H > 0 )) && runtime="$runtime$H hours, " + (( M > 0 )) && runtime="$runtime$M minutes, " + runtime="$runtime$S seconds." + echo -e "${yellow}$runtime${reset}" +} + +# Main script execution +if [[ "$1" == "-h" || "$1" == "--help" ]]; then + display_help + exit 0 +fi + +validate_args "$1" "$2" +get_elapsed_time "$1" "$2" diff --git a/bin/rftw_util_ipcidr b/bin/rftw_util_ipcidr new file mode 100755 index 00000000..3f6f4770 --- /dev/null +++ b/bin/rftw_util_ipcidr @@ -0,0 +1,61 @@ +#!/bin/bash + +# Load environment variables from reconftw.cfg +# Load environment variables +if [ -f "reconftw.cfg" ]; then + source reconftw.cfg +else + echo "Error: reconftw.cfg not found!" 
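# Illustrative usage of this script (see process_ip_cidr below):
#   ./bin/rftw_util_ipcidr 10.10.0.0/24 targets.txt
# The CIDR is expanded with mapcidr into target_reconftw_ipcidr.txt; the optional second
# argument is meant to receive the expanded list with any remaining bare CIDR lines removed.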
+ exit 1 +fi + +# Help menu +function display_help() { + echo "Usage: $0 [IP CIDR] [OPTIONAL FILE]" + echo + echo "Processes an IP CIDR, maps it, and optionally saves the output to a file." + echo + echo "Options:" + echo " -h, --help Display this help and exit" + echo +} + +function process_ip_cidr() { + local IP_CIDR=$1 + local OUTFILE=$2 + local IP_CIDR_REGEX='(((25[0-5]|2[0-4][0-9]|1?[0-9][0-9]?)\.){3}(25[0-5]|2[0-4][0-9]|1?[0-9][0-9]?))(\/([8-9]|[1-2][0-9]|3[0-2]))([^0-9.]|$)|(((25[0-5]|2[0-4][0-9]|1?[0-9][0-9]?)\.){3}(25[0-5]|2[0-4][0-9]|1?[0-9][0-9]?)$)' + + if [[ $IP_CIDR =~ ^$IP_CIDR_REGEX ]]; then + echo $IP_CIDR | mapcidr -silent | anew -q target_reconftw_ipcidr.txt + + if [ -s "./target_reconftw_ipcidr.txt" ]; then + [ "$REVERSE_IP" = true ] && cat ./target_reconftw_ipcidr.txt | hakip2host | cut -d' ' -f 3 | unfurl -u domains 2>/dev/null | sed -e 's/*\.//' -e 's/\.$//' -e '/\./!d' | anew -q ./target_reconftw_ipcidr.txt + + if [[ $(cat ./target_reconftw_ipcidr.txt | wc -l) -eq 1 ]]; then + domain=$(cat ./target_reconftw_ipcidr.txt) + elif [[ $(cat ./target_reconftw_ipcidr.txt | wc -l) -gt 1 ]]; then + unset domain + list=${PWD}/target_reconftw_ipcidr.txt + fi + fi + + if [ -n "$OUTFILE" ]; then + cat $list | anew -q $OUTFILE + sed -i '/\/[0-9]*$/d' $OUTFILE + fi + else + echo "Invalid IP CIDR format provided. Please check the input." + exit 1 + fi +} + +# Main execution starts here +if [[ "$1" == "-h" || "$1" == "--help" ]]; then + display_help + exit 0 +elif [[ -z "$1" ]]; then + echo "Error: No IP CIDR provided. Use -h or --help for more information." + exit 1 +else + process_ip_cidr "$1" "$2" +fi diff --git a/bin/rftw_util_notification b/bin/rftw_util_notification new file mode 100755 index 00000000..cc91501d --- /dev/null +++ b/bin/rftw_util_notification @@ -0,0 +1,74 @@ +#!/bin/bash + +# Load environment variables from reconftw.cfg +# Load environment variables +if [ -f "reconftw.cfg" ]; then + source reconftw.cfg +else + echo "Error: reconftw.cfg not found!" + exit 1 +fi + +# Colors for console output +bblue="\033[1;34m" +yellow="\033[1;33m" +reset="\033[0m" +bred="\033[1;31m" +bgreen="\033[1;32m" + +# Help menu function +function display_help() { + echo "Usage: $0 " + echo + echo "Sends notification based on the given message and type." + echo "Types:" + echo " info - Informational message (Blue)" + echo " warn - Warning message (Yellow)" + echo " error - Error message (Red)" + echo " good - Positive message (Green)" + echo + echo "Options:" + echo " -h, --help Display this help and exit" + echo +} + +# Send notification function +function notification() { + local message=$1 + local type=$2 + local text="" + + if [[ -z "$message" || -z "$type" ]]; then + echo -e "${bred}Error: Both message and type are required.${reset}" + exit 2 + fi + + case $type in + info) + text="\n${bblue} ${message} ${reset}" + ;; + warn) + text="\n${yellow} ${message} ${reset}" + ;; + error) + text="\n${bred} ${message} ${reset}" + ;; + good) + text="\n${bgreen} ${message} ${reset}" + ;; + *) + echo -e "${bred}Error: Invalid type. 
Valid types are: info, warn, error, good.${reset}" + exit 2 + ;; + esac + + printf "${text}\n" && printf "${text} - ${domain}\n" | $NOTIFY +} + +# Main script execution +if [[ "$1" == "-h" || "$1" == "--help" ]]; then + display_help + exit 0 +fi + +notification "$1" "$2" diff --git a/bin/rftw_util_output b/bin/rftw_util_output new file mode 100755 index 00000000..d783ff03 --- /dev/null +++ b/bin/rftw_util_output @@ -0,0 +1,56 @@ +#!/bin/bash + +# Load environment variables from reconftw.cfg +# Load environment variables +if [ -f "reconftw.cfg" ]; then + source reconftw.cfg +else + echo "Error: reconftw.cfg not found!" + exit 1 +fi + +# Colors for console output +green="\033[1;32m" +red="\033[1;31m" +reset="\033[0m" + +# Help menu function +function display_help() { + echo "Usage: $0" + echo + echo "Copies content of directory specified in 'dir' variable to 'dir_output' variable." + echo "If 'dir' is different from 'dir_output', then 'dir' will be deleted." + echo "Both 'dir' and 'dir_output' are sourced from the reconftw.cfg configuration file." + echo "Options:" + echo " -h, --help Display this help and exit" + echo +} + +# Validate directory existence function +function validate_dirs() { + if [[ ! -d "$dir" ]]; then + echo -e "${red}Error: '$dir' directory does not exist.${reset}" + exit 2 + fi +} + +# Output function +function output() { + mkdir -p "$dir_output" + cp -r "$dir" "$dir_output" + if [[ "$(dirname "$dir")" != "$dir_output" ]]; then + rm -rf "$dir" + echo -e "${green}Content copied and original directory deleted.${reset}" + else + echo -e "${green}Content copied.${reset}" + fi +} + +# Main script execution +if [[ "$1" == "-h" || "$1" == "--help" ]]; then + display_help + exit 0 +fi + +validate_dirs +output diff --git a/bin/rftw_util_removebig b/bin/rftw_util_removebig new file mode 100755 index 00000000..3c806ede --- /dev/null +++ b/bin/rftw_util_removebig @@ -0,0 +1,53 @@ +#!/bin/bash + +# Load environment variables from reconftw.cfg +# Load environment variables +if [ -f "reconftw.cfg" ]; then + source reconftw.cfg +else + echo "Error: reconftw.cfg not found!" + exit 1 +fi + +# Colors for console output +green="\033[1;32m" +red="\033[1;31m" +reset="\033[0m" + +# Help menu function +function display_help() { + echo "Usage: $0" + echo + echo "Removes specific files and any files larger than 200MB from .tmp directory." + echo "Logs errors to 'LOGFILE' specified in reconftw.cfg." + echo "Options:" + echo " -h, --help Display this help and exit" + echo +} + +# Validate logfile existence function +function validate_logfile() { + if [[ ! 
-w "$(dirname "$LOGFILE")" ]]; then + echo -e "${red}Error: Unable to write to LOGFILE directory: $(dirname "$LOGFILE").${reset}" + exit 2 + fi +} + +# Remove big files function +function remove_big_files() { + rm -rf .tmp/gotator*.txt 2>>"$LOGFILE" + rm -rf .tmp/brute_recursive_wordlist.txt 2>>"$LOGFILE" + rm -rf .tmp/subs_dns_tko.txt 2>>"$LOGFILE" + rm -rf .tmp/subs_no_resolved.txt .tmp/subdomains_dns.txt .tmp/brute_dns_tko.txt .tmp/scrap_subs.txt .tmp/analytics_subs_clean.txt .tmp/gotator1.txt .tmp/gotator2.txt .tmp/passive_recursive.txt .tmp/brute_recursive_wordlist.txt .tmp/gotator1_recursive.txt .tmp/gotator2_recursive.txt 2>>"$LOGFILE" + find .tmp -type f -size +200M -exec rm -f {} + 2>>"$LOGFILE" + echo -e "${green}Specified files and large files in .tmp directory removed successfully.${reset}" +} + +# Main script execution +if [[ "$1" == "-h" || "$1" == "--help" ]]; then + display_help + exit 0 +fi + +validate_logfile +remove_big_files diff --git a/bin/rftw_util_resolver b/bin/rftw_util_resolver new file mode 100755 index 00000000..cfcee0eb --- /dev/null +++ b/bin/rftw_util_resolver @@ -0,0 +1,63 @@ +#!/bin/bash + +# Load environment variables from reconftw.cfg +# Load environment variables +if [ -f "reconftw.cfg" ]; then + source reconftw.cfg +else + echo "Error: reconftw.cfg not found!" + exit 1 +fi + +# Display help information +function display_help() { + echo "Usage: $0" + echo + echo "Update resolvers for reconftw." + echo + echo "Options:" + echo " -h, --help Display this help and exit" + echo +} + +function update_resolvers() { + if [ "$generate_resolvers" = true ]; then + if [ ! "$AXIOM" = true ]; then + if [ ! -s "$resolvers" ] || [[ $(find "$resolvers" -mtime +1 -print) ]]; then + notification "Resolvers seem older than 1 day\n Generating custom resolvers..." warn + rm -f $resolvers 2>>"$LOGFILE" + dnsvalidator -tL https://public-dns.info/nameservers.txt -threads $DNSVALIDATOR_THREADS -o $resolvers 2>>"$LOGFILE" >/dev/null + dnsvalidator -tL https://raw.githubusercontent.com/blechschmidt/massdns/master/lists/resolvers.txt -threads $DNSVALIDATOR_THREADS -o tmp_resolvers 2>>"$LOGFILE" >/dev/null + + [ -s "tmp_resolvers" ] && cat tmp_resolvers | anew -q $resolvers + [ -s "tmp_resolvers" ] && rm -f tmp_resolvers 2>>"$LOGFILE" >/dev/null + + [ ! -s "$resolvers" ] && wget -q -O - ${resolvers_url} > $resolvers + [ ! -s "$resolvers_trusted" ] && wget -q -O - ${resolvers_trusted_url} > $resolvers_trusted + notification "Updated\n" good + fi + else + notification "Checking resolvers lists...\n Accurate resolvers are the key to great results\n This may take around 10 minutes if it's not updated" warn + axiom-exec 'if [ $(find "/home/op/lists/resolvers.txt" -mtime +1 -print) ] || [ $(cat /home/op/lists/resolvers.txt | wc -l) -le 40 ] ; then dnsvalidator -tL https://public-dns.info/nameservers.txt -threads 200 -o /home/op/lists/resolvers.txt ; fi' &>/dev/null + axiom-exec "wget -q -O - ${resolvers_url} > /home/op/lists/resolvers.txt" 2>>"$LOGFILE" >/dev/null + axiom-exec "wget -q -O - ${resolvers_trusted_url} > /home/op/lists/resolvers_trusted.txt" 2>>"$LOGFILE" >/dev/null + notification "Updated\n" good + fi + generate_resolvers=false + else + if [ ! -s "$resolvers" ] || [[ $(find "$resolvers" -mtime +1 -print) ]]; then + notification "Resolvers seem older than 1 day\n Downloading new resolvers..." 
warn + wget -q -O - ${resolvers_url} > $resolvers + wget -q -O - ${resolvers_trusted_url} > $resolvers_trusted + notification "Resolvers updated\n" good + fi + fi +} + +# Check arguments and call the main function +if [[ "$1" == "-h" || "$1" == "--help" ]]; then + display_help + exit 0 +else + update_resolvers +fi diff --git a/bin/rftw_util_sendnotify b/bin/rftw_util_sendnotify new file mode 100755 index 00000000..81de22d5 --- /dev/null +++ b/bin/rftw_util_sendnotify @@ -0,0 +1,78 @@ +#!/bin/bash + +# Load environment variables from reconftw.cfg +# Load environment variables +if [ -f "reconftw.cfg" ]; then + source reconftw.cfg +else + echo "Error: reconftw.cfg not found!" + exit 1 +fi + +# Display help information +function display_help() { + echo "Usage: $0 " + echo + echo "Send a specified file to notify providers based on configurations." + echo + echo "Options:" + echo " -h, --help Display this help and exit" + echo +} + +# Main sendToNotify function +function sendToNotify() { + local file="$1" + + if [[ -z "$file" ]]; then + printf "\n${yellow}Error: No file provided to send${reset}\n" + display_help + exit 1 + fi + + if [[ -z "$NOTIFY_CONFIG" ]]; then + NOTIFY_CONFIG=~/.config/notify/provider-config.yaml + fi + + if [ -n "$(find "$file" -prune -size +8000000c)" ]; then + printf '%s is larger than 8MB, sending over transfer.sh\n' "$file" + transfer "$file" | notify + return 0 + fi + + local config_value + for provider in telegram discord slack; do + if grep -q -E "^( )?${provider}" "$NOTIFY_CONFIG"; then + case $provider in + telegram) + notification "Sending ${domain} data over Telegram" info + config_value=$(grep -E "^( )?telegram_(chat_id|api_key)" "$NOTIFY_CONFIG" | xargs) + telegram_chat_id=$(echo "$config_value" | cut -d' ' -f2) + telegram_key=$(echo "$config_value" | cut -d' ' -f4) + curl -F document=@"$file" "https://api.telegram.org/bot${telegram_key}/sendDocument?chat_id=${telegram_chat_id}" 2>>"$LOGFILE" >/dev/null + ;; + + discord) + notification "Sending ${domain} data over Discord" info + discord_url=$(grep -E "^( )?discord_webhook_url" "$NOTIFY_CONFIG" | xargs | cut -d' ' -f2) + curl -v -i -H "Accept: application/json" -H "Content-Type: multipart/form-data" -X POST -F file1=@"$file" "$discord_url" 2>>"$LOGFILE" >/dev/null + ;; + + slack) + if [[ -n "$slack_channel" ]] && [[ -n "$slack_auth" ]]; then + notification "Sending ${domain} data over Slack" info + curl -F file=@"$file" -F "initial_comment=reconftw zip file" -F channels="$slack_channel" -H "Authorization: Bearer ${slack_auth}" https://slack.com/api/files.upload 2>>"$LOGFILE" >/dev/null + fi + ;; + esac + fi + done +} + +# Check arguments and call the main function +if [[ "$1" == "-h" || "$1" == "--help" ]]; then + display_help + exit 0 +else + sendToNotify "$1" +fi diff --git a/bin/rftw_util_tools b/bin/rftw_util_tools new file mode 100755 index 00000000..9ebcbbe7 --- /dev/null +++ b/bin/rftw_util_tools @@ -0,0 +1,142 @@ +#!/bin/bash + +# Default config path +CONFIG_PATH="$RECONFTW_CFG" + +# Check if the config file exists +if [ -f "$CONFIG_PATH" ]; then + source "$CONFIG_PATH" +else + echo "Error: reconftw.cfg not found at $CONFIG_PATH!" 
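+    # Hint: this branch is reached when the RECONFTW_CFG environment variable is unset or points
+    # to a file that does not exist. Assumed example of a working invocation (the paths and the
+    # repo-root working directory are placeholders, not values shipped with reconftw):
+    #   export RECONFTW_CFG="$HOME/Tools/reconftw/reconftw.cfg"
+    #   ./bin/rftw_util_tools -t "$HOME/Tools"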
+ exit 1 +fi + +# Help menu +function help_menu() { + echo -e "Usage: ./tools_installed_script.sh [OPTIONS]" + echo -e "Options:" + echo -e " -t, --tools-dir DIRECTORY Specify the tools directory path" + echo -e " -h, --help Display this help menu" + exit 1 +} + +# Main function +function tools_installed() { + printf "\n\n${bgreen}#######################################################################${reset}\n" + printf "${bblue} Checking installed tools ${reset}\n\n" + + allinstalled=true + + [ -n "$GOPATH" ] || { printf "${bred} [*] GOPATH var [NO]${reset}\n"; allinstalled=false;} + [ -n "$GOROOT" ] || { printf "${bred} [*] GOROOT var [NO]${reset}\n"; allinstalled=false;} + [ -n "$PATH" ] || { printf "${bred} [*] PATH var [NO]${reset}\n"; allinstalled=false;} + [ -f "$tools/dorks_hunter/dorks_hunter.py" ] || { printf "${bred} [*] dorks_hunter [NO]${reset}\n"; allinstalled=false;} + [ -f "$tools/brutespray/brutespray.py" ] || { printf "${bred} [*] brutespray [NO]${reset}\n"; allinstalled=false;} + [ -f "$tools/fav-up/favUp.py" ] || { printf "${bred} [*] fav-up [NO]${reset}\n"; allinstalled=false;} + [ -f "$tools/Corsy/corsy.py" ] || { printf "${bred} [*] Corsy [NO]${reset}\n"; allinstalled=false;} + [ -f "$tools/testssl.sh/testssl.sh" ] || { printf "${bred} [*] testssl [NO]${reset}\n"; allinstalled=false;} + [ -f "$tools/CMSeeK/cmseek.py" ] || { printf "${bred} [*] CMSeeK [NO]${reset}\n"; allinstalled=false;} + [ -f "${fuzz_wordlist}" ] || { printf "${bred} [*] OneListForAll [NO]${reset}\n"; allinstalled=false;} + [ -f "${lfi_wordlist}" ] || { printf "${bred} [*] lfi_wordlist [NO]${reset}\n"; allinstalled=false;} + [ -f "${ssti_wordlist}" ] || { printf "${bred} [*] ssti_wordlist [NO]${reset}\n"; allinstalled=false;} + [ -f "${subs_wordlist}" ] || { printf "${bred} [*] subs_wordlist [NO]${reset}\n"; allinstalled=false;} + [ -f "${subs_wordlist_big}" ] || { printf "${bred} [*] subs_wordlist_big [NO]${reset}\n"; allinstalled=false;} + [ -f "${resolvers}" ] || { printf "${bred} [*] resolvers [NO]${reset}\n"; allinstalled=false;} + [ -f "${resolvers_trusted}" ] || { printf "${bred} [*] resolvers_trusted [NO]${reset}\n"; allinstalled=false;} + [ -f "$tools/xnLinkFinder/xnLinkFinder.py" ] || { printf "${bred} [*] xnLinkFinder [NO]${reset}\n"; allinstalled=false;} + [ -f "$tools/waymore/waymore.py" ] || { printf "${bred} [*] waymore [NO]${reset}\n"; allinstalled=false;} + [ -f "$tools/commix/commix.py" ] || { printf "${bred} [*] commix [NO]${reset}\n"; allinstalled=false;} + [ -f "$tools/getjswords.py" ] || { printf "${bred} [*] getjswords [NO]${reset}\n"; allinstalled=false;} + [ -f "$tools/JSA/jsa.py" ] || { printf "${bred} [*] JSA [NO]${reset}\n"; allinstalled=false;} + [ -f "$tools/cloud_enum/cloud_enum.py" ] || { printf "${bred} [*] cloud_enum [NO]${reset}\n"; allinstalled=false;} + [ -f "$tools/ultimate-nmap-parser/ultimate-nmap-parser.sh" ] || { printf "${bred} [*] nmap-parse-output [NO]${reset}\n"; allinstalled=false;} + [ -f "$tools/pydictor/pydictor.py" ] || { printf "${bred} [*] pydictor [NO]${reset}\n"; allinstalled=false;} + [ -f "$tools/urless/urless/urless.py" ] || { printf "${bred} [*] urless [NO]${reset}\n"; allinstalled=false;} + [ -f "$tools/smuggler/smuggler.py" ] || { printf "${bred} [*] smuggler [NO]${reset}\n"; allinstalled=false;} + [ -f "$tools/regulator/main.py" ] || { printf "${bred} [*] regulator [NO]${reset}\n"; allinstalled=false;} + which github-endpoints &>/dev/null || { printf "${bred} [*] github-endpoints [NO]${reset}\n"; allinstalled=false;} + which 
github-subdomains &>/dev/null || { printf "${bred} [*] github-subdomains [NO]${reset}\n"; allinstalled=false;} + which gitlab-subdomains &>/dev/null || { printf "${bred} [*] gitlab-subdomains [NO]${reset}\n"; allinstalled=false;} + which katana &>/dev/null || { printf "${bred} [*] katana [NO]${reset}\n"; allinstalled=false;} + which wafw00f &>/dev/null || { printf "${bred} [*] wafw00f [NO]${reset}\n"; allinstalled=false;} + which dnsvalidator &>/dev/null || { printf "${bred} [*] dnsvalidator [NO]${reset}\n"; allinstalled=false;} + which gowitness &>/dev/null || { printf "${bred} [*] gowitness [NO]${reset}\n"; allinstalled=false;} + which amass &>/dev/null || { printf "${bred} [*] Amass [NO]${reset}\n"; allinstalled=false;} + which dnsx &>/dev/null || { printf "${bred} [*] dnsx [NO]${reset}\n"; allinstalled=false;} + which gotator &>/dev/null || { printf "${bred} [*] gotator [NO]${reset}\n"; allinstalled=false;} + which nuclei &>/dev/null || { printf "${bred} [*] Nuclei [NO]${reset}\n"; allinstalled=false;} + [ -d ${NUCLEI_TEMPLATES_PATH} ] || { printf "${bred} [*] Nuclei templates [NO]${reset}\n"; allinstalled=false;} + [ -d ${tools}/fuzzing-templates ] || { printf "${bred} [*] Fuzzing templates [NO]${reset}\n"; allinstalled=false;} + which gf &>/dev/null || { printf "${bred} [*] Gf [NO]${reset}\n"; allinstalled=false;} + which Gxss &>/dev/null || { printf "${bred} [*] Gxss [NO]${reset}\n"; allinstalled=false;} + which subjs &>/dev/null || { printf "${bred} [*] subjs [NO]${reset}\n"; allinstalled=false;} + which ffuf &>/dev/null || { printf "${bred} [*] ffuf [NO]${reset}\n"; allinstalled=false;} + which massdns &>/dev/null || { printf "${bred} [*] Massdns [NO]${reset}\n"; allinstalled=false;} + which qsreplace &>/dev/null || { printf "${bred} [*] qsreplace [NO]${reset}\n"; allinstalled=false;} + which interlace &>/dev/null || { printf "${bred} [*] interlace [NO]${reset}\n"; allinstalled=false;} + which anew &>/dev/null || { printf "${bred} [*] Anew [NO]${reset}\n"; allinstalled=false;} + which unfurl &>/dev/null || { printf "${bred} [*] unfurl [NO]${reset}\n"; allinstalled=false;} + which crlfuzz &>/dev/null || { printf "${bred} [*] crlfuzz [NO]${reset}\n"; allinstalled=false;} + which httpx &>/dev/null || { printf "${bred} [*] Httpx [NO]${reset}\n${reset}"; allinstalled=false;} + which jq &>/dev/null || { printf "${bred} [*] jq [NO]${reset}\n${reset}"; allinstalled=false;} + which notify &>/dev/null || { printf "${bred} [*] notify [NO]${reset}\n${reset}"; allinstalled=false;} + which dalfox &>/dev/null || { printf "${bred} [*] dalfox [NO]${reset}\n${reset}"; allinstalled=false;} + which puredns &>/dev/null || { printf "${bred} [*] puredns [NO]${reset}\n${reset}"; allinstalled=false;} + which emailfinder &>/dev/null || { printf "${bred} [*] emailfinder [NO]${reset}\n"; allinstalled=false;} + which analyticsrelationships &>/dev/null || { printf "${bred} [*] analyticsrelationships [NO]${reset}\n"; allinstalled=false;} + which mapcidr &>/dev/null || { printf "${bred} [*] mapcidr [NO]${reset}\n"; allinstalled=false;} + which ppfuzz &>/dev/null || { printf "${bred} [*] ppfuzz [NO]${reset}\n"; allinstalled=false;} + which cdncheck &>/dev/null || { printf "${bred} [*] cdncheck [NO]${reset}\n"; allinstalled=false;} + which interactsh-client &>/dev/null || { printf "${bred} [*] interactsh-client [NO]${reset}\n"; allinstalled=false;} + which tlsx &>/dev/null || { printf "${bred} [*] tlsx [NO]${reset}\n"; allinstalled=false;} + which smap &>/dev/null || { printf "${bred} [*] smap [NO]${reset}\n"; 
allinstalled=false;} + which gitdorks_go &>/dev/null || { printf "${bred} [*] gitdorks_go [NO]${reset}\n"; allinstalled=false;} + which ripgen &>/dev/null || { printf "${bred} [*] ripgen [NO]${reset}\n${reset}"; allinstalled=false;} + which dsieve &>/dev/null || { printf "${bred} [*] dsieve [NO]${reset}\n${reset}"; allinstalled=false;} + which inscope &>/dev/null || { printf "${bred} [*] inscope [NO]${reset}\n${reset}"; allinstalled=false;} + which enumerepo &>/dev/null || { printf "${bred} [*] enumerepo [NO]${reset}\n${reset}"; allinstalled=false;} + which Web-Cache-Vulnerability-Scanner &>/dev/null || { printf "${bred} [*] Web-Cache-Vulnerability-Scanner [NO]${reset}\n"; allinstalled=false;} + which subfinder &>/dev/null || { printf "${bred} [*] subfinder [NO]${reset}\n${reset}"; allinstalled=false;} + which byp4xx &>/dev/null || { printf "${bred} [*] byp4xx [NO]${reset}\n${reset}"; allinstalled=false;} + which ghauri &>/dev/null || { printf "${bred} [*] ghauri [NO]${reset}\n${reset}"; allinstalled=false;} + which hakip2host &>/dev/null || { printf "${bred} [*] hakip2host [NO]${reset}\n${reset}"; allinstalled=false;} + which gau &>/dev/null || { printf "${bred} [*] gau [NO]${reset}\n${reset}"; allinstalled=false;} + which crt &>/dev/null || { printf "${bred} [*] crt [NO]${reset}\n${reset}"; allinstalled=false;} + which gitleaks &>/dev/null || { printf "${bred} [*] gitleaks [NO]${reset}\n${reset}"; allinstalled=false;} + which trufflehog &>/dev/null || { printf "${bred} [*] trufflehog [NO]${reset}\n${reset}"; allinstalled=false;} + which s3scanner &>/dev/null || { printf "${bred} [*] s3scanner [NO]${reset}\n${reset}"; allinstalled=false;} + + if [ "${allinstalled}" = true ]; then + printf "${bgreen} Good! All installed! ${reset}\n\n" + else + printf "\n${yellow} Try running the installer script again ./install.sh" + printf "\n${yellow} If it fails for any reason try to install manually the tools missed" + printf "\n${yellow} Finally remember to set the ${bred}\$tools${yellow} variable at the start of this script" + printf "\n${yellow} If nothing works and the world is gonna end you can always ping me :D ${reset}\n\n" + fi + + printf "${bblue} Tools check finished\n" + printf "${bgreen}#######################################################################\n${reset}" + +} + +# Parse command-line arguments +tools_dir="" +while [[ "$#" -gt 0 ]]; do + case $1 in + -t|--tools-dir) tools_dir="$2"; shift ;; + -h|--help) help_menu ;; + *) echo -e "${bred}Unknown parameter passed: $1${reset}" >&2; help_menu ;; + esac + shift +done + +# Validate tools directory +if [ -z "$tools_dir" ]; then + echo -e "${bred}Error: Tools directory not specified.${reset}" >&2 + help_menu + exit 1 +fi + +# Execute the main function +tools_installed \ No newline at end of file diff --git a/bin/rftw_util_version b/bin/rftw_util_version new file mode 100755 index 00000000..5c540742 --- /dev/null +++ b/bin/rftw_util_version @@ -0,0 +1,56 @@ +#!/bin/bash + + +# Default config path +CONFIG_PATH="$RECONFTW_CFG" + +# Check if the config file exists +if [ -f "$CONFIG_PATH" ]; then + source "$CONFIG_PATH" +else + echo "Error: reconftw.cfg not found at $CONFIG_PATH!" + exit 1 +fi + +# Help menu +function help_menu() { + echo -e "Usage: ./check_version_script.sh" + echo -e "Checks for updates to the current git repository." + echo -e "Options:" + echo -e " -h, --help Display this help menu" + exit 1 +} + +# Main function +function check_version() { + # Check if current directory is a git repository + if ! 
git rev-parse --is-inside-work-tree > /dev/null 2>&1; then + echo -e "${bred}Error: This is not a git repository.${reset}" >&2 + exit 1 + fi + + timeout 10 git fetch + exit_status=$? + if [ $exit_status -eq 0 ]; then + BRANCH=$(git rev-parse --abbrev-ref HEAD) + HEADHASH=$(git rev-parse HEAD) + UPSTREAMHASH=$(git rev-parse "${BRANCH}"@\{upstream\}) + if [ "$HEADHASH" != "$UPSTREAMHASH" ]; then + echo -e "\n${yellow} There is a new version, run ./install.sh to get the latest version${reset}\n\n" + fi + else + echo -e "\n${bred} Unable to check updates ${reset}\n\n" >&2 + fi +} + +# Parse command-line arguments +while [[ "$#" -gt 0 ]]; do + case $1 in + -h|--help) help_menu ;; + *) echo -e "${bred}Unknown parameter passed: $1${reset}" >&2; help_menu ;; + esac + shift +done + +# Execute the main function +check_version \ No newline at end of file diff --git a/bin/rftw_util_zipfolder b/bin/rftw_util_zipfolder new file mode 100755 index 00000000..5f171513 --- /dev/null +++ b/bin/rftw_util_zipfolder @@ -0,0 +1,60 @@ +#!/bin/bash + +# Load environment variables from reconftw.cfg +# Load environment variables +if [ -f "reconftw.cfg" ]; then + source reconftw.cfg +else + echo "Error: reconftw.cfg not found!" + exit 1 +fi + +# Colors for console output +yellow="\033[1;33m" +red="\033[1;31m" +reset="\033[0m" + +# Help menu function +function display_help() { + echo "Usage: $0 [DOMAIN]" + echo + echo "Zips the specified domain's output folder and sends the zip file." + echo "Options:" + echo " -h, --help Display this help and exit" + echo +} + +# Validate arguments function +function validate_args() { + if [[ -z "$1" ]]; then + echo -e "${red}Error: DOMAIN is required.${reset}" + display_help + exit 1 + fi +} + +# Main zipping and sending function +function zip_send_output_folder() { + local domain="$1" + local zip_name1=$(date +"%Y_%m_%d-%H.%M.%S") + local zip_name="${zip_name1}_${domain}.zip" + (cd "$dir" && zip -r "$zip_name" .) + + echo "Creating and sending zip file "${dir}/${zip_name}"" + + if [ -s "${dir}/$zip_name" ]; then + sendToNotify "$dir/$zip_name" + rm -f "${dir}/$zip_name" + else + notification "No Zip file to send" warn + fi +} + +# Main script execution +if [[ "$1" == "-h" || "$1" == "--help" ]]; then + display_help + exit 0 +fi + +validate_args "$1" +zip_send_output_folder "$1" diff --git a/bin/rftw_vuln_4xx b/bin/rftw_vuln_4xx new file mode 100755 index 00000000..e7b202b0 --- /dev/null +++ b/bin/rftw_vuln_4xx @@ -0,0 +1,77 @@ +#!/bin/bash + +# Load environment variables from reconftw.cfg +# Load environment variables +if [ -f "reconftw.cfg" ]; then + source reconftw.cfg +else + echo "Error: reconftw.cfg not found!" + exit 1 +fi + +# Colors +yellow="\033[1;33m" +reset="\033[0m" + +# Help menu function +function display_help() { + echo "Usage: $0 [OPTIONS]" + echo + echo "Perform 4XX Bypass checks on provided inputs." + echo "Options:" + echo " -h, --help Display this help and exit" + echo +} + +# Validate requirements function +function validate_requirements() { + if ! [ -x "$(command -v byp4xx)" ]; then + echo "Error: byp4xx not found." >&2 + exit 1 + fi + if ! [ -f "fuzzing/fuzzing_full.txt" ]; then + echo "Error: fuzzing_full.txt not found." >&2 + exit 1 + fi +} + +# 4XX Bypass test function +function bypass_4xx_test() { + if { [ ! 
-f "$called_fn_dir/.4xxbypass" ] || [ "$DIFF" = true ]; } && [ "$BYPASSER4XX" = true ]; then + if [[ $(cat fuzzing/fuzzing_full.txt 2>/dev/null | grep -E '^4' | grep -Ev '^404' | cut -d ' ' -f3 | wc -l) -le 1000 ]] || [ "$DEEP" = true ]; then + echo "[*] Starting 403 bypass" + cat $dir/fuzzing/fuzzing_full.txt 2>/dev/null | grep -E '^4' | grep -Ev '^404' | cut -d ' ' -f3 > $dir/.tmp/403test.txt + cd "$tools/byp4xx" || { echo "Failed to cd directory in 4xxbypass @ line ${LINENO}"; exit 1; } + byp4xx -threads $BYP4XX_THREADS $dir/.tmp/403test.txt > $dir/.tmp/byp4xx.txt + cd "$dir" || { echo "Failed to cd directory in 4xxbypass @ line ${LINENO}"; exit 1; } + [ -s ".tmp/byp4xx.txt" ] && cat .tmp/byp4xx.txt | anew -q vulns/byp4xx.txt + echo "[+] Results are saved in vulns/byp4xx.txt" + else + echo "[!] Too many URLs to bypass, skipping" + fi + else + if [ "$BYPASSER4XX" = false ]; then + echo -e "\n${yellow} 4xxbypass skipped in this mode or defined in reconftw.cfg ${reset}" + else + echo -e "${yellow} 4xxbypass is already processed, to force executing 4xxbypass delete\n $called_fn_dir/.4xxbypass ${reset}\n" + fi + fi +} + +# Main script execution +while (( "$#" )); do + case "$1" in + -h|--help) + display_help + exit 0 + ;; + *) + echo "Unknown parameter passed: $1" + display_help + exit 1 + ;; + esac +done + +validate_requirements +bypass_4xx_test diff --git a/bin/rftw_vuln_brokenlink b/bin/rftw_vuln_brokenlink new file mode 100755 index 00000000..56fc9b8b --- /dev/null +++ b/bin/rftw_vuln_brokenlink @@ -0,0 +1,68 @@ +#!/bin/bash + +# Load environment variables from reconftw.cfg +# Load environment variables +if [ -f "reconftw.cfg" ]; then + source reconftw.cfg +else + echo "Error: reconftw.cfg not found!" + exit 1 +fi + +# Help menu +help_menu() { + echo "Usage: $0 [OPTIONS]" + echo "Broken Links Check using katana and axiom" + echo "" + echo "Options:" + echo " -d, --deep Set deep mode (optional)" + echo " -h, --help Display this help message and exit" + exit 0 +} + +# Input validation +DEEP_MODE=false +while [ "$1" != "" ]; do + case $1 in + -d | --deep ) DEEP_MODE=true + ;; + -h | --help ) help_menu + exit + ;; + * ) echo "Unknown option: $1" + exit 1 + esac + shift +done + +broken_links_check() { + local deep_mode=$1 + + if { [ ! -f "$called_fn_dir/.${FUNCNAME[0]}" ] || [ "$DIFF" = true ]; } && [ "$BROKENLINKS" = true ]; then + start_func ${FUNCNAME[0]} "Broken links checks" + + # The code remains mostly unchanged, with only minor adjustments for clarity. + [ ! -s ".tmp/webs_all.txt" ] && cat webs/webs.txt webs/webs_uncommon_ports.txt 2>/dev/null | anew -q .tmp/webs_all.txt + if [ ! 
"$AXIOM" = true ]; then + depth_level=$([ "$deep_mode" = true ] && echo 3 || echo 2) + [ -s ".tmp/webs_all.txt" ] && katana -silent -list .tmp/webs_all.txt -jc -kf all -c $KATANA_THREADS -d $depth_level -o .tmp/katana.txt 2>>"$LOGFILE" >/dev/null + [ -s ".tmp/katana.txt" ] && sed -i '/^.\{2048\}./d' .tmp/katana.txt + else + depth_level=$([ "$deep_mode" = true ] && echo 3 || echo 2) + [ -s ".tmp/webs_all.txt" ] && axiom-scan .tmp/webs_all.txt -m katana -jc -kf all -d $depth_level -o .tmp/katana.txt $AXIOM_EXTRA_ARGS 2>>"$LOGFILE" >/dev/null + [ -s ".tmp/katana.txt" ] && sed -i '/^.\{2048\}./d' .tmp/katana.txt + fi + [ -s ".tmp/katana.txt" ] && cat .tmp/katana.txt | sort -u | httpx -follow-redirects -random-agent -status-code -threads $HTTPX_THREADS -rl $HTTPX_RATELIMIT -timeout $HTTPX_TIMEOUT -silent -retries 2 -no-color | grep "\[4" | cut -d ' ' -f1 | anew -q .tmp/brokenLinks_total.txt + NUMOFLINES=$(cat .tmp/brokenLinks_total.txt 2>>"$LOGFILE" | anew vulns/brokenLinks.txt | sed '/^$/d' | wc -l) + notification "${NUMOFLINES} new broken links found" info + end_func "Results are saved in vulns/brokenLinks.txt" ${FUNCNAME[0]} + else + if [ "$BROKENLINKS" = false ]; then + printf "\n${yellow} ${FUNCNAME[0]} skipped in this mode or defined in reconftw.cfg ${reset}\n" + else + printf "${yellow} ${FUNCNAME[0]} is already processed, to force executing ${FUNCNAME[0]} delete\n $called_fn_dir/.${FUNCNAME[0]} ${reset}\n\n" + fi + fi +} + +broken_links_check $DEEP_MODE diff --git a/bin/rftw_vuln_comminject b/bin/rftw_vuln_comminject new file mode 100755 index 00000000..a641bfbe --- /dev/null +++ b/bin/rftw_vuln_comminject @@ -0,0 +1,76 @@ +#!/bin/bash + +# Load environment variables from reconftw.cfg +# Load environment variables +if [ -f "reconftw.cfg" ]; then + source reconftw.cfg +else + echo "Error: reconftw.cfg not found!" + exit 1 +fi + +# Colors +yellow="\033[1;33m" +reset="\033[0m" + +# Help menu function +function display_help() { + echo "Usage: $0 [OPTIONS]" + echo + echo "Perform Command Injection checks on provided inputs." + echo "Options:" + echo " -h, --help Display this help and exit" + echo +} + +# Validate requirements function +function validate_requirements() { + if ! [ -x "$(command -v python3)" ]; then + echo "Error: python3 not found." >&2 + exit 1 + fi + if ! [ -f "gf/rce.txt" ]; then + echo "Error: rce.txt not found." >&2 + exit 1 + fi +} + +# Command Injection test function +function command_injection_test() { + if { [ ! -f "$called_fn_dir/.command_injection" ] || [ "$DIFF" = true ]; } && [ "$COMM_INJ" = true ] && [ -s "gf/rce.txt" ]; then + echo "[*] Starting Command Injection checks" + [ -s "gf/rce.txt" ] && cat gf/rce.txt | qsreplace FUZZ | sed '/FUZZ/!d' | anew -q .tmp/tmp_rce.txt + if [ "$DEEP" = true ] || [[ $(cat .tmp/tmp_rce.txt | wc -l) -le $DEEP_LIMIT ]]; then + [ -s ".tmp/tmp_rce.txt" ] && python3 $tools/commix/commix.py --batch -m .tmp/tmp_rce.txt --output-dir vulns/command_injection.txt 2>>"$LOGFILE" >/dev/null + echo "[+] Results are saved in vulns/command_injection folder" + else + echo "[!] Skipping Command injection: Too many URLs to test, try with --deep flag" + fi + else + if [ "$COMM_INJ" = false ]; then + echo -e "\n${yellow} command_injection_test skipped in this mode or defined in reconftw.cfg ${reset}" + elif [ ! 
-s "gf/rce.txt" ]; then + echo -e "\n${yellow} command_injection_test No URLs potentially vulnerables to Command Injection ${reset}\n" + else + echo -e "${yellow} command_injection_test is already processed, to force executing command_injection_test delete\n $called_fn_dir/.command_injection ${reset}\n" + fi + fi +} + +# Main script execution +while (( "$#" )); do + case "$1" in + -h|--help) + display_help + exit 0 + ;; + *) + echo "Unknown parameter passed: $1" + display_help + exit 1 + ;; + esac +done + +validate_requirements +command_injection_test diff --git a/bin/rftw_vuln_cors b/bin/rftw_vuln_cors new file mode 100755 index 00000000..1e11c67b --- /dev/null +++ b/bin/rftw_vuln_cors @@ -0,0 +1,82 @@ +#!/bin/bash + +# Load environment variables from reconftw.cfg +# Load environment variables +if [ -f "reconftw.cfg" ]; then + source reconftw.cfg +else + echo "Error: reconftw.cfg not found!" + exit 1 +fi + +# Help menu +function display_help() { + echo "Usage: $0 [OPTIONS]" + echo + echo "Perform CORS scan on provided inputs." + echo "Options:" + echo " -h, --help Display this help and exit" + echo +} + +# Validate the required tools and files +function validate_requirements() { + if ! [ -x "$(command -v python3)" ]; then + echo "Error: python3 is not installed." >&2 + exit 1 + fi + if [ ! -f "$tools/Corsy/corsy.py" ]; then + echo "Error: Corsy tool is missing." >&2 + exit 1 + fi + if [ ! -f "webs/webs.txt" ] || [ ! -f "webs/webs_uncommon_ports.txt" ]; then + echo "Error: Required input files are missing." >&2 + exit 1 + fi +} + +# Main CORS function +function cors_scan() { + if { [ ! -f "$called_fn_dir/.cors_scan" ] || [ "$DIFF" = true ]; } && [ "$CORS" = true ]; then + echo "[*] Starting CORS Scan" + + # Check and consolidate input files + [ ! -s ".tmp/webs_all.txt" ] && cat webs/webs.txt webs/webs_uncommon_ports.txt 2>/dev/null | anew -q .tmp/webs_all.txt + + # Perform CORS scan + [ -s ".tmp/webs_all.txt" ] && python3 $tools/Corsy/corsy.py -i .tmp/webs_all.txt -o vulns/cors.txt 2>>"$LOGFILE" >/dev/null + + echo "[+] Results are saved in vulns/cors.txt" + else + if [ "$CORS" = false ]; then + echo -e "\n${yellow} cors_scan skipped in this mode or defined in reconftw.cfg ${reset}" + else + echo -e "${yellow} cors_scan is already processed, to force executing cors_scan delete\n $called_fn_dir/.cors_scan ${reset}\n" + fi + fi +} + +# Main script execution +if [ "$#" -eq 0 ]; then + display_help + exit 0 +fi + +while (( "$#" )); do + case "$1" in + -h|--help) + display_help + exit 0 + ;; + *) + echo "Unknown parameter passed: $1" + display_help + exit 1 + ;; + esac + shift +done + +validate_requirements +cors_scan + diff --git a/bin/rftw_vuln_crlf b/bin/rftw_vuln_crlf new file mode 100755 index 00000000..2e7c559d --- /dev/null +++ b/bin/rftw_vuln_crlf @@ -0,0 +1,83 @@ +#!/bin/bash + +# Load environment variables from reconftw.cfg +# Load environment variables +if [ -f "reconftw.cfg" ]; then + source reconftw.cfg +else + echo "Error: reconftw.cfg not found!" + exit 1 +fi + +# Colors +yellow="\033[1;33m" +reset="\033[0m" + +# Help menu function +function display_help() { + echo "Usage: $0 [OPTIONS]" + echo + echo "Perform CRLF checks on provided inputs." + echo "Options:" + echo " -d, --deep Perform deep scanning" + echo " -h, --help Display this help and exit" + echo +} + +# Validate requirements function +function validate_requirements() { + if ! [ -x "$(command -v crlfuzz)" ]; then + echo "Error: crlfuzz is not installed." 
>&2 + exit 1 + fi +} + +# CRLF checks function +function crlf_checks() { + local deep_flag=$1 + + if { [ ! -f "$called_fn_dir/.crlf_checks" ] || [ "$DIFF" = true ]; } && [ "$CRLF_CHECKS" = true ]; then + echo "[*] Starting CRLF checks" + + if [ ! -s ".tmp/webs_all.txt" ]; then + cat webs/webs.txt webs/webs_uncommon_ports.txt 2>/dev/null | anew -q .tmp/webs_all.txt + fi + + if [ "$deep_flag" = true ] || [[ $(cat .tmp/webs_all.txt | wc -l) -le $DEEP_LIMIT ]]; then + crlfuzz -l .tmp/webs_all.txt -o vulns/crlf.txt 2>>"$LOGFILE" >/dev/null + echo "[+] Results are saved in vulns/crlf.txt" + else + echo "[!] Skipping CRLF: Too many URLs to test, try with --deep flag" + fi + else + if [ "$CRLF_CHECKS" = false ]; then + echo -e "\n${yellow} crlf_checks skipped in this mode or defined in reconftw.cfg ${reset}" + else + echo -e "${yellow} crlf_checks is already processed, to force executing crlf_checks delete\n $called_fn_dir/.crlf_checks ${reset}\n" + fi + fi +} + +# Main script execution +deep_flag=false + +while (( "$#" )); do + case "$1" in + -d|--deep) + deep_flag=true + shift + ;; + -h|--help) + display_help + exit 0 + ;; + *) + echo "Unknown parameter passed: $1" + display_help + exit 1 + ;; + esac +done + +validate_requirements +crlf_checks $deep_flag diff --git a/bin/rftw_vuln_fuzzparam b/bin/rftw_vuln_fuzzparam new file mode 100755 index 00000000..2e479cf4 --- /dev/null +++ b/bin/rftw_vuln_fuzzparam @@ -0,0 +1,81 @@ +#!/bin/bash + +# Load environment variables from reconftw.cfg +# Load environment variables +if [ -f "reconftw.cfg" ]; then + source reconftw.cfg +else + echo "Error: reconftw.cfg not found!" + exit 1 +fi + +# Colors +yellow="\033[1;33m" +reset="\033[0m" + +# Help menu function +function display_help() { + echo "Usage: $0 [OPTIONS]" + echo + echo "Perform Fuzzing params values checks on provided inputs." + echo "Options:" + echo " -h, --help Display this help and exit" + echo +} + +# Validate requirements function +function validate_requirements() { + if ! [ -x "$(command -v nuclei)" ]; then + echo "Error: nuclei not found." >&2 + exit 1 + fi + if ! [ -d "$tools/fuzzing-templates" ]; then + echo "Error: fuzzing-templates directory not found." >&2 + exit 1 + fi +} + +# Fuzzing params values test function +function fuzzparams_test() { + if { [ ! -f "$called_fn_dir/.fuzzparams" ] || [ "$DIFF" = true ]; } && [ "$FUZZPARAMS" = true ]; then + echo "[*] Starting Fuzzing params values checks" + if [ "$DEEP" = true ] || [[ $(cat webs/url_extract.txt | wc -l) -le $DEEP_LIMIT2 ]]; then + if [ ! "$AXIOM" = true ]; then + nuclei -update 2>>"$LOGFILE" >/dev/null + git -C $tools/fuzzing-templates pull + cat webs/url_extract.txt 2>/dev/null | nuclei -silent -retries 3 -rl $NUCLEI_RATELIMIT -t $tools/fuzzing-templates -o .tmp/fuzzparams.txt + else + axiom-exec "git clone https://github.com/projectdiscovery/fuzzing-templates /home/op/fuzzing-templates" &>/dev/null + axiom-scan webs/url_extract.txt -m nuclei -nh -retries 3 -w /home/op/fuzzing-templates -rl $NUCLEI_RATELIMIT -o .tmp/fuzzparams.txt $AXIOM_EXTRA_ARGS 2>>"$LOGFILE" >/dev/null + fi + [ -s ".tmp/fuzzparams.txt" ] && cat .tmp/fuzzparams.txt | anew -q vulns/fuzzparams.txt + echo "[+] Results are saved in vulns/fuzzparams.txt" + else + echo "[!] 
Fuzzing params values: Too many entries to test, try with --deep flag" + fi + else + if [ "$FUZZPARAMS" = false ]; then + echo -e "\n${yellow} fuzzparams skipped in this mode or defined in reconftw.cfg ${reset}" + else + echo -e "${yellow} fuzzparams is already processed, to force executing fuzzparams delete\n $called_fn_dir/.fuzzparams ${reset}\n" + fi + fi +} + +# Main script execution +while (( "$#" )); do + case "$1" in + -h|--help) + display_help + exit 0 + ;; + *) + echo "Unknown parameter passed: $1" + display_help + exit 1 + ;; + esac +done + +validate_requirements +fuzzparams_test diff --git a/bin/rftw_vuln_lfi b/bin/rftw_vuln_lfi new file mode 100755 index 00000000..209d35b7 --- /dev/null +++ b/bin/rftw_vuln_lfi @@ -0,0 +1,83 @@ +#!/bin/bash + +# Load environment variables from reconftw.cfg +# Load environment variables +if [ -f "reconftw.cfg" ]; then + source reconftw.cfg +else + echo "Error: reconftw.cfg not found!" + exit 1 +fi + +# Colors +yellow="\033[1;33m" +reset="\033[0m" + +# Help menu function +function display_help() { + echo "Usage: $0 [OPTIONS]" + echo + echo "Perform LFI checks on provided inputs." + echo "Options:" + echo " -d, --deep Perform deep scanning" + echo " -h, --help Display this help and exit" + echo +} + +# Validate requirements function +function validate_requirements() { + if ! [ -x "$(command -v ffuf)" ] || ! [ -x "$(command -v interlace)" ]; then + echo "Error: ffuf and/or interlace are not installed." >&2 + exit 1 + fi +} + +# LFI checks function +function lfi_checks() { + local deep_flag=$1 + + if { [ ! -f "$called_fn_dir/.lfi_checks" ] || [ "$DIFF" = true ]; } && [ "$LFI" = true ] && [ -s "gf/lfi.txt" ]; then + echo "[*] Starting LFI checks" + + cat gf/lfi.txt | qsreplace FUZZ | sed '/FUZZ/!d' | anew -q .tmp/tmp_lfi.txt + + if [ "$deep_flag" = true ] || [[ $(cat .tmp/tmp_lfi.txt | wc -l) -le $DEEP_LIMIT ]]; then + interlace -tL .tmp/tmp_lfi.txt -threads ${INTERLACE_THREADS} -c "ffuf -v -r -t ${FFUF_THREADS} -rate ${FFUF_RATELIMIT} -H \"${HEADER}\" -w ${lfi_wordlist} -u \"_target_\" -mr \"root:\" " 2>/dev/null | grep "URL" | sed 's/| URL | //' | anew -q vulns/lfi.txt + echo "[+] Results are saved in vulns/lfi.txt" + else + echo "[!] Skipping LFI: Too many URLs to test, try with --deep flag" + fi + else + if [ "$LFI" = false ]; then + echo -e "\n${yellow} lfi_checks skipped in this mode or defined in reconftw.cfg ${reset}" + elif [ ! -s "gf/lfi.txt" ]; then + echo -e "\n${yellow} lfi_checks No URLs potentially vulnerables to LFI ${reset}\n" + else + echo -e "${yellow} lfi_checks is already processed, to force executing lfi_checks delete\n $called_fn_dir/.lfi_checks ${reset}\n" + fi + fi +} + +# Main script execution +deep_flag=false + +while (( "$#" )); do + case "$1" in + -d|--deep) + deep_flag=true + shift + ;; + -h|--help) + display_help + exit 0 + ;; + *) + echo "Unknown parameter passed: $1" + display_help + exit 1 + ;; + esac +done + +validate_requirements +lfi_checks $deep_flag diff --git a/bin/rftw_vuln_openredir b/bin/rftw_vuln_openredir new file mode 100755 index 00000000..53bc5fd6 --- /dev/null +++ b/bin/rftw_vuln_openredir @@ -0,0 +1,89 @@ +#!/bin/bash + +# Load environment variables from reconftw.cfg +# Load environment variables +if [ -f "reconftw.cfg" ]; then + source reconftw.cfg +else + echo "Error: reconftw.cfg not found!" 
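+    # Hint: unlike the scripts that read $RECONFTW_CFG, this one sources reconftw.cfg from the
+    # current working directory and also expects the target's gf/, vulns/ and .tmp/ folders there.
+    # Assumed example (directory and config paths are placeholders; assumes bin/ is on PATH):
+    #   cd Recon/example.com && cp ~/Tools/reconftw/reconftw.cfg . && rftw_vuln_openredir --deep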
+ exit 1 +fi + +# Colors +yellow="\033[1;33m" +bgreen="\033[1;32m" +reset="\033[0m" + +# Help menu +function display_help() { + echo "Usage: $0 [OPTIONS]" + echo + echo "Perform Open Redirects scan on provided inputs." + echo "Options:" + echo " -d, --deep Perform deep scanning" + echo " -h, --help Display this help and exit" + echo +} + +# Validate the required tools and files +function validate_requirements() { + if ! [ -x "$(command -v python3)" ]; then + echo "Error: python3 is not installed." >&2 + exit 1 + fi + if [ ! -f "$tools/Oralyzer/oralyzer.py" ] || [ ! -f "$tools/Oralyzer/payloads.txt" ]; then + echo "Error: Oralyzer tool or its payloads are missing." >&2 + exit 1 + fi +} + +# Main Open Redirect function +function open_redirect_scan() { + local deep_flag=$1 + + if { [ ! -f "$called_fn_dir/.open_redirect_scan" ] || [ "$DIFF" = true ]; } && [ "$OPEN_REDIRECT" = true ] && [ -s "gf/redirect.txt" ]; then + echo "[*] Starting Open Redirects checks" + + if [ "$deep_flag" = true ] || [[ $(cat gf/redirect.txt | wc -l) -le $DEEP_LIMIT ]]; then + cat gf/redirect.txt | qsreplace FUZZ | sed '/FUZZ/!d' | anew -q .tmp/tmp_redirect.txt + python3 $tools/Oralyzer/oralyzer.py -l .tmp/tmp_redirect.txt -p $tools/Oralyzer/payloads.txt > vulns/redirect.txt + sed -r -i "s/\x1B\[([0-9]{1,3}(;[0-9]{1,2})?)?[mGK]//g" vulns/redirect.txt + echo "[+] Results are saved in vulns/redirect.txt" + else + echo "[!] Skipping Open redirects: Too many URLs to test, try with --deep flag" + fi + else + if [ "$OPEN_REDIRECT" = false ]; then + echo -e "\n${yellow} open_redirect_scan skipped in this mode or defined in reconftw.cfg ${reset}" + elif [ ! -s "gf/redirect.txt" ]; then + echo -e "\n${yellow} open_redirect_scan No URLs potentially vulnerables to Open Redirect ${reset}\n" + else + echo -e "${yellow} open_redirect_scan is already processed, to force executing open_redirect_scan delete\n $called_fn_dir/.open_redirect_scan ${reset}\n" + fi + fi +} + +# Main script execution +deep_flag=false + +while (( "$#" )); do + case "$1" in + -d|--deep) + deep_flag=true + shift + ;; + -h|--help) + display_help + exit 0 + ;; + *) + echo "Unknown parameter passed: $1" + display_help + exit 1 + ;; + esac +done + +validate_requirements +open_redirect_scan $deep_flag + diff --git a/bin/rftw_vuln_protpollut b/bin/rftw_vuln_protpollut new file mode 100755 index 00000000..7f9d9c60 --- /dev/null +++ b/bin/rftw_vuln_protpollut @@ -0,0 +1,74 @@ +#!/bin/bash + +# Load environment variables from reconftw.cfg +# Load environment variables +if [ -f "reconftw.cfg" ]; then + source reconftw.cfg +else + echo "Error: reconftw.cfg not found!" + exit 1 +fi + +# Colors +yellow="\033[1;33m" +reset="\033[0m" + +# Help menu function +function display_help() { + echo "Usage: $0 [OPTIONS]" + echo + echo "Perform Prototype Pollution checks on provided inputs." + echo "Options:" + echo " -h, --help Display this help and exit" + echo +} + +# Validate requirements function +function validate_requirements() { + if ! [ -x "$(command -v ppfuzz)" ]; then + echo "Error: ppfuzz not found." >&2 + exit 1 + fi + if ! [ -f "webs/url_extract.txt" ]; then + echo "Error: url_extract.txt not found." >&2 + exit 1 + fi +} + +# Prototype Pollution test function +function prototype_pollution_test() { + if { [ ! 
-f "$called_fn_dir/.prototype_pollution" ] || [ "$DIFF" = true ]; } && [ "$PROTO_POLLUTION" = true ]; then + echo "[*] Starting Prototype Pollution checks" + if [ "$DEEP" = true ] || [[ $(cat webs/url_extract.txt | wc -l) -le $DEEP_LIMIT ]]; then + [ -s "webs/url_extract.txt" ] && ppfuzz -l webs/url_extract.txt -c $PPFUZZ_THREADS 2>/dev/null | anew -q .tmp/prototype_pollution.txt + [ -s ".tmp/prototype_pollution.txt" ] && cat .tmp/prototype_pollution.txt | sed -e '1,8d' | sed '/^\[ERR/d' | anew -q vulns/prototype_pollution.txt + echo "[+] Results are saved in vulns/prototype_pollution.txt" + else + echo "[!] Skipping Prototype Pollution: Too many URLs to test, try with --deep flag" + fi + else + if [ "$PROTO_POLLUTION" = false ]; then + echo -e "\n${yellow} prototype_pollution skipped in this mode or defined in reconftw.cfg ${reset}" + else + echo -e "${yellow} prototype_pollution is already processed, to force executing prototype_pollution delete\n $called_fn_dir/.prototype_pollution ${reset}\n" + fi + fi +} + +# Main script execution +while (( "$#" )); do + case "$1" in + -h|--help) + display_help + exit 0 + ;; + *) + echo "Unknown parameter passed: $1" + display_help + exit 1 + ;; + esac +done + +validate_requirements +prototype_pollution_test diff --git a/bin/rftw_vuln_smuggling b/bin/rftw_vuln_smuggling new file mode 100755 index 00000000..828416f5 --- /dev/null +++ b/bin/rftw_vuln_smuggling @@ -0,0 +1,77 @@ +#!/bin/bash + +# Load environment variables from reconftw.cfg +# Load environment variables +if [ -f "reconftw.cfg" ]; then + source reconftw.cfg +else + echo "Error: reconftw.cfg not found!" + exit 1 +fi + +# Colors +yellow="\033[1;33m" +reset="\033[0m" + +# Help menu function +function display_help() { + echo "Usage: $0 [OPTIONS]" + echo + echo "Perform HTTP Request Smuggling checks on provided inputs." + echo "Options:" + echo " -h, --help Display this help and exit" + echo +} + +# Validate requirements function +function validate_requirements() { + if ! [ -x "$(command -v python3)" ]; then + echo "Error: python3 not found." >&2 + exit 1 + fi + if ! [ -d "$tools/smuggler" ]; then + echo "Error: smuggler tool directory not found." >&2 + exit 1 + fi +} + +# HTTP Request Smuggling test function +function smuggling_test() { + if { [ ! -f "$called_fn_dir/.smuggling" ] || [ "$DIFF" = true ]; } && [ "$SMUGGLING" = true ]; then + echo "[*] Starting HTTP Request Smuggling checks" + [ ! -s ".tmp/webs_all.txt" ] && cat webs/webs.txt webs/webs_uncommon_ports.txt 2>/dev/null | anew -q .tmp/webs_all.txt + if [ "$DEEP" = true ] || [[ $(cat .tmp/webs_all.txt | wc -l) -le $DEEP_LIMIT ]]; then + cd "$tools/smuggler" || { echo "Failed to cd directory in smuggling @ line $LINENO"; exit 1; } + cat $dir/.tmp/webs_all.txt | python3 smuggler.py -q --no-color 2>/dev/null | anew -q $dir/.tmp/smuggling.txt + cd "$dir" || { echo "Failed to cd to $dir in smuggling @ line $LINENO"; exit 1; } + [ -s ".tmp/smuggling.txt" ] && cat .tmp/smuggling.txt | anew -q vulns/smuggling.txt + echo "[+] Results are saved in vulns/smuggling.txt" + else + echo "[!] 
Skipping HTTP Request Smuggling: Too many webs to test, try with --deep flag" + fi + else + if [ "$SMUGGLING" = false ]; then + echo -e "\n${yellow} smuggling skipped in this mode or defined in reconftw.cfg ${reset}" + else + echo -e "${yellow} smuggling is already processed, to force executing smuggling delete\n $called_fn_dir/.smuggling ${reset}\n" + fi + fi +} + +# Main script execution +while (( "$#" )); do + case "$1" in + -h|--help) + display_help + exit 0 + ;; + *) + echo "Unknown parameter passed: $1" + display_help + exit 1 + ;; + esac +done + +validate_requirements +smuggling_test diff --git a/bin/rftw_vuln_spray b/bin/rftw_vuln_spray new file mode 100755 index 00000000..e087b345 --- /dev/null +++ b/bin/rftw_vuln_spray @@ -0,0 +1,71 @@ +#!/bin/bash + +# Load environment variables from reconftw.cfg +# Load environment variables +if [ -f "reconftw.cfg" ]; then + source reconftw.cfg +else + echo "Error: reconftw.cfg not found!" + exit 1 +fi + +# Colors +yellow="\033[1;33m" +reset="\033[0m" + +# Help menu function +function display_help() { + echo "Usage: $0 [OPTIONS]" + echo + echo "Perform Password spraying tests on provided inputs." + echo "Options:" + echo " -h, --help Display this help and exit" + echo +} + +# Validate requirements function +function validate_requirements() { + if ! [ -x "$(command -v python3)" ]; then + echo "Error: python3 not found." >&2 + exit 1 + fi + if ! [ -f "$dir/hosts/portscan_active.gnmap" ]; then + echo "Error: portscan_active.gnmap not found." >&2 + exit 1 + fi +} + +# Password spraying test function +function spraying_test() { + if { [ ! -f "$called_fn_dir/.spraying" ] || [ "$DIFF" = true ]; } && [ "$SPRAY" = true ]; then + echo "[*] Starting Password spraying" + cd "$tools/brutespray" || { echo "Failed to cd directory in spraying_test @ line ${LINENO}"; exit 1; } + python3 brutespray.py --file $dir/hosts/portscan_active.gnmap --threads $BRUTESPRAY_THREADS --hosts $BRUTESPRAY_CONCURRENCE -o $dir/vulns/brutespray 2>>"$LOGFILE" >/dev/null + cd "$dir" || { echo "Failed to cd directory in spraying_test @ line ${LINENO}"; exit 1; } + echo "[+] Results are saved in vulns/brutespray folder" + else + if [ "$SPRAY" = false ]; then + echo -e "\n${yellow} spraying_test skipped in this mode or defined in reconftw.cfg ${reset}" + else + echo -e "${yellow} spraying_test is already processed, to force executing spraying_test delete\n $called_fn_dir/.spraying ${reset}\n" + fi + fi +} + +# Main script execution +while (( "$#" )); do + case "$1" in + -h|--help) + display_help + exit 0 + ;; + *) + echo "Unknown parameter passed: $1" + display_help + exit 1 + ;; + esac +done + +validate_requirements +spraying_test diff --git a/bin/rftw_vuln_sqli b/bin/rftw_vuln_sqli new file mode 100755 index 00000000..adb97d68 --- /dev/null +++ b/bin/rftw_vuln_sqli @@ -0,0 +1,92 @@ +#!/bin/bash + +# Load environment variables from reconftw.cfg +# Load environment variables +if [ -f "reconftw.cfg" ]; then + source reconftw.cfg +else + echo "Error: reconftw.cfg not found!" + exit 1 +fi + +# Colors +yellow="\033[1;33m" +reset="\033[0m" + +# Help menu function +function display_help() { + echo "Usage: $0 [OPTIONS]" + echo + echo "Perform SQLi checks on provided inputs." + echo "Options:" + echo " -d, --deep Perform deep scanning" + echo " -h, --help Display this help and exit" + echo +} + +# Validate requirements function +function validate_requirements() { + if ! [ -x "$(command -v python3)" ] || ! 
[ -x "$(command -v interlace)" ]; then + echo "Error: python3 and/or interlace are not installed." >&2 + exit 1 + fi + if [ "$SQLMAP" = true ] && ! [ -f "$tools/sqlmap/sqlmap.py" ]; then + echo "Error: sqlmap.py not found at specified location." >&2 + exit 1 + fi +} + +# SQLi checks function +function sqli_checks() { + local deep_flag=$1 + + if { [ ! -f "$called_fn_dir/.sqli_checks" ] || [ "$DIFF" = true ]; } && [ "$SQLI" = true ] && [ -s "gf/sqli.txt" ]; then + echo "[*] Starting SQLi checks" + + cat gf/sqli.txt | qsreplace FUZZ | sed '/FUZZ/!d' | anew -q .tmp/tmp_sqli.txt + + if [ "$deep_flag" = true ] || [[ $(cat .tmp/tmp_sqli.txt | wc -l) -le $DEEP_LIMIT ]]; then + if [ "$SQLMAP" = true ]; then + python3 $tools/sqlmap/sqlmap.py -m .tmp/tmp_sqli.txt -b -o --smart --batch --disable-coloring --random-agent --output-dir=vulns/sqlmap 2>>"$LOGFILE" >/dev/null + fi + if [ "$GHAURI" = true ]; then + interlace -tL .tmp/tmp_sqli.txt -threads ${INTERLACE_THREADS} -c "ghauri -u _target_ --batch -H \"${HEADER}\" --force-ssl >> vulns/ghauri_log.txt" 2>>"$LOGFILE" >/dev/null + fi + echo "[+] Results are saved in vulns/sqlmap folder" + else + echo "[!] Skipping SQLi: Too many URLs to test, try with --deep flag" + fi + else + if [ "$SQLI" = false ]; then + echo -e "\n${yellow} sqli_checks skipped in this mode or defined in reconftw.cfg ${reset}" + elif [ ! -s "gf/sqli.txt" ]; then + echo -e "\n${yellow} sqli_checks No URLs potentially vulnerables to SQLi ${reset}\n" + else + echo -e "${yellow} sqli_checks is already processed, to force executing sqli_checks delete\n $called_fn_dir/.sqli_checks ${reset}\n" + fi + fi +} + +# Main script execution +deep_flag=false + +while (( "$#" )); do + case "$1" in + -d|--deep) + deep_flag=true + shift + ;; + -h|--help) + display_help + exit 0 + ;; + *) + echo "Unknown parameter passed: $1" + display_help + exit 1 + ;; + esac +done + +validate_requirements +sqli_checks $deep_flag diff --git a/bin/rftw_vuln_ssrf b/bin/rftw_vuln_ssrf new file mode 100755 index 00000000..6f70f378 --- /dev/null +++ b/bin/rftw_vuln_ssrf @@ -0,0 +1,108 @@ +#!/bin/bash + +# Load environment variables from reconftw.cfg +# Load environment variables +if [ -f "reconftw.cfg" ]; then + source reconftw.cfg +else + echo "Error: reconftw.cfg not found!" + exit 1 +fi + +# Colors +yellow="\033[1;33m" +bgreen="\033[1;32m" +reset="\033[0m" + +# Help menu +function display_help() { + echo "Usage: $0 [OPTIONS]" + echo + echo "Perform SSRF checks on provided inputs." + echo "Options:" + echo " -d, --deep Perform deep scanning" + echo " -h, --help Display this help and exit" + echo +} + +# Validate the required tools and files +function validate_requirements() { + if ! [ -x "$(command -v python3)" ]; then + echo "Error: python3 is not installed." >&2 + exit 1 + fi + if [ ! -f "$tools/Oralyzer/oralyzer.py" ]; then + echo "Error: Oralyzer tool is missing." >&2 + exit 1 + fi +} + +# Main SSRF checks function +function ssrf_checks() { + local deep_flag=$1 + + if { [ ! 
-f "$called_fn_dir/.ssrf_checks" ] || [ "$DIFF" = true ]; } && [ "$SSRF_CHECKS" = true ] && [ -s "gf/ssrf.txt" ]; then + echo "[*] Starting SSRF checks" + + if [ -z "$COLLAB_SERVER" ]; then + interactsh-client &>.tmp/ssrf_callback.txt & + sleep 2 + COLLAB_SERVER_FIX="FFUFHASH.$(cat .tmp/ssrf_callback.txt | tail -n1 | cut -c 16-)" + COLLAB_SERVER_URL="http://$COLLAB_SERVER_FIX" + INTERACT=true + else + COLLAB_SERVER_FIX="FFUFHASH.$(echo ${COLLAB_SERVER} | sed -r "s/https?:\/\///")" + INTERACT=false + fi + if [ "$deep_flag" = true ] || [[ $(cat gf/ssrf.txt | wc -l) -le $DEEP_LIMIT ]]; then + cat gf/ssrf.txt | qsreplace ${COLLAB_SERVER_FIX} | anew -q .tmp/tmp_ssrf.txt + cat gf/ssrf.txt | qsreplace ${COLLAB_SERVER_URL} | anew -q .tmp/tmp_ssrf.txt + ffuf -v -H "${HEADER}" -t $FFUF_THREADS -rate $FFUF_RATELIMIT -w .tmp/tmp_ssrf.txt -u FUZZ 2>/dev/null | grep "URL" | sed 's/| URL | //' | anew -q vulns/ssrf_requested_url.txt + ffuf -v -w .tmp/tmp_ssrf.txt:W1,$tools/headers_inject.txt:W2 -H "${HEADER}" -H "W2: ${COLLAB_SERVER_FIX}" -t $FFUF_THREADS -rate $FFUF_RATELIMIT -u W1 2>/dev/null | anew -q vulns/ssrf_requested_headers.txt + ffuf -v -w .tmp/tmp_ssrf.txt:W1,$tools/headers_inject.txt:W2 -H "${HEADER}" -H "W2: ${COLLAB_SERVER_URL}" -t $FFUF_THREADS -rate $FFUF_RATELIMIT -u W1 2>/dev/null | anew -q vulns/ssrf_requested_headers.txt + sleep 5 + if [ -s ".tmp/ssrf_callback.txt" ]; then + cat .tmp/ssrf_callback.txt | tail -n+11 | anew -q vulns/ssrf_callback.txt + NUMOFLINES=$(cat .tmp/ssrf_callback.txt | tail -n+12 | sed '/^$/d' | wc -l) + [ "$INTERACT" = true ] && echo "SSRF: ${NUMOFLINES} callbacks received" + fi + echo "[+] Results are saved in vulns/ssrf_*" + else + echo "[!] Skipping SSRF: Too many URLs to test, try with --deep flag" + fi + pkill -f interactsh-client & + else + if [ "$SSRF_CHECKS" = false ]; then + echo -e "\n${yellow} ssrf_checks skipped in this mode or defined in reconftw.cfg ${reset}" + elif [ ! -s "gf/ssrf.txt" ]; then + echo -e "\n${yellow} ssrf_checks No URLs potentially vulnerables to SSRF ${reset}\n" + else + echo -e "${yellow} ssrf_checks is already processed, to force executing ssrf_checks delete\n $called_fn_dir/.ssrf_checks ${reset}\n" + fi + fi +} + +# Main script execution +deep_flag=false + +while (( "$#" )); do + case "$1" in + -d|--deep) + deep_flag=true + shift + ;; + -h|--help) + display_help + exit 0 + ;; + *) + echo "Unknown parameter passed: $1" + display_help + exit 1 + ;; + esac +done + +validate_requirements +ssrf_checks $deep_flag + diff --git a/bin/rftw_vuln_ssti b/bin/rftw_vuln_ssti new file mode 100755 index 00000000..d57ba1f5 --- /dev/null +++ b/bin/rftw_vuln_ssti @@ -0,0 +1,83 @@ +#!/bin/bash + +# Load environment variables from reconftw.cfg +# Load environment variables +if [ -f "reconftw.cfg" ]; then + source reconftw.cfg +else + echo "Error: reconftw.cfg not found!" + exit 1 +fi + +# Colors +yellow="\033[1;33m" +reset="\033[0m" + +# Help menu function +function display_help() { + echo "Usage: $0 [OPTIONS]" + echo + echo "Perform SSTI checks on provided inputs." + echo "Options:" + echo " -d, --deep Perform deep scanning" + echo " -h, --help Display this help and exit" + echo +} + +# Validate requirements function +function validate_requirements() { + if ! [ -x "$(command -v ffuf)" ] || ! [ -x "$(command -v interlace)" ]; then + echo "Error: ffuf and/or interlace are not installed." >&2 + exit 1 + fi +} + +# SSTI checks function +function ssti_checks() { + local deep_flag=$1 + + if { [ ! 
-f "$called_fn_dir/.ssti_checks" ] || [ "$DIFF" = true ]; } && [ "$SSTI" = true ] && [ -s "gf/ssti.txt" ]; then + echo "[*] Starting SSTI checks" + + cat gf/ssti.txt | qsreplace FUZZ | sed '/FUZZ/!d' | anew -q .tmp/tmp_ssti.txt + + if [ "$deep_flag" = true ] || [[ $(cat .tmp/tmp_ssti.txt | wc -l) -le $DEEP_LIMIT ]]; then + interlace -tL .tmp/tmp_ssti.txt -threads ${INTERLACE_THREADS} -c "ffuf -v -r -t ${FFUF_THREADS} -rate ${FFUF_RATELIMIT} -H \"${HEADER}\" -w ${ssti_wordlist} -u \"_target_\" -mr \"ssti49\" " 2>/dev/null | grep "URL" | sed 's/| URL | //' | anew -q vulns/ssti.txt + echo "[+] Results are saved in vulns/ssti.txt" + else + echo "[!] Skipping SSTI: Too many URLs to test, try with --deep flag" + fi + else + if [ "$SSTI" = false ]; then + echo -e "\n${yellow} ssti_checks skipped in this mode or defined in reconftw.cfg ${reset}" + elif [ ! -s "gf/ssti.txt" ]; then + echo -e "\n${yellow} ssti_checks No URLs potentially vulnerables to SSTI ${reset}\n" + else + echo -e "${yellow} ssti_checks is already processed, to force executing ssti_checks delete\n $called_fn_dir/.ssti_checks ${reset}\n" + fi + fi +} + +# Main script execution +deep_flag=false + +while (( "$#" )); do + case "$1" in + -d|--deep) + deep_flag=true + shift + ;; + -h|--help) + display_help + exit 0 + ;; + *) + echo "Unknown parameter passed: $1" + display_help + exit 1 + ;; + esac +done + +validate_requirements +ssti_checks $deep_flag diff --git a/bin/rftw_vuln_testssl b/bin/rftw_vuln_testssl new file mode 100755 index 00000000..63237794 --- /dev/null +++ b/bin/rftw_vuln_testssl @@ -0,0 +1,69 @@ +#!/bin/bash + +# Load environment variables from reconftw.cfg +# Load environment variables +if [ -f "reconftw.cfg" ]; then + source reconftw.cfg +else + echo "Error: reconftw.cfg not found!" + exit 1 +fi + +# Colors +yellow="\033[1;33m" +reset="\033[0m" + +# Help menu function +function display_help() { + echo "Usage: $0 [OPTIONS]" + echo + echo "Perform SSL tests on provided inputs." + echo "Options:" + echo " -h, --help Display this help and exit" + echo +} + +# Validate requirements function +function validate_requirements() { + if ! [ -x "$(command -v $tools/testssl.sh/testssl.sh)" ]; then + echo "Error: testssl.sh not found at specified location." >&2 + exit 1 + fi + if ! [ -f "hosts/ips.txt" ]; then + echo "Error: hosts/ips.txt not found." >&2 + exit 1 + fi +} + +# SSL test function +function ssl_test() { + if { [ ! 
-f "$called_fn_dir/.ssl_test" ] || [ "$DIFF" = true ]; } && [ "$TEST_SSL" = true ]; then + echo "[*] Starting SSL Test" + $tools/testssl.sh/testssl.sh --quiet --color 0 -U -iL hosts/ips.txt 2>>"$LOGFILE" > vulns/testssl.txt + echo "[+] Results are saved in vulns/testssl.txt" + else + if [ "$TEST_SSL" = false ]; then + echo -e "\n${yellow} ssl_test skipped in this mode or defined in reconftw.cfg ${reset}" + else + echo -e "${yellow} ssl_test is already processed, to force executing ssl_test delete\n $called_fn_dir/.ssl_test ${reset}\n" + fi + fi +} + +# Main script execution +while (( "$#" )); do + case "$1" in + -h|--help) + display_help + exit 0 + ;; + *) + echo "Unknown parameter passed: $1" + display_help + exit 1 + ;; + esac +done + +validate_requirements +ssl_test diff --git a/bin/rftw_vuln_webcache b/bin/rftw_vuln_webcache new file mode 100755 index 00000000..bd517459 --- /dev/null +++ b/bin/rftw_vuln_webcache @@ -0,0 +1,77 @@ +#!/bin/bash + +# Load environment variables from reconftw.cfg +# Load environment variables +if [ -f "reconftw.cfg" ]; then + source reconftw.cfg +else + echo "Error: reconftw.cfg not found!" + exit 1 +fi + +# Colors +yellow="\033[1;33m" +reset="\033[0m" + +# Help menu function +function display_help() { + echo "Usage: $0 [OPTIONS]" + echo + echo "Perform Web Cache Poisoning checks on provided inputs." + echo "Options:" + echo " -h, --help Display this help and exit" + echo +} + +# Validate requirements function +function validate_requirements() { + if ! [ -x "$(command -v anew)" ]; then + echo "Error: anew not found." >&2 + exit 1 + fi + if ! [ -d "$tools/Web-Cache-Vulnerability-Scanner" ]; then + echo "Error: Web-Cache-Vulnerability-Scanner directory not found." >&2 + exit 1 + fi +} + +# Web Cache Poisoning test function +function webcache_test() { + if { [ ! -f "$called_fn_dir/.webcache" ] || [ "$DIFF" = true ]; } && [ "$WEBCACHE" = true ]; then + echo "[*] Starting Web Cache Poisoning checks" + [ ! -s ".tmp/webs_all.txt" ] && cat webs/webs.txt webs/webs_uncommon_ports.txt 2>/dev/null | anew -q .tmp/webs_all.txt + if [ "$DEEP" = true ] || [[ $(cat .tmp/webs_all.txt | wc -l) -le $DEEP_LIMIT ]]; then + cd "$tools/Web-Cache-Vulnerability-Scanner" || { echo "Failed to cd directory in webcache @ line $LINENO"; exit 1; } + Web-Cache-Vulnerability-Scanner -u file:$dir/.tmp/webs_all.txt -v 0 2>/dev/null | anew -q $dir/.tmp/webcache.txt + cd "$dir" || { echo "Failed to cd to $dir in webcache @ line $LINENO"; exit 1; } + [ -s ".tmp/webcache.txt" ] && cat .tmp/webcache.txt | anew -q vulns/webcache.txt + echo "[+] Results are saved in vulns/webcache.txt" + else + echo "[!] 
Skipping Web Cache Poisoning: Too many webs to test, try with --deep flag" + fi + else + if [ "$WEBCACHE" = false ]; then + echo -e "\n${yellow} webcache skipped in this mode or defined in reconftw.cfg ${reset}" + else + echo -e "${yellow} webcache is already processed, to force executing webcache delete\n $called_fn_dir/.webcache ${reset}\n" + fi + fi +} + +# Main script execution +while (( "$#" )); do + case "$1" in + -h|--help) + display_help + exit 0 + ;; + *) + echo "Unknown parameter passed: $1" + display_help + exit 1 + ;; + esac +done + +validate_requirements +webcache_test diff --git a/bin/rftw_vuln_xss b/bin/rftw_vuln_xss new file mode 100755 index 00000000..033b5baf --- /dev/null +++ b/bin/rftw_vuln_xss @@ -0,0 +1,120 @@ +#!/bin/bash + +# Load environment variables from reconftw.cfg +# Load environment variables +if [ -f "reconftw.cfg" ]; then + source reconftw.cfg +else + echo "Error: reconftw.cfg not found!" + exit 1 +fi + +# Help menu +function display_help() { + echo "Usage: $0 [OPTIONS]" + echo + echo "Perform XSS analysis on provided inputs." + echo "Options:" + echo " -h, --help Display this help and exit" + echo +} + +# Validate the required tools and files +function validate_requirements() { + if ! [ -x "$(command -v dalfox)" ]; then + echo "Error: dalfox is not installed." >&2 + exit 1 + fi + if [ ! -f "gf/xss.txt" ]; then + echo "Error: gf/xss.txt does not exist." >&2 + exit 1 + fi +} + +# Main xss function +function xss_analysis() { + if { [ ! -f "$called_fn_dir/.xss_analysis" ] || [ "$DIFF" = true ]; } && [ "$XSS" = true ] && [ -s "gf/xss.txt" ]; then + echo "[*] Starting XSS Analysis" + + # Check if gf/xss.txt exists and process it + [ -s "gf/xss.txt" ] && cat gf/xss.txt | qsreplace FUZZ | sed '/FUZZ/!d' | Gxss -c 100 -p Xss | qsreplace FUZZ | sed '/FUZZ/!d' | anew -q .tmp/xss_reflected.txt + + # Check if AXIOM is true or not and perform the corresponding operations + if [ ! 
"$AXIOM" = true ]; then + if [ "$DEEP" = true ]; then + if [ -n "$XSS_SERVER" ]; then + [ -s ".tmp/xss_reflected.txt" ] && cat .tmp/xss_reflected.txt | dalfox pipe --silence --no-color --no-spinner --only-poc r --ignore-return 302,404,403 --skip-bav -b ${XSS_SERVER} -w $DALFOX_THREADS 2>>"$LOGFILE" | anew -q vulns/xss.txt + else + printf "${yellow}\n No XSS_SERVER defined, blind xss skipped\n\n" + [ -s ".tmp/xss_reflected.txt" ] && cat .tmp/xss_reflected.txt | dalfox pipe --silence --no-color --no-spinner --only-poc r --ignore-return 302,404,403 --skip-bav -w $DALFOX_THREADS 2>>"$LOGFILE" | anew -q vulns/xss.txt + fi + else + if [[ $(cat .tmp/xss_reflected.txt | wc -l) -le $DEEP_LIMIT ]]; then + if [ -n "$XSS_SERVER" ]; then + cat .tmp/xss_reflected.txt | dalfox pipe --silence --no-color --no-spinner --skip-bav --skip-mining-dom --skip-mining-dict --only-poc r --ignore-return 302,404,403 -b ${XSS_SERVER} -w $DALFOX_THREADS 2>>"$LOGFILE" | anew -q vulns/xss.txt + else + printf "${yellow}\n No XSS_SERVER defined, blind xss skipped\n\n" + cat .tmp/xss_reflected.txt | dalfox pipe --silence --no-color --no-spinner --skip-bav --skip-mining-dom --skip-mining-dict --only-poc r --ignore-return 302,404,403 -w $DALFOX_THREADS 2>>"$LOGFILE" | anew -q vulns/xss.txt + fi + else + printf "${bred} Skipping XSS: Too many URLs to test, try with --deep flag${reset}\n" + fi + fi + else + if [ "$DEEP" = true ]; then + if [ -n "$XSS_SERVER" ]; then + [ -s ".tmp/xss_reflected.txt" ] && axiom-scan .tmp/xss_reflected.txt -m dalfox --skip-bav -b ${XSS_SERVER} -w $DALFOX_THREADS -o vulns/xss.txt $AXIOM_EXTRA_ARGS 2>>"$LOGFILE" >/dev/null + else + printf "${yellow}\n No XSS_SERVER defined, blind xss skipped\n\n" + [ -s ".tmp/xss_reflected.txt" ] && axiom-scan .tmp/xss_reflected.txt -m dalfox --skip-bav -w $DALFOX_THREADS -o vulns/xss.txt $AXIOM_EXTRA_ARGS 2>>"$LOGFILE" >/dev/null + fi + else + if [[ $(cat .tmp/xss_reflected.txt | wc -l) -le $DEEP_LIMIT ]]; then + if [ -n "$XSS_SERVER" ]; then + axiom-scan .tmp/xss_reflected.txt -m dalfox --skip-bav --skip-grepping --skip-mining-all --skip-mining-dict -b ${XSS_SERVER} -w $DALFOX_THREADS -o vulns/xss.txt $AXIOM_EXTRA_ARGS 2>>"$LOGFILE" >/dev/null + else + printf "${yellow}\n No XSS_SERVER defined, blind xss skipped\n\n" + axiom-scan .tmp/xss_reflected.txt -m dalfox --skip-bav --skip-grepping --skip-mining-all --skip-mining-dict -w $DALFOX_THREADS -o vulns/xss.txt $AXIOM_EXTRA_ARGS 2>>"$LOGFILE" >/dev/null + fi + else + printf "${bred} Skipping XSS: Too many URLs to test, try with --deep flag${reset}\n" + fi + fi + fi + echo "[+] Results are saved in vulns/xss.txt" + end_func "Results are saved in vulns/xss.txt" ${FUNCNAME[0]} + else + if [ "$XSS" = false ]; then + echo -e "\n${yellow} xss_analysis skipped in this mode or defined in reconftw.cfg ${reset}" + elif [ ! 
-s "gf/xss.txt" ]; then + echo -e "\n${yellow} xss_analysis: No URLs potentially vulnerable to XSS ${reset}\n" + else + echo -e "${yellow} xss_analysis is already processed, to force executing xss_analysis delete\n $called_fn_dir/.xss_analysis ${reset}\n" + fi + fi +} + +# Main script execution +if [ "$#" -eq 0 ]; then + display_help + exit 0 +fi + +while (( "$#" )); do + case "$1" in + -h|--help) + display_help + exit 0 + ;; + *) + echo "Unknown parameter passed: $1" + display_help + exit 1 + ;; + esac + shift +done + +validate_requirements +xss_analysis + diff --git a/bin/rftw_web_cms b/bin/rftw_web_cms new file mode 100755 index 00000000..df1631a9 --- /dev/null +++ b/bin/rftw_web_cms @@ -0,0 +1,86 @@ +#!/bin/bash + +# Load environment variables from reconftw.cfg +# Load environment variables +if [ -f "reconftw.cfg" ]; then + source reconftw.cfg +else + echo "Error: reconftw.cfg not found!" + exit 1 +fi + +# Help function +help_menu() { + echo "Usage: $0 [OPTIONS]" + echo "" + echo "CMS Scanner" + echo "" + echo "Options:" + echo " -h, --help Show this help menu" + echo " -f, --force Force the execution even if already processed" +} + +# Start function +start_func() { + echo "Starting $1: $2..." +} + +# End function +end_func() { + echo "$1" + echo "End of $2..." +} + +# Default values +FORCE=false + +# Parse arguments +while [[ "$#" -gt 0 ]]; do + case $1 in + -h|--help) help_menu; exit 0 ;; + -f|--force) FORCE=true ;; + *) echo "Unknown parameter passed: $1"; exit 1 ;; + esac + shift +done + +if { [ ! -f "$called_fn_dir/.cms_scanner" ] || [ "$FORCE" = true ]; } && [ "$CMS_SCANNER" = true ]; then + start_func "cms_scanner" "CMS Scanner" + + mkdir -p $dir/cms && rm -rf $dir/cms/* + anew -q .tmp/webs_all.txt < <(cat webs/webs.txt webs/webs_uncommon_ports.txt 2>/dev/null) + + if [ -s ".tmp/webs_all.txt" ]; then + tr '\n' ',' < .tmp/webs_all.txt > .tmp/cms.txt + timeout -k 1m ${CMSSCAN_TIMEOUT}s python3 $tools/CMSeeK/cmseek.py -l .tmp/cms.txt --batch -r 2>>"$LOGFILE" &>/dev/null + exit_status=$? + if [[ $exit_status -eq 125 ]]; then + echo "TIMEOUT cmseek.py - investigate manually for $dir" >> "$LOGFILE" + end_func "TIMEOUT cmseek.py - investigate manually for $dir" "cms_scanner" + exit 1 + elif [[ $exit_status -ne 0 ]]; then + echo "ERROR cmseek.py - investigate manually for $dir" >> "$LOGFILE" + end_func "ERROR cmseek.py - investigate manually for $dir" "cms_scanner" + exit 1 + fi + while read -r sub; do + sub_out=$(echo "$sub" | sed -e 's|^[^/]*//||' -e 's|/.*$||') + cms_id=$(jq -r 'try .cms_id' "$tools/CMSeeK/Result/${sub_out}/cms.json" 2>/dev/null) + if [ -z "$cms_id" ]; then + rm -rf "$tools/CMSeeK/Result/${sub_out}" + else + mv -f "$tools/CMSeeK/Result/${sub_out}" $dir/cms/ 2>>"$LOGFILE" + fi + done < .tmp/webs_all.txt + + end_func "Results are saved in $domain/cms/*subdomain* folder" "cms_scanner" + else + end_func "No $domain/web/webs.txts file found, cms scanner skipped" "cms_scanner" + fi +else + if [ "$CMS_SCANNER" = false ]; then + printf "\n${yellow} cms_scanner skipped in this mode or defined in reconftw.cfg ${reset}\n" + else + printf "${yellow} cms_scanner is already processed. 
To force executing cms_scanner, delete\n $called_fn_dir/.cms_scanner ${reset}\n\n" + fi +fi diff --git a/bin/rftw_web_fuzz b/bin/rftw_web_fuzz new file mode 100755 index 00000000..39ddb39a --- /dev/null +++ b/bin/rftw_web_fuzz @@ -0,0 +1,92 @@ +#!/bin/bash + +# Load environment variables from reconftw.cfg +# Load environment variables +if [ -f "reconftw.cfg" ]; then + source reconftw.cfg +else + echo "Error: reconftw.cfg not found!" + exit 1 +fi + +# Help function +help_menu() { + echo "Usage: $0 [OPTIONS]" + echo "" + echo "Web Directory Fuzzing" + echo "" + echo "Options:" + echo " -h, --help Show this help menu" + echo " -f, --force Force the execution even if already processed" +} + +# Start function +start_func() { + echo "Starting $1: $2..." +} + +# End function +end_func() { + echo "$1" + echo "End of $2..." +} + +# Default values +FORCE=false + +# Parse arguments +while [[ "$#" -gt 0 ]]; do + case $1 in + -h|--help) help_menu; exit 0 ;; + -f|--force) FORCE=true ;; + *) echo "Unknown parameter passed: $1"; exit 1 ;; + esac + shift +done + +if { [ ! -f "$called_fn_dir/.fuzz" ] || [ "$FORCE" = true ]; } && [ "$FUZZ" = true ]; then + start_func "fuzz" "Web directory fuzzing" + + anew -q .tmp/webs_all.txt < <(cat webs/webs.txt webs/webs_uncommon_ports.txt 2>/dev/null) + + if [ -s ".tmp/webs_all.txt" ]; then + mkdir -p $dir/fuzzing $dir/.tmp/fuzzing + + if [ ! "$AXIOM" = true ]; then + interlace -tL .tmp/webs_all.txt -threads ${INTERLACE_THREADS} -c "ffuf ${FFUF_FLAGS} -t ${FFUF_THREADS} -rate ${FFUF_RATELIMIT} -H \"${HEADER}\" -w ${fuzz_wordlist} -maxtime ${FFUF_MAXTIME} -u _target_/FUZZ -o _output_/_cleantarget_.json" -o $dir/.tmp/fuzzing 2>>"$LOGFILE" >/dev/null + + while read -r sub; do + sub_out=$(echo "$sub" | sed -e 's|^[^/]*//||' -e 's|/.*$||') + if [ -s "$dir/.tmp/fuzzing/${sub_out}.json" ]; then + jq -r 'try .results[] | "\(.status) \(.length) \(.url)"' < "$dir/.tmp/fuzzing/${sub_out}.json" | sort -k1 | anew -q $dir/fuzzing/${sub_out}.txt + fi + done < .tmp/webs_all.txt + + sort -k1 < <(find $dir/fuzzing/ -type f -iname "*.txt" -exec cat {} + 2>>"$LOGFILE") | anew -q $dir/fuzzing/fuzzing_full.txt + else + axiom-exec "mkdir -p /home/op/lists/seclists/Discovery/Web-Content/" &>/dev/null + axiom-exec "wget -q -O - ${fuzzing_remote_list} > /home/op/lists/fuzz_wordlist.txt" &>/dev/null + axiom-exec "wget -q -O - ${fuzzing_remote_list} > /home/op/lists/seclists/Discovery/Web-Content/big.txt" &>/dev/null + axiom-scan .tmp/webs_all.txt -m ffuf_base -H "${HEADER}" $FFUF_FLAGS -s -maxtime $FFUF_MAXTIME -o $dir/.tmp/ffuf-content.json $AXIOM_EXTRA_ARGS 2>>"$LOGFILE" >/dev/null + + while read -r sub; do + sub_out=$(echo "$sub" | sed -e 's|^[^/]*//||' -e 's|/.*$||') + if [ -s "$dir/.tmp/ffuf-content.json" ]; then + grep "$sub" < <(jq -r 'try .results[] | "\(.status) \(.length) \(.url)"' .tmp/ffuf-content.json) | sort -k1 | anew -q fuzzing/${sub_out}.txt + fi + done < .tmp/webs_all.txt + + sort -k1 < <(find $dir/fuzzing/ -type f -iname "*.txt" -exec cat {} + 2>>"$LOGFILE") | anew -q $dir/fuzzing/fuzzing_full.txt + fi + + end_func "Results are saved in $domain/fuzzing/*subdomain*.txt" "fuzz" + else + end_func "No $domain/webs/webs.txt file found, fuzzing skipped" "fuzz" + fi +else + if [ "$FUZZ" = false ]; then + printf "\n${yellow} fuzz skipped in this mode or defined in reconftw.cfg ${reset}\n" + else + printf "${yellow} fuzz is already processed. 
To force executing fuzz, delete\n $called_fn_dir/.fuzz ${reset}\n\n" + fi +fi diff --git a/bin/rftw_web_jschecks b/bin/rftw_web_jschecks new file mode 100755 index 00000000..001cc6a0 --- /dev/null +++ b/bin/rftw_web_jschecks @@ -0,0 +1,98 @@ +#!/bin/bash + +# Load environment variables from reconftw.cfg +# Load environment variables +if [ -f "reconftw.cfg" ]; then + source reconftw.cfg +else + echo "Error: reconftw.cfg not found!" + exit 1 +fi + +# Help menu +help_menu() { + echo "Usage: $0 [OPTIONS]" + echo "Perform Javascript Checks" + echo "" + echo "Options:" + echo " -d, --domain DOMAIN Set domain (required)" + echo " -h, --help Display this help message and exit" + exit 0 +} + +# Input validation +if [ $# -eq 0 ]; then + help_menu +fi + +DOMAIN="" +while [ "$1" != "" ]; do + case $1 in + -d | --domain ) shift + DOMAIN=$1 + ;; + -h | --help ) help_menu + exit + ;; + * ) echo "Unknown option: $1" + exit 1 + esac + shift +done + +if [ -z "$DOMAIN" ]; then + echo "Error: Domain is required!" + exit 1 +fi + +jschecks() { + local domain=$1 + + if { [ ! -f "$called_fn_dir/.${FUNCNAME[0]}" ] || [ "$DIFF" = true ]; } && [ "$JSCHECKS" = true ]; then + start_func ${FUNCNAME[0]} "Javascript Scan" + if [ -s ".tmp/url_extract_js.txt" ]; then + printf "${yellow} Running : Fetching Urls 1/5${reset}\n" + if [ ! "$AXIOM" = true ]; then + cat .tmp/url_extract_js.txt | subjs -ua "Mozilla/5.0 (X11; Linux x86_64; rv:72.0) Gecko/20100101 Firefox/72.0" -c 40 | grep "$domain" | anew -q .tmp/subjslinks.txt + else + axiom-scan .tmp/url_extract_js.txt -m subjs -o .tmp/subjslinks.txt $AXIOM_EXTRA_ARGS 2>>"$LOGFILE" >/dev/null + fi + [ -s ".tmp/subjslinks.txt" ] && cat .tmp/subjslinks.txt | egrep -iv "\.(eot|jpg|jpeg|gif|css|tif|tiff|png|ttf|otf|woff|woff2|ico|pdf|svg|txt|js)" | anew -q js/nojs_links.txt + [ -s ".tmp/subjslinks.txt" ] && cat .tmp/subjslinks.txt | grep -iE "\.js($|\?)" | anew -q .tmp/url_extract_js.txt + cat .tmp/url_extract_js.txt | python3 $tools/urless/urless/urless.py | anew -q js/url_extract_js.txt 2>>"$LOGFILE" >/dev/null + printf "${yellow} Running : Resolving JS Urls 2/5${reset}\n" + if [ ! "$AXIOM" = true ]; then + [ -s "js/url_extract_js.txt" ] && cat js/url_extract_js.txt | httpx -follow-redirects -random-agent -silent -timeout $HTTPX_TIMEOUT -threads $HTTPX_THREADS -rl $HTTPX_RATELIMIT -status-code -content-type -retries 2 -no-color | grep "[200]" | grep "javascript" | cut -d ' ' -f1 | anew -q js/js_livelinks.txt + else + [ -s "js/url_extract_js.txt" ] && axiom-scan js/url_extract_js.txt -m httpx -follow-host-redirects -H \"${HEADER}\" -status-code -threads $HTTPX_THREADS -rl $HTTPX_RATELIMIT -timeout $HTTPX_TIMEOUT -silent -content-type -retries 2 -no-color -o .tmp/js_livelinks.txt $AXIOM_EXTRA_ARGS 2>>"$LOGFILE" >/dev/null + [ -s ".tmp/js_livelinks.txt" ] && cat .tmp/js_livelinks.txt | anew .tmp/web_full_info.txt | grep "[200]" | grep "javascript" | cut -d ' ' -f1 | anew -q js/js_livelinks.txt + fi + printf "${yellow} Running : Gathering endpoints 3/5${reset}\n" + [ -s "js/js_livelinks.txt" ] && python3 $tools/xnLinkFinder/xnLinkFinder.py -i js/js_livelinks.txt -sf subdomains/subdomains.txt -d $XNLINKFINDER_DEPTH -o .tmp/js_endpoints.txt 2>>"$LOGFILE" >/dev/null + [ -s "parameters.txt" ] && rm -f parameters.txt 2>>"$LOGFILE" >/dev/null + if [ -s ".tmp/js_endpoints.txt" ]; then + sed -i '/^\//!d' .tmp/js_endpoints.txt + cat .tmp/js_endpoints.txt | anew -q js/js_endpoints.txt + fi + printf "${yellow} Running : Gathering secrets 4/5${reset}\n" + if [ ! 
"$AXIOM" = true ]; then + [ -s "js/js_livelinks.txt" ] && cat js/js_livelinks.txt | Mantra -ua ${HEADER} -s | anew -q js/js_secrets.txt + else + [ -s "js/js_livelinks.txt" ] && axiom-scan js/js_livelinks.txt -m mantra -ua ${HEADER} -s -o js/js_secrets.txt $AXIOM_EXTRA_ARGS &>/dev/null + fi + [ -s "js/js_secrets.txt" ] && sed -r "s/\x1B\[([0-9]{1,3}(;[0-9]{1,2};?)?)?[mGK]//g" -i js/js_secrets.txt + printf "${yellow} Running : Building wordlist 5/5${reset}\n" + [ -s "js/js_livelinks.txt" ] && interlace -tL js/js_livelinks.txt -threads ${INTERLACE_THREADS} -c "python3 $tools/getjswords.py '_target_' | anew -q webs/dict_words.txt" 2>>"$LOGFILE" >/dev/null + else + end_func "No JS urls found for $domain, function skipped" ${FUNCNAME[0]} + fi + else + if [ "$JSCHECKS" = false ]; then + printf "\n${yellow} ${FUNCNAME[0]} skipped in this mode or defined in reconftw.cfg ${reset}\n" + else + printf "${yellow} ${FUNCNAME[0]} is already processed, to force executing ${FUNCNAME[0]} delete\n $called_fn_dir/.${FUNCNAME[0]} ${reset}\n\n" + fi + fi +} + +jschecks $DOMAIN diff --git a/bin/rftw_web_nucleicheck b/bin/rftw_web_nucleicheck new file mode 100755 index 00000000..463bf60a --- /dev/null +++ b/bin/rftw_web_nucleicheck @@ -0,0 +1,82 @@ +#!/bin/bash + +# Load environment variables from reconftw.cfg +# Load environment variables +if [ -f "reconftw.cfg" ]; then + source reconftw.cfg +else + echo "Error: reconftw.cfg not found!" + exit 1 +fi + +# Help function +help_menu() { + echo "Usage: $0 [OPTIONS]" + echo "" + echo "Templates Based Web Scanner" + echo "" + echo "Options:" + echo " -h, --help Show this help menu" + echo " -f, --force Force the execution even if already processed" +} + +# Start function +start_func() { + echo "Starting $1: $2..." +} + +# End function +end_func() { + echo "$1" + echo "End of $2..." +} + +# Default values +FORCE=false + +# Parse arguments +while [[ "$#" -gt 0 ]]; do + case $1 in + -h|--help) help_menu; exit 0 ;; + -f|--force) FORCE=true ;; + *) echo "Unknown parameter passed: $1"; exit 1 ;; + esac + shift +done + +if { [ ! -f "$called_fn_dir/.nuclei_check" ] || [ "$FORCE" = true ]; } && [ "$NUCLEICHECK" = true ]; then + start_func "nuclei_check" "Templates based web scanner" + nuclei -update 2>>"$LOGFILE" >/dev/null + mkdir -p nuclei_output + + anew -q .tmp/webs_all.txt < <(cat webs/webs.txt webs/webs_uncommon_ports.txt 2>/dev/null) + anew -q .tmp/webs_subs.txt < <(cat subdomains/subdomains.txt .tmp/webs_all.txt 2>>"$LOGFILE") + + if [ ! 
"$AXIOM" = true ]; then + IFS=',' read -ra severity_array <<< "$NUCLEI_SEVERITY" + for crit in "${severity_array[@]}" + do + printf "${yellow}\n Running : Nuclei $crit ${reset}\n\n" + nuclei $NUCLEI_FLAGS -severity $crit -nh -rl $NUCLEI_RATELIMIT -o nuclei_output/${crit}.txt < .tmp/webs_subs.txt + done + printf "\n\n" + else + if [ -s ".tmp/webs_subs.txt" ]; then + IFS=',' read -ra severity_array <<< "$NUCLEI_SEVERITY" + for crit in "${severity_array[@]}" + do + printf "${yellow}\n Running : Nuclei $crit, check results on nuclei_output folder${reset}\n\n" + axiom-scan .tmp/webs_subs.txt -m nuclei --nuclei-templates ${NUCLEI_TEMPLATES_PATH} -severity ${crit} -nh -rl $NUCLEI_RATELIMIT -o nuclei_output/${crit}.txt $AXIOM_EXTRA_ARGS 2>>"$LOGFILE" >/dev/null + [ -s "nuclei_output/${crit}.txt" ] && cat nuclei_output/${crit}.txt + done + printf "\n\n" + fi + fi + end_func "Results are saved in $domain/nuclei_output folder" "nuclei_check" +else + if [ "$NUCLEICHECK" = false ]; then + printf "\n${yellow} nuclei_check skipped in this mode or defined in reconftw.cfg ${reset}\n" + else + printf "${yellow} nuclei_check is already processed. To force executing nuclei_check, delete\n $called_fn_dir/.nuclei_check ${reset}\n\n" + fi +fi diff --git a/bin/rftw_web_passdict b/bin/rftw_web_passdict new file mode 100755 index 00000000..1a7a749d --- /dev/null +++ b/bin/rftw_web_passdict @@ -0,0 +1,67 @@ +#!/bin/bash + +# Load environment variables from reconftw.cfg +# Load environment variables +if [ -f "reconftw.cfg" ]; then + source reconftw.cfg +else + echo "Error: reconftw.cfg not found!" + exit 1 +fi + +# Help menu +help_menu() { + echo "Usage: $0 [OPTIONS]" + echo "Password Dictionary Generation using pydictor" + echo "" + echo "Options:" + echo " -d, --domain DOMAIN Set domain (required)" + echo " -h, --help Display this help message and exit" + exit 0 +} + +# Input validation +if [ $# -eq 0 ]; then + help_menu +fi + +DOMAIN="" +while [ "$1" != "" ]; do + case $1 in + -d | --domain ) shift + DOMAIN=$1 + ;; + -h | --help ) help_menu + exit + ;; + * ) echo "Unknown option: $1" + exit 1 + esac + shift +done + +if [ -z "$DOMAIN" ]; then + echo "Error: Domain is required!" + exit 1 +fi + +password_dict() { + local domain=$1 + + if { [ ! -f "$called_fn_dir/.${FUNCNAME[0]}" ] || [ "$DIFF" = true ]; } && [ "$PASSWORD_DICT" = true ]; then + start_func ${FUNCNAME[0]} "Password dictionary generation" + + local word=${domain%%.*} + python3 $tools/pydictor/pydictor.py -extend $word --leet 0 1 2 11 21 --len ${PASSWORD_MIN_LENGTH} ${PASSWORD_MAX_LENGTH} -o webs/password_dict.txt 2>>"$LOGFILE" >/dev/null + + end_func "Results are saved in $domain/webs/password_dict.txt" ${FUNCNAME[0]} + else + if [ "$PASSWORD_DICT" = false ]; then + printf "\n${yellow} ${FUNCNAME[0]} skipped in this mode or defined in reconftw.cfg ${reset}\n" + else + printf "${yellow} ${FUNCNAME[0]} is already processed, to force executing ${FUNCNAME[0]} delete\n $called_fn_dir/.${FUNCNAME[0]} ${reset}\n\n" + fi + fi +} + +password_dict $DOMAIN diff --git a/bin/rftw_web_probecommon b/bin/rftw_web_probecommon new file mode 100755 index 00000000..934f86ab --- /dev/null +++ b/bin/rftw_web_probecommon @@ -0,0 +1,70 @@ +#!/bin/bash + +# Load environment variables from reconftw.cfg +# Load environment variables +if [ -f "reconftw.cfg" ]; then + source reconftw.cfg +else + echo "Error: reconftw.cfg not found!" + exit 1 +fi + +help_menu() { + echo "Usage: $0 [DOMAIN] [OPTIONS]" + echo "Web probing tool for specified domain." 
+ echo + echo "Options:" + echo " -h, --help Display this help menu and exit" + echo " -f, --force Force the execution even if it was already processed" +} + +validate_inputs() { + if [[ -z "$domain" ]]; then + echo -e "${yellow} No domain provided! ${reset}" + exit 1 + fi + + if [ "$WEBPROBESIMPLE" != true ] && [ "$FORCE_EXECUTION" != true ]; then + echo -e "${yellow} webprobe_simple skipped in this mode or defined in reconftw.cfg ${reset}" + exit 0 + fi +} + +run_webprobe_simple() { + start_subfunc "webprobe_simple" "Running : Http probing $domain" + if [ ! "$AXIOM" = true ]; then + cat subdomains/subdomains.txt | httpx ${HTTPX_FLAGS} -no-color -json -random-agent -threads $HTTPX_THREADS -rl $HTTPX_RATELIMIT -retries 2 -timeout $HTTPX_TIMEOUT -o .tmp/web_full_info_probe.txt 2>>"$LOGFILE" >/dev/null + else + axiom-scan subdomains/subdomains.txt -m httpx ${HTTPX_FLAGS} -no-color -json -random-agent -threads $HTTPX_THREADS -rl $HTTPX_RATELIMIT -retries 2 -timeout $HTTPX_TIMEOUT -o .tmp/web_full_info_probe.txt $AXIOM_EXTRA_ARGS 2>>"$LOGFILE" >/dev/null + fi + + cat .tmp/web_full_info.txt .tmp/web_full_info_probe.txt webs/web_full_info.txt 2>>"$LOGFILE" | jq -s 'try .' | jq 'try unique_by(.input)' | jq 'try .[]' 2>>"$LOGFILE" > webs/web_full_info.txt + [ -s "webs/web_full_info.txt" ] && cat webs/web_full_info.txt | jq -r 'try .url' 2>/dev/null | grep "$domain" | sed "s/*.//" | anew -q .tmp/probed_tmp.txt + [ -s "webs/web_full_info.txt" ] && cat webs/web_full_info.txt | jq -r 'try . |"\(.url) [\(.status_code)] [\(.title)] [\(.webserver)] \(.tech)"' | grep "$domain" | anew -q webs/web_full_info_plain.txt + [ -s "$outOfScope_file" ] && deleteOutScoped $outOfScope_file .tmp/probed_tmp.txt + NUMOFLINES=$(cat .tmp/probed_tmp.txt 2>>"$LOGFILE" | anew webs/webs.txt | sed '/^$/d' | wc -l) + cat webs/webs.txt webs/webs_uncommon_ports.txt 2>/dev/null | anew -q .tmp/webs_all.txt + end_subfunc "${NUMOFLINES} new websites resolved" "webprobe_simple" + + if [ "$PROXY" = true ] && [ -n "$proxy_url" ] && [[ $(cat webs/webs.txt| wc -l) -le $DEEP_LIMIT2 ]]; then + notification "Sending websites to proxy" info + ffuf -mc all -w webs/webs.txt -u FUZZ -replay-proxy $proxy_url 2>>"$LOGFILE" >/dev/null + fi +} + +# Main +FORCE_EXECUTION=false +domain="$1" + +shift +while [[ "$#" -gt 0 ]]; do + case $1 in + -h|--help) help_menu; exit 0 ;; + -f|--force) FORCE_EXECUTION=true; shift ;; + *) echo "Unknown parameter passed: $1"; exit 1 ;; + esac + shift +done + +validate_inputs +run_webprobe_simple \ No newline at end of file diff --git a/bin/rftw_web_probeuncommon b/bin/rftw_web_probeuncommon new file mode 100755 index 00000000..3e1e936d --- /dev/null +++ b/bin/rftw_web_probeuncommon @@ -0,0 +1,81 @@ +#!/bin/bash + +# Load environment variables from reconftw.cfg +# Load environment variables +if [ -f "reconftw.cfg" ]; then + source reconftw.cfg +else + echo "Error: reconftw.cfg not found!" + exit 1 +fi + +help_menu() { + echo "Usage: $0 [DOMAIN] [OPTIONS]" + echo "Web probing tool for non-standard ports for the specified domain." + echo + echo "Options:" + echo " -h, --help Display this help menu and exit" + echo " -f, --force Force the execution even if it was already processed" +} + +validate_inputs() { + if [[ -z "$domain" ]]; then + echo -e "${yellow} No domain provided! 
${reset}" + exit 1 + fi + + if [ "$WEBPROBEFULL" != true ] && [ "$FORCE_EXECUTION" != true ]; then + echo -e "${yellow} webprobe_full skipped in this mode or defined in reconftw.cfg ${reset}" + exit 0 + fi +} + +run_webprobe_full() { + start_func "webprobe_full" "Http probing non-standard ports" + if [ -s "subdomains/subdomains.txt" ]; then + if [ ! "$AXIOM" = true ]; then + cat subdomains/subdomains.txt | httpx -follow-host-redirects -random-agent -status-code -p $UNCOMMON_PORTS_WEB -threads $HTTPX_UNCOMMONPORTS_THREADS -timeout $HTTPX_UNCOMMONPORTS_TIMEOUT -silent -retries 2 -title -web-server -tech-detect -location -no-color -json -o .tmp/web_full_info_uncommon.txt 2>>"$LOGFILE" >/dev/null + else + axiom-scan subdomains/subdomains.txt -m httpx -follow-host-redirects -H \"${HEADER}\" -status-code -p $UNCOMMON_PORTS_WEB -threads $HTTPX_UNCOMMONPORTS_THREADS -timeout $HTTPX_UNCOMMONPORTS_TIMEOUT -silent -retries 2 -title -web-server -tech-detect -location -no-color -json -o .tmp/web_full_info_uncommon.txt $AXIOM_EXTRA_ARGS 2>>"$LOGFILE" >/dev/null + fi + fi + + if [ -s ".tmp/web_full_info_uncommon.txt" ]; then + cat .tmp/web_full_info_uncommon.txt | jq -r 'try .url' 2>/dev/null | grep "$domain" | sed "s/*.//" | anew -q .tmp/probed_uncommon_ports_tmp.txt + cat .tmp/web_full_info_uncommon.txt | jq -r 'try . |"\(.url) [\(.status_code)] [\(.title)] [\(.webserver)] \(.tech)"' | anew -q webs/web_full_info_uncommon_plain.txt + + if [[ $domain =~ ^[0-9]+\.[0-9]+\.[0-9]+\.[0-9] ]]; then + cat .tmp/web_full_info_uncommon.txt 2>>"$LOGFILE" | anew -q webs/web_full_info_uncommon.txt + else + cat .tmp/web_full_info_uncommon.txt 2>>"$LOGFILE" | grep "$domain" | anew -q webs/web_full_info_uncommon.txt + fi + fi + + NUMOFLINES=$(cat .tmp/probed_uncommon_ports_tmp.txt 2>>"$LOGFILE" | anew webs/webs_uncommon_ports.txt | sed '/^$/d' | wc -l) + notification "Uncommon web ports: ${NUMOFLINES} new websites" good + [ -s "webs/webs_uncommon_ports.txt" ] && cat webs/webs_uncommon_ports.txt + cat webs/webs.txt webs/webs_uncommon_ports.txt 2>/dev/null | anew -q .tmp/webs_all.txt + end_func "Results are saved in $domain/webs/webs_uncommon_ports.txt" "webprobe_full" + + if [ "$PROXY" = true ] && [ -n "$proxy_url" ] && [[ $(cat webs/webs_uncommon_ports.txt| wc -l) -le $DEEP_LIMIT2 ]]; then + notification "Sending websites with uncommon ports to proxy" info + ffuf -mc all -w webs/webs_uncommon_ports.txt -u FUZZ -replay-proxy $proxy_url 2>>"$LOGFILE" >/dev/null + fi +} + +# Main +FORCE_EXECUTION=false +domain="$1" + +shift +while [[ "$#" -gt 0 ]]; do + case $1 in + -h|--help) help_menu; exit 0 ;; + -f|--force) FORCE_EXECUTION=true; shift ;; + *) echo "Unknown parameter passed: $1"; exit 1 ;; + esac + shift +done + +validate_inputs +run_webprobe_full diff --git a/bin/rftw_web_roboxtractor b/bin/rftw_web_roboxtractor new file mode 100755 index 00000000..ec5aa1c3 --- /dev/null +++ b/bin/rftw_web_roboxtractor @@ -0,0 +1,69 @@ +#!/bin/bash + +# Load environment variables from reconftw.cfg +# Load environment variables +if [ -f "reconftw.cfg" ]; then + source reconftw.cfg +else + echo "Error: reconftw.cfg not found!" 
+ exit 1 +fi + +# Help menu +help_menu() { + echo "Usage: $0 [OPTIONS]" + echo "Robots Wordlist Generation using roboxtractor" + echo "" + echo "Options:" + echo " -d, --domain DOMAIN Set domain (required)" + echo " -h, --help Display this help message and exit" + exit 0 +} + +# Input validation +if [ $# -eq 0 ]; then + help_menu +fi + +DOMAIN="" +while [ "$1" != "" ]; do + case $1 in + -d | --domain ) shift + DOMAIN=$1 + ;; + -h | --help ) help_menu + exit + ;; + * ) echo "Unknown option: $1" + exit 1 + esac + shift +done + +if [ -z "$DOMAIN" ]; then + echo "Error: Domain is required!" + exit 1 +fi + +wordlist_gen_roboxtractor() { + local domain=$1 + + if { [ ! -f "$called_fn_dir/.${FUNCNAME[0]}" ] || [ "$DIFF" = true ]; } && [ "$ROBOTSWORDLIST" = true ]; then + start_func ${FUNCNAME[0]} "Robots wordlist generation" + + [ ! -s ".tmp/webs_all.txt" ] && cat webs/webs.txt webs/webs_uncommon_ports.txt 2>/dev/null | anew -q .tmp/webs_all.txt + if [ -s ".tmp/webs_all.txt" ]; then + cat .tmp/webs_all.txt | roboxtractor -m 1 -wb 2>/dev/null | anew -q webs/robots_wordlist.txt + fi + + end_func "Results are saved in $domain/webs/robots_wordlist.txt" ${FUNCNAME[0]} + else + if [ "$ROBOTSWORDLIST" = false ]; then + printf "\n${yellow} ${FUNCNAME[0]} skipped in this mode or defined in reconftw.cfg ${reset}\n" + else + printf "${yellow} ${FUNCNAME[0]} is already processed, to force executing ${FUNCNAME[0]} delete\n $called_fn_dir/.${FUNCNAME[0]} ${reset}\n\n" + fi + fi +} + +wordlist_gen_roboxtractor $DOMAIN diff --git a/bin/rftw_web_screenshot b/bin/rftw_web_screenshot new file mode 100755 index 00000000..81935dfc --- /dev/null +++ b/bin/rftw_web_screenshot @@ -0,0 +1,65 @@ +#!/bin/bash + +# Load environment variables from reconftw.cfg +# Load environment variables +if [ -f "reconftw.cfg" ]; then + source reconftw.cfg +else + echo "Error: reconftw.cfg not found!" + exit 1 +fi + +help_menu() { + echo "Usage: $0 [DOMAIN] [OPTIONS]" + echo "Web Screenshots tool for the specified domain." + echo + echo "Options:" + echo " -h, --help Display this help menu and exit" + echo " -f, --force Force the execution even if it was already processed" +} + +validate_inputs() { + if [[ -z "$domain" ]]; then + echo -e "${yellow} No domain provided! ${reset}" + exit 1 + fi + + if [ "$WEBSCREENSHOT" != true ] && [ "$FORCE_EXECUTION" != true ]; then + echo -e "${yellow} screenshot skipped in this mode or defined in reconftw.cfg ${reset}" + exit 0 + fi +} + +run_screenshot() { + start_func "screenshot" "Web Screenshots" + + [ ! -s ".tmp/webs_all.txt" ] && cat webs/webs.txt webs/webs_uncommon_ports.txt 2>/dev/null | anew -q .tmp/webs_all.txt + + num_lines=$(wc -l < .tmp/webs_all.txt) + dynamic_gowitness_timeout=$(expr $num_lines \* $GOWITNESS_TIMEOUT_PER_SITE) + + if [ ! 
"$AXIOM" = true ]; then + [ -s ".tmp/webs_all.txt" ] && timeout -k 1m ${dynamic_gowitness_timeout}s gowitness file -f .tmp/webs_all.txt -t $GOWITNESS_THREADS $GOWITNESS_FLAGS 2>>"$LOGFILE" + else + timeout -k 1m ${dynamic_gowitness_timeout}s axiom-scan .tmp/webs_all.txt -m gowitness -t $GOWITNESS_THREADS $GOWITNESS_FLAGS -o screenshots $AXIOM_EXTRA_ARGS 2>>"$LOGFILE" >/dev/null + fi + + end_func "Results are saved in $domain/screenshots folder" "screenshot" +} + +# Main +FORCE_EXECUTION=false +domain="$1" + +shift +while [[ "$#" -gt 0 ]]; do + case $1 in + -h|--help) help_menu; exit 0 ;; + -f|--force) FORCE_EXECUTION=true; shift ;; + *) echo "Unknown parameter passed: $1"; exit 1 ;; + esac + shift +done + +validate_inputs +run_screenshot diff --git a/bin/rftw_web_urlchecks b/bin/rftw_web_urlchecks new file mode 100755 index 00000000..a83bb38f --- /dev/null +++ b/bin/rftw_web_urlchecks @@ -0,0 +1,133 @@ +#!/bin/bash + +# Load environment variables +if [ -f "reconftw.cfg" ]; then + source reconftw.cfg +else + echo "Error: reconftw.cfg not found!" + exit 1 +fi + +# Help menu function +help_menu() { + echo "Usage: urlchecks.sh [OPTIONS]" + echo "" + echo "Options:" + echo " -d, --domain Specify the domain to scan." + echo " -h, --help Display this help menu." + echo "" + echo "Example:" + echo " urlchecks.sh -d example.com" +} + +# Input validation function +validate_input() { + if [ -z "$domain" ]; then + echo "Error: Domain not specified." + help_menu + exit 1 + fi +} + +# URL checks function +urlchecks() { + if { [ ! -f "$called_fn_dir/.urlchecks" ] || [ "$DIFF" = true ]; } && [ "$URL_CHECK" = true ]; then + echo "[+] Starting URL Extraction" + + mkdir -p js + [ ! -s ".tmp/webs_all.txt" ] && cat webs/webs.txt webs/webs_uncommon_ports.txt 2>/dev/null | anew -q .tmp/webs_all.txt + if [ -s ".tmp/webs_all.txt" ]; then + if [ ! "$AXIOM" = true ]; then + if [ "$URL_CHECK_PASSIVE" = true ]; then + if [ "$DEEP" = true ]; then + cat .tmp/webs_all.txt | unfurl -u domains > .tmp/waymore_input.txt + python3 ${tools}/waymore/waymore.py -i .tmp/waymore_input.txt -mode U -f -oU .tmp/url_extract_tmp.txt 2>>"$LOGFILE" >/dev/null + else + cat .tmp/webs_all.txt | gau --threads $GAU_THREADS | anew -q .tmp/url_extract_tmp.txt + fi + if [ -s "${GITHUB_TOKENS}" ]; then + github-endpoints -q -k -d $domain -t ${GITHUB_TOKENS} -o .tmp/github-endpoints.txt 2>>"$LOGFILE" >/dev/null + [ -s ".tmp/github-endpoints.txt" ] && cat .tmp/github-endpoints.txt | anew -q .tmp/url_extract_tmp.txt + fi + fi + diff_webs=$(diff <(sort -u .tmp/probed_tmp.txt 2>>"$LOGFILE") <(sort -u .tmp/webs_all.txt 2>>"$LOGFILE") | wc -l) + if [ $diff_webs != "0" ] || [ ! 
-s ".tmp/katana.txt" ]; then + if [ "$URL_CHECK_ACTIVE" = true ]; then + if [ "$DEEP" = true ]; then + katana -silent -list .tmp/webs_all.txt -jc -kf all -c $KATANA_THREADS -d 3 -fs rdn -o .tmp/katana.txt 2>>"$LOGFILE" >/dev/null + else + katana -silent -list .tmp/webs_all.txt -jc -kf all -c $KATANA_THREADS -d 2 -fs rdn -o .tmp/katana.txt 2>>"$LOGFILE" >/dev/null + fi + fi + fi + else + if [ "$URL_CHECK_PASSIVE" = true ]; then + if [ "$DEEP" = true ]; then + cat .tmp/webs_all.txt | unfurl -u domains > .tmp/waymore_input.txt + axiom-scan .tmp/waymore_input.txt -m waymore -o .tmp/url_extract_tmp.txt $AXIOM_EXTRA_ARGS 2>>"$LOGFILE" >/dev/null + else + axiom-scan .tmp/webs_all.txt -m gau -o .tmp/url_extract_tmp.txt $AXIOM_EXTRA_ARGS 2>>"$LOGFILE" >/dev/null + fi + if [ -s "${GITHUB_TOKENS}" ]; then + github-endpoints -q -k -d $domain -t ${GITHUB_TOKENS} -o .tmp/github-endpoints.txt 2>>"$LOGFILE" >/dev/null + [ -s ".tmp/github-endpoints.txt" ] && cat .tmp/github-endpoints.txt | anew -q .tmp/url_extract_tmp.txt + fi + fi + diff_webs=$(diff <(sort -u .tmp/probed_tmp.txt) <(sort -u .tmp/webs_all.txt) | wc -l) + if [ $diff_webs != "0" ] || [ ! -s ".tmp/katana.txt" ]; then + if [ "$URL_CHECK_ACTIVE" = true ]; then + if [ "$DEEP" = true ]; then + axiom-scan .tmp/webs_all.txt -m katana -jc -kf all -d 3 -fs rdn -fs rdn -o .tmp/katana.txt $AXIOM_EXTRA_ARGS 2>>"$LOGFILE" >/dev/null + else + axiom-scan .tmp/webs_all.txt -m katana -jc -kf all -d 2 -fs rdn -fs rdn -o .tmp/katana.txt $AXIOM_EXTRA_ARGS 2>>"$LOGFILE" >/dev/null + fi + fi + fi + fi + [ -s ".tmp/katana.txt" ] && sed -i '/^.\{2048\}./d' .tmp/katana.txt + [ -s ".tmp/katana.txt" ] && cat .tmp/katana.txt | anew -q .tmp/url_extract_tmp.txt + [ -s ".tmp/url_extract_tmp.txt" ] && cat .tmp/url_extract_tmp.txt | grep "${domain}" | grep -aEi "\.(js)" | anew -q .tmp/url_extract_js.txt + if [ "$DEEP" = true ]; then + [ -s ".tmp/url_extract_js.txt" ] && interlace -tL .tmp/url_extract_js.txt -threads 10 -c "python3 $tools/JSA/jsa.py -f target | anew -q .tmp/url_extract_tmp.txt" &>/dev/null + fi + [ -s ".tmp/url_extract_tmp.txt" ] && cat .tmp/url_extract_tmp.txt | grep "${domain}" | grep "=" | qsreplace -a 2>>"$LOGFILE" | grep -aEiv "\.(eot|jpg|jpeg|gif|css|tif|tiff|png|ttf|otf|woff|woff2|ico|pdf|svg|txt|js)$" | anew -q .tmp/url_extract_tmp2.txt + [ -s ".tmp/url_extract_tmp2.txt" ] && cat .tmp/url_extract_tmp2.txt | python3 $tools/urless/urless/urless.py | anew -q .tmp/url_extract_uddup.txt 2>>"$LOGFILE" >/dev/null + NUMOFLINES=$(cat .tmp/url_extract_uddup.txt 2>>"$LOGFILE" | anew webs/url_extract.txt | sed '/^$/d' | wc -l) + notification "${NUMOFLINES} new urls with params" info + end_func "Results are saved in $domain/webs/url_extract.txt" ${FUNCNAME[0]} + if [ "$PROXY" = true ] && [ -n "$proxy_url" ] && [[ $(cat webs/url_extract.txt | wc -l) -le $DEEP_LIMIT2 ]]; then + notification "Sending urls to proxy" info + ffuf -mc all -w webs/url_extract.txt -u FUZZ -replay-proxy $proxy_url 2>>"$LOGFILE" >/dev/null + fi + fi + else + if [ "$URL_CHECK" = false ]; then + echo "[!] URL checks skipped in this mode or defined in reconftw.cfg" + else + echo "[!] 
URL checks are already processed, to force executing delete $called_fn_dir/.urlchecks" + fi + fi +} + +# Parse command-line arguments +while [ "$#" -gt 0 ]; do + case "$1" in + -d|--domain) + domain="$2" + shift 2 + ;; + -h|--help) + help_menu + exit 0 + ;; + *) + echo "Unknown parameter: $1" + help_menu + exit 1 + ;; + esac +done + +# Validate input and start main function +validate_input +urlchecks diff --git a/bin/rftw_web_urlext b/bin/rftw_web_urlext new file mode 100755 index 00000000..620dab22 --- /dev/null +++ b/bin/rftw_web_urlext @@ -0,0 +1,97 @@ +#!/bin/bash + +# Load environment variables +if [ -f "reconftw.cfg" ]; then + source reconftw.cfg +else + echo "Error: reconftw.cfg not found!" + exit 1 +fi + +# Help menu function +help_menu() { + echo "Usage: url_ext.sh [OPTIONS]" + echo "" + echo "Options:" + echo " -i, --input-file Specify the input URL file for extraction." + echo " -o, --output-file Specify the output file for URLs by extension." + echo " -h, --help Display this help menu." + echo "" + echo "Example:" + echo " url_ext.sh -i .tmp/url_extract_tmp.txt -o webs/urls_by_ext.txt" +} + +# Input validation function +validate_input() { + if [ -z "$input_file" ]; then + echo "Error: Input file not specified." + help_menu + exit 1 + elif [ ! -f "$input_file" ]; then + echo "Error: Specified input file does not exist." + exit 1 + fi + + if [ -z "$output_file" ]; then + echo "Error: Output file not specified." + help_menu + exit 1 + fi +} + +# Extract URLs by extension function +url_ext() { + if { [ ! -f "$called_fn_dir/.url_ext" ] || [ "$DIFF" = true ]; } && [ "$URL_EXT" = true ]; then + if [ -s "$input_file" ]; then + echo "[+] Extracting URLs by extension..." + + ext=("7z" "achee" "action" "adr" "apk" "arj" "ascx" "asmx" "asp" "aspx" "axd" "backup" "bak" "bat" "bin" "bkf" "bkp" "bok" "cab" "cer" "cfg" "cfm" "cfml" "cgi" "cnf" "conf" "config" "cpl" "crt" "csr" "csv" "dat" "db" "dbf" "deb" "dmg" "dmp" "doc" "docx" "drv" "email" "eml" "emlx" "env" "exe" "gadget" "gz" "html" "ica" "inf" "ini" "iso" "jar" "java" "jhtml" "json" "jsp" "key" "log" "lst" "mai" "mbox" "mbx" "md" "mdb" "msg" "msi" "nsf" "ods" "oft" "old" "ora" "ost" "pac" "passwd" "pcf" "pdf" "pem" "pgp" "php" "php3" "php4" "php5" "phtm" "phtml" "pkg" "pl" "plist" "pst" "pwd" "py" "rar" "rb" "rdp" "reg" "rpm" "rtf" "sav" "sh" "shtm" "shtml" "skr" "sql" "swf" "sys" "tar" "tar.gz" "tmp" "toast" "tpl" "txt" "url" "vcd" "vcf" "wml" "wpd" "wsdl" "wsf" "xls" "xlsm" "xlsx" "xml" "xsd" "yaml" "yml" "z" "zip") + + # Truncate or create the output file + echo "" > "$output_file" + + for t in "${ext[@]}"; do + NUMOFLINES=$(grep -aEi "\.(${t})($|\/|\?)" "$input_file" | sort -u | sed '/^$/d' | wc -l) + if [[ $NUMOFLINES -gt 0 ]]; then + echo -e "\n############################\n + ${t} + \n############################\n" >> "$output_file" + grep -aEi "\.(${t})($|\/|\?)" "$input_file" >> "$output_file" + fi + done + + echo "[+] Results are saved in $output_file" + fi + else + if [ "$URL_EXT" = false ]; then + echo "[!] URL_Ext skipped in this mode or defined in reconftw.cfg" + else + echo "[!] 
URL_Ext is already processed, to force executing delete $called_fn_dir/.url_ext" + fi + fi +} + +# Parse command-line arguments +while [ "$#" -gt 0 ]; do + case "$1" in + -i|--input-file) + input_file="$2" + shift 2 + ;; + -o|--output-file) + output_file="$2" + shift 2 + ;; + -h|--help) + help_menu + exit 0 + ;; + *) + echo "Unknown parameter: $1" + help_menu + exit 1 + ;; + esac +done + +# Validate input and start the main function +validate_input +url_ext diff --git a/bin/rftw_web_urlgf b/bin/rftw_web_urlgf new file mode 100755 index 00000000..d2f002ed --- /dev/null +++ b/bin/rftw_web_urlgf @@ -0,0 +1,84 @@ +#!/bin/bash + +# Load environment variables +if [ -f "reconftw.cfg" ]; then + source reconftw.cfg +else + echo "Error: reconftw.cfg not found!" + exit 1 +fi + +# Help menu function +help_menu() { + echo "Usage: url_gf.sh [OPTIONS]" + echo "" + echo "Options:" + echo " -u, --url-file Specify the URL file to scan." + echo " -h, --help Display this help menu." + echo "" + echo "Example:" + echo " url_gf.sh -u webs/url_extract.txt" +} + +# Input validation function +validate_input() { + if [ -z "$url_file" ]; then + echo "Error: URL file not specified." + help_menu + exit 1 + elif [ ! -f "$url_file" ]; then + echo "Error: Specified URL file does not exist." + exit 1 + fi +} + +# Vulnerable pattern search function +url_gf() { + if { [ ! -f "$called_fn_dir/.url_gf" ] || [ "$DIFF" = true ]; } && [ "$URL_GF" = true ]; then + echo "[+] Starting Vulnerable Pattern Search" + + mkdir -p gf + if [ -s "$url_file" ]; then + gf xss "$url_file" | anew -q gf/xss.txt + gf ssti "$url_file" | anew -q gf/ssti.txt + gf ssrf "$url_file" | anew -q gf/ssrf.txt + gf sqli "$url_file" | anew -q gf/sqli.txt + gf redirect "$url_file" | anew -q gf/redirect.txt + [ -s "gf/ssrf.txt" ] && cat gf/ssrf.txt | anew -q gf/redirect.txt + gf rce "$url_file" | anew -q gf/rce.txt + gf potential "$url_file" | cut -d ':' -f3-5 | anew -q gf/potential.txt + [ -s ".tmp/url_extract_tmp.txt" ] && cat .tmp/url_extract_tmp.txt | grep -aEiv "\.(eot|jpg|jpeg|gif|css|tif|tiff|png|ttf|otf|woff|woff2|ico|pdf|svg|txt|js)$" | unfurl -u format %s://%d%p 2>>"$LOGFILE" | anew -q gf/endpoints.txt + gf lfi "$url_file" | anew -q gf/lfi.txt + fi + echo "[+] Results are saved in $domain/gf folder" + else + if [ "$URL_GF" = false ]; then + echo "[!] URL_Gf skipped in this mode or defined in reconftw.cfg" + else + echo "[!] URL_Gf is already processed, to force executing delete $called_fn_dir/.url_gf" + fi + fi +} + +# Parse command-line arguments +while [ "$#" -gt 0 ]; do + case "$1" in + -u|--url-file) + url_file="$2" + shift 2 + ;; + -h|--help) + help_menu + exit 0 + ;; + *) + echo "Unknown parameter: $1" + help_menu + exit 1 + ;; + esac +done + +# Validate input and start main function +validate_input +url_gf diff --git a/bin/rftw_web_wafcheck b/bin/rftw_web_wafcheck new file mode 100755 index 00000000..3ae55f00 --- /dev/null +++ b/bin/rftw_web_wafcheck @@ -0,0 +1,82 @@ +#!/bin/bash + +# Load environment variables +if [ -f "reconftw.cfg" ]; then + source reconftw.cfg +else + echo "Error: reconftw.cfg not found!" + exit 1 +fi + +# Help function +help_menu() { + echo "Usage: $0 [OPTIONS]" + echo "" + echo "Website's WAF Detection Tool" + echo "" + echo "Options:" + echo " -h, --help Show this help menu" + echo " -f, --force Force the execution even if already processed" +} + +# Start function +start_func() { + echo "Starting $1: $2..." +} + +# End function +end_func() { + echo "$1" + echo "End of $2..." 
+} + +# Notification function +notification() { + echo "$1" +} + +# Default values +FORCE=false + +# Parse arguments +while [[ "$#" -gt 0 ]]; do + case $1 in + -h|--help) help_menu; exit 0 ;; + -f|--force) FORCE=true ;; + *) echo "Unknown parameter passed: $1"; exit 1 ;; + esac + shift +done + +if { [ ! -f "$called_fn_dir/.waf_checks" ] || [ "$FORCE" = true ]; } && [ "$WAF_DETECTION" = true ]; then + start_func "waf_checks" "Website's WAF detection" + + if [ ! -s ".tmp/webs_all.txt" ]; then + anew -q .tmp/webs_all.txt < <(cat webs/webs.txt webs/webs_uncommon_ports.txt 2>/dev/null) + fi + + if [ -s ".tmp/webs_all.txt" ]; then + if [ ! "$AXIOM" = true ]; then + wafw00f -i .tmp/webs_all.txt -o .tmp/wafs.txt 2>>"$LOGFILE" >/dev/null + else + axiom-scan .tmp/webs_all.txt -m wafw00f -o .tmp/wafs.txt $AXIOM_EXTRA_ARGS 2>>"$LOGFILE" >/dev/null + fi + + if [ -s ".tmp/wafs.txt" ]; then + sed -e 's/^[ \t]*//' -e 's/ \+ /\t/g' -e '/(None)/d' .tmp/wafs.txt | tr -s "\t" ";" > webs/webs_wafs.txt + NUMOFLINES=$(sed '/^$/d' webs/webs_wafs.txt 2>>"$LOGFILE" | wc -l) + notification "${NUMOFLINES} websites protected by waf" info + end_func "Results are saved in $domain/webs/webs_wafs.txt" "waf_checks" + else + end_func "No results found" "waf_checks" + fi + else + end_func "No websites to scan" "waf_checks" + fi +else + if [ "$WAF_DETECTION" = false ]; then + printf "\n${yellow} waf_checks skipped in this mode or defined in reconftw.cfg ${reset}\n" + else + printf "${yellow} waf_checks is already processed. To force executing waf_checks, delete\n $called_fn_dir/.waf_checks ${reset}\n\n" + fi +fi diff --git a/bin/rftw_web_wordlists b/bin/rftw_web_wordlists new file mode 100755 index 00000000..234a5054 --- /dev/null +++ b/bin/rftw_web_wordlists @@ -0,0 +1,73 @@ +#!/bin/bash + +# Load environment variables from reconftw.cfg +# Load environment variables +if [ -f "reconftw.cfg" ]; then + source reconftw.cfg +else + echo "Error: reconftw.cfg not found!" + exit 1 +fi + +# Help menu +help_menu() { + echo "Usage: $0 [OPTIONS]" + echo "Wordlist Generation" + echo "" + echo "Options:" + echo " -d, --domain DOMAIN Set domain (required)" + echo " -h, --help Display this help message and exit" + exit 0 +} + +# Input validation +if [ $# -eq 0 ]; then + help_menu +fi + +DOMAIN="" +while [ "$1" != "" ]; do + case $1 in + -d | --domain ) shift + DOMAIN=$1 + ;; + -h | --help ) help_menu + exit + ;; + * ) echo "Unknown option: $1" + exit 1 + esac + shift +done + +if [ -z "$DOMAIN" ]; then + echo "Error: Domain is required!" + exit 1 +fi + +wordlist_gen() { + local domain=$1 + if { [ ! 
-f "$called_fn_dir/.${FUNCNAME[0]}" ] || [ "$DIFF" = true ]; } && [ "$WORDLIST" = true ]; then + start_func ${FUNCNAME[0]} "Wordlist generation" + if [ -s ".tmp/url_extract_tmp.txt" ]; then + cat .tmp/url_extract_tmp.txt | unfurl -u keys 2>>"$LOGFILE" | sed 's/[][]//g' | sed 's/[#]//g' | sed 's/[}{]//g' | anew -q webs/dict_params.txt + cat .tmp/url_extract_tmp.txt | unfurl -u values 2>>"$LOGFILE" | sed 's/[][]//g' | sed 's/[#]//g' | sed 's/[}{]//g' | anew -q webs/dict_values.txt + cat .tmp/url_extract_tmp.txt | tr "[:punct:]" "\n" | anew -q webs/dict_words.txt + fi + [ -s ".tmp/js_endpoints.txt" ] && cat .tmp/js_endpoints.txt | unfurl -u format %s://%d%p 2>>"$LOGFILE" | anew -q webs/all_paths.txt + [ -s ".tmp/url_extract_tmp.txt" ] && cat .tmp/url_extract_tmp.txt | unfurl -u format %s://%d%p 2>>"$LOGFILE" | anew -q webs/all_paths.txt + end_func "Results are saved in $domain/webs/dict_[words|paths].txt" ${FUNCNAME[0]} + if [ "$PROXY" = true ] && [ -n "$proxy_url" ] && [[ $(cat webs/all_paths.txt | wc -l) -le $DEEP_LIMIT2 ]]; then + notification "Sending urls to proxy" info + ffuf -mc all -w webs/all_paths.txt -u FUZZ -replay-proxy $proxy_url 2>>"$LOGFILE" >/dev/null + fi + else + if [ "$WORDLIST" = false ]; then + printf "\n${yellow} ${FUNCNAME[0]} skipped in this mode or defined in reconftw.cfg ${reset}\n" + else + printf "${yellow} ${FUNCNAME[0]} is already processed, to force executing ${FUNCNAME[0]} delete\n $called_fn_dir/.${FUNCNAME[0]} ${reset}\n\n" + fi + fi +} + +wordlist_gen $DOMAIN diff --git a/images/banner.png b/images/banner.png old mode 100644 new mode 100755 diff --git a/images/docker.png b/images/docker.png old mode 100644 new mode 100755 diff --git a/images/reconFTW.gif b/images/reconFTW.gif old mode 100644 new mode 100755 diff --git a/install.sh b/install.sh index 9a7090f1..6723704a 100755 --- a/install.sh +++ b/install.sh @@ -37,7 +37,7 @@ gotools["amass"]="go install -v github.com/owasp-amass/amass/v3/...@master" gotools["ffuf"]="go install -v github.com/ffuf/ffuf/v2@latest" gotools["github-subdomains"]="go install -v github.com/gwen001/github-subdomains@latest" gotools["gitlab-subdomains"]="go install -v github.com/gwen001/gitlab-subdomains@latest" -gotools["nuclei"]="go install -v github.com/projectdiscovery/nuclei/v2/cmd/nuclei@latest" +gotools["nuclei"]="go install -v github.com/projectdiscovery/nuclei/v3/cmd/nuclei@latest" gotools["anew"]="go install -v github.com/tomnomnom/anew@latest" gotools["notify"]="go install -v github.com/projectdiscovery/notify/cmd/notify@latest" gotools["unfurl"]="go install -v github.com/tomnomnom/unfurl@v0.3.0" diff --git a/reconftw.cfg b/reconftw.cfg index 40be4fdc..fee6c739 100644 --- a/reconftw.cfg +++ b/reconftw.cfg @@ -6,7 +6,7 @@ tools=~/Tools # Path installed tools SCRIPTPATH="$( cd "$(dirname "$0")" >/dev/null 2>&1 ; pwd -P )" # Get current script's path profile_shell=".$(basename $(echo $SHELL))rc" # Get current shell profile -reconftw_version=$(git rev-parse --abbrev-ref HEAD)-$(git describe --tags) # Fetch current reconftw version +#reconftw_version=$(git rev-parse --abbrev-ref HEAD)-$(git describe --tags) # Fetch current reconftw version generate_resolvers=false # Generate custom resolvers with dnsvalidator update_resolvers=true # Fetch and rewrite resolvers from trickest/resolvers before DNS resolution resolvers_url="https://raw.githubusercontent.com/trickest/resolvers/main/resolvers.txt" diff --git a/reconftw.sh b/reconftw.sh index 779be012..d62b2c98 100755 --- a/reconftw.sh +++ b/reconftw.sh @@ -1,5 +1,18 @@ 
#!/usr/bin/env bash +# Welcome to reconFTW main script +# ██▀███ ▓█████ ▄████▄ ▒█████ ███▄ █ █████▒▄▄▄█████▓ █ █░ +# ▓██ ▒ ██▒▓█ ▀ ▒██▀ ▀█ ▒██▒ ██▒ ██ ▀█ █ ▓██ ▒ ▓ ██▒ ▓▒▓█░ █ ░█░ +# ▓██ ░▄█ ▒▒███ ▒▓█ ▄ ▒██░ ██▒▓██ ▀█ ██▒▒████ ░ ▒ ▓██░ ▒░▒█░ █ ░█ +# ▒██▀▀█▄ ▒▓█ ▄ ▒▓▓▄ ▄██▒▒██ ██░▓██▒ ▐▌██▒░▓█▒ ░ ░ ▓██▓ ░ ░█░ █ ░█ +# ░██▓ ▒██▒░▒████▒▒ ▓███▀ ░░ ████▓▒░▒██░ ▓██░░▒█░ ▒██▒ ░ ░░██▒██▓ +# ░ ▒▓ ░▒▓░░░ ▒░ ░░ ░▒ ▒ ░░ ▒░▒░▒░ ░ ▒░ ▒ ▒ ▒ ░ ▒ ░░ ░ ▓░▒ ▒ +# ░▒ ░ ▒░ ░ ░ ░ ░ ▒ ░ ▒ ▒░ ░ ░░ ░ ▒░ ░ ░ ▒ ░ ░ +# ░░ ░ ░ ░ ░ ░ ░ ▒ ░ ░ ░ ░ ░ ░ ░ ░ +# ░ ░ ░░ ░ ░ ░ ░ ░ +# +# by @six2dez + function banner_graber(){ source "${SCRIPTPATH}"/banners.txt randx=$(shuf -i 1-23 -n 1) @@ -17,119 +30,9 @@ function banner(){ ################################################### TOOLS ##################################################### ############################################################################################################### -function check_version(){ - timeout 10 git fetch - exit_status=$? - if [ $exit_status -eq 0 ]; then - BRANCH=$(git rev-parse --abbrev-ref HEAD) - HEADHASH=$(git rev-parse HEAD) - UPSTREAMHASH=$(git rev-parse "${BRANCH}"@\{upstream\}) - if [ "$HEADHASH" != "$UPSTREAMHASH" ]; then - printf "\n${yellow} There is a new version, run ./install.sh to get latest version${reset}\n\n" - fi - else - printf "\n${bred} Unable to check updates ${reset}\n\n" - fi -} - -function tools_installed(){ - - printf "\n\n${bgreen}#######################################################################${reset}\n" - printf "${bblue} Checking installed tools ${reset}\n\n" - - allinstalled=true - - [ -n "$GOPATH" ] || { printf "${bred} [*] GOPATH var [NO]${reset}\n"; allinstalled=false;} - [ -n "$GOROOT" ] || { printf "${bred} [*] GOROOT var [NO]${reset}\n"; allinstalled=false;} - [ -n "$PATH" ] || { printf "${bred} [*] PATH var [NO]${reset}\n"; allinstalled=false;} - [ -f "$tools/dorks_hunter/dorks_hunter.py" ] || { printf "${bred} [*] dorks_hunter [NO]${reset}\n"; allinstalled=false;} - [ -f "$tools/brutespray/brutespray.py" ] || { printf "${bred} [*] brutespray [NO]${reset}\n"; allinstalled=false;} - [ -f "$tools/fav-up/favUp.py" ] || { printf "${bred} [*] fav-up [NO]${reset}\n"; allinstalled=false;} - [ -f "$tools/Corsy/corsy.py" ] || { printf "${bred} [*] Corsy [NO]${reset}\n"; allinstalled=false;} - [ -f "$tools/testssl.sh/testssl.sh" ] || { printf "${bred} [*] testssl [NO]${reset}\n"; allinstalled=false;} - [ -f "$tools/CMSeeK/cmseek.py" ] || { printf "${bred} [*] CMSeeK [NO]${reset}\n"; allinstalled=false;} - [ -f "${fuzz_wordlist}" ] || { printf "${bred} [*] OneListForAll [NO]${reset}\n"; allinstalled=false;} - [ -f "${lfi_wordlist}" ] || { printf "${bred} [*] lfi_wordlist [NO]${reset}\n"; allinstalled=false;} - [ -f "${ssti_wordlist}" ] || { printf "${bred} [*] ssti_wordlist [NO]${reset}\n"; allinstalled=false;} - [ -f "${subs_wordlist}" ] || { printf "${bred} [*] subs_wordlist [NO]${reset}\n"; allinstalled=false;} - [ -f "${subs_wordlist_big}" ] || { printf "${bred} [*] subs_wordlist_big [NO]${reset}\n"; allinstalled=false;} - [ -f "${resolvers}" ] || { printf "${bred} [*] resolvers [NO]${reset}\n"; allinstalled=false;} - [ -f "${resolvers_trusted}" ] || { printf "${bred} [*] resolvers_trusted [NO]${reset}\n"; allinstalled=false;} - [ -f "$tools/xnLinkFinder/xnLinkFinder.py" ] || { printf "${bred} [*] xnLinkFinder [NO]${reset}\n"; allinstalled=false;} - [ -f "$tools/waymore/waymore.py" ] || { printf "${bred} [*] waymore [NO]${reset}\n"; allinstalled=false;} - [ -f "$tools/commix/commix.py" ] || { printf 
"${bred} [*] commix [NO]${reset}\n"; allinstalled=false;} - [ -f "$tools/getjswords.py" ] || { printf "${bred} [*] getjswords [NO]${reset}\n"; allinstalled=false;} - [ -f "$tools/JSA/jsa.py" ] || { printf "${bred} [*] JSA [NO]${reset}\n"; allinstalled=false;} - [ -f "$tools/cloud_enum/cloud_enum.py" ] || { printf "${bred} [*] cloud_enum [NO]${reset}\n"; allinstalled=false;} - [ -f "$tools/ultimate-nmap-parser/ultimate-nmap-parser.sh" ] || { printf "${bred} [*] nmap-parse-output [NO]${reset}\n"; allinstalled=false;} - [ -f "$tools/pydictor/pydictor.py" ] || { printf "${bred} [*] pydictor [NO]${reset}\n"; allinstalled=false;} - [ -f "$tools/urless/urless/urless.py" ] || { printf "${bred} [*] urless [NO]${reset}\n"; allinstalled=false;} - [ -f "$tools/smuggler/smuggler.py" ] || { printf "${bred} [*] smuggler [NO]${reset}\n"; allinstalled=false;} - [ -f "$tools/regulator/main.py" ] || { printf "${bred} [*] regulator [NO]${reset}\n"; allinstalled=false;} - which github-endpoints &>/dev/null || { printf "${bred} [*] github-endpoints [NO]${reset}\n"; allinstalled=false;} - which github-subdomains &>/dev/null || { printf "${bred} [*] github-subdomains [NO]${reset}\n"; allinstalled=false;} - which gitlab-subdomains &>/dev/null || { printf "${bred} [*] gitlab-subdomains [NO]${reset}\n"; allinstalled=false;} - which katana &>/dev/null || { printf "${bred} [*] katana [NO]${reset}\n"; allinstalled=false;} - which wafw00f &>/dev/null || { printf "${bred} [*] wafw00f [NO]${reset}\n"; allinstalled=false;} - which dnsvalidator &>/dev/null || { printf "${bred} [*] dnsvalidator [NO]${reset}\n"; allinstalled=false;} - which gowitness &>/dev/null || { printf "${bred} [*] gowitness [NO]${reset}\n"; allinstalled=false;} - which amass &>/dev/null || { printf "${bred} [*] Amass [NO]${reset}\n"; allinstalled=false;} - which dnsx &>/dev/null || { printf "${bred} [*] dnsx [NO]${reset}\n"; allinstalled=false;} - which gotator &>/dev/null || { printf "${bred} [*] gotator [NO]${reset}\n"; allinstalled=false;} - which nuclei &>/dev/null || { printf "${bred} [*] Nuclei [NO]${reset}\n"; allinstalled=false;} - [ -d ${NUCLEI_TEMPLATES_PATH} ] || { printf "${bred} [*] Nuclei templates [NO]${reset}\n"; allinstalled=false;} - [ -d ${tools}/fuzzing-templates ] || { printf "${bred} [*] Fuzzing templates [NO]${reset}\n"; allinstalled=false;} - which gf &>/dev/null || { printf "${bred} [*] Gf [NO]${reset}\n"; allinstalled=false;} - which Gxss &>/dev/null || { printf "${bred} [*] Gxss [NO]${reset}\n"; allinstalled=false;} - which subjs &>/dev/null || { printf "${bred} [*] subjs [NO]${reset}\n"; allinstalled=false;} - which ffuf &>/dev/null || { printf "${bred} [*] ffuf [NO]${reset}\n"; allinstalled=false;} - which massdns &>/dev/null || { printf "${bred} [*] Massdns [NO]${reset}\n"; allinstalled=false;} - which qsreplace &>/dev/null || { printf "${bred} [*] qsreplace [NO]${reset}\n"; allinstalled=false;} - which interlace &>/dev/null || { printf "${bred} [*] interlace [NO]${reset}\n"; allinstalled=false;} - which anew &>/dev/null || { printf "${bred} [*] Anew [NO]${reset}\n"; allinstalled=false;} - which unfurl &>/dev/null || { printf "${bred} [*] unfurl [NO]${reset}\n"; allinstalled=false;} - which crlfuzz &>/dev/null || { printf "${bred} [*] crlfuzz [NO]${reset}\n"; allinstalled=false;} - which httpx &>/dev/null || { printf "${bred} [*] Httpx [NO]${reset}\n${reset}"; allinstalled=false;} - which jq &>/dev/null || { printf "${bred} [*] jq [NO]${reset}\n${reset}"; allinstalled=false;} - which notify &>/dev/null || { printf "${bred} 
[*] notify [NO]${reset}\n${reset}"; allinstalled=false;} - which dalfox &>/dev/null || { printf "${bred} [*] dalfox [NO]${reset}\n${reset}"; allinstalled=false;} - which puredns &>/dev/null || { printf "${bred} [*] puredns [NO]${reset}\n${reset}"; allinstalled=false;} - which emailfinder &>/dev/null || { printf "${bred} [*] emailfinder [NO]${reset}\n"; allinstalled=false;} - which analyticsrelationships &>/dev/null || { printf "${bred} [*] analyticsrelationships [NO]${reset}\n"; allinstalled=false;} - which mapcidr &>/dev/null || { printf "${bred} [*] mapcidr [NO]${reset}\n"; allinstalled=false;} - which ppfuzz &>/dev/null || { printf "${bred} [*] ppfuzz [NO]${reset}\n"; allinstalled=false;} - which cdncheck &>/dev/null || { printf "${bred} [*] cdncheck [NO]${reset}\n"; allinstalled=false;} - which interactsh-client &>/dev/null || { printf "${bred} [*] interactsh-client [NO]${reset}\n"; allinstalled=false;} - which tlsx &>/dev/null || { printf "${bred} [*] tlsx [NO]${reset}\n"; allinstalled=false;} - which smap &>/dev/null || { printf "${bred} [*] smap [NO]${reset}\n"; allinstalled=false;} - which gitdorks_go &>/dev/null || { printf "${bred} [*] gitdorks_go [NO]${reset}\n"; allinstalled=false;} - which ripgen &>/dev/null || { printf "${bred} [*] ripgen [NO]${reset}\n${reset}"; allinstalled=false;} - which dsieve &>/dev/null || { printf "${bred} [*] dsieve [NO]${reset}\n${reset}"; allinstalled=false;} - which inscope &>/dev/null || { printf "${bred} [*] inscope [NO]${reset}\n${reset}"; allinstalled=false;} - which enumerepo &>/dev/null || { printf "${bred} [*] enumerepo [NO]${reset}\n${reset}"; allinstalled=false;} - which Web-Cache-Vulnerability-Scanner &>/dev/null || { printf "${bred} [*] Web-Cache-Vulnerability-Scanner [NO]${reset}\n"; allinstalled=false;} - which subfinder &>/dev/null || { printf "${bred} [*] subfinder [NO]${reset}\n${reset}"; allinstalled=false;} - which byp4xx &>/dev/null || { printf "${bred} [*] byp4xx [NO]${reset}\n${reset}"; allinstalled=false;} - which ghauri &>/dev/null || { printf "${bred} [*] ghauri [NO]${reset}\n${reset}"; allinstalled=false;} - which hakip2host &>/dev/null || { printf "${bred} [*] hakip2host [NO]${reset}\n${reset}"; allinstalled=false;} - which gau &>/dev/null || { printf "${bred} [*] gau [NO]${reset}\n${reset}"; allinstalled=false;} - which crt &>/dev/null || { printf "${bred} [*] crt [NO]${reset}\n${reset}"; allinstalled=false;} - which gitleaks &>/dev/null || { printf "${bred} [*] gitleaks [NO]${reset}\n${reset}"; allinstalled=false;} - which trufflehog &>/dev/null || { printf "${bred} [*] trufflehog [NO]${reset}\n${reset}"; allinstalled=false;} - which s3scanner &>/dev/null || { printf "${bred} [*] s3scanner [NO]${reset}\n${reset}"; allinstalled=false;} - - if [ "${allinstalled}" = true ]; then - printf "${bgreen} Good! All installed! 
${reset}\n\n" - else - printf "\n${yellow} Try running the installer script again ./install.sh" - printf "\n${yellow} If it fails for any reason try to install manually the tools missed" - printf "\n${yellow} Finally remember to set the ${bred}\$tools${yellow} variable at the start of this script" - printf "\n${yellow} If nothing works and the world is gonna end you can always ping me :D ${reset}\n\n" - fi +rftw_util_version - printf "${bblue} Tools check finished\n" - printf "${bgreen}#######################################################################\n${reset}" -} +rftw_util_tools -t $tools ############################################################################################################### ################################################### OSINT ##################################################### @@ -137,7 +40,7 @@ function tools_installed(){ function google_dorks(){ if { [ ! -f "$called_fn_dir/.${FUNCNAME[0]}" ] || [ "$DIFF" = true ]; } && [ "$GOOGLE_DORKS" = true ] && [ "$OSINT" = true ]; then - python3 $tools/dorks_hunter/dorks_hunter.py -d "$domain" -o osint/dorks.txt || { echo "dorks_hunter command failed"; exit 1; } + rftw_osint_googledorks -d "$domain" -o osint/dorks.txt || { echo "dorks_hunter command failed"; exit 1; } 2>>"$LOGFILE" >/dev/null 2>&1 end_func "Results are saved in $domain/osint/dorks.txt" "${FUNCNAME[0]}" else if [ "$GOOGLE_DORKS" = false ] || [ "$OSINT" = false ]; then @@ -151,15 +54,7 @@ function google_dorks(){ function github_dorks(){ if { [ ! -f "$called_fn_dir/.${FUNCNAME[0]}" ] || [ "$DIFF" = true ]; } && [ "$GITHUB_DORKS" = true ] && [ "$OSINT" = true ]; then start_func "${FUNCNAME[0]}" "Github Dorks in process" - if [ -s "${GITHUB_TOKENS}" ]; then - if [ "$DEEP" = true ]; then - gitdorks_go -gd $tools/gitdorks_go/Dorks/medium_dorks.txt -nws 20 -target "$domain" -tf "${GITHUB_TOKENS}" -ew 3 | anew -q osint/gitdorks.txt || { echo "gitdorks_go/anew command failed"; exit 1; } - else - gitdorks_go -gd $tools/gitdorks_go/Dorks/smalldorks.txt -nws 20 -target $domain -tf "${GITHUB_TOKENS}" -ew 3 | anew -q osint/gitdorks.txt || { echo "gitdorks_go/anew command failed"; exit 1; } - fi - else - printf "\n${bred} Required file ${GITHUB_TOKENS} not exists or empty${reset}\n" - fi + rftw_osint_gh_dorks -d "$domain" -t "${GITHUB_TOKENS}" -D "$DEEP" | anew -q osint/gitdorks.txt || { echo "gitdorks_go command failed"; exit 1; } 2>>"$LOGFILE" >/dev/null 2>&1 end_func "Results are saved in $domain/osint/gitdorks.txt" "${FUNCNAME[0]}" else if [ "$GITHUB_DORKS" = false ] || [ "$OSINT" = false ]; then @@ -1090,7 +985,7 @@ function cdnprovider(){ if { [ ! -f "$called_fn_dir/.${FUNCNAME[0]}" ] || [ "$DIFF" = true ]; } && [ "$CDN_IP" = true ]; then start_func ${FUNCNAME[0]} "CDN provider check" [ -s "subdomains/subdomains_dnsregs.json" ] && cat subdomains/subdomains_dnsregs.json | jq -r 'try . | .a[]' | grep -aEiv "^(127|10|169\.154|172\.1[6789]|172\.2[0-9]|172\.3[01]|192\.168)\." 
| grep -oE "\b([0-9]{1,3}\.){3}[0-9]{1,3}\b" | sort -u > .tmp/ips_cdn.txt - [ -s ".tmp/ips_cdn.txt" ] && cat .tmp/ips_cdn.txt | cdncheck -silent -resp -nc | anew -q $dir/hosts/cdn_providers.txt + [ -s ".tmp/ips_cdn.txt" ] && cat .tmp/ips_cdn.txt | rftw_ip_cdnprovider | anew -q $dir/hosts/cdn_providers.txt end_func "Results are saved in hosts/cdn_providers.txt" ${FUNCNAME[0]} else if [ "$CDN_IP" = false ]; then From 70a7d8470bbd52b4eeac66dcf4a50e6d7305718e Mon Sep 17 00:00:00 2001 From: six2dez Date: Wed, 8 Nov 2023 12:02:28 +0100 Subject: [PATCH 02/17] pip requirements fix --- requirements.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/requirements.txt b/requirements.txt index e36da7fc..758b2755 100644 --- a/requirements.txt +++ b/requirements.txt @@ -34,4 +34,4 @@ tldextract # dorks_hunter tqdm # multiple ujson # multiple urllib3 # multiple -postleaksNeg # Tool +postleaksNg # Tool From 5ed1e0a4af3ae22184cf41e8993418ca30313823 Mon Sep 17 00:00:00 2001 From: six2dez Date: Wed, 8 Nov 2023 14:15:27 +0100 Subject: [PATCH 03/17] Revert "First commit for v3.0 dev" This reverts commit dedb585bf878990e2aa24e8927b4d34b1622d709. --- bin/rftw_ip_cdnprovider | 65 ----------------- bin/rftw_ip_favicon | 86 ---------------------- bin/rftw_ip_info | 101 -------------------------- bin/rftw_ip_portscan | 100 -------------------------- bin/rftw_osint_emails | 95 ------------------------- bin/rftw_osint_ghdorks | 67 ----------------- bin/rftw_osint_ghrepos | 89 ----------------------- bin/rftw_osint_googledorks | 58 --------------- bin/rftw_osint_metadata | 40 ----------- bin/rftw_osint_postleaks | 51 ------------- bin/rftw_osint_whois | 68 ------------------ bin/rftw_sub_active | 75 -------------------- bin/rftw_sub_analytics | 96 ------------------------- bin/rftw_sub_brute | 88 ----------------------- bin/rftw_sub_crt | 75 -------------------- bin/rftw_sub_dns | 85 ---------------------- bin/rftw_sub_full | 130 --------------------------------- bin/rftw_sub_noerror | 74 ------------------- bin/rftw_sub_passive | 79 --------------------- bin/rftw_sub_permut | 92 ------------------------ bin/rftw_sub_recbrute | 130 --------------------------------- bin/rftw_sub_recpassive | 68 ------------------ bin/rftw_sub_regex | 67 ----------------- bin/rftw_sub_s3buckets | 79 --------------------- bin/rftw_sub_scraping | 92 ------------------------ bin/rftw_sub_takeover | 66 ----------------- bin/rftw_sub_vhosts | 69 ------------------ bin/rftw_sub_zonetransfer | 64 ----------------- bin/rftw_uti_transfer | 62 ---------------- bin/rftw_util_ascii | 57 --------------- bin/rftw_util_axiomoff | 60 ---------------- bin/rftw_util_axiomon | 64 ----------------- bin/rftw_util_axiomsel | 50 ------------- bin/rftw_util_deleteoos | 72 ------------------- bin/rftw_util_gettime | 65 ----------------- bin/rftw_util_ipcidr | 61 ---------------- bin/rftw_util_notification | 74 ------------------- bin/rftw_util_output | 56 --------------- bin/rftw_util_removebig | 53 -------------- bin/rftw_util_resolver | 63 ---------------- bin/rftw_util_sendnotify | 78 -------------------- bin/rftw_util_tools | 142 ------------------------------------- bin/rftw_util_version | 56 --------------- bin/rftw_util_zipfolder | 60 ---------------- bin/rftw_vuln_4xx | 77 -------------------- bin/rftw_vuln_brokenlink | 68 ------------------ bin/rftw_vuln_comminject | 76 -------------------- bin/rftw_vuln_cors | 82 --------------------- bin/rftw_vuln_crlf | 83 ---------------------- bin/rftw_vuln_fuzzparam | 81 --------------------- 
bin/rftw_vuln_lfi | 83 ---------------------- bin/rftw_vuln_openredir | 89 ----------------------- bin/rftw_vuln_protpollut | 74 ------------------- bin/rftw_vuln_smuggling | 77 -------------------- bin/rftw_vuln_spray | 71 ------------------- bin/rftw_vuln_sqli | 92 ------------------------ bin/rftw_vuln_ssrf | 108 ---------------------------- bin/rftw_vuln_ssti | 83 ---------------------- bin/rftw_vuln_testssl | 69 ------------------ bin/rftw_vuln_webcache | 77 -------------------- bin/rftw_vuln_xss | 120 ------------------------------- bin/rftw_web_cms | 86 ---------------------- bin/rftw_web_fuzz | 92 ------------------------ bin/rftw_web_jschecks | 98 ------------------------- bin/rftw_web_nucleicheck | 82 --------------------- bin/rftw_web_passdict | 67 ----------------- bin/rftw_web_probecommon | 70 ------------------ bin/rftw_web_probeuncommon | 81 --------------------- bin/rftw_web_roboxtractor | 69 ------------------ bin/rftw_web_screenshot | 65 ----------------- bin/rftw_web_urlchecks | 133 ---------------------------------- bin/rftw_web_urlext | 97 ------------------------- bin/rftw_web_urlgf | 84 ---------------------- bin/rftw_web_wafcheck | 82 --------------------- bin/rftw_web_wordlists | 73 ------------------- images/banner.png | Bin images/docker.png | Bin images/reconFTW.gif | Bin install.sh | 2 +- reconftw.cfg | 2 +- reconftw.sh | 141 +++++++++++++++++++++++++++++++----- 81 files changed, 125 insertions(+), 5931 deletions(-) delete mode 100755 bin/rftw_ip_cdnprovider delete mode 100755 bin/rftw_ip_favicon delete mode 100755 bin/rftw_ip_info delete mode 100755 bin/rftw_ip_portscan delete mode 100755 bin/rftw_osint_emails delete mode 100755 bin/rftw_osint_ghdorks delete mode 100755 bin/rftw_osint_ghrepos delete mode 100755 bin/rftw_osint_googledorks delete mode 100755 bin/rftw_osint_metadata delete mode 100755 bin/rftw_osint_postleaks delete mode 100755 bin/rftw_osint_whois delete mode 100755 bin/rftw_sub_active delete mode 100755 bin/rftw_sub_analytics delete mode 100755 bin/rftw_sub_brute delete mode 100755 bin/rftw_sub_crt delete mode 100755 bin/rftw_sub_dns delete mode 100755 bin/rftw_sub_full delete mode 100755 bin/rftw_sub_noerror delete mode 100755 bin/rftw_sub_passive delete mode 100755 bin/rftw_sub_permut delete mode 100755 bin/rftw_sub_recbrute delete mode 100755 bin/rftw_sub_recpassive delete mode 100755 bin/rftw_sub_regex delete mode 100755 bin/rftw_sub_s3buckets delete mode 100755 bin/rftw_sub_scraping delete mode 100755 bin/rftw_sub_takeover delete mode 100755 bin/rftw_sub_vhosts delete mode 100755 bin/rftw_sub_zonetransfer delete mode 100755 bin/rftw_uti_transfer delete mode 100755 bin/rftw_util_ascii delete mode 100755 bin/rftw_util_axiomoff delete mode 100755 bin/rftw_util_axiomon delete mode 100755 bin/rftw_util_axiomsel delete mode 100755 bin/rftw_util_deleteoos delete mode 100755 bin/rftw_util_gettime delete mode 100755 bin/rftw_util_ipcidr delete mode 100755 bin/rftw_util_notification delete mode 100755 bin/rftw_util_output delete mode 100755 bin/rftw_util_removebig delete mode 100755 bin/rftw_util_resolver delete mode 100755 bin/rftw_util_sendnotify delete mode 100755 bin/rftw_util_tools delete mode 100755 bin/rftw_util_version delete mode 100755 bin/rftw_util_zipfolder delete mode 100755 bin/rftw_vuln_4xx delete mode 100755 bin/rftw_vuln_brokenlink delete mode 100755 bin/rftw_vuln_comminject delete mode 100755 bin/rftw_vuln_cors delete mode 100755 bin/rftw_vuln_crlf delete mode 100755 bin/rftw_vuln_fuzzparam delete mode 100755 bin/rftw_vuln_lfi 
delete mode 100755 bin/rftw_vuln_openredir delete mode 100755 bin/rftw_vuln_protpollut delete mode 100755 bin/rftw_vuln_smuggling delete mode 100755 bin/rftw_vuln_spray delete mode 100755 bin/rftw_vuln_sqli delete mode 100755 bin/rftw_vuln_ssrf delete mode 100755 bin/rftw_vuln_ssti delete mode 100755 bin/rftw_vuln_testssl delete mode 100755 bin/rftw_vuln_webcache delete mode 100755 bin/rftw_vuln_xss delete mode 100755 bin/rftw_web_cms delete mode 100755 bin/rftw_web_fuzz delete mode 100755 bin/rftw_web_jschecks delete mode 100755 bin/rftw_web_nucleicheck delete mode 100755 bin/rftw_web_passdict delete mode 100755 bin/rftw_web_probecommon delete mode 100755 bin/rftw_web_probeuncommon delete mode 100755 bin/rftw_web_roboxtractor delete mode 100755 bin/rftw_web_screenshot delete mode 100755 bin/rftw_web_urlchecks delete mode 100755 bin/rftw_web_urlext delete mode 100755 bin/rftw_web_urlgf delete mode 100755 bin/rftw_web_wafcheck delete mode 100755 bin/rftw_web_wordlists mode change 100755 => 100644 images/banner.png mode change 100755 => 100644 images/docker.png mode change 100755 => 100644 images/reconFTW.gif diff --git a/bin/rftw_ip_cdnprovider b/bin/rftw_ip_cdnprovider deleted file mode 100755 index 0da6e308..00000000 --- a/bin/rftw_ip_cdnprovider +++ /dev/null @@ -1,65 +0,0 @@ -#!/bin/bash -# Check manually, the logic for extracting the ips from dnsx results should be on reconftw.sh script, this script should only scan the ips - -# Default config path -CONFIG_PATH="$RECONFTW_CFG" - -# Check if the config file exists -if [ -f "$CONFIG_PATH" ]; then - source "$CONFIG_PATH" -else - echo "Error: reconftw.cfg not found at $CONFIG_PATH!" - exit 1 -fi - -# Help function -help_menu() { - cat <<- EOF -Usage: $0 [OPTIONS] - -CDN Provider Check Tool - -Options: - -h, --help Show this help menu - -i, --input Specify the input IP lists file (required) - -f, --force Force the execution even if already processed - -o, --output Specify the output file (default is print to stdout) -EOF -} - -# Default values -FORCE=false -INPUT_FILE="" -OUTPUT_FILE="" - -# Parse arguments -while [[ "$#" -gt 0 ]]; do - case $1 in - -i|--input) INPUT_FILE="$2"; shift ;; - -h|--help) help_menu; exit 0 ;; - -f|--force) FORCE=true; shift ;; - -o|--output) OUTPUT_FILE="$2"; shift ;; - *) - if [ -z "$INPUT_FILE" ]; then - INPUT_FILE=$1 - else - echo "Unknown parameter passed: $1"; exit 1; - fi - ;; - esac - shift -done - -# Validate input file -if [ ! -f "$INPUT_FILE" ] || [ ! -s "$INPUT_FILE" ]; then - echo "Invalid or empty input file!" - help_menu - exit 1 -fi - -# Handle the output of the cdncheck command based on the presence of the -o flag -if [ -z "$OUTPUT_FILE" ]; then - cdncheck -silent -resp -nc < "$INPUT_FILE" -else - cdncheck -silent -resp -nc < "$INPUT_FILE" | sort -u > "$OUTPUT_FILE" -fi diff --git a/bin/rftw_ip_favicon b/bin/rftw_ip_favicon deleted file mode 100755 index ec07f8bb..00000000 --- a/bin/rftw_ip_favicon +++ /dev/null @@ -1,86 +0,0 @@ -#!/bin/bash -# Looks good, needs testing - -# Load environment variables -if [ -f "reconftw.cfg" ]; then - source reconftw.cfg -else - echo "Error: reconftw.cfg not found!" - exit 1 -fi - -help_menu() { - echo "Usage: $0 [OPTIONS] DOMAIN" - echo "" - echo "Favicon Ip Lookup Tool" - echo "" - echo "Options:" - echo " -h, --help Show this help menu" - echo " -d, --domain DOMAIN Specify the domain to process" - echo " -f, --force Force the execution even if already processed" -} - -# Start function -start_func() { - echo "Starting $1..." 
-} - -# End function -end_func() { - echo "$1" - echo "End of $2..." -} - -# Default values -FORCE=false -DOMAIN="" - -# Parse arguments -while [[ "$#" -gt 0 ]]; do - case $1 in - -h|--help) help_menu; exit 0 ;; - -d|--domain) DOMAIN="$2"; shift ;; - -f|--force) FORCE=true ;; - *) - # Set the domain if it's not specified with -d - [ -z "$DOMAIN" ] && DOMAIN="$1" || { echo "Unknown parameter passed: $1"; exit 1; } - ;; - esac - shift -done - -# Input validation -if [ -z "$DOMAIN" ]; then - help_menu - exit 1 -fi - -# Validate domain format -if [[ ! "$DOMAIN" =~ ^[a-zA-Z0-9.-]+$ ]]; then - echo "Invalid domain format." - exit 1 -fi - -if { [ ! -f "$called_fn_dir/.favicon" ] || [ "$FORCE" = true ]; } && [ "$FAVICON" = true ] && ! [[ $DOMAIN =~ ^[0-9]+\.[0-9]+\.[0-9]+\.[0-9] ]]; then - start_func "Favicon Ip Lookup" - - cd "$tools/fav-up" || { echo "Failed to cd to $tools/fav-up in favicon"; exit 1; } - python3 favUp.py -w "$DOMAIN" -sc -o favicontest.json 2>>"$LOGFILE" >/dev/null - - if [ -s "favicontest.json" ]; then - jq -r 'try .found_ips' favicontest.json 2>>"$LOGFILE" | grep -v "not-found" > favicontest.txt - sed -i "s/|/\n/g" favicontest.txt - cat favicontest.txt 2>>"$LOGFILE" - mv favicontest.txt $dir/hosts/favicontest.txt 2>>"$LOGFILE" - rm -f favicontest.json 2>>"$LOGFILE" - fi - - cd "$dir" || { echo "Failed to cd to $dir in favicon"; exit 1; } - end_func "Results are saved in hosts/favicontest.txt" "favicon" -else - if [ "$FAVICON" = false ] || [[ $DOMAIN =~ ^[0-9]+\.[0-9]+\.[0-9]+\.[0-9] ]]; then - echo -e "\n${yellow} favicon skipped in this mode, defined in reconftw.cfg, or the domain is an IP address${reset}\n" - else - echo -e "${yellow} favicon is already processed. To force executing favicon, delete\n $called_fn_dir/.favicon ${reset}\n\n" - fi -fi diff --git a/bin/rftw_ip_info b/bin/rftw_ip_info deleted file mode 100755 index 240e6d92..00000000 --- a/bin/rftw_ip_info +++ /dev/null @@ -1,101 +0,0 @@ -#!/bin/bash -# Looks good, needs testing - -# Load environment variables -if [ -f "reconftw.cfg" ]; then - source reconftw.cfg -else - echo "Error: reconftw.cfg not found!" - exit 1 -fi - -# Variables -called_fn_dir="./called_functions" -LOGFILE="./log.txt" -IP_INFO=true -OSINT=true -WHOISXML_API_LOCAL="YOUR_API_KEY_HERE" # Replace with your actual API key - -# If WHOISXML_API is not in reconftw.cfg the use the one in the script -if $WHOISXML_API; then - WHOISXML_API=$WHOISXML_API_LOCAL -fi - -# If WHOISXML_API is not present or empty, exit the program -if [ -z "$WHOISXML_API" ]; then - echo "No WHOISXML_API var defined, exiting..." - exit 1 -fi - -# Function to display help menu -help_menu() { - cat <<- EOF -Usage: $0 -i -o - -Search for IP information. - -Options: - -i, --input Target IP address - -o, --output Output file to save results - -h, --help Display this help menu -EOF - exit 0 -} - -# Function to start the process -start_func() { - echo "[*] $1" -} - -# Function to end the process -end_func() { - echo "[+] $1" -} - -# Main function to gather IP info -ip_info_func() { - local ip="$1" - if { [ ! 
-f "$called_fn_dir/.ip_info" ]; } && [ "$IP_INFO" = true ] && [ "$OSINT" = true ] && [[ $ip =~ ^[0-9]+\.[0-9]+\.[0-9]+\.[0-9] ]]; then - start_func "Searching IP info for $ip" - - if [ -n "$WHOISXML_API" ]; then - # IP Relations - curl "https://reverse-ip.whoisxmlapi.com/api/v1?apiKey=${WHOISXML_API}&ip=${ip}" 2>/dev/null | jq -r '.result[].name' 2>>"$LOGFILE" >> "$OUTPUT_FILE" - # IP Whois - curl "https://www.whoisxmlapi.com/whoisserver/WhoisService?apiKey=${WHOISXML_API}&domainName=${ip}&outputFormat=json&da=2®istryRawText=1®istrarRawText=1&ignoreRawTexts=1" 2>/dev/null | jq 2>>"$LOGFILE" >> "$OUTPUT_FILE" - # IP Location - curl "https://ip-geolocation.whoisxmlapi.com/api/v1?apiKey=${WHOISXML_API}&ipAddress=${ip}" 2>/dev/null | jq -r '.ip,.location' 2>>"$LOGFILE" >> "$OUTPUT_FILE" - else - echo "${yellow} No WHOISXML_API var defined, skipping function ${reset}" - fi - else - echo "Skipped ip_info for $ip due to conditions or configuration." - fi -} - -# Default values -INPUT_FILE="" -OUTPUT_FILE="" - -# Parse arguments -while [[ "$#" -gt 0 ]]; do - case $1 in - -i|--input) INPUT_FILE="$2"; shift ;; - -o|--output) OUTPUT_FILE="$2"; shift ;; - -h|--help) help_menu ;; - *) echo "Unknown parameter passed: $1"; exit 1 ;; - esac - shift -done - -# Input validation -if [[ -z "$INPUT_FILE" || -z "$OUTPUT_FILE" ]]; then - help_menu -fi - -# Process IPs -while IFS= read -r IP; do - ip_info_func "$IP" -done < "$INPUT_FILE" - -echo "Completed. Check $OUTPUT_FILE for the results." diff --git a/bin/rftw_ip_portscan b/bin/rftw_ip_portscan deleted file mode 100755 index b751ebaf..00000000 --- a/bin/rftw_ip_portscan +++ /dev/null @@ -1,100 +0,0 @@ -#!/bin/bash -# Check manually, the logic for extracting the ips from dnsx results should be on reconftw.sh script, this script should only scan the ips - -# Load environment variables -if [ -f "reconftw.cfg" ]; then - source reconftw.cfg -else - echo "Error: reconftw.cfg not found!" - exit 1 -fi - -# Help function -help_menu() { - echo "Usage: $0 [OPTIONS] DOMAIN" - echo "" - echo "Port Scan Tool" - echo "" - echo "Options:" - echo " -h, --help Show this help menu" - echo " -d, --domain DOMAIN Specify the domain to process" - echo " -f, --force Force the execution even if already processed" -} - -# Start function -start_func() { - echo "Starting $1..." -} - -# End function -end_func() { - echo "$1" - echo "End of $2..." -} - -# Input validation -if [ -z "$1" ]; then - help_menu - exit 1 -fi - -# Default values -FORCE=false - -# Parse arguments -while [[ "$#" -gt 0 ]]; do - case $1 in - -h|--help) help_menu; exit 0 ;; - -d|--domain) DOMAIN="$2"; shift ;; - -f|--force) FORCE=true ;; - *) echo "Unknown parameter passed: $1"; exit 1 ;; - esac - shift -done - -# Validate domain format -if [[ ! "$DOMAIN" =~ ^[a-zA-Z0-9.-]+$ ]] && ! [[ $DOMAIN =~ ^[0-9]+\.[0-9]+\.[0-9]+\.[0-9]+$ ]]; then - echo "Invalid domain format." - exit 1 -fi - -if { [ ! -f "$called_fn_dir/.portscan" ] || [ "$FORCE" = true ]; } && [ "$PORTSCANNER" = true ]; then - start_func "portscan" "Port scan" - - if ! [[ $DOMAIN =~ ^[0-9]+\.[0-9]+\.[0-9]+\.[0-9]+$ ]]; then - # The following checks and actions are based on the original function logic - [ -s "subdomains/subdomains_dnsregs.json" ] && jq -r 'try . 
| "\(.host) \(.a[0])"' subdomains/subdomains_dnsregs.json | anew -q .tmp/subs_ips.txt - [ -s ".tmp/subs_ips.txt" ] && awk '{ print $2 " " $1}' .tmp/subs_ips.txt | sort -k2 -n | anew -q hosts/subs_ips_vhosts.txt - [ -s "hosts/subs_ips_vhosts.txt" ] && cut -d ' ' -f1 hosts/subs_ips_vhosts.txt | grep -aEiv "^(127|10|169\.154|172\.1[6789]|172\.2[0-9]|172\.3[01]|192\.168)\." | grep -oE "\b([0-9]{1,3}\.){3}[0-9]{1,3}\b" | anew -q hosts/ips.txt - else - echo $DOMAIN | grep -aEiv "^(127|10|169\.154|172\.1[6789]|172\.2[0-9]|172\.3[01]|192\.168)\." | grep -oE "\b([0-9]{1,3}\.){3}[0-9]{1,3}\b" | anew -q hosts/ips.txt - fi - - [ ! -s "hosts/cdn_providers.txt" ] && [ -s "hosts/ips.txt" ] && cdncheck -silent -resp -nc < hosts/ips.txt 2>/dev/null > hosts/cdn_providers.txt - - [ -s "hosts/ips.txt" ] && comm -23 <(sort -u hosts/ips.txt) <(cut -d'[' -f1 hosts/cdn_providers.txt | sed 's/[[:space:]]*$//' | sort -u) | grep -aEiv "^(127|10|169\.154|172\.1[6789]|172\.2[0-9]|172\.3[01]|192\.168)\." | grep -oE "\b([0-9]{1,3}\.){3}[0-9]{1,3}\b" | sort -u | anew -q .tmp/ips_nocdn.txt - - printf "${bblue}\n Resolved IP addresses (No CDN) ${reset}\n\n" - [ -s ".tmp/ips_nocdn.txt" ] && sort .tmp/ips_nocdn.txt - printf "${bblue}\n Scanning ports... ${reset}\n\n" - - if [ "$PORTSCAN_PASSIVE" = true ] && [ ! -f "hosts/portscan_passive.txt" ] && [ -s ".tmp/ips_nocdn.txt" ]; then - smap -iL .tmp/ips_nocdn.txt > hosts/portscan_passive.txt - fi - - if [ "$PORTSCAN_ACTIVE" = true ]; then - if [ "$AXIOM" = true ]; then - [ -s ".tmp/ips_nocdn.txt" ] && axiom-scan .tmp/ips_nocdn.txt -m nmapx --top-ports 200 -sV -n -Pn --open --max-retries 2 --script vulners -oA hosts/portscan_active $AXIOM_EXTRA_ARGS 2>>"$LOGFILE" >/dev/null - else - [ -s ".tmp/ips_nocdn.txt" ] && $SUDO nmap --top-ports 200 -sV -n --max-retries 2 -Pn --open --script vulners -iL .tmp/ips_nocdn.txt -oA hosts/portscan_active 2>>"$LOGFILE" >/dev/null - fi - fi - - end_func "Results are saved in hosts/portscan_[passive|active].txt" "portscan" -else - if [ "$PORTSCANNER" = false ]; then - printf "\n${yellow} portscan skipped in this mode or defined in reconftw.cfg ${reset}\n" - else - printf "${yellow} portscan is already processed, to force executing portscan delete\n $called_fn_dir/.portscan ${reset}\n\n" - fi -fi diff --git a/bin/rftw_osint_emails b/bin/rftw_osint_emails deleted file mode 100755 index d8207b02..00000000 --- a/bin/rftw_osint_emails +++ /dev/null @@ -1,95 +0,0 @@ -#!/bin/bash - -# Looks good, needs testing - -# Load environment variables -if [ -f "reconftw.cfg" ]; then - source reconftw.cfg -else - echo "Error: reconftw.cfg not found!" - exit 1 -fi - - - -function help_menu(){ - echo "Usage: $0 [OPTIONS] [DOMAIN]" - echo "" - echo "Email Finder Tool" - echo "" - echo "Options:" - echo " -h, --help Show this help menu" - echo " -d, --domain DOMAIN Specify the domain target" - echo " -o, --output DIR Specify the output directory" - echo " -f, --force Force the execution even if already processed" -} - -function validate_domain(){ - if [[ ! "$1" =~ ^[a-zA-Z0-9.-]+$ ]]; then - echo "Error: Invalid domain format." - exit 1 - fi -} - -function emails(){ - if { [ ! -f "$called_fn_dir/.${FUNCNAME[0]}" ] || [ "$DIFF" = true ]; } && [ "$EMAILS" = true ] && [ "$OSINT" = true ] && ! 
[[ $domain =~ ^[0-9]+\.[0-9]+\.[0-9]+\.[0-9] ]]; then - echo "Searching emails/users/passwords leaks" - emailfinder -d $domain 2>>"$LOGFILE" | anew -q .tmp/emailfinder.txt || { echo "emailfinder command failed"; exit 1; } - [ -s ".tmp/emailfinder.txt" ] && cat .tmp/emailfinder.txt | grep "@" | grep -iv "|_" | anew -q osint/emails.txt - echo "Results are saved in $domain/osint/emails.txt" - else - if [ "$EMAILS" = false ] || [ "$OSINT" = false ]; then - echo "${FUNCNAME[0]} skipped in this mode or defined in reconftw.cfg" - else - echo "${FUNCNAME[0]} is already processed, to force executing ${FUNCNAME[0]} delete $called_fn_dir/.${FUNCNAME[0]}" - fi - fi -} - -function main(){ - local OUTPUT_DIR="./" - local FORCE=false - local DOMAIN="" - - # Parse arguments - while [[ "$#" -gt 0 ]]; do - case $1 in - -h|--help) help_menu; exit 0 ;; - -d|--domain) DOMAIN="$2"; shift ;; - -o|--output) OUTPUT_DIR="$2"; shift ;; - -f|--force) FORCE=true ;; - *) - # If a domain wasn't set using -d, then set it using the last argument - if [[ -z "$DOMAIN" ]]; then - DOMAIN="$1" - else - echo "Unknown parameter or multiple domains passed: $1"; exit 1; - fi - ;; - esac - shift - done - - if [[ -z "$DOMAIN" ]]; then - echo "No domain provided." - help_menu - exit 1 - fi - - validate_domain "$DOMAIN" - - # Define other variables - called_fn_dir="./called_functions" - DIFF=true - EMAILS=true - OSINT=true - LOGFILE="./log.txt" - - echo "Processing domain: $DOMAIN" - - # Call the emails function - emails -} - -# Execute the main function -main "$@" \ No newline at end of file diff --git a/bin/rftw_osint_ghdorks b/bin/rftw_osint_ghdorks deleted file mode 100755 index 7d06cd19..00000000 --- a/bin/rftw_osint_ghdorks +++ /dev/null @@ -1,67 +0,0 @@ -#!/bin/bash - -# Default config path -CONFIG_PATH="$RECONFTW_CFG" - -# Check if the config file exists -if [ -f "$CONFIG_PATH" ]; then - source "$CONFIG_PATH" -else - echo "Error: reconftw.cfg not found at $CONFIG_PATH!" 
- exit 1 -fi - -# Help menu -function help_menu() { - echo -e "Usage: ./github_dorks_script.sh [OPTIONS] [DOMAIN]" - echo -e "Options:" - echo -e " -d, --domain DOMAIN Specify the domain" - echo -e " -g, --github-tokens FILE Specify the path to the GitHub tokens file" - echo -e " -D, --deep Use deep dorking (optional)" - echo -e " -h, --help Display this help menu" - exit 1 -} - -# Main function -function github_dorks() { - if [ -s "${github_tokens}" ]; then - local dorks_file="$tools/gitdorks_go/Dorks/smalldorks.txt" - [ "$deep" = true ] && dorks_file="$tools/gitdorks_go/Dorks/medium_dorks.txt" - gitdorks_go -gd "$dorks_file" -nws 20 -target "$domain" -tf "${github_tokens}" -ew 3 || { echo "gitdorks_go/anew command failed" >&2; exit 1; } - else - echo -e "${bred}Required file ${github_tokens} not exists or empty${reset}" >&2 - fi -} - -# Default values -domain="" -github_tokens="" -deep=false - -# Parse command-line arguments -while [[ "$#" -gt 0 ]]; do - case $1 in - -d|--domain) domain="$2"; shift ;; - -g|--github-tokens) github_tokens="$2"; shift ;; - -D|--deep) deep=true ;; - -h|--help) help_menu ;; - *) - if [ -z "$domain" ]; then - domain="$1" - else - echo -e "${bred}Unknown parameter passed: $1${reset}" >&2; help_menu - fi - ;; - esac - shift -done - -# Validate inputs -if [ -z "$domain" ] || [ -z "$github_tokens" ]; then - echo -e "${bred}Error: Missing required parameters.${reset}" >&2 - help_menu - exit 1 -fi - -# Execute the main function -github_dorks \ No newline at end of file diff --git a/bin/rftw_osint_ghrepos b/bin/rftw_osint_ghrepos deleted file mode 100755 index 52c138d6..00000000 --- a/bin/rftw_osint_ghrepos +++ /dev/null @@ -1,89 +0,0 @@ -#!/bin/bash - -# Default config path -CONFIG_PATH="$RECONFTW_CFG" - -# Check if the config file exists -if [ -f "$CONFIG_PATH" ]; then - source "$CONFIG_PATH" -else - echo "Error: reconftw.cfg not found at $CONFIG_PATH!" - exit 1 -fi - -# Help menu -function help_menu() { - echo -e "Usage: ./github_repos_script.sh [OPTIONS] [DOMAIN]" - echo -e "Options:" - echo -e " -d, --domain DOMAIN Specify the domain" - echo -e " -t, --tools-dir DIRECTORY Specify the tools directory path" - echo -e " -g, --github-tokens FILE Specify the path to the GitHub tokens file" - echo -e " -o, --output FILE (Optional )Specify the output file path (prints to stdout if not set)" - echo -e " -h, --help Display this help menu" - exit 1 -} - -# Main function -function github_repos() { - # Validate tools - mkdir -p .tmp >>"$LOGFILE" 2>&1 - for tool in unfurl enumerepo jq interlace gitleaks trufflehog; do - if ! 
command -v $tool &> /dev/null; then - echo -e "${bred}Error: ${tool} is not installed.${reset}" >&2 - exit 1 - fi - done - if [ -s "${github_tokens}" ]; then - GH_TOKEN=$(cat ${github_tokens} | head -1) - echo $domain | unfurl format %r > .tmp/company_name.txt - enumerepo -token-string "${GH_TOKEN}" -usernames .tmp/company_name.txt -o .tmp/company_repos.txt - [ -s ".tmp/company_repos.txt" ] && jq -r '.[].repos[]|.url' < .tmp/company_repos.txt > .tmp/company_repos_url.txt - mkdir -p .tmp/github_repos - mkdir -p .tmp/github - [ -s ".tmp/company_repos_url.txt" ] && interlace -tL .tmp/company_repos_url.txt -threads ${INTERLACE_THREADS} -c "git clone _target_ .tmp/github_repos/_cleantarget_" - [ -d ".tmp/github/" ] && ls .tmp/github_repos > .tmp/github_repos_folders.txt - [ -s ".tmp/github_repos_folders.txt" ] && interlace -tL .tmp/github_repos_folders.txt -threads ${INTERLACE_THREADS} -c "gitleaks detect --source .tmp/github_repos/_target_ --no-banner --no-color -r .tmp/github/gh_secret_cleantarget_.json" - [ -s ".tmp/company_repos_url.txt" ] && interlace -tL .tmp/company_repos_url.txt -threads ${INTERLACE_THREADS} -c "trufflehog git _target_ -j 2>&1 | jq -c > _output_/_cleantarget_" -o .tmp/github/ - if [ -d ".tmp/github/" ]; then - if [ -z "$output_file" ]; then - cat .tmp/github/* 2>/dev/null | jq -c | jq -r - else - cat .tmp/github/* 2>/dev/null | jq -c | jq -r > "$output_file" - fi - fi - else - printf "\n${bred} Required file ${GITHUB_TOKENS} not exists or empty${reset}\n" - fi -} - -# Default values -domain="" -github_tokens="" - -# Parse command-line arguments -while [[ "$#" -gt 0 ]]; do - case $1 in - -d|--domain) domain="$2"; shift ;; - -g|--github-tokens) github_tokens="$2"; shift ;; - -o|--output) output_file="$2"; shift ;; # Added this line to handle the -o flag - -h|--help) help_menu ;; - *) - if [ -z "$domain" ]; then - domain="$1" - else - echo -e "${bred}Unknown parameter passed: $1${reset}" >&2; help_menu - fi - ;; - esac - shift -done - -# Validate inputs -if [ -z "$domain" ] || [ -z "$github_tokens" ]; then - echo -e "${bred}Error: Missing required parameters.${reset}" >&2 - help_menu - exit 1 -fi - -# Execute the main function -github_repos \ No newline at end of file diff --git a/bin/rftw_osint_googledorks b/bin/rftw_osint_googledorks deleted file mode 100755 index 94e31137..00000000 --- a/bin/rftw_osint_googledorks +++ /dev/null @@ -1,58 +0,0 @@ -#!/bin/bash - -# Default config path -CONFIG_PATH="$RECONFTW_CFG" - -# Check if the config file exists -if [ -f "$CONFIG_PATH" ]; then - source "$CONFIG_PATH" -else - echo "Error: reconftw.cfg not found at $CONFIG_PATH!" 
- exit 1 -fi - -# Help menu -function help_menu() { - echo -e "Usage: ./google_dorks_script.sh [OPTIONS]" - echo -e "Options:" - echo -e " -d, --domain DOMAIN Specify the domain" - echo -e " -o, --output FILE (Optional) Specify the output directory file" - echo -e " -h, --help Display this help menu" - exit 1 -} - -# Main function -function google_dorks() { - output_file_arg="" - if [ -n "$output" ]; then - output_file_arg="-o ${output}" - fi - - python3 "${tools}/dorks_hunter/dorks_hunter.py" -d "$domain" $output_file_arg || { - echo -e "${bred}Error: dorks_hunter.py failed.${reset}" >&2; - exit 1; - } -} - -# Parse command-line arguments -domain="" -output_dir="" -while [[ "$#" -gt 0 ]]; do - case $1 in - -d|--domain) domain="$2"; shift ;; - -o|--output) output="$2"; shift ;; - -h|--help) help_menu ;; - *) echo -e "${bred}Unknown parameter passed: $1${reset}" >&2; help_menu ;; - esac - shift -done - -# Validate inputs -if [ -z "$domain" ]; then - echo -e "${bred}Error: Missing required parameters.${reset}" >&2 - help_menu - exit 1 -fi - -# Execute the main function -google_dorks diff --git a/bin/rftw_osint_metadata b/bin/rftw_osint_metadata deleted file mode 100755 index b9be0d0f..00000000 --- a/bin/rftw_osint_metadata +++ /dev/null @@ -1,40 +0,0 @@ -#!/bin/bash - -# Load environment variables -if [ -f "reconftw.cfg" ]; then - source reconftw.cfg -else - echo "Error: reconftw.cfg not found!" - exit 1 -fi - -# Define global variables -called_fn_dir="/path/to/called_fn_dir" -LOGFILE="/path/to/logfile" -METAFINDER_LIMIT=100 -domain=$1 - -function gather_metadata() { - if { [ ! -f "$called_fn_dir/.metadata" ] || [ "$DIFF" = true ]; } && [ "$METADATA" = true ] && [ "$OSINT" = true ] && ! [[ $domain =~ ^[0-9]+\.[0-9]+\.[0-9]+\.[0-9] ]]; then - echo -e "${yellow}Scanning metadata in public files${reset}" - - metafinder -d "$domain" -l $METAFINDER_LIMIT -o osint -go -bi -ba &>> "$LOGFILE" || { echo "metafinder command failed"; exit 1; } - mv "osint/${domain}/"*".txt" "osint/" 2>>"$LOGFILE" - rm -rf "osint/${domain}" 2>>"$LOGFILE" - echo "Results are saved in $domain/osint/[software/authors/metadata_results].txt" - elif [ "$METADATA" = false ] || [ "$OSINT" = false ]; then - echo -e "${yellow}metadata skipped as defined in reconftw.cfg${reset}" - else - echo -e "${yellow}metadata is already processed or input is an IP. To force, delete\n $called_fn_dir/.metadata${reset}" - fi -} - -function main() { - if [ -z "$domain" ]; then - echo "Usage: $0 " - exit 1 - fi - gather_metadata -} - -main diff --git a/bin/rftw_osint_postleaks b/bin/rftw_osint_postleaks deleted file mode 100755 index bb40b023..00000000 --- a/bin/rftw_osint_postleaks +++ /dev/null @@ -1,51 +0,0 @@ -#!/bin/bash - -# Load environment variables -if [ -f "reconftw.cfg" ]; then - source reconftw.cfg -else - echo "Error: reconftw.cfg not found!" - exit 1 -fi - -# Help menu function -function help_menu() { - echo -e "Usage: $0 [-d DOMAIN] or $0 [DOMAIN]" - echo -e "Options:" - echo -e " -d DOMAIN Specify the domain to scan for postman leaks." - echo -e " -h, --help Display this help menu" - exit 1 -} - -function postleaks() { - if { [ ! -f "$called_fn_dir/.postleaks" ] || [ "$DIFF" = true ]; } && [ "$POSTMAN_LEAKS" = true ] && [ "$OSINT" = true ] && ! 
[[ $domain =~ ^[0-9]+\.[0-9]+\.[0-9]+\.[0-9] ]]; then - echo -e "${yellow}Scanning for leaks in postman public directory${reset}" - - postleaksNg -k "$domain" > .tmp/postleaks.txt || { echo "postleaksNg command failed"; exit 1; } - echo "Results are saved in .tmp/postleaks.txt" - elif [ "$POSTMAN_LEAKS" = false ] || [ "$OSINT" = false ]; then - echo -e "${yellow}postleaks skipped as defined in reconftw.cfg${reset}" - else - echo -e "${yellow}postleaks is already processed or input is an IP. To force, delete\n $called_fn_dir/.postleaks${reset}" - fi -} - -# Main function to execute the postleaks function -function main() { - if [[ -z "$domain" ]]; then - help_menu - fi - postleaks -} - -# Parse command-line arguments -while [[ "$#" -gt 0 ]]; do - case $1 in - -d) domain="$2"; shift ;; - -h|--help) help_menu ;; - *) domain="$1" ;; - esac - shift -done - -main diff --git a/bin/rftw_osint_whois b/bin/rftw_osint_whois deleted file mode 100755 index 99a6e0a8..00000000 --- a/bin/rftw_osint_whois +++ /dev/null @@ -1,68 +0,0 @@ -#!/bin/bash - -# Load environment variables -if [ -f "reconftw.cfg" ]; then - source reconftw.cfg -else - echo "Error: reconftw.cfg not found!" - exit 1 -fi - -# Function to display help menu -help_menu() { - echo "Usage: $0 " - echo "Search for domain information." - echo "" - echo "Options:" - echo " -h, --help Display this help menu" - exit 0 -} - -# Parse command-line arguments -while [[ "$#" -gt 0 ]]; do - case $1 in - -h|--help) help_menu ;; - *) domain="$1" ;; - esac - shift -done - -# Input validation -if [ -z "$domain" ] || [[ "$domain" == "-h" ]] || [[ "$domain" == "--help" ]]; then - help_menu -fi - -# Function to start the process -start_func() { - echo "[*] $1" -} - -# Function to end the process -end_func() { - echo "[+] $1" -} - -# Main function to gather domain info -domain_info_func() { - if { [ ! -f "$called_fn_dir/.domain_info" ]; } && [ "$DOMAIN_INFO" = true ] && [ "$OSINT" = true ] && ! [[ $domain =~ ^[0-9]+\.[0-9]+\.[0-9]+\.[0-9] ]]; then - start_func "Searching domain info (whois, registrant name/email domains)" - - # Domain General Info - whois -H $domain > osint/domain_info_general.txt || { echo "whois command failed"; exit 1; } - - # Reverse Whois (if DEEP or REVERSE_WHOIS is true) - if [ "$DEEP" = true ] || [ "$REVERSE_WHOIS" = true ]; then - timeout -k 1m ${AMASS_INTEL_TIMEOUT}m amass intel -d ${domain} -whois -timeout $AMASS_INTEL_TIMEOUT -o osint/domain_info_reverse_whois.txt 2>>"$LOGFILE" &>/dev/null - fi - - # Azure Tenant Domains - curl -s "https://aadinternals.azurewebsites.net/api/tenantinfo?domainName=${domain}" -H "Origin: https://aadinternals.com" | jq -r .domains[].name > osint/azure_tenant_domains.txt - - end_func "Results are saved in $domain/osint/domain_info_[general/name/email/ip].txt" - else - echo "Skipped domain_info due to conditions or configuration." - fi -} - -# Call the main function -domain_info_func diff --git a/bin/rftw_sub_active b/bin/rftw_sub_active deleted file mode 100755 index 97e51a6b..00000000 --- a/bin/rftw_sub_active +++ /dev/null @@ -1,75 +0,0 @@ -#!/bin/bash - -# Load environment variables -if [ -f "reconftw.cfg" ]; then - source reconftw.cfg -else - echo "Error: reconftw.cfg not found!" - exit 1 -fi - -# Help menu -display_help() { - echo "Usage: $0 [Options]" - echo - echo " -d, --domain Domain for which subdomain enumeration is required" - echo " -l, --log Log file location" - echo " -p, --public-limit PUREDNS Public Limit" - echo " ... Other options here ..." 
- echo " -h, --help Display this help and exit" - echo - echo "Example: $0 -d example.com -l /path/to/log" - exit 1 -} - -# Input validation -if [ "$#" -eq 0 ]; then - display_help - exit 1 -fi - -# Parse input arguments -while [[ "$#" -gt 0 ]]; do - case $1 in - -d|--domain) domain="$2"; shift ;; - -l|--log) LOGFILE="$2"; shift ;; # Add this - -h|--help) display_help ;; - *) echo "Unknown parameter passed: $1"; exit 1 ;; - esac - shift -done - -# Check if domain has been provided -if [ -z "$domain" ]; then - echo "Error: Domain not specified." - exit 1 -fi - -sub_active() { - if [ ! -f "$called_fn_dir/.${FUNCNAME[0]}" ] || [ "$DIFF" = true ]; then - start_subfunc ${FUNCNAME[0]} "Running : Active Subdomain Enumeration" - find .tmp -type f -iname "*_subs.txt" -exec cat {} + | anew -q .tmp/subs_no_resolved.txt - [ -s "$outOfScope_file" ] && deleteOutScoped $outOfScope_file .tmp/subs_no_resolved.txt - if [ ! "$AXIOM" = true ]; then - resolvers_update_quick_local - [ -s ".tmp/subs_no_resolved.txt" ] && puredns resolve .tmp/subs_no_resolved.txt -w .tmp/subdomains_tmp.txt -r $resolvers --resolvers-trusted $resolvers_trusted -l $PUREDNS_PUBLIC_LIMIT --rate-limit-trusted $PUREDNS_TRUSTED_LIMIT --wildcard-tests $PUREDNS_WILDCARDTEST_LIMIT --wildcard-batch $PUREDNS_WILDCARDBATCH_LIMIT 2>>"$LOGFILE" >/dev/null - else - resolvers_update_quick_axiom - [ -s ".tmp/subs_no_resolved.txt" ] && axiom-scan .tmp/subs_no_resolved.txt -m puredns-resolve -r /home/op/lists/resolvers.txt --resolvers-trusted /home/op/lists/resolvers_trusted.txt --wildcard-tests $PUREDNS_WILDCARDTEST_LIMIT --wildcard-batch $PUREDNS_WILDCARDBATCH_LIMIT -o .tmp/subdomains_tmp.txt $AXIOM_EXTRA_ARGS 2>>"$LOGFILE" >/dev/null - fi - echo $domain | dnsx -retry 3 -silent -r $resolvers_trusted 2>>"$LOGFILE" | anew -q .tmp/subdomains_tmp.txt - if [ "$DEEP" = true ]; then - cat .tmp/subdomains_tmp.txt | tlsx -san -cn -silent -ro -c $TLSX_THREADS -p $TLS_PORTS | anew -q .tmp/subdomains_tmp.txt - else - cat .tmp/subdomains_tmp.txt | tlsx -san -cn -silent -ro -c $TLSX_THREADS | anew -q .tmp/subdomains_tmp.txt - fi - [[ "$INSCOPE" = true ]] && check_inscope .tmp/subdomains_tmp.txt 2>>"$LOGFILE" >/dev/null - NUMOFLINES=$(cat .tmp/subdomains_tmp.txt 2>>"$LOGFILE" | grep "\.$domain$\|^$domain$" | anew subdomains/subdomains.txt | sed '/^$/d' | wc -l) - end_subfunc "${NUMOFLINES} subs DNS resolved from passive" ${FUNCNAME[0]} - else - printf "${yellow} ${FUNCNAME[0]} is already processed, to force executing ${FUNCNAME[0]} delete\n $called_fn_dir/.${FUNCNAME[0]} ${reset}\n\n" - fi -} - -# Run the function -sub_active \ No newline at end of file diff --git a/bin/rftw_sub_analytics b/bin/rftw_sub_analytics deleted file mode 100755 index f31f9025..00000000 --- a/bin/rftw_sub_analytics +++ /dev/null @@ -1,96 +0,0 @@ -#!/bin/bash - -# Load environment variables -if [ -f "reconftw.cfg" ]; then - source reconftw.cfg -else - echo "Error: reconftw.cfg not found!" - exit 1 -fi - -function usage() { - echo "Usage: $0 [-h] [-d DOMAIN] [-i INPUT_FILE] [-o OUTPUT_FILE]" - echo " -h Display this help message." - echo " -d DOMAIN Specify the target domain." - echo " -i INPUT_FILE Specify the input file." - echo " -o OUTPUT_FILE Specify the output file." - exit 1 -} - -# Validate input arguments -input_file="" -output_file="" -while getopts ":hd:i:o:" opt; do - case ${opt} in - h) - usage - ;; - d) - domain="$OPTARG" - ;; - i) - input_file="$OPTARG" - if [[ ! -f "$input_file" ]]; then - echo "Error: Input file does not exist." 
- exit 1 - fi - ;; - o) - output_file="$OPTARG" - touch "$output_file" 2>/dev/null || { echo "Error: Cannot write to the specified output file."; exit 1; } - ;; - *) - echo "Invalid option: -$OPTARG" 1>&2 - usage - ;; - esac -done -shift $((OPTIND -1)) - -if [[ -z "$domain" ]]; then - echo "Error: No domain specified." - usage -fi - -function sub_analytics() { - if { [ ! -f "$called_fn_dir/.sub_analytics" ] || [ "$DIFF" = true ]; } && [ "$SUBANALYTICS" = true ]; then - echo "Running : Analytics Subdomain Enumeration" - - if [ -s ".tmp/probed_tmp_scrap.txt" ]; then - mkdir -p .tmp/output_analytics/ - analyticsrelationships -ch < .tmp/probed_tmp_scrap.txt >> .tmp/analytics_subs_tmp.txt 2>>"$LOGFILE" - - [ -s ".tmp/analytics_subs_tmp.txt" ] && cat .tmp/analytics_subs_tmp.txt | grep "\.$domain$\|^$domain$" | sed "s/|__ //" | anew -q .tmp/analytics_subs_clean.txt - - if [ ! "$AXIOM" = true ]; then - resolvers_update_quick_local - [ -s ".tmp/analytics_subs_clean.txt" ] && puredns resolve .tmp/analytics_subs_clean.txt -w .tmp/analytics_subs_resolved.txt -r $resolvers --resolvers-trusted $resolvers_trusted -l $PUREDNS_PUBLIC_LIMIT --rate-limit-trusted $PUREDNS_TRUSTED_LIMIT --wildcard-tests $PUREDNS_WILDCARDTEST_LIMIT --wildcard-batch $PUREDNS_WILDCARDBATCH_LIMIT 2>>"$LOGFILE" >/dev/null - else - resolvers_update_quick_axiom - [ -s ".tmp/analytics_subs_clean.txt" ] && axiom-scan .tmp/analytics_subs_clean.txt -m puredns-resolve -r /home/op/lists/resolvers.txt --resolvers-trusted /home/op/lists/resolvers_trusted.txt --wildcard-tests $PUREDNS_WILDCARDTEST_LIMIT --wildcard-batch $PUREDNS_WILDCARDBATCH_LIMIT -o .tmp/analytics_subs_resolved.txt $AXIOM_EXTRA_ARGS 2>>"$LOGFILE" >/dev/null - fi - fi - - [[ "$INSCOPE" = true ]] && check_inscope .tmp/analytics_subs_resolved.txt 2>>"$LOGFILE" >/dev/null - NUMOFLINES=$(cat .tmp/analytics_subs_resolved.txt 2>>"$LOGFILE" | anew subdomains/subdomains.txt | sed '/^$/d' | wc -l) - - echo "${NUMOFLINES} new subs (analytics relationship)" - else - if [ "$SUBANALYTICS" = false ]; then - echo -e "\n${yellow} sub_analytics skipped in this mode or defined in reconftw.cfg ${reset}\n" - else - echo -e "${yellow} sub_analytics is already processed, to force executing sub_analytics delete\n $called_fn_dir/.sub_analytics ${reset}\n\n" - fi - fi -} - -# At the end where you save the output, you can redirect the output to the specified file -if [[ ! -z "$output_file" ]]; then - # For example, if you were saving data like this: - # echo "${NUMOFLINES} new subs (analytics relationship)" - # Change it to: - echo "${NUMOFLINES} new subs (analytics relationship)" > "$output_file" -fi - -# Execute the function -sub_analytics \ No newline at end of file diff --git a/bin/rftw_sub_brute b/bin/rftw_sub_brute deleted file mode 100755 index 6e64bf42..00000000 --- a/bin/rftw_sub_brute +++ /dev/null @@ -1,88 +0,0 @@ -#!/bin/bash - -# Load environment variables from reconftw.cfg -if [ -f "reconftw.cfg" ]; then - source reconftw.cfg -else - echo "Error: reconftw.cfg not found!" 
- exit 1 -fi - -# Help menu -display_help() { - echo "Usage: $0 [Options]" - echo "Options:" - echo " -h, --help Display this help and exit" - echo " -i, --input INPUT_FILE Specify the input file" - echo " -o, --output OUTPUT_FILE Specify the output file" - echo - echo "Ensure all required environment variables are set in reconftw.cfg" - exit 1 -} - -# Input validation for environment variables -if [ -z "$domain" ] || [ -z "$resolvers_trusted" ] || [ -z "$LOGFILE" ] || [ -z "$subs_wordlist" ] || [ -z "$subs_wordlist_big" ]; then - echo "Error: One or more required environment variables are missing in reconftw.cfg." - display_help -fi - -# Initialize input_file and output_file variables -input_file="" -output_file="" - -# Parse input arguments -while [[ "$#" -gt 0 ]]; do - case $1 in - -h|--help) display_help ;; - -i|--input) - shift; - input_file="$1"; - if [[ ! -f "$input_file" ]]; then - echo "Error: Input file does not exist." - exit 1 - fi - ;; - -o|--output) - shift; - output_file="$1"; - touch "$output_file" 2>/dev/null || { echo "Error: Cannot write to the specified output file."; exit 1; } - ;; - *) echo "Unknown parameter passed: $1"; display_help ;; - esac - shift -done - -# Main logic -sub_brute() { - if { [ ! -f "$called_fn_dir/.${FUNCNAME[0]}" ] || [ "$DIFF" = true ]; } && [ "$SUBBRUTE" = true ]; then - start_subfunc ${FUNCNAME[0]} "Running : Bruteforce Subdomain Enumeration" - if [ ! "$AXIOM" = true ]; then - resolvers_update_quick_local - if [ "$DEEP" = true ]; then - puredns bruteforce $subs_wordlist_big $domain -w .tmp/subs_brute.txt -r $resolvers --resolvers-trusted $resolvers_trusted -l $PUREDNS_PUBLIC_LIMIT --rate-limit-trusted $PUREDNS_TRUSTED_LIMIT --wildcard-tests $PUREDNS_WILDCARDTEST_LIMIT --wildcard-batch $PUREDNS_WILDCARDBATCH_LIMIT 2>>"$LOGFILE" >/dev/null - else - puredns bruteforce $subs_wordlist $domain -w .tmp/subs_brute.txt -r $resolvers --resolvers-trusted $resolvers_trusted -l $PUREDNS_PUBLIC_LIMIT --rate-limit-trusted $PUREDNS_TRUSTED_LIMIT --wildcard-tests $PUREDNS_WILDCARDTEST_LIMIT --wildcard-batch $PUREDNS_WILDCARDBATCH_LIMIT 2>>"$LOGFILE" >/dev/null - fi - [ -s ".tmp/subs_brute.txt" ] && puredns resolve .tmp/subs_brute.txt -w .tmp/subs_brute_valid.txt -r $resolvers --resolvers-trusted $resolvers_trusted -l $PUREDNS_PUBLIC_LIMIT --rate-limit-trusted $PUREDNS_TRUSTED_LIMIT --wildcard-tests $PUREDNS_WILDCARDTEST_LIMIT --wildcard-batch $PUREDNS_WILDCARDBATCH_LIMIT 2>>"$LOGFILE" >/dev/null - else - resolvers_update_quick_axiom - if [ "$DEEP" = true ]; then - axiom-scan $subs_wordlist_big -m puredns-single $domain -r /home/op/lists/resolvers.txt --resolvers-trusted /home/op/lists/resolvers_trusted.txt --wildcard-tests $PUREDNS_WILDCARDTEST_LIMIT --wildcard-batch $PUREDNS_WILDCARDBATCH_LIMIT -o .tmp/subs_brute.txt $AXIOM_EXTRA_ARGS 2>>"$LOGFILE" >/dev/null - else - axiom-scan $subs_wordlist -m puredns-single $domain -r /home/op/lists/resolvers.txt --resolvers-trusted /home/op/lists/resolvers_trusted.txt --wildcard-tests $PUREDNS_WILDCARDTEST_LIMIT --wildcard-batch $PUREDNS_WILDCARDBATCH_LIMIT -o .tmp/subs_brute.txt $AXIOM_EXTRA_ARGS 2>>"$LOGFILE" >/dev/null - fi - [ -s ".tmp/subs_brute.txt" ] && axiom-scan .tmp/subs_brute.txt -m puredns-resolve -r /home/op/lists/resolvers.txt --resolvers-trusted /home/op/lists/resolvers_trusted.txt --wildcard-tests $PUREDNS_WILDCARDTEST_LIMIT --wildcard-batch $PUREDNS_WILDCARDBATCH_LIMIT -o .tmp/subs_brute_valid.txt $AXIOM_EXTRA_ARGS 2>>"$LOGFILE" >/dev/null - fi - [[ "$INSCOPE" = true ]] && check_inscope 
.tmp/subs_brute_valid.txt 2>>"$LOGFILE" >/dev/null - NUMOFLINES=$(cat .tmp/subs_brute_valid.txt 2>>"$LOGFILE" | sed "s/*.//" | grep ".$domain$" | anew subdomains/subdomains.txt | sed '/^$/d' | wc -l) - end_subfunc "${NUMOFLINES} new subs (bruteforce)" ${FUNCNAME[0]} - else - if [ "$SUBBRUTE" = false ]; then - printf "\n${yellow} ${FUNCNAME[0]} skipped in this mode or defined in reconftw.cfg ${reset}\n" - else - printf "${yellow} ${FUNCNAME[0]} is already processed, to force executing ${FUNCNAME[0]} delete\n $called_fn_dir/.${FUNCNAME[0]} ${reset}\n\n" - fi - fi -} - -sub_brute \ No newline at end of file diff --git a/bin/rftw_sub_crt b/bin/rftw_sub_crt deleted file mode 100755 index 806e48dd..00000000 --- a/bin/rftw_sub_crt +++ /dev/null @@ -1,75 +0,0 @@ -#!/bin/bash - -# Load environment variables from reconftw.cfg -if [ -f "reconftw.cfg" ]; then - source reconftw.cfg -else - echo "Error: reconftw.cfg not found!" - exit 1 -fi - -# Help Menu -function display_help() { - echo "Usage: $0 [OPTIONS]" - echo "Options:" - echo " -d, --domain Set the target domain." - echo " -l, --limit Set the crt limit (default 100)." - echo " -D, --diff Set DIFF to true." - echo " -s, --subcrt Run crt subdomain enumeration." - echo " -i, --inscope Check inscope." - echo " -o, --output Specify the output file." - echo " -h, --help Display this help menu." -} - -# Other functions -function start_subfunc() { - echo "[+] Starting $1: $2" -} - -function end_subfunc() { - echo "[+] Ending $1: $2" -} - -function check_inscope() { - echo "Checking inscope for $1" - # TODO: Add your inscope logic here -} -# Variables for input and output -output_file="" - -# Input validation and argument parsing -while [[ "$#" -gt 0 ]]; do - case $1 in - -d|--domain) domain="$2"; shift;; - -l|--limit) CTR_LIMIT="$2"; shift;; - -D|--diff) DIFF=true;; - -s|--subcrt) SUBCRT=true;; - -i|--inscope) INSCOPE=true;; - -o|--output) - shift; - output_file="$1"; - touch "$output_file" 2>/dev/null || { echo "Error: Cannot write to the specified output file."; exit 1; } - ;; - -h|--help) display_help; exit 0;; - *) echo "Unknown parameter passed: $1"; display_help; exit 1;; - esac - shift -done - -# Main Functionality -script_name=$(basename $0) -if [[ ! -f "$called_fn_dir/.$script_name" ]] || [[ "$DIFF" = true ]] && [[ "$SUBCRT" = true ]]; then - start_subfunc "$script_name" "Running : Crtsh Subdomain Enumeration" - - crt -s -json -l ${CTR_LIMIT} $domain 2>>"$LOGFILE" | jq -r '.[].subdomain' 2>>"$LOGFILE" | sed -e "s/^\\*\\.//" | anew -q .tmp/crtsh_subs_tmp.txt 2>>"$LOGFILE" >/dev/null - [[ "$INSCOPE" = true ]] && check_inscope .tmp/crtsh_subs_tmp.txt 2>>"$LOGFILE" >/dev/null - NUMOFLINES=$(cat .tmp/crtsh_subs_tmp.txt 2>>"$LOGFILE" | sed 's/\*.//g' | anew .tmp/crtsh_subs.txt | sed '/^$/d' | wc -l) - end_subfunc "${NUMOFLINES} new subs (cert transparency)" "$script_name" - -else - if [[ "$SUBCRT" = false ]]; then - echo "${script_name} skipped in this mode or defined in reconftw.cfg" - else - echo "${script_name} is already processed, to force executing ${script_name} delete $called_fn_dir/.${script_name}" - fi -fi \ No newline at end of file diff --git a/bin/rftw_sub_dns b/bin/rftw_sub_dns deleted file mode 100755 index 51483efb..00000000 --- a/bin/rftw_sub_dns +++ /dev/null @@ -1,85 +0,0 @@ -#!/bin/bash - -# Load environment variables from reconftw.cfg -if [ -f "reconftw.cfg" ]; then - source reconftw.cfg -else - echo "Error: reconftw.cfg not found!" 
- exit 1 -fi - -# Color definitions for output (optional, you can adjust or remove) -yellow='\033[1;33m' -reset='\033[0m' - -# Help menu -display_help() { - echo "Usage: $0 [OPTIONS]" - echo "Options:" - echo " -d, --domain Set the target domain." - echo " -h, --help Display this help and exit." - echo " -f, --input-file Specify the input file with a list of domains." - echo " -o, --output-file Specify the output file for saving results." - echo - echo "Ensure all required environment variables are set in reconftw.cfg" -} - -# Input validation -if [ -z "$domain" ] || [ -z "$resolvers_trusted" ] || [ -z "$LOGFILE" ]; then - echo "Error: One or more required environment variables are missing in reconftw.cfg." - display_help - exit 1 -fi - -# Default values for input and output files -INPUTFILE="" -OUTPUTFILE="subdomains/subdomains.txt" - -# Parse input arguments -while [[ "$#" -gt 0 ]]; do - case $1 in - -d|--domain) domain="$2"; shift;; - -f|--input-file) INPUTFILE="$2"; shift;; - -o|--output-file) OUTPUTFILE="$2"; shift;; - -h|--help) display_help; exit 0;; - *) echo "Unknown parameter passed: $1"; display_help; exit 1;; - esac - shift -done - -# If an input file is provided, validate it -if [ -n "$INPUTFILE" ] && [ ! -f "$INPUTFILE" ]; then - echo "Error: Input file $INPUTFILE not found!" - exit 1 -fi - -# Main logic (Original function content) -sub_dns() { - if [ ! -f "$called_fn_dir/.${FUNCNAME[0]}" ] || [ "$DIFF" = true ]; then - start_subfunc ${FUNCNAME[0]} "Running : DNS Subdomain Enumeration and PTR search" - if [ ! "$AXIOM" = true ]; then - [ -s "subdomains/subdomains.txt" ] && cat subdomains/subdomains.txt | dnsx -r $resolvers_trusted -a -aaaa -cname -ns -ptr -mx -soa -silent -retry 3 -json -o subdomains/subdomains_dnsregs.json 2>>"$LOGFILE" >/dev/null - [ -s "subdomains/subdomains_dnsregs.json" ] && cat subdomains/subdomains_dnsregs.json | jq -r 'try .a[], try .aaaa[], try .cname[], try .ns[], try .ptr[], try .mx[], try .soa[]' 2>/dev/null | grep ".$domain$" | anew -q .tmp/subdomains_dns.txt - [ -s "subdomains/subdomains_dnsregs.json" ] && cat subdomains/subdomains_dnsregs.json | jq -r 'try .a[]' | sort -u | hakip2host | cut -d' ' -f 3 | unfurl -u domains | sed -e 's/*\.//' -e 's/\.$//' -e '/\./!d' | grep ".$domain$" | anew -q .tmp/subdomains_dns.txt - [ -s "subdomains/subdomains_dnsregs.json" ] && cat subdomains/subdomains_dnsregs.json | jq -r 'try "\(.host) - \(.a[])"' 2>/dev/null | sort -u -k2 | anew -q subdomains/subdomains_ips.txt - resolvers_update_quick_local - [ -s ".tmp/subdomains_dns.txt" ] && puredns resolve .tmp/subdomains_dns.txt -w .tmp/subdomains_dns_resolved.txt -r $resolvers --resolvers-trusted $resolvers_trusted -l $PUREDNS_PUBLIC_LIMIT --rate-limit-trusted $PUREDNS_TRUSTED_LIMIT --wildcard-tests $PUREDNS_WILDCARDTEST_LIMIT --wildcard-batch $PUREDNS_WILDCARDBATCH_LIMIT 2>>"$LOGFILE" >/dev/null - else - [ -s "subdomains/subdomains.txt" ] && axiom-scan subdomains/subdomains.txt -m dnsx -retry 3 -a -aaaa -cname -ns -ptr -mx -soa -json -o subdomains/subdomains_dnsregs.json $AXIOM_EXTRA_ARGS 2>>"$LOGFILE" >/dev/null - [ -s "subdomains/subdomains_dnsregs.json" ] && cat subdomains/subdomains_dnsregs.json | jq -r 'try .a[]' | sort -u | anew -q .tmp/subdomains_dns_a_records.txt - [ -s "subdomains/subdomains_dnsregs.json" ] && cat subdomains/subdomains_dnsregs.json | jq -r 'try .a[]' | sort -u | hakip2host | cut -d' ' -f 3 | unfurl -u domains | sed -e 's/*\.//' -e 's/\.$//' -e '/\./!d' | grep ".$domain$" | anew -q .tmp/subdomains_dns.txt - [ -s 
"subdomains/subdomains_dnsregs.json" ] && cat subdomains/subdomains_dnsregs.json | jq -r 'try .a[], try .aaaa[], try .cname[], try .ns[], try .ptr[], try .mx[], try .soa[]' 2>/dev/null | grep ".$domain$" | anew -q .tmp/subdomains_dns.txt - [ -s "subdomains/subdomains_dnsregs.json" ] && cat subdomains/subdomains_dnsregs.json | jq -r 'try "\(.host) - \(.a[])"' 2>/dev/null | sort -u -k2 | anew -q subdomains/subdomains_ips.txt - resolvers_update_quick_axiom - [ -s ".tmp/subdomains_dns.txt" ] && axiom-scan .tmp/subdomains_dns.txt -m puredns-resolve -r /home/op/lists/resolvers.txt --resolvers-trusted /home/op/lists/resolvers_trusted.txt --wildcard-tests $PUREDNS_WILDCARDTEST_LIMIT --wildcard-batch $PUREDNS_WILDCARDBATCH_LIMIT -o .tmp/subdomains_dns_resolved.txt $AXIOM_EXTRA_ARGS 2>>"$LOGFILE" >/dev/null - fi - [[ "$INSCOPE" = true ]] && check_inscope .tmp/subdomains_dns_resolved.txt 2>>"$LOGFILE" >/dev/null - NUMOFLINES=$(cat .tmp/subdomains_dns_resolved.txt 2>>"$LOGFILE" | grep "\.$domain$\|^$domain$" | anew subdomains/subdomains.txt | sed '/^$/d' | wc -l) - end_subfunc "${NUMOFLINES} new subs (dns resolution)" ${FUNCNAME[0]} - else - printf "${yellow} ${FUNCNAME[0]} is already processed, to force executing ${FUNCNAME[0]} delete\n $called_fn_dir/.${FUNCNAME[0]} ${reset}\n\n" - fi -} - -# Call the main logic function -sub_dns diff --git a/bin/rftw_sub_full b/bin/rftw_sub_full deleted file mode 100755 index a1e7c442..00000000 --- a/bin/rftw_sub_full +++ /dev/null @@ -1,130 +0,0 @@ -#!/bin/bash - -# Load environment variables from reconftw.cfg -if [ -f "reconftw.cfg" ]; then - source reconftw.cfg -else - echo "Error: reconftw.cfg not found!" - exit 1 -fi - -# Function to display help menu -help_menu() { - echo "Usage: $0 [OPTIONS]" - echo "Options:" - echo " -d, --domain Set the target domain." - echo " -h, --help Display this help and exit." - echo " -f, --input-file Specify the input file with a list of domains." - echo " -o, --output-file Specify the output file for saving results." - echo "Ensure all required environment variables are set in reconftw.cfg" - exit 0 -} - -# Parse input arguments -while [[ "$#" -gt 0 ]]; do - case $1 in - -d|--domain) domain="$2"; shift;; - -f|--input-file) INPUTFILE="$2"; shift;; - -o|--output-file) OUTPUTFILE="$2"; shift;; - -h|--help) help_menu;; - *) echo "Unknown parameter passed: $1"; help_menu; exit 1;; - esac - shift -done - -# If an input file is provided, validate it -if [ -n "$INPUTFILE" ] && [ ! -f "$INPUTFILE" ]; then - echo "Error: Input file $INPUTFILE not found!" - exit 1 -fi - -# Input validation -if [ -z "$domain" ] || [[ "$domain" == "-h" ]] || [[ "$domain" == "--help" ]]; then - help_menu -fi - -# Function to start the process -start_func() { - echo "[*] $1" -} - -# Function to end the process -end_func() { - echo "[+] $1" -} - -# Function to notify the user -notification() { - echo "[!] $1" -} - -# Main function to gather subdomain info -subdomains_full_func() { - echo "Starting subdomain enumeration for $domain" - - NUMOFLINES_subs="0" - NUMOFLINES_probed="0" - printf "${bgreen}#######################################################################\n\n" - ! [[ $domain =~ ^[0-9]+\.[0-9]+\.[0-9]+\.[0-9] ]] && printf "${bblue} Subdomain Enumeration $domain\n\n" - [[ $domain =~ ^[0-9]+\.[0-9]+\.[0-9]+\.[0-9] ]] && printf "${bblue} Scanning IP $domain\n\n" - [ -s "subdomains/subdomains.txt" ] && cp subdomains/subdomains.txt .tmp/subdomains_old.txt - [ -s "webs/webs.txt" ] && cp webs/webs.txt .tmp/probed_old.txt - - if ( [ ! 
-f "$called_fn_dir/.sub_active" ] || [ ! -f "$called_fn_dir/.sub_brute" ] || [ ! -f "$called_fn_dir/.sub_permut" ] || [ ! -f "$called_fn_dir/.sub_recursive_brute" ] ) || [ "$DIFF" = true ] ; then - resolvers_update - fi - - [ -s "${inScope_file}" ] && cat ${inScope_file} | anew -q subdomains/subdomains.txt - - if ! [[ $domain =~ ^[0-9]+\.[0-9]+\.[0-9]+\.[0-9] ]] && [ "$SUBDOMAINS_GENERAL" = true ]; then - rftw_sub_passive - rftw_sub_crt - rftw_sub_active - rftw_sub_noerror - rftw_sub_brute - rftw_sub_permut - rftw_sub_regex_permut - rftw_sub_recursive_passive - rftw_sub_recursive_brute - rftw_sub_dns - rftw_sub_scraping - rftw_sub_analytics - else - notification "IP/CIDR detected, subdomains search skipped" - echo $domain | anew -q subdomains/subdomains.txt - fi - - webprobe_simple - if [ -s "subdomains/subdomains.txt" ]; then - [ -s "$outOfScope_file" ] && deleteOutScoped $outOfScope_file subdomains/subdomains.txt - NUMOFLINES_subs=$(cat subdomains/subdomains.txt 2>>"$LOGFILE" | anew .tmp/subdomains_old.txt | sed '/^$/d' | wc -l) - fi - if [ -s "webs/webs.txt" ]; then - [ -s "$outOfScope_file" ] && deleteOutScoped $outOfScope_file webs/webs.txt - NUMOFLINES_probed=$(cat webs/webs.txt 2>>"$LOGFILE" | anew .tmp/probed_old.txt | sed '/^$/d' | wc -l) - fi - - if [ -n "$OUTPUTFILE" ]; then - cat subdomains/subdomains.txt > "$OUTPUTFILE" - cat webs/webs.txt >> "$OUTPUTFILE" - fi - - printf "${bblue}\n Total subdomains: ${reset}\n\n" - notification "- ${NUMOFLINES_subs} alive" good - [ -s "subdomains/subdomains.txt" ] && cat subdomains/subdomains.txt | sort - notification "- ${NUMOFLINES_probed} new web probed" good - [ -s "webs/webs.txt" ] && cat webs/webs.txt | sort - notification "Subdomain Enumeration Finished" good - printf "${bblue} Results are saved in $domain/subdomains/subdomains.txt and webs/webs.txt${reset}\n" - printf "${bgreen}#######################################################################\n\n" -} - -# Call the main function -# Logic to handle multiple domains if INPUTFILE is provided -if [ -n "$INPUTFILE" ]; then - while read -r domain; do - subdomains_full_func - done < "$INPUTFILE" -else - subdomains_full_func -fi diff --git a/bin/rftw_sub_noerror b/bin/rftw_sub_noerror deleted file mode 100755 index c98f3927..00000000 --- a/bin/rftw_sub_noerror +++ /dev/null @@ -1,74 +0,0 @@ -#!/bin/bash - -# Load environment variables from reconftw.cfg -if [ -f "reconftw.cfg" ]; then - source reconftw.cfg -else - echo "Error: reconftw.cfg not found!" - exit 1 -fi - -# Help menu -display_help() { - echo "Usage: $0 [Options]" - echo - echo " -d, --domain Domain to check" - echo " -i, --input-file Input file with domains" - echo " -o, --output-file File to save results" - echo " -h, --help Display this help and exit" - echo - echo "Example: $0 -d example.com" - exit 1 -} - -# Parse input arguments -while [[ "$#" -gt 0 ]]; do - case $1 in - -d|--domain) domain="$2"; shift ;; - -i|--input-file) INPUTFILE="$2"; shift ;; - -o|--output-file) OUTPUTFILE="$2"; shift ;; - -h|--help) display_help ;; - *) echo "Unknown parameter passed: $1"; display_help ;; - esac - shift -done - -# Input validation -if [ -z "$domain" ] && [ -z "$INPUTFILE" ]; then - echo "Error: Either domain or input file must be specified." - display_help -fi - -sub_noerror() { - if { [ ! 
-f "$called_fn_dir/.${FUNCNAME[0]}" ] || [ "$DIFF" = true ]; } && [ "$SUBNOERROR" = true ]; then - start_subfunc ${FUNCNAME[0]} "Running : Checking NOERROR DNS response" - if [[ $(echo "${RANDOM}thistotallynotexist${RANDOM}.$domain" | dnsx -r $resolvers -rcode noerror,nxdomain -retry 3 -silent | cut -d' ' -f2) == "[NXDOMAIN]" ]]; then - resolvers_update_quick_local - if [ "$DEEP" = true ]; then - dnsx -d $domain -r $resolvers -silent -rcode noerror -w $subs_wordlist_big | cut -d' ' -f1 | anew -q .tmp/subs_noerror.txt 2>>"$LOGFILE" >/dev/null - else - dnsx -d $domain -r $resolvers -silent -rcode noerror -w $subs_wordlist | cut -d' ' -f1 | anew -q .tmp/subs_noerror.txt 2>>"$LOGFILE" >/dev/null - fi - [[ "$INSCOPE" = true ]] && check_inscope .tmp/subs_noerror.txt 2>>"$LOGFILE" >/dev/null - NUMOFLINES=$(cat .tmp/subs_noerror.txt 2>>"$LOGFILE" | sed "s/*.//" | grep ".$domain$" | anew subdomains/subdomains.txt | sed '/^$/d' | wc -l) - end_subfunc "${NUMOFLINES} new subs (DNS noerror)" ${FUNCNAME[0]} - else - printf "\n${yellow} Detected DNSSEC black lies, skipping this technique ${reset}\n" - fi - else - if [ "$SUBBRUTE" = false ]; then - printf "\n${yellow} ${FUNCNAME[0]} skipped in this mode or defined in reconftw.cfg ${reset}\n" - else - printf "${yellow} ${FUNCNAME[0]} is already processed, to force executing ${FUNCNAME[0]} delete\n $called_fn_dir/.${FUNCNAME[0]} ${reset}\n\n" - fi - fi -} - -# Logic to handle multiple domains if INPUTFILE is provided -if [ -n "$INPUTFILE" ]; then - while read -r domain; do - sub_noerror - done < "$INPUTFILE" -else - sub_noerror -fi \ No newline at end of file diff --git a/bin/rftw_sub_passive b/bin/rftw_sub_passive deleted file mode 100755 index 113d2082..00000000 --- a/bin/rftw_sub_passive +++ /dev/null @@ -1,79 +0,0 @@ -#!/bin/bash - -# Variables -AMASS_ENUM_TIMEOUT=10m -DIFF=false -SUBPASSIVE=false -RUNAMASS=false -RUNSUBFINDER=false -DEEP=false -INSCOPE=false - -# Help menu -function display_help() { - echo "Usage: $0 [OPTIONS]" - echo "Options:" - echo " -d, --domain Set the target domain." - echo " -a, --amass Run amass." - echo " -s, --subfinder Run subfinder." - echo " -g, --github-tokens Specify GitHub tokens file." - echo " -l, --gitlab-tokens Specify GitLab tokens file." - echo " -D, --deep Run in deep mode." - echo " -i, --inscope Check inscope." - echo " -h, --help Display this help menu." -} - -function start_subfunc() { - echo "[+] Starting $1: $2" -} - -function end_subfunc() { - echo "[+] Ending $1: $2" -} - -function check_inscope() { - echo "Checking inscope for $1" - # TODO: Add your inscope logic here -} - -# Input validation and argument parsing -while [[ "$#" -gt 0 ]]; do - case $1 in - -d|--domain) domain="$2"; shift;; - -a|--amass) RUNAMASS=true;; - -s|--subfinder) RUNSUBFINDER=true;; - -g|--github-tokens) GITHUB_TOKENS="$2"; shift;; - -l|--gitlab-tokens) GITLAB_TOKENS="$2"; shift;; - -D|--deep) DEEP=true;; - -i|--inscope) INSCOPE=true;; - -h|--help) display_help; exit 0;; - *) echo "Unknown parameter passed: $1"; display_help; exit 1;; - esac - shift -done - -# Main functionality -if [[ ! 
-f "$called_fn_dir/.$0" ]] || [[ "$DIFF" = true ]] && [[ "$SUBPASSIVE" = true ]]; then - start_subfunc $0 "Running : Passive Subdomain Enumeration" - - [[ $RUNAMASS == true ]] && timeout -k 1m ${AMASS_ENUM_TIMEOUT} amass enum -passive -d $domain -config $AMASS_CONFIG -timeout $AMASS_ENUM_TIMEOUT -json .tmp/amass_json.json 2>>"$LOGFILE" &>/dev/null - [ -s ".tmp/amass_json.json" ] && cat .tmp/amass_json.json | jq -r '.name' | anew -q .tmp/amass_psub.txt - [[ $RUNSUBFINDER == true ]] && subfinder -all -d "$domain" -silent -o .tmp/subfinder_psub.txt 2>>"$LOGFILE" >/dev/null - if [ -s "${GITHUB_TOKENS}" ]; then - if [ "$DEEP" = true ]; then - github-subdomains -d $domain -t $GITHUB_TOKENS -o .tmp/github_subdomains_psub.txt 2>>"$LOGFILE" >/dev/null - else - github-subdomains -d $domain -k -q -t $GITHUB_TOKENS -o .tmp/github_subdomains_psub.txt 2>>"$LOGFILE" >/dev/null - fi - fi - if [ -s "${GITLAB_TOKENS}" ]; then - gitlab-subdomains -d $domain -t $GITLAB_TOKENS > .tmp/gitlab_subdomains_psub.txt 2>>"$LOGFILE" >/dev/null - fi - -else - if [[ "$SUBPASSIVE" = false ]]; then - echo "${FUNCNAME[0]} skipped in this mode or defined in reconftw.cfg" - else - echo "${FUNCNAME[0]} is already processed, to force executing ${FUNCNAME[0]} delete $called_fn_dir/.${FUNCNAME[0]}" - fi -fi diff --git a/bin/rftw_sub_permut b/bin/rftw_sub_permut deleted file mode 100755 index e2077047..00000000 --- a/bin/rftw_sub_permut +++ /dev/null @@ -1,92 +0,0 @@ -#!/bin/bash - -# Configuration file loading -CONFIG_FILE="reconftw.cfg" -if [[ -f "$CONFIG_FILE" ]]; then - source "$CONFIG_FILE" -else - echo "Configuration file '$CONFIG_FILE' not found. Exiting." - exit 1 -fi - -# Helper function to display usage/help menu -usage() { - echo "Usage: $0 [OPTIONS]" - echo - echo "Options:" - echo " -h, --help Display this help and exit." - echo - echo "Make sure to set up 'reconftw.cfg' with required environment variables." - exit 1 -} - -# Input validation -if [[ "$#" -eq 1 && ("$1" == "-h" || "$1" == "--help") ]]; then - usage -fi - -# Check for mandatory variables -if [[ -z "$SUBPERMUTE" || -z "$called_fn_dir" || -z "$DIFF" ]]; then - echo "Mandatory variables not set in the configuration file. Exiting." - exit 1 -fi - -sub_permut() { - if { [ ! 
-f "$called_fn_dir/.${FUNCNAME[0]}" ] || [ "$DIFF" = true ]; } && [ "$SUBPERMUTE" = true ]; then - start_subfunc ${FUNCNAME[0]} "Running : Permutations Subdomain Enumeration" - if [ "$DEEP" = true ] || [ "$(cat subdomains/subdomains.txt | wc -l)" -le $DEEP_LIMIT ] ; then - if [ "$PERMUTATIONS_OPTION" = "gotator" ] ; then - [ -s "subdomains/subdomains.txt" ] && gotator -sub subdomains/subdomains.txt -perm $tools/permutations_list.txt $GOTATOR_FLAGS -silent 2>>"$LOGFILE" | head -c $PERMUTATIONS_LIMIT > .tmp/gotator1.txt - else - [ -s "subdomains/subdomains.txt" ] && ripgen -d subdomains/subdomains.txt -w $tools/permutations_list.txt 2>>"$LOGFILE" | head -c $PERMUTATIONS_LIMIT > .tmp/gotator1.txt - fi - elif [ "$(cat .tmp/subs_no_resolved.txt | wc -l)" -le $DEEP_LIMIT2 ]; then - if [ "$PERMUTATIONS_OPTION" = "gotator" ] ; then - [ -s ".tmp/subs_no_resolved.txt" ] && gotator -sub .tmp/subs_no_resolved.txt -perm $tools/permutations_list.txt $GOTATOR_FLAGS -silent 2>>"$LOGFILE" | head -c $PERMUTATIONS_LIMIT > .tmp/gotator1.txt - else - [ -s ".tmp/subs_no_resolved.txt" ] && ripgen -d .tmp/subs_no_resolved.txt -w $tools/permutations_list.txt 2>>"$LOGFILE" | head -c $PERMUTATIONS_LIMIT > .tmp/gotator1.txt - fi - else - end_subfunc "Skipping Permutations: Too Many Subdomains" ${FUNCNAME[0]} - return 1 - fi - if [ ! "$AXIOM" = true ]; then - resolvers_update_quick_local - [ -s ".tmp/gotator1.txt" ] && puredns resolve .tmp/gotator1.txt -w .tmp/permute1.txt -r $resolvers --resolvers-trusted $resolvers_trusted -l $PUREDNS_PUBLIC_LIMIT --rate-limit-trusted $PUREDNS_TRUSTED_LIMIT --wildcard-tests $PUREDNS_WILDCARDTEST_LIMIT --wildcard-batch $PUREDNS_WILDCARDBATCH_LIMIT 2>>"$LOGFILE" >/dev/null - else - resolvers_update_quick_axiom - [ -s ".tmp/gotator1.txt" ] && axiom-scan .tmp/gotator1.txt -m puredns-resolve -r /home/op/lists/resolvers.txt --resolvers-trusted /home/op/lists/resolvers_trusted.txt --wildcard-tests $PUREDNS_WILDCARDTEST_LIMIT --wildcard-batch $PUREDNS_WILDCARDBATCH_LIMIT -o .tmp/permute1.txt $AXIOM_EXTRA_ARGS 2>>"$LOGFILE" >/dev/null - fi - - if [ "$PERMUTATIONS_OPTION" = "gotator" ] ; then - [ -s ".tmp/permute1.txt" ] && gotator -sub .tmp/permute1.txt -perm $tools/permutations_list.txt $GOTATOR_FLAGS -silent 2>>"$LOGFILE" | head -c $PERMUTATIONS_LIMIT > .tmp/gotator2.txt - else - [ -s ".tmp/permute1.txt" ] && ripgen -d .tmp/permute1.txt -w $tools/permutations_list.txt 2>>"$LOGFILE" | head -c $PERMUTATIONS_LIMIT > .tmp/gotator2.txt - fi - - if [ ! 
"$AXIOM" = true ]; then - [ -s ".tmp/gotator2.txt" ] && puredns resolve .tmp/gotator2.txt -w .tmp/permute2.txt -r $resolvers --resolvers-trusted $resolvers_trusted -l $PUREDNS_PUBLIC_LIMIT --rate-limit-trusted $PUREDNS_TRUSTED_LIMIT --wildcard-tests $PUREDNS_WILDCARDTEST_LIMIT --wildcard-batch $PUREDNS_WILDCARDBATCH_LIMIT 2>>"$LOGFILE" >/dev/null - else - [ -s ".tmp/gotator2.txt" ] && axiom-scan .tmp/gotator2.txt -m puredns-resolve -r /home/op/lists/resolvers.txt --resolvers-trusted /home/op/lists/resolvers_trusted.txt --wildcard-tests $PUREDNS_WILDCARDTEST_LIMIT --wildcard-batch $PUREDNS_WILDCARDBATCH_LIMIT -o .tmp/permute2.txt $AXIOM_EXTRA_ARGS 2>>"$LOGFILE" >/dev/null - fi - cat .tmp/permute1.txt .tmp/permute2.txt 2>>"$LOGFILE" | anew -q .tmp/permute_subs.txt - - if [ -s ".tmp/permute_subs.txt" ]; then - [ -s "$outOfScope_file" ] && deleteOutScoped $outOfScope_file .tmp/permute_subs.txt - [[ "$INSCOPE" = true ]] && check_inscope .tmp/permute_subs.txt 2>>"$LOGFILE" >/dev/null - NUMOFLINES=$(cat .tmp/permute_subs.txt 2>>"$LOGFILE" | grep ".$domain$" | anew subdomains/subdomains.txt | sed '/^$/d' | wc -l) - else - NUMOFLINES=0 - fi - end_subfunc "${NUMOFLINES} new subs (permutations)" ${FUNCNAME[0]} - else - if [ "$SUBPERMUTE" = false ]; then - printf "\n${yellow} ${FUNCNAME[0]} skipped in this mode or defined in reconftw.cfg ${reset}\n" - else - printf "${yellow} ${FUNCNAME[0]} is already processed, to force executing ${FUNCNAME[0]} delete\n $called_fn_dir/.${FUNCNAME[0]} ${reset}\n\n" - fi - fi -} - -# Execute the main function -sub_permut \ No newline at end of file diff --git a/bin/rftw_sub_recbrute b/bin/rftw_sub_recbrute deleted file mode 100755 index ff79892b..00000000 --- a/bin/rftw_sub_recbrute +++ /dev/null @@ -1,130 +0,0 @@ -#!/bin/bash - -# Load environment variables from reconftw.cfg -# Load environment variables -if [ -f "reconftw.cfg" ]; then - source reconftw.cfg -else - echo "Error: reconftw.cfg not found!" - exit 1 -fi - -function print_help() { - echo "Usage: $0 [-d ] [-h]" - echo "" - echo "Options:" - echo " -d Specify target domain" - echo " -h Display this help message" - echo "" -} - -function start_subfunc() { - # This function is just a placeholder as the original function details were not provided. - echo "Starting function $1 with message: $2" -} - -function end_subfunc() { - # This function is just a placeholder as the original function details were not provided. - echo "Ending function $2 with result: $1" -} - -function check_inscope() { - # Placeholder for check_inscope function - echo "Checking inscope for file $1" -} - -function resolvers_update_quick_local() { - # Placeholder for resolvers_update_quick_local function - echo "Updating local resolvers" -} - -function resolvers_update_quick_axiom() { - # Placeholder for resolvers_update_quick_axiom function - echo "Updating axiom resolvers" -} - -domain="" -while getopts "d:h" option; do - case "${option}" in - d) domain=${OPTARG};; - h) print_help; exit 0;; - *) print_help; exit 1;; - esac -done - -if [[ -z $domain ]]; then - echo "Error: Domain is required!" - print_help - exit 1 -fi - -# Main function -sub_recursive_brute() { - if { [ ! -f "$called_fn_dir/.$FUNCNAME" ] || [ "$DIFF" = true ]; } && [ "$SUB_RECURSIVE_BRUTE" = true ] && [ -s "subdomains/subdomains.txt" ]; then - start_subfunc ${FUNCNAME[0]} "Running : Subdomains recursive search active" - if [[ $(cat subdomains/subdomains.txt | wc -l) -le $DEEP_LIMIT ]] ; then - [ ! 
-s ".tmp/subdomains_recurs_top.txt" ] && dsieve -if subdomains/subdomains.txt -f 3 -top $DEEP_RECURSIVE_PASSIVE > .tmp/subdomains_recurs_top.txt - ripgen -d .tmp/subdomains_recurs_top.txt -w $subs_wordlist > .tmp/brute_recursive_wordlist.txt - if [ ! "$AXIOM" = true ]; then - resolvers_update_quick_local - [ -s ".tmp/brute_recursive_wordlist.txt" ] && puredns resolve .tmp/brute_recursive_wordlist.txt -r $resolvers --resolvers-trusted $resolvers_trusted -l $PUREDNS_PUBLIC_LIMIT --rate-limit-trusted $PUREDNS_TRUSTED_LIMIT --wildcard-tests $PUREDNS_WILDCARDTEST_LIMIT --wildcard-batch $PUREDNS_WILDCARDBATCH_LIMIT -w .tmp/brute_recursive_result.txt 2>>"$LOGFILE" >/dev/null - else - resolvers_update_quick_axiom - [ -s ".tmp/brute_recursive_wordlist.txt" ] && axiom-scan .tmp/brute_recursive_wordlist.txt -m puredns-resolve -r /home/op/lists/resolvers.txt --resolvers-trusted /home/op/lists/resolvers_trusted.txt --wildcard-tests $PUREDNS_WILDCARDTEST_LIMIT --wildcard-batch $PUREDNS_WILDCARDBATCH_LIMIT -o .tmp/brute_recursive_result.txt $AXIOM_EXTRA_ARGS 2>>"$LOGFILE" >/dev/null - fi - [ -s ".tmp/brute_recursive_result.txt" ] && cat .tmp/brute_recursive_result.txt | anew -q .tmp/brute_recursive.txt - - if [ "$PERMUTATIONS_OPTION" = "gotator" ] ; then - [ -s ".tmp/brute_recursive.txt" ] && gotator -sub .tmp/brute_recursive.txt -perm $tools/permutations_list.txt $GOTATOR_FLAGS -silent 2>>"$LOGFILE" | head -c $PERMUTATIONS_LIMIT > .tmp/gotator1_recursive.txt - else - [ -s ".tmp/brute_recursive.txt" ] && ripgen -d .tmp/brute_recursive.txt -w $tools/permutations_list.txt 2>>"$LOGFILE" | head -c $PERMUTATIONS_LIMIT > .tmp/gotator1_recursive.txt - fi - - if [ ! "$AXIOM" = true ]; then - [ -s ".tmp/gotator1_recursive.txt" ] && puredns resolve .tmp/gotator1_recursive.txt -w .tmp/permute1_recursive.txt -r $resolvers --resolvers-trusted $resolvers_trusted -l $PUREDNS_PUBLIC_LIMIT --rate-limit-trusted $PUREDNS_TRUSTED_LIMIT --wildcard-tests $PUREDNS_WILDCARDTEST_LIMIT --wildcard-batch $PUREDNS_WILDCARDBATCH_LIMIT 2>>"$LOGFILE" >/dev/null - else - [ -s ".tmp/gotator1_recursive.txt" ] && axiom-scan .tmp/gotator1_recursive.txt -m puredns-resolve -r /home/op/lists/resolvers.txt --resolvers-trusted /home/op/lists/resolvers_trusted.txt --wildcard-tests $PUREDNS_WILDCARDTEST_LIMIT --wildcard-batch $PUREDNS_WILDCARDBATCH_LIMIT -o .tmp/permute1_recursive.txt $AXIOM_EXTRA_ARGS 2>>"$LOGFILE" >/dev/null - fi - - if [ "$PERMUTATIONS_OPTION" = "gotator" ] ; then - [ -s ".tmp/permute1_recursive.txt" ] && gotator -sub .tmp/permute1_recursive.txt -perm $tools/permutations_list.txt $GOTATOR_FLAGS -silent 2>>"$LOGFILE" | head -c $PERMUTATIONS_LIMIT > .tmp/gotator2_recursive.txt - else - [ -s ".tmp/permute1_recursive.txt" ] && ripgen -d .tmp/permute1_recursive.txt -w $tools/permutations_list.txt 2>>"$LOGFILE" | head -c $PERMUTATIONS_LIMIT > .tmp/gotator2_recursive.txt - fi - - if [ ! 
"$AXIOM" = true ]; then - [ -s ".tmp/gotator2_recursive.txt" ] && puredns resolve .tmp/gotator2_recursive.txt -w .tmp/permute2_recursive.txt -r $resolvers --resolvers-trusted $resolvers_trusted -l $PUREDNS_PUBLIC_LIMIT --rate-limit-trusted $PUREDNS_TRUSTED_LIMIT --wildcard-tests $PUREDNS_WILDCARDTEST_LIMIT --wildcard-batch $PUREDNS_WILDCARDBATCH_LIMIT 2>>"$LOGFILE" >/dev/null - else - [ -s ".tmp/gotator2_recursive.txt" ] && axiom-scan .tmp/gotator2_recursive.txt -m puredns-resolve -r /home/op/lists/resolvers.txt --resolvers-trusted /home/op/lists/resolvers_trusted.txt --wildcard-tests $PUREDNS_WILDCARDTEST_LIMIT --wildcard-batch $PUREDNS_WILDCARDBATCH_LIMIT -o .tmp/permute2_recursive.txt $AXIOM_EXTRA_ARGS 2>>"$LOGFILE" >/dev/null - fi - cat .tmp/permute1_recursive.txt .tmp/permute2_recursive.txt 2>>"$LOGFILE" | anew -q .tmp/permute_recursive.txt - else - end_subfunc "skipped in this mode or defined in reconftw.cfg" ${FUNCNAME[0]} - fi - if [ "$INSCOPE" = true ]; then - check_inscope .tmp/permute_recursive.txt 2>>"$LOGFILE" >/dev/null - check_inscope .tmp/brute_recursive.txt 2>>"$LOGFILE" >/dev/null - fi - - # Last validation - cat .tmp/permute_recursive.txt .tmp/brute_recursive.txt 2>>"$LOGFILE" | anew -q .tmp/brute_perm_recursive.txt - if [ ! "$AXIOM" = true ]; then - [ -s ".tmp/brute_recursive.txt" ] && puredns resolve .tmp/brute_perm_recursive.txt -w .tmp/brute_perm_recursive_final.txt -r $resolvers --resolvers-trusted $resolvers_trusted -l $PUREDNS_PUBLIC_LIMIT --rate-limit-trusted $PUREDNS_TRUSTED_LIMIT --wildcard-tests $PUREDNS_WILDCARDTEST_LIMIT --wildcard-batch $PUREDNS_WILDCARDBATCH_LIMIT 2>>"$LOGFILE" >/dev/null - else - [ -s ".tmp/brute_recursive.txt" ] && axiom-scan .tmp/brute_perm_recursive.txt -m puredns-resolve -r /home/op/lists/resolvers.txt --resolvers-trusted /home/op/lists/resolvers_trusted.txt --wildcard-tests $PUREDNS_WILDCARDTEST_LIMIT --wildcard-batch $PUREDNS_WILDCARDBATCH_LIMIT -o .tmp/brute_perm_recursive_final.txt $AXIOM_EXTRA_ARGS 2>>"$LOGFILE" >/dev/null - fi - - NUMOFLINES=$(cat .tmp/brute_perm_recursive_final.txt 2>>"$LOGFILE" | grep "\.$domain$\|^$domain$" | sed '/^$/d' | anew subdomains/subdomains.txt | wc -l) - end_subfunc "${NUMOFLINES} new subs (recursive active)" ${FUNCNAME[0]} - - else - if [ "$SUB_RECURSIVE_BRUTE" = false ]; then - printf "\n${yellow} ${FUNCNAME} skipped in this mode or defined in reconftw.cfg ${reset}\n" - else - printf "${yellow} ${FUNCNAME} is already processed, to force executing ${FUNCNAME} delete\n $called_fn_dir/.${FUNCNAME} ${reset}\n\n" - fi - fi -} - -# Call main function -sub_recursive_brute \ No newline at end of file diff --git a/bin/rftw_sub_recpassive b/bin/rftw_sub_recpassive deleted file mode 100755 index c2f10282..00000000 --- a/bin/rftw_sub_recpassive +++ /dev/null @@ -1,68 +0,0 @@ -#!/bin/bash - -# Load the configuration -CONFIG_FILE="reconftw.cfg" -if [[ -f "$CONFIG_FILE" ]]; then - source "$CONFIG_FILE" -else - echo "Configuration file '$CONFIG_FILE' not found. Exiting." - exit 1 -fi - -# Helper function to display usage/help menu -usage() { - echo "Usage: $0 -d domain" - echo - echo "Options:" - echo " -d, --domain domain Specify the domain to perform recursive passive search." - echo " -h, --help Display this help and exit." - echo - echo "Make sure to set up 'reconftw.cfg' with required environment variables." 
- exit 1 -} - -# Input validation and parsing -domain="" -while [[ "$#" -gt 0 ]]; do - case $1 in - -d|--domain) domain="$2"; shift ;; - -h|--help) usage ;; - *) echo "Unknown parameter: $1"; usage ;; - esac - shift -done - -if [[ -z "$domain" ]]; then - echo "Domain not specified!" - usage -fi - -sub_recursive_passive() { - if { [ ! -f "$called_fn_dir/.${FUNCNAME[0]}" ] || [ "$DIFF" = true ]; } && [ "$SUB_RECURSIVE_PASSIVE" = true ] && [ -s "subdomains/subdomains.txt" ]; then - start_subfunc ${FUNCNAME[0]} "Running : Subdomains recursive search passive" - # Passive recursive - [ -s "subdomains/subdomains.txt" ] && dsieve -if subdomains/subdomains.txt -f 3 -top $DEEP_RECURSIVE_PASSIVE > .tmp/subdomains_recurs_top.txt - if [ ! "$AXIOM" = true ]; then - resolvers_update_quick_local - [ -s ".tmp/subdomains_recurs_top.txt" ] && timeout -k 1m ${AMASS_ENUM_TIMEOUT}m amass enum -passive -df .tmp/subdomains_recurs_top.txt -nf subdomains/subdomains.txt -config $AMASS_CONFIG -timeout $AMASS_ENUM_TIMEOUT 2>>"$LOGFILE" | anew -q .tmp/passive_recursive.txt - [ -s ".tmp/passive_recursive.txt" ] && puredns resolve .tmp/passive_recursive.txt -w .tmp/passive_recurs_tmp.txt -r $resolvers --resolvers-trusted $resolvers_trusted -l $PUREDNS_PUBLIC_LIMIT --rate-limit-trusted $PUREDNS_TRUSTED_LIMIT --wildcard-tests $PUREDNS_WILDCARDTEST_LIMIT --wildcard-batch $PUREDNS_WILDCARDBATCH_LIMIT 2>>"$LOGFILE" &>/dev/null - else - resolvers_update_quick_axiom - [ -s ".tmp/subdomains_recurs_top.txt" ] && axiom-scan .tmp/subdomains_recurs_top.txt -m amass -passive -o .tmp/amass_prec.txt $AXIOM_EXTRA_ARGS 2>>"$LOGFILE" >/dev/null - [ -s ".tmp/amass_prec.txt" ] && cat .tmp/amass_prec.txt | anew -q .tmp/passive_recursive.txt - [ -s ".tmp/passive_recursive.txt" ] && axiom-scan .tmp/passive_recursive.txt -m puredns-resolve -r /home/op/lists/resolvers.txt --resolvers-trusted /home/op/lists/resolvers_trusted.txt --wildcard-tests $PUREDNS_WILDCARDTEST_LIMIT --wildcard-batch $PUREDNS_WILDCARDBATCH_LIMIT -o .tmp/passive_recurs_tmp.txt $AXIOM_EXTRA_ARGS 2>>"$LOGFILE" >/dev/null - fi - [[ "$INSCOPE" = true ]] && check_inscope .tmp/passive_recurs_tmp.txt 2>>"$LOGFILE" >/dev/null - NUMOFLINES=$(cat .tmp/passive_recurs_tmp.txt 2>>"$LOGFILE" | grep "\.$domain$\|^$domain$" | sed '/^$/d' | anew subdomains/subdomains.txt | wc -l) - end_subfunc "${NUMOFLINES} new subs (recursive)" ${FUNCNAME[0]} - else - if [ "$SUB_RECURSIVE_PASSIVE" = false ]; then - printf "\n${yellow} ${FUNCNAME[0]} skipped in this mode or defined in reconftw.cfg ${reset}\n" - else - printf "${yellow} ${FUNCNAME[0]} is already processed, to force executing ${FUNCNAME[0]} delete\n $called_fn_dir/.${FUNCNAME[0]} ${reset}\n\n" - fi - fi -} - -# Execute the main function -sub_recursive_passive \ No newline at end of file diff --git a/bin/rftw_sub_regex b/bin/rftw_sub_regex deleted file mode 100755 index 1258f47a..00000000 --- a/bin/rftw_sub_regex +++ /dev/null @@ -1,67 +0,0 @@ -#!/bin/bash - -# Configuration file loading -CONFIG_FILE="reconftw.cfg" -if [[ -f "$CONFIG_FILE" ]]; then - source "$CONFIG_FILE" -else - echo "Configuration file '$CONFIG_FILE' not found. Exiting." - exit 1 -fi - -# Helper function to display usage/help menu -usage() { - echo "Usage: $0 [OPTIONS]" - echo - echo "Options:" - echo " -h, --help Display this help and exit." - echo - echo "Make sure to set up 'reconftw.cfg' with required environment variables." 
- exit 1 -} - -# Input validation -if [[ "$#" -eq 1 && ("$1" == "-h" || "$1" == "--help") ]]; then - usage -fi - -# Check for mandatory variables -if [[ -z "$SUBREGEXPERMUTE" || -z "$called_fn_dir" || -z "$DIFF" ]]; then - echo "Mandatory variables not set in the configuration file. Exiting." - exit 1 -fi - -sub_regex_permut() { - if { [ ! -f "$called_fn_dir/.${FUNCNAME[0]}" ] || [ "$DIFF" = true ]; } && [ "$SUBREGEXPERMUTE" = true ]; then - start_subfunc ${FUNCNAME[0]} "Running : Permutations by regex analysis" - cd "$tools/regulator" || { echo "Failed to cd directory in ${FUNCNAME[0]} @ line ${LINENO}"; exit 1; } - python3 main.py -t $domain -f ${dir}/subdomains/subdomains.txt -o ${dir}/.tmp/${domain}.brute - cd "$dir" || { echo "Failed to cd to $dir in ${FUNCNAME[0]} @ line ${LINENO}"; exit 1; } - - if [ ! "$AXIOM" = true ]; then - resolvers_update_quick_local - [ -s ".tmp/${domain}.brute" ] && puredns resolve .tmp/${domain}.brute -w .tmp/regulator.txt -r $resolvers --resolvers-trusted $resolvers_trusted -l $PUREDNS_PUBLIC_LIMIT --rate-limit-trusted $PUREDNS_TRUSTED_LIMIT --wildcard-tests $PUREDNS_WILDCARDTEST_LIMIT --wildcard-batch $PUREDNS_WILDCARDBATCH_LIMIT 2>>"$LOGFILE" >/dev/null - else - resolvers_update_quick_axiom - [ -s ".tmp/${domain}.brute" ] && axiom-scan .tmp/${domain}.brute -m puredns-resolve -r /home/op/lists/resolvers.txt --resolvers-trusted /home/op/lists/resolvers_trusted.txt --wildcard-tests $PUREDNS_WILDCARDTEST_LIMIT --wildcard-batch $PUREDNS_WILDCARDBATCH_LIMIT -o .tmp/regulator.txt $AXIOM_EXTRA_ARGS 2>>"$LOGFILE" >/dev/null - fi - - if [ -s ".tmp/regulator.txt" ]; then - [ -s "$outOfScope_file" ] && deleteOutScoped $outOfScope_file .tmp/regulator.txt - [[ "$INSCOPE" = true ]] && check_inscope .tmp/regulator.txt 2>>"$LOGFILE" >/dev/null - NUMOFLINES=$(cat .tmp/regulator.txt 2>>"$LOGFILE" | grep ".$domain$" | anew subdomains/subdomains.txt | sed '/^$/d' | wc -l) - else - NUMOFLINES=0 - fi - end_subfunc "${NUMOFLINES} new subs (permutations by regex)" ${FUNCNAME[0]} - else - if [ "$SUBREGEXPERMUTE" = false ]; then - printf "\n${yellow} ${FUNCNAME[0]} skipped in this mode or defined in reconftw.cfg ${reset}\n" - else - printf "${yellow} ${FUNCNAME[0]} is already processed, to force executing ${FUNCNAME[0]} delete\n $called_fn_dir/.${FUNCNAME[0]} ${reset}\n\n" - fi - fi -} - -# Execute the main function -sub_regex_permut \ No newline at end of file diff --git a/bin/rftw_sub_s3buckets b/bin/rftw_sub_s3buckets deleted file mode 100755 index db6342ff..00000000 --- a/bin/rftw_sub_s3buckets +++ /dev/null @@ -1,79 +0,0 @@ -#!/bin/bash - -# Load environment variables from reconftw.cfg -# Load environment variables -if [ -f "reconftw.cfg" ]; then - source reconftw.cfg -else - echo "Error: reconftw.cfg not found!" - exit 1 -fi - -help_menu() { - echo "Usage: $0 [DOMAIN] [OPTIONS]" - echo "AWS S3 buckets and cloud assets checker." - echo - echo "Options:" - echo " -h, --help Display this help menu and exit" - echo " -f, --force Force the execution even if it was already processed" -} - -validate_inputs() { - if [[ -z "$domain" ]]; then - echo -e "${yellow} No domain provided! 
${reset}" - exit 1 - fi - - if [[ $domain =~ ^[0-9]+\.[0-9]+\.[0-9]+\.[0-9] ]]; then - echo -e "${yellow} Invalid domain format: IP address provided instead of a domain name ${reset}" - exit 1 - fi - - if [ "$S3BUCKETS" != true ] && [ "$FORCE_EXECUTION" != true ]; then - echo -e "${yellow} s3buckets skipped in this mode or defined in reconftw.cfg ${reset}" - exit 0 - fi -} - -run_s3buckets() { - start_func "s3buckets" "AWS S3 buckets search" - # S3Scanner - if [ ! "$AXIOM" = true ]; then - [ -s "subdomains/subdomains.txt" ] && s3scanner scan -f subdomains/subdomains.txt 2>>"$LOGFILE" | anew -q .tmp/s3buckets.txt - else - axiom-scan subdomains/subdomains.txt -m s3scanner -o .tmp/s3buckets_tmp.txt $AXIOM_EXTRA_ARGS 2>>"$LOGFILE" >/dev/null - [ -s ".tmp/s3buckets_tmp.txt" ] && cat .tmp/s3buckets_tmp.txt .tmp/s3buckets_tmp2.txt 2>>"$LOGFILE" | anew -q .tmp/s3buckets.txt && sed -i '/^$/d' .tmp/s3buckets.txt - fi - # Cloudenum - keyword=${domain%%.*} - python3 ~/Tools/cloud_enum/cloud_enum.py -k $keyword -qs -l .tmp/output_cloud.txt 2>>"$LOGFILE" >/dev/null - - NUMOFLINES1=$(cat .tmp/output_cloud.txt 2>>"$LOGFILE" | sed '/^#/d' | sed '/^$/d' | anew subdomains/cloud_assets.txt | wc -l) - if [ "$NUMOFLINES1" -gt 0 ]; then - notification "${NUMOFLINES1} new cloud assets found" info - fi - - NUMOFLINES2=$(cat .tmp/s3buckets.txt 2>>"$LOGFILE" | grep -aiv "not_exist" | grep -aiv "Warning:" | grep -aiv "invalid_name" | grep -aiv "^http" | awk 'NF' | anew subdomains/s3buckets.txt | sed '/^$/d' | wc -l) - if [ "$NUMOFLINES2" -gt 0 ]; then - notification "${NUMOFLINES2} new S3 buckets found" info - fi - - end_func "Results are saved in subdomains/s3buckets.txt and subdomains/cloud_assets.txt" "s3buckets" -} - -# Main -FORCE_EXECUTION=false -domain="$1" - -shift -while [[ "$#" -gt 0 ]]; do - case $1 in - -h|--help) help_menu; exit 0 ;; - -f|--force) FORCE_EXECUTION=true; shift ;; - *) echo "Unknown parameter passed: $1"; exit 1 ;; - esac - shift -done - -validate_inputs -run_s3buckets diff --git a/bin/rftw_sub_scraping b/bin/rftw_sub_scraping deleted file mode 100755 index b378b95f..00000000 --- a/bin/rftw_sub_scraping +++ /dev/null @@ -1,92 +0,0 @@ -#!/bin/bash - -# Load the environment variables from the configuration file -# Load environment variables -if [ -f "reconftw.cfg" ]; then - source reconftw.cfg -else - echo "Error: reconftw.cfg not found!" - exit 1 -fi - -# Function to display the help menu -help_menu() { - echo "Usage: sub_scraping.sh [OPTIONS]" - echo "" - echo "Options:" - echo " -h, --help Display this help menu" - echo " --deep Run in deep mode" - echo " --no-axiom Disable Axiom" - echo " --diff Enable DIFF mode" -} - -# Input validation and options parsing -while [[ "$#" -gt 0 ]]; do - case $1 in - -h|--help) help_menu; exit 0 ;; - --deep) DEEP=true; shift ;; - --no-axiom) AXIOM=false; shift ;; - --diff) DIFF=true; shift ;; - *) echo "Unknown parameter: $1"; help_menu; exit 1 ;; - esac -done - -# The main sub_scraping function -sub_scraping() { - if { [ ! -f "$called_fn_dir/.${FUNCNAME[0]}" ] || [ "$DIFF" = true ]; } && [ "$SUBSCRAPING" = true ]; then - start_subfunc ${FUNCNAME[0]} "Running : Source code scraping subdomain search" - touch .tmp/scrap_subs.txt - if [ -s "$dir/subdomains/subdomains.txt" ]; then - if [[ $(cat subdomains/subdomains.txt | wc -l) -le $DEEP_LIMIT ]] || [ "$DEEP" = true ] ; then - if [ ! 
"$AXIOM" = true ]; then - resolvers_update_quick_local - cat subdomains/subdomains.txt | httpx -follow-host-redirects -status-code -threads $HTTPX_THREADS -rl $HTTPX_RATELIMIT -timeout $HTTPX_TIMEOUT -silent -retries 2 -title -web-server -tech-detect -location -no-color -json -o .tmp/web_full_info1.txt 2>>"$LOGFILE" >/dev/null - [ -s ".tmp/web_full_info1.txt" ] && cat .tmp/web_full_info1.txt | jq -r 'try .url' 2>/dev/null | grep "$domain" | sed "s/*.//" | anew .tmp/probed_tmp_scrap.txt | unfurl -u domains 2>>"$LOGFILE" | anew -q .tmp/scrap_subs.txt - [ -s ".tmp/probed_tmp_scrap.txt" ] && cat .tmp/probed_tmp_scrap.txt | httpx -tls-grab -tls-probe -csp-probe -status-code -threads $HTTPX_THREADS -rl $HTTPX_RATELIMIT -timeout $HTTPX_TIMEOUT -silent -retries 2 -title -web-server -tech-detect -location -no-color -json -o .tmp/web_full_info2.txt 2>>"$LOGFILE" >/dev/null - [ -s ".tmp/web_full_info2.txt" ] && cat .tmp/web_full_info2.txt | jq -r 'try ."tls-grab"."dns_names"[],try .csp.domains[],try .url' 2>/dev/null | grep "$domain" | sed "s/*.//" | sort -u | httpx -silent | anew .tmp/probed_tmp_scrap.txt | unfurl -u domains 2>>"$LOGFILE" | anew -q .tmp/scrap_subs.txt - - if [ "$DEEP" = true ]; then - [ -s ".tmp/probed_tmp_scrap.txt" ] && katana -silent -list .tmp/probed_tmp_scrap.txt -jc -kf all -c $KATANA_THREADS -d 3 -fs rdn -o .tmp/katana.txt 2>>"$LOGFILE" >/dev/null - else - [ -s ".tmp/probed_tmp_scrap.txt" ] && katana -silent -list .tmp/probed_tmp_scrap.txt -jc -kf all -c $KATANA_THREADS -d 2 -fs rdn -o .tmp/katana.txt 2>>"$LOGFILE" >/dev/null - fi - else - resolvers_update_quick_axiom - axiom-scan subdomains/subdomains.txt -m httpx -follow-host-redirects -random-agent -status-code -threads $HTTPX_THREADS -rl $HTTPX_RATELIMIT -timeout $HTTPX_TIMEOUT -silent -retries 2 -title -web-server -tech-detect -location -no-color -json -o .tmp/web_full_info1.txt $AXIOM_EXTRA_ARGS 2>>"$LOGFILE" >/dev/null - [ -s ".tmp/web_full_info1.txt" ] && cat .tmp/web_full_info1.txt | jq -r 'try .url' 2>/dev/null | grep "$domain" | sed "s/*.//" | anew .tmp/probed_tmp_scrap.txt | unfurl -u domains 2>>"$LOGFILE" | anew -q .tmp/scrap_subs.txt - [ -s ".tmp/probed_tmp_scrap.txt" ] && axiom-scan .tmp/probed_tmp_scrap.txt -m httpx -tls-grab -tls-probe -csp-probe -random-agent -status-code -threads $HTTPX_THREADS -rl $HTTPX_RATELIMIT -timeout $HTTPX_TIMEOUT -silent -retries 2 -title -web-server -tech-detect -location -no-color -json -o .tmp/web_full_info2.txt $AXIOM_EXTRA_ARGS 2>>"$LOGFILE" >/dev/null - [ -s ".tmp/web_full_info2.txt" ] && cat .tmp/web_full_info2.txt | jq -r 'try ."tls-grab"."dns_names"[],try .csp.domains[],try .url' 2>/dev/null | grep "$domain" | sed "s/*.//" | sort -u | httpx -silent | anew .tmp/probed_tmp_scrap.txt | unfurl -u domains 2>>"$LOGFILE" | anew -q .tmp/scrap_subs.txt - if [ "$DEEP" = true ]; then - [ -s ".tmp/probed_tmp_scrap.txt" ] && axiom-scan .tmp/probed_tmp_scrap.txt -m katana -jc -kf all -d 3 -fs rdn -o .tmp/katana.txt $AXIOM_EXTRA_ARGS 2>>"$LOGFILE" >/dev/null - else - [ -s ".tmp/probed_tmp_scrap.txt" ] && axiom-scan .tmp/probed_tmp_scrap.txt -m katana -jc -kf all -d 2 -fs rdn -o .tmp/katana.txt $AXIOM_EXTRA_ARGS 2>>"$LOGFILE" >/dev/null - fi - fi - sed -i '/^.\{2048\}./d' .tmp/katana.txt - [ -s ".tmp/katana.txt" ] && cat .tmp/katana.txt | unfurl -u domains 2>>"$LOGFILE" | grep ".$domain$" | anew -q .tmp/scrap_subs.txt - [ -s ".tmp/scrap_subs.txt" ] && puredns resolve .tmp/scrap_subs.txt -w .tmp/scrap_subs_resolved.txt -r $resolvers --resolvers-trusted $resolvers_trusted -l 
$PUREDNS_PUBLIC_LIMIT --rate-limit-trusted $PUREDNS_TRUSTED_LIMIT --wildcard-tests $PUREDNS_WILDCARDTEST_LIMIT --wildcard-batch $PUREDNS_WILDCARDBATCH_LIMIT 2>>"$LOGFILE" >/dev/null - if [ "$INSCOPE" = true ]; then - check_inscope .tmp/scrap_subs_resolved.txt 2>>"$LOGFILE" >/dev/null - fi - NUMOFLINES=$(cat .tmp/scrap_subs_resolved.txt 2>>"$LOGFILE" | grep "\.$domain$\|^$domain$" | anew subdomains/subdomains.txt | tee .tmp/diff_scrap.txt | sed '/^$/d' | wc -l) - [ -s ".tmp/diff_scrap.txt" ] && cat .tmp/diff_scrap.txt | httpx -follow-host-redirects -random-agent -status-code -threads $HTTPX_THREADS -rl $HTTPX_RATELIMIT -timeout $HTTPX_TIMEOUT -silent -retries 2 -title -web-server -tech-detect -location -no-color -json -o .tmp/web_full_info3.txt 2>>"$LOGFILE" >/dev/null - [ -s ".tmp/web_full_info3.txt" ] && cat .tmp/web_full_info3.txt | jq -r 'try .url' 2>/dev/null | grep "$domain" | sed "s/*.//" | anew .tmp/probed_tmp_scrap.txt | unfurl -u domains 2>>"$LOGFILE" | anew -q .tmp/scrap_subs.txt - cat .tmp/web_full_info1.txt .tmp/web_full_info2.txt .tmp/web_full_info3.txt 2>>"$LOGFILE" | jq -s 'try .' | jq 'try unique_by(.input)' | jq 'try .[]' 2>>"$LOGFILE" > .tmp/web_full_info.txt - end_subfunc "${NUMOFLINES} new subs (code scraping)" ${FUNCNAME[0]} - else - end_subfunc "Skipping Subdomains Web Scraping: Too Many Subdomains" ${FUNCNAME[0]} - fi - else - end_subfunc "No subdomains to search (code scraping)" ${FUNCNAME[0]} - fi - else - if [ "$SUBSCRAPING" = false ]; then - printf "\n${yellow} ${FUNCNAME[0]} skipped in this mode or defined in reconftw.cfg ${reset}\n" - else - printf "${yellow} ${FUNCNAME[0]} is already processed, to force executing ${FUNCNAME[0]} delete\n $called_fn_dir/.${FUNCNAME[0]} ${reset}\n\n" - fi - fi -} - -# Execute the function -sub_scraping \ No newline at end of file diff --git a/bin/rftw_sub_takeover b/bin/rftw_sub_takeover deleted file mode 100755 index 1951ac83..00000000 --- a/bin/rftw_sub_takeover +++ /dev/null @@ -1,66 +0,0 @@ -#!/bin/bash - -# Load environment variables from reconftw.cfg -# Load environment variables -if [ -f "reconftw.cfg" ]; then - source reconftw.cfg -else - echo "Error: reconftw.cfg not found!" - exit 1 -fi - -help_menu() { - echo "Usage: $0 [OPTIONS]" - echo "Subdomain and DNS takeover checker." - echo - echo "Options:" - echo " -h, --help Display this help menu and exit" - echo " -f, --force Force the execution even if it was already processed" -} - -validate_inputs() { - if [ "$SUBTAKEOVER" != true ] && [ "$FORCE_EXECUTION" != true ]; then - echo -e "${yellow} subtakeover skipped in this mode or defined in reconftw.cfg ${reset}" - exit 0 - fi -} - -run_subtakeover() { - start_func ${FUNCNAME[0]} "Looking for possible subdomain and DNS takeover" - touch .tmp/tko.txt - [ ! -s ".tmp/webs_all.txt" ] && cat webs/webs.txt webs/webs_uncommon_ports.txt 2>/dev/null | anew -q .tmp/webs_all.txt - if [ ! 
"$AXIOM" = true ]; then - nuclei -update 2>>"$LOGFILE" >/dev/null - cat subdomains/subdomains.txt .tmp/webs_all.txt 2>/dev/null | nuclei -silent -nh -tags takeover -severity info,low,medium,high,critical -retries 3 -rl $NUCLEI_RATELIMIT -t ${NUCLEI_TEMPLATES_PATH} -o .tmp/tko.txt - else - cat subdomains/subdomains.txt .tmp/webs_all.txt 2>>"$LOGFILE" | sed '/^$/d' | anew -q .tmp/webs_subs.txt - [ -s ".tmp/webs_subs.txt" ] && axiom-scan .tmp/webs_subs.txt -m nuclei --nuclei-templates ${NUCLEI_TEMPLATES_PATH} -tags takeover -nh -severity info,low,medium,high,critical -retries 3 -rl $NUCLEI_RATELIMIT -t ${NUCLEI_TEMPLATES_PATH} -o .tmp/tko.txt $AXIOM_EXTRA_ARGS 2>>"$LOGFILE" >/dev/null - fi - - # DNS_TAKEOVER - cat .tmp/subs_no_resolved.txt .tmp/subdomains_dns.txt .tmp/scrap_subs.txt .tmp/analytics_subs_clean.txt .tmp/passive_recursive.txt 2>/dev/null | anew -q .tmp/subs_dns_tko.txt - cat .tmp/subs_dns_tko.txt 2>/dev/null | dnstake -c $DNSTAKE_THREADS -s 2>>"$LOGFILE" | sed '/^$/d' | anew -q .tmp/tko.txt - - sed -i '/^$/d' .tmp/tko.txt - - NUMOFLINES=$(cat .tmp/tko.txt 2>>"$LOGFILE" | anew webs/takeover.txt | sed '/^$/d' | wc -l) - if [ "$NUMOFLINES" -gt 0 ]; then - notification "${NUMOFLINES} new possible takeovers found" info - fi - end_func "Results are saved in $domain/webs/takeover.txt" ${FUNCNAME[0]} -} - -# Main -FORCE_EXECUTION=false - -while [[ "$#" -gt 0 ]]; do - case $1 in - -h|--help) help_menu; exit 0 ;; - -f|--force) FORCE_EXECUTION=true; shift ;; - *) echo "Unknown parameter passed: $1"; exit 1 ;; - esac - shift -done - -validate_inputs -run_subtakeover \ No newline at end of file diff --git a/bin/rftw_sub_vhosts b/bin/rftw_sub_vhosts deleted file mode 100755 index d6994fb6..00000000 --- a/bin/rftw_sub_vhosts +++ /dev/null @@ -1,69 +0,0 @@ -#!/bin/bash - -# Load environment variables from reconftw.cfg -# Load environment variables -if [ -f "reconftw.cfg" ]; then - source reconftw.cfg -else - echo "Error: reconftw.cfg not found!" - exit 1 -fi - -help_menu() { - echo "Usage: $0 [DOMAIN] [OPTIONS]" - echo "Virtual Hosts discovery tool for the specified domain." - echo - echo "Options:" - echo " -h, --help Display this help menu and exit" - echo " -f, --force Force the execution even if it was already processed" -} - -validate_inputs() { - if [[ -z "$domain" ]]; then - echo -e "${yellow} No domain provided! ${reset}" - exit 1 - fi - - if [ "$VIRTUALHOSTS" != true ] && [ "$FORCE_EXECUTION" != true ]; then - echo -e "${yellow} virtualhosts skipped in this mode or defined in reconftw.cfg ${reset}" - exit 0 - fi -} - -run_virtualhosts() { - start_func "virtualhosts" "Virtual Hosts discovery" - - [ ! 
-s ".tmp/webs_all.txt" ] && cat webs/webs.txt webs/webs_uncommon_ports.txt 2>/dev/null | anew -q .tmp/webs_all.txt - - if [ -s ".tmp/webs_all.txt" ]; then - mkdir -p $dir/virtualhosts $dir/.tmp/virtualhosts - interlace -tL .tmp/webs_all.txt -threads ${INTERLACE_THREADS} -c "ffuf -ac -t ${FFUF_THREADS} -rate ${FFUF_RATELIMIT} -H \"${HEADER}\" -H \"Host: FUZZ._cleantarget_\" -w ${fuzz_wordlist} -maxtime ${FFUF_MAXTIME} -u _target_ -of json -o _output_/_cleantarget_.json" -o $dir/.tmp/virtualhosts 2>>"$LOGFILE" >/dev/null - - for sub in $(cat .tmp/webs_all.txt); do - sub_out=$(echo $sub | sed -e 's|^[^/]*//||' -e 's|/.*$||') - [ -s "$dir/.tmp/virtualhosts/${sub_out}.json" ] && cat $dir/.tmp/virtualhosts/${sub_out}.json | jq -r 'try .results[] | "\(.status) \(.length) \(.url)"' | sort | anew -q $dir/virtualhosts/${sub_out}.txt - done - - find $dir/virtualhosts/ -type f -iname "*.txt" -exec cat {} + 2>>"$LOGFILE" | anew -q $dir/virtualhosts/virtualhosts_full.txt - end_func "Results are saved in $domain/virtualhosts/*subdomain*.txt" "virtualhosts" - else - end_func "No $domain/web/webs.txts file found, virtualhosts skipped " "virtualhosts" - fi -} - -# Main -FORCE_EXECUTION=false -domain="$1" - -shift -while [[ "$#" -gt 0 ]]; do - case $1 in - -h|--help) help_menu; exit 0 ;; - -f|--force) FORCE_EXECUTION=true; shift ;; - *) echo "Unknown parameter passed: $1"; exit 1 ;; - esac - shift -done - -validate_inputs -run_virtualhosts diff --git a/bin/rftw_sub_zonetransfer b/bin/rftw_sub_zonetransfer deleted file mode 100755 index e6b5a5a6..00000000 --- a/bin/rftw_sub_zonetransfer +++ /dev/null @@ -1,64 +0,0 @@ -#!/bin/bash - -# Load environment variables from reconftw.cfg -# Load environment variables -if [ -f "reconftw.cfg" ]; then - source reconftw.cfg -else - echo "Error: reconftw.cfg not found!" - exit 1 -fi - -help_menu() { - echo "Usage: $0 [DOMAIN] [OPTIONS]" - echo "Zone transfer checker." - echo - echo "Options:" - echo " -h, --help Display this help menu and exit" - echo " -f, --force Force the execution even if it was already processed" -} - -validate_inputs() { - if [[ -z "$domain" ]]; then - echo -e "${yellow} No domain provided! ${reset}" - exit 1 - fi - - if [[ $domain =~ ^[0-9]+\.[0-9]+\.[0-9]+\.[0-9] ]]; then - echo -e "${yellow} Invalid domain format: IP address provided instead of a domain name ${reset}" - exit 1 - fi - - if [ "$ZONETRANSFER" != true ] && [ "$FORCE_EXECUTION" != true ]; then - echo -e "${yellow} zonetransfer skipped in this mode or defined in reconftw.cfg ${reset}" - exit 0 - fi -} - -run_zonetransfer() { - start_func "zonetransfer" "Zone transfer check" - for ns in $(dig +short ns "$domain"); do - dig axfr "$domain" @"$ns" >> subdomains/zonetransfer.txt - done - if [ -s "subdomains/zonetransfer.txt" ] && ! grep -q "Transfer failed" subdomains/zonetransfer.txt ; then - notification "Zone transfer found on ${domain}!" 
info - fi - end_func "Results are saved in $domain/subdomains/zonetransfer.txt" "zonetransfer" -} - -# Main -FORCE_EXECUTION=false -domain="$1" - -shift -while [[ "$#" -gt 0 ]]; do - case $1 in - -h|--help) help_menu; exit 0 ;; - -f|--force) FORCE_EXECUTION=true; shift ;; - *) echo "Unknown parameter passed: $1"; exit 1 ;; - esac - shift -done - -validate_inputs -run_zonetransfer diff --git a/bin/rftw_uti_transfer b/bin/rftw_uti_transfer deleted file mode 100755 index 4170bbe6..00000000 --- a/bin/rftw_uti_transfer +++ /dev/null @@ -1,62 +0,0 @@ -#!/bin/bash - -# Load environment variables from reconftw.cfg -# Load environment variables -if [ -f "reconftw.cfg" ]; then - source reconftw.cfg -else - echo "Error: reconftw.cfg not found!" - exit 1 -fi - -# Display help information -function display_help() { - echo "Usage: $0 <file|directory>" - echo " ... | $0 <file_name>" - echo - echo "Uploads a specified file or directory to https://transfer.sh/" - echo - echo "Options:" - echo " -h, --help Display this help and exit" - echo -} - -# Uploads a file or directory to transfer.sh -function transfer() { - if [ $# -eq 0 ]; then - echo "Error: No arguments specified." - display_help - exit 1 - fi - - if tty -s; then - local file="$1" - local file_name=$(basename "$file") - - if [ ! -e "$file" ]; then - echo "Error: $file: No such file or directory" >&2 - exit 1 - fi - - if [ -d "$file" ]; then - # If the given input is a directory, zip and transfer it. - file_name="$file_name.zip" - (cd "$file" && zip -r -q - .) | curl --progress-bar --upload-file "-" "https://transfer.sh/$file_name" | tee /dev/null - else - # If the given input is a file, transfer it. - cat "$file" | curl --progress-bar --upload-file "-" "https://transfer.sh/$file_name" | tee /dev/null - fi - else - # Transfer data from standard input. - local file_name=$1 - curl --progress-bar --upload-file "-" "https://transfer.sh/$file_name" | tee /dev/null - fi -} - -# Main script execution -if [[ "$1" == "-h" || "$1" == "--help" ]]; then - display_help - exit 0 -fi - -transfer "$@" diff --git a/bin/rftw_util_ascii b/bin/rftw_util_ascii deleted file mode 100755 index 4ffd1156..00000000 --- a/bin/rftw_util_ascii +++ /dev/null @@ -1,57 +0,0 @@ -#!/bin/bash - -# Load environment variables from reconftw.cfg -# Load environment variables -if [ -f "reconftw.cfg" ]; then - source reconftw.cfg -else - echo "Error: reconftw.cfg not found!" - exit 1 -fi - -# Colors for console output -green="\033[1;32m" -red="\033[1;31m" -reset="\033[0m" - -# Help menu function -function display_help() { - echo "Usage: $0 [FILE]" - echo - echo "Checks if the provided file contains ASCII text." - echo "Options:" - echo " -h, --help Display this help and exit" - echo -} - -# Validate arguments function -function validate_args() { - if [[ -z "$1" ]]; then - echo -e "${red}Error: FILE is required.${reset}" - display_help - exit 1 - fi - if [[ ! 
-f "$1" ]]; then - echo -e "${red}Error: $1 is not a file.${reset}" - exit 2 - fi -} - -# Check ASCII text function -function is_ascii_text() { - local filepath="$1" - if [[ $(file "$filepath" | grep -o 'ASCII text$') == "ASCII text" ]]; then - echo -e "${green}$filepath contains ASCII text.${reset}" - else - echo -e "${red}$filepath does not contain ASCII text.${reset}" - fi -} - -# Main script execution -if [[ "$1" == "-h" || "$1" == "--help" ]]; then - display_help - exit 0 -fi - -validate_args "$1" -is_ascii_text "$1" diff --git a/bin/rftw_util_axiomoff b/bin/rftw_util_axiomoff deleted file mode 100755 index e93754ab..00000000 --- a/bin/rftw_util_axiomoff +++ /dev/null @@ -1,60 +0,0 @@ -#!/bin/bash - -# Load environment variables from reconftw.cfg -# Load environment variables -if [ -f "reconftw.cfg" ]; then - source reconftw.cfg -else - echo "Error: reconftw.cfg not found!" - exit 1 -fi - -# Help menu function -function display_help() { - echo "Usage: $0 [MODE]" - echo - echo "Shut down the Axiom fleet based on configurations in reconftw.cfg." - echo - echo "MODE: Specifies the mode for the shutdown. (e.g., 'subs_menu', 'passive', 'all')" - echo - echo "Options:" - echo " -h, --help Display this help and exit" - echo -} - -# Main Axiom shutdown function -function axiom_shutdown() { - local mode="$1" - if [ "$AXIOM_FLEET_LAUNCH" = true ] && [ "$AXIOM_FLEET_SHUTDOWN" = true ] && [ -n "$AXIOM_FLEET_NAME" ]; then - # Check mode conditions - if [[ "$mode" == "subs_menu" || "$mode" == "passive" || "$mode" == "all" ]]; then - # You might want to add a logging function here for "notification" - # notification "Automatic Axiom fleet shutdown is not enabled in this mode" info - echo "Automatic Axiom fleet shutdown is not enabled in mode: $mode" - return - fi - - # Remove the Axiom fleet - eval axiom-rm -f "$AXIOM_FLEET_NAME*" - echo "Axiom fleet $AXIOM_FLEET_NAME shutdown" | $NOTIFY - # Another potential place for a logging function - # notification "Axiom fleet $AXIOM_FLEET_NAME shutdown" info - echo "Axiom fleet $AXIOM_FLEET_NAME shutdown" - else - echo "Axiom fleet conditions not met for shutdown." - fi -} - -# Check arguments -if [[ "$1" == "-h" || "$1" == "--help" ]]; then - display_help - exit 0 -else - # Ensure mode is specified - if [ -z "$1" ]; then - echo "Error: Mode not specified." - display_help - exit 1 - fi - axiom_shutdown "$1" -fi diff --git a/bin/rftw_util_axiomon b/bin/rftw_util_axiomon deleted file mode 100755 index c86754f3..00000000 --- a/bin/rftw_util_axiomon +++ /dev/null @@ -1,64 +0,0 @@ -#!/bin/bash - -# Load environment variables from reconftw.cfg -# Load environment variables -if [ -f "reconftw.cfg" ]; then - source reconftw.cfg -else - echo "Error: reconftw.cfg not found!" - exit 1 -fi - -# Help menu -function display_help() { - echo "Usage: $0" - echo - echo "Launch an Axiom fleet based on configurations in reconftw.cfg." 
- echo - echo "Options:" - echo " -h, --help Display this help and exit" - echo -} - -# Main Axiom launch function -function axiom_launch() { - if [ "$AXIOM_FLEET_LAUNCH" = true ] && [ -n "$AXIOM_FLEET_NAME" ] && [ -n "$AXIOM_FLEET_COUNT" ]; then - # Additional logging function can be used here, example: - # start_func ${FUNCNAME[0]} "Launching our Axiom fleet" - - # Ensure the linode-cli tool is up-to-date - python3 -m pip install --upgrade linode-cli 2>>"$LOGFILE" >/dev/null - - # Check the current number of nodes - NUMOFNODES=$(timeout 30 axiom-ls | grep -c "$AXIOM_FLEET_NAME") - - if [[ $NUMOFNODES -ge $AXIOM_FLEET_COUNT ]]; then - axiom-select "$AXIOM_FLEET_NAME*" - # Logging/notification example: - # end_func "Axiom fleet $AXIOM_FLEET_NAME already has $NUMOFNODES instances" - else - [ $NUMOFNODES -eq 0 ] && startcount=$AXIOM_FLEET_COUNT || startcount=$((AXIOM_FLEET_COUNT-NUMOFNODES)) - - AXIOM_ARGS=" -i $startcount" - # Execute the axiom fleet command - axiom-fleet ${AXIOM_FLEET_NAME} ${AXIOM_ARGS} - - axiom-select "$AXIOM_FLEET_NAME*" - - [ -n "$AXIOM_POST_START" ] && eval "$AXIOM_POST_START" 2>>"$LOGFILE" >/dev/null - - NUMOFNODES=$(timeout 30 axiom-ls | grep -c "$AXIOM_FLEET_NAME") - # Notification/logging example: - # echo "Axiom fleet $AXIOM_FLEET_NAME launched w/ $NUMOFNODES instances" | $NOTIFY - # end_func "Axiom fleet $AXIOM_FLEET_NAME launched w/ $NUMOFNODES instances" - fi - fi -} - -# Main execution starts here -if [[ "$1" == "-h" || "$1" == "--help" ]]; then - display_help - exit 0 -else - axiom_launch -fi diff --git a/bin/rftw_util_axiomsel b/bin/rftw_util_axiomsel deleted file mode 100755 index 7b7f9f13..00000000 --- a/bin/rftw_util_axiomsel +++ /dev/null @@ -1,50 +0,0 @@ -#!/bin/bash - -# Load environment variables from reconftw.cfg -# Load environment variables -if [ -f "reconftw.cfg" ]; then - source reconftw.cfg -else - echo "Error: reconftw.cfg not found!" - exit 1 -fi - -# Help menu function -function display_help() { - echo "Usage: $0" - echo - echo "Check for running Axiom instances and whether any instances are selected." - echo - echo "Options:" - echo " -h, --help Display this help and exit" - echo -} - -# Main Axiom check function -function axiom_selected() { - # Check if there are any running axiom instances - if [[ ! $(axiom-ls | tail -n +2 | sed '$ d' | wc -l) -gt 0 ]]; then - # You might want to add a logging function here for "notification" - # notification "\n\n${bred} No axiom instances running ${reset}\n\n" error - echo -e "\n\n${bred} No axiom instances running ${reset}\n\n" - exit 1 - fi - - # Check if there are any selected axiom instances - if [[ ! $(cat ~/.axiom/selected.conf | sed '/^\s*$/d' | wc -l) -gt 0 ]]; then - # Again, consider adding a logging function here for "notification" - # notification "\n\n${bred} No axiom instances selected ${reset}\n\n" error - echo -e "\n\n${bred} No axiom instances selected ${reset}\n\n" - exit 1 - fi - - echo "Axiom instances are running and selected." -} - -# Check arguments -if [[ "$1" == "-h" || "$1" == "--help" ]]; then - display_help - exit 0 -else - axiom_selected -fi diff --git a/bin/rftw_util_deleteoos b/bin/rftw_util_deleteoos deleted file mode 100755 index 31a2d046..00000000 --- a/bin/rftw_util_deleteoos +++ /dev/null @@ -1,72 +0,0 @@ -#!/bin/bash - -# Load environment variables from reconftw.cfg -# Load environment variables -if [ -f "reconftw.cfg" ]; then - source reconftw.cfg -else - echo "Error: reconftw.cfg not found!" 
- exit 1 -fi - -# Colors -yellow="\033[1;33m" -red="\033[1;31m" -reset="\033[0m" - -# Help menu function -function display_help() { - echo "Usage: $0 [OPTIONS] [SCOPE_FILE] [TARGET_FILE]" - echo - echo "Remove out-of-scope items from a target file based on a scope file." - echo "Options:" - echo " -h, --help Display this help and exit" - echo -} - -# Validate arguments function -function validate_args() { - if [[ -z "$1" || -z "$2" ]]; then - echo -e "${red}Error: Both SCOPE_FILE and TARGET_FILE are required.${reset}" - display_help - exit 1 - fi - - if [[ ! -f "$1" ]]; then - echo -e "${red}Error: SCOPE_FILE '$1' does not exist.${reset}" - exit 1 - fi - - if [[ ! -f "$2" ]]; then - echo -e "${red}Error: TARGET_FILE '$2' does not exist.${reset}" - exit 1 - fi -} - -# Delete out-of-scope items function -function delete_out_scoped() { - local scope_file="$1" - local target_file="$2" - - if [ -s "$scope_file" ]; then - while IFS= read -r outscoped - do - if grep -q "^[*]" <<< "$outscoped"; then - outscoped="${outscoped:1}" - sed -i "/$outscoped$/d" "$target_file" - else - sed -i "/$outscoped/d" "$target_file" - fi - done < "$scope_file" - fi -} - -# Main script execution -if [[ "$1" == "-h" || "$1" == "--help" ]]; then - display_help - exit 0 -fi - -validate_args "$1" "$2" -delete_out_scoped "$1" "$2" -echo -e "${yellow}Processed $2 based on out-of-scope items from $1.${reset}" diff --git a/bin/rftw_util_gettime b/bin/rftw_util_gettime deleted file mode 100755 index 67cc782e..00000000 --- a/bin/rftw_util_gettime +++ /dev/null @@ -1,65 +0,0 @@ -#!/bin/bash - -# Load environment variables from reconftw.cfg -# Load environment variables -if [ -f "reconftw.cfg" ]; then - source reconftw.cfg -else - echo "Error: reconftw.cfg not found!" - exit 1 -fi - -# Colors -yellow="\033[1;33m" -red="\033[1;31m" -reset="\033[0m" - -# Help menu function -function display_help() { - echo "Usage: $0 [START_TIME] [END_TIME]" - echo - echo "Calculate the elapsed time between two time values in seconds." - echo "Options:" - echo " -h, --help Display this help and exit" - echo -} - -# Validate arguments function -function validate_args() { - if [[ -z "$1" || -z "$2" ]]; then - echo -e "${red}Error: Both START_TIME and END_TIME are required.${reset}" - display_help - exit 1 - fi - - if ! [[ "$1" =~ ^[0-9]+$ && "$2" =~ ^[0-9]+$ ]]; then - echo -e "${red}Error: Both START_TIME and END_TIME should be numeric.${reset}" - exit 1 - fi -} - -# Get elapsed time function -function get_elapsed_time() { - local start_time="$1" - local end_time="$2" - runtime="" - local T=$((end_time - start_time)) - local D=$((T/60/60/24)) - local H=$((T/60/60%24)) - local M=$((T/60%60)) - local S=$((T%60)) - (( D > 0 )) && runtime="$runtime$D days, " - (( H > 0 )) && runtime="$runtime$H hours, " - (( M > 0 )) && runtime="$runtime$M minutes, " - runtime="$runtime$S seconds." - echo -e "${yellow}$runtime${reset}" -} - -# Main script execution -if [[ "$1" == "-h" || "$1" == "--help" ]]; then - display_help - exit 0 -fi - -validate_args "$1" "$2" -get_elapsed_time "$1" "$2" diff --git a/bin/rftw_util_ipcidr b/bin/rftw_util_ipcidr deleted file mode 100755 index 3f6f4770..00000000 --- a/bin/rftw_util_ipcidr +++ /dev/null @@ -1,61 +0,0 @@ -#!/bin/bash - -# Load environment variables from reconftw.cfg -# Load environment variables -if [ -f "reconftw.cfg" ]; then - source reconftw.cfg -else - echo "Error: reconftw.cfg not found!" 
- exit 1 -fi - -# Help menu -function display_help() { - echo "Usage: $0 [IP CIDR] [OPTIONAL FILE]" - echo - echo "Processes an IP CIDR, maps it, and optionally saves the output to a file." - echo - echo "Options:" - echo " -h, --help Display this help and exit" - echo -} - -function process_ip_cidr() { - local IP_CIDR=$1 - local OUTFILE=$2 - local IP_CIDR_REGEX='(((25[0-5]|2[0-4][0-9]|1?[0-9][0-9]?)\.){3}(25[0-5]|2[0-4][0-9]|1?[0-9][0-9]?))(\/([8-9]|[1-2][0-9]|3[0-2]))([^0-9.]|$)|(((25[0-5]|2[0-4][0-9]|1?[0-9][0-9]?)\.){3}(25[0-5]|2[0-4][0-9]|1?[0-9][0-9]?)$)' - - if [[ $IP_CIDR =~ ^$IP_CIDR_REGEX ]]; then - echo $IP_CIDR | mapcidr -silent | anew -q target_reconftw_ipcidr.txt - - if [ -s "./target_reconftw_ipcidr.txt" ]; then - [ "$REVERSE_IP" = true ] && cat ./target_reconftw_ipcidr.txt | hakip2host | cut -d' ' -f 3 | unfurl -u domains 2>/dev/null | sed -e 's/*\.//' -e 's/\.$//' -e '/\./!d' | anew -q ./target_reconftw_ipcidr.txt - - if [[ $(cat ./target_reconftw_ipcidr.txt | wc -l) -eq 1 ]]; then - domain=$(cat ./target_reconftw_ipcidr.txt) - elif [[ $(cat ./target_reconftw_ipcidr.txt | wc -l) -gt 1 ]]; then - unset domain - list=${PWD}/target_reconftw_ipcidr.txt - fi - fi - - if [ -n "$OUTFILE" ]; then - cat $list | anew -q $OUTFILE - sed -i '/\/[0-9]*$/d' $OUTFILE - fi - else - echo "Invalid IP CIDR format provided. Please check the input." - exit 1 - fi -} - -# Main execution starts here -if [[ "$1" == "-h" || "$1" == "--help" ]]; then - display_help - exit 0 -elif [[ -z "$1" ]]; then - echo "Error: No IP CIDR provided. Use -h or --help for more information." - exit 1 -else - process_ip_cidr "$1" "$2" -fi diff --git a/bin/rftw_util_notification b/bin/rftw_util_notification deleted file mode 100755 index cc91501d..00000000 --- a/bin/rftw_util_notification +++ /dev/null @@ -1,74 +0,0 @@ -#!/bin/bash - -# Load environment variables from reconftw.cfg -# Load environment variables -if [ -f "reconftw.cfg" ]; then - source reconftw.cfg -else - echo "Error: reconftw.cfg not found!" - exit 1 -fi - -# Colors for console output -bblue="\033[1;34m" -yellow="\033[1;33m" -reset="\033[0m" -bred="\033[1;31m" -bgreen="\033[1;32m" - -# Help menu function -function display_help() { - echo "Usage: $0 <message> <type>" - echo - echo "Sends notification based on the given message and type." - echo "Types:" - echo " info - Informational message (Blue)" - echo " warn - Warning message (Yellow)" - echo " error - Error message (Red)" - echo " good - Positive message (Green)" - echo - echo "Options:" - echo " -h, --help Display this help and exit" - echo -} - -# Send notification function -function notification() { - local message=$1 - local type=$2 - local text="" - - if [[ -z "$message" || -z "$type" ]]; then - echo -e "${bred}Error: Both message and type are required.${reset}" - exit 2 - fi - - case $type in - info) - text="\n${bblue} ${message} ${reset}" - ;; - warn) - text="\n${yellow} ${message} ${reset}" - ;; - error) - text="\n${bred} ${message} ${reset}" - ;; - good) - text="\n${bgreen} ${message} ${reset}" - ;; - *) - echo -e "${bred}Error: Invalid type. 
Valid types are: info, warn, error, good.${reset}" - exit 2 - ;; - esac - - printf "${text}\n" && printf "${text} - ${domain}\n" | $NOTIFY -} - -# Main script execution -if [[ "$1" == "-h" || "$1" == "--help" ]]; then - display_help - exit 0 -fi - -notification "$1" "$2" diff --git a/bin/rftw_util_output b/bin/rftw_util_output deleted file mode 100755 index d783ff03..00000000 --- a/bin/rftw_util_output +++ /dev/null @@ -1,56 +0,0 @@ -#!/bin/bash - -# Load environment variables from reconftw.cfg -# Load environment variables -if [ -f "reconftw.cfg" ]; then - source reconftw.cfg -else - echo "Error: reconftw.cfg not found!" - exit 1 -fi - -# Colors for console output -green="\033[1;32m" -red="\033[1;31m" -reset="\033[0m" - -# Help menu function -function display_help() { - echo "Usage: $0" - echo - echo "Copies content of directory specified in 'dir' variable to 'dir_output' variable." - echo "If 'dir' is different from 'dir_output', then 'dir' will be deleted." - echo "Both 'dir' and 'dir_output' are sourced from the reconftw.cfg configuration file." - echo "Options:" - echo " -h, --help Display this help and exit" - echo -} - -# Validate directory existence function -function validate_dirs() { - if [[ ! -d "$dir" ]]; then - echo -e "${red}Error: '$dir' directory does not exist.${reset}" - exit 2 - fi -} - -# Output function -function output() { - mkdir -p "$dir_output" - cp -r "$dir" "$dir_output" - if [[ "$(dirname "$dir")" != "$dir_output" ]]; then - rm -rf "$dir" - echo -e "${green}Content copied and original directory deleted.${reset}" - else - echo -e "${green}Content copied.${reset}" - fi -} - -# Main script execution -if [[ "$1" == "-h" || "$1" == "--help" ]]; then - display_help - exit 0 -fi - -validate_dirs -output diff --git a/bin/rftw_util_removebig b/bin/rftw_util_removebig deleted file mode 100755 index 3c806ede..00000000 --- a/bin/rftw_util_removebig +++ /dev/null @@ -1,53 +0,0 @@ -#!/bin/bash - -# Load environment variables from reconftw.cfg -# Load environment variables -if [ -f "reconftw.cfg" ]; then - source reconftw.cfg -else - echo "Error: reconftw.cfg not found!" - exit 1 -fi - -# Colors for console output -green="\033[1;32m" -red="\033[1;31m" -reset="\033[0m" - -# Help menu function -function display_help() { - echo "Usage: $0" - echo - echo "Removes specific files and any files larger than 200MB from .tmp directory." - echo "Logs errors to 'LOGFILE' specified in reconftw.cfg." - echo "Options:" - echo " -h, --help Display this help and exit" - echo -} - -# Validate logfile existence function -function validate_logfile() { - if [[ ! 
-w "$(dirname "$LOGFILE")" ]]; then - echo -e "${red}Error: Unable to write to LOGFILE directory: $(dirname "$LOGFILE").${reset}" - exit 2 - fi -} - -# Remove big files function -function remove_big_files() { - rm -rf .tmp/gotator*.txt 2>>"$LOGFILE" - rm -rf .tmp/brute_recursive_wordlist.txt 2>>"$LOGFILE" - rm -rf .tmp/subs_dns_tko.txt 2>>"$LOGFILE" - rm -rf .tmp/subs_no_resolved.txt .tmp/subdomains_dns.txt .tmp/brute_dns_tko.txt .tmp/scrap_subs.txt .tmp/analytics_subs_clean.txt .tmp/gotator1.txt .tmp/gotator2.txt .tmp/passive_recursive.txt .tmp/brute_recursive_wordlist.txt .tmp/gotator1_recursive.txt .tmp/gotator2_recursive.txt 2>>"$LOGFILE" - find .tmp -type f -size +200M -exec rm -f {} + 2>>"$LOGFILE" - echo -e "${green}Specified files and large files in .tmp directory removed successfully.${reset}" -} - -# Main script execution -if [[ "$1" == "-h" || "$1" == "--help" ]]; then - display_help - exit 0 -fi - -validate_logfile -remove_big_files diff --git a/bin/rftw_util_resolver b/bin/rftw_util_resolver deleted file mode 100755 index cfcee0eb..00000000 --- a/bin/rftw_util_resolver +++ /dev/null @@ -1,63 +0,0 @@ -#!/bin/bash - -# Load environment variables from reconftw.cfg -# Load environment variables -if [ -f "reconftw.cfg" ]; then - source reconftw.cfg -else - echo "Error: reconftw.cfg not found!" - exit 1 -fi - -# Display help information -function display_help() { - echo "Usage: $0" - echo - echo "Update resolvers for reconftw." - echo - echo "Options:" - echo " -h, --help Display this help and exit" - echo -} - -function update_resolvers() { - if [ "$generate_resolvers" = true ]; then - if [ ! "$AXIOM" = true ]; then - if [ ! -s "$resolvers" ] || [[ $(find "$resolvers" -mtime +1 -print) ]]; then - notification "Resolvers seem older than 1 day\n Generating custom resolvers..." warn - rm -f $resolvers 2>>"$LOGFILE" - dnsvalidator -tL https://public-dns.info/nameservers.txt -threads $DNSVALIDATOR_THREADS -o $resolvers 2>>"$LOGFILE" >/dev/null - dnsvalidator -tL https://raw.githubusercontent.com/blechschmidt/massdns/master/lists/resolvers.txt -threads $DNSVALIDATOR_THREADS -o tmp_resolvers 2>>"$LOGFILE" >/dev/null - - [ -s "tmp_resolvers" ] && cat tmp_resolvers | anew -q $resolvers - [ -s "tmp_resolvers" ] && rm -f tmp_resolvers 2>>"$LOGFILE" >/dev/null - - [ ! -s "$resolvers" ] && wget -q -O - ${resolvers_url} > $resolvers - [ ! -s "$resolvers_trusted" ] && wget -q -O - ${resolvers_trusted_url} > $resolvers_trusted - notification "Updated\n" good - fi - else - notification "Checking resolvers lists...\n Accurate resolvers are the key to great results\n This may take around 10 minutes if it's not updated" warn - axiom-exec 'if [ $(find "/home/op/lists/resolvers.txt" -mtime +1 -print) ] || [ $(cat /home/op/lists/resolvers.txt | wc -l) -le 40 ] ; then dnsvalidator -tL https://public-dns.info/nameservers.txt -threads 200 -o /home/op/lists/resolvers.txt ; fi' &>/dev/null - axiom-exec "wget -q -O - ${resolvers_url} > /home/op/lists/resolvers.txt" 2>>"$LOGFILE" >/dev/null - axiom-exec "wget -q -O - ${resolvers_trusted_url} > /home/op/lists/resolvers_trusted.txt" 2>>"$LOGFILE" >/dev/null - notification "Updated\n" good - fi - generate_resolvers=false - else - if [ ! -s "$resolvers" ] || [[ $(find "$resolvers" -mtime +1 -print) ]]; then - notification "Resolvers seem older than 1 day\n Downloading new resolvers..." 
warn - wget -q -O - ${resolvers_url} > $resolvers - wget -q -O - ${resolvers_trusted_url} > $resolvers_trusted - notification "Resolvers updated\n" good - fi - fi -} - -# Check arguments and call the main function -if [[ "$1" == "-h" || "$1" == "--help" ]]; then - display_help - exit 0 -else - update_resolvers -fi diff --git a/bin/rftw_util_sendnotify b/bin/rftw_util_sendnotify deleted file mode 100755 index 81de22d5..00000000 --- a/bin/rftw_util_sendnotify +++ /dev/null @@ -1,78 +0,0 @@ -#!/bin/bash - -# Load environment variables from reconftw.cfg -# Load environment variables -if [ -f "reconftw.cfg" ]; then - source reconftw.cfg -else - echo "Error: reconftw.cfg not found!" - exit 1 -fi - -# Display help information -function display_help() { - echo "Usage: $0 " - echo - echo "Send a specified file to notify providers based on configurations." - echo - echo "Options:" - echo " -h, --help Display this help and exit" - echo -} - -# Main sendToNotify function -function sendToNotify() { - local file="$1" - - if [[ -z "$file" ]]; then - printf "\n${yellow}Error: No file provided to send${reset}\n" - display_help - exit 1 - fi - - if [[ -z "$NOTIFY_CONFIG" ]]; then - NOTIFY_CONFIG=~/.config/notify/provider-config.yaml - fi - - if [ -n "$(find "$file" -prune -size +8000000c)" ]; then - printf '%s is larger than 8MB, sending over transfer.sh\n' "$file" - transfer "$file" | notify - return 0 - fi - - local config_value - for provider in telegram discord slack; do - if grep -q -E "^( )?${provider}" "$NOTIFY_CONFIG"; then - case $provider in - telegram) - notification "Sending ${domain} data over Telegram" info - config_value=$(grep -E "^( )?telegram_(chat_id|api_key)" "$NOTIFY_CONFIG" | xargs) - telegram_chat_id=$(echo "$config_value" | cut -d' ' -f2) - telegram_key=$(echo "$config_value" | cut -d' ' -f4) - curl -F document=@"$file" "https://api.telegram.org/bot${telegram_key}/sendDocument?chat_id=${telegram_chat_id}" 2>>"$LOGFILE" >/dev/null - ;; - - discord) - notification "Sending ${domain} data over Discord" info - discord_url=$(grep -E "^( )?discord_webhook_url" "$NOTIFY_CONFIG" | xargs | cut -d' ' -f2) - curl -v -i -H "Accept: application/json" -H "Content-Type: multipart/form-data" -X POST -F file1=@"$file" "$discord_url" 2>>"$LOGFILE" >/dev/null - ;; - - slack) - if [[ -n "$slack_channel" ]] && [[ -n "$slack_auth" ]]; then - notification "Sending ${domain} data over Slack" info - curl -F file=@"$file" -F "initial_comment=reconftw zip file" -F channels="$slack_channel" -H "Authorization: Bearer ${slack_auth}" https://slack.com/api/files.upload 2>>"$LOGFILE" >/dev/null - fi - ;; - esac - fi - done -} - -# Check arguments and call the main function -if [[ "$1" == "-h" || "$1" == "--help" ]]; then - display_help - exit 0 -else - sendToNotify "$1" -fi diff --git a/bin/rftw_util_tools b/bin/rftw_util_tools deleted file mode 100755 index 9ebcbbe7..00000000 --- a/bin/rftw_util_tools +++ /dev/null @@ -1,142 +0,0 @@ -#!/bin/bash - -# Default config path -CONFIG_PATH="$RECONFTW_CFG" - -# Check if the config file exists -if [ -f "$CONFIG_PATH" ]; then - source "$CONFIG_PATH" -else - echo "Error: reconftw.cfg not found at $CONFIG_PATH!" 
- exit 1 -fi - -# Help menu -function help_menu() { - echo -e "Usage: ./tools_installed_script.sh [OPTIONS]" - echo -e "Options:" - echo -e " -t, --tools-dir DIRECTORY Specify the tools directory path" - echo -e " -h, --help Display this help menu" - exit 1 -} - -# Main function -function tools_installed() { - printf "\n\n${bgreen}#######################################################################${reset}\n" - printf "${bblue} Checking installed tools ${reset}\n\n" - - allinstalled=true - - [ -n "$GOPATH" ] || { printf "${bred} [*] GOPATH var [NO]${reset}\n"; allinstalled=false;} - [ -n "$GOROOT" ] || { printf "${bred} [*] GOROOT var [NO]${reset}\n"; allinstalled=false;} - [ -n "$PATH" ] || { printf "${bred} [*] PATH var [NO]${reset}\n"; allinstalled=false;} - [ -f "$tools/dorks_hunter/dorks_hunter.py" ] || { printf "${bred} [*] dorks_hunter [NO]${reset}\n"; allinstalled=false;} - [ -f "$tools/brutespray/brutespray.py" ] || { printf "${bred} [*] brutespray [NO]${reset}\n"; allinstalled=false;} - [ -f "$tools/fav-up/favUp.py" ] || { printf "${bred} [*] fav-up [NO]${reset}\n"; allinstalled=false;} - [ -f "$tools/Corsy/corsy.py" ] || { printf "${bred} [*] Corsy [NO]${reset}\n"; allinstalled=false;} - [ -f "$tools/testssl.sh/testssl.sh" ] || { printf "${bred} [*] testssl [NO]${reset}\n"; allinstalled=false;} - [ -f "$tools/CMSeeK/cmseek.py" ] || { printf "${bred} [*] CMSeeK [NO]${reset}\n"; allinstalled=false;} - [ -f "${fuzz_wordlist}" ] || { printf "${bred} [*] OneListForAll [NO]${reset}\n"; allinstalled=false;} - [ -f "${lfi_wordlist}" ] || { printf "${bred} [*] lfi_wordlist [NO]${reset}\n"; allinstalled=false;} - [ -f "${ssti_wordlist}" ] || { printf "${bred} [*] ssti_wordlist [NO]${reset}\n"; allinstalled=false;} - [ -f "${subs_wordlist}" ] || { printf "${bred} [*] subs_wordlist [NO]${reset}\n"; allinstalled=false;} - [ -f "${subs_wordlist_big}" ] || { printf "${bred} [*] subs_wordlist_big [NO]${reset}\n"; allinstalled=false;} - [ -f "${resolvers}" ] || { printf "${bred} [*] resolvers [NO]${reset}\n"; allinstalled=false;} - [ -f "${resolvers_trusted}" ] || { printf "${bred} [*] resolvers_trusted [NO]${reset}\n"; allinstalled=false;} - [ -f "$tools/xnLinkFinder/xnLinkFinder.py" ] || { printf "${bred} [*] xnLinkFinder [NO]${reset}\n"; allinstalled=false;} - [ -f "$tools/waymore/waymore.py" ] || { printf "${bred} [*] waymore [NO]${reset}\n"; allinstalled=false;} - [ -f "$tools/commix/commix.py" ] || { printf "${bred} [*] commix [NO]${reset}\n"; allinstalled=false;} - [ -f "$tools/getjswords.py" ] || { printf "${bred} [*] getjswords [NO]${reset}\n"; allinstalled=false;} - [ -f "$tools/JSA/jsa.py" ] || { printf "${bred} [*] JSA [NO]${reset}\n"; allinstalled=false;} - [ -f "$tools/cloud_enum/cloud_enum.py" ] || { printf "${bred} [*] cloud_enum [NO]${reset}\n"; allinstalled=false;} - [ -f "$tools/ultimate-nmap-parser/ultimate-nmap-parser.sh" ] || { printf "${bred} [*] nmap-parse-output [NO]${reset}\n"; allinstalled=false;} - [ -f "$tools/pydictor/pydictor.py" ] || { printf "${bred} [*] pydictor [NO]${reset}\n"; allinstalled=false;} - [ -f "$tools/urless/urless/urless.py" ] || { printf "${bred} [*] urless [NO]${reset}\n"; allinstalled=false;} - [ -f "$tools/smuggler/smuggler.py" ] || { printf "${bred} [*] smuggler [NO]${reset}\n"; allinstalled=false;} - [ -f "$tools/regulator/main.py" ] || { printf "${bred} [*] regulator [NO]${reset}\n"; allinstalled=false;} - which github-endpoints &>/dev/null || { printf "${bred} [*] github-endpoints [NO]${reset}\n"; allinstalled=false;} - which 
github-subdomains &>/dev/null || { printf "${bred} [*] github-subdomains [NO]${reset}\n"; allinstalled=false;} - which gitlab-subdomains &>/dev/null || { printf "${bred} [*] gitlab-subdomains [NO]${reset}\n"; allinstalled=false;} - which katana &>/dev/null || { printf "${bred} [*] katana [NO]${reset}\n"; allinstalled=false;} - which wafw00f &>/dev/null || { printf "${bred} [*] wafw00f [NO]${reset}\n"; allinstalled=false;} - which dnsvalidator &>/dev/null || { printf "${bred} [*] dnsvalidator [NO]${reset}\n"; allinstalled=false;} - which gowitness &>/dev/null || { printf "${bred} [*] gowitness [NO]${reset}\n"; allinstalled=false;} - which amass &>/dev/null || { printf "${bred} [*] Amass [NO]${reset}\n"; allinstalled=false;} - which dnsx &>/dev/null || { printf "${bred} [*] dnsx [NO]${reset}\n"; allinstalled=false;} - which gotator &>/dev/null || { printf "${bred} [*] gotator [NO]${reset}\n"; allinstalled=false;} - which nuclei &>/dev/null || { printf "${bred} [*] Nuclei [NO]${reset}\n"; allinstalled=false;} - [ -d ${NUCLEI_TEMPLATES_PATH} ] || { printf "${bred} [*] Nuclei templates [NO]${reset}\n"; allinstalled=false;} - [ -d ${tools}/fuzzing-templates ] || { printf "${bred} [*] Fuzzing templates [NO]${reset}\n"; allinstalled=false;} - which gf &>/dev/null || { printf "${bred} [*] Gf [NO]${reset}\n"; allinstalled=false;} - which Gxss &>/dev/null || { printf "${bred} [*] Gxss [NO]${reset}\n"; allinstalled=false;} - which subjs &>/dev/null || { printf "${bred} [*] subjs [NO]${reset}\n"; allinstalled=false;} - which ffuf &>/dev/null || { printf "${bred} [*] ffuf [NO]${reset}\n"; allinstalled=false;} - which massdns &>/dev/null || { printf "${bred} [*] Massdns [NO]${reset}\n"; allinstalled=false;} - which qsreplace &>/dev/null || { printf "${bred} [*] qsreplace [NO]${reset}\n"; allinstalled=false;} - which interlace &>/dev/null || { printf "${bred} [*] interlace [NO]${reset}\n"; allinstalled=false;} - which anew &>/dev/null || { printf "${bred} [*] Anew [NO]${reset}\n"; allinstalled=false;} - which unfurl &>/dev/null || { printf "${bred} [*] unfurl [NO]${reset}\n"; allinstalled=false;} - which crlfuzz &>/dev/null || { printf "${bred} [*] crlfuzz [NO]${reset}\n"; allinstalled=false;} - which httpx &>/dev/null || { printf "${bred} [*] Httpx [NO]${reset}\n${reset}"; allinstalled=false;} - which jq &>/dev/null || { printf "${bred} [*] jq [NO]${reset}\n${reset}"; allinstalled=false;} - which notify &>/dev/null || { printf "${bred} [*] notify [NO]${reset}\n${reset}"; allinstalled=false;} - which dalfox &>/dev/null || { printf "${bred} [*] dalfox [NO]${reset}\n${reset}"; allinstalled=false;} - which puredns &>/dev/null || { printf "${bred} [*] puredns [NO]${reset}\n${reset}"; allinstalled=false;} - which emailfinder &>/dev/null || { printf "${bred} [*] emailfinder [NO]${reset}\n"; allinstalled=false;} - which analyticsrelationships &>/dev/null || { printf "${bred} [*] analyticsrelationships [NO]${reset}\n"; allinstalled=false;} - which mapcidr &>/dev/null || { printf "${bred} [*] mapcidr [NO]${reset}\n"; allinstalled=false;} - which ppfuzz &>/dev/null || { printf "${bred} [*] ppfuzz [NO]${reset}\n"; allinstalled=false;} - which cdncheck &>/dev/null || { printf "${bred} [*] cdncheck [NO]${reset}\n"; allinstalled=false;} - which interactsh-client &>/dev/null || { printf "${bred} [*] interactsh-client [NO]${reset}\n"; allinstalled=false;} - which tlsx &>/dev/null || { printf "${bred} [*] tlsx [NO]${reset}\n"; allinstalled=false;} - which smap &>/dev/null || { printf "${bred} [*] smap [NO]${reset}\n"; 
allinstalled=false;} - which gitdorks_go &>/dev/null || { printf "${bred} [*] gitdorks_go [NO]${reset}\n"; allinstalled=false;} - which ripgen &>/dev/null || { printf "${bred} [*] ripgen [NO]${reset}\n${reset}"; allinstalled=false;} - which dsieve &>/dev/null || { printf "${bred} [*] dsieve [NO]${reset}\n${reset}"; allinstalled=false;} - which inscope &>/dev/null || { printf "${bred} [*] inscope [NO]${reset}\n${reset}"; allinstalled=false;} - which enumerepo &>/dev/null || { printf "${bred} [*] enumerepo [NO]${reset}\n${reset}"; allinstalled=false;} - which Web-Cache-Vulnerability-Scanner &>/dev/null || { printf "${bred} [*] Web-Cache-Vulnerability-Scanner [NO]${reset}\n"; allinstalled=false;} - which subfinder &>/dev/null || { printf "${bred} [*] subfinder [NO]${reset}\n${reset}"; allinstalled=false;} - which byp4xx &>/dev/null || { printf "${bred} [*] byp4xx [NO]${reset}\n${reset}"; allinstalled=false;} - which ghauri &>/dev/null || { printf "${bred} [*] ghauri [NO]${reset}\n${reset}"; allinstalled=false;} - which hakip2host &>/dev/null || { printf "${bred} [*] hakip2host [NO]${reset}\n${reset}"; allinstalled=false;} - which gau &>/dev/null || { printf "${bred} [*] gau [NO]${reset}\n${reset}"; allinstalled=false;} - which crt &>/dev/null || { printf "${bred} [*] crt [NO]${reset}\n${reset}"; allinstalled=false;} - which gitleaks &>/dev/null || { printf "${bred} [*] gitleaks [NO]${reset}\n${reset}"; allinstalled=false;} - which trufflehog &>/dev/null || { printf "${bred} [*] trufflehog [NO]${reset}\n${reset}"; allinstalled=false;} - which s3scanner &>/dev/null || { printf "${bred} [*] s3scanner [NO]${reset}\n${reset}"; allinstalled=false;} - - if [ "${allinstalled}" = true ]; then - printf "${bgreen} Good! All installed! ${reset}\n\n" - else - printf "\n${yellow} Try running the installer script again ./install.sh" - printf "\n${yellow} If it fails for any reason try to install manually the tools missed" - printf "\n${yellow} Finally remember to set the ${bred}\$tools${yellow} variable at the start of this script" - printf "\n${yellow} If nothing works and the world is gonna end you can always ping me :D ${reset}\n\n" - fi - - printf "${bblue} Tools check finished\n" - printf "${bgreen}#######################################################################\n${reset}" - -} - -# Parse command-line arguments -tools_dir="" -while [[ "$#" -gt 0 ]]; do - case $1 in - -t|--tools-dir) tools_dir="$2"; shift ;; - -h|--help) help_menu ;; - *) echo -e "${bred}Unknown parameter passed: $1${reset}" >&2; help_menu ;; - esac - shift -done - -# Validate tools directory -if [ -z "$tools_dir" ]; then - echo -e "${bred}Error: Tools directory not specified.${reset}" >&2 - help_menu - exit 1 -fi - -# Execute the main function -tools_installed \ No newline at end of file diff --git a/bin/rftw_util_version b/bin/rftw_util_version deleted file mode 100755 index 5c540742..00000000 --- a/bin/rftw_util_version +++ /dev/null @@ -1,56 +0,0 @@ -#!/bin/bash - - -# Default config path -CONFIG_PATH="$RECONFTW_CFG" - -# Check if the config file exists -if [ -f "$CONFIG_PATH" ]; then - source "$CONFIG_PATH" -else - echo "Error: reconftw.cfg not found at $CONFIG_PATH!" - exit 1 -fi - -# Help menu -function help_menu() { - echo -e "Usage: ./check_version_script.sh" - echo -e "Checks for updates to the current git repository." - echo -e "Options:" - echo -e " -h, --help Display this help menu" - exit 1 -} - -# Main function -function check_version() { - # Check if current directory is a git repository - if ! 
git rev-parse --is-inside-work-tree > /dev/null 2>&1; then - echo -e "${bred}Error: This is not a git repository.${reset}" >&2 - exit 1 - fi - - timeout 10 git fetch - exit_status=$? - if [ $exit_status -eq 0 ]; then - BRANCH=$(git rev-parse --abbrev-ref HEAD) - HEADHASH=$(git rev-parse HEAD) - UPSTREAMHASH=$(git rev-parse "${BRANCH}"@\{upstream\}) - if [ "$HEADHASH" != "$UPSTREAMHASH" ]; then - echo -e "\n${yellow} There is a new version, run ./install.sh to get the latest version${reset}\n\n" - fi - else - echo -e "\n${bred} Unable to check updates ${reset}\n\n" >&2 - fi -} - -# Parse command-line arguments -while [[ "$#" -gt 0 ]]; do - case $1 in - -h|--help) help_menu ;; - *) echo -e "${bred}Unknown parameter passed: $1${reset}" >&2; help_menu ;; - esac - shift -done - -# Execute the main function -check_version \ No newline at end of file diff --git a/bin/rftw_util_zipfolder b/bin/rftw_util_zipfolder deleted file mode 100755 index 5f171513..00000000 --- a/bin/rftw_util_zipfolder +++ /dev/null @@ -1,60 +0,0 @@ -#!/bin/bash - -# Load environment variables from reconftw.cfg -# Load environment variables -if [ -f "reconftw.cfg" ]; then - source reconftw.cfg -else - echo "Error: reconftw.cfg not found!" - exit 1 -fi - -# Colors for console output -yellow="\033[1;33m" -red="\033[1;31m" -reset="\033[0m" - -# Help menu function -function display_help() { - echo "Usage: $0 [DOMAIN]" - echo - echo "Zips the specified domain's output folder and sends the zip file." - echo "Options:" - echo " -h, --help Display this help and exit" - echo -} - -# Validate arguments function -function validate_args() { - if [[ -z "$1" ]]; then - echo -e "${red}Error: DOMAIN is required.${reset}" - display_help - exit 1 - fi -} - -# Main zipping and sending function -function zip_send_output_folder() { - local domain="$1" - local zip_name1=$(date +"%Y_%m_%d-%H.%M.%S") - local zip_name="${zip_name1}_${domain}.zip" - (cd "$dir" && zip -r "$zip_name" .) - - echo "Creating and sending zip file "${dir}/${zip_name}"" - - if [ -s "${dir}/$zip_name" ]; then - sendToNotify "$dir/$zip_name" - rm -f "${dir}/$zip_name" - else - notification "No Zip file to send" warn - fi -} - -# Main script execution -if [[ "$1" == "-h" || "$1" == "--help" ]]; then - display_help - exit 0 -fi - -validate_args "$1" -zip_send_output_folder "$1" diff --git a/bin/rftw_vuln_4xx b/bin/rftw_vuln_4xx deleted file mode 100755 index e7b202b0..00000000 --- a/bin/rftw_vuln_4xx +++ /dev/null @@ -1,77 +0,0 @@ -#!/bin/bash - -# Load environment variables from reconftw.cfg -# Load environment variables -if [ -f "reconftw.cfg" ]; then - source reconftw.cfg -else - echo "Error: reconftw.cfg not found!" - exit 1 -fi - -# Colors -yellow="\033[1;33m" -reset="\033[0m" - -# Help menu function -function display_help() { - echo "Usage: $0 [OPTIONS]" - echo - echo "Perform 4XX Bypass checks on provided inputs." - echo "Options:" - echo " -h, --help Display this help and exit" - echo -} - -# Validate requirements function -function validate_requirements() { - if ! [ -x "$(command -v byp4xx)" ]; then - echo "Error: byp4xx not found." >&2 - exit 1 - fi - if ! [ -f "fuzzing/fuzzing_full.txt" ]; then - echo "Error: fuzzing_full.txt not found." >&2 - exit 1 - fi -} - -# 4XX Bypass test function -function bypass_4xx_test() { - if { [ ! 
-f "$called_fn_dir/.4xxbypass" ] || [ "$DIFF" = true ]; } && [ "$BYPASSER4XX" = true ]; then - if [[ $(cat fuzzing/fuzzing_full.txt 2>/dev/null | grep -E '^4' | grep -Ev '^404' | cut -d ' ' -f3 | wc -l) -le 1000 ]] || [ "$DEEP" = true ]; then - echo "[*] Starting 403 bypass" - cat $dir/fuzzing/fuzzing_full.txt 2>/dev/null | grep -E '^4' | grep -Ev '^404' | cut -d ' ' -f3 > $dir/.tmp/403test.txt - cd "$tools/byp4xx" || { echo "Failed to cd directory in 4xxbypass @ line ${LINENO}"; exit 1; } - byp4xx -threads $BYP4XX_THREADS $dir/.tmp/403test.txt > $dir/.tmp/byp4xx.txt - cd "$dir" || { echo "Failed to cd directory in 4xxbypass @ line ${LINENO}"; exit 1; } - [ -s ".tmp/byp4xx.txt" ] && cat .tmp/byp4xx.txt | anew -q vulns/byp4xx.txt - echo "[+] Results are saved in vulns/byp4xx.txt" - else - echo "[!] Too many URLs to bypass, skipping" - fi - else - if [ "$BYPASSER4XX" = false ]; then - echo -e "\n${yellow} 4xxbypass skipped in this mode or defined in reconftw.cfg ${reset}" - else - echo -e "${yellow} 4xxbypass is already processed, to force executing 4xxbypass delete\n $called_fn_dir/.4xxbypass ${reset}\n" - fi - fi -} - -# Main script execution -while (( "$#" )); do - case "$1" in - -h|--help) - display_help - exit 0 - ;; - *) - echo "Unknown parameter passed: $1" - display_help - exit 1 - ;; - esac -done - -validate_requirements -bypass_4xx_test diff --git a/bin/rftw_vuln_brokenlink b/bin/rftw_vuln_brokenlink deleted file mode 100755 index 56fc9b8b..00000000 --- a/bin/rftw_vuln_brokenlink +++ /dev/null @@ -1,68 +0,0 @@ -#!/bin/bash - -# Load environment variables from reconftw.cfg -# Load environment variables -if [ -f "reconftw.cfg" ]; then - source reconftw.cfg -else - echo "Error: reconftw.cfg not found!" - exit 1 -fi - -# Help menu -help_menu() { - echo "Usage: $0 [OPTIONS]" - echo "Broken Links Check using katana and axiom" - echo "" - echo "Options:" - echo " -d, --deep Set deep mode (optional)" - echo " -h, --help Display this help message and exit" - exit 0 -} - -# Input validation -DEEP_MODE=false -while [ "$1" != "" ]; do - case $1 in - -d | --deep ) DEEP_MODE=true - ;; - -h | --help ) help_menu - exit - ;; - * ) echo "Unknown option: $1" - exit 1 - esac - shift -done - -broken_links_check() { - local deep_mode=$1 - - if { [ ! -f "$called_fn_dir/.${FUNCNAME[0]}" ] || [ "$DIFF" = true ]; } && [ "$BROKENLINKS" = true ]; then - start_func ${FUNCNAME[0]} "Broken links checks" - - # The code remains mostly unchanged, with only minor adjustments for clarity. - [ ! -s ".tmp/webs_all.txt" ] && cat webs/webs.txt webs/webs_uncommon_ports.txt 2>/dev/null | anew -q .tmp/webs_all.txt - if [ ! 
"$AXIOM" = true ]; then - depth_level=$([ "$deep_mode" = true ] && echo 3 || echo 2) - [ -s ".tmp/webs_all.txt" ] && katana -silent -list .tmp/webs_all.txt -jc -kf all -c $KATANA_THREADS -d $depth_level -o .tmp/katana.txt 2>>"$LOGFILE" >/dev/null - [ -s ".tmp/katana.txt" ] && sed -i '/^.\{2048\}./d' .tmp/katana.txt - else - depth_level=$([ "$deep_mode" = true ] && echo 3 || echo 2) - [ -s ".tmp/webs_all.txt" ] && axiom-scan .tmp/webs_all.txt -m katana -jc -kf all -d $depth_level -o .tmp/katana.txt $AXIOM_EXTRA_ARGS 2>>"$LOGFILE" >/dev/null - [ -s ".tmp/katana.txt" ] && sed -i '/^.\{2048\}./d' .tmp/katana.txt - fi - [ -s ".tmp/katana.txt" ] && cat .tmp/katana.txt | sort -u | httpx -follow-redirects -random-agent -status-code -threads $HTTPX_THREADS -rl $HTTPX_RATELIMIT -timeout $HTTPX_TIMEOUT -silent -retries 2 -no-color | grep "\[4" | cut -d ' ' -f1 | anew -q .tmp/brokenLinks_total.txt - NUMOFLINES=$(cat .tmp/brokenLinks_total.txt 2>>"$LOGFILE" | anew vulns/brokenLinks.txt | sed '/^$/d' | wc -l) - notification "${NUMOFLINES} new broken links found" info - end_func "Results are saved in vulns/brokenLinks.txt" ${FUNCNAME[0]} - else - if [ "$BROKENLINKS" = false ]; then - printf "\n${yellow} ${FUNCNAME[0]} skipped in this mode or defined in reconftw.cfg ${reset}\n" - else - printf "${yellow} ${FUNCNAME[0]} is already processed, to force executing ${FUNCNAME[0]} delete\n $called_fn_dir/.${FUNCNAME[0]} ${reset}\n\n" - fi - fi -} - -broken_links_check $DEEP_MODE diff --git a/bin/rftw_vuln_comminject b/bin/rftw_vuln_comminject deleted file mode 100755 index a641bfbe..00000000 --- a/bin/rftw_vuln_comminject +++ /dev/null @@ -1,76 +0,0 @@ -#!/bin/bash - -# Load environment variables from reconftw.cfg -# Load environment variables -if [ -f "reconftw.cfg" ]; then - source reconftw.cfg -else - echo "Error: reconftw.cfg not found!" - exit 1 -fi - -# Colors -yellow="\033[1;33m" -reset="\033[0m" - -# Help menu function -function display_help() { - echo "Usage: $0 [OPTIONS]" - echo - echo "Perform Command Injection checks on provided inputs." - echo "Options:" - echo " -h, --help Display this help and exit" - echo -} - -# Validate requirements function -function validate_requirements() { - if ! [ -x "$(command -v python3)" ]; then - echo "Error: python3 not found." >&2 - exit 1 - fi - if ! [ -f "gf/rce.txt" ]; then - echo "Error: rce.txt not found." >&2 - exit 1 - fi -} - -# Command Injection test function -function command_injection_test() { - if { [ ! -f "$called_fn_dir/.command_injection" ] || [ "$DIFF" = true ]; } && [ "$COMM_INJ" = true ] && [ -s "gf/rce.txt" ]; then - echo "[*] Starting Command Injection checks" - [ -s "gf/rce.txt" ] && cat gf/rce.txt | qsreplace FUZZ | sed '/FUZZ/!d' | anew -q .tmp/tmp_rce.txt - if [ "$DEEP" = true ] || [[ $(cat .tmp/tmp_rce.txt | wc -l) -le $DEEP_LIMIT ]]; then - [ -s ".tmp/tmp_rce.txt" ] && python3 $tools/commix/commix.py --batch -m .tmp/tmp_rce.txt --output-dir vulns/command_injection.txt 2>>"$LOGFILE" >/dev/null - echo "[+] Results are saved in vulns/command_injection folder" - else - echo "[!] Skipping Command injection: Too many URLs to test, try with --deep flag" - fi - else - if [ "$COMM_INJ" = false ]; then - echo -e "\n${yellow} command_injection_test skipped in this mode or defined in reconftw.cfg ${reset}" - elif [ ! 
-s "gf/rce.txt" ]; then - echo -e "\n${yellow} command_injection_test No URLs potentially vulnerables to Command Injection ${reset}\n" - else - echo -e "${yellow} command_injection_test is already processed, to force executing command_injection_test delete\n $called_fn_dir/.command_injection ${reset}\n" - fi - fi -} - -# Main script execution -while (( "$#" )); do - case "$1" in - -h|--help) - display_help - exit 0 - ;; - *) - echo "Unknown parameter passed: $1" - display_help - exit 1 - ;; - esac -done - -validate_requirements -command_injection_test diff --git a/bin/rftw_vuln_cors b/bin/rftw_vuln_cors deleted file mode 100755 index 1e11c67b..00000000 --- a/bin/rftw_vuln_cors +++ /dev/null @@ -1,82 +0,0 @@ -#!/bin/bash - -# Load environment variables from reconftw.cfg -# Load environment variables -if [ -f "reconftw.cfg" ]; then - source reconftw.cfg -else - echo "Error: reconftw.cfg not found!" - exit 1 -fi - -# Help menu -function display_help() { - echo "Usage: $0 [OPTIONS]" - echo - echo "Perform CORS scan on provided inputs." - echo "Options:" - echo " -h, --help Display this help and exit" - echo -} - -# Validate the required tools and files -function validate_requirements() { - if ! [ -x "$(command -v python3)" ]; then - echo "Error: python3 is not installed." >&2 - exit 1 - fi - if [ ! -f "$tools/Corsy/corsy.py" ]; then - echo "Error: Corsy tool is missing." >&2 - exit 1 - fi - if [ ! -f "webs/webs.txt" ] || [ ! -f "webs/webs_uncommon_ports.txt" ]; then - echo "Error: Required input files are missing." >&2 - exit 1 - fi -} - -# Main CORS function -function cors_scan() { - if { [ ! -f "$called_fn_dir/.cors_scan" ] || [ "$DIFF" = true ]; } && [ "$CORS" = true ]; then - echo "[*] Starting CORS Scan" - - # Check and consolidate input files - [ ! -s ".tmp/webs_all.txt" ] && cat webs/webs.txt webs/webs_uncommon_ports.txt 2>/dev/null | anew -q .tmp/webs_all.txt - - # Perform CORS scan - [ -s ".tmp/webs_all.txt" ] && python3 $tools/Corsy/corsy.py -i .tmp/webs_all.txt -o vulns/cors.txt 2>>"$LOGFILE" >/dev/null - - echo "[+] Results are saved in vulns/cors.txt" - else - if [ "$CORS" = false ]; then - echo -e "\n${yellow} cors_scan skipped in this mode or defined in reconftw.cfg ${reset}" - else - echo -e "${yellow} cors_scan is already processed, to force executing cors_scan delete\n $called_fn_dir/.cors_scan ${reset}\n" - fi - fi -} - -# Main script execution -if [ "$#" -eq 0 ]; then - display_help - exit 0 -fi - -while (( "$#" )); do - case "$1" in - -h|--help) - display_help - exit 0 - ;; - *) - echo "Unknown parameter passed: $1" - display_help - exit 1 - ;; - esac - shift -done - -validate_requirements -cors_scan - diff --git a/bin/rftw_vuln_crlf b/bin/rftw_vuln_crlf deleted file mode 100755 index 2e7c559d..00000000 --- a/bin/rftw_vuln_crlf +++ /dev/null @@ -1,83 +0,0 @@ -#!/bin/bash - -# Load environment variables from reconftw.cfg -# Load environment variables -if [ -f "reconftw.cfg" ]; then - source reconftw.cfg -else - echo "Error: reconftw.cfg not found!" - exit 1 -fi - -# Colors -yellow="\033[1;33m" -reset="\033[0m" - -# Help menu function -function display_help() { - echo "Usage: $0 [OPTIONS]" - echo - echo "Perform CRLF checks on provided inputs." - echo "Options:" - echo " -d, --deep Perform deep scanning" - echo " -h, --help Display this help and exit" - echo -} - -# Validate requirements function -function validate_requirements() { - if ! [ -x "$(command -v crlfuzz)" ]; then - echo "Error: crlfuzz is not installed." 
>&2 - exit 1 - fi -} - -# CRLF checks function -function crlf_checks() { - local deep_flag=$1 - - if { [ ! -f "$called_fn_dir/.crlf_checks" ] || [ "$DIFF" = true ]; } && [ "$CRLF_CHECKS" = true ]; then - echo "[*] Starting CRLF checks" - - if [ ! -s ".tmp/webs_all.txt" ]; then - cat webs/webs.txt webs/webs_uncommon_ports.txt 2>/dev/null | anew -q .tmp/webs_all.txt - fi - - if [ "$deep_flag" = true ] || [[ $(cat .tmp/webs_all.txt | wc -l) -le $DEEP_LIMIT ]]; then - crlfuzz -l .tmp/webs_all.txt -o vulns/crlf.txt 2>>"$LOGFILE" >/dev/null - echo "[+] Results are saved in vulns/crlf.txt" - else - echo "[!] Skipping CRLF: Too many URLs to test, try with --deep flag" - fi - else - if [ "$CRLF_CHECKS" = false ]; then - echo -e "\n${yellow} crlf_checks skipped in this mode or defined in reconftw.cfg ${reset}" - else - echo -e "${yellow} crlf_checks is already processed, to force executing crlf_checks delete\n $called_fn_dir/.crlf_checks ${reset}\n" - fi - fi -} - -# Main script execution -deep_flag=false - -while (( "$#" )); do - case "$1" in - -d|--deep) - deep_flag=true - shift - ;; - -h|--help) - display_help - exit 0 - ;; - *) - echo "Unknown parameter passed: $1" - display_help - exit 1 - ;; - esac -done - -validate_requirements -crlf_checks $deep_flag diff --git a/bin/rftw_vuln_fuzzparam b/bin/rftw_vuln_fuzzparam deleted file mode 100755 index 2e479cf4..00000000 --- a/bin/rftw_vuln_fuzzparam +++ /dev/null @@ -1,81 +0,0 @@ -#!/bin/bash - -# Load environment variables from reconftw.cfg -# Load environment variables -if [ -f "reconftw.cfg" ]; then - source reconftw.cfg -else - echo "Error: reconftw.cfg not found!" - exit 1 -fi - -# Colors -yellow="\033[1;33m" -reset="\033[0m" - -# Help menu function -function display_help() { - echo "Usage: $0 [OPTIONS]" - echo - echo "Perform Fuzzing params values checks on provided inputs." - echo "Options:" - echo " -h, --help Display this help and exit" - echo -} - -# Validate requirements function -function validate_requirements() { - if ! [ -x "$(command -v nuclei)" ]; then - echo "Error: nuclei not found." >&2 - exit 1 - fi - if ! [ -d "$tools/fuzzing-templates" ]; then - echo "Error: fuzzing-templates directory not found." >&2 - exit 1 - fi -} - -# Fuzzing params values test function -function fuzzparams_test() { - if { [ ! -f "$called_fn_dir/.fuzzparams" ] || [ "$DIFF" = true ]; } && [ "$FUZZPARAMS" = true ]; then - echo "[*] Starting Fuzzing params values checks" - if [ "$DEEP" = true ] || [[ $(cat webs/url_extract.txt | wc -l) -le $DEEP_LIMIT2 ]]; then - if [ ! "$AXIOM" = true ]; then - nuclei -update 2>>"$LOGFILE" >/dev/null - git -C $tools/fuzzing-templates pull - cat webs/url_extract.txt 2>/dev/null | nuclei -silent -retries 3 -rl $NUCLEI_RATELIMIT -t $tools/fuzzing-templates -o .tmp/fuzzparams.txt - else - axiom-exec "git clone https://github.com/projectdiscovery/fuzzing-templates /home/op/fuzzing-templates" &>/dev/null - axiom-scan webs/url_extract.txt -m nuclei -nh -retries 3 -w /home/op/fuzzing-templates -rl $NUCLEI_RATELIMIT -o .tmp/fuzzparams.txt $AXIOM_EXTRA_ARGS 2>>"$LOGFILE" >/dev/null - fi - [ -s ".tmp/fuzzparams.txt" ] && cat .tmp/fuzzparams.txt | anew -q vulns/fuzzparams.txt - echo "[+] Results are saved in vulns/fuzzparams.txt" - else - echo "[!] 
Fuzzing params values: Too many entries to test, try with --deep flag" - fi - else - if [ "$FUZZPARAMS" = false ]; then - echo -e "\n${yellow} fuzzparams skipped in this mode or defined in reconftw.cfg ${reset}" - else - echo -e "${yellow} fuzzparams is already processed, to force executing fuzzparams delete\n $called_fn_dir/.fuzzparams ${reset}\n" - fi - fi -} - -# Main script execution -while (( "$#" )); do - case "$1" in - -h|--help) - display_help - exit 0 - ;; - *) - echo "Unknown parameter passed: $1" - display_help - exit 1 - ;; - esac -done - -validate_requirements -fuzzparams_test diff --git a/bin/rftw_vuln_lfi b/bin/rftw_vuln_lfi deleted file mode 100755 index 209d35b7..00000000 --- a/bin/rftw_vuln_lfi +++ /dev/null @@ -1,83 +0,0 @@ -#!/bin/bash - -# Load environment variables from reconftw.cfg -# Load environment variables -if [ -f "reconftw.cfg" ]; then - source reconftw.cfg -else - echo "Error: reconftw.cfg not found!" - exit 1 -fi - -# Colors -yellow="\033[1;33m" -reset="\033[0m" - -# Help menu function -function display_help() { - echo "Usage: $0 [OPTIONS]" - echo - echo "Perform LFI checks on provided inputs." - echo "Options:" - echo " -d, --deep Perform deep scanning" - echo " -h, --help Display this help and exit" - echo -} - -# Validate requirements function -function validate_requirements() { - if ! [ -x "$(command -v ffuf)" ] || ! [ -x "$(command -v interlace)" ]; then - echo "Error: ffuf and/or interlace are not installed." >&2 - exit 1 - fi -} - -# LFI checks function -function lfi_checks() { - local deep_flag=$1 - - if { [ ! -f "$called_fn_dir/.lfi_checks" ] || [ "$DIFF" = true ]; } && [ "$LFI" = true ] && [ -s "gf/lfi.txt" ]; then - echo "[*] Starting LFI checks" - - cat gf/lfi.txt | qsreplace FUZZ | sed '/FUZZ/!d' | anew -q .tmp/tmp_lfi.txt - - if [ "$deep_flag" = true ] || [[ $(cat .tmp/tmp_lfi.txt | wc -l) -le $DEEP_LIMIT ]]; then - interlace -tL .tmp/tmp_lfi.txt -threads ${INTERLACE_THREADS} -c "ffuf -v -r -t ${FFUF_THREADS} -rate ${FFUF_RATELIMIT} -H \"${HEADER}\" -w ${lfi_wordlist} -u \"_target_\" -mr \"root:\" " 2>/dev/null | grep "URL" | sed 's/| URL | //' | anew -q vulns/lfi.txt - echo "[+] Results are saved in vulns/lfi.txt" - else - echo "[!] Skipping LFI: Too many URLs to test, try with --deep flag" - fi - else - if [ "$LFI" = false ]; then - echo -e "\n${yellow} lfi_checks skipped in this mode or defined in reconftw.cfg ${reset}" - elif [ ! -s "gf/lfi.txt" ]; then - echo -e "\n${yellow} lfi_checks No URLs potentially vulnerables to LFI ${reset}\n" - else - echo -e "${yellow} lfi_checks is already processed, to force executing lfi_checks delete\n $called_fn_dir/.lfi_checks ${reset}\n" - fi - fi -} - -# Main script execution -deep_flag=false - -while (( "$#" )); do - case "$1" in - -d|--deep) - deep_flag=true - shift - ;; - -h|--help) - display_help - exit 0 - ;; - *) - echo "Unknown parameter passed: $1" - display_help - exit 1 - ;; - esac -done - -validate_requirements -lfi_checks $deep_flag diff --git a/bin/rftw_vuln_openredir b/bin/rftw_vuln_openredir deleted file mode 100755 index 53bc5fd6..00000000 --- a/bin/rftw_vuln_openredir +++ /dev/null @@ -1,89 +0,0 @@ -#!/bin/bash - -# Load environment variables from reconftw.cfg -# Load environment variables -if [ -f "reconftw.cfg" ]; then - source reconftw.cfg -else - echo "Error: reconftw.cfg not found!" 
- exit 1 -fi - -# Colors -yellow="\033[1;33m" -bgreen="\033[1;32m" -reset="\033[0m" - -# Help menu -function display_help() { - echo "Usage: $0 [OPTIONS]" - echo - echo "Perform Open Redirects scan on provided inputs." - echo "Options:" - echo " -d, --deep Perform deep scanning" - echo " -h, --help Display this help and exit" - echo -} - -# Validate the required tools and files -function validate_requirements() { - if ! [ -x "$(command -v python3)" ]; then - echo "Error: python3 is not installed." >&2 - exit 1 - fi - if [ ! -f "$tools/Oralyzer/oralyzer.py" ] || [ ! -f "$tools/Oralyzer/payloads.txt" ]; then - echo "Error: Oralyzer tool or its payloads are missing." >&2 - exit 1 - fi -} - -# Main Open Redirect function -function open_redirect_scan() { - local deep_flag=$1 - - if { [ ! -f "$called_fn_dir/.open_redirect_scan" ] || [ "$DIFF" = true ]; } && [ "$OPEN_REDIRECT" = true ] && [ -s "gf/redirect.txt" ]; then - echo "[*] Starting Open Redirects checks" - - if [ "$deep_flag" = true ] || [[ $(cat gf/redirect.txt | wc -l) -le $DEEP_LIMIT ]]; then - cat gf/redirect.txt | qsreplace FUZZ | sed '/FUZZ/!d' | anew -q .tmp/tmp_redirect.txt - python3 $tools/Oralyzer/oralyzer.py -l .tmp/tmp_redirect.txt -p $tools/Oralyzer/payloads.txt > vulns/redirect.txt - sed -r -i "s/\x1B\[([0-9]{1,3}(;[0-9]{1,2})?)?[mGK]//g" vulns/redirect.txt - echo "[+] Results are saved in vulns/redirect.txt" - else - echo "[!] Skipping Open redirects: Too many URLs to test, try with --deep flag" - fi - else - if [ "$OPEN_REDIRECT" = false ]; then - echo -e "\n${yellow} open_redirect_scan skipped in this mode or defined in reconftw.cfg ${reset}" - elif [ ! -s "gf/redirect.txt" ]; then - echo -e "\n${yellow} open_redirect_scan No URLs potentially vulnerables to Open Redirect ${reset}\n" - else - echo -e "${yellow} open_redirect_scan is already processed, to force executing open_redirect_scan delete\n $called_fn_dir/.open_redirect_scan ${reset}\n" - fi - fi -} - -# Main script execution -deep_flag=false - -while (( "$#" )); do - case "$1" in - -d|--deep) - deep_flag=true - shift - ;; - -h|--help) - display_help - exit 0 - ;; - *) - echo "Unknown parameter passed: $1" - display_help - exit 1 - ;; - esac -done - -validate_requirements -open_redirect_scan $deep_flag - diff --git a/bin/rftw_vuln_protpollut b/bin/rftw_vuln_protpollut deleted file mode 100755 index 7f9d9c60..00000000 --- a/bin/rftw_vuln_protpollut +++ /dev/null @@ -1,74 +0,0 @@ -#!/bin/bash - -# Load environment variables from reconftw.cfg -# Load environment variables -if [ -f "reconftw.cfg" ]; then - source reconftw.cfg -else - echo "Error: reconftw.cfg not found!" - exit 1 -fi - -# Colors -yellow="\033[1;33m" -reset="\033[0m" - -# Help menu function -function display_help() { - echo "Usage: $0 [OPTIONS]" - echo - echo "Perform Prototype Pollution checks on provided inputs." - echo "Options:" - echo " -h, --help Display this help and exit" - echo -} - -# Validate requirements function -function validate_requirements() { - if ! [ -x "$(command -v ppfuzz)" ]; then - echo "Error: ppfuzz not found." >&2 - exit 1 - fi - if ! [ -f "webs/url_extract.txt" ]; then - echo "Error: url_extract.txt not found." >&2 - exit 1 - fi -} - -# Prototype Pollution test function -function prototype_pollution_test() { - if { [ ! 
-f "$called_fn_dir/.prototype_pollution" ] || [ "$DIFF" = true ]; } && [ "$PROTO_POLLUTION" = true ]; then - echo "[*] Starting Prototype Pollution checks" - if [ "$DEEP" = true ] || [[ $(cat webs/url_extract.txt | wc -l) -le $DEEP_LIMIT ]]; then - [ -s "webs/url_extract.txt" ] && ppfuzz -l webs/url_extract.txt -c $PPFUZZ_THREADS 2>/dev/null | anew -q .tmp/prototype_pollution.txt - [ -s ".tmp/prototype_pollution.txt" ] && cat .tmp/prototype_pollution.txt | sed -e '1,8d' | sed '/^\[ERR/d' | anew -q vulns/prototype_pollution.txt - echo "[+] Results are saved in vulns/prototype_pollution.txt" - else - echo "[!] Skipping Prototype Pollution: Too many URLs to test, try with --deep flag" - fi - else - if [ "$PROTO_POLLUTION" = false ]; then - echo -e "\n${yellow} prototype_pollution skipped in this mode or defined in reconftw.cfg ${reset}" - else - echo -e "${yellow} prototype_pollution is already processed, to force executing prototype_pollution delete\n $called_fn_dir/.prototype_pollution ${reset}\n" - fi - fi -} - -# Main script execution -while (( "$#" )); do - case "$1" in - -h|--help) - display_help - exit 0 - ;; - *) - echo "Unknown parameter passed: $1" - display_help - exit 1 - ;; - esac -done - -validate_requirements -prototype_pollution_test diff --git a/bin/rftw_vuln_smuggling b/bin/rftw_vuln_smuggling deleted file mode 100755 index 828416f5..00000000 --- a/bin/rftw_vuln_smuggling +++ /dev/null @@ -1,77 +0,0 @@ -#!/bin/bash - -# Load environment variables from reconftw.cfg -# Load environment variables -if [ -f "reconftw.cfg" ]; then - source reconftw.cfg -else - echo "Error: reconftw.cfg not found!" - exit 1 -fi - -# Colors -yellow="\033[1;33m" -reset="\033[0m" - -# Help menu function -function display_help() { - echo "Usage: $0 [OPTIONS]" - echo - echo "Perform HTTP Request Smuggling checks on provided inputs." - echo "Options:" - echo " -h, --help Display this help and exit" - echo -} - -# Validate requirements function -function validate_requirements() { - if ! [ -x "$(command -v python3)" ]; then - echo "Error: python3 not found." >&2 - exit 1 - fi - if ! [ -d "$tools/smuggler" ]; then - echo "Error: smuggler tool directory not found." >&2 - exit 1 - fi -} - -# HTTP Request Smuggling test function -function smuggling_test() { - if { [ ! -f "$called_fn_dir/.smuggling" ] || [ "$DIFF" = true ]; } && [ "$SMUGGLING" = true ]; then - echo "[*] Starting HTTP Request Smuggling checks" - [ ! -s ".tmp/webs_all.txt" ] && cat webs/webs.txt webs/webs_uncommon_ports.txt 2>/dev/null | anew -q .tmp/webs_all.txt - if [ "$DEEP" = true ] || [[ $(cat .tmp/webs_all.txt | wc -l) -le $DEEP_LIMIT ]]; then - cd "$tools/smuggler" || { echo "Failed to cd directory in smuggling @ line $LINENO"; exit 1; } - cat $dir/.tmp/webs_all.txt | python3 smuggler.py -q --no-color 2>/dev/null | anew -q $dir/.tmp/smuggling.txt - cd "$dir" || { echo "Failed to cd to $dir in smuggling @ line $LINENO"; exit 1; } - [ -s ".tmp/smuggling.txt" ] && cat .tmp/smuggling.txt | anew -q vulns/smuggling.txt - echo "[+] Results are saved in vulns/smuggling.txt" - else - echo "[!] 
Skipping HTTP Request Smuggling: Too many webs to test, try with --deep flag" - fi - else - if [ "$SMUGGLING" = false ]; then - echo -e "\n${yellow} smuggling skipped in this mode or defined in reconftw.cfg ${reset}" - else - echo -e "${yellow} smuggling is already processed, to force executing smuggling delete\n $called_fn_dir/.smuggling ${reset}\n" - fi - fi -} - -# Main script execution -while (( "$#" )); do - case "$1" in - -h|--help) - display_help - exit 0 - ;; - *) - echo "Unknown parameter passed: $1" - display_help - exit 1 - ;; - esac -done - -validate_requirements -smuggling_test diff --git a/bin/rftw_vuln_spray b/bin/rftw_vuln_spray deleted file mode 100755 index e087b345..00000000 --- a/bin/rftw_vuln_spray +++ /dev/null @@ -1,71 +0,0 @@ -#!/bin/bash - -# Load environment variables from reconftw.cfg -# Load environment variables -if [ -f "reconftw.cfg" ]; then - source reconftw.cfg -else - echo "Error: reconftw.cfg not found!" - exit 1 -fi - -# Colors -yellow="\033[1;33m" -reset="\033[0m" - -# Help menu function -function display_help() { - echo "Usage: $0 [OPTIONS]" - echo - echo "Perform Password spraying tests on provided inputs." - echo "Options:" - echo " -h, --help Display this help and exit" - echo -} - -# Validate requirements function -function validate_requirements() { - if ! [ -x "$(command -v python3)" ]; then - echo "Error: python3 not found." >&2 - exit 1 - fi - if ! [ -f "$dir/hosts/portscan_active.gnmap" ]; then - echo "Error: portscan_active.gnmap not found." >&2 - exit 1 - fi -} - -# Password spraying test function -function spraying_test() { - if { [ ! -f "$called_fn_dir/.spraying" ] || [ "$DIFF" = true ]; } && [ "$SPRAY" = true ]; then - echo "[*] Starting Password spraying" - cd "$tools/brutespray" || { echo "Failed to cd directory in spraying_test @ line ${LINENO}"; exit 1; } - python3 brutespray.py --file $dir/hosts/portscan_active.gnmap --threads $BRUTESPRAY_THREADS --hosts $BRUTESPRAY_CONCURRENCE -o $dir/vulns/brutespray 2>>"$LOGFILE" >/dev/null - cd "$dir" || { echo "Failed to cd directory in spraying_test @ line ${LINENO}"; exit 1; } - echo "[+] Results are saved in vulns/brutespray folder" - else - if [ "$SPRAY" = false ]; then - echo -e "\n${yellow} spraying_test skipped in this mode or defined in reconftw.cfg ${reset}" - else - echo -e "${yellow} spraying_test is already processed, to force executing spraying_test delete\n $called_fn_dir/.spraying ${reset}\n" - fi - fi -} - -# Main script execution -while (( "$#" )); do - case "$1" in - -h|--help) - display_help - exit 0 - ;; - *) - echo "Unknown parameter passed: $1" - display_help - exit 1 - ;; - esac -done - -validate_requirements -spraying_test diff --git a/bin/rftw_vuln_sqli b/bin/rftw_vuln_sqli deleted file mode 100755 index adb97d68..00000000 --- a/bin/rftw_vuln_sqli +++ /dev/null @@ -1,92 +0,0 @@ -#!/bin/bash - -# Load environment variables from reconftw.cfg -# Load environment variables -if [ -f "reconftw.cfg" ]; then - source reconftw.cfg -else - echo "Error: reconftw.cfg not found!" - exit 1 -fi - -# Colors -yellow="\033[1;33m" -reset="\033[0m" - -# Help menu function -function display_help() { - echo "Usage: $0 [OPTIONS]" - echo - echo "Perform SQLi checks on provided inputs." - echo "Options:" - echo " -d, --deep Perform deep scanning" - echo " -h, --help Display this help and exit" - echo -} - -# Validate requirements function -function validate_requirements() { - if ! [ -x "$(command -v python3)" ] || ! 
[ -x "$(command -v interlace)" ]; then - echo "Error: python3 and/or interlace are not installed." >&2 - exit 1 - fi - if [ "$SQLMAP" = true ] && ! [ -f "$tools/sqlmap/sqlmap.py" ]; then - echo "Error: sqlmap.py not found at specified location." >&2 - exit 1 - fi -} - -# SQLi checks function -function sqli_checks() { - local deep_flag=$1 - - if { [ ! -f "$called_fn_dir/.sqli_checks" ] || [ "$DIFF" = true ]; } && [ "$SQLI" = true ] && [ -s "gf/sqli.txt" ]; then - echo "[*] Starting SQLi checks" - - cat gf/sqli.txt | qsreplace FUZZ | sed '/FUZZ/!d' | anew -q .tmp/tmp_sqli.txt - - if [ "$deep_flag" = true ] || [[ $(cat .tmp/tmp_sqli.txt | wc -l) -le $DEEP_LIMIT ]]; then - if [ "$SQLMAP" = true ]; then - python3 $tools/sqlmap/sqlmap.py -m .tmp/tmp_sqli.txt -b -o --smart --batch --disable-coloring --random-agent --output-dir=vulns/sqlmap 2>>"$LOGFILE" >/dev/null - fi - if [ "$GHAURI" = true ]; then - interlace -tL .tmp/tmp_sqli.txt -threads ${INTERLACE_THREADS} -c "ghauri -u _target_ --batch -H \"${HEADER}\" --force-ssl >> vulns/ghauri_log.txt" 2>>"$LOGFILE" >/dev/null - fi - echo "[+] Results are saved in vulns/sqlmap folder" - else - echo "[!] Skipping SQLi: Too many URLs to test, try with --deep flag" - fi - else - if [ "$SQLI" = false ]; then - echo -e "\n${yellow} sqli_checks skipped in this mode or defined in reconftw.cfg ${reset}" - elif [ ! -s "gf/sqli.txt" ]; then - echo -e "\n${yellow} sqli_checks No URLs potentially vulnerables to SQLi ${reset}\n" - else - echo -e "${yellow} sqli_checks is already processed, to force executing sqli_checks delete\n $called_fn_dir/.sqli_checks ${reset}\n" - fi - fi -} - -# Main script execution -deep_flag=false - -while (( "$#" )); do - case "$1" in - -d|--deep) - deep_flag=true - shift - ;; - -h|--help) - display_help - exit 0 - ;; - *) - echo "Unknown parameter passed: $1" - display_help - exit 1 - ;; - esac -done - -validate_requirements -sqli_checks $deep_flag diff --git a/bin/rftw_vuln_ssrf b/bin/rftw_vuln_ssrf deleted file mode 100755 index 6f70f378..00000000 --- a/bin/rftw_vuln_ssrf +++ /dev/null @@ -1,108 +0,0 @@ -#!/bin/bash - -# Load environment variables from reconftw.cfg -# Load environment variables -if [ -f "reconftw.cfg" ]; then - source reconftw.cfg -else - echo "Error: reconftw.cfg not found!" - exit 1 -fi - -# Colors -yellow="\033[1;33m" -bgreen="\033[1;32m" -reset="\033[0m" - -# Help menu -function display_help() { - echo "Usage: $0 [OPTIONS]" - echo - echo "Perform SSRF checks on provided inputs." - echo "Options:" - echo " -d, --deep Perform deep scanning" - echo " -h, --help Display this help and exit" - echo -} - -# Validate the required tools and files -function validate_requirements() { - if ! [ -x "$(command -v python3)" ]; then - echo "Error: python3 is not installed." >&2 - exit 1 - fi - if [ ! -f "$tools/Oralyzer/oralyzer.py" ]; then - echo "Error: Oralyzer tool is missing." >&2 - exit 1 - fi -} - -# Main SSRF checks function -function ssrf_checks() { - local deep_flag=$1 - - if { [ ! 
-f "$called_fn_dir/.ssrf_checks" ] || [ "$DIFF" = true ]; } && [ "$SSRF_CHECKS" = true ] && [ -s "gf/ssrf.txt" ]; then - echo "[*] Starting SSRF checks" - - if [ -z "$COLLAB_SERVER" ]; then - interactsh-client &>.tmp/ssrf_callback.txt & - sleep 2 - COLLAB_SERVER_FIX="FFUFHASH.$(cat .tmp/ssrf_callback.txt | tail -n1 | cut -c 16-)" - COLLAB_SERVER_URL="http://$COLLAB_SERVER_FIX" - INTERACT=true - else - COLLAB_SERVER_FIX="FFUFHASH.$(echo ${COLLAB_SERVER} | sed -r "s/https?:\/\///")" - INTERACT=false - fi - if [ "$deep_flag" = true ] || [[ $(cat gf/ssrf.txt | wc -l) -le $DEEP_LIMIT ]]; then - cat gf/ssrf.txt | qsreplace ${COLLAB_SERVER_FIX} | anew -q .tmp/tmp_ssrf.txt - cat gf/ssrf.txt | qsreplace ${COLLAB_SERVER_URL} | anew -q .tmp/tmp_ssrf.txt - ffuf -v -H "${HEADER}" -t $FFUF_THREADS -rate $FFUF_RATELIMIT -w .tmp/tmp_ssrf.txt -u FUZZ 2>/dev/null | grep "URL" | sed 's/| URL | //' | anew -q vulns/ssrf_requested_url.txt - ffuf -v -w .tmp/tmp_ssrf.txt:W1,$tools/headers_inject.txt:W2 -H "${HEADER}" -H "W2: ${COLLAB_SERVER_FIX}" -t $FFUF_THREADS -rate $FFUF_RATELIMIT -u W1 2>/dev/null | anew -q vulns/ssrf_requested_headers.txt - ffuf -v -w .tmp/tmp_ssrf.txt:W1,$tools/headers_inject.txt:W2 -H "${HEADER}" -H "W2: ${COLLAB_SERVER_URL}" -t $FFUF_THREADS -rate $FFUF_RATELIMIT -u W1 2>/dev/null | anew -q vulns/ssrf_requested_headers.txt - sleep 5 - if [ -s ".tmp/ssrf_callback.txt" ]; then - cat .tmp/ssrf_callback.txt | tail -n+11 | anew -q vulns/ssrf_callback.txt - NUMOFLINES=$(cat .tmp/ssrf_callback.txt | tail -n+12 | sed '/^$/d' | wc -l) - [ "$INTERACT" = true ] && echo "SSRF: ${NUMOFLINES} callbacks received" - fi - echo "[+] Results are saved in vulns/ssrf_*" - else - echo "[!] Skipping SSRF: Too many URLs to test, try with --deep flag" - fi - pkill -f interactsh-client & - else - if [ "$SSRF_CHECKS" = false ]; then - echo -e "\n${yellow} ssrf_checks skipped in this mode or defined in reconftw.cfg ${reset}" - elif [ ! -s "gf/ssrf.txt" ]; then - echo -e "\n${yellow} ssrf_checks No URLs potentially vulnerables to SSRF ${reset}\n" - else - echo -e "${yellow} ssrf_checks is already processed, to force executing ssrf_checks delete\n $called_fn_dir/.ssrf_checks ${reset}\n" - fi - fi -} - -# Main script execution -deep_flag=false - -while (( "$#" )); do - case "$1" in - -d|--deep) - deep_flag=true - shift - ;; - -h|--help) - display_help - exit 0 - ;; - *) - echo "Unknown parameter passed: $1" - display_help - exit 1 - ;; - esac -done - -validate_requirements -ssrf_checks $deep_flag - diff --git a/bin/rftw_vuln_ssti b/bin/rftw_vuln_ssti deleted file mode 100755 index d57ba1f5..00000000 --- a/bin/rftw_vuln_ssti +++ /dev/null @@ -1,83 +0,0 @@ -#!/bin/bash - -# Load environment variables from reconftw.cfg -# Load environment variables -if [ -f "reconftw.cfg" ]; then - source reconftw.cfg -else - echo "Error: reconftw.cfg not found!" - exit 1 -fi - -# Colors -yellow="\033[1;33m" -reset="\033[0m" - -# Help menu function -function display_help() { - echo "Usage: $0 [OPTIONS]" - echo - echo "Perform SSTI checks on provided inputs." - echo "Options:" - echo " -d, --deep Perform deep scanning" - echo " -h, --help Display this help and exit" - echo -} - -# Validate requirements function -function validate_requirements() { - if ! [ -x "$(command -v ffuf)" ] || ! [ -x "$(command -v interlace)" ]; then - echo "Error: ffuf and/or interlace are not installed." >&2 - exit 1 - fi -} - -# SSTI checks function -function ssti_checks() { - local deep_flag=$1 - - if { [ ! 
-f "$called_fn_dir/.ssti_checks" ] || [ "$DIFF" = true ]; } && [ "$SSTI" = true ] && [ -s "gf/ssti.txt" ]; then - echo "[*] Starting SSTI checks" - - cat gf/ssti.txt | qsreplace FUZZ | sed '/FUZZ/!d' | anew -q .tmp/tmp_ssti.txt - - if [ "$deep_flag" = true ] || [[ $(cat .tmp/tmp_ssti.txt | wc -l) -le $DEEP_LIMIT ]]; then - interlace -tL .tmp/tmp_ssti.txt -threads ${INTERLACE_THREADS} -c "ffuf -v -r -t ${FFUF_THREADS} -rate ${FFUF_RATELIMIT} -H \"${HEADER}\" -w ${ssti_wordlist} -u \"_target_\" -mr \"ssti49\" " 2>/dev/null | grep "URL" | sed 's/| URL | //' | anew -q vulns/ssti.txt - echo "[+] Results are saved in vulns/ssti.txt" - else - echo "[!] Skipping SSTI: Too many URLs to test, try with --deep flag" - fi - else - if [ "$SSTI" = false ]; then - echo -e "\n${yellow} ssti_checks skipped in this mode or defined in reconftw.cfg ${reset}" - elif [ ! -s "gf/ssti.txt" ]; then - echo -e "\n${yellow} ssti_checks No URLs potentially vulnerables to SSTI ${reset}\n" - else - echo -e "${yellow} ssti_checks is already processed, to force executing ssti_checks delete\n $called_fn_dir/.ssti_checks ${reset}\n" - fi - fi -} - -# Main script execution -deep_flag=false - -while (( "$#" )); do - case "$1" in - -d|--deep) - deep_flag=true - shift - ;; - -h|--help) - display_help - exit 0 - ;; - *) - echo "Unknown parameter passed: $1" - display_help - exit 1 - ;; - esac -done - -validate_requirements -ssti_checks $deep_flag diff --git a/bin/rftw_vuln_testssl b/bin/rftw_vuln_testssl deleted file mode 100755 index 63237794..00000000 --- a/bin/rftw_vuln_testssl +++ /dev/null @@ -1,69 +0,0 @@ -#!/bin/bash - -# Load environment variables from reconftw.cfg -# Load environment variables -if [ -f "reconftw.cfg" ]; then - source reconftw.cfg -else - echo "Error: reconftw.cfg not found!" - exit 1 -fi - -# Colors -yellow="\033[1;33m" -reset="\033[0m" - -# Help menu function -function display_help() { - echo "Usage: $0 [OPTIONS]" - echo - echo "Perform SSL tests on provided inputs." - echo "Options:" - echo " -h, --help Display this help and exit" - echo -} - -# Validate requirements function -function validate_requirements() { - if ! [ -x "$(command -v $tools/testssl.sh/testssl.sh)" ]; then - echo "Error: testssl.sh not found at specified location." >&2 - exit 1 - fi - if ! [ -f "hosts/ips.txt" ]; then - echo "Error: hosts/ips.txt not found." >&2 - exit 1 - fi -} - -# SSL test function -function ssl_test() { - if { [ ! 
-f "$called_fn_dir/.ssl_test" ] || [ "$DIFF" = true ]; } && [ "$TEST_SSL" = true ]; then - echo "[*] Starting SSL Test" - $tools/testssl.sh/testssl.sh --quiet --color 0 -U -iL hosts/ips.txt 2>>"$LOGFILE" > vulns/testssl.txt - echo "[+] Results are saved in vulns/testssl.txt" - else - if [ "$TEST_SSL" = false ]; then - echo -e "\n${yellow} ssl_test skipped in this mode or defined in reconftw.cfg ${reset}" - else - echo -e "${yellow} ssl_test is already processed, to force executing ssl_test delete\n $called_fn_dir/.ssl_test ${reset}\n" - fi - fi -} - -# Main script execution -while (( "$#" )); do - case "$1" in - -h|--help) - display_help - exit 0 - ;; - *) - echo "Unknown parameter passed: $1" - display_help - exit 1 - ;; - esac -done - -validate_requirements -ssl_test diff --git a/bin/rftw_vuln_webcache b/bin/rftw_vuln_webcache deleted file mode 100755 index bd517459..00000000 --- a/bin/rftw_vuln_webcache +++ /dev/null @@ -1,77 +0,0 @@ -#!/bin/bash - -# Load environment variables from reconftw.cfg -# Load environment variables -if [ -f "reconftw.cfg" ]; then - source reconftw.cfg -else - echo "Error: reconftw.cfg not found!" - exit 1 -fi - -# Colors -yellow="\033[1;33m" -reset="\033[0m" - -# Help menu function -function display_help() { - echo "Usage: $0 [OPTIONS]" - echo - echo "Perform Web Cache Poisoning checks on provided inputs." - echo "Options:" - echo " -h, --help Display this help and exit" - echo -} - -# Validate requirements function -function validate_requirements() { - if ! [ -x "$(command -v anew)" ]; then - echo "Error: anew not found." >&2 - exit 1 - fi - if ! [ -d "$tools/Web-Cache-Vulnerability-Scanner" ]; then - echo "Error: Web-Cache-Vulnerability-Scanner directory not found." >&2 - exit 1 - fi -} - -# Web Cache Poisoning test function -function webcache_test() { - if { [ ! -f "$called_fn_dir/.webcache" ] || [ "$DIFF" = true ]; } && [ "$WEBCACHE" = true ]; then - echo "[*] Starting Web Cache Poisoning checks" - [ ! -s ".tmp/webs_all.txt" ] && cat webs/webs.txt webs/webs_uncommon_ports.txt 2>/dev/null | anew -q .tmp/webs_all.txt - if [ "$DEEP" = true ] || [[ $(cat .tmp/webs_all.txt | wc -l) -le $DEEP_LIMIT ]]; then - cd "$tools/Web-Cache-Vulnerability-Scanner" || { echo "Failed to cd directory in webcache @ line $LINENO"; exit 1; } - Web-Cache-Vulnerability-Scanner -u file:$dir/.tmp/webs_all.txt -v 0 2>/dev/null | anew -q $dir/.tmp/webcache.txt - cd "$dir" || { echo "Failed to cd to $dir in webcache @ line $LINENO"; exit 1; } - [ -s ".tmp/webcache.txt" ] && cat .tmp/webcache.txt | anew -q vulns/webcache.txt - echo "[+] Results are saved in vulns/webcache.txt" - else - echo "[!] 
Skipping Web Cache Poisoning: Too many webs to test, try with --deep flag" - fi - else - if [ "$WEBCACHE" = false ]; then - echo -e "\n${yellow} webcache skipped in this mode or defined in reconftw.cfg ${reset}" - else - echo -e "${yellow} webcache is already processed, to force executing webcache delete\n $called_fn_dir/.webcache ${reset}\n" - fi - fi -} - -# Main script execution -while (( "$#" )); do - case "$1" in - -h|--help) - display_help - exit 0 - ;; - *) - echo "Unknown parameter passed: $1" - display_help - exit 1 - ;; - esac -done - -validate_requirements -webcache_test diff --git a/bin/rftw_vuln_xss b/bin/rftw_vuln_xss deleted file mode 100755 index 033b5baf..00000000 --- a/bin/rftw_vuln_xss +++ /dev/null @@ -1,120 +0,0 @@ -#!/bin/bash - -# Load environment variables from reconftw.cfg -# Load environment variables -if [ -f "reconftw.cfg" ]; then - source reconftw.cfg -else - echo "Error: reconftw.cfg not found!" - exit 1 -fi - -# Help menu -function display_help() { - echo "Usage: $0 [OPTIONS]" - echo - echo "Perform XSS analysis on provided inputs." - echo "Options:" - echo " -h, --help Display this help and exit" - echo -} - -# Validate the required tools and files -function validate_requirements() { - if ! [ -x "$(command -v dalfox)" ]; then - echo "Error: dalfox is not installed." >&2 - exit 1 - fi - if [ ! -f "gf/xss.txt" ]; then - echo "Error: gf/xss.txt does not exist." >&2 - exit 1 - fi -} - -# Main xss function -function xss_analysis() { - if { [ ! -f "$called_fn_dir/.xss_analysis" ] || [ "$DIFF" = true ]; } && [ "$XSS" = true ] && [ -s "gf/xss.txt" ]; then - echo "[*] Starting XSS Analysis" - - # Check if gf/xss.txt exists and process it - [ -s "gf/xss.txt" ] && cat gf/xss.txt | qsreplace FUZZ | sed '/FUZZ/!d' | Gxss -c 100 -p Xss | qsreplace FUZZ | sed '/FUZZ/!d' | anew -q .tmp/xss_reflected.txt - - # Check if AXIOM is true or not and perform the corresponding operations - if [ ! 
"$AXIOM" = true ]; then - if [ "$DEEP" = true ]; then - if [ -n "$XSS_SERVER" ]; then - [ -s ".tmp/xss_reflected.txt" ] && cat .tmp/xss_reflected.txt | dalfox pipe --silence --no-color --no-spinner --only-poc r --ignore-return 302,404,403 --skip-bav -b ${XSS_SERVER} -w $DALFOX_THREADS 2>>"$LOGFILE" | anew -q vulns/xss.txt - else - printf "${yellow}\n No XSS_SERVER defined, blind xss skipped\n\n" - [ -s ".tmp/xss_reflected.txt" ] && cat .tmp/xss_reflected.txt | dalfox pipe --silence --no-color --no-spinner --only-poc r --ignore-return 302,404,403 --skip-bav -w $DALFOX_THREADS 2>>"$LOGFILE" | anew -q vulns/xss.txt - fi - else - if [[ $(cat .tmp/xss_reflected.txt | wc -l) -le $DEEP_LIMIT ]]; then - if [ -n "$XSS_SERVER" ]; then - cat .tmp/xss_reflected.txt | dalfox pipe --silence --no-color --no-spinner --skip-bav --skip-mining-dom --skip-mining-dict --only-poc r --ignore-return 302,404,403 -b ${XSS_SERVER} -w $DALFOX_THREADS 2>>"$LOGFILE" | anew -q vulns/xss.txt - else - printf "${yellow}\n No XSS_SERVER defined, blind xss skipped\n\n" - cat .tmp/xss_reflected.txt | dalfox pipe --silence --no-color --no-spinner --skip-bav --skip-mining-dom --skip-mining-dict --only-poc r --ignore-return 302,404,403 -w $DALFOX_THREADS 2>>"$LOGFILE" | anew -q vulns/xss.txt - fi - else - printf "${bred} Skipping XSS: Too many URLs to test, try with --deep flag${reset}\n" - fi - fi - else - if [ "$DEEP" = true ]; then - if [ -n "$XSS_SERVER" ]; then - [ -s ".tmp/xss_reflected.txt" ] && axiom-scan .tmp/xss_reflected.txt -m dalfox --skip-bav -b ${XSS_SERVER} -w $DALFOX_THREADS -o vulns/xss.txt $AXIOM_EXTRA_ARGS 2>>"$LOGFILE" >/dev/null - else - printf "${yellow}\n No XSS_SERVER defined, blind xss skipped\n\n" - [ -s ".tmp/xss_reflected.txt" ] && axiom-scan .tmp/xss_reflected.txt -m dalfox --skip-bav -w $DALFOX_THREADS -o vulns/xss.txt $AXIOM_EXTRA_ARGS 2>>"$LOGFILE" >/dev/null - fi - else - if [[ $(cat .tmp/xss_reflected.txt | wc -l) -le $DEEP_LIMIT ]]; then - if [ -n "$XSS_SERVER" ]; then - axiom-scan .tmp/xss_reflected.txt -m dalfox --skip-bav --skip-grepping --skip-mining-all --skip-mining-dict -b ${XSS_SERVER} -w $DALFOX_THREADS -o vulns/xss.txt $AXIOM_EXTRA_ARGS 2>>"$LOGFILE" >/dev/null - else - printf "${yellow}\n No XSS_SERVER defined, blind xss skipped\n\n" - axiom-scan .tmp/xss_reflected.txt -m dalfox --skip-bav --skip-grepping --skip-mining-all --skip-mining-dict -w $DALFOX_THREADS -o vulns/xss.txt $AXIOM_EXTRA_ARGS 2>>"$LOGFILE" >/dev/null - fi - else - printf "${bred} Skipping XSS: Too many URLs to test, try with --deep flag${reset}\n" - fi - fi - fi - echo "[+] Results are saved in vulns/xss.txt" - end_func "Results are saved in vulns/xss.txt" ${FUNCNAME[0]} - else - if [ "$XSS" = false ]; then - echo -e "\n${yellow} xss_analysis skipped in this mode or defined in reconftw.cfg ${reset}" - elif [ ! 
-s "gf/xss.txt" ]; then - echo -e "\n${yellow} xss_analysis: No URLs potentially vulnerable to XSS ${reset}\n" - else - echo -e "${yellow} xss_analysis is already processed, to force executing xss_analysis delete\n $called_fn_dir/.xss_analysis ${reset}\n" - fi - fi -} - -# Main script execution -if [ "$#" -eq 0 ]; then - display_help - exit 0 -fi - -while (( "$#" )); do - case "$1" in - -h|--help) - display_help - exit 0 - ;; - *) - echo "Unknown parameter passed: $1" - display_help - exit 1 - ;; - esac - shift -done - -validate_requirements -xss_analysis - diff --git a/bin/rftw_web_cms b/bin/rftw_web_cms deleted file mode 100755 index df1631a9..00000000 --- a/bin/rftw_web_cms +++ /dev/null @@ -1,86 +0,0 @@ -#!/bin/bash - -# Load environment variables from reconftw.cfg -# Load environment variables -if [ -f "reconftw.cfg" ]; then - source reconftw.cfg -else - echo "Error: reconftw.cfg not found!" - exit 1 -fi - -# Help function -help_menu() { - echo "Usage: $0 [OPTIONS]" - echo "" - echo "CMS Scanner" - echo "" - echo "Options:" - echo " -h, --help Show this help menu" - echo " -f, --force Force the execution even if already processed" -} - -# Start function -start_func() { - echo "Starting $1: $2..." -} - -# End function -end_func() { - echo "$1" - echo "End of $2..." -} - -# Default values -FORCE=false - -# Parse arguments -while [[ "$#" -gt 0 ]]; do - case $1 in - -h|--help) help_menu; exit 0 ;; - -f|--force) FORCE=true ;; - *) echo "Unknown parameter passed: $1"; exit 1 ;; - esac - shift -done - -if { [ ! -f "$called_fn_dir/.cms_scanner" ] || [ "$FORCE" = true ]; } && [ "$CMS_SCANNER" = true ]; then - start_func "cms_scanner" "CMS Scanner" - - mkdir -p $dir/cms && rm -rf $dir/cms/* - anew -q .tmp/webs_all.txt < <(cat webs/webs.txt webs/webs_uncommon_ports.txt 2>/dev/null) - - if [ -s ".tmp/webs_all.txt" ]; then - tr '\n' ',' < .tmp/webs_all.txt > .tmp/cms.txt - timeout -k 1m ${CMSSCAN_TIMEOUT}s python3 $tools/CMSeeK/cmseek.py -l .tmp/cms.txt --batch -r 2>>"$LOGFILE" &>/dev/null - exit_status=$? - if [[ $exit_status -eq 125 ]]; then - echo "TIMEOUT cmseek.py - investigate manually for $dir" >> "$LOGFILE" - end_func "TIMEOUT cmseek.py - investigate manually for $dir" "cms_scanner" - exit 1 - elif [[ $exit_status -ne 0 ]]; then - echo "ERROR cmseek.py - investigate manually for $dir" >> "$LOGFILE" - end_func "ERROR cmseek.py - investigate manually for $dir" "cms_scanner" - exit 1 - fi - while read -r sub; do - sub_out=$(echo "$sub" | sed -e 's|^[^/]*//||' -e 's|/.*$||') - cms_id=$(jq -r 'try .cms_id' "$tools/CMSeeK/Result/${sub_out}/cms.json" 2>/dev/null) - if [ -z "$cms_id" ]; then - rm -rf "$tools/CMSeeK/Result/${sub_out}" - else - mv -f "$tools/CMSeeK/Result/${sub_out}" $dir/cms/ 2>>"$LOGFILE" - fi - done < .tmp/webs_all.txt - - end_func "Results are saved in $domain/cms/*subdomain* folder" "cms_scanner" - else - end_func "No $domain/web/webs.txts file found, cms scanner skipped" "cms_scanner" - fi -else - if [ "$CMS_SCANNER" = false ]; then - printf "\n${yellow} cms_scanner skipped in this mode or defined in reconftw.cfg ${reset}\n" - else - printf "${yellow} cms_scanner is already processed. 
To force executing cms_scanner, delete\n $called_fn_dir/.cms_scanner ${reset}\n\n" - fi -fi diff --git a/bin/rftw_web_fuzz b/bin/rftw_web_fuzz deleted file mode 100755 index 39ddb39a..00000000 --- a/bin/rftw_web_fuzz +++ /dev/null @@ -1,92 +0,0 @@ -#!/bin/bash - -# Load environment variables from reconftw.cfg -# Load environment variables -if [ -f "reconftw.cfg" ]; then - source reconftw.cfg -else - echo "Error: reconftw.cfg not found!" - exit 1 -fi - -# Help function -help_menu() { - echo "Usage: $0 [OPTIONS]" - echo "" - echo "Web Directory Fuzzing" - echo "" - echo "Options:" - echo " -h, --help Show this help menu" - echo " -f, --force Force the execution even if already processed" -} - -# Start function -start_func() { - echo "Starting $1: $2..." -} - -# End function -end_func() { - echo "$1" - echo "End of $2..." -} - -# Default values -FORCE=false - -# Parse arguments -while [[ "$#" -gt 0 ]]; do - case $1 in - -h|--help) help_menu; exit 0 ;; - -f|--force) FORCE=true ;; - *) echo "Unknown parameter passed: $1"; exit 1 ;; - esac - shift -done - -if { [ ! -f "$called_fn_dir/.fuzz" ] || [ "$FORCE" = true ]; } && [ "$FUZZ" = true ]; then - start_func "fuzz" "Web directory fuzzing" - - anew -q .tmp/webs_all.txt < <(cat webs/webs.txt webs/webs_uncommon_ports.txt 2>/dev/null) - - if [ -s ".tmp/webs_all.txt" ]; then - mkdir -p $dir/fuzzing $dir/.tmp/fuzzing - - if [ ! "$AXIOM" = true ]; then - interlace -tL .tmp/webs_all.txt -threads ${INTERLACE_THREADS} -c "ffuf ${FFUF_FLAGS} -t ${FFUF_THREADS} -rate ${FFUF_RATELIMIT} -H \"${HEADER}\" -w ${fuzz_wordlist} -maxtime ${FFUF_MAXTIME} -u _target_/FUZZ -o _output_/_cleantarget_.json" -o $dir/.tmp/fuzzing 2>>"$LOGFILE" >/dev/null - - while read -r sub; do - sub_out=$(echo "$sub" | sed -e 's|^[^/]*//||' -e 's|/.*$||') - if [ -s "$dir/.tmp/fuzzing/${sub_out}.json" ]; then - jq -r 'try .results[] | "\(.status) \(.length) \(.url)"' < "$dir/.tmp/fuzzing/${sub_out}.json" | sort -k1 | anew -q $dir/fuzzing/${sub_out}.txt - fi - done < .tmp/webs_all.txt - - sort -k1 < <(find $dir/fuzzing/ -type f -iname "*.txt" -exec cat {} + 2>>"$LOGFILE") | anew -q $dir/fuzzing/fuzzing_full.txt - else - axiom-exec "mkdir -p /home/op/lists/seclists/Discovery/Web-Content/" &>/dev/null - axiom-exec "wget -q -O - ${fuzzing_remote_list} > /home/op/lists/fuzz_wordlist.txt" &>/dev/null - axiom-exec "wget -q -O - ${fuzzing_remote_list} > /home/op/lists/seclists/Discovery/Web-Content/big.txt" &>/dev/null - axiom-scan .tmp/webs_all.txt -m ffuf_base -H "${HEADER}" $FFUF_FLAGS -s -maxtime $FFUF_MAXTIME -o $dir/.tmp/ffuf-content.json $AXIOM_EXTRA_ARGS 2>>"$LOGFILE" >/dev/null - - while read -r sub; do - sub_out=$(echo "$sub" | sed -e 's|^[^/]*//||' -e 's|/.*$||') - if [ -s "$dir/.tmp/ffuf-content.json" ]; then - grep "$sub" < <(jq -r 'try .results[] | "\(.status) \(.length) \(.url)"' .tmp/ffuf-content.json) | sort -k1 | anew -q fuzzing/${sub_out}.txt - fi - done < .tmp/webs_all.txt - - sort -k1 < <(find $dir/fuzzing/ -type f -iname "*.txt" -exec cat {} + 2>>"$LOGFILE") | anew -q $dir/fuzzing/fuzzing_full.txt - fi - - end_func "Results are saved in $domain/fuzzing/*subdomain*.txt" "fuzz" - else - end_func "No $domain/web/webs.txts file found, fuzzing skipped " "fuzz" - fi -else - if [ "$FUZZ" = false ]; then - printf "\n${yellow} fuzz skipped in this mode or defined in reconftw.cfg ${reset}\n" - else - printf "${yellow} fuzz is already processed. 
To force executing fuzz, delete\n $called_fn_dir/.fuzz ${reset}\n\n" - fi -fi diff --git a/bin/rftw_web_jschecks b/bin/rftw_web_jschecks deleted file mode 100755 index 001cc6a0..00000000 --- a/bin/rftw_web_jschecks +++ /dev/null @@ -1,98 +0,0 @@ -#!/bin/bash - -# Load environment variables from reconftw.cfg -# Load environment variables -if [ -f "reconftw.cfg" ]; then - source reconftw.cfg -else - echo "Error: reconftw.cfg not found!" - exit 1 -fi - -# Help menu -help_menu() { - echo "Usage: $0 [OPTIONS]" - echo "Perform Javascript Checks" - echo "" - echo "Options:" - echo " -d, --domain DOMAIN Set domain (required)" - echo " -h, --help Display this help message and exit" - exit 0 -} - -# Input validation -if [ $# -eq 0 ]; then - help_menu -fi - -DOMAIN="" -while [ "$1" != "" ]; do - case $1 in - -d | --domain ) shift - DOMAIN=$1 - ;; - -h | --help ) help_menu - exit - ;; - * ) echo "Unknown option: $1" - exit 1 - esac - shift -done - -if [ -z "$DOMAIN" ]; then - echo "Error: Domain is required!" - exit 1 -fi - -jschecks() { - local domain=$1 - - if { [ ! -f "$called_fn_dir/.${FUNCNAME[0]}" ] || [ "$DIFF" = true ]; } && [ "$JSCHECKS" = true ]; then - start_func ${FUNCNAME[0]} "Javascript Scan" - if [ -s ".tmp/url_extract_js.txt" ]; then - printf "${yellow} Running : Fetching Urls 1/5${reset}\n" - if [ ! "$AXIOM" = true ]; then - cat .tmp/url_extract_js.txt | subjs -ua "Mozilla/5.0 (X11; Linux x86_64; rv:72.0) Gecko/20100101 Firefox/72.0" -c 40 | grep "$domain" | anew -q .tmp/subjslinks.txt - else - axiom-scan .tmp/url_extract_js.txt -m subjs -o .tmp/subjslinks.txt $AXIOM_EXTRA_ARGS 2>>"$LOGFILE" >/dev/null - fi - [ -s ".tmp/subjslinks.txt" ] && cat .tmp/subjslinks.txt | egrep -iv "\.(eot|jpg|jpeg|gif|css|tif|tiff|png|ttf|otf|woff|woff2|ico|pdf|svg|txt|js)" | anew -q js/nojs_links.txt - [ -s ".tmp/subjslinks.txt" ] && cat .tmp/subjslinks.txt | grep -iE "\.js($|\?)" | anew -q .tmp/url_extract_js.txt - cat .tmp/url_extract_js.txt | python3 $tools/urless/urless/urless.py | anew -q js/url_extract_js.txt 2>>"$LOGFILE" >/dev/null - printf "${yellow} Running : Resolving JS Urls 2/5${reset}\n" - if [ ! "$AXIOM" = true ]; then - [ -s "js/url_extract_js.txt" ] && cat js/url_extract_js.txt | httpx -follow-redirects -random-agent -silent -timeout $HTTPX_TIMEOUT -threads $HTTPX_THREADS -rl $HTTPX_RATELIMIT -status-code -content-type -retries 2 -no-color | grep "[200]" | grep "javascript" | cut -d ' ' -f1 | anew -q js/js_livelinks.txt - else - [ -s "js/url_extract_js.txt" ] && axiom-scan js/url_extract_js.txt -m httpx -follow-host-redirects -H \"${HEADER}\" -status-code -threads $HTTPX_THREADS -rl $HTTPX_RATELIMIT -timeout $HTTPX_TIMEOUT -silent -content-type -retries 2 -no-color -o .tmp/js_livelinks.txt $AXIOM_EXTRA_ARGS 2>>"$LOGFILE" >/dev/null - [ -s ".tmp/js_livelinks.txt" ] && cat .tmp/js_livelinks.txt | anew .tmp/web_full_info.txt | grep "[200]" | grep "javascript" | cut -d ' ' -f1 | anew -q js/js_livelinks.txt - fi - printf "${yellow} Running : Gathering endpoints 3/5${reset}\n" - [ -s "js/js_livelinks.txt" ] && python3 $tools/xnLinkFinder/xnLinkFinder.py -i js/js_livelinks.txt -sf subdomains/subdomains.txt -d $XNLINKFINDER_DEPTH -o .tmp/js_endpoints.txt 2>>"$LOGFILE" >/dev/null - [ -s "parameters.txt" ] && rm -f parameters.txt 2>>"$LOGFILE" >/dev/null - if [ -s ".tmp/js_endpoints.txt" ]; then - sed -i '/^\//!d' .tmp/js_endpoints.txt - cat .tmp/js_endpoints.txt | anew -q js/js_endpoints.txt - fi - printf "${yellow} Running : Gathering secrets 4/5${reset}\n" - if [ ! 
"$AXIOM" = true ]; then - [ -s "js/js_livelinks.txt" ] && cat js/js_livelinks.txt | Mantra -ua ${HEADER} -s | anew -q js/js_secrets.txt - else - [ -s "js/js_livelinks.txt" ] && axiom-scan js/js_livelinks.txt -m mantra -ua ${HEADER} -s -o js/js_secrets.txt $AXIOM_EXTRA_ARGS &>/dev/null - fi - [ -s "js/js_secrets.txt" ] && sed -r "s/\x1B\[([0-9]{1,3}(;[0-9]{1,2};?)?)?[mGK]//g" -i js/js_secrets.txt - printf "${yellow} Running : Building wordlist 5/5${reset}\n" - [ -s "js/js_livelinks.txt" ] && interlace -tL js/js_livelinks.txt -threads ${INTERLACE_THREADS} -c "python3 $tools/getjswords.py '_target_' | anew -q webs/dict_words.txt" 2>>"$LOGFILE" >/dev/null - else - end_func "No JS urls found for $domain, function skipped" ${FUNCNAME[0]} - fi - else - if [ "$JSCHECKS" = false ]; then - printf "\n${yellow} ${FUNCNAME[0]} skipped in this mode or defined in reconftw.cfg ${reset}\n" - else - printf "${yellow} ${FUNCNAME[0]} is already processed, to force executing ${FUNCNAME[0]} delete\n $called_fn_dir/.${FUNCNAME[0]} ${reset}\n\n" - fi - fi -} - -jschecks $DOMAIN diff --git a/bin/rftw_web_nucleicheck b/bin/rftw_web_nucleicheck deleted file mode 100755 index 463bf60a..00000000 --- a/bin/rftw_web_nucleicheck +++ /dev/null @@ -1,82 +0,0 @@ -#!/bin/bash - -# Load environment variables from reconftw.cfg -# Load environment variables -if [ -f "reconftw.cfg" ]; then - source reconftw.cfg -else - echo "Error: reconftw.cfg not found!" - exit 1 -fi - -# Help function -help_menu() { - echo "Usage: $0 [OPTIONS]" - echo "" - echo "Templates Based Web Scanner" - echo "" - echo "Options:" - echo " -h, --help Show this help menu" - echo " -f, --force Force the execution even if already processed" -} - -# Start function -start_func() { - echo "Starting $1: $2..." -} - -# End function -end_func() { - echo "$1" - echo "End of $2..." -} - -# Default values -FORCE=false - -# Parse arguments -while [[ "$#" -gt 0 ]]; do - case $1 in - -h|--help) help_menu; exit 0 ;; - -f|--force) FORCE=true ;; - *) echo "Unknown parameter passed: $1"; exit 1 ;; - esac - shift -done - -if { [ ! -f "$called_fn_dir/.nuclei_check" ] || [ "$FORCE" = true ]; } && [ "$NUCLEICHECK" = true ]; then - start_func "nuclei_check" "Templates based web scanner" - nuclei -update 2>>"$LOGFILE" >/dev/null - mkdir -p nuclei_output - - anew -q .tmp/webs_all.txt < <(cat webs/webs.txt webs/webs_uncommon_ports.txt 2>/dev/null) - anew -q .tmp/webs_subs.txt < <(cat subdomains/subdomains.txt .tmp/webs_all.txt 2>>"$LOGFILE") - - if [ ! 
"$AXIOM" = true ]; then - IFS=',' read -ra severity_array <<< "$NUCLEI_SEVERITY" - for crit in "${severity_array[@]}" - do - printf "${yellow}\n Running : Nuclei $crit ${reset}\n\n" - nuclei $NUCLEI_FLAGS -severity $crit -nh -rl $NUCLEI_RATELIMIT -o nuclei_output/${crit}.txt < .tmp/webs_subs.txt - done - printf "\n\n" - else - if [ -s ".tmp/webs_subs.txt" ]; then - IFS=',' read -ra severity_array <<< "$NUCLEI_SEVERITY" - for crit in "${severity_array[@]}" - do - printf "${yellow}\n Running : Nuclei $crit, check results on nuclei_output folder${reset}\n\n" - axiom-scan .tmp/webs_subs.txt -m nuclei --nuclei-templates ${NUCLEI_TEMPLATES_PATH} -severity ${crit} -nh -rl $NUCLEI_RATELIMIT -o nuclei_output/${crit}.txt $AXIOM_EXTRA_ARGS 2>>"$LOGFILE" >/dev/null - [ -s "nuclei_output/${crit}.txt" ] && cat nuclei_output/${crit}.txt - done - printf "\n\n" - fi - fi - end_func "Results are saved in $domain/nuclei_output folder" "nuclei_check" -else - if [ "$NUCLEICHECK" = false ]; then - printf "\n${yellow} nuclei_check skipped in this mode or defined in reconftw.cfg ${reset}\n" - else - printf "${yellow} nuclei_check is already processed. To force executing nuclei_check, delete\n $called_fn_dir/.nuclei_check ${reset}\n\n" - fi -fi diff --git a/bin/rftw_web_passdict b/bin/rftw_web_passdict deleted file mode 100755 index 1a7a749d..00000000 --- a/bin/rftw_web_passdict +++ /dev/null @@ -1,67 +0,0 @@ -#!/bin/bash - -# Load environment variables from reconftw.cfg -# Load environment variables -if [ -f "reconftw.cfg" ]; then - source reconftw.cfg -else - echo "Error: reconftw.cfg not found!" - exit 1 -fi - -# Help menu -help_menu() { - echo "Usage: $0 [OPTIONS]" - echo "Password Dictionary Generation using pydictor" - echo "" - echo "Options:" - echo " -d, --domain DOMAIN Set domain (required)" - echo " -h, --help Display this help message and exit" - exit 0 -} - -# Input validation -if [ $# -eq 0 ]; then - help_menu -fi - -DOMAIN="" -while [ "$1" != "" ]; do - case $1 in - -d | --domain ) shift - DOMAIN=$1 - ;; - -h | --help ) help_menu - exit - ;; - * ) echo "Unknown option: $1" - exit 1 - esac - shift -done - -if [ -z "$DOMAIN" ]; then - echo "Error: Domain is required!" - exit 1 -fi - -password_dict() { - local domain=$1 - - if { [ ! -f "$called_fn_dir/.${FUNCNAME[0]}" ] || [ "$DIFF" = true ]; } && [ "$PASSWORD_DICT" = true ]; then - start_func ${FUNCNAME[0]} "Password dictionary generation" - - local word=${domain%%.*} - python3 $tools/pydictor/pydictor.py -extend $word --leet 0 1 2 11 21 --len ${PASSWORD_MIN_LENGTH} ${PASSWORD_MAX_LENGTH} -o webs/password_dict.txt 2>>"$LOGFILE" >/dev/null - - end_func "Results are saved in $domain/webs/password_dict.txt" ${FUNCNAME[0]} - else - if [ "$PASSWORD_DICT" = false ]; then - printf "\n${yellow} ${FUNCNAME[0]} skipped in this mode or defined in reconftw.cfg ${reset}\n" - else - printf "${yellow} ${FUNCNAME[0]} is already processed, to force executing ${FUNCNAME[0]} delete\n $called_fn_dir/.${FUNCNAME[0]} ${reset}\n\n" - fi - fi -} - -password_dict $DOMAIN diff --git a/bin/rftw_web_probecommon b/bin/rftw_web_probecommon deleted file mode 100755 index 934f86ab..00000000 --- a/bin/rftw_web_probecommon +++ /dev/null @@ -1,70 +0,0 @@ -#!/bin/bash - -# Load environment variables from reconftw.cfg -# Load environment variables -if [ -f "reconftw.cfg" ]; then - source reconftw.cfg -else - echo "Error: reconftw.cfg not found!" - exit 1 -fi - -help_menu() { - echo "Usage: $0 [DOMAIN] [OPTIONS]" - echo "Web probing tool for specified domain." 
- echo - echo "Options:" - echo " -h, --help Display this help menu and exit" - echo " -f, --force Force the execution even if it was already processed" -} - -validate_inputs() { - if [[ -z "$domain" ]]; then - echo -e "${yellow} No domain provided! ${reset}" - exit 1 - fi - - if [ "$WEBPROBESIMPLE" != true ] && [ "$FORCE_EXECUTION" != true ]; then - echo -e "${yellow} webprobe_simple skipped in this mode or defined in reconftw.cfg ${reset}" - exit 0 - fi -} - -run_webprobe_simple() { - start_subfunc "webprobe_simple" "Running : Http probing $domain" - if [ ! "$AXIOM" = true ]; then - cat subdomains/subdomains.txt | httpx ${HTTPX_FLAGS} -no-color -json -random-agent -threads $HTTPX_THREADS -rl $HTTPX_RATELIMIT -retries 2 -timeout $HTTPX_TIMEOUT -o .tmp/web_full_info_probe.txt 2>>"$LOGFILE" >/dev/null - else - axiom-scan subdomains/subdomains.txt -m httpx ${HTTPX_FLAGS} -no-color -json -random-agent -threads $HTTPX_THREADS -rl $HTTPX_RATELIMIT -retries 2 -timeout $HTTPX_TIMEOUT -o .tmp/web_full_info_probe.txt $AXIOM_EXTRA_ARGS 2>>"$LOGFILE" >/dev/null - fi - - cat .tmp/web_full_info.txt .tmp/web_full_info_probe.txt webs/web_full_info.txt 2>>"$LOGFILE" | jq -s 'try .' | jq 'try unique_by(.input)' | jq 'try .[]' 2>>"$LOGFILE" > webs/web_full_info.txt - [ -s "webs/web_full_info.txt" ] && cat webs/web_full_info.txt | jq -r 'try .url' 2>/dev/null | grep "$domain" | sed "s/*.//" | anew -q .tmp/probed_tmp.txt - [ -s "webs/web_full_info.txt" ] && cat webs/web_full_info.txt | jq -r 'try . |"\(.url) [\(.status_code)] [\(.title)] [\(.webserver)] \(.tech)"' | grep "$domain" | anew -q webs/web_full_info_plain.txt - [ -s "$outOfScope_file" ] && deleteOutScoped $outOfScope_file .tmp/probed_tmp.txt - NUMOFLINES=$(cat .tmp/probed_tmp.txt 2>>"$LOGFILE" | anew webs/webs.txt | sed '/^$/d' | wc -l) - cat webs/webs.txt webs/webs_uncommon_ports.txt 2>/dev/null | anew -q .tmp/webs_all.txt - end_subfunc "${NUMOFLINES} new websites resolved" "webprobe_simple" - - if [ "$PROXY" = true ] && [ -n "$proxy_url" ] && [[ $(cat webs/webs.txt| wc -l) -le $DEEP_LIMIT2 ]]; then - notification "Sending websites to proxy" info - ffuf -mc all -w webs/webs.txt -u FUZZ -replay-proxy $proxy_url 2>>"$LOGFILE" >/dev/null - fi -} - -# Main -FORCE_EXECUTION=false -domain="$1" - -shift -while [[ "$#" -gt 0 ]]; do - case $1 in - -h|--help) help_menu; exit 0 ;; - -f|--force) FORCE_EXECUTION=true; shift ;; - *) echo "Unknown parameter passed: $1"; exit 1 ;; - esac - shift -done - -validate_inputs -run_webprobe_simple \ No newline at end of file diff --git a/bin/rftw_web_probeuncommon b/bin/rftw_web_probeuncommon deleted file mode 100755 index 3e1e936d..00000000 --- a/bin/rftw_web_probeuncommon +++ /dev/null @@ -1,81 +0,0 @@ -#!/bin/bash - -# Load environment variables from reconftw.cfg -# Load environment variables -if [ -f "reconftw.cfg" ]; then - source reconftw.cfg -else - echo "Error: reconftw.cfg not found!" - exit 1 -fi - -help_menu() { - echo "Usage: $0 [DOMAIN] [OPTIONS]" - echo "Web probing tool for non-standard ports for the specified domain." - echo - echo "Options:" - echo " -h, --help Display this help menu and exit" - echo " -f, --force Force the execution even if it was already processed" -} - -validate_inputs() { - if [[ -z "$domain" ]]; then - echo -e "${yellow} No domain provided! 
${reset}" - exit 1 - fi - - if [ "$WEBPROBEFULL" != true ] && [ "$FORCE_EXECUTION" != true ]; then - echo -e "${yellow} webprobe_full skipped in this mode or defined in reconftw.cfg ${reset}" - exit 0 - fi -} - -run_webprobe_full() { - start_func "webprobe_full" "Http probing non-standard ports" - if [ -s "subdomains/subdomains.txt" ]; then - if [ ! "$AXIOM" = true ]; then - cat subdomains/subdomains.txt | httpx -follow-host-redirects -random-agent -status-code -p $UNCOMMON_PORTS_WEB -threads $HTTPX_UNCOMMONPORTS_THREADS -timeout $HTTPX_UNCOMMONPORTS_TIMEOUT -silent -retries 2 -title -web-server -tech-detect -location -no-color -json -o .tmp/web_full_info_uncommon.txt 2>>"$LOGFILE" >/dev/null - else - axiom-scan subdomains/subdomains.txt -m httpx -follow-host-redirects -H \"${HEADER}\" -status-code -p $UNCOMMON_PORTS_WEB -threads $HTTPX_UNCOMMONPORTS_THREADS -timeout $HTTPX_UNCOMMONPORTS_TIMEOUT -silent -retries 2 -title -web-server -tech-detect -location -no-color -json -o .tmp/web_full_info_uncommon.txt $AXIOM_EXTRA_ARGS 2>>"$LOGFILE" >/dev/null - fi - fi - - if [ -s ".tmp/web_full_info_uncommon.txt" ]; then - cat .tmp/web_full_info_uncommon.txt | jq -r 'try .url' 2>/dev/null | grep "$domain" | sed "s/*.//" | anew -q .tmp/probed_uncommon_ports_tmp.txt - cat .tmp/web_full_info_uncommon.txt | jq -r 'try . |"\(.url) [\(.status_code)] [\(.title)] [\(.webserver)] \(.tech)"' | anew -q webs/web_full_info_uncommon_plain.txt - - if [[ $domain =~ ^[0-9]+\.[0-9]+\.[0-9]+\.[0-9] ]]; then - cat .tmp/web_full_info_uncommon.txt 2>>"$LOGFILE" | anew -q webs/web_full_info_uncommon.txt - else - cat .tmp/web_full_info_uncommon.txt 2>>"$LOGFILE" | grep "$domain" | anew -q webs/web_full_info_uncommon.txt - fi - fi - - NUMOFLINES=$(cat .tmp/probed_uncommon_ports_tmp.txt 2>>"$LOGFILE" | anew webs/webs_uncommon_ports.txt | sed '/^$/d' | wc -l) - notification "Uncommon web ports: ${NUMOFLINES} new websites" good - [ -s "webs/webs_uncommon_ports.txt" ] && cat webs/webs_uncommon_ports.txt - cat webs/webs.txt webs/webs_uncommon_ports.txt 2>/dev/null | anew -q .tmp/webs_all.txt - end_func "Results are saved in $domain/webs/webs_uncommon_ports.txt" "webprobe_full" - - if [ "$PROXY" = true ] && [ -n "$proxy_url" ] && [[ $(cat webs/webs_uncommon_ports.txt| wc -l) -le $DEEP_LIMIT2 ]]; then - notification "Sending websites with uncommon ports to proxy" info - ffuf -mc all -w webs/webs_uncommon_ports.txt -u FUZZ -replay-proxy $proxy_url 2>>"$LOGFILE" >/dev/null - fi -} - -# Main -FORCE_EXECUTION=false -domain="$1" - -shift -while [[ "$#" -gt 0 ]]; do - case $1 in - -h|--help) help_menu; exit 0 ;; - -f|--force) FORCE_EXECUTION=true; shift ;; - *) echo "Unknown parameter passed: $1"; exit 1 ;; - esac - shift -done - -validate_inputs -run_webprobe_full diff --git a/bin/rftw_web_roboxtractor b/bin/rftw_web_roboxtractor deleted file mode 100755 index ec5aa1c3..00000000 --- a/bin/rftw_web_roboxtractor +++ /dev/null @@ -1,69 +0,0 @@ -#!/bin/bash - -# Load environment variables from reconftw.cfg -# Load environment variables -if [ -f "reconftw.cfg" ]; then - source reconftw.cfg -else - echo "Error: reconftw.cfg not found!" 
- exit 1 -fi - -# Help menu -help_menu() { - echo "Usage: $0 [OPTIONS]" - echo "Robots Wordlist Generation using roboxtractor" - echo "" - echo "Options:" - echo " -d, --domain DOMAIN Set domain (required)" - echo " -h, --help Display this help message and exit" - exit 0 -} - -# Input validation -if [ $# -eq 0 ]; then - help_menu -fi - -DOMAIN="" -while [ "$1" != "" ]; do - case $1 in - -d | --domain ) shift - DOMAIN=$1 - ;; - -h | --help ) help_menu - exit - ;; - * ) echo "Unknown option: $1" - exit 1 - esac - shift -done - -if [ -z "$DOMAIN" ]; then - echo "Error: Domain is required!" - exit 1 -fi - -wordlist_gen_roboxtractor() { - local domain=$1 - - if { [ ! -f "$called_fn_dir/.${FUNCNAME[0]}" ] || [ "$DIFF" = true ]; } && [ "$ROBOTSWORDLIST" = true ]; then - start_func ${FUNCNAME[0]} "Robots wordlist generation" - - [ ! -s ".tmp/webs_all.txt" ] && cat webs/webs.txt webs/webs_uncommon_ports.txt 2>/dev/null | anew -q .tmp/webs_all.txt - if [ -s ".tmp/webs_all.txt" ]; then - cat .tmp/webs_all.txt | roboxtractor -m 1 -wb 2>/dev/null | anew -q webs/robots_wordlist.txt - fi - - end_func "Results are saved in $domain/webs/robots_wordlist.txt" ${FUNCNAME[0]} - else - if [ "$ROBOTSWORDLIST" = false ]; then - printf "\n${yellow} ${FUNCNAME[0]} skipped in this mode or defined in reconftw.cfg ${reset}\n" - else - printf "${yellow} ${FUNCNAME[0]} is already processed, to force executing ${FUNCNAME[0]} delete\n $called_fn_dir/.${FUNCNAME[0]} ${reset}\n\n" - fi - fi -} - -wordlist_gen_roboxtractor $DOMAIN diff --git a/bin/rftw_web_screenshot b/bin/rftw_web_screenshot deleted file mode 100755 index 81935dfc..00000000 --- a/bin/rftw_web_screenshot +++ /dev/null @@ -1,65 +0,0 @@ -#!/bin/bash - -# Load environment variables from reconftw.cfg -# Load environment variables -if [ -f "reconftw.cfg" ]; then - source reconftw.cfg -else - echo "Error: reconftw.cfg not found!" - exit 1 -fi - -help_menu() { - echo "Usage: $0 [DOMAIN] [OPTIONS]" - echo "Web Screenshots tool for the specified domain." - echo - echo "Options:" - echo " -h, --help Display this help menu and exit" - echo " -f, --force Force the execution even if it was already processed" -} - -validate_inputs() { - if [[ -z "$domain" ]]; then - echo -e "${yellow} No domain provided! ${reset}" - exit 1 - fi - - if [ "$WEBSCREENSHOT" != true ] && [ "$FORCE_EXECUTION" != true ]; then - echo -e "${yellow} screenshot skipped in this mode or defined in reconftw.cfg ${reset}" - exit 0 - fi -} - -run_screenshot() { - start_func "screenshot" "Web Screenshots" - - [ ! -s ".tmp/webs_all.txt" ] && cat webs/webs.txt webs/webs_uncommon_ports.txt 2>/dev/null | anew -q .tmp/webs_all.txt - - num_lines=$(wc -l < .tmp/webs_all.txt) - dynamic_gowitness_timeout=$(expr $num_lines \* $GOWITNESS_TIMEOUT_PER_SITE) - - if [ ! 
"$AXIOM" = true ]; then - [ -s ".tmp/webs_all.txt" ] && timeout -k 1m ${dynamic_gowitness_timeout}s gowitness file -f .tmp/webs_all.txt -t $GOWITNESS_THREADS $GOWITNESS_FLAGS 2>>"$LOGFILE" - else - timeout -k 1m ${dynamic_gowitness_timeout}s axiom-scan .tmp/webs_all.txt -m gowitness -t $GOWITNESS_THREADS $GOWITNESS_FLAGS -o screenshots $AXIOM_EXTRA_ARGS 2>>"$LOGFILE" >/dev/null - fi - - end_func "Results are saved in $domain/screenshots folder" "screenshot" -} - -# Main -FORCE_EXECUTION=false -domain="$1" - -shift -while [[ "$#" -gt 0 ]]; do - case $1 in - -h|--help) help_menu; exit 0 ;; - -f|--force) FORCE_EXECUTION=true; shift ;; - *) echo "Unknown parameter passed: $1"; exit 1 ;; - esac - shift -done - -validate_inputs -run_screenshot diff --git a/bin/rftw_web_urlchecks b/bin/rftw_web_urlchecks deleted file mode 100755 index a83bb38f..00000000 --- a/bin/rftw_web_urlchecks +++ /dev/null @@ -1,133 +0,0 @@ -#!/bin/bash - -# Load environment variables -if [ -f "reconftw.cfg" ]; then - source reconftw.cfg -else - echo "Error: reconftw.cfg not found!" - exit 1 -fi - -# Help menu function -help_menu() { - echo "Usage: urlchecks.sh [OPTIONS]" - echo "" - echo "Options:" - echo " -d, --domain Specify the domain to scan." - echo " -h, --help Display this help menu." - echo "" - echo "Example:" - echo " urlchecks.sh -d example.com" -} - -# Input validation function -validate_input() { - if [ -z "$domain" ]; then - echo "Error: Domain not specified." - help_menu - exit 1 - fi -} - -# URL checks function -urlchecks() { - if { [ ! -f "$called_fn_dir/.urlchecks" ] || [ "$DIFF" = true ]; } && [ "$URL_CHECK" = true ]; then - echo "[+] Starting URL Extraction" - - mkdir -p js - [ ! -s ".tmp/webs_all.txt" ] && cat webs/webs.txt webs/webs_uncommon_ports.txt 2>/dev/null | anew -q .tmp/webs_all.txt - if [ -s ".tmp/webs_all.txt" ]; then - if [ ! "$AXIOM" = true ]; then - if [ "$URL_CHECK_PASSIVE" = true ]; then - if [ "$DEEP" = true ]; then - cat .tmp/webs_all.txt | unfurl -u domains > .tmp/waymore_input.txt - python3 ${tools}/waymore/waymore.py -i .tmp/waymore_input.txt -mode U -f -oU .tmp/url_extract_tmp.txt 2>>"$LOGFILE" >/dev/null - else - cat .tmp/webs_all.txt | gau --threads $GAU_THREADS | anew -q .tmp/url_extract_tmp.txt - fi - if [ -s "${GITHUB_TOKENS}" ]; then - github-endpoints -q -k -d $domain -t ${GITHUB_TOKENS} -o .tmp/github-endpoints.txt 2>>"$LOGFILE" >/dev/null - [ -s ".tmp/github-endpoints.txt" ] && cat .tmp/github-endpoints.txt | anew -q .tmp/url_extract_tmp.txt - fi - fi - diff_webs=$(diff <(sort -u .tmp/probed_tmp.txt 2>>"$LOGFILE") <(sort -u .tmp/webs_all.txt 2>>"$LOGFILE") | wc -l) - if [ $diff_webs != "0" ] || [ ! 
-s ".tmp/katana.txt" ]; then - if [ "$URL_CHECK_ACTIVE" = true ]; then - if [ "$DEEP" = true ]; then - katana -silent -list .tmp/webs_all.txt -jc -kf all -c $KATANA_THREADS -d 3 -fs rdn -o .tmp/katana.txt 2>>"$LOGFILE" >/dev/null - else - katana -silent -list .tmp/webs_all.txt -jc -kf all -c $KATANA_THREADS -d 2 -fs rdn -o .tmp/katana.txt 2>>"$LOGFILE" >/dev/null - fi - fi - fi - else - if [ "$URL_CHECK_PASSIVE" = true ]; then - if [ "$DEEP" = true ]; then - cat .tmp/webs_all.txt | unfurl -u domains > .tmp/waymore_input.txt - axiom-scan .tmp/waymore_input.txt -m waymore -o .tmp/url_extract_tmp.txt $AXIOM_EXTRA_ARGS 2>>"$LOGFILE" >/dev/null - else - axiom-scan .tmp/webs_all.txt -m gau -o .tmp/url_extract_tmp.txt $AXIOM_EXTRA_ARGS 2>>"$LOGFILE" >/dev/null - fi - if [ -s "${GITHUB_TOKENS}" ]; then - github-endpoints -q -k -d $domain -t ${GITHUB_TOKENS} -o .tmp/github-endpoints.txt 2>>"$LOGFILE" >/dev/null - [ -s ".tmp/github-endpoints.txt" ] && cat .tmp/github-endpoints.txt | anew -q .tmp/url_extract_tmp.txt - fi - fi - diff_webs=$(diff <(sort -u .tmp/probed_tmp.txt) <(sort -u .tmp/webs_all.txt) | wc -l) - if [ $diff_webs != "0" ] || [ ! -s ".tmp/katana.txt" ]; then - if [ "$URL_CHECK_ACTIVE" = true ]; then - if [ "$DEEP" = true ]; then - axiom-scan .tmp/webs_all.txt -m katana -jc -kf all -d 3 -fs rdn -fs rdn -o .tmp/katana.txt $AXIOM_EXTRA_ARGS 2>>"$LOGFILE" >/dev/null - else - axiom-scan .tmp/webs_all.txt -m katana -jc -kf all -d 2 -fs rdn -fs rdn -o .tmp/katana.txt $AXIOM_EXTRA_ARGS 2>>"$LOGFILE" >/dev/null - fi - fi - fi - fi - [ -s ".tmp/katana.txt" ] && sed -i '/^.\{2048\}./d' .tmp/katana.txt - [ -s ".tmp/katana.txt" ] && cat .tmp/katana.txt | anew -q .tmp/url_extract_tmp.txt - [ -s ".tmp/url_extract_tmp.txt" ] && cat .tmp/url_extract_tmp.txt | grep "${domain}" | grep -aEi "\.(js)" | anew -q .tmp/url_extract_js.txt - if [ "$DEEP" = true ]; then - [ -s ".tmp/url_extract_js.txt" ] && interlace -tL .tmp/url_extract_js.txt -threads 10 -c "python3 $tools/JSA/jsa.py -f target | anew -q .tmp/url_extract_tmp.txt" &>/dev/null - fi - [ -s ".tmp/url_extract_tmp.txt" ] && cat .tmp/url_extract_tmp.txt | grep "${domain}" | grep "=" | qsreplace -a 2>>"$LOGFILE" | grep -aEiv "\.(eot|jpg|jpeg|gif|css|tif|tiff|png|ttf|otf|woff|woff2|ico|pdf|svg|txt|js)$" | anew -q .tmp/url_extract_tmp2.txt - [ -s ".tmp/url_extract_tmp2.txt" ] && cat .tmp/url_extract_tmp2.txt | python3 $tools/urless/urless/urless.py | anew -q .tmp/url_extract_uddup.txt 2>>"$LOGFILE" >/dev/null - NUMOFLINES=$(cat .tmp/url_extract_uddup.txt 2>>"$LOGFILE" | anew webs/url_extract.txt | sed '/^$/d' | wc -l) - notification "${NUMOFLINES} new urls with params" info - end_func "Results are saved in $domain/webs/url_extract.txt" ${FUNCNAME[0]} - if [ "$PROXY" = true ] && [ -n "$proxy_url" ] && [[ $(cat webs/url_extract.txt | wc -l) -le $DEEP_LIMIT2 ]]; then - notification "Sending urls to proxy" info - ffuf -mc all -w webs/url_extract.txt -u FUZZ -replay-proxy $proxy_url 2>>"$LOGFILE" >/dev/null - fi - fi - else - if [ "$URL_CHECK" = false ]; then - echo "[!] URL checks skipped in this mode or defined in reconftw.cfg" - else - echo "[!] 
URL checks are already processed, to force executing delete $called_fn_dir/.urlchecks" - fi - fi -} - -# Parse command-line arguments -while [ "$#" -gt 0 ]; do - case "$1" in - -d|--domain) - domain="$2" - shift 2 - ;; - -h|--help) - help_menu - exit 0 - ;; - *) - echo "Unknown parameter: $1" - help_menu - exit 1 - ;; - esac -done - -# Validate input and start main function -validate_input -urlchecks diff --git a/bin/rftw_web_urlext b/bin/rftw_web_urlext deleted file mode 100755 index 620dab22..00000000 --- a/bin/rftw_web_urlext +++ /dev/null @@ -1,97 +0,0 @@ -#!/bin/bash - -# Load environment variables -if [ -f "reconftw.cfg" ]; then - source reconftw.cfg -else - echo "Error: reconftw.cfg not found!" - exit 1 -fi - -# Help menu function -help_menu() { - echo "Usage: url_ext.sh [OPTIONS]" - echo "" - echo "Options:" - echo " -i, --input-file Specify the input URL file for extraction." - echo " -o, --output-file Specify the output file for URLs by extension." - echo " -h, --help Display this help menu." - echo "" - echo "Example:" - echo " url_ext.sh -i .tmp/url_extract_tmp.txt -o webs/urls_by_ext.txt" -} - -# Input validation function -validate_input() { - if [ -z "$input_file" ]; then - echo "Error: Input file not specified." - help_menu - exit 1 - elif [ ! -f "$input_file" ]; then - echo "Error: Specified input file does not exist." - exit 1 - fi - - if [ -z "$output_file" ]; then - echo "Error: Output file not specified." - help_menu - exit 1 - fi -} - -# Extract URLs by extension function -url_ext() { - if { [ ! -f "$called_fn_dir/.url_ext" ] || [ "$DIFF" = true ]; } && [ "$URL_EXT" = true ]; then - if [ -s "$input_file" ]; then - echo "[+] Extracting URLs by extension..." - - ext=("7z" "achee" "action" "adr" "apk" "arj" "ascx" "asmx" "asp" "aspx" "axd" "backup" "bak" "bat" "bin" "bkf" "bkp" "bok" "cab" "cer" "cfg" "cfm" "cfml" "cgi" "cnf" "conf" "config" "cpl" "crt" "csr" "csv" "dat" "db" "dbf" "deb" "dmg" "dmp" "doc" "docx" "drv" "email" "eml" "emlx" "env" "exe" "gadget" "gz" "html" "ica" "inf" "ini" "iso" "jar" "java" "jhtml" "json" "jsp" "key" "log" "lst" "mai" "mbox" "mbx" "md" "mdb" "msg" "msi" "nsf" "ods" "oft" "old" "ora" "ost" "pac" "passwd" "pcf" "pdf" "pem" "pgp" "php" "php3" "php4" "php5" "phtm" "phtml" "pkg" "pl" "plist" "pst" "pwd" "py" "rar" "rb" "rdp" "reg" "rpm" "rtf" "sav" "sh" "shtm" "shtml" "skr" "sql" "swf" "sys" "tar" "tar.gz" "tmp" "toast" "tpl" "txt" "url" "vcd" "vcf" "wml" "wpd" "wsdl" "wsf" "xls" "xlsm" "xlsx" "xml" "xsd" "yaml" "yml" "z" "zip") - - # Truncate or create the output file - echo "" > "$output_file" - - for t in "${ext[@]}"; do - NUMOFLINES=$(grep -aEi "\.(${t})($|\/|\?)" "$input_file" | sort -u | sed '/^$/d' | wc -l) - if [[ $NUMOFLINES -gt 0 ]]; then - echo -e "\n############################\n + ${t} + \n############################\n" >> "$output_file" - grep -aEi "\.(${t})($|\/|\?)" "$input_file" >> "$output_file" - fi - done - - echo "[+] Results are saved in $output_file" - fi - else - if [ "$URL_EXT" = false ]; then - echo "[!] URL_Ext skipped in this mode or defined in reconftw.cfg" - else - echo "[!] 
URL_Ext is already processed, to force executing delete $called_fn_dir/.url_ext" - fi - fi -} - -# Parse command-line arguments -while [ "$#" -gt 0 ]; do - case "$1" in - -i|--input-file) - input_file="$2" - shift 2 - ;; - -o|--output-file) - output_file="$2" - shift 2 - ;; - -h|--help) - help_menu - exit 0 - ;; - *) - echo "Unknown parameter: $1" - help_menu - exit 1 - ;; - esac -done - -# Validate input and start the main function -validate_input -url_ext diff --git a/bin/rftw_web_urlgf b/bin/rftw_web_urlgf deleted file mode 100755 index d2f002ed..00000000 --- a/bin/rftw_web_urlgf +++ /dev/null @@ -1,84 +0,0 @@ -#!/bin/bash - -# Load environment variables -if [ -f "reconftw.cfg" ]; then - source reconftw.cfg -else - echo "Error: reconftw.cfg not found!" - exit 1 -fi - -# Help menu function -help_menu() { - echo "Usage: url_gf.sh [OPTIONS]" - echo "" - echo "Options:" - echo " -u, --url-file Specify the URL file to scan." - echo " -h, --help Display this help menu." - echo "" - echo "Example:" - echo " url_gf.sh -u webs/url_extract.txt" -} - -# Input validation function -validate_input() { - if [ -z "$url_file" ]; then - echo "Error: URL file not specified." - help_menu - exit 1 - elif [ ! -f "$url_file" ]; then - echo "Error: Specified URL file does not exist." - exit 1 - fi -} - -# Vulnerable pattern search function -url_gf() { - if { [ ! -f "$called_fn_dir/.url_gf" ] || [ "$DIFF" = true ]; } && [ "$URL_GF" = true ]; then - echo "[+] Starting Vulnerable Pattern Search" - - mkdir -p gf - if [ -s "$url_file" ]; then - gf xss "$url_file" | anew -q gf/xss.txt - gf ssti "$url_file" | anew -q gf/ssti.txt - gf ssrf "$url_file" | anew -q gf/ssrf.txt - gf sqli "$url_file" | anew -q gf/sqli.txt - gf redirect "$url_file" | anew -q gf/redirect.txt - [ -s "gf/ssrf.txt" ] && cat gf/ssrf.txt | anew -q gf/redirect.txt - gf rce "$url_file" | anew -q gf/rce.txt - gf potential "$url_file" | cut -d ':' -f3-5 | anew -q gf/potential.txt - [ -s ".tmp/url_extract_tmp.txt" ] && cat .tmp/url_extract_tmp.txt | grep -aEiv "\.(eot|jpg|jpeg|gif|css|tif|tiff|png|ttf|otf|woff|woff2|ico|pdf|svg|txt|js)$" | unfurl -u format %s://%d%p 2>>"$LOGFILE" | anew -q gf/endpoints.txt - gf lfi "$url_file" | anew -q gf/lfi.txt - fi - echo "[+] Results are saved in $domain/gf folder" - else - if [ "$URL_GF" = false ]; then - echo "[!] URL_Gf skipped in this mode or defined in reconftw.cfg" - else - echo "[!] URL_Gf is already processed, to force executing delete $called_fn_dir/.url_gf" - fi - fi -} - -# Parse command-line arguments -while [ "$#" -gt 0 ]; do - case "$1" in - -u|--url-file) - url_file="$2" - shift 2 - ;; - -h|--help) - help_menu - exit 0 - ;; - *) - echo "Unknown parameter: $1" - help_menu - exit 1 - ;; - esac -done - -# Validate input and start main function -validate_input -url_gf diff --git a/bin/rftw_web_wafcheck b/bin/rftw_web_wafcheck deleted file mode 100755 index 3ae55f00..00000000 --- a/bin/rftw_web_wafcheck +++ /dev/null @@ -1,82 +0,0 @@ -#!/bin/bash - -# Load environment variables -if [ -f "reconftw.cfg" ]; then - source reconftw.cfg -else - echo "Error: reconftw.cfg not found!" - exit 1 -fi - -# Help function -help_menu() { - echo "Usage: $0 [OPTIONS]" - echo "" - echo "Website's WAF Detection Tool" - echo "" - echo "Options:" - echo " -h, --help Show this help menu" - echo " -f, --force Force the execution even if already processed" -} - -# Start function -start_func() { - echo "Starting $1: $2..." -} - -# End function -end_func() { - echo "$1" - echo "End of $2..." 
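
The rftw_web_urlgf script above buckets the extracted URLs into per-vulnerability-class files with gf, which the later vuln_* scripts consume (gf/xss.txt, gf/ssti.txt, and so on). A compact sketch of the same classification step, assuming gf (with its stock patterns) and anew are installed; urls.txt is an illustrative input name:

#!/usr/bin/env bash
# Classify previously extracted URLs into one file per gf pattern.
mkdir -p gf
for pattern in xss ssti ssrf sqli redirect rce lfi; do
    gf "$pattern" urls.txt | anew -q "gf/${pattern}.txt"
done
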
-} - -# Notification function -notification() { - echo "$1" -} - -# Default values -FORCE=false - -# Parse arguments -while [[ "$#" -gt 0 ]]; do - case $1 in - -h|--help) help_menu; exit 0 ;; - -f|--force) FORCE=true ;; - *) echo "Unknown parameter passed: $1"; exit 1 ;; - esac - shift -done - -if { [ ! -f "$called_fn_dir/.waf_checks" ] || [ "$FORCE" = true ]; } && [ "$WAF_DETECTION" = true ]; then - start_func "waf_checks" "Website's WAF detection" - - if [ ! -s ".tmp/webs_all.txt" ]; then - anew -q .tmp/webs_all.txt < <(cat webs/webs.txt webs/webs_uncommon_ports.txt 2>/dev/null) - fi - - if [ -s ".tmp/webs_all.txt" ]; then - if [ ! "$AXIOM" = true ]; then - wafw00f -i .tmp/webs_all.txt -o .tmp/wafs.txt 2>>"$LOGFILE" >/dev/null - else - axiom-scan .tmp/webs_all.txt -m wafw00f -o .tmp/wafs.txt $AXIOM_EXTRA_ARGS 2>>"$LOGFILE" >/dev/null - fi - - if [ -s ".tmp/wafs.txt" ]; then - sed -e 's/^[ \t]*//' -e 's/ \+ /\t/g' -e '/(None)/d' .tmp/wafs.txt | tr -s "\t" ";" > webs/webs_wafs.txt - NUMOFLINES=$(sed '/^$/d' webs/webs_wafs.txt 2>>"$LOGFILE" | wc -l) - notification "${NUMOFLINES} websites protected by waf" info - end_func "Results are saved in $domain/webs/webs_wafs.txt" "waf_checks" - else - end_func "No results found" "waf_checks" - fi - else - end_func "No websites to scan" "waf_checks" - fi -else - if [ "$WAF_DETECTION" = false ]; then - printf "\n${yellow} waf_checks skipped in this mode or defined in reconftw.cfg ${reset}\n" - else - printf "${yellow} waf_checks is already processed. To force executing waf_checks, delete\n $called_fn_dir/.waf_checks ${reset}\n\n" - fi -fi diff --git a/bin/rftw_web_wordlists b/bin/rftw_web_wordlists deleted file mode 100755 index 234a5054..00000000 --- a/bin/rftw_web_wordlists +++ /dev/null @@ -1,73 +0,0 @@ -#!/bin/bash - -# Load environment variables from reconftw.cfg -# Load environment variables -if [ -f "reconftw.cfg" ]; then - source reconftw.cfg -else - echo "Error: reconftw.cfg not found!" - exit 1 -fi - -# Help menu -help_menu() { - echo "Usage: $0 [OPTIONS]" - echo "Wordlist Generation" - echo "" - echo "Options:" - echo " -d, --domain DOMAIN Set domain (required)" - echo " -h, --help Display this help message and exit" - exit 0 -} - -# Input validation -if [ $# -eq 0 ]; then - help_menu -fi - -DOMAIN="" -while [ "$1" != "" ]; do - case $1 in - -d | --domain ) shift - DOMAIN=$1 - ;; - -h | --help ) help_menu - exit - ;; - * ) echo "Unknown option: $1" - exit 1 - esac - shift -done - -if [ -z "$DOMAIN" ]; then - echo "Error: Domain is required!" - exit 1 -fi - -wordlist_gen() { - local domain=$1 - if { [ ! 
-f "$called_fn_dir/.${FUNCNAME[0]}" ] || [ "$DIFF" = true ]; } && [ "$WORDLIST" = true ]; then - start_func ${FUNCNAME[0]} "Wordlist generation" - if [ -s ".tmp/url_extract_tmp.txt" ]; then - cat .tmp/url_extract_tmp.txt | unfurl -u keys 2>>"$LOGFILE" | sed 's/[][]//g' | sed 's/[#]//g' | sed 's/[}{]//g' | anew -q webs/dict_params.txt - cat .tmp/url_extract_tmp.txt | unfurl -u values 2>>"$LOGFILE" | sed 's/[][]//g' | sed 's/[#]//g' | sed 's/[}{]//g' | anew -q webs/dict_values.txt - cat .tmp/url_extract_tmp.txt | tr "[:punct:]" "\n" | anew -q webs/dict_words.txt - fi - [ -s ".tmp/js_endpoints.txt" ] && cat .tmp/js_endpoints.txt | unfurl -u format %s://%d%p 2>>"$LOGFILE" | anew -q webs/all_paths.txt - [ -s ".tmp/url_extract_tmp.txt" ] && cat .tmp/url_extract_tmp.txt | unfurl -u format %s://%d%p 2>>"$LOGFILE" | anew -q webs/all_paths.txt - end_func "Results are saved in $domain/webs/dict_[words|paths].txt" ${FUNCNAME[0]} - if [ "$PROXY" = true ] && [ -n "$proxy_url" ] && [[ $(cat webs/all_paths.txt | wc -l) -le $DEEP_LIMIT2 ]]; then - notification "Sending urls to proxy" info - ffuf -mc all -w webs/all_paths.txt -u FUZZ -replay-proxy $proxy_url 2>>"$LOGFILE" >/dev/null - fi - else - if [ "$WORDLIST" = false ]; then - printf "\n${yellow} ${FUNCNAME[0]} skipped in this mode or defined in reconftw.cfg ${reset}\n" - else - printf "${yellow} ${FUNCNAME[0]} is already processed, to force executing ${FUNCNAME[0]} delete\n $called_fn_dir/.${FUNCNAME[0]} ${reset}\n\n" - fi - fi -} - -wordlist_gen $DOMAIN diff --git a/images/banner.png b/images/banner.png old mode 100755 new mode 100644 diff --git a/images/docker.png b/images/docker.png old mode 100755 new mode 100644 diff --git a/images/reconFTW.gif b/images/reconFTW.gif old mode 100755 new mode 100644 diff --git a/install.sh b/install.sh index 6723704a..9a7090f1 100755 --- a/install.sh +++ b/install.sh @@ -37,7 +37,7 @@ gotools["amass"]="go install -v github.com/owasp-amass/amass/v3/...@master" gotools["ffuf"]="go install -v github.com/ffuf/ffuf/v2@latest" gotools["github-subdomains"]="go install -v github.com/gwen001/github-subdomains@latest" gotools["gitlab-subdomains"]="go install -v github.com/gwen001/gitlab-subdomains@latest" -gotools["nuclei"]="go install -v github.com/projectdiscovery/nuclei/v3/cmd/nuclei@latest" +gotools["nuclei"]="go install -v github.com/projectdiscovery/nuclei/v2/cmd/nuclei@latest" gotools["anew"]="go install -v github.com/tomnomnom/anew@latest" gotools["notify"]="go install -v github.com/projectdiscovery/notify/cmd/notify@latest" gotools["unfurl"]="go install -v github.com/tomnomnom/unfurl@v0.3.0" diff --git a/reconftw.cfg b/reconftw.cfg index fee6c739..40be4fdc 100644 --- a/reconftw.cfg +++ b/reconftw.cfg @@ -6,7 +6,7 @@ tools=~/Tools # Path installed tools SCRIPTPATH="$( cd "$(dirname "$0")" >/dev/null 2>&1 ; pwd -P )" # Get current script's path profile_shell=".$(basename $(echo $SHELL))rc" # Get current shell profile -#reconftw_version=$(git rev-parse --abbrev-ref HEAD)-$(git describe --tags) # Fetch current reconftw version +reconftw_version=$(git rev-parse --abbrev-ref HEAD)-$(git describe --tags) # Fetch current reconftw version generate_resolvers=false # Generate custom resolvers with dnsvalidator update_resolvers=true # Fetch and rewrite resolvers from trickest/resolvers before DNS resolution resolvers_url="https://raw.githubusercontent.com/trickest/resolvers/main/resolvers.txt" diff --git a/reconftw.sh b/reconftw.sh index d62b2c98..779be012 100755 --- a/reconftw.sh +++ b/reconftw.sh @@ -1,18 +1,5 @@ 
#!/usr/bin/env bash -# Welcome to reconFTW main script -# ██▀███ ▓█████ ▄████▄ ▒█████ ███▄ █ █████▒▄▄▄█████▓ █ █░ -# ▓██ ▒ ██▒▓█ ▀ ▒██▀ ▀█ ▒██▒ ██▒ ██ ▀█ █ ▓██ ▒ ▓ ██▒ ▓▒▓█░ █ ░█░ -# ▓██ ░▄█ ▒▒███ ▒▓█ ▄ ▒██░ ██▒▓██ ▀█ ██▒▒████ ░ ▒ ▓██░ ▒░▒█░ █ ░█ -# ▒██▀▀█▄ ▒▓█ ▄ ▒▓▓▄ ▄██▒▒██ ██░▓██▒ ▐▌██▒░▓█▒ ░ ░ ▓██▓ ░ ░█░ █ ░█ -# ░██▓ ▒██▒░▒████▒▒ ▓███▀ ░░ ████▓▒░▒██░ ▓██░░▒█░ ▒██▒ ░ ░░██▒██▓ -# ░ ▒▓ ░▒▓░░░ ▒░ ░░ ░▒ ▒ ░░ ▒░▒░▒░ ░ ▒░ ▒ ▒ ▒ ░ ▒ ░░ ░ ▓░▒ ▒ -# ░▒ ░ ▒░ ░ ░ ░ ░ ▒ ░ ▒ ▒░ ░ ░░ ░ ▒░ ░ ░ ▒ ░ ░ -# ░░ ░ ░ ░ ░ ░ ░ ▒ ░ ░ ░ ░ ░ ░ ░ ░ -# ░ ░ ░░ ░ ░ ░ ░ ░ -# -# by @six2dez - function banner_graber(){ source "${SCRIPTPATH}"/banners.txt randx=$(shuf -i 1-23 -n 1) @@ -30,9 +17,119 @@ function banner(){ ################################################### TOOLS ##################################################### ############################################################################################################### -rftw_util_version +function check_version(){ + timeout 10 git fetch + exit_status=$? + if [ $exit_status -eq 0 ]; then + BRANCH=$(git rev-parse --abbrev-ref HEAD) + HEADHASH=$(git rev-parse HEAD) + UPSTREAMHASH=$(git rev-parse "${BRANCH}"@\{upstream\}) + if [ "$HEADHASH" != "$UPSTREAMHASH" ]; then + printf "\n${yellow} There is a new version, run ./install.sh to get latest version${reset}\n\n" + fi + else + printf "\n${bred} Unable to check updates ${reset}\n\n" + fi +} + +function tools_installed(){ + + printf "\n\n${bgreen}#######################################################################${reset}\n" + printf "${bblue} Checking installed tools ${reset}\n\n" + + allinstalled=true + + [ -n "$GOPATH" ] || { printf "${bred} [*] GOPATH var [NO]${reset}\n"; allinstalled=false;} + [ -n "$GOROOT" ] || { printf "${bred} [*] GOROOT var [NO]${reset}\n"; allinstalled=false;} + [ -n "$PATH" ] || { printf "${bred} [*] PATH var [NO]${reset}\n"; allinstalled=false;} + [ -f "$tools/dorks_hunter/dorks_hunter.py" ] || { printf "${bred} [*] dorks_hunter [NO]${reset}\n"; allinstalled=false;} + [ -f "$tools/brutespray/brutespray.py" ] || { printf "${bred} [*] brutespray [NO]${reset}\n"; allinstalled=false;} + [ -f "$tools/fav-up/favUp.py" ] || { printf "${bred} [*] fav-up [NO]${reset}\n"; allinstalled=false;} + [ -f "$tools/Corsy/corsy.py" ] || { printf "${bred} [*] Corsy [NO]${reset}\n"; allinstalled=false;} + [ -f "$tools/testssl.sh/testssl.sh" ] || { printf "${bred} [*] testssl [NO]${reset}\n"; allinstalled=false;} + [ -f "$tools/CMSeeK/cmseek.py" ] || { printf "${bred} [*] CMSeeK [NO]${reset}\n"; allinstalled=false;} + [ -f "${fuzz_wordlist}" ] || { printf "${bred} [*] OneListForAll [NO]${reset}\n"; allinstalled=false;} + [ -f "${lfi_wordlist}" ] || { printf "${bred} [*] lfi_wordlist [NO]${reset}\n"; allinstalled=false;} + [ -f "${ssti_wordlist}" ] || { printf "${bred} [*] ssti_wordlist [NO]${reset}\n"; allinstalled=false;} + [ -f "${subs_wordlist}" ] || { printf "${bred} [*] subs_wordlist [NO]${reset}\n"; allinstalled=false;} + [ -f "${subs_wordlist_big}" ] || { printf "${bred} [*] subs_wordlist_big [NO]${reset}\n"; allinstalled=false;} + [ -f "${resolvers}" ] || { printf "${bred} [*] resolvers [NO]${reset}\n"; allinstalled=false;} + [ -f "${resolvers_trusted}" ] || { printf "${bred} [*] resolvers_trusted [NO]${reset}\n"; allinstalled=false;} + [ -f "$tools/xnLinkFinder/xnLinkFinder.py" ] || { printf "${bred} [*] xnLinkFinder [NO]${reset}\n"; allinstalled=false;} + [ -f "$tools/waymore/waymore.py" ] || { printf "${bred} [*] waymore [NO]${reset}\n"; allinstalled=false;} + [ -f 
"$tools/commix/commix.py" ] || { printf "${bred} [*] commix [NO]${reset}\n"; allinstalled=false;} + [ -f "$tools/getjswords.py" ] || { printf "${bred} [*] getjswords [NO]${reset}\n"; allinstalled=false;} + [ -f "$tools/JSA/jsa.py" ] || { printf "${bred} [*] JSA [NO]${reset}\n"; allinstalled=false;} + [ -f "$tools/cloud_enum/cloud_enum.py" ] || { printf "${bred} [*] cloud_enum [NO]${reset}\n"; allinstalled=false;} + [ -f "$tools/ultimate-nmap-parser/ultimate-nmap-parser.sh" ] || { printf "${bred} [*] nmap-parse-output [NO]${reset}\n"; allinstalled=false;} + [ -f "$tools/pydictor/pydictor.py" ] || { printf "${bred} [*] pydictor [NO]${reset}\n"; allinstalled=false;} + [ -f "$tools/urless/urless/urless.py" ] || { printf "${bred} [*] urless [NO]${reset}\n"; allinstalled=false;} + [ -f "$tools/smuggler/smuggler.py" ] || { printf "${bred} [*] smuggler [NO]${reset}\n"; allinstalled=false;} + [ -f "$tools/regulator/main.py" ] || { printf "${bred} [*] regulator [NO]${reset}\n"; allinstalled=false;} + which github-endpoints &>/dev/null || { printf "${bred} [*] github-endpoints [NO]${reset}\n"; allinstalled=false;} + which github-subdomains &>/dev/null || { printf "${bred} [*] github-subdomains [NO]${reset}\n"; allinstalled=false;} + which gitlab-subdomains &>/dev/null || { printf "${bred} [*] gitlab-subdomains [NO]${reset}\n"; allinstalled=false;} + which katana &>/dev/null || { printf "${bred} [*] katana [NO]${reset}\n"; allinstalled=false;} + which wafw00f &>/dev/null || { printf "${bred} [*] wafw00f [NO]${reset}\n"; allinstalled=false;} + which dnsvalidator &>/dev/null || { printf "${bred} [*] dnsvalidator [NO]${reset}\n"; allinstalled=false;} + which gowitness &>/dev/null || { printf "${bred} [*] gowitness [NO]${reset}\n"; allinstalled=false;} + which amass &>/dev/null || { printf "${bred} [*] Amass [NO]${reset}\n"; allinstalled=false;} + which dnsx &>/dev/null || { printf "${bred} [*] dnsx [NO]${reset}\n"; allinstalled=false;} + which gotator &>/dev/null || { printf "${bred} [*] gotator [NO]${reset}\n"; allinstalled=false;} + which nuclei &>/dev/null || { printf "${bred} [*] Nuclei [NO]${reset}\n"; allinstalled=false;} + [ -d ${NUCLEI_TEMPLATES_PATH} ] || { printf "${bred} [*] Nuclei templates [NO]${reset}\n"; allinstalled=false;} + [ -d ${tools}/fuzzing-templates ] || { printf "${bred} [*] Fuzzing templates [NO]${reset}\n"; allinstalled=false;} + which gf &>/dev/null || { printf "${bred} [*] Gf [NO]${reset}\n"; allinstalled=false;} + which Gxss &>/dev/null || { printf "${bred} [*] Gxss [NO]${reset}\n"; allinstalled=false;} + which subjs &>/dev/null || { printf "${bred} [*] subjs [NO]${reset}\n"; allinstalled=false;} + which ffuf &>/dev/null || { printf "${bred} [*] ffuf [NO]${reset}\n"; allinstalled=false;} + which massdns &>/dev/null || { printf "${bred} [*] Massdns [NO]${reset}\n"; allinstalled=false;} + which qsreplace &>/dev/null || { printf "${bred} [*] qsreplace [NO]${reset}\n"; allinstalled=false;} + which interlace &>/dev/null || { printf "${bred} [*] interlace [NO]${reset}\n"; allinstalled=false;} + which anew &>/dev/null || { printf "${bred} [*] Anew [NO]${reset}\n"; allinstalled=false;} + which unfurl &>/dev/null || { printf "${bred} [*] unfurl [NO]${reset}\n"; allinstalled=false;} + which crlfuzz &>/dev/null || { printf "${bred} [*] crlfuzz [NO]${reset}\n"; allinstalled=false;} + which httpx &>/dev/null || { printf "${bred} [*] Httpx [NO]${reset}\n${reset}"; allinstalled=false;} + which jq &>/dev/null || { printf "${bred} [*] jq [NO]${reset}\n${reset}"; allinstalled=false;} + which 
notify &>/dev/null || { printf "${bred} [*] notify [NO]${reset}\n${reset}"; allinstalled=false;} + which dalfox &>/dev/null || { printf "${bred} [*] dalfox [NO]${reset}\n${reset}"; allinstalled=false;} + which puredns &>/dev/null || { printf "${bred} [*] puredns [NO]${reset}\n${reset}"; allinstalled=false;} + which emailfinder &>/dev/null || { printf "${bred} [*] emailfinder [NO]${reset}\n"; allinstalled=false;} + which analyticsrelationships &>/dev/null || { printf "${bred} [*] analyticsrelationships [NO]${reset}\n"; allinstalled=false;} + which mapcidr &>/dev/null || { printf "${bred} [*] mapcidr [NO]${reset}\n"; allinstalled=false;} + which ppfuzz &>/dev/null || { printf "${bred} [*] ppfuzz [NO]${reset}\n"; allinstalled=false;} + which cdncheck &>/dev/null || { printf "${bred} [*] cdncheck [NO]${reset}\n"; allinstalled=false;} + which interactsh-client &>/dev/null || { printf "${bred} [*] interactsh-client [NO]${reset}\n"; allinstalled=false;} + which tlsx &>/dev/null || { printf "${bred} [*] tlsx [NO]${reset}\n"; allinstalled=false;} + which smap &>/dev/null || { printf "${bred} [*] smap [NO]${reset}\n"; allinstalled=false;} + which gitdorks_go &>/dev/null || { printf "${bred} [*] gitdorks_go [NO]${reset}\n"; allinstalled=false;} + which ripgen &>/dev/null || { printf "${bred} [*] ripgen [NO]${reset}\n${reset}"; allinstalled=false;} + which dsieve &>/dev/null || { printf "${bred} [*] dsieve [NO]${reset}\n${reset}"; allinstalled=false;} + which inscope &>/dev/null || { printf "${bred} [*] inscope [NO]${reset}\n${reset}"; allinstalled=false;} + which enumerepo &>/dev/null || { printf "${bred} [*] enumerepo [NO]${reset}\n${reset}"; allinstalled=false;} + which Web-Cache-Vulnerability-Scanner &>/dev/null || { printf "${bred} [*] Web-Cache-Vulnerability-Scanner [NO]${reset}\n"; allinstalled=false;} + which subfinder &>/dev/null || { printf "${bred} [*] subfinder [NO]${reset}\n${reset}"; allinstalled=false;} + which byp4xx &>/dev/null || { printf "${bred} [*] byp4xx [NO]${reset}\n${reset}"; allinstalled=false;} + which ghauri &>/dev/null || { printf "${bred} [*] ghauri [NO]${reset}\n${reset}"; allinstalled=false;} + which hakip2host &>/dev/null || { printf "${bred} [*] hakip2host [NO]${reset}\n${reset}"; allinstalled=false;} + which gau &>/dev/null || { printf "${bred} [*] gau [NO]${reset}\n${reset}"; allinstalled=false;} + which crt &>/dev/null || { printf "${bred} [*] crt [NO]${reset}\n${reset}"; allinstalled=false;} + which gitleaks &>/dev/null || { printf "${bred} [*] gitleaks [NO]${reset}\n${reset}"; allinstalled=false;} + which trufflehog &>/dev/null || { printf "${bred} [*] trufflehog [NO]${reset}\n${reset}"; allinstalled=false;} + which s3scanner &>/dev/null || { printf "${bred} [*] s3scanner [NO]${reset}\n${reset}"; allinstalled=false;} + + if [ "${allinstalled}" = true ]; then + printf "${bgreen} Good! All installed! 
${reset}\n\n" + else + printf "\n${yellow} Try running the installer script again ./install.sh" + printf "\n${yellow} If it fails for any reason try to install manually the tools missed" + printf "\n${yellow} Finally remember to set the ${bred}\$tools${yellow} variable at the start of this script" + printf "\n${yellow} If nothing works and the world is gonna end you can always ping me :D ${reset}\n\n" + fi -rftw_util_tools -t $tools + printf "${bblue} Tools check finished\n" + printf "${bgreen}#######################################################################\n${reset}" +} ############################################################################################################### ################################################### OSINT ##################################################### @@ -40,7 +137,7 @@ rftw_util_tools -t $tools function google_dorks(){ if { [ ! -f "$called_fn_dir/.${FUNCNAME[0]}" ] || [ "$DIFF" = true ]; } && [ "$GOOGLE_DORKS" = true ] && [ "$OSINT" = true ]; then - rftw_osint_googledorks -d "$domain" -o osint/dorks.txt || { echo "dorks_hunter command failed"; exit 1; } 2>>"$LOGFILE" >/dev/null 2>&1 + python3 $tools/dorks_hunter/dorks_hunter.py -d "$domain" -o osint/dorks.txt || { echo "dorks_hunter command failed"; exit 1; } end_func "Results are saved in $domain/osint/dorks.txt" "${FUNCNAME[0]}" else if [ "$GOOGLE_DORKS" = false ] || [ "$OSINT" = false ]; then @@ -54,7 +151,15 @@ function google_dorks(){ function github_dorks(){ if { [ ! -f "$called_fn_dir/.${FUNCNAME[0]}" ] || [ "$DIFF" = true ]; } && [ "$GITHUB_DORKS" = true ] && [ "$OSINT" = true ]; then start_func "${FUNCNAME[0]}" "Github Dorks in process" - rftw_osint_gh_dorks -d "$domain" -t "${GITHUB_TOKENS}" -D "$DEEP" | anew -q osint/gitdorks.txt || { echo "gitdorks_go command failed"; exit 1; } 2>>"$LOGFILE" >/dev/null 2>&1 + if [ -s "${GITHUB_TOKENS}" ]; then + if [ "$DEEP" = true ]; then + gitdorks_go -gd $tools/gitdorks_go/Dorks/medium_dorks.txt -nws 20 -target "$domain" -tf "${GITHUB_TOKENS}" -ew 3 | anew -q osint/gitdorks.txt || { echo "gitdorks_go/anew command failed"; exit 1; } + else + gitdorks_go -gd $tools/gitdorks_go/Dorks/smalldorks.txt -nws 20 -target $domain -tf "${GITHUB_TOKENS}" -ew 3 | anew -q osint/gitdorks.txt || { echo "gitdorks_go/anew command failed"; exit 1; } + fi + else + printf "\n${bred} Required file ${GITHUB_TOKENS} not exists or empty${reset}\n" + fi end_func "Results are saved in $domain/osint/gitdorks.txt" "${FUNCNAME[0]}" else if [ "$GITHUB_DORKS" = false ] || [ "$OSINT" = false ]; then @@ -985,7 +1090,7 @@ function cdnprovider(){ if { [ ! -f "$called_fn_dir/.${FUNCNAME[0]}" ] || [ "$DIFF" = true ]; } && [ "$CDN_IP" = true ]; then start_func ${FUNCNAME[0]} "CDN provider check" [ -s "subdomains/subdomains_dnsregs.json" ] && cat subdomains/subdomains_dnsregs.json | jq -r 'try . | .a[]' | grep -aEiv "^(127|10|169\.154|172\.1[6789]|172\.2[0-9]|172\.3[01]|192\.168)\." 
| grep -oE "\b([0-9]{1,3}\.){3}[0-9]{1,3}\b" | sort -u > .tmp/ips_cdn.txt - [ -s ".tmp/ips_cdn.txt" ] && cat .tmp/ips_cdn.txt | rftw_ip_cdnprovider | anew -q $dir/hosts/cdn_providers.txt + [ -s ".tmp/ips_cdn.txt" ] && cat .tmp/ips_cdn.txt | cdncheck -silent -resp -nc | anew -q $dir/hosts/cdn_providers.txt end_func "Results are saved in hosts/cdn_providers.txt" ${FUNCNAME[0]} else if [ "$CDN_IP" = false ]; then From b78c867f58cc9a04e46d8bddbfde6d8cc475153b Mon Sep 17 00:00:00 2001 From: six2dez Date: Wed, 8 Nov 2023 14:15:34 +0100 Subject: [PATCH 04/17] Revert "pip requirements fix" This reverts commit 70a7d8470bbd52b4eeac66dcf4a50e6d7305718e. --- requirements.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/requirements.txt b/requirements.txt index 758b2755..e36da7fc 100644 --- a/requirements.txt +++ b/requirements.txt @@ -34,4 +34,4 @@ tldextract # dorks_hunter tqdm # multiple ujson # multiple urllib3 # multiple -postleaksNg # Tool +postleaksNeg # Tool From 3010409016f2a5935c5f8c47d2c4b91c93c62e19 Mon Sep 17 00:00:00 2001 From: six2dez Date: Wed, 8 Nov 2023 14:16:02 +0100 Subject: [PATCH 05/17] Update requirements.txt --- requirements.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/requirements.txt b/requirements.txt index e36da7fc..758b2755 100644 --- a/requirements.txt +++ b/requirements.txt @@ -34,4 +34,4 @@ tldextract # dorks_hunter tqdm # multiple ujson # multiple urllib3 # multiple -postleaksNeg # Tool +postleaksNg # Tool From 38a77db569a44d0099c7f3488914d3b851fb3387 Mon Sep 17 00:00:00 2001 From: six2dez Date: Fri, 10 Nov 2023 09:14:14 +0100 Subject: [PATCH 06/17] massive format and styling refactor --- reconftw.sh | 2352 +++++++++++++++++++++++++++++---------------------- 1 file changed, 1347 insertions(+), 1005 deletions(-) diff --git a/reconftw.sh b/reconftw.sh index 779be012..36599bab 100755 --- a/reconftw.sh +++ b/reconftw.sh @@ -1,13 +1,13 @@ #!/usr/bin/env bash -function banner_graber(){ +function banner_graber() { source "${SCRIPTPATH}"/banners.txt randx=$(shuf -i 1-23 -n 1) - tmp="banner${randx}" + tmp="banner${randx}" banner_code=${!tmp} echo -e "${banner_code}" } -function banner(){ +function banner() { banner_code=$(banner_graber) printf "\n${bgreen}${banner_code}" printf "\n ${reconftw_version} by @six2dez${reset}\n" @@ -17,14 +17,14 @@ function banner(){ ################################################### TOOLS ##################################################### ############################################################################################################### -function check_version(){ +function check_version() { timeout 10 git fetch exit_status=$? 
- if [ $exit_status -eq 0 ]; then + if [[ "${exit_status}" -eq 0 ]]; then BRANCH=$(git rev-parse --abbrev-ref HEAD) HEADHASH=$(git rev-parse HEAD) UPSTREAMHASH=$(git rev-parse "${BRANCH}"@\{upstream\}) - if [ "$HEADHASH" != "$UPSTREAMHASH" ]; then + if [[ "${HEADHASH}" != "${UPSTREAMHASH}" ]]; then printf "\n${yellow} There is a new version, run ./install.sh to get latest version${reset}\n\n" fi else @@ -32,98 +32,332 @@ function check_version(){ fi } -function tools_installed(){ +function tools_installed() { printf "\n\n${bgreen}#######################################################################${reset}\n" printf "${bblue} Checking installed tools ${reset}\n\n" allinstalled=true - [ -n "$GOPATH" ] || { printf "${bred} [*] GOPATH var [NO]${reset}\n"; allinstalled=false;} - [ -n "$GOROOT" ] || { printf "${bred} [*] GOROOT var [NO]${reset}\n"; allinstalled=false;} - [ -n "$PATH" ] || { printf "${bred} [*] PATH var [NO]${reset}\n"; allinstalled=false;} - [ -f "$tools/dorks_hunter/dorks_hunter.py" ] || { printf "${bred} [*] dorks_hunter [NO]${reset}\n"; allinstalled=false;} - [ -f "$tools/brutespray/brutespray.py" ] || { printf "${bred} [*] brutespray [NO]${reset}\n"; allinstalled=false;} - [ -f "$tools/fav-up/favUp.py" ] || { printf "${bred} [*] fav-up [NO]${reset}\n"; allinstalled=false;} - [ -f "$tools/Corsy/corsy.py" ] || { printf "${bred} [*] Corsy [NO]${reset}\n"; allinstalled=false;} - [ -f "$tools/testssl.sh/testssl.sh" ] || { printf "${bred} [*] testssl [NO]${reset}\n"; allinstalled=false;} - [ -f "$tools/CMSeeK/cmseek.py" ] || { printf "${bred} [*] CMSeeK [NO]${reset}\n"; allinstalled=false;} - [ -f "${fuzz_wordlist}" ] || { printf "${bred} [*] OneListForAll [NO]${reset}\n"; allinstalled=false;} - [ -f "${lfi_wordlist}" ] || { printf "${bred} [*] lfi_wordlist [NO]${reset}\n"; allinstalled=false;} - [ -f "${ssti_wordlist}" ] || { printf "${bred} [*] ssti_wordlist [NO]${reset}\n"; allinstalled=false;} - [ -f "${subs_wordlist}" ] || { printf "${bred} [*] subs_wordlist [NO]${reset}\n"; allinstalled=false;} - [ -f "${subs_wordlist_big}" ] || { printf "${bred} [*] subs_wordlist_big [NO]${reset}\n"; allinstalled=false;} - [ -f "${resolvers}" ] || { printf "${bred} [*] resolvers [NO]${reset}\n"; allinstalled=false;} - [ -f "${resolvers_trusted}" ] || { printf "${bred} [*] resolvers_trusted [NO]${reset}\n"; allinstalled=false;} - [ -f "$tools/xnLinkFinder/xnLinkFinder.py" ] || { printf "${bred} [*] xnLinkFinder [NO]${reset}\n"; allinstalled=false;} - [ -f "$tools/waymore/waymore.py" ] || { printf "${bred} [*] waymore [NO]${reset}\n"; allinstalled=false;} - [ -f "$tools/commix/commix.py" ] || { printf "${bred} [*] commix [NO]${reset}\n"; allinstalled=false;} - [ -f "$tools/getjswords.py" ] || { printf "${bred} [*] getjswords [NO]${reset}\n"; allinstalled=false;} - [ -f "$tools/JSA/jsa.py" ] || { printf "${bred} [*] JSA [NO]${reset}\n"; allinstalled=false;} - [ -f "$tools/cloud_enum/cloud_enum.py" ] || { printf "${bred} [*] cloud_enum [NO]${reset}\n"; allinstalled=false;} - [ -f "$tools/ultimate-nmap-parser/ultimate-nmap-parser.sh" ] || { printf "${bred} [*] nmap-parse-output [NO]${reset}\n"; allinstalled=false;} - [ -f "$tools/pydictor/pydictor.py" ] || { printf "${bred} [*] pydictor [NO]${reset}\n"; allinstalled=false;} - [ -f "$tools/urless/urless/urless.py" ] || { printf "${bred} [*] urless [NO]${reset}\n"; allinstalled=false;} - [ -f "$tools/smuggler/smuggler.py" ] || { printf "${bred} [*] smuggler [NO]${reset}\n"; allinstalled=false;} - [ -f "$tools/regulator/main.py" ] || { printf 
"${bred} [*] regulator [NO]${reset}\n"; allinstalled=false;} - which github-endpoints &>/dev/null || { printf "${bred} [*] github-endpoints [NO]${reset}\n"; allinstalled=false;} - which github-subdomains &>/dev/null || { printf "${bred} [*] github-subdomains [NO]${reset}\n"; allinstalled=false;} - which gitlab-subdomains &>/dev/null || { printf "${bred} [*] gitlab-subdomains [NO]${reset}\n"; allinstalled=false;} - which katana &>/dev/null || { printf "${bred} [*] katana [NO]${reset}\n"; allinstalled=false;} - which wafw00f &>/dev/null || { printf "${bred} [*] wafw00f [NO]${reset}\n"; allinstalled=false;} - which dnsvalidator &>/dev/null || { printf "${bred} [*] dnsvalidator [NO]${reset}\n"; allinstalled=false;} - which gowitness &>/dev/null || { printf "${bred} [*] gowitness [NO]${reset}\n"; allinstalled=false;} - which amass &>/dev/null || { printf "${bred} [*] Amass [NO]${reset}\n"; allinstalled=false;} - which dnsx &>/dev/null || { printf "${bred} [*] dnsx [NO]${reset}\n"; allinstalled=false;} - which gotator &>/dev/null || { printf "${bred} [*] gotator [NO]${reset}\n"; allinstalled=false;} - which nuclei &>/dev/null || { printf "${bred} [*] Nuclei [NO]${reset}\n"; allinstalled=false;} - [ -d ${NUCLEI_TEMPLATES_PATH} ] || { printf "${bred} [*] Nuclei templates [NO]${reset}\n"; allinstalled=false;} - [ -d ${tools}/fuzzing-templates ] || { printf "${bred} [*] Fuzzing templates [NO]${reset}\n"; allinstalled=false;} - which gf &>/dev/null || { printf "${bred} [*] Gf [NO]${reset}\n"; allinstalled=false;} - which Gxss &>/dev/null || { printf "${bred} [*] Gxss [NO]${reset}\n"; allinstalled=false;} - which subjs &>/dev/null || { printf "${bred} [*] subjs [NO]${reset}\n"; allinstalled=false;} - which ffuf &>/dev/null || { printf "${bred} [*] ffuf [NO]${reset}\n"; allinstalled=false;} - which massdns &>/dev/null || { printf "${bred} [*] Massdns [NO]${reset}\n"; allinstalled=false;} - which qsreplace &>/dev/null || { printf "${bred} [*] qsreplace [NO]${reset}\n"; allinstalled=false;} - which interlace &>/dev/null || { printf "${bred} [*] interlace [NO]${reset}\n"; allinstalled=false;} - which anew &>/dev/null || { printf "${bred} [*] Anew [NO]${reset}\n"; allinstalled=false;} - which unfurl &>/dev/null || { printf "${bred} [*] unfurl [NO]${reset}\n"; allinstalled=false;} - which crlfuzz &>/dev/null || { printf "${bred} [*] crlfuzz [NO]${reset}\n"; allinstalled=false;} - which httpx &>/dev/null || { printf "${bred} [*] Httpx [NO]${reset}\n${reset}"; allinstalled=false;} - which jq &>/dev/null || { printf "${bred} [*] jq [NO]${reset}\n${reset}"; allinstalled=false;} - which notify &>/dev/null || { printf "${bred} [*] notify [NO]${reset}\n${reset}"; allinstalled=false;} - which dalfox &>/dev/null || { printf "${bred} [*] dalfox [NO]${reset}\n${reset}"; allinstalled=false;} - which puredns &>/dev/null || { printf "${bred} [*] puredns [NO]${reset}\n${reset}"; allinstalled=false;} - which emailfinder &>/dev/null || { printf "${bred} [*] emailfinder [NO]${reset}\n"; allinstalled=false;} - which analyticsrelationships &>/dev/null || { printf "${bred} [*] analyticsrelationships [NO]${reset}\n"; allinstalled=false;} - which mapcidr &>/dev/null || { printf "${bred} [*] mapcidr [NO]${reset}\n"; allinstalled=false;} - which ppfuzz &>/dev/null || { printf "${bred} [*] ppfuzz [NO]${reset}\n"; allinstalled=false;} - which cdncheck &>/dev/null || { printf "${bred} [*] cdncheck [NO]${reset}\n"; allinstalled=false;} - which interactsh-client &>/dev/null || { printf "${bred} [*] interactsh-client [NO]${reset}\n"; 
allinstalled=false;} - which tlsx &>/dev/null || { printf "${bred} [*] tlsx [NO]${reset}\n"; allinstalled=false;} - which smap &>/dev/null || { printf "${bred} [*] smap [NO]${reset}\n"; allinstalled=false;} - which gitdorks_go &>/dev/null || { printf "${bred} [*] gitdorks_go [NO]${reset}\n"; allinstalled=false;} - which ripgen &>/dev/null || { printf "${bred} [*] ripgen [NO]${reset}\n${reset}"; allinstalled=false;} - which dsieve &>/dev/null || { printf "${bred} [*] dsieve [NO]${reset}\n${reset}"; allinstalled=false;} - which inscope &>/dev/null || { printf "${bred} [*] inscope [NO]${reset}\n${reset}"; allinstalled=false;} - which enumerepo &>/dev/null || { printf "${bred} [*] enumerepo [NO]${reset}\n${reset}"; allinstalled=false;} - which Web-Cache-Vulnerability-Scanner &>/dev/null || { printf "${bred} [*] Web-Cache-Vulnerability-Scanner [NO]${reset}\n"; allinstalled=false;} - which subfinder &>/dev/null || { printf "${bred} [*] subfinder [NO]${reset}\n${reset}"; allinstalled=false;} - which byp4xx &>/dev/null || { printf "${bred} [*] byp4xx [NO]${reset}\n${reset}"; allinstalled=false;} - which ghauri &>/dev/null || { printf "${bred} [*] ghauri [NO]${reset}\n${reset}"; allinstalled=false;} - which hakip2host &>/dev/null || { printf "${bred} [*] hakip2host [NO]${reset}\n${reset}"; allinstalled=false;} - which gau &>/dev/null || { printf "${bred} [*] gau [NO]${reset}\n${reset}"; allinstalled=false;} - which crt &>/dev/null || { printf "${bred} [*] crt [NO]${reset}\n${reset}"; allinstalled=false;} - which gitleaks &>/dev/null || { printf "${bred} [*] gitleaks [NO]${reset}\n${reset}"; allinstalled=false;} - which trufflehog &>/dev/null || { printf "${bred} [*] trufflehog [NO]${reset}\n${reset}"; allinstalled=false;} - which s3scanner &>/dev/null || { printf "${bred} [*] s3scanner [NO]${reset}\n${reset}"; allinstalled=false;} - - if [ "${allinstalled}" = true ]; then + [ -n "$GOPATH" ] || { + printf "${bred} [*] GOPATH var [NO]${reset}\n" + allinstalled=false + } + [ -n "$GOROOT" ] || { + printf "${bred} [*] GOROOT var [NO]${reset}\n" + allinstalled=false + } + [ -n "$PATH" ] || { + printf "${bred} [*] PATH var [NO]${reset}\n" + allinstalled=false + } + [ -f "${tools}/dorks_hunter/dorks_hunter.py" ] || { + printf "${bred} [*] dorks_hunter [NO]${reset}\n" + allinstalled=false + } + [ -f "${tools}/brutespray/brutespray.py" ] || { + printf "${bred} [*] brutespray [NO]${reset}\n" + allinstalled=false + } + [ -f "${tools}/fav-up/favUp.py" ] || { + printf "${bred} [*] fav-up [NO]${reset}\n" + allinstalled=false + } + [ -f "${tools}/Corsy/corsy.py" ] || { + printf "${bred} [*] Corsy [NO]${reset}\n" + allinstalled=false + } + [ -f "${tools}/testssl.sh/testssl.sh" ] || { + printf "${bred} [*] testssl [NO]${reset}\n" + allinstalled=false + } + [ -f "${tools}/CMSeeK/cmseek.py" ] || { + printf "${bred} [*] CMSeeK [NO]${reset}\n" + allinstalled=false + } + [ -f "${fuzz_wordlist}" ] || { + printf "${bred} [*] OneListForAll [NO]${reset}\n" + allinstalled=false + } + [ -f "${lfi_wordlist}" ] || { + printf "${bred} [*] lfi_wordlist [NO]${reset}\n" + allinstalled=false + } + [ -f "${ssti_wordlist}" ] || { + printf "${bred} [*] ssti_wordlist [NO]${reset}\n" + allinstalled=false + } + [ -f "${subs_wordlist}" ] || { + printf "${bred} [*] subs_wordlist [NO]${reset}\n" + allinstalled=false + } + [ -f "${subs_wordlist_big}" ] || { + printf "${bred} [*] subs_wordlist_big [NO]${reset}\n" + allinstalled=false + } + [ -f "${resolvers}" ] || { + printf "${bred} [*] resolvers [NO]${reset}\n" + allinstalled=false + } + [ -f 
"${resolvers_trusted}" ] || { + printf "${bred} [*] resolvers_trusted [NO]${reset}\n" + allinstalled=false + } + [ -f "${tools}/xnLinkFinder/xnLinkFinder.py" ] || { + printf "${bred} [*] xnLinkFinder [NO]${reset}\n" + allinstalled=false + } + [ -f "${tools}/waymore/waymore.py" ] || { + printf "${bred} [*] waymore [NO]${reset}\n" + allinstalled=false + } + [ -f "${tools}/commix/commix.py" ] || { + printf "${bred} [*] commix [NO]${reset}\n" + allinstalled=false + } + [ -f "${tools}/getjswords.py" ] || { + printf "${bred} [*] getjswords [NO]${reset}\n" + allinstalled=false + } + [ -f "${tools}/JSA/jsa.py" ] || { + printf "${bred} [*] JSA [NO]${reset}\n" + allinstalled=false + } + [ -f "${tools}/cloud_enum/cloud_enum.py" ] || { + printf "${bred} [*] cloud_enum [NO]${reset}\n" + allinstalled=false + } + [ -f "${tools}/ultimate-nmap-parser/ultimate-nmap-parser.sh" ] || { + printf "${bred} [*] nmap-parse-output [NO]${reset}\n" + allinstalled=false + } + [ -f "${tools}/pydictor/pydictor.py" ] || { + printf "${bred} [*] pydictor [NO]${reset}\n" + allinstalled=false + } + [ -f "${tools}/urless/urless/urless.py" ] || { + printf "${bred} [*] urless [NO]${reset}\n" + allinstalled=false + } + [ -f "${tools}/smuggler/smuggler.py" ] || { + printf "${bred} [*] smuggler [NO]${reset}\n" + allinstalled=false + } + [ -f "${tools}/regulator/main.py" ] || { + printf "${bred} [*] regulator [NO]${reset}\n" + allinstalled=false + } + command -v github-endpoints &>/dev/null || { + printf "${bred} [*] github-endpoints [NO]${reset}\n" + allinstalled=false + } + command -v github-subdomains &>/dev/null || { + printf "${bred} [*] github-subdomains [NO]${reset}\n" + allinstalled=false + } + command -v gitlab-subdomains &>/dev/null || { + printf "${bred} [*] gitlab-subdomains [NO]${reset}\n" + allinstalled=false + } + command -v katana &>/dev/null || { + printf "${bred} [*] katana [NO]${reset}\n" + allinstalled=false + } + command -v wafw00f &>/dev/null || { + printf "${bred} [*] wafw00f [NO]${reset}\n" + allinstalled=false + } + command -v dnsvalidator &>/dev/null || { + printf "${bred} [*] dnsvalidator [NO]${reset}\n" + allinstalled=false + } + command -v gowitness &>/dev/null || { + printf "${bred} [*] gowitness [NO]${reset}\n" + allinstalled=false + } + command -v amass &>/dev/null || { + printf "${bred} [*] Amass [NO]${reset}\n" + allinstalled=false + } + command -v dnsx &>/dev/null || { + printf "${bred} [*] dnsx [NO]${reset}\n" + allinstalled=false + } + command -v gotator &>/dev/null || { + printf "${bred} [*] gotator [NO]${reset}\n" + allinstalled=false + } + command -v nuclei &>/dev/null || { + printf "${bred} [*] Nuclei [NO]${reset}\n" + allinstalled=false + } + [ -d ${NUCLEI_TEMPLATES_PATH} ] || { + printf "${bred} [*] Nuclei templates [NO]${reset}\n" + allinstalled=false + } + [ -d ${tools}/fuzzing-templates ] || { + printf "${bred} [*] Fuzzing templates [NO]${reset}\n" + allinstalled=false + } + command -v gf &>/dev/null || { + printf "${bred} [*] Gf [NO]${reset}\n" + allinstalled=false + } + command -v Gxss &>/dev/null || { + printf "${bred} [*] Gxss [NO]${reset}\n" + allinstalled=false + } + command -v subjs &>/dev/null || { + printf "${bred} [*] subjs [NO]${reset}\n" + allinstalled=false + } + command -v ffuf &>/dev/null || { + printf "${bred} [*] ffuf [NO]${reset}\n" + allinstalled=false + } + command -v massdns &>/dev/null || { + printf "${bred} [*] Massdns [NO]${reset}\n" + allinstalled=false + } + command -v qsreplace &>/dev/null || { + printf "${bred} [*] qsreplace [NO]${reset}\n" + 
allinstalled=false + } + command -v interlace &>/dev/null || { + printf "${bred} [*] interlace [NO]${reset}\n" + allinstalled=false + } + command -v anew &>/dev/null || { + printf "${bred} [*] Anew [NO]${reset}\n" + allinstalled=false + } + command -v unfurl &>/dev/null || { + printf "${bred} [*] unfurl [NO]${reset}\n" + allinstalled=false + } + command -v crlfuzz &>/dev/null || { + printf "${bred} [*] crlfuzz [NO]${reset}\n" + allinstalled=false + } + command -v httpx &>/dev/null || { + printf "${bred} [*] Httpx [NO]${reset}\n${reset}" + allinstalled=false + } + command -v jq &>/dev/null || { + printf "${bred} [*] jq [NO]${reset}\n${reset}" + allinstalled=false + } + command -v notify &>/dev/null || { + printf "${bred} [*] notify [NO]${reset}\n${reset}" + allinstalled=false + } + command -v dalfox &>/dev/null || { + printf "${bred} [*] dalfox [NO]${reset}\n${reset}" + allinstalled=false + } + command -v puredns &>/dev/null || { + printf "${bred} [*] puredns [NO]${reset}\n${reset}" + allinstalled=false + } + command -v emailfinder &>/dev/null || { + printf "${bred} [*] emailfinder [NO]${reset}\n" + allinstalled=false + } + command -v analyticsrelationships &>/dev/null || { + printf "${bred} [*] analyticsrelationships [NO]${reset}\n" + allinstalled=false + } + command -v mapcidr &>/dev/null || { + printf "${bred} [*] mapcidr [NO]${reset}\n" + allinstalled=false + } + command -v ppfuzz &>/dev/null || { + printf "${bred} [*] ppfuzz [NO]${reset}\n" + allinstalled=false + } + command -v cdncheck &>/dev/null || { + printf "${bred} [*] cdncheck [NO]${reset}\n" + allinstalled=false + } + command -v interactsh-client &>/dev/null || { + printf "${bred} [*] interactsh-client [NO]${reset}\n" + allinstalled=false + } + command -v tlsx &>/dev/null || { + printf "${bred} [*] tlsx [NO]${reset}\n" + allinstalled=false + } + command -v smap &>/dev/null || { + printf "${bred} [*] smap [NO]${reset}\n" + allinstalled=false + } + command -v gitdorks_go &>/dev/null || { + printf "${bred} [*] gitdorks_go [NO]${reset}\n" + allinstalled=false + } + command -v ripgen &>/dev/null || { + printf "${bred} [*] ripgen [NO]${reset}\n${reset}" + allinstalled=false + } + command -v dsieve &>/dev/null || { + printf "${bred} [*] dsieve [NO]${reset}\n${reset}" + allinstalled=false + } + command -v inscope &>/dev/null || { + printf "${bred} [*] inscope [NO]${reset}\n${reset}" + allinstalled=false + } + command -v enumerepo &>/dev/null || { + printf "${bred} [*] enumerepo [NO]${reset}\n${reset}" + allinstalled=false + } + command -v Web-Cache-Vulnerability-Scanner &>/dev/null || { + printf "${bred} [*] Web-Cache-Vulnerability-Scanner [NO]${reset}\n" + allinstalled=false + } + command -v subfinder &>/dev/null || { + printf "${bred} [*] subfinder [NO]${reset}\n${reset}" + allinstalled=false + } + command -v byp4xx &>/dev/null || { + printf "${bred} [*] byp4xx [NO]${reset}\n${reset}" + allinstalled=false + } + command -v ghauri &>/dev/null || { + printf "${bred} [*] ghauri [NO]${reset}\n${reset}" + allinstalled=false + } + command -v hakip2host &>/dev/null || { + printf "${bred} [*] hakip2host [NO]${reset}\n${reset}" + allinstalled=false + } + command -v gau &>/dev/null || { + printf "${bred} [*] gau [NO]${reset}\n${reset}" + allinstalled=false + } + command -v crt &>/dev/null || { + printf "${bred} [*] crt [NO]${reset}\n${reset}" + allinstalled=false + } + command -v gitleaks &>/dev/null || { + printf "${bred} [*] gitleaks [NO]${reset}\n${reset}" + allinstalled=false + } + command -v trufflehog &>/dev/null || { + printf "${bred} 
[*] trufflehog [NO]${reset}\n${reset}" + allinstalled=false + } + command -v s3scanner &>/dev/null || { + printf "${bred} [*] s3scanner [NO]${reset}\n${reset}" + allinstalled=false + } + + if [[ "${allinstalled}" = true ]]; then printf "${bgreen} Good! All installed! ${reset}\n\n" else printf "\n${yellow} Try running the installer script again ./install.sh" printf "\n${yellow} If it fails for any reason try to install manually the tools missed" - printf "\n${yellow} Finally remember to set the ${bred}\$tools${yellow} variable at the start of this script" + printf "\n${yellow} Finally remember to set the ${bred}\${tools}${yellow} variable at the start of this script" printf "\n${yellow} If nothing works and the world is gonna end you can always ping me :D ${reset}\n\n" fi @@ -135,12 +369,15 @@ function tools_installed(){ ################################################### OSINT ##################################################### ############################################################################################################### -function google_dorks(){ - if { [ ! -f "$called_fn_dir/.${FUNCNAME[0]}" ] || [ "$DIFF" = true ]; } && [ "$GOOGLE_DORKS" = true ] && [ "$OSINT" = true ]; then - python3 $tools/dorks_hunter/dorks_hunter.py -d "$domain" -o osint/dorks.txt || { echo "dorks_hunter command failed"; exit 1; } +function google_dorks() { + if { [[ ! -f "$called_fn_dir/.${FUNCNAME[0]}" ]] || [[ "$DIFF" = true ]]; } && [[ "$GOOGLE_DORKS" = true ]] && [[ "$OSINT" = true ]]; then + python3 ${tools}/dorks_hunter/dorks_hunter.py -d "$domain" -o osint/dorks.txt || { + echo "dorks_hunter command failed" + exit 1 + } end_func "Results are saved in $domain/osint/dorks.txt" "${FUNCNAME[0]}" else - if [ "$GOOGLE_DORKS" = false ] || [ "$OSINT" = false ]; then + if [[ "$GOOGLE_DORKS" = false ]] || [[ "$OSINT" = false ]]; then printf "\n${yellow} ${FUNCNAME[0]} skipped in this mode or defined in reconftw.cfg ${reset}\n" else printf "${yellow} ${FUNCNAME[0]} are already processed, to force executing ${FUNCNAME[0]} delete\n $called_fn_dir/.${FUNCNAME[0]} ${reset}\n\n" @@ -148,21 +385,27 @@ function google_dorks(){ fi } -function github_dorks(){ - if { [ ! -f "$called_fn_dir/.${FUNCNAME[0]}" ] || [ "$DIFF" = true ]; } && [ "$GITHUB_DORKS" = true ] && [ "$OSINT" = true ]; then +function github_dorks() { + if { [[ ! 
-f "$called_fn_dir/.${FUNCNAME[0]}" ]] || [[ "$DIFF" = true ]]; } && [[ "$GITHUB_DORKS" = true ]] && [[ "$OSINT" = true ]]; then start_func "${FUNCNAME[0]}" "Github Dorks in process" - if [ -s "${GITHUB_TOKENS}" ]; then - if [ "$DEEP" = true ]; then - gitdorks_go -gd $tools/gitdorks_go/Dorks/medium_dorks.txt -nws 20 -target "$domain" -tf "${GITHUB_TOKENS}" -ew 3 | anew -q osint/gitdorks.txt || { echo "gitdorks_go/anew command failed"; exit 1; } + if [[ -s "${GITHUB_TOKENS}" ]]; then + if [[ "$DEEP" = true ]]; then + gitdorks_go -gd ${tools}/gitdorks_go/Dorks/medium_dorks.txt -nws 20 -target "$domain" -tf "${GITHUB_TOKENS}" -ew 3 | anew -q osint/gitdorks.txt || { + echo "gitdorks_go/anew command failed" + exit 1 + } else - gitdorks_go -gd $tools/gitdorks_go/Dorks/smalldorks.txt -nws 20 -target $domain -tf "${GITHUB_TOKENS}" -ew 3 | anew -q osint/gitdorks.txt || { echo "gitdorks_go/anew command failed"; exit 1; } + gitdorks_go -gd ${tools}/gitdorks_go/Dorks/smalldorks.txt -nws 20 -target $domain -tf "${GITHUB_TOKENS}" -ew 3 | anew -q osint/gitdorks.txt || { + echo "gitdorks_go/anew command failed" + exit 1 + } fi else printf "\n${bred} Required file ${GITHUB_TOKENS} not exists or empty${reset}\n" fi end_func "Results are saved in $domain/osint/gitdorks.txt" "${FUNCNAME[0]}" else - if [ "$GITHUB_DORKS" = false ] || [ "$OSINT" = false ]; then + if [[ "$GITHUB_DORKS" = false ]] || [[ "$OSINT" = false ]]; then printf "\n${yellow} ${FUNCNAME[0]} skipped in this mode or defined in reconftw.cfg ${reset}\n" else printf "${yellow} ${FUNCNAME[0]} is already processed, to force executing ${FUNCNAME[0]} delete\n $called_fn_dir/.${FUNCNAME[0]} ${reset}\n\n" @@ -170,30 +413,30 @@ function github_dorks(){ fi } -function github_repos(){ - if { [ ! -f "$called_fn_dir/.${FUNCNAME[0]}" ] || [ "$DIFF" = true ]; } && [ "$GITHUB_REPOS" = true ] && [ "$OSINT" = true ]; then +function github_repos() { + if { [[ ! 
-f "$called_fn_dir/.${FUNCNAME[0]}" ]] || [[ "$DIFF" = true ]]; } && [[ "$GITHUB_REPOS" = true ]] && [[ "$OSINT" = true ]]; then start_func "${FUNCNAME[0]}" "Github Repos analysis in process" - if [ -s "${GITHUB_TOKENS}" ]; then + if [[ -s "${GITHUB_TOKENS}" ]]; then GH_TOKEN=$(cat ${GITHUB_TOKENS} | head -1) - echo $domain | unfurl format %r > .tmp/company_name.txt + echo $domain | unfurl format %r >.tmp/company_name.txt enumerepo -token-string "${GH_TOKEN}" -usernames .tmp/company_name.txt -o .tmp/company_repos.txt 2>>"$LOGFILE" >/dev/null - [ -s ".tmp/company_repos.txt" ] && jq -r '.[].repos[]|.url' < .tmp/company_repos.txt > .tmp/company_repos_url.txt 2>>"$LOGFILE" + [ -s ".tmp/company_repos.txt" ] && jq -r '.[].repos[]|.url' <.tmp/company_repos.txt >.tmp/company_repos_url.txt 2>>"$LOGFILE" mkdir -p .tmp/github_repos 2>>"$LOGFILE" >>"$LOGFILE" mkdir -p .tmp/github 2>>"$LOGFILE" >>"$LOGFILE" [ -s ".tmp/company_repos_url.txt" ] && interlace -tL .tmp/company_repos_url.txt -threads ${INTERLACE_THREADS} -c "git clone _target_ .tmp/github_repos/_cleantarget_" 2>>"$LOGFILE" >/dev/null 2>&1 - [ -d ".tmp/github/" ] && ls .tmp/github_repos > .tmp/github_repos_folders.txt + [ -d ".tmp/github/" ] && ls .tmp/github_repos >.tmp/github_repos_folders.txt [ -s ".tmp/github_repos_folders.txt" ] && interlace -tL .tmp/github_repos_folders.txt -threads ${INTERLACE_THREADS} -c "gitleaks detect --source .tmp/github_repos/_target_ --no-banner --no-color -r .tmp/github/gh_secret_cleantarget_.json" 2>>"$LOGFILE" >/dev/null [ -s ".tmp/company_repos_url.txt" ] && interlace -tL .tmp/company_repos_url.txt -threads ${INTERLACE_THREADS} -c "trufflehog git _target_ -j 2>&1 | jq -c > _output_/_cleantarget_" -o .tmp/github/ >>"$LOGFILE" 2>&1 - if [ -d ".tmp/github/" ]; then - cat .tmp/github/* 2>/dev/null | jq -c | jq -r > osint/github_company_secrets.json 2>>"$LOGFILE" + if [[ -d ".tmp/github/" ]]; then + cat .tmp/github/* 2>/dev/null | jq -c | jq -r >osint/github_company_secrets.json 2>>"$LOGFILE" fi else printf "\n${bred} Required file ${GITHUB_TOKENS} not exists or empty${reset}\n" fi end_func "Results are saved in $domain/osint/github_company_secrets.json" ${FUNCNAME[0]} else - if [ "$GITHUB_REPOS" = false ] || [ "$OSINT" = false ]; then + if [[ "$GITHUB_REPOS" = false ]] || [[ "$OSINT" = false ]]; then printf "\n${yellow} ${FUNCNAME[0]} skipped in this mode or defined in reconftw.cfg ${reset}\n" else printf "${yellow} ${FUNCNAME[0]} is already processed, to force executing ${FUNCNAME[0]} delete\n $called_fn_dir/.${FUNCNAME[0]} ${reset}\n\n" @@ -201,20 +444,23 @@ function github_repos(){ fi } -function metadata(){ - if { [ ! -f "$called_fn_dir/.${FUNCNAME[0]}" ] || [ "$DIFF" = true ]; } && [ "$METADATA" = true ] && [ "$OSINT" = true ] && ! [[ $domain =~ ^[0-9]+\.[0-9]+\.[0-9]+\.[0-9] ]]; then +function metadata() { + if { [[ ! -f "${called_fn_dir}/.${FUNCNAME[0]}" ]] || [[ "${DIFF}" = true ]]; } && [[ "${METADATA}" = true ]] && [[ "${OSINT}" = true ]] && ! 
[[ ${domain} =~ ^[0-9]+\.[0-9]+\.[0-9]+\.[0-9] ]]; then start_func ${FUNCNAME[0]} "Scanning metadata in public files" - metafinder -d "$domain" -l $METAFINDER_LIMIT -o osint -go -bi -ba &>> "$LOGFILE" || { echo "metafinder command failed"; exit 1; } + metafinder -d "$domain" -l $METAFINDER_LIMIT -o osint -go -bi -ba &>>"$LOGFILE" || { + echo "metafinder command failed" + exit 1 + } mv "osint/${domain}/"*".txt" "osint/" 2>>"$LOGFILE" rm -rf "osint/${domain}" 2>>"$LOGFILE" end_func "Results are saved in $domain/osint/[software/authors/metadata_results].txt" ${FUNCNAME[0]} else - if [ "$METADATA" = false ] || [ "$OSINT" = false ]; then + if [[ "$METADATA" = false ]] || [[ "$OSINT" = false ]]; then printf "\n${yellow} ${FUNCNAME[0]} skipped in this mode or defined in reconftw.cfg ${reset}\n" elif [[ $domain =~ ^[0-9]+\.[0-9]+\.[0-9]+\.[0-9] ]]; then return else - if [ "$METADATA" = false ] || [ "$OSINT" = false ]; then + if [[ "$METADATA" = false ]] || [[ "$OSINT" = false ]]; then printf "\n${yellow} ${FUNCNAME[0]} skipped in this mode or defined in reconftw.cfg ${reset}\n" else printf "${yellow} ${FUNCNAME[0]} is already processed, to force executing ${FUNCNAME[0]} delete\n $called_fn_dir/.${FUNCNAME[0]} ${reset}\n\n" @@ -223,20 +469,23 @@ function metadata(){ fi } -function postleaks(){ - if { [ ! -f "$called_fn_dir/.${FUNCNAME[0]}" ] || [ "$DIFF" = true ]; } && [ "$POSTMAN_LEAKS" = true ] && [ "$OSINT" = true ] && ! [[ $domain =~ ^[0-9]+\.[0-9]+\.[0-9]+\.[0-9] ]]; then +function postleaks() { + if { [[ ! -f "$called_fn_dir/.${FUNCNAME[0]}" ]] || [[ "$DIFF" = true ]]; } && [[ "$POSTMAN_LEAKS" = true ]] && [[ "$OSINT" = true ]] && ! [[ $domain =~ ^[0-9]+\.[0-9]+\.[0-9]+\.[0-9] ]]; then start_func ${FUNCNAME[0]} "Scanning for leaks in postman public directory" - postleaksNg -k "$domain" > .tmp/postleaks.txt || { echo "postleaksNg command failed"; exit 1; } + postleaksNg -k "$domain" >.tmp/postleaks.txt || { + echo "postleaksNg command failed" + exit 1 + } end_func "Results are saved in $domain/osint/[software/authors/metadata_results].txt" ${FUNCNAME[0]} else - if [ "$POSTMAN_LEAKS" = false ] || [ "$OSINT" = false ]; then + if [[ "$POSTMAN_LEAKS" = false ]] || [[ "$OSINT" = false ]]; then printf "\n${yellow} ${FUNCNAME[0]} skipped in this mode or defined in reconftw.cfg ${reset}\n" elif [[ $domain =~ ^[0-9]+\.[0-9]+\.[0-9]+\.[0-9] ]]; then return else - if [ "$POSTMAN_LEAKS" = false ] || [ "$OSINT" = false ]; then + if [[ "$POSTMAN_LEAKS" = false ]] || [[ "$OSINT" = false ]]; then printf "\n${yellow} ${FUNCNAME[0]} skipped in this mode or defined in reconftw.cfg ${reset}\n" else printf "${yellow} ${FUNCNAME[0]} is already processed, to force executing ${FUNCNAME[0]} delete\n $called_fn_dir/.${FUNCNAME[0]} ${reset}\n\n" @@ -245,20 +494,23 @@ function postleaks(){ fi } -function emails(){ - if { [ ! -f "$called_fn_dir/.${FUNCNAME[0]}" ] || [ "$DIFF" = true ]; } && [ "$EMAILS" = true ] && [ "$OSINT" = true ] && ! [[ $domain =~ ^[0-9]+\.[0-9]+\.[0-9]+\.[0-9] ]]; then +function emails() { + if { [[ ! -f "$called_fn_dir/.${FUNCNAME[0]}" ]] || [[ "$DIFF" = true ]]; } && [[ "$EMAILS" = true ]] && [[ "$OSINT" = true ]] && ! 
[[ $domain =~ ^[0-9]+\.[0-9]+\.[0-9]+\.[0-9] ]]; then start_func ${FUNCNAME[0]} "Searching emails/users/passwords leaks" - emailfinder -d $domain 2>>"$LOGFILE" | anew -q .tmp/emailfinder.txt || { echo "emailfinder command failed"; exit 1; } + emailfinder -d $domain 2>>"$LOGFILE" | anew -q .tmp/emailfinder.txt || { + echo "emailfinder command failed" + exit 1 + } [ -s ".tmp/emailfinder.txt" ] && cat .tmp/emailfinder.txt | grep "@" | grep -iv "|_" | anew -q osint/emails.txt end_func "Results are saved in $domain/osint/emails.txt" ${FUNCNAME[0]} else - if [ "$EMAILS" = false ] || [ "$OSINT" = false ]; then + if [[ "$EMAILS" = false ]] || [[ "$OSINT" = false ]]; then printf "\n${yellow} ${FUNCNAME[0]} skipped in this mode or defined in reconftw.cfg ${reset}\n" elif [[ $domain =~ ^[0-9]+\.[0-9]+\.[0-9]+\.[0-9] ]]; then return else - if [ "$EMAILS" = false ] || [ "$OSINT" = false ]; then + if [[ "$EMAILS" = false ]] || [[ "$OSINT" = false ]]; then printf "\n${yellow} ${FUNCNAME[0]} skipped in this mode or defined in reconftw.cfg ${reset}\n" else printf "${yellow} ${FUNCNAME[0]} is already processed, to force executing ${FUNCNAME[0]} delete\n $called_fn_dir/.${FUNCNAME[0]} ${reset}\n\n" @@ -267,24 +519,24 @@ function emails(){ fi } -function domain_info(){ - if { [ ! -f "$called_fn_dir/.${FUNCNAME[0]}" ] || [ "$DIFF" = true ]; } && [ "$DOMAIN_INFO" = true ] && [ "$OSINT" = true ] && ! [[ $domain =~ ^[0-9]+\.[0-9]+\.[0-9]+\.[0-9] ]]; then +function domain_info() { + if { [[ ! -f "$called_fn_dir/.${FUNCNAME[0]}" ]] || [[ "$DIFF" = true ]]; } && [[ "$DOMAIN_INFO" = true ]] && [[ "$OSINT" = true ]] && ! [[ $domain =~ ^[0-9]+\.[0-9]+\.[0-9]+\.[0-9] ]]; then start_func ${FUNCNAME[0]} "Searching domain info (whois, registrant name/email domains)" - whois -H $domain > osint/domain_info_general.txt || { echo "whois command failed"; exit 1; } - if [ "$DEEP" = true ] || [ "$REVERSE_WHOIS" = true ]; then - timeout -k 1m ${AMASS_INTEL_TIMEOUT}m amass intel -d ${domain} -whois -timeout $AMASS_INTEL_TIMEOUT -o osint/domain_info_reverse_whois.txt 2>>"$LOGFILE" &>/dev/null + whois -H $domain >osint/domain_info_general.txt || { echo "whois command failed"; } + if [[ "$DEEP" = true ]] || [[ "$REVERSE_WHOIS" = true ]]; then + timeout -k 1m ${AMASS_INTEL_TIMEOUT}m amass intel -d ${domain} -whois -timeout $AMASS_INTEL_TIMEOUT -o osint/domain_info_reverse_whois.txt 2>>"$LOGFILE" >> /dev/null fi - - curl -s "https://aadinternals.azurewebsites.net/api/tenantinfo?domainName=${domain}" -H "Origin: https://aadinternals.com" | jq -r .domains[].name > osint/azure_tenant_domains.txt + + curl -s "https://aadinternals.azurewebsites.net/api/tenantinfo?domainName=${domain}" -H "Origin: https://aadinternals.com" | jq -r .domains[].name >osint/azure_tenant_domains.txt end_func "Results are saved in $domain/osint/domain_info_[general/name/email/ip].txt" ${FUNCNAME[0]} else - if [ "$DOMAIN_INFO" = false ] || [ "$OSINT" = false ]; then + if [[ "$DOMAIN_INFO" = false ]] || [[ "$OSINT" = false ]]; then printf "\n${yellow} ${FUNCNAME[0]} skipped in this mode or defined in reconftw.cfg ${reset}\n" elif [[ $domain =~ ^[0-9]+\.[0-9]+\.[0-9]+\.[0-9] ]]; then return else - if [ "$DOMAIN_INFO" = false ] || [ "$OSINT" = false ]; then + if [[ "$DOMAIN_INFO" = false ]] || [[ "$OSINT" = false ]]; then printf "\n${yellow} ${FUNCNAME[0]} skipped in this mode or defined in reconftw.cfg ${reset}\n" else printf "${yellow} ${FUNCNAME[0]} is already processed, to force executing ${FUNCNAME[0]} delete\n $called_fn_dir/.${FUNCNAME[0]} ${reset}\n\n" @@ 
-293,10 +545,10 @@ function domain_info(){
 fi
 }
-function ip_info(){
- if { [ ! -f "$called_fn_dir/.${FUNCNAME[0]}" ] || [ "$DIFF" = true ]; } && [ "$IP_INFO" = true ] && [ "$OSINT" = true ] && [[ $domain =~ ^[0-9]+\.[0-9]+\.[0-9]+\.[0-9] ]]; then
+function ip_info() {
+ if { [[ ! -f "$called_fn_dir/.${FUNCNAME[0]}" ]] || [[ "$DIFF" = true ]]; } && [[ "$IP_INFO" = true ]] && [[ "$OSINT" = true ]] && [[ $domain =~ ^[0-9]+\.[0-9]+\.[0-9]+\.[0-9] ]]; then
 start_func ${FUNCNAME[0]} "Searching ip info"
- if [ -n "$WHOISXML_API" ]; then
+ if [[ -n "$WHOISXML_API" ]]; then
 curl "https://reverse-ip.whoisxmlapi.com/api/v1?apiKey=${WHOISXML_API}&ip=${domain}" 2>/dev/null | jq -r '.result[].name' 2>>"$LOGFILE" | sed -e "s/$/ ${domain}/" | anew -q osint/ip_${domain}_relations.txt
 curl "https://www.whoisxmlapi.com/whoisserver/WhoisService?apiKey=${WHOISXML_API}&domainName=${domain}&outputFormat=json&da=2&registryRawText=1&registrarRawText=1&ignoreRawTexts=1" 2>/dev/null | jq 2>>"$LOGFILE" | anew -q osint/ip_${domain}_whois.txt
 curl "https://ip-geolocation.whoisxmlapi.com/api/v1?apiKey=${WHOISXML_API}&ipAddress=${domain}" 2>/dev/null | jq -r '.ip,.location' 2>>"$LOGFILE" | anew -q osint/ip_${domain}_location.txt
@@ -305,12 +557,12 @@ function ip_info(){
 printf "\n${yellow} No WHOISXML_API var defined, skipping function ${reset}\n"
 fi
 else
- if [ "$IP_INFO" = false ] || [ "$OSINT" = false ]; then
+ if [[ "$IP_INFO" = false ]] || [[ "$OSINT" = false ]]; then
 printf "\n${yellow} ${FUNCNAME[0]} skipped in this mode or defined in reconftw.cfg ${reset}\n"
 elif [[ ! $domain =~ ^[0-9]+\.[0-9]+\.[0-9]+\.[0-9] ]]; then
 return
 else
- if [ "$IP_INFO" = false ] || [ "$OSINT" = false ]; then
+ if [[ "$IP_INFO" = false ]] || [[ "$OSINT" = false ]]; then
 printf "\n${yellow} ${FUNCNAME[0]} skipped in this mode or defined in reconftw.cfg ${reset}\n"
 else
 printf "${yellow} ${FUNCNAME[0]} is already processed, to force executing ${FUNCNAME[0]} delete\n $called_fn_dir/.${FUNCNAME[0]} ${reset}\n\n"
@@ -323,7 +575,7 @@ function ip_info(){
 ###############################################################################################################
 ############################################### SUBDOMAINS ####################################################
 ###############################################################################################################
-function subdomains_full(){
+function subdomains_full() {
 NUMOFLINES_subs="0"
 NUMOFLINES_probed="0"
 printf "${bgreen}#######################################################################\n\n"
@@ -332,13 +584,13 @@ function subdomains_full(){
 [ -s "subdomains/subdomains.txt" ] && cp subdomains/subdomains.txt .tmp/subdomains_old.txt
 [ -s "webs/webs.txt" ] && cp webs/webs.txt .tmp/probed_old.txt
- if ( [ ! -f "$called_fn_dir/.sub_active" ] || [ ! -f "$called_fn_dir/.sub_brute" ] || [ ! -f "$called_fn_dir/.sub_permut" ] || [ ! -f "$called_fn_dir/.sub_recursive_brute" ] ) || [ "$DIFF" = true ] ; then
+ if ([[ ! -f "$called_fn_dir/.sub_active" ]] || [[ ! -f "$called_fn_dir/.sub_brute" ]] || [[ ! -f "$called_fn_dir/.sub_permut" ]] || [[ ! -f "$called_fn_dir/.sub_recursive_brute" ]]) || [[ "$DIFF" = true ]]; then
 resolvers_update
 fi
 [ -s "${inScope_file}" ] && cat ${inScope_file} | anew -q subdomains/subdomains.txt
- if !
[[ $domain =~ ^[0-9]+\.[0-9]+\.[0-9]+\.[0-9] ]] && [[ "$SUBDOMAINS_GENERAL" = true ]]; then sub_passive sub_crt sub_active @@ -352,17 +604,17 @@ function subdomains_full(){ sub_dns sub_scraping sub_analytics - else + else notification "IP/CIDR detected, subdomains search skipped" info echo $domain | anew -q subdomains/subdomains.txt fi webprobe_simple - if [ -s "subdomains/subdomains.txt" ]; then + if [[ -s "subdomains/subdomains.txt" ]]; then [ -s "$outOfScope_file" ] && deleteOutScoped $outOfScope_file subdomains/subdomains.txt NUMOFLINES_subs=$(cat subdomains/subdomains.txt 2>>"$LOGFILE" | anew .tmp/subdomains_old.txt | sed '/^$/d' | wc -l) fi - if [ -s "webs/webs.txt" ]; then + if [[ -s "webs/webs.txt" ]]; then [ -s "$outOfScope_file" ] && deleteOutScoped $outOfScope_file webs/webs.txt NUMOFLINES_probed=$(cat webs/webs.txt 2>>"$LOGFILE" | anew .tmp/probed_old.txt | sed '/^$/d' | wc -l) fi @@ -376,25 +628,27 @@ function subdomains_full(){ printf "${bgreen}#######################################################################\n\n" } -function sub_passive(){ - if { [ ! -f "$called_fn_dir/.${FUNCNAME[0]}" ] || [ "$DIFF" = true ]; } && [ "$SUBPASSIVE" = true ]; then +function sub_passive() { + if { [[ ! -f "$called_fn_dir/.${FUNCNAME[0]}" ]] || [[ "$DIFF" = true ]]; } && [[ "$SUBPASSIVE" = true ]]; then start_subfunc ${FUNCNAME[0]} "Running : Passive Subdomain Enumeration" - [[ $RUNAMASS == true ]] && timeout -k 1m ${AMASS_ENUM_TIMEOUT} amass enum -passive -d $domain -config $AMASS_CONFIG -timeout $AMASS_ENUM_TIMEOUT -json .tmp/amass_json.json 2>>"$LOGFILE" &>/dev/null + if [[ $RUNAMASS == true ]]; then + timeout -k 1m ${AMASS_ENUM_TIMEOUT} amass enum -passive -d $domain -config $AMASS_CONFIG -timeout $AMASS_ENUM_TIMEOUT -json .tmp/amass_json.json 2>>"$LOGFILE" >> /dev/null + fi [ -s ".tmp/amass_json.json" ] && cat .tmp/amass_json.json | jq -r '.name' | anew -q .tmp/amass_psub.txt [[ $RUNSUBFINDER == true ]] && subfinder -all -d "$domain" -silent -o .tmp/subfinder_psub.txt 2>>"$LOGFILE" >/dev/null - - if [ -s "${GITHUB_TOKENS}" ]; then - if [ "$DEEP" = true ]; then + + if [[ -s "${GITHUB_TOKENS}" ]]; then + if [[ "$DEEP" = true ]]; then github-subdomains -d $domain -t $GITHUB_TOKENS -o .tmp/github_subdomains_psub.txt 2>>"$LOGFILE" >/dev/null else github-subdomains -d $domain -k -q -t $GITHUB_TOKENS -o .tmp/github_subdomains_psub.txt 2>>"$LOGFILE" >/dev/null fi fi - if [ -s "${GITLAB_TOKENS}" ]; then - gitlab-subdomains -d $domain -t $GITLAB_TOKENS > .tmp/gitlab_subdomains_psub.txt 2>>"$LOGFILE" >/dev/null + if [[ -s "${GITLAB_TOKENS}" ]]; then + gitlab-subdomains -d "$domain" -t "$GITLAB_TOKENS" 2>>"$LOGFILE" | tee .tmp/gitlab_subdomains_psub.txt >/dev/null fi - if [ "$INSCOPE" = true ]; then + if [[ "$INSCOPE" = true ]]; then check_inscope .tmp/amass_psub.txt 2>>"$LOGFILE" >/dev/null check_inscope .tmp/subfinder_psub.txt 2>>"$LOGFILE" >/dev/null check_inscope .tmp/github_subdomains_psub.txt 2>>"$LOGFILE" >/dev/null @@ -403,7 +657,7 @@ function sub_passive(){ NUMOFLINES=$(find .tmp -type f -iname "*_psub.txt" -exec cat {} + | sed "s/*.//" | anew .tmp/passive_subs.txt | sed '/^$/d' | wc -l) end_subfunc "${NUMOFLINES} new subs (passive)" ${FUNCNAME[0]} else - if [ "$SUBPASSIVE" = false ]; then + if [[ "$SUBPASSIVE" = false ]]; then printf "\n${yellow} ${FUNCNAME[0]} skipped in this mode or defined in reconftw.cfg ${reset}\n" else printf "${yellow} ${FUNCNAME[0]} is already processed, to force executing ${FUNCNAME[0]} delete\n $called_fn_dir/.${FUNCNAME[0]} ${reset}\n\n" @@ -411,15 +665,15 @@ 
function sub_passive(){ fi } -function sub_crt(){ - if { [ ! -f "$called_fn_dir/.${FUNCNAME[0]}" ] || [ "$DIFF" = true ]; } && [ "$SUBCRT" = true ]; then +function sub_crt() { + if { [[ ! -f "$called_fn_dir/.${FUNCNAME[0]}" ]] || [[ "$DIFF" = true ]]; } && [[ "$SUBCRT" = true ]]; then start_subfunc ${FUNCNAME[0]} "Running : Crtsh Subdomain Enumeration" - crt -s -json -l ${CTR_LIMIT} $domain 2>>"$LOGFILE" | jq -r '.[].subdomain' 2>>"$LOGFILE" | sed -e "s/^\\*\\.//" | anew -q .tmp/crtsh_subs_tmp.txt 2>>"$LOGFILE" >/dev/null - [[ "$INSCOPE" = true ]] && check_inscope .tmp/crtsh_subs_tmp.txt 2>>"$LOGFILE" >/dev/null + crt -s -json -l ${CTR_LIMIT} $domain 2>>"$LOGFILE" | jq -r '.[].subdomain' 2>>"$LOGFILE" | sed -e 's/^\*\.//' | anew -q .tmp/crtsh_subs_tmp.txt 2>>"$LOGFILE" >/dev/null + [[ $INSCOPE == true ]] && check_inscope .tmp/crtsh_subs_tmp.txt 2>>"$LOGFILE" >/dev/null NUMOFLINES=$(cat .tmp/crtsh_subs_tmp.txt 2>>"$LOGFILE" | sed 's/\*.//g' | anew .tmp/crtsh_subs.txt | sed '/^$/d' | wc -l) end_subfunc "${NUMOFLINES} new subs (cert transparency)" ${FUNCNAME[0]} else - if [ "$SUBCRT" = false ]; then + if [[ "$SUBCRT" = false ]]; then printf "\n${yellow} ${FUNCNAME[0]} skipped in this mode or defined in reconftw.cfg ${reset}\n" else printf "${yellow} ${FUNCNAME[0]} is already processed, to force executing ${FUNCNAME[0]} delete\n $called_fn_dir/.${FUNCNAME[0]} ${reset}\n\n" @@ -427,25 +681,25 @@ function sub_crt(){ fi } -function sub_active(){ - if [ ! -f "$called_fn_dir/.${FUNCNAME[0]}" ] || [ "$DIFF" = true ]; then +function sub_active() { + if [[ ! -f "$called_fn_dir/.${FUNCNAME[0]}" ]] || [[ "$DIFF" = true ]]; then start_subfunc ${FUNCNAME[0]} "Running : Active Subdomain Enumeration" find .tmp -type f -iname "*_subs.txt" -exec cat {} + | anew -q .tmp/subs_no_resolved.txt [ -s "$outOfScope_file" ] && deleteOutScoped $outOfScope_file .tmp/subs_no_resolved.txt - if [ ! "$AXIOM" = true ]; then + if [[ ! 
"$AXIOM" = true ]]; then resolvers_update_quick_local - [ -s ".tmp/subs_no_resolved.txt" ] && puredns resolve .tmp/subs_no_resolved.txt -w .tmp/subdomains_tmp.txt -r $resolvers --resolvers-trusted $resolvers_trusted -l $PUREDNS_PUBLIC_LIMIT --rate-limit-trusted $PUREDNS_TRUSTED_LIMIT --wildcard-tests $PUREDNS_WILDCARDTEST_LIMIT --wildcard-batch $PUREDNS_WILDCARDBATCH_LIMIT 2>>"$LOGFILE" >/dev/null + [ -s ".tmp/subs_no_resolved.txt" ] && puredns resolve .tmp/subs_no_resolved.txt -w .tmp/subdomains_tmp.txt -r $resolvers --resolvers-trusted $resolvers_trusted -l $PUREDNS_PUBLIC_LIMIT --rate-limit-trusted $PUREDNS_TRUSTED_LIMIT --wildcard-tests $PUREDNS_WILDCARDTEST_LIMIT --wildcard-batch $PUREDNS_WILDCARDBATCH_LIMIT 2>>"$LOGFILE" >/dev/null else resolvers_update_quick_axiom [ -s ".tmp/subs_no_resolved.txt" ] && axiom-scan .tmp/subs_no_resolved.txt -m puredns-resolve -r /home/op/lists/resolvers.txt --resolvers-trusted /home/op/lists/resolvers_trusted.txt --wildcard-tests $PUREDNS_WILDCARDTEST_LIMIT --wildcard-batch $PUREDNS_WILDCARDBATCH_LIMIT -o .tmp/subdomains_tmp.txt $AXIOM_EXTRA_ARGS 2>>"$LOGFILE" >/dev/null fi echo $domain | dnsx -retry 3 -silent -r $resolvers_trusted 2>>"$LOGFILE" | anew -q .tmp/subdomains_tmp.txt - if [ "$DEEP" = true ]; then + if [[ "$DEEP" = true ]]; then cat .tmp/subdomains_tmp.txt | tlsx -san -cn -silent -ro -c $TLSX_THREADS -p $TLS_PORTS | anew -q .tmp/subdomains_tmp.txt else cat .tmp/subdomains_tmp.txt | tlsx -san -cn -silent -ro -c $TLSX_THREADS | anew -q .tmp/subdomains_tmp.txt fi - [[ "$INSCOPE" = true ]] && check_inscope .tmp/subdomains_tmp.txt 2>>"$LOGFILE" >/dev/null + [[ $INSCOPE == true ]] && check_inscope .tmp/subdomains_tmp.txt 2>>"$LOGFILE" >/dev/null NUMOFLINES=$(cat .tmp/subdomains_tmp.txt 2>>"$LOGFILE" | grep "\.$domain$\|^$domain$" | grep -E '^((http|https):\/\/)?([a-zA-Z0-9]([a-zA-Z0-9\-]*[a-zA-Z0-9])?\.)+[a-zA-Z]{1,}(\/.*)?$' | anew subdomains/subdomains.txt | sed '/^$/d' | wc -l) end_subfunc "${NUMOFLINES} subs DNS resolved from passive" ${FUNCNAME[0]} else @@ -453,24 +707,24 @@ function sub_active(){ fi } -function sub_noerror(){ - if { [ ! -f "$called_fn_dir/.${FUNCNAME[0]}" ] || [ "$DIFF" = true ]; } && [ "$SUBNOERROR" = true ]; then +function sub_noerror() { + if { [[ ! 
-f "$called_fn_dir/.${FUNCNAME[0]}" ]] || [[ "$DIFF" = true ]]; } && [[ "$SUBNOERROR" = true ]]; then start_subfunc ${FUNCNAME[0]} "Running : Checking NOERROR DNS response" - if [[ $(echo "${RANDOM}thistotallynotexist${RANDOM}.$domain" | dnsx -r $resolvers -rcode noerror,nxdomain -retry 3 -silent | cut -d' ' -f2) == "[NXDOMAIN]" ]]; then + if [[ $(echo "${RANDOM}thistotallynotexist${RANDOM}.$domain" | dnsx -r $resolvers -rcode noerror,nxdomain -retry 3 -silent | cut -d' ' -f2) == "[NXDOMAIN]" ]]; then resolvers_update_quick_local - if [ "$DEEP" = true ]; then + if [[ "$DEEP" = true ]]; then dnsx -d $domain -r $resolvers -silent -rcode noerror -w $subs_wordlist_big | cut -d' ' -f1 | anew -q .tmp/subs_noerror.txt 2>>"$LOGFILE" >/dev/null else dnsx -d $domain -r $resolvers -silent -rcode noerror -w $subs_wordlist | cut -d' ' -f1 | anew -q .tmp/subs_noerror.txt 2>>"$LOGFILE" >/dev/null fi - [[ "$INSCOPE" = true ]] && check_inscope .tmp/subs_noerror.txt 2>>"$LOGFILE" >/dev/null + [[ $INSCOPE == true ]] && check_inscope .tmp/subs_noerror.txt 2>>"$LOGFILE" >/dev/null NUMOFLINES=$(cat .tmp/subs_noerror.txt 2>>"$LOGFILE" | sed "s/*.//" | grep ".$domain$" | grep -E '^((http|https):\/\/)?([a-zA-Z0-9]([a-zA-Z0-9\-]*[a-zA-Z0-9])?\.)+[a-zA-Z]{1,}(\/.*)?$' | anew subdomains/subdomains.txt | sed '/^$/d' | wc -l) end_subfunc "${NUMOFLINES} new subs (DNS noerror)" ${FUNCNAME[0]} - else - printf "\n${yellow} Detected DNSSEC black lies, skipping this technique ${reset}\n" + else + printf "\n${yellow} Detected DNSSEC black lies, skipping this technique ${reset}\n" fi else - if [ "$SUBBRUTE" = false ]; then + if [[ "$SUBBRUTE" = false ]]; then printf "\n${yellow} ${FUNCNAME[0]} skipped in this mode or defined in reconftw.cfg ${reset}\n" else printf "${yellow} ${FUNCNAME[0]} is already processed, to force executing ${FUNCNAME[0]} delete\n $called_fn_dir/.${FUNCNAME[0]} ${reset}\n\n" @@ -478,16 +732,16 @@ function sub_noerror(){ fi } -function sub_dns(){ - if [ ! -f "$called_fn_dir/.${FUNCNAME[0]}" ] || [ "$DIFF" = true ]; then +function sub_dns() { + if [[ ! -f "$called_fn_dir/.${FUNCNAME[0]}" ]] || [[ "$DIFF" = true ]]; then start_subfunc ${FUNCNAME[0]} "Running : DNS Subdomain Enumeration and PTR search" - if [ ! "$AXIOM" = true ]; then + if [[ ! 
"$AXIOM" = true ]]; then [ -s "subdomains/subdomains.txt" ] && cat subdomains/subdomains.txt | dnsx -r $resolvers_trusted -a -aaaa -cname -ns -ptr -mx -soa -silent -retry 3 -json -o subdomains/subdomains_dnsregs.json 2>>"$LOGFILE" >/dev/null [ -s "subdomains/subdomains_dnsregs.json" ] && cat subdomains/subdomains_dnsregs.json | jq -r 'try .a[], try .aaaa[], try .cname[], try .ns[], try .ptr[], try .mx[], try .soa[]' 2>/dev/null | grep ".$domain$" | grep -E '^((http|https):\/\/)?([a-zA-Z0-9]([a-zA-Z0-9\-]*[a-zA-Z0-9])?\.)+[a-zA-Z]{1,}(\/.*)?$' | anew -q .tmp/subdomains_dns.txt [ -s "subdomains/subdomains_dnsregs.json" ] && cat subdomains/subdomains_dnsregs.json | jq -r 'try .a[]' | sort -u | hakip2host | cut -d' ' -f 3 | unfurl -u domains | sed -e 's/*\.//' -e 's/\.$//' -e '/\./!d' | grep ".$domain$" | grep -E '^((http|https):\/\/)?([a-zA-Z0-9]([a-zA-Z0-9\-]*[a-zA-Z0-9])?\.)+[a-zA-Z]{1,}(\/.*)?$' | anew -q .tmp/subdomains_dns.txt [ -s "subdomains/subdomains_dnsregs.json" ] && cat subdomains/subdomains_dnsregs.json | jq -r 'try "\(.host) - \(.a[])"' 2>/dev/null | sort -u -k2 | anew -q subdomains/subdomains_ips.txt resolvers_update_quick_local - [ -s ".tmp/subdomains_dns.txt" ] && puredns resolve .tmp/subdomains_dns.txt -w .tmp/subdomains_dns_resolved.txt -r $resolvers --resolvers-trusted $resolvers_trusted -l $PUREDNS_PUBLIC_LIMIT --rate-limit-trusted $PUREDNS_TRUSTED_LIMIT --wildcard-tests $PUREDNS_WILDCARDTEST_LIMIT --wildcard-batch $PUREDNS_WILDCARDBATCH_LIMIT 2>>"$LOGFILE" >/dev/null + [ -s ".tmp/subdomains_dns.txt" ] && puredns resolve .tmp/subdomains_dns.txt -w .tmp/subdomains_dns_resolved.txt -r $resolvers --resolvers-trusted $resolvers_trusted -l $PUREDNS_PUBLIC_LIMIT --rate-limit-trusted $PUREDNS_TRUSTED_LIMIT --wildcard-tests $PUREDNS_WILDCARDTEST_LIMIT --wildcard-batch $PUREDNS_WILDCARDBATCH_LIMIT 2>>"$LOGFILE" >/dev/null else [ -s "subdomains/subdomains.txt" ] && axiom-scan subdomains/subdomains.txt -m dnsx -retry 3 -a -aaaa -cname -ns -ptr -mx -soa -json -o subdomains/subdomains_dnsregs.json $AXIOM_EXTRA_ARGS 2>>"$LOGFILE" >/dev/null [ -s "subdomains/subdomains_dnsregs.json" ] && cat subdomains/subdomains_dnsregs.json | jq -r 'try .a[]' | sort -u | anew -q .tmp/subdomains_dns_a_records.txt @@ -497,7 +751,7 @@ function sub_dns(){ resolvers_update_quick_axiom [ -s ".tmp/subdomains_dns.txt" ] && axiom-scan .tmp/subdomains_dns.txt -m puredns-resolve -r /home/op/lists/resolvers.txt --resolvers-trusted /home/op/lists/resolvers_trusted.txt --wildcard-tests $PUREDNS_WILDCARDTEST_LIMIT --wildcard-batch $PUREDNS_WILDCARDBATCH_LIMIT -o .tmp/subdomains_dns_resolved.txt $AXIOM_EXTRA_ARGS 2>>"$LOGFILE" >/dev/null fi - [[ "$INSCOPE" = true ]] && check_inscope .tmp/subdomains_dns_resolved.txt 2>>"$LOGFILE" >/dev/null + [[ $INSCOPE == true ]] && check_inscope .tmp/subdomains_dns_resolved.txt 2>>"$LOGFILE" >/dev/null NUMOFLINES=$(cat .tmp/subdomains_dns_resolved.txt 2>>"$LOGFILE" | grep "\.$domain$\|^$domain$" | grep -E '^((http|https):\/\/)?([a-zA-Z0-9]([a-zA-Z0-9\-]*[a-zA-Z0-9])?\.)+[a-zA-Z]{1,}(\/.*)?$' | anew subdomains/subdomains.txt | sed '/^$/d' | wc -l) end_subfunc "${NUMOFLINES} new subs (dns resolution)" ${FUNCNAME[0]} else @@ -505,31 +759,31 @@ function sub_dns(){ fi } -function sub_brute(){ - if { [ ! -f "$called_fn_dir/.${FUNCNAME[0]}" ] || [ "$DIFF" = true ]; } && [ "$SUBBRUTE" = true ]; then +function sub_brute() { + if { [[ ! 
-f "$called_fn_dir/.${FUNCNAME[0]}" ]] || [[ "$DIFF" = true ]]; } && [[ "$SUBBRUTE" = true ]]; then start_subfunc ${FUNCNAME[0]} "Running : Bruteforce Subdomain Enumeration" - if [ ! "$AXIOM" = true ]; then + if [[ ! "$AXIOM" = true ]]; then resolvers_update_quick_local - if [ "$DEEP" = true ]; then - puredns bruteforce $subs_wordlist_big $domain -w .tmp/subs_brute.txt -r $resolvers --resolvers-trusted $resolvers_trusted -l $PUREDNS_PUBLIC_LIMIT --rate-limit-trusted $PUREDNS_TRUSTED_LIMIT --wildcard-tests $PUREDNS_WILDCARDTEST_LIMIT --wildcard-batch $PUREDNS_WILDCARDBATCH_LIMIT 2>>"$LOGFILE" >/dev/null + if [[ "$DEEP" = true ]]; then + puredns bruteforce $subs_wordlist_big $domain -w .tmp/subs_brute.txt -r $resolvers --resolvers-trusted $resolvers_trusted -l $PUREDNS_PUBLIC_LIMIT --rate-limit-trusted $PUREDNS_TRUSTED_LIMIT --wildcard-tests $PUREDNS_WILDCARDTEST_LIMIT --wildcard-batch $PUREDNS_WILDCARDBATCH_LIMIT 2>>"$LOGFILE" >/dev/null else - puredns bruteforce $subs_wordlist $domain -w .tmp/subs_brute.txt -r $resolvers --resolvers-trusted $resolvers_trusted -l $PUREDNS_PUBLIC_LIMIT --rate-limit-trusted $PUREDNS_TRUSTED_LIMIT --wildcard-tests $PUREDNS_WILDCARDTEST_LIMIT --wildcard-batch $PUREDNS_WILDCARDBATCH_LIMIT 2>>"$LOGFILE" >/dev/null + puredns bruteforce $subs_wordlist $domain -w .tmp/subs_brute.txt -r $resolvers --resolvers-trusted $resolvers_trusted -l $PUREDNS_PUBLIC_LIMIT --rate-limit-trusted $PUREDNS_TRUSTED_LIMIT --wildcard-tests $PUREDNS_WILDCARDTEST_LIMIT --wildcard-batch $PUREDNS_WILDCARDBATCH_LIMIT 2>>"$LOGFILE" >/dev/null fi - [ -s ".tmp/subs_brute.txt" ] && puredns resolve .tmp/subs_brute.txt -w .tmp/subs_brute_valid.txt -r $resolvers --resolvers-trusted $resolvers_trusted -l $PUREDNS_PUBLIC_LIMIT --rate-limit-trusted $PUREDNS_TRUSTED_LIMIT --wildcard-tests $PUREDNS_WILDCARDTEST_LIMIT --wildcard-batch $PUREDNS_WILDCARDBATCH_LIMIT 2>>"$LOGFILE" >/dev/null + [ -s ".tmp/subs_brute.txt" ] && puredns resolve .tmp/subs_brute.txt -w .tmp/subs_brute_valid.txt -r $resolvers --resolvers-trusted $resolvers_trusted -l $PUREDNS_PUBLIC_LIMIT --rate-limit-trusted $PUREDNS_TRUSTED_LIMIT --wildcard-tests $PUREDNS_WILDCARDTEST_LIMIT --wildcard-batch $PUREDNS_WILDCARDBATCH_LIMIT 2>>"$LOGFILE" >/dev/null else resolvers_update_quick_axiom - if [ "$DEEP" = true ]; then + if [[ "$DEEP" = true ]]; then axiom-scan $subs_wordlist_big -m puredns-single $domain -r /home/op/lists/resolvers.txt --resolvers-trusted /home/op/lists/resolvers_trusted.txt --wildcard-tests $PUREDNS_WILDCARDTEST_LIMIT --wildcard-batch $PUREDNS_WILDCARDBATCH_LIMIT -o .tmp/subs_brute.txt $AXIOM_EXTRA_ARGS 2>>"$LOGFILE" >/dev/null else axiom-scan $subs_wordlist -m puredns-single $domain -r /home/op/lists/resolvers.txt --resolvers-trusted /home/op/lists/resolvers_trusted.txt --wildcard-tests $PUREDNS_WILDCARDTEST_LIMIT --wildcard-batch $PUREDNS_WILDCARDBATCH_LIMIT -o .tmp/subs_brute.txt $AXIOM_EXTRA_ARGS 2>>"$LOGFILE" >/dev/null fi [ -s ".tmp/subs_brute.txt" ] && axiom-scan .tmp/subs_brute.txt -m puredns-resolve -r /home/op/lists/resolvers.txt --resolvers-trusted /home/op/lists/resolvers_trusted.txt --wildcard-tests $PUREDNS_WILDCARDTEST_LIMIT --wildcard-batch $PUREDNS_WILDCARDBATCH_LIMIT -o .tmp/subs_brute_valid.txt $AXIOM_EXTRA_ARGS 2>>"$LOGFILE" >/dev/null fi - [[ "$INSCOPE" = true ]] && check_inscope .tmp/subs_brute_valid.txt 2>>"$LOGFILE" >/dev/null + [[ $INSCOPE == true ]] && check_inscope .tmp/subs_brute_valid.txt 2>>"$LOGFILE" >/dev/null NUMOFLINES=$(cat .tmp/subs_brute_valid.txt 2>>"$LOGFILE" | sed "s/*.//" | grep 
".$domain$" | grep -E '^((http|https):\/\/)?([a-zA-Z0-9]([a-zA-Z0-9\-]*[a-zA-Z0-9])?\.)+[a-zA-Z]{1,}(\/.*)?$' | anew subdomains/subdomains.txt | sed '/^$/d' | wc -l) end_subfunc "${NUMOFLINES} new subs (bruteforce)" ${FUNCNAME[0]} else - if [ "$SUBBRUTE" = false ]; then + if [[ "$SUBBRUTE" = false ]]; then printf "\n${yellow} ${FUNCNAME[0]} skipped in this mode or defined in reconftw.cfg ${reset}\n" else printf "${yellow} ${FUNCNAME[0]} is already processed, to force executing ${FUNCNAME[0]} delete\n $called_fn_dir/.${FUNCNAME[0]} ${reset}\n\n" @@ -537,20 +791,20 @@ function sub_brute(){ fi } -function sub_scraping(){ - if { [ ! -f "$called_fn_dir/.${FUNCNAME[0]}" ] || [ "$DIFF" = true ]; } && [ "$SUBSCRAPING" = true ]; then +function sub_scraping() { + if { [[ ! -f "$called_fn_dir/.${FUNCNAME[0]}" ]] || [[ "$DIFF" = true ]]; } && [[ "$SUBSCRAPING" = true ]]; then start_subfunc ${FUNCNAME[0]} "Running : Source code scraping subdomain search" touch .tmp/scrap_subs.txt - if [ -s "$dir/subdomains/subdomains.txt" ]; then - if [[ $(cat subdomains/subdomains.txt | wc -l) -le $DEEP_LIMIT ]] || [ "$DEEP" = true ] ; then - if [ ! "$AXIOM" = true ]; then + if [[ -s "$dir/subdomains/subdomains.txt" ]]; then + if [[ $(cat subdomains/subdomains.txt | wc -l) -le $DEEP_LIMIT ]] || [[ "$DEEP" = true ]]; then + if [[ ! "$AXIOM" = true ]]; then resolvers_update_quick_local cat subdomains/subdomains.txt | httpx -follow-host-redirects -status-code -threads $HTTPX_THREADS -rl $HTTPX_RATELIMIT -timeout $HTTPX_TIMEOUT -silent -retries 2 -title -web-server -tech-detect -location -no-color -json -o .tmp/web_full_info1.txt 2>>"$LOGFILE" >/dev/null [ -s ".tmp/web_full_info1.txt" ] && cat .tmp/web_full_info1.txt | jq -r 'try .url' 2>/dev/null | grep "$domain" | grep -E '^((http|https):\/\/)?([a-zA-Z0-9]([a-zA-Z0-9\-]*[a-zA-Z0-9])?\.)+[a-zA-Z]{1,}(\/.*)?$' | sed "s/*.//" | anew .tmp/probed_tmp_scrap.txt | unfurl -u domains 2>>"$LOGFILE" | anew -q .tmp/scrap_subs.txt [ -s ".tmp/probed_tmp_scrap.txt" ] && cat .tmp/probed_tmp_scrap.txt | httpx -tls-grab -tls-probe -csp-probe -status-code -threads $HTTPX_THREADS -rl $HTTPX_RATELIMIT -timeout $HTTPX_TIMEOUT -silent -retries 2 -title -web-server -tech-detect -location -no-color -json -o .tmp/web_full_info2.txt 2>>"$LOGFILE" >/dev/null [ -s ".tmp/web_full_info2.txt" ] && cat .tmp/web_full_info2.txt | jq -r 'try ."tls-grab"."dns_names"[],try .csp.domains[],try .url' 2>/dev/null | grep "$domain" | grep -E '^((http|https):\/\/)?([a-zA-Z0-9]([a-zA-Z0-9\-]*[a-zA-Z0-9])?\.)+[a-zA-Z]{1,}(\/.*)?$' | sed "s/*.//" | sort -u | httpx -silent | anew .tmp/probed_tmp_scrap.txt | unfurl -u domains 2>>"$LOGFILE" | anew -q .tmp/scrap_subs.txt - if [ "$DEEP" = true ]; then + if [[ "$DEEP" = true ]]; then [ -s ".tmp/probed_tmp_scrap.txt" ] && katana -silent -list .tmp/probed_tmp_scrap.txt -jc -kf all -c $KATANA_THREADS -d 3 -fs rdn -o .tmp/katana.txt 2>>"$LOGFILE" >/dev/null else [ -s ".tmp/probed_tmp_scrap.txt" ] && katana -silent -list .tmp/probed_tmp_scrap.txt -jc -kf all -c $KATANA_THREADS -d 2 -fs rdn -o .tmp/katana.txt 2>>"$LOGFILE" >/dev/null @@ -561,7 +815,7 @@ function sub_scraping(){ [ -s ".tmp/web_full_info1.txt" ] && cat .tmp/web_full_info1.txt | jq -r 'try .url' 2>/dev/null | grep "$domain" | grep -E '^((http|https):\/\/)?([a-zA-Z0-9]([a-zA-Z0-9\-]*[a-zA-Z0-9])?\.)+[a-zA-Z]{1,}(\/.*)?$' | sed "s/*.//" | anew .tmp/probed_tmp_scrap.txt | unfurl -u domains 2>>"$LOGFILE" | anew -q .tmp/scrap_subs.txt [ -s ".tmp/probed_tmp_scrap.txt" ] && axiom-scan .tmp/probed_tmp_scrap.txt -m httpx 
-tls-grab -tls-probe -csp-probe -random-agent -status-code -threads $HTTPX_THREADS -rl $HTTPX_RATELIMIT -timeout $HTTPX_TIMEOUT -silent -retries 2 -title -web-server -tech-detect -location -no-color -json -o .tmp/web_full_info2.txt $AXIOM_EXTRA_ARGS 2>>"$LOGFILE" >/dev/null [ -s ".tmp/web_full_info2.txt" ] && cat .tmp/web_full_info2.txt | jq -r 'try ."tls-grab"."dns_names"[],try .csp.domains[],try .url' 2>/dev/null | grep "$domain" | grep -E '^((http|https):\/\/)?([a-zA-Z0-9]([a-zA-Z0-9\-]*[a-zA-Z0-9])?\.)+[a-zA-Z]{1,}(\/.*)?$' | sed "s/*.//" | sort -u | httpx -silent | anew .tmp/probed_tmp_scrap.txt | unfurl -u domains 2>>"$LOGFILE" | anew -q .tmp/scrap_subs.txt - if [ "$DEEP" = true ]; then + if [[ "$DEEP" = true ]]; then [ -s ".tmp/probed_tmp_scrap.txt" ] && axiom-scan .tmp/probed_tmp_scrap.txt -m katana -jc -kf all -d 3 -fs rdn -o .tmp/katana.txt $AXIOM_EXTRA_ARGS 2>>"$LOGFILE" >/dev/null else [ -s ".tmp/probed_tmp_scrap.txt" ] && axiom-scan .tmp/probed_tmp_scrap.txt -m katana -jc -kf all -d 2 -fs rdn -o .tmp/katana.txt $AXIOM_EXTRA_ARGS 2>>"$LOGFILE" >/dev/null @@ -569,14 +823,14 @@ function sub_scraping(){ fi sed -i '/^.\{2048\}./d' .tmp/katana.txt [ -s ".tmp/katana.txt" ] && cat .tmp/katana.txt | unfurl -u domains 2>>"$LOGFILE" | grep ".$domain$" | grep -E '^((http|https):\/\/)?([a-zA-Z0-9]([a-zA-Z0-9\-]*[a-zA-Z0-9])?\.)+[a-zA-Z]{1,}(\/.*)?$' | anew -q .tmp/scrap_subs.txt - [ -s ".tmp/scrap_subs.txt" ] && puredns resolve .tmp/scrap_subs.txt -w .tmp/scrap_subs_resolved.txt -r $resolvers --resolvers-trusted $resolvers_trusted -l $PUREDNS_PUBLIC_LIMIT --rate-limit-trusted $PUREDNS_TRUSTED_LIMIT --wildcard-tests $PUREDNS_WILDCARDTEST_LIMIT --wildcard-batch $PUREDNS_WILDCARDBATCH_LIMIT 2>>"$LOGFILE" >/dev/null - if [ "$INSCOPE" = true ]; then + [ -s ".tmp/scrap_subs.txt" ] && puredns resolve .tmp/scrap_subs.txt -w .tmp/scrap_subs_resolved.txt -r $resolvers --resolvers-trusted $resolvers_trusted -l $PUREDNS_PUBLIC_LIMIT --rate-limit-trusted $PUREDNS_TRUSTED_LIMIT --wildcard-tests $PUREDNS_WILDCARDTEST_LIMIT --wildcard-batch $PUREDNS_WILDCARDBATCH_LIMIT 2>>"$LOGFILE" >/dev/null + if [[ "$INSCOPE" = true ]]; then check_inscope .tmp/scrap_subs_resolved.txt 2>>"$LOGFILE" >/dev/null fi NUMOFLINES=$(cat .tmp/scrap_subs_resolved.txt 2>>"$LOGFILE" | grep "\.$domain$\|^$domain$" | grep -E '^((http|https):\/\/)?([a-zA-Z0-9]([a-zA-Z0-9\-]*[a-zA-Z0-9])?\.)+[a-zA-Z]{1,}(\/.*)?$' | anew subdomains/subdomains.txt | tee .tmp/diff_scrap.txt | sed '/^$/d' | wc -l) [ -s ".tmp/diff_scrap.txt" ] && cat .tmp/diff_scrap.txt | httpx -follow-host-redirects -random-agent -status-code -threads $HTTPX_THREADS -rl $HTTPX_RATELIMIT -timeout $HTTPX_TIMEOUT -silent -retries 2 -title -web-server -tech-detect -location -no-color -json -o .tmp/web_full_info3.txt 2>>"$LOGFILE" >/dev/null [ -s ".tmp/web_full_info3.txt" ] && cat .tmp/web_full_info3.txt | jq -r 'try .url' 2>/dev/null | grep "$domain" | grep -E '^((http|https):\/\/)?([a-zA-Z0-9]([a-zA-Z0-9\-]*[a-zA-Z0-9])?\.)+[a-zA-Z]{1,}(\/.*)?$' | sed "s/*.//" | anew .tmp/probed_tmp_scrap.txt | unfurl -u domains 2>>"$LOGFILE" | anew -q .tmp/scrap_subs.txt - cat .tmp/web_full_info1.txt .tmp/web_full_info2.txt .tmp/web_full_info3.txt 2>>"$LOGFILE" | jq -s 'try .' | jq 'try unique_by(.input)' | jq 'try .[]' 2>>"$LOGFILE" > .tmp/web_full_info.txt + cat .tmp/web_full_info1.txt .tmp/web_full_info2.txt .tmp/web_full_info3.txt 2>>"$LOGFILE" | jq -s 'try .' 
| jq 'try unique_by(.input)' | jq 'try .[]' 2>>"$LOGFILE" >.tmp/web_full_info.txt end_subfunc "${NUMOFLINES} new subs (code scraping)" ${FUNCNAME[0]} else end_subfunc "Skipping Subdomains Web Scraping: Too Many Subdomains" ${FUNCNAME[0]} @@ -585,7 +839,7 @@ function sub_scraping(){ end_subfunc "No subdomains to search (code scraping)" ${FUNCNAME[0]} fi else - if [ "$SUBSCRAPING" = false ]; then + if [[ "$SUBSCRAPING" = false ]]; then printf "\n${yellow} ${FUNCNAME[0]} skipped in this mode or defined in reconftw.cfg ${reset}\n" else printf "${yellow} ${FUNCNAME[0]} is already processed, to force executing ${FUNCNAME[0]} delete\n $called_fn_dir/.${FUNCNAME[0]} ${reset}\n\n" @@ -593,27 +847,27 @@ function sub_scraping(){ fi } -function sub_analytics(){ - if { [ ! -f "$called_fn_dir/.${FUNCNAME[0]}" ] || [ "$DIFF" = true ]; } && [ "$SUBANALYTICS" = true ]; then +function sub_analytics() { + if { [[ ! -f "$called_fn_dir/.${FUNCNAME[0]}" ]] || [[ "$DIFF" = true ]]; } && [[ "$SUBANALYTICS" = true ]]; then start_subfunc ${FUNCNAME[0]} "Running : Analytics Subdomain Enumeration" - if [ -s ".tmp/probed_tmp_scrap.txt" ]; then + if [[ -s ".tmp/probed_tmp_scrap.txt" ]]; then mkdir -p .tmp/output_analytics/ - analyticsrelationships -ch < .tmp/probed_tmp_scrap.txt >> .tmp/analytics_subs_tmp.txt 2>>"$LOGFILE" + analyticsrelationships -ch <.tmp/probed_tmp_scrap.txt >>.tmp/analytics_subs_tmp.txt 2>>"$LOGFILE" [ -s ".tmp/analytics_subs_tmp.txt" ] && cat .tmp/analytics_subs_tmp.txt | grep "\.$domain$\|^$domain$" | grep -E '^((http|https):\/\/)?([a-zA-Z0-9]([a-zA-Z0-9\-]*[a-zA-Z0-9])?\.)+[a-zA-Z]{1,}(\/.*)?$' | sed "s/|__ //" | anew -q .tmp/analytics_subs_clean.txt - if [ ! "$AXIOM" = true ]; then + if [[ ! "$AXIOM" = true ]]; then resolvers_update_quick_local - [ -s ".tmp/analytics_subs_clean.txt" ] && puredns resolve .tmp/analytics_subs_clean.txt -w .tmp/analytics_subs_resolved.txt -r $resolvers --resolvers-trusted $resolvers_trusted -l $PUREDNS_PUBLIC_LIMIT --rate-limit-trusted $PUREDNS_TRUSTED_LIMIT --wildcard-tests $PUREDNS_WILDCARDTEST_LIMIT --wildcard-batch $PUREDNS_WILDCARDBATCH_LIMIT 2>>"$LOGFILE" >/dev/null + [ -s ".tmp/analytics_subs_clean.txt" ] && puredns resolve .tmp/analytics_subs_clean.txt -w .tmp/analytics_subs_resolved.txt -r $resolvers --resolvers-trusted $resolvers_trusted -l $PUREDNS_PUBLIC_LIMIT --rate-limit-trusted $PUREDNS_TRUSTED_LIMIT --wildcard-tests $PUREDNS_WILDCARDTEST_LIMIT --wildcard-batch $PUREDNS_WILDCARDBATCH_LIMIT 2>>"$LOGFILE" >/dev/null else resolvers_update_quick_axiom [ -s ".tmp/analytics_subs_clean.txt" ] && axiom-scan .tmp/analytics_subs_clean.txt -m puredns-resolve -r /home/op/lists/resolvers.txt --resolvers-trusted /home/op/lists/resolvers_trusted.txt --wildcard-tests $PUREDNS_WILDCARDTEST_LIMIT --wildcard-batch $PUREDNS_WILDCARDBATCH_LIMIT -o .tmp/analytics_subs_resolved.txt $AXIOM_EXTRA_ARGS 2>>"$LOGFILE" >/dev/null fi fi - [[ "$INSCOPE" = true ]] && check_inscope .tmp/analytics_subs_resolved.txt 2>>"$LOGFILE" >/dev/null + [[ $INSCOPE == true ]] && check_inscope .tmp/analytics_subs_resolved.txt 2>>"$LOGFILE" >/dev/null NUMOFLINES=$(cat .tmp/analytics_subs_resolved.txt 2>>"$LOGFILE" | anew subdomains/subdomains.txt | sed '/^$/d' | wc -l) end_subfunc "${NUMOFLINES} new subs (analytics relationship)" ${FUNCNAME[0]} else - if [ "$SUBANALYTICS" = false ]; then + if [[ "$SUBANALYTICS" = false ]]; then printf "\n${yellow} ${FUNCNAME[0]} skipped in this mode or defined in reconftw.cfg ${reset}\n" else printf "${yellow} ${FUNCNAME[0]} is already processed, to force 
executing ${FUNCNAME[0]} delete\n $called_fn_dir/.${FUNCNAME[0]} ${reset}\n\n" @@ -621,56 +875,56 @@ function sub_analytics(){ fi } -function sub_permut(){ - if { [ ! -f "$called_fn_dir/.${FUNCNAME[0]}" ] || [ "$DIFF" = true ]; } && [ "$SUBPERMUTE" = true ]; then +function sub_permut() { + if { [[ ! -f "$called_fn_dir/.${FUNCNAME[0]}" ]] || [[ "$DIFF" = true ]]; } && [[ "$SUBPERMUTE" = true ]]; then start_subfunc ${FUNCNAME[0]} "Running : Permutations Subdomain Enumeration" - if [ "$DEEP" = true ] || [ "$(cat subdomains/subdomains.txt | wc -l)" -le $DEEP_LIMIT ] ; then - if [ "$PERMUTATIONS_OPTION" = "gotator" ] ; then - [ -s "subdomains/subdomains.txt" ] && gotator -sub subdomains/subdomains.txt -perm $tools/permutations_list.txt $GOTATOR_FLAGS -silent 2>>"$LOGFILE" | head -c $PERMUTATIONS_LIMIT > .tmp/gotator1.txt + if [[ "$DEEP" = true ]] || [[ "$(cat subdomains/subdomains.txt | wc -l)" -le $DEEP_LIMIT ]]; then + if [[ "$PERMUTATIONS_OPTION" = "gotator" ]]; then + [ -s "subdomains/subdomains.txt" ] && gotator -sub subdomains/subdomains.txt -perm ${tools}/permutations_list.txt $GOTATOR_FLAGS -silent 2>>"$LOGFILE" | head -c $PERMUTATIONS_LIMIT >.tmp/gotator1.txt else - [ -s "subdomains/subdomains.txt" ] && ripgen -d subdomains/subdomains.txt -w $tools/permutations_list.txt 2>>"$LOGFILE" | head -c $PERMUTATIONS_LIMIT > .tmp/gotator1.txt + [ -s "subdomains/subdomains.txt" ] && ripgen -d subdomains/subdomains.txt -w ${tools}/permutations_list.txt 2>>"$LOGFILE" | head -c $PERMUTATIONS_LIMIT >.tmp/gotator1.txt fi - elif [ "$(cat .tmp/subs_no_resolved.txt | wc -l)" -le $DEEP_LIMIT2 ]; then - if [ "$PERMUTATIONS_OPTION" = "gotator" ] ; then - [ -s ".tmp/subs_no_resolved.txt" ] && gotator -sub .tmp/subs_no_resolved.txt -perm $tools/permutations_list.txt $GOTATOR_FLAGS -silent 2>>"$LOGFILE" | head -c $PERMUTATIONS_LIMIT > .tmp/gotator1.txt + elif [[ "$(cat .tmp/subs_no_resolved.txt | wc -l)" -le $DEEP_LIMIT2 ]]; then + if [[ "$PERMUTATIONS_OPTION" = "gotator" ]]; then + [ -s ".tmp/subs_no_resolved.txt" ] && gotator -sub .tmp/subs_no_resolved.txt -perm ${tools}/permutations_list.txt $GOTATOR_FLAGS -silent 2>>"$LOGFILE" | head -c $PERMUTATIONS_LIMIT >.tmp/gotator1.txt else - [ -s ".tmp/subs_no_resolved.txt" ] && ripgen -d .tmp/subs_no_resolved.txt -w $tools/permutations_list.txt 2>>"$LOGFILE" | head -c $PERMUTATIONS_LIMIT > .tmp/gotator1.txt + [ -s ".tmp/subs_no_resolved.txt" ] && ripgen -d .tmp/subs_no_resolved.txt -w ${tools}/permutations_list.txt 2>>"$LOGFILE" | head -c $PERMUTATIONS_LIMIT >.tmp/gotator1.txt fi else end_subfunc "Skipping Permutations: Too Many Subdomains" ${FUNCNAME[0]} return 1 fi - if [ ! "$AXIOM" = true ]; then + if [[ ! 
"$AXIOM" = true ]]; then resolvers_update_quick_local - [ -s ".tmp/gotator1.txt" ] && puredns resolve .tmp/gotator1.txt -w .tmp/permute1.txt -r $resolvers --resolvers-trusted $resolvers_trusted -l $PUREDNS_PUBLIC_LIMIT --rate-limit-trusted $PUREDNS_TRUSTED_LIMIT --wildcard-tests $PUREDNS_WILDCARDTEST_LIMIT --wildcard-batch $PUREDNS_WILDCARDBATCH_LIMIT 2>>"$LOGFILE" >/dev/null + [ -s ".tmp/gotator1.txt" ] && puredns resolve .tmp/gotator1.txt -w .tmp/permute1.txt -r $resolvers --resolvers-trusted $resolvers_trusted -l $PUREDNS_PUBLIC_LIMIT --rate-limit-trusted $PUREDNS_TRUSTED_LIMIT --wildcard-tests $PUREDNS_WILDCARDTEST_LIMIT --wildcard-batch $PUREDNS_WILDCARDBATCH_LIMIT 2>>"$LOGFILE" >/dev/null else resolvers_update_quick_axiom [ -s ".tmp/gotator1.txt" ] && axiom-scan .tmp/gotator1.txt -m puredns-resolve -r /home/op/lists/resolvers.txt --resolvers-trusted /home/op/lists/resolvers_trusted.txt --wildcard-tests $PUREDNS_WILDCARDTEST_LIMIT --wildcard-batch $PUREDNS_WILDCARDBATCH_LIMIT -o .tmp/permute1.txt $AXIOM_EXTRA_ARGS 2>>"$LOGFILE" >/dev/null fi - - if [ "$PERMUTATIONS_OPTION" = "gotator" ] ; then - [ -s ".tmp/permute1.txt" ] && gotator -sub .tmp/permute1.txt -perm $tools/permutations_list.txt $GOTATOR_FLAGS -silent 2>>"$LOGFILE" | head -c $PERMUTATIONS_LIMIT > .tmp/gotator2.txt + + if [[ "$PERMUTATIONS_OPTION" = "gotator" ]]; then + [ -s ".tmp/permute1.txt" ] && gotator -sub .tmp/permute1.txt -perm ${tools}/permutations_list.txt $GOTATOR_FLAGS -silent 2>>"$LOGFILE" | head -c $PERMUTATIONS_LIMIT >.tmp/gotator2.txt else - [ -s ".tmp/permute1.txt" ] && ripgen -d .tmp/permute1.txt -w $tools/permutations_list.txt 2>>"$LOGFILE" | head -c $PERMUTATIONS_LIMIT > .tmp/gotator2.txt + [ -s ".tmp/permute1.txt" ] && ripgen -d .tmp/permute1.txt -w ${tools}/permutations_list.txt 2>>"$LOGFILE" | head -c $PERMUTATIONS_LIMIT >.tmp/gotator2.txt fi - if [ ! "$AXIOM" = true ]; then - [ -s ".tmp/gotator2.txt" ] && puredns resolve .tmp/gotator2.txt -w .tmp/permute2.txt -r $resolvers --resolvers-trusted $resolvers_trusted -l $PUREDNS_PUBLIC_LIMIT --rate-limit-trusted $PUREDNS_TRUSTED_LIMIT --wildcard-tests $PUREDNS_WILDCARDTEST_LIMIT --wildcard-batch $PUREDNS_WILDCARDBATCH_LIMIT 2>>"$LOGFILE" >/dev/null + if [[ ! 
"$AXIOM" = true ]]; then + [ -s ".tmp/gotator2.txt" ] && puredns resolve .tmp/gotator2.txt -w .tmp/permute2.txt -r $resolvers --resolvers-trusted $resolvers_trusted -l $PUREDNS_PUBLIC_LIMIT --rate-limit-trusted $PUREDNS_TRUSTED_LIMIT --wildcard-tests $PUREDNS_WILDCARDTEST_LIMIT --wildcard-batch $PUREDNS_WILDCARDBATCH_LIMIT 2>>"$LOGFILE" >/dev/null else [ -s ".tmp/gotator2.txt" ] && axiom-scan .tmp/gotator2.txt -m puredns-resolve -r /home/op/lists/resolvers.txt --resolvers-trusted /home/op/lists/resolvers_trusted.txt --wildcard-tests $PUREDNS_WILDCARDTEST_LIMIT --wildcard-batch $PUREDNS_WILDCARDBATCH_LIMIT -o .tmp/permute2.txt $AXIOM_EXTRA_ARGS 2>>"$LOGFILE" >/dev/null fi cat .tmp/permute1.txt .tmp/permute2.txt 2>>"$LOGFILE" | anew -q .tmp/permute_subs.txt - if [ -s ".tmp/permute_subs.txt" ]; then + if [[ -s ".tmp/permute_subs.txt" ]]; then [ -s "$outOfScope_file" ] && deleteOutScoped $outOfScope_file .tmp/permute_subs.txt - [[ "$INSCOPE" = true ]] && check_inscope .tmp/permute_subs.txt 2>>"$LOGFILE" >/dev/null + [[ $INSCOPE == true ]] && check_inscope .tmp/permute_subs.txt 2>>"$LOGFILE" >/dev/null NUMOFLINES=$(cat .tmp/permute_subs.txt 2>>"$LOGFILE" | grep ".$domain$" | grep -E '^((http|https):\/\/)?([a-zA-Z0-9]([a-zA-Z0-9\-]*[a-zA-Z0-9])?\.)+[a-zA-Z]{1,}(\/.*)?$' | anew subdomains/subdomains.txt | sed '/^$/d' | wc -l) else NUMOFLINES=0 fi end_subfunc "${NUMOFLINES} new subs (permutations)" ${FUNCNAME[0]} else - if [ "$SUBPERMUTE" = false ]; then + if [[ "$SUBPERMUTE" = false ]]; then printf "\n${yellow} ${FUNCNAME[0]} skipped in this mode or defined in reconftw.cfg ${reset}\n" else printf "${yellow} ${FUNCNAME[0]} is already processed, to force executing ${FUNCNAME[0]} delete\n $called_fn_dir/.${FUNCNAME[0]} ${reset}\n\n" @@ -678,31 +932,37 @@ function sub_permut(){ fi } -function sub_regex_permut(){ - if { [ ! -f "$called_fn_dir/.${FUNCNAME[0]}" ] || [ "$DIFF" = true ]; } && [ "$SUBREGEXPERMUTE" = true ]; then +function sub_regex_permut() { + if { [[ ! -f "$called_fn_dir/.${FUNCNAME[0]}" ]] || [[ "$DIFF" = true ]]; } && [[ "$SUBREGEXPERMUTE" = true ]]; then start_subfunc ${FUNCNAME[0]} "Running : Permutations by regex analysis" - cd "$tools/regulator" || { echo "Failed to cd directory in ${FUNCNAME[0]} @ line ${LINENO}"; exit 1; } + cd "${tools}/regulator" || { + echo "Failed to cd directory in ${FUNCNAME[0]} @ line ${LINENO}" + exit 1 + } python3 main.py -t $domain -f ${dir}/subdomains/subdomains.txt -o ${dir}/.tmp/${domain}.brute - cd "$dir" || { echo "Failed to cd to $dir in ${FUNCNAME[0]} @ line ${LINENO}"; exit 1; } + cd "$dir" || { + echo "Failed to cd to $dir in ${FUNCNAME[0]} @ line ${LINENO}" + exit 1 + } - if [ ! "$AXIOM" = true ]; then + if [[ ! 
"$AXIOM" = true ]]; then resolvers_update_quick_local [ -s ".tmp/${domain}.brute" ] && puredns resolve .tmp/${domain}.brute -w .tmp/regulator.txt -r $resolvers --resolvers-trusted $resolvers_trusted -l $PUREDNS_PUBLIC_LIMIT --rate-limit-trusted $PUREDNS_TRUSTED_LIMIT --wildcard-tests $PUREDNS_WILDCARDTEST_LIMIT --wildcard-batch $PUREDNS_WILDCARDBATCH_LIMIT 2>>"$LOGFILE" >/dev/null else resolvers_update_quick_axiom [ -s ".tmp/${domain}.brute" ] && axiom-scan .tmp/${domain}.brute -m puredns-resolve -r /home/op/lists/resolvers.txt --resolvers-trusted /home/op/lists/resolvers_trusted.txt --wildcard-tests $PUREDNS_WILDCARDTEST_LIMIT --wildcard-batch $PUREDNS_WILDCARDBATCH_LIMIT -o .tmp/regulator.txt $AXIOM_EXTRA_ARGS 2>>"$LOGFILE" >/dev/null fi - - if [ -s ".tmp/regulator.txt" ]; then + + if [[ -s ".tmp/regulator.txt" ]]; then [ -s "$outOfScope_file" ] && deleteOutScoped $outOfScope_file .tmp/regulator.txt - [[ "$INSCOPE" = true ]] && check_inscope .tmp/regulator.txt 2>>"$LOGFILE" >/dev/null + [[ $INSCOPE == true ]] && check_inscope .tmp/regulator.txt 2>>"$LOGFILE" >/dev/null NUMOFLINES=$(cat .tmp/regulator.txt 2>>"$LOGFILE" | grep ".$domain$" | grep -E '^((http|https):\/\/)?([a-zA-Z0-9]([a-zA-Z0-9\-]*[a-zA-Z0-9])?\.)+[a-zA-Z]{1,}(\/.*)?$' | anew subdomains/subdomains.txt | sed '/^$/d' | wc -l) else NUMOFLINES=0 fi end_subfunc "${NUMOFLINES} new subs (permutations by regex)" ${FUNCNAME[0]} else - if [ "$SUBREGEXPERMUTE" = false ]; then + if [[ "$SUBREGEXPERMUTE" = false ]]; then printf "\n${yellow} ${FUNCNAME[0]} skipped in this mode or defined in reconftw.cfg ${reset}\n" else printf "${yellow} ${FUNCNAME[0]} is already processed, to force executing ${FUNCNAME[0]} delete\n $called_fn_dir/.${FUNCNAME[0]} ${reset}\n\n" @@ -710,26 +970,26 @@ function sub_regex_permut(){ fi } -function sub_recursive_passive(){ - if { [ ! -f "$called_fn_dir/.${FUNCNAME[0]}" ] || [ "$DIFF" = true ]; } && [ "$SUB_RECURSIVE_PASSIVE" = true ] && [ -s "subdomains/subdomains.txt" ]; then +function sub_recursive_passive() { + if { [[ ! -f "$called_fn_dir/.${FUNCNAME[0]}" ]] || [[ "$DIFF" = true ]]; } && [[ "$SUB_RECURSIVE_PASSIVE" = true ]] && [[ -s "subdomains/subdomains.txt" ]]; then start_subfunc ${FUNCNAME[0]} "Running : Subdomains recursive search passive" # Passive recursive - [ -s "subdomains/subdomains.txt" ] && dsieve -if subdomains/subdomains.txt -f 3 -top $DEEP_RECURSIVE_PASSIVE > .tmp/subdomains_recurs_top.txt - if [ ! "$AXIOM" = true ]; then + [ -s "subdomains/subdomains.txt" ] && dsieve -if subdomains/subdomains.txt -f 3 -top $DEEP_RECURSIVE_PASSIVE >.tmp/subdomains_recurs_top.txt + if [[ ! 
"$AXIOM" = true ]]; then resolvers_update_quick_local [ -s ".tmp/subdomains_recurs_top.txt" ] && timeout -k 1m ${AMASS_ENUM_TIMEOUT}m amass enum -passive -df .tmp/subdomains_recurs_top.txt -nf subdomains/subdomains.txt -config $AMASS_CONFIG -timeout $AMASS_ENUM_TIMEOUT 2>>"$LOGFILE" | anew -q .tmp/passive_recursive.txt - [ -s ".tmp/passive_recursive.txt" ] && puredns resolve .tmp/passive_recursive.txt -w .tmp/passive_recurs_tmp.txt -r $resolvers --resolvers-trusted $resolvers_trusted -l $PUREDNS_PUBLIC_LIMIT --rate-limit-trusted $PUREDNS_TRUSTED_LIMIT --wildcard-tests $PUREDNS_WILDCARDTEST_LIMIT --wildcard-batch $PUREDNS_WILDCARDBATCH_LIMIT 2>>"$LOGFILE" &>/dev/null + [ -s ".tmp/passive_recursive.txt" ] && puredns resolve .tmp/passive_recursive.txt -w .tmp/passive_recurs_tmp.txt -r $resolvers --resolvers-trusted $resolvers_trusted -l $PUREDNS_PUBLIC_LIMIT --rate-limit-trusted $PUREDNS_TRUSTED_LIMIT --wildcard-tests $PUREDNS_WILDCARDTEST_LIMIT --wildcard-batch $PUREDNS_WILDCARDBATCH_LIMIT 2>>"$LOGFILE" >/dev/null else resolvers_update_quick_axiom [ -s ".tmp/subdomains_recurs_top.txt" ] && axiom-scan .tmp/subdomains_recurs_top.txt -m amass -passive -o .tmp/amass_prec.txt $AXIOM_EXTRA_ARGS 2>>"$LOGFILE" >/dev/null - [ -s ".tmp/amass_prec.txt" ] && cat .tmp/amass_prec.txt | anew -q .tmp/passive_recursive.txt + [ -s ".tmp/amass_prec.txt" ] && cat .tmp/amass_prec.txt | anew -q .tmp/passive_recursive.txt [ -s ".tmp/passive_recursive.txt" ] && axiom-scan .tmp/passive_recursive.txt -m puredns-resolve -r /home/op/lists/resolvers.txt --resolvers-trusted /home/op/lists/resolvers_trusted.txt --wildcard-tests $PUREDNS_WILDCARDTEST_LIMIT --wildcard-batch $PUREDNS_WILDCARDBATCH_LIMIT -o .tmp/passive_recurs_tmp.txt $AXIOM_EXTRA_ARGS 2>>"$LOGFILE" >/dev/null fi - [[ "$INSCOPE" = true ]] && check_inscope .tmp/passive_recurs_tmp.txt 2>>"$LOGFILE" >/dev/null + [[ $INSCOPE == true ]] && check_inscope .tmp/passive_recurs_tmp.txt 2>>"$LOGFILE" >/dev/null NUMOFLINES=$(cat .tmp/passive_recurs_tmp.txt 2>>"$LOGFILE" | grep "\.$domain$\|^$domain$" | grep -E '^((http|https):\/\/)?([a-zA-Z0-9]([a-zA-Z0-9\-]*[a-zA-Z0-9])?\.)+[a-zA-Z]{1,}(\/.*)?$' | sed '/^$/d' | anew subdomains/subdomains.txt | wc -l) end_subfunc "${NUMOFLINES} new subs (recursive)" ${FUNCNAME[0]} else - if [ "$SUB_RECURSIVE_PASSIVE" = false ]; then + if [[ "$SUB_RECURSIVE_PASSIVE" = false ]]; then printf "\n${yellow} ${FUNCNAME[0]} skipped in this mode or defined in reconftw.cfg ${reset}\n" else printf "${yellow} ${FUNCNAME[0]} is already processed, to force executing ${FUNCNAME[0]} delete\n $called_fn_dir/.${FUNCNAME[0]} ${reset}\n\n" @@ -737,41 +997,41 @@ function sub_recursive_passive(){ fi } -function sub_recursive_brute(){ - if { [ ! -f "$called_fn_dir/.${FUNCNAME[0]}" ] || [ "$DIFF" = true ]; } && [ "$SUB_RECURSIVE_BRUTE" = true ] && [ -s "subdomains/subdomains.txt" ]; then +function sub_recursive_brute() { + if { [[ ! -f "$called_fn_dir/.${FUNCNAME[0]}" ]] || [[ "$DIFF" = true ]]; } && [[ "$SUB_RECURSIVE_BRUTE" = true ]] && [[ -s "subdomains/subdomains.txt" ]]; then start_subfunc ${FUNCNAME[0]} "Running : Subdomains recursive search active" - if [[ $(cat subdomains/subdomains.txt | wc -l) -le $DEEP_LIMIT ]] ; then - [ ! -s ".tmp/subdomains_recurs_top.txt" ] && dsieve -if subdomains/subdomains.txt -f 3 -top $DEEP_RECURSIVE_PASSIVE > .tmp/subdomains_recurs_top.txt - ripgen -d .tmp/subdomains_recurs_top.txt -w $subs_wordlist > .tmp/brute_recursive_wordlist.txt - if [ ! 
"$AXIOM" = true ]; then + if [[ $(cat subdomains/subdomains.txt | wc -l) -le $DEEP_LIMIT ]]; then + [ ! -s ".tmp/subdomains_recurs_top.txt" ] && dsieve -if subdomains/subdomains.txt -f 3 -top $DEEP_RECURSIVE_PASSIVE >.tmp/subdomains_recurs_top.txt + ripgen -d .tmp/subdomains_recurs_top.txt -w $subs_wordlist >.tmp/brute_recursive_wordlist.txt + if [[ ! "$AXIOM" = true ]]; then resolvers_update_quick_local - [ -s ".tmp/brute_recursive_wordlist.txt" ] && puredns resolve .tmp/brute_recursive_wordlist.txt -r $resolvers --resolvers-trusted $resolvers_trusted -l $PUREDNS_PUBLIC_LIMIT --rate-limit-trusted $PUREDNS_TRUSTED_LIMIT --wildcard-tests $PUREDNS_WILDCARDTEST_LIMIT --wildcard-batch $PUREDNS_WILDCARDBATCH_LIMIT -w .tmp/brute_recursive_result.txt 2>>"$LOGFILE" >/dev/null + [ -s ".tmp/brute_recursive_wordlist.txt" ] && puredns resolve .tmp/brute_recursive_wordlist.txt -r $resolvers --resolvers-trusted $resolvers_trusted -l $PUREDNS_PUBLIC_LIMIT --rate-limit-trusted $PUREDNS_TRUSTED_LIMIT --wildcard-tests $PUREDNS_WILDCARDTEST_LIMIT --wildcard-batch $PUREDNS_WILDCARDBATCH_LIMIT -w .tmp/brute_recursive_result.txt 2>>"$LOGFILE" >/dev/null else resolvers_update_quick_axiom [ -s ".tmp/brute_recursive_wordlist.txt" ] && axiom-scan .tmp/brute_recursive_wordlist.txt -m puredns-resolve -r /home/op/lists/resolvers.txt --resolvers-trusted /home/op/lists/resolvers_trusted.txt --wildcard-tests $PUREDNS_WILDCARDTEST_LIMIT --wildcard-batch $PUREDNS_WILDCARDBATCH_LIMIT -o .tmp/brute_recursive_result.txt $AXIOM_EXTRA_ARGS 2>>"$LOGFILE" >/dev/null fi [ -s ".tmp/brute_recursive_result.txt" ] && cat .tmp/brute_recursive_result.txt | anew -q .tmp/brute_recursive.txt - if [ "$PERMUTATIONS_OPTION" = "gotator" ] ; then - [ -s ".tmp/brute_recursive.txt" ] && gotator -sub .tmp/brute_recursive.txt -perm $tools/permutations_list.txt $GOTATOR_FLAGS -silent 2>>"$LOGFILE" | head -c $PERMUTATIONS_LIMIT > .tmp/gotator1_recursive.txt + if [[ "$PERMUTATIONS_OPTION" = "gotator" ]]; then + [ -s ".tmp/brute_recursive.txt" ] && gotator -sub .tmp/brute_recursive.txt -perm ${tools}/permutations_list.txt $GOTATOR_FLAGS -silent 2>>"$LOGFILE" | head -c $PERMUTATIONS_LIMIT >.tmp/gotator1_recursive.txt else - [ -s ".tmp/brute_recursive.txt" ] && ripgen -d .tmp/brute_recursive.txt -w $tools/permutations_list.txt 2>>"$LOGFILE" | head -c $PERMUTATIONS_LIMIT > .tmp/gotator1_recursive.txt + [ -s ".tmp/brute_recursive.txt" ] && ripgen -d .tmp/brute_recursive.txt -w ${tools}/permutations_list.txt 2>>"$LOGFILE" | head -c $PERMUTATIONS_LIMIT >.tmp/gotator1_recursive.txt fi - - if [ ! "$AXIOM" = true ]; then - [ -s ".tmp/gotator1_recursive.txt" ] && puredns resolve .tmp/gotator1_recursive.txt -w .tmp/permute1_recursive.txt -r $resolvers --resolvers-trusted $resolvers_trusted -l $PUREDNS_PUBLIC_LIMIT --rate-limit-trusted $PUREDNS_TRUSTED_LIMIT --wildcard-tests $PUREDNS_WILDCARDTEST_LIMIT --wildcard-batch $PUREDNS_WILDCARDBATCH_LIMIT 2>>"$LOGFILE" >/dev/null + + if [[ ! 
"$AXIOM" = true ]]; then + [ -s ".tmp/gotator1_recursive.txt" ] && puredns resolve .tmp/gotator1_recursive.txt -w .tmp/permute1_recursive.txt -r $resolvers --resolvers-trusted $resolvers_trusted -l $PUREDNS_PUBLIC_LIMIT --rate-limit-trusted $PUREDNS_TRUSTED_LIMIT --wildcard-tests $PUREDNS_WILDCARDTEST_LIMIT --wildcard-batch $PUREDNS_WILDCARDBATCH_LIMIT 2>>"$LOGFILE" >/dev/null else [ -s ".tmp/gotator1_recursive.txt" ] && axiom-scan .tmp/gotator1_recursive.txt -m puredns-resolve -r /home/op/lists/resolvers.txt --resolvers-trusted /home/op/lists/resolvers_trusted.txt --wildcard-tests $PUREDNS_WILDCARDTEST_LIMIT --wildcard-batch $PUREDNS_WILDCARDBATCH_LIMIT -o .tmp/permute1_recursive.txt $AXIOM_EXTRA_ARGS 2>>"$LOGFILE" >/dev/null fi - if [ "$PERMUTATIONS_OPTION" = "gotator" ] ; then - [ -s ".tmp/permute1_recursive.txt" ] && gotator -sub .tmp/permute1_recursive.txt -perm $tools/permutations_list.txt $GOTATOR_FLAGS -silent 2>>"$LOGFILE" | head -c $PERMUTATIONS_LIMIT > .tmp/gotator2_recursive.txt + if [[ "$PERMUTATIONS_OPTION" = "gotator" ]]; then + [ -s ".tmp/permute1_recursive.txt" ] && gotator -sub .tmp/permute1_recursive.txt -perm ${tools}/permutations_list.txt $GOTATOR_FLAGS -silent 2>>"$LOGFILE" | head -c $PERMUTATIONS_LIMIT >.tmp/gotator2_recursive.txt else - [ -s ".tmp/permute1_recursive.txt" ] && ripgen -d .tmp/permute1_recursive.txt -w $tools/permutations_list.txt 2>>"$LOGFILE" | head -c $PERMUTATIONS_LIMIT > .tmp/gotator2_recursive.txt + [ -s ".tmp/permute1_recursive.txt" ] && ripgen -d .tmp/permute1_recursive.txt -w ${tools}/permutations_list.txt 2>>"$LOGFILE" | head -c $PERMUTATIONS_LIMIT >.tmp/gotator2_recursive.txt fi - - if [ ! "$AXIOM" = true ]; then - [ -s ".tmp/gotator2_recursive.txt" ] && puredns resolve .tmp/gotator2_recursive.txt -w .tmp/permute2_recursive.txt -r $resolvers --resolvers-trusted $resolvers_trusted -l $PUREDNS_PUBLIC_LIMIT --rate-limit-trusted $PUREDNS_TRUSTED_LIMIT --wildcard-tests $PUREDNS_WILDCARDTEST_LIMIT --wildcard-batch $PUREDNS_WILDCARDBATCH_LIMIT 2>>"$LOGFILE" >/dev/null + + if [[ ! "$AXIOM" = true ]]; then + [ -s ".tmp/gotator2_recursive.txt" ] && puredns resolve .tmp/gotator2_recursive.txt -w .tmp/permute2_recursive.txt -r $resolvers --resolvers-trusted $resolvers_trusted -l $PUREDNS_PUBLIC_LIMIT --rate-limit-trusted $PUREDNS_TRUSTED_LIMIT --wildcard-tests $PUREDNS_WILDCARDTEST_LIMIT --wildcard-batch $PUREDNS_WILDCARDBATCH_LIMIT 2>>"$LOGFILE" >/dev/null else [ -s ".tmp/gotator2_recursive.txt" ] && axiom-scan .tmp/gotator2_recursive.txt -m puredns-resolve -r /home/op/lists/resolvers.txt --resolvers-trusted /home/op/lists/resolvers_trusted.txt --wildcard-tests $PUREDNS_WILDCARDTEST_LIMIT --wildcard-batch $PUREDNS_WILDCARDBATCH_LIMIT -o .tmp/permute2_recursive.txt $AXIOM_EXTRA_ARGS 2>>"$LOGFILE" >/dev/null fi @@ -779,14 +1039,14 @@ function sub_recursive_brute(){ else end_subfunc "skipped in this mode or defined in reconftw.cfg" ${FUNCNAME[0]} fi - if [ "$INSCOPE" = true ]; then + if [[ "$INSCOPE" = true ]]; then check_inscope .tmp/permute_recursive.txt 2>>"$LOGFILE" >/dev/null check_inscope .tmp/brute_recursive.txt 2>>"$LOGFILE" >/dev/null fi # Last validation cat .tmp/permute_recursive.txt .tmp/brute_recursive.txt 2>>"$LOGFILE" | anew -q .tmp/brute_perm_recursive.txt - if [ ! "$AXIOM" = true ]; then + if [[ ! 
"$AXIOM" = true ]]; then [ -s ".tmp/brute_recursive.txt" ] && puredns resolve .tmp/brute_perm_recursive.txt -w .tmp/brute_perm_recursive_final.txt -r $resolvers --resolvers-trusted $resolvers_trusted -l $PUREDNS_PUBLIC_LIMIT --rate-limit-trusted $PUREDNS_TRUSTED_LIMIT --wildcard-tests $PUREDNS_WILDCARDTEST_LIMIT --wildcard-batch $PUREDNS_WILDCARDBATCH_LIMIT 2>>"$LOGFILE" >/dev/null else [ -s ".tmp/brute_recursive.txt" ] && axiom-scan .tmp/brute_perm_recursive.txt -m puredns-resolve -r /home/op/lists/resolvers.txt --resolvers-trusted /home/op/lists/resolvers_trusted.txt --wildcard-tests $PUREDNS_WILDCARDTEST_LIMIT --wildcard-batch $PUREDNS_WILDCARDBATCH_LIMIT -o .tmp/brute_perm_recursive_final.txt $AXIOM_EXTRA_ARGS 2>>"$LOGFILE" >/dev/null @@ -795,7 +1055,7 @@ function sub_recursive_brute(){ NUMOFLINES=$(cat .tmp/brute_perm_recursive_final.txt 2>>"$LOGFILE" | grep "\.$domain$\|^$domain$" | grep -E '^((http|https):\/\/)?([a-zA-Z0-9]([a-zA-Z0-9\-]*[a-zA-Z0-9])?\.)+[a-zA-Z]{1,}(\/.*)?$' | sed '/^$/d' | anew subdomains/subdomains.txt | wc -l) end_subfunc "${NUMOFLINES} new subs (recursive active)" ${FUNCNAME[0]} else - if [ "$SUB_RECURSIVE_BRUTE" = false ]; then + if [[ "$SUB_RECURSIVE_BRUTE" = false ]]; then printf "\n${yellow} ${FUNCNAME[0]} skipped in this mode or defined in reconftw.cfg ${reset}\n" else printf "${yellow} ${FUNCNAME[0]} is already processed, to force executing ${FUNCNAME[0]} delete\n $called_fn_dir/.${FUNCNAME[0]} ${reset}\n\n" @@ -803,12 +1063,12 @@ function sub_recursive_brute(){ fi } -function subtakeover(){ - if { [ ! -f "$called_fn_dir/.${FUNCNAME[0]}" ] || [ "$DIFF" = true ]; } && [ "$SUBTAKEOVER" = true ]; then +function subtakeover() { + if { [[ ! -f "$called_fn_dir/.${FUNCNAME[0]}" ]] || [[ "$DIFF" = true ]]; } && [[ "$SUBTAKEOVER" = true ]]; then start_func ${FUNCNAME[0]} "Looking for possible subdomain and DNS takeover" touch .tmp/tko.txt [ ! -s ".tmp/webs_all.txt" ] && cat webs/webs.txt webs/webs_uncommon_ports.txt 2>/dev/null | anew -q .tmp/webs_all.txt - if [ ! "$AXIOM" = true ]; then + if [[ ! "$AXIOM" = true ]]; then nuclei -update 2>>"$LOGFILE" >/dev/null cat subdomains/subdomains.txt .tmp/webs_all.txt 2>/dev/null | nuclei -silent -nh -tags takeover -severity info,low,medium,high,critical -retries 3 -rl $NUCLEI_RATELIMIT -t ${NUCLEI_TEMPLATES_PATH} -o .tmp/tko.txt else @@ -823,12 +1083,12 @@ function subtakeover(){ sed -i '/^$/d' .tmp/tko.txt NUMOFLINES=$(cat .tmp/tko.txt 2>>"$LOGFILE" | anew webs/takeover.txt | sed '/^$/d' | wc -l) - if [ "$NUMOFLINES" -gt 0 ]; then + if [[ "$NUMOFLINES" -gt 0 ]]; then notification "${NUMOFLINES} new possible takeovers found" info fi end_func "Results are saved in $domain/webs/takeover.txt" ${FUNCNAME[0]} else - if [ "$SUBTAKEOVER" = false ]; then + if [[ "$SUBTAKEOVER" = false ]]; then printf "\n${yellow} ${FUNCNAME[0]} skipped in this mode or defined in reconftw.cfg ${reset}\n" else printf "${yellow} ${FUNCNAME[0]} is already processed, to force executing ${FUNCNAME[0]} delete\n $called_fn_dir/.${FUNCNAME[0]} ${reset}\n\n" @@ -836,21 +1096,21 @@ function subtakeover(){ fi } -function zonetransfer(){ - if { [ ! -f "$called_fn_dir/.${FUNCNAME[0]}" ] || [ "$DIFF" = true ]; } && [ "$ZONETRANSFER" = true ] && ! [[ $domain =~ ^[0-9]+\.[0-9]+\.[0-9]+\.[0-9] ]]; then +function zonetransfer() { + if { [[ ! -f "$called_fn_dir/.${FUNCNAME[0]}" ]] || [[ "$DIFF" = true ]]; } && [[ "$ZONETRANSFER" = true ]] && ! 
[[ $domain =~ ^[0-9]+\.[0-9]+\.[0-9]+\.[0-9] ]]; then start_func ${FUNCNAME[0]} "Zone transfer check" - for ns in $(dig +short ns "$domain"); do dig axfr "$domain" @"$ns" >> subdomains/zonetransfer.txt; done - if [ -s "subdomains/zonetransfer.txt" ]; then - if ! grep -q "Transfer failed" subdomains/zonetransfer.txt ; then notification "Zone transfer found on ${domain}!" info; fi + for ns in $(dig +short ns "$domain"); do dig axfr "$domain" @"$ns" >>subdomains/zonetransfer.txt; done + if [[ -s "subdomains/zonetransfer.txt" ]]; then + if ! grep -q "Transfer failed" subdomains/zonetransfer.txt; then notification "Zone transfer found on ${domain}!" info; fi fi end_func "Results are saved in $domain/subdomains/zonetransfer.txt" ${FUNCNAME[0]} else - if [ "$ZONETRANSFER" = false ]; then + if [[ "$ZONETRANSFER" = false ]]; then printf "\n${yellow} ${FUNCNAME[0]} skipped in this mode or defined in reconftw.cfg ${reset}\n" elif [[ $domain =~ ^[0-9]+\.[0-9]+\.[0-9]+\.[0-9] ]]; then return else - if [ "$ZONETRANSFER" = false ]; then + if [[ "$ZONETRANSFER" = false ]]; then printf "\n${yellow} ${FUNCNAME[0]} skipped in this mode or defined in reconftw.cfg ${reset}\n" else printf "${yellow} ${FUNCNAME[0]} is already processed, to force executing ${FUNCNAME[0]} delete\n $called_fn_dir/.${FUNCNAME[0]} ${reset}\n\n" @@ -859,11 +1119,11 @@ function zonetransfer(){ fi } -function s3buckets(){ - if { [ ! -f "$called_fn_dir/.${FUNCNAME[0]}" ] || [ "$DIFF" = true ]; } && [ "$S3BUCKETS" = true ] && ! [[ $domain =~ ^[0-9]+\.[0-9]+\.[0-9]+\.[0-9] ]]; then +function s3buckets() { + if { [[ ! -f "$called_fn_dir/.${FUNCNAME[0]}" ]] || [[ "$DIFF" = true ]]; } && [[ "$S3BUCKETS" = true ]] && ! [[ $domain =~ ^[0-9]+\.[0-9]+\.[0-9]+\.[0-9] ]]; then start_func ${FUNCNAME[0]} "AWS S3 buckets search" # S3Scanner - if [ ! "$AXIOM" = true ]; then + if [[ ! 
"$AXIOM" = true ]]; then [ -s "subdomains/subdomains.txt" ] && s3scanner scan -f subdomains/subdomains.txt 2>>"$LOGFILE" | anew -q .tmp/s3buckets.txt else axiom-scan subdomains/subdomains.txt -m s3scanner -o .tmp/s3buckets_tmp.txt $AXIOM_EXTRA_ARGS 2>>"$LOGFILE" >/dev/null @@ -874,22 +1134,22 @@ function s3buckets(){ python3 ~/Tools/cloud_enum/cloud_enum.py -k $keyword -qs -l .tmp/output_cloud.txt 2>>"$LOGFILE" >/dev/null NUMOFLINES1=$(cat .tmp/output_cloud.txt 2>>"$LOGFILE" | sed '/^#/d' | sed '/^$/d' | anew subdomains/cloud_assets.txt | wc -l) - if [ "$NUMOFLINES1" -gt 0 ]; then + if [[ "$NUMOFLINES1" -gt 0 ]]; then notification "${NUMOFLINES1} new cloud assets found" info fi NUMOFLINES2=$(cat .tmp/s3buckets.txt 2>>"$LOGFILE" | grep -aiv "not_exist" | grep -aiv "Warning:" | grep -aiv "invalid_name" | grep -aiv "^http" | awk 'NF' | anew subdomains/s3buckets.txt | sed '/^$/d' | wc -l) - if [ "$NUMOFLINES2" -gt 0 ]; then + if [[ "$NUMOFLINES2" -gt 0 ]]; then notification "${NUMOFLINES2} new S3 buckets found" info fi end_func "Results are saved in subdomains/s3buckets.txt and subdomains/cloud_assets.txt" ${FUNCNAME[0]} else - if [ "$S3BUCKETS" = false ]; then + if [[ "$S3BUCKETS" = false ]]; then printf "\n${yellow} ${FUNCNAME[0]} skipped in this mode or defined in reconftw.cfg ${reset}\n" elif [[ $domain =~ ^[0-9]+\.[0-9]+\.[0-9]+\.[0-9] ]]; then return else - if [ "$S3BUCKETS" = false ]; then + if [[ "$S3BUCKETS" = false ]]; then printf "\n${yellow} ${FUNCNAME[0]} skipped in this mode or defined in reconftw.cfg ${reset}\n" else printf "${yellow} ${FUNCNAME[0]} is already processed, to force executing ${FUNCNAME[0]} delete\n $called_fn_dir/.${FUNCNAME[0]} ${reset}\n\n" @@ -902,27 +1162,27 @@ function s3buckets(){ ########################################### WEB DETECTION ##################################################### ############################################################################################################### -function webprobe_simple(){ - if { [ ! -f "$called_fn_dir/.${FUNCNAME[0]}" ] || [ "$DIFF" = true ]; } && [ "$WEBPROBESIMPLE" = true ]; then +function webprobe_simple() { + if { [[ ! -f "$called_fn_dir/.${FUNCNAME[0]}" ]] || [[ "$DIFF" = true ]]; } && [[ "$WEBPROBESIMPLE" = true ]]; then start_subfunc ${FUNCNAME[0]} "Running : Http probing $domain" - if [ ! "$AXIOM" = true ]; then + if [[ ! "$AXIOM" = true ]]; then cat subdomains/subdomains.txt | httpx ${HTTPX_FLAGS} -no-color -json -random-agent -threads $HTTPX_THREADS -rl $HTTPX_RATELIMIT -retries 2 -timeout $HTTPX_TIMEOUT -o .tmp/web_full_info_probe.txt 2>>"$LOGFILE" >/dev/null else axiom-scan subdomains/subdomains.txt -m httpx ${HTTPX_FLAGS} -no-color -json -random-agent -threads $HTTPX_THREADS -rl $HTTPX_RATELIMIT -retries 2 -timeout $HTTPX_TIMEOUT -o .tmp/web_full_info_probe.txt $AXIOM_EXTRA_ARGS 2>>"$LOGFILE" >/dev/null fi - cat .tmp/web_full_info.txt .tmp/web_full_info_probe.txt webs/web_full_info.txt 2>>"$LOGFILE" | jq -s 'try .' | jq 'try unique_by(.input)' | jq 'try .[]' 2>>"$LOGFILE" > webs/web_full_info.txt + cat .tmp/web_full_info.txt .tmp/web_full_info_probe.txt webs/web_full_info.txt 2>>"$LOGFILE" | jq -s 'try .' 
| jq 'try unique_by(.input)' | jq 'try .[]' 2>>"$LOGFILE" >webs/web_full_info.txt [ -s "webs/web_full_info.txt" ] && cat webs/web_full_info.txt | jq -r 'try .url' 2>/dev/null | grep "$domain" | grep -E '^((http|https):\/\/)?([a-zA-Z0-9]([a-zA-Z0-9\-]*[a-zA-Z0-9])?\.)+[a-zA-Z]{1,}(\/.*)?$' | sed "s/*.//" | anew -q .tmp/probed_tmp.txt [ -s "webs/web_full_info.txt" ] && cat webs/web_full_info.txt | jq -r 'try . |"\(.url) [\(.status_code)] [\(.title)] [\(.webserver)] \(.tech)"' | grep "$domain" | anew -q webs/web_full_info_plain.txt [ -s "$outOfScope_file" ] && deleteOutScoped $outOfScope_file .tmp/probed_tmp.txt NUMOFLINES=$(cat .tmp/probed_tmp.txt 2>>"$LOGFILE" | anew webs/webs.txt | sed '/^$/d' | wc -l) cat webs/webs.txt webs/webs_uncommon_ports.txt 2>/dev/null | anew -q .tmp/webs_all.txt end_subfunc "${NUMOFLINES} new websites resolved" ${FUNCNAME[0]} - if [ "$PROXY" = true ] && [ -n "$proxy_url" ] && [[ $(cat webs/webs.txt| wc -l) -le $DEEP_LIMIT2 ]]; then + if [[ "$PROXY" = true ]] && [[ -n "$proxy_url" ]] && [[ $(cat webs/webs.txt | wc -l) -le $DEEP_LIMIT2 ]]; then notification "Sending websites to proxy" info ffuf -mc all -w webs/webs.txt -u FUZZ -replay-proxy $proxy_url 2>>"$LOGFILE" >/dev/null fi else - if [ "$WEBPROBESIMPLE" = false ]; then + if [[ "$WEBPROBESIMPLE" = false ]]; then printf "\n${yellow} ${FUNCNAME[0]} skipped in this mode or defined in reconftw.cfg ${reset}\n" else printf "${yellow} ${FUNCNAME[0]} is already processed, to force executing ${FUNCNAME[0]} delete\n $called_fn_dir/.${FUNCNAME[0]} ${reset}\n\n" @@ -930,24 +1190,24 @@ function webprobe_simple(){ fi } -function webprobe_full(){ - if { [ ! -f "$called_fn_dir/.${FUNCNAME[0]}" ] || [ "$DIFF" = true ]; } && [ "$WEBPROBEFULL" = true ]; then +function webprobe_full() { + if { [[ ! -f "$called_fn_dir/.${FUNCNAME[0]}" ]] || [[ "$DIFF" = true ]]; } && [[ "$WEBPROBEFULL" = true ]]; then start_func ${FUNCNAME[0]} "Http probing non standard ports" - if [ -s "subdomains/subdomains.txt" ]; then - if [ ! "$AXIOM" = true ]; then - if [ -s "subdomains/subdomains.txt" ]; then + if [[ -s "subdomains/subdomains.txt" ]]; then + if [[ ! "$AXIOM" = true ]]; then + if [[ -s "subdomains/subdomains.txt" ]]; then cat subdomains/subdomains.txt | httpx -follow-host-redirects -random-agent -status-code -p $UNCOMMON_PORTS_WEB -threads $HTTPX_UNCOMMONPORTS_THREADS -timeout $HTTPX_UNCOMMONPORTS_TIMEOUT -silent -retries 2 -title -web-server -tech-detect -location -no-color -json -o .tmp/web_full_info_uncommon.txt 2>>"$LOGFILE" >/dev/null fi else - if [ -s "subdomains/subdomains.txt" ]; then + if [[ -s "subdomains/subdomains.txt" ]]; then axiom-scan subdomains/subdomains.txt -m httpx -follow-host-redirects -H \"${HEADER}\" -status-code -p $UNCOMMON_PORTS_WEB -threads $HTTPX_UNCOMMONPORTS_THREADS -timeout $HTTPX_UNCOMMONPORTS_TIMEOUT -silent -retries 2 -title -web-server -tech-detect -location -no-color -json -o .tmp/web_full_info_uncommon.txt $AXIOM_EXTRA_ARGS 2>>"$LOGFILE" >/dev/null fi fi fi [ -s ".tmp/web_full_info_uncommon.txt" ] && cat .tmp/web_full_info_uncommon.txt | jq -r 'try .url' 2>/dev/null | grep "$domain" | grep -E '^((http|https):\/\/)?([a-zA-Z0-9]([a-zA-Z0-9\-]*[a-zA-Z0-9])?\.)+[a-zA-Z]{1,}(\/.*)?$' | sed "s/*.//" | anew -q .tmp/probed_uncommon_ports_tmp.txt [ -s ".tmp/web_full_info_uncommon.txt" ] && cat .tmp/web_full_info_uncommon.txt | jq -r 'try . 
|"\(.url) [\(.status_code)] [\(.title)] [\(.webserver)] \(.tech)"' | anew -q webs/web_full_info_uncommon_plain.txt - if [ -s ".tmp/web_full_info_uncommon.txt" ]; then - if [[ $domain =~ ^[0-9]+\.[0-9]+\.[0-9]+\.[0-9] ]]; then + if [[ -s ".tmp/web_full_info_uncommon.txt" ]]; then + if [[ $domain =~ ^[0-9]+\.[0-9]+\.[0-9]+\.[0-9] ]]; then cat .tmp/web_full_info_uncommon.txt 2>>"$LOGFILE" | anew -q webs/web_full_info_uncommon.txt else cat .tmp/web_full_info_uncommon.txt 2>>"$LOGFILE" | grep "$domain" | anew -q webs/web_full_info_uncommon.txt @@ -958,12 +1218,12 @@ function webprobe_full(){ [ -s "webs/webs_uncommon_ports.txt" ] && cat webs/webs_uncommon_ports.txt cat webs/webs.txt webs/webs_uncommon_ports.txt 2>/dev/null | anew -q .tmp/webs_all.txt end_func "Results are saved in $domain/webs/webs_uncommon_ports.txt" ${FUNCNAME[0]} - if [ "$PROXY" = true ] && [ -n "$proxy_url" ] && [[ $(cat webs/webs_uncommon_ports.txt| wc -l) -le $DEEP_LIMIT2 ]]; then + if [[ "$PROXY" = true ]] && [[ -n "$proxy_url" ]] && [[ $(cat webs/webs_uncommon_ports.txt | wc -l) -le $DEEP_LIMIT2 ]]; then notification "Sending websites with uncommon ports to proxy" info ffuf -mc all -w webs/webs_uncommon_ports.txt -u FUZZ -replay-proxy $proxy_url 2>>"$LOGFILE" >/dev/null fi else - if [ "$WEBPROBEFULL" = false ]; then + if [[ "$WEBPROBEFULL" = false ]]; then printf "\n${yellow} ${FUNCNAME[0]} skipped in this mode or defined in reconftw.cfg ${reset}\n" else printf "${yellow} ${FUNCNAME[0]} is already processed, to force executing ${FUNCNAME[0]} delete\n $called_fn_dir/.${FUNCNAME[0]} ${reset}\n\n" @@ -971,22 +1231,22 @@ function webprobe_full(){ fi } -function screenshot(){ - if { [ ! -f "$called_fn_dir/.${FUNCNAME[0]}" ] || [ "$DIFF" = true ]; } && [ "$WEBSCREENSHOT" = true ]; then +function screenshot() { + if { [[ ! -f "$called_fn_dir/.${FUNCNAME[0]}" ]] || [[ "$DIFF" = true ]]; } && [[ "$WEBSCREENSHOT" = true ]]; then start_func ${FUNCNAME[0]} "Web Screenshots" [ ! -s ".tmp/webs_all.txt" ] && cat webs/webs.txt webs/webs_uncommon_ports.txt 2>/dev/null | anew -q .tmp/webs_all.txt - - num_lines=$(wc -l < .tmp/webs_all.txt) + + num_lines=$(wc -l <.tmp/webs_all.txt) dynamic_gowitness_timeout=$(expr $num_lines \* $GOWITNESS_TIMEOUT_PER_SITE) - if [ ! "$AXIOM" = true ]; then + if [[ ! "$AXIOM" = true ]]; then [ -s ".tmp/webs_all.txt" ] && timeout -k 1m ${dynamic_gowitness_timeout}s gowitness file -f .tmp/webs_all.txt -t $GOWITNESS_THREADS $GOWITNESS_FLAGS 2>>"$LOGFILE" else timeout -k 1m ${dynamic_gowitness_timeout}s axiom-scan .tmp/webs_all.txt -m gowitness -t $GOWITNESS_THREADS $GOWITNESS_FLAGS -o screenshots $AXIOM_EXTRA_ARGS 2>>"$LOGFILE" >/dev/null fi end_func "Results are saved in $domain/screenshots folder" ${FUNCNAME[0]} else - if [ "$WEBSCREENSHOT" = false ]; then + if [[ "$WEBSCREENSHOT" = false ]]; then printf "\n${yellow} ${FUNCNAME[0]} skipped in this mode or defined in reconftw.cfg ${reset}\n" else printf "${yellow} ${FUNCNAME[0]} is already processed, to force executing ${FUNCNAME[0]} delete\n $called_fn_dir/.${FUNCNAME[0]} ${reset}\n\n" @@ -994,11 +1254,11 @@ function screenshot(){ fi } -function virtualhosts(){ - if { [ ! -f "$called_fn_dir/.${FUNCNAME[0]}" ] || [ "$DIFF" = true ]; } && [ "$VIRTUALHOSTS" = true ]; then +function virtualhosts() { + if { [[ ! -f "$called_fn_dir/.${FUNCNAME[0]}" ]] || [[ "$DIFF" = true ]]; } && [[ "$VIRTUALHOSTS" = true ]]; then start_func ${FUNCNAME[0]} "Virtual Hosts dicovery" [ ! 
-s ".tmp/webs_all.txt" ] && cat webs/webs.txt webs/webs_uncommon_ports.txt 2>/dev/null | anew -q .tmp/webs_all.txt - if [ -s ".tmp/webs_all.txt" ]; then + if [[ -s ".tmp/webs_all.txt" ]]; then mkdir -p $dir/virtualhosts $dir/.tmp/virtualhosts interlace -tL .tmp/webs_all.txt -threads ${INTERLACE_THREADS} -c "ffuf -ac -t ${FFUF_THREADS} -rate ${FFUF_RATELIMIT} -H \"${HEADER}\" -H \"Host: FUZZ._cleantarget_\" -w ${fuzz_wordlist} -maxtime ${FFUF_MAXTIME} -u _target_ -of json -o _output_/_cleantarget_.json" -o $dir/.tmp/virtualhosts 2>>"$LOGFILE" >/dev/null for sub in $(cat .tmp/webs_all.txt); do @@ -1011,7 +1271,7 @@ function virtualhosts(){ end_func "No $domain/web/webs.txts file found, virtualhosts skipped " ${FUNCNAME[0]} fi else - if [ "$VIRTUALHOSTS" = false ]; then + if [[ "$VIRTUALHOSTS" = false ]]; then printf "\n${yellow} ${FUNCNAME[0]} skipped in this mode or defined in reconftw.cfg ${reset}\n" else printf "${yellow} ${FUNCNAME[0]} is already processed, to force executing ${FUNCNAME[0]} delete\n $called_fn_dir/.${FUNCNAME[0]} ${reset}\n\n" @@ -1023,27 +1283,33 @@ function virtualhosts(){ ############################################# HOST SCAN ####################################################### ############################################################################################################### -function favicon(){ - if { [ ! -f "$called_fn_dir/.${FUNCNAME[0]}" ] || [ "$DIFF" = true ]; } && [ "$FAVICON" = true ] && ! [[ $domain =~ ^[0-9]+\.[0-9]+\.[0-9]+\.[0-9] ]]; then +function favicon() { + if { [[ ! -f "$called_fn_dir/.${FUNCNAME[0]}" ]] || [[ "$DIFF" = true ]]; } && [[ "$FAVICON" = true ]] && ! [[ $domain =~ ^[0-9]+\.[0-9]+\.[0-9]+\.[0-9] ]]; then start_func ${FUNCNAME[0]} "Favicon Ip Lookup" - cd "$tools/fav-up" || { echo "Failed to cd to $dir in ${FUNCNAME[0]} @ line ${LINENO}"; exit 1; } + cd "${tools}/fav-up" || { + echo "Failed to cd to $dir in ${FUNCNAME[0]} @ line ${LINENO}" + exit 1 + } python3 favUp.py -w "$domain" -sc -o favicontest.json 2>>"$LOGFILE" >/dev/null - if [ -s "favicontest.json" ]; then - cat favicontest.json | jq -r 'try .found_ips' 2>>"$LOGFILE" | grep -v "not-found" > favicontest.txt + if [[ -s "favicontest.json" ]]; then + cat favicontest.json | jq -r 'try .found_ips' 2>>"$LOGFILE" | grep -v "not-found" >favicontest.txt sed -i "s/|/\n/g" favicontest.txt cat favicontest.txt 2>>"$LOGFILE" mv favicontest.txt $dir/hosts/favicontest.txt 2>>"$LOGFILE" rm -f favicontest.json 2>>"$LOGFILE" fi - cd "$dir" || { echo "Failed to cd to $dir in ${FUNCNAME[0]} @ line ${LINENO}"; exit 1; } + cd "$dir" || { + echo "Failed to cd to $dir in ${FUNCNAME[0]} @ line ${LINENO}" + exit 1 + } end_func "Results are saved in hosts/favicontest.txt" ${FUNCNAME[0]} else - if [ "$FAVICON" = false ]; then + if [[ "$FAVICON" = false ]]; then printf "\n${yellow} ${FUNCNAME[0]} skipped in this mode or defined in reconftw.cfg ${reset}\n" elif [[ $domain =~ ^[0-9]+\.[0-9]+\.[0-9]+\.[0-9] ]]; then return else - if [ "$FAVICON" = false ]; then + if [[ "$FAVICON" = false ]]; then printf "\n${yellow} ${FUNCNAME[0]} skipped in this mode or defined in reconftw.cfg ${reset}\n" else printf "${yellow} ${FUNCNAME[0]} is already processed, to force executing ${FUNCNAME[0]} delete\n $called_fn_dir/.${FUNCNAME[0]} ${reset}\n\n" @@ -1052,25 +1318,26 @@ function favicon(){ fi } -function portscan(){ - if { [ ! -f "$called_fn_dir/.${FUNCNAME[0]}" ] || [ "$DIFF" = true ]; } && [ "$PORTSCANNER" = true ]; then +function portscan() { + if { [[ ! 
-f "$called_fn_dir/.${FUNCNAME[0]}" ]] || [[ "$DIFF" = true ]]; } && [[ "$PORTSCANNER" = true ]]; then start_func ${FUNCNAME[0]} "Port scan" if ! [[ $domain =~ ^[0-9]+\.[0-9]+\.[0-9]+\.[0-9] ]]; then [ -s "subdomains/subdomains_dnsregs.json" ] && cat subdomains/subdomains_dnsregs.json | jq -r 'try . | "\(.host) \(.a[0])"' | anew -q .tmp/subs_ips.txt [ -s ".tmp/subs_ips.txt" ] && awk '{ print $2 " " $1}' .tmp/subs_ips.txt | sort -k2 -n | anew -q hosts/subs_ips_vhosts.txt [ -s "hosts/subs_ips_vhosts.txt" ] && cat hosts/subs_ips_vhosts.txt | cut -d ' ' -f1 | grep -aEiv "^(127|10|169\.154|172\.1[6789]|172\.2[0-9]|172\.3[01]|192\.168)\." | grep -oE "\b([0-9]{1,3}\.){3}[0-9]{1,3}\b" | anew -q hosts/ips.txt - else echo $domain | grep -aEiv "^(127|10|169\.154|172\.1[6789]|172\.2[0-9]|172\.3[01]|192\.168)\." | grep -oE "\b([0-9]{1,3}\.){3}[0-9]{1,3}\b" | anew -q hosts/ips.txt + else + echo $domain | grep -aEiv "^(127|10|169\.154|172\.1[6789]|172\.2[0-9]|172\.3[01]|192\.168)\." | grep -oE "\b([0-9]{1,3}\.){3}[0-9]{1,3}\b" | anew -q hosts/ips.txt fi - [ ! -s "hosts/cdn_providers.txt" ] && cat hosts/ips.txt 2>/dev/null | cdncheck -silent -resp -nc 2>/dev/null > hosts/cdn_providers.txt + [ ! -s "hosts/cdn_providers.txt" ] && cat hosts/ips.txt 2>/dev/null | cdncheck -silent -resp -nc 2>/dev/null >hosts/cdn_providers.txt [ -s "hosts/ips.txt" ] && comm -23 <(cat hosts/ips.txt | sort -u) <(cat hosts/cdn_providers.txt | cut -d'[' -f1 | sed 's/[[:space:]]*$//' | sort -u) | grep -aEiv "^(127|10|169\.154|172\.1[6789]|172\.2[0-9]|172\.3[01]|192\.168)\." | grep -oE "\b([0-9]{1,3}\.){3}[0-9]{1,3}\b" | sort -u | anew -q .tmp/ips_nocdn.txt - printf "${bblue}\n Resolved IP addresses (No CDN) ${reset}\n\n"; + printf "${bblue}\n Resolved IP addresses (No CDN) ${reset}\n\n" [ -s ".tmp/ips_nocdn.txt" ] && cat .tmp/ips_nocdn.txt | sort - printf "${bblue}\n Scanning ports... ${reset}\n\n"; - if [ "$PORTSCAN_PASSIVE" = true ] && [ ! -f "hosts/portscan_passive.txt" ] && [ -s ".tmp/ips_nocdn.txt" ] ; then - smap -iL .tmp/ips_nocdn.txt > hosts/portscan_passive.txt + printf "${bblue}\n Scanning ports... ${reset}\n\n" + if [[ "$PORTSCAN_PASSIVE" = true ]] && [[ ! -f "hosts/portscan_passive.txt" ]] && [[ -s ".tmp/ips_nocdn.txt" ]]; then + smap -iL .tmp/ips_nocdn.txt >hosts/portscan_passive.txt fi - if [ "$PORTSCAN_ACTIVE" = true ]; then - if [ ! "$AXIOM" = true ]; then + if [[ "$PORTSCAN_ACTIVE" = true ]]; then + if [[ ! "$AXIOM" = true ]]; then [ -s ".tmp/ips_nocdn.txt" ] && $SUDO nmap --top-ports 200 -sV -n --max-retries 2 -Pn --open --script vulners -iL .tmp/ips_nocdn.txt -oA hosts/portscan_active 2>>"$LOGFILE" >/dev/null else [ -s ".tmp/ips_nocdn.txt" ] && axiom-scan .tmp/ips_nocdn.txt -m nmapx --top-ports 200 -sV -n -Pn --open --max-retries 2 --script vulners -oA hosts/portscan_active $AXIOM_EXTRA_ARGS 2>>"$LOGFILE" >/dev/null @@ -1078,7 +1345,7 @@ function portscan(){ fi end_func "Results are saved in hosts/portscan_[passive|active].txt" ${FUNCNAME[0]} else - if [ "$PORTSCANNER" = false ]; then + if [[ "$PORTSCANNER" = false ]]; then printf "\n${yellow} ${FUNCNAME[0]} skipped in this mode or defined in reconftw.cfg ${reset}\n" else printf "${yellow} ${FUNCNAME[0]} is already processed, to force executing ${FUNCNAME[0]} delete\n $called_fn_dir/.${FUNCNAME[0]} ${reset}\n\n" @@ -1086,14 +1353,14 @@ function portscan(){ fi } -function cdnprovider(){ - if { [ ! -f "$called_fn_dir/.${FUNCNAME[0]}" ] || [ "$DIFF" = true ]; } && [ "$CDN_IP" = true ]; then +function cdnprovider() { + if { [[ ! 
-f "$called_fn_dir/.${FUNCNAME[0]}" ]] || [[ "$DIFF" = true ]]; } && [[ "$CDN_IP" = true ]]; then start_func ${FUNCNAME[0]} "CDN provider check" - [ -s "subdomains/subdomains_dnsregs.json" ] && cat subdomains/subdomains_dnsregs.json | jq -r 'try . | .a[]' | grep -aEiv "^(127|10|169\.154|172\.1[6789]|172\.2[0-9]|172\.3[01]|192\.168)\." | grep -oE "\b([0-9]{1,3}\.){3}[0-9]{1,3}\b" | sort -u > .tmp/ips_cdn.txt + [ -s "subdomains/subdomains_dnsregs.json" ] && cat subdomains/subdomains_dnsregs.json | jq -r 'try . | .a[]' | grep -aEiv "^(127|10|169\.154|172\.1[6789]|172\.2[0-9]|172\.3[01]|192\.168)\." | grep -oE "\b([0-9]{1,3}\.){3}[0-9]{1,3}\b" | sort -u >.tmp/ips_cdn.txt [ -s ".tmp/ips_cdn.txt" ] && cat .tmp/ips_cdn.txt | cdncheck -silent -resp -nc | anew -q $dir/hosts/cdn_providers.txt end_func "Results are saved in hosts/cdn_providers.txt" ${FUNCNAME[0]} else - if [ "$CDN_IP" = false ]; then + if [[ "$CDN_IP" = false ]]; then printf "\n${yellow} ${FUNCNAME[0]} skipped in this mode or defined in reconftw.cfg ${reset}\n" else printf "${yellow} ${FUNCNAME[0]} is already processed, to force executing ${FUNCNAME[0]} delete\n $called_fn_dir/.${FUNCNAME[0]} ${reset}\n\n" @@ -1105,18 +1372,18 @@ function cdnprovider(){ ############################################# WEB SCAN ######################################################## ############################################################################################################### -function waf_checks(){ - if { [ ! -f "$called_fn_dir/.${FUNCNAME[0]}" ] || [ "$DIFF" = true ]; } && [ "$WAF_DETECTION" = true ]; then +function waf_checks() { + if { [[ ! -f "$called_fn_dir/.${FUNCNAME[0]}" ]] || [[ "$DIFF" = true ]]; } && [[ "$WAF_DETECTION" = true ]]; then start_func ${FUNCNAME[0]} "Website's WAF detection" [ ! -s ".tmp/webs_all.txt" ] && cat webs/webs.txt webs/webs_uncommon_ports.txt 2>/dev/null | anew -q .tmp/webs_all.txt - if [ -s ".tmp/webs_all.txt" ]; then - if [ ! "$AXIOM" = true ]; then + if [[ -s ".tmp/webs_all.txt" ]]; then + if [[ ! "$AXIOM" = true ]]; then wafw00f -i .tmp/webs_all.txt -o .tmp/wafs.txt 2>>"$LOGFILE" >/dev/null else axiom-scan .tmp/webs_all.txt -m wafw00f -o .tmp/wafs.txt $AXIOM_EXTRA_ARGS 2>>"$LOGFILE" >/dev/null fi - if [ -s ".tmp/wafs.txt" ]; then - cat .tmp/wafs.txt | sed -e 's/^[ \t]*//' -e 's/ \+ /\t/g' -e '/(None)/d' | tr -s "\t" ";" > webs/webs_wafs.txt + if [[ -s ".tmp/wafs.txt" ]]; then + cat .tmp/wafs.txt | sed -e 's/^[ \t]*//' -e 's/ \+ /\t/g' -e '/(None)/d' | tr -s "\t" ";" >webs/webs_wafs.txt NUMOFLINES=$(cat webs/webs_wafs.txt 2>>"$LOGFILE" | sed '/^$/d' | wc -l) notification "${NUMOFLINES} websites protected by waf" info end_func "Results are saved in $domain/webs/webs_wafs.txt" ${FUNCNAME[0]} @@ -1127,7 +1394,7 @@ function waf_checks(){ end_func "No websites to scan" ${FUNCNAME[0]} fi else - if [ "$WAF_DETECTION" = false ]; then + if [[ "$WAF_DETECTION" = false ]]; then printf "\n${yellow} ${FUNCNAME[0]} skipped in this mode or defined in reconftw.cfg ${reset}\n" else printf "${yellow} ${FUNCNAME[0]} is already processed, to force executing ${FUNCNAME[0]} delete\n $called_fn_dir/.${FUNCNAME[0]} ${reset}\n\n" @@ -1135,26 +1402,24 @@ function waf_checks(){ fi } -function nuclei_check(){ - if { [ ! -f "$called_fn_dir/.${FUNCNAME[0]}" ] || [ "$DIFF" = true ]; } && [ "$NUCLEICHECK" = true ]; then +function nuclei_check() { + if { [[ ! 
-f "$called_fn_dir/.${FUNCNAME[0]}" ]] || [[ "$DIFF" = true ]]; } && [[ "$NUCLEICHECK" = true ]]; then start_func ${FUNCNAME[0]} "Templates based web scanner" nuclei -update 2>>"$LOGFILE" >/dev/null mkdir -p nuclei_output [ ! -s ".tmp/webs_all.txt" ] && cat webs/webs.txt webs/webs_uncommon_ports.txt 2>/dev/null | anew -q .tmp/webs_all.txt [ ! -s ".tmp/webs_subs.txt" ] && cat subdomains/subdomains.txt .tmp/webs_all.txt 2>>"$LOGFILE" | anew -q .tmp/webs_subs.txt - if [ ! "$AXIOM" = true ]; then # avoid globbing (expansion of *). - IFS=',' read -ra severity_array <<< "$NUCLEI_SEVERITY" - for crit in "${severity_array[@]}" - do + if [[ ! "$AXIOM" = true ]]; then # avoid globbing (expansion of *). + IFS=',' read -ra severity_array <<<"$NUCLEI_SEVERITY" + for crit in "${severity_array[@]}"; do printf "${yellow}\n Running : Nuclei $crit ${reset}\n\n" cat .tmp/webs_subs.txt 2>/dev/null | nuclei $NUCLEI_FLAGS -severity $crit -nh -rl $NUCLEI_RATELIMIT -o nuclei_output/${crit}.txt done printf "\n\n" else - if [ -s ".tmp/webs_subs.txt" ]; then - IFS=',' read -ra severity_array <<< "$NUCLEI_SEVERITY" - for crit in "${severity_array[@]}" - do + if [[ -s ".tmp/webs_subs.txt" ]]; then + IFS=',' read -ra severity_array <<<"$NUCLEI_SEVERITY" + for crit in "${severity_array[@]}"; do printf "${yellow}\n Running : Nuclei $crit, check results on nuclei_output folder${reset}\n\n" axiom-scan .tmp/webs_subs.txt -m nuclei --nuclei-templates ${NUCLEI_TEMPLATES_PATH} -severity ${crit} -nh -rl $NUCLEI_RATELIMIT -o nuclei_output/${crit}.txt $AXIOM_EXTRA_ARGS 2>>"$LOGFILE" >/dev/null [ -s "nuclei_output/${crit}.txt" ] && cat nuclei_output/${crit}.txt @@ -1164,7 +1429,7 @@ function nuclei_check(){ fi end_func "Results are saved in $domain/nuclei_output folder" ${FUNCNAME[0]} else - if [ "$NUCLEICHECK" = false ]; then + if [[ "$NUCLEICHECK" = false ]]; then printf "\n${yellow} ${FUNCNAME[0]} skipped in this mode or defined in reconftw.cfg ${reset}\n" else printf "${yellow} ${FUNCNAME[0]} is already processed, to force executing ${FUNCNAME[0]} delete\n $called_fn_dir/.${FUNCNAME[0]} ${reset}\n\n" @@ -1172,13 +1437,13 @@ function nuclei_check(){ fi } -function fuzz(){ - if { [ ! -f "$called_fn_dir/.${FUNCNAME[0]}" ] || [ "$DIFF" = true ]; } && [ "$FUZZ" = true ]; then +function fuzz() { + if { [[ ! -f "$called_fn_dir/.${FUNCNAME[0]}" ]] || [[ "$DIFF" = true ]]; } && [[ "$FUZZ" = true ]]; then start_func ${FUNCNAME[0]} "Web directory fuzzing" [ ! -s ".tmp/webs_all.txt" ] && cat webs/webs.txt webs/webs_uncommon_ports.txt 2>/dev/null | anew -q .tmp/webs_all.txt - if [ -s ".tmp/webs_all.txt" ]; then + if [[ -s ".tmp/webs_all.txt" ]]; then mkdir -p $dir/fuzzing $dir/.tmp/fuzzing - if [ ! "$AXIOM" = true ]; then + if [[ ! 
"$AXIOM" = true ]]; then interlace -tL .tmp/webs_all.txt -threads ${INTERLACE_THREADS} -c "ffuf ${FFUF_FLAGS} -t ${FFUF_THREADS} -rate ${FFUF_RATELIMIT} -H \"${HEADER}\" -w ${fuzz_wordlist} -maxtime ${FFUF_MAXTIME} -u _target_/FUZZ -o _output_/_cleantarget_.json" -o $dir/.tmp/fuzzing 2>>"$LOGFILE" >/dev/null for sub in $(cat .tmp/webs_all.txt); do sub_out=$(echo $sub | sed -e 's|^[^/]*//||' -e 's|/.*$||') @@ -1201,7 +1466,7 @@ function fuzz(){ end_func "No $domain/web/webs.txts file found, fuzzing skipped " ${FUNCNAME[0]} fi else - if [ "$FUZZ" = false ]; then + if [[ "$FUZZ" = false ]]; then printf "\n${yellow} ${FUNCNAME[0]} skipped in this mode or defined in reconftw.cfg ${reset}\n" else printf "${yellow} ${FUNCNAME[0]} is already processed, to force executing ${FUNCNAME[0]} delete\n $called_fn_dir/.${FUNCNAME[0]} ${reset}\n\n" @@ -1209,31 +1474,31 @@ function fuzz(){ fi } -function cms_scanner(){ - if { [ ! -f "$called_fn_dir/.${FUNCNAME[0]}" ] || [ "$DIFF" = true ]; } && [ "$CMS_SCANNER" = true ]; then +function cms_scanner() { + if { [[ ! -f "$called_fn_dir/.${FUNCNAME[0]}" ]] || [[ "$DIFF" = true ]]; } && [[ "$CMS_SCANNER" = true ]]; then start_func ${FUNCNAME[0]} "CMS Scanner" mkdir -p $dir/cms && rm -rf $dir/cms/* [ ! -s ".tmp/webs_all.txt" ] && cat webs/webs.txt webs/webs_uncommon_ports.txt 2>/dev/null | anew -q .tmp/webs_all.txt - if [ -s ".tmp/webs_all.txt" ]; then - tr '\n' ',' < .tmp/webs_all.txt > .tmp/cms.txt - timeout -k 1m ${CMSSCAN_TIMEOUT}s python3 $tools/CMSeeK/cmseek.py -l .tmp/cms.txt --batch -r 2>>"$LOGFILE" &>/dev/null + if [[ -s ".tmp/webs_all.txt" ]]; then + tr '\n' ',' <.tmp/webs_all.txt >.tmp/cms.txt 2>>"$LOGFILE" + timeout -k 1m ${CMSSCAN_TIMEOUT}s python3 ${tools}/CMSeeK/cmseek.py -l .tmp/cms.txt --batch -r &>>"$LOGFILE" exit_status=$? 
- if [[ $exit_status -eq 125 ]]; then - echo "TIMEOUT cmseek.py - investigate manually for $dir" >> "$LOGFILE" + if [[ "${exit_status}" -eq 125 ]]; then + echo "TIMEOUT cmseek.py - investigate manually for $dir" >>"$LOGFILE" end_func "TIMEOUT cmseek.py - investigate manually for $dir" ${FUNCNAME[0]} return - elif [[ $exit_status -ne 0 ]]; then - echo "ERROR cmseek.py - investigate manually for $dir" >> "$LOGFILE" + elif [[ "${exit_status}" -ne 0 ]]; then + echo "ERROR cmseek.py - investigate manually for $dir" >>"$LOGFILE" end_func "ERROR cmseek.py - investigate manually for $dir" ${FUNCNAME[0]} return - fi # otherwise Assume we have a successfully exited cmseek + fi # otherwise Assume we have a successfully exited cmseek for sub in $(cat .tmp/webs_all.txt); do sub_out=$(echo $sub | sed -e 's|^[^/]*//||' -e 's|/.*$||') - cms_id=$(cat $tools/CMSeeK/Result/${sub_out}/cms.json 2>/dev/null | jq -r 'try .cms_id') - if [ -z "$cms_id" ]; then - rm -rf $tools/CMSeeK/Result/${sub_out} + cms_id=$(cat ${tools}/CMSeeK/Result/${sub_out}/cms.json 2>/dev/null | jq -r 'try .cms_id') + if [[ -z "$cms_id" ]]; then + rm -rf ${tools}/CMSeeK/Result/${sub_out} else - mv -f $tools/CMSeeK/Result/${sub_out} $dir/cms/ 2>>"$LOGFILE" + mv -f ${tools}/CMSeeK/Result/${sub_out} $dir/cms/ 2>>"$LOGFILE" fi done end_func "Results are saved in $domain/cms/*subdomain* folder" ${FUNCNAME[0]} @@ -1241,7 +1506,7 @@ function cms_scanner(){ end_func "No $domain/web/webs.txts file found, cms scanner skipped" ${FUNCNAME[0]} fi else - if [ "$CMS_SCANNER" = false ]; then + if [[ "$CMS_SCANNER" = false ]]; then printf "\n${yellow} ${FUNCNAME[0]} skipped in this mode or defined in reconftw.cfg ${reset}\n" else printf "${yellow} ${FUNCNAME[0]} is already processed, to force executing ${FUNCNAME[0]} delete\n $called_fn_dir/.${FUNCNAME[0]} ${reset}\n\n" @@ -1249,29 +1514,29 @@ function cms_scanner(){ fi } -function urlchecks(){ - if { [ ! -f "$called_fn_dir/.${FUNCNAME[0]}" ] || [ "$DIFF" = true ]; } && [ "$URL_CHECK" = true ]; then +function urlchecks() { + if { [[ ! -f "$called_fn_dir/.${FUNCNAME[0]}" ]] || [[ "$DIFF" = true ]]; } && [[ "$URL_CHECK" = true ]]; then start_func ${FUNCNAME[0]} "URL Extraction" mkdir -p js [ ! -s ".tmp/webs_all.txt" ] && cat webs/webs.txt webs/webs_uncommon_ports.txt 2>/dev/null | anew -q .tmp/webs_all.txt - if [ -s ".tmp/webs_all.txt" ]; then - if [ ! "$AXIOM" = true ]; then - if [ "$URL_CHECK_PASSIVE" = true ]; then - if [ "$DEEP" = true ]; then - cat .tmp/webs_all.txt | unfurl -u domains > .tmp/waymore_input.txt - python3 $tools/waymore/waymore.py -i .tmp/waymore_input.txt -mode U -f -oU .tmp/url_extract_tmp.txt 2>>"$LOGFILE" >/dev/null + if [[ -s ".tmp/webs_all.txt" ]]; then + if [[ ! "$AXIOM" = true ]]; then + if [[ "$URL_CHECK_PASSIVE" = true ]]; then + if [[ "$DEEP" = true ]]; then + cat .tmp/webs_all.txt | unfurl -u domains >.tmp/waymore_input.txt + python3 ${tools}/waymore/waymore.py -i .tmp/waymore_input.txt -mode U -f -oU .tmp/url_extract_tmp.txt 2>>"$LOGFILE" >/dev/null else cat .tmp/webs_all.txt | gau --threads $GAU_THREADS | anew -q .tmp/url_extract_tmp.txt fi - if [ -s "${GITHUB_TOKENS}" ]; then + if [[ -s "${GITHUB_TOKENS}" ]]; then github-endpoints -q -k -d $domain -t ${GITHUB_TOKENS} -o .tmp/github-endpoints.txt 2>>"$LOGFILE" >/dev/null [ -s ".tmp/github-endpoints.txt" ] && cat .tmp/github-endpoints.txt | anew -q .tmp/url_extract_tmp.txt fi fi diff_webs=$(diff <(sort -u .tmp/probed_tmp.txt 2>>"$LOGFILE") <(sort -u .tmp/webs_all.txt 2>>"$LOGFILE") | wc -l) - if [ $diff_webs != "0" ] || [ ! 
-s ".tmp/katana.txt" ]; then - if [ "$URL_CHECK_ACTIVE" = true ]; then - if [ "$DEEP" = true ]; then + if [[ $diff_webs != "0" ]] || [[ ! -s ".tmp/katana.txt" ]]; then + if [[ "$URL_CHECK_ACTIVE" = true ]]; then + if [[ "$DEEP" = true ]]; then katana -silent -list .tmp/webs_all.txt -jc -kf all -c $KATANA_THREADS -d 3 -fs rdn -o .tmp/katana.txt 2>>"$LOGFILE" >/dev/null else katana -silent -list .tmp/webs_all.txt -jc -kf all -c $KATANA_THREADS -d 2 -fs rdn -o .tmp/katana.txt 2>>"$LOGFILE" >/dev/null @@ -1279,47 +1544,47 @@ function urlchecks(){ fi fi else - if [ "$URL_CHECK_PASSIVE" = true ]; then - if [ "$DEEP" = true ]; then - cat .tmp/webs_all.txt | unfurl -u domains > .tmp/waymore_input.txt + if [[ "$URL_CHECK_PASSIVE" = true ]]; then + if [[ "$DEEP" = true ]]; then + cat .tmp/webs_all.txt | unfurl -u domains >.tmp/waymore_input.txt axiom-scan .tmp/waymore_input.txt -m waymore -o .tmp/url_extract_tmp.txt $AXIOM_EXTRA_ARGS 2>>"$LOGFILE" >/dev/null else axiom-scan .tmp/webs_all.txt -m gau -o .tmp/url_extract_tmp.txt $AXIOM_EXTRA_ARGS 2>>"$LOGFILE" >/dev/null - fi - if [ -s "${GITHUB_TOKENS}" ]; then + fi + if [[ -s "${GITHUB_TOKENS}" ]]; then github-endpoints -q -k -d $domain -t ${GITHUB_TOKENS} -o .tmp/github-endpoints.txt 2>>"$LOGFILE" >/dev/null [ -s ".tmp/github-endpoints.txt" ] && cat .tmp/github-endpoints.txt | anew -q .tmp/url_extract_tmp.txt fi fi diff_webs=$(diff <(sort -u .tmp/probed_tmp.txt) <(sort -u .tmp/webs_all.txt) | wc -l) - if [ $diff_webs != "0" ] || [ ! -s ".tmp/katana.txt" ]; then - if [ "$URL_CHECK_ACTIVE" = true ]; then - if [ "$DEEP" = true ]; then + if [[ $diff_webs != "0" ]] || [[ ! -s ".tmp/katana.txt" ]]; then + if [[ "$URL_CHECK_ACTIVE" = true ]]; then + if [[ "$DEEP" = true ]]; then axiom-scan .tmp/webs_all.txt -m katana -jc -kf all -d 3 -fs rdn -fs rdn -o .tmp/katana.txt $AXIOM_EXTRA_ARGS 2>>"$LOGFILE" >/dev/null else axiom-scan .tmp/webs_all.txt -m katana -jc -kf all -d 2 -fs rdn -fs rdn -o .tmp/katana.txt $AXIOM_EXTRA_ARGS 2>>"$LOGFILE" >/dev/null - fi + fi fi fi fi [ -s ".tmp/katana.txt" ] && sed -i '/^.\{2048\}./d' .tmp/katana.txt [ -s ".tmp/katana.txt" ] && cat .tmp/katana.txt | anew -q .tmp/url_extract_tmp.txt [ -s ".tmp/url_extract_tmp.txt" ] && cat .tmp/url_extract_tmp.txt | grep "${domain}" | grep -E '^((http|https):\/\/)?([a-zA-Z0-9]([a-zA-Z0-9\-]*[a-zA-Z0-9])?\.)+[a-zA-Z]{1,}(\/.*)?$' | grep -aEi "\.(js)" | anew -q .tmp/url_extract_js.txt - if [ "$DEEP" = true ]; then - [ -s ".tmp/url_extract_js.txt" ] && interlace -tL .tmp/url_extract_js.txt -threads 10 -c "python3 $tools/JSA/jsa.py -f target | anew -q .tmp/url_extract_tmp.txt" &>/dev/null + if [[ "$DEEP" = true ]]; then + [ -s ".tmp/url_extract_js.txt" ] && interlace -tL .tmp/url_extract_js.txt -threads 10 -c "python3 ${tools}/JSA/jsa.py -f target | anew -q .tmp/url_extract_tmp.txt" &>/dev/null fi - [ -s ".tmp/url_extract_tmp.txt" ] && cat .tmp/url_extract_tmp.txt | grep "${domain}" | grep -E '^((http|https):\/\/)?([a-zA-Z0-9]([a-zA-Z0-9\-]*[a-zA-Z0-9])?\.)+[a-zA-Z]{1,}(\/.*)?$' | grep "=" | qsreplace -a 2>>"$LOGFILE" | grep -aEiv "\.(eot|jpg|jpeg|gif|css|tif|tiff|png|ttf|otf|woff|woff2|ico|pdf|svg|txt|js)$" | anew -q .tmp/url_extract_tmp2.txt - [ -s ".tmp/url_extract_tmp2.txt" ] && cat .tmp/url_extract_tmp2.txt | python3 $tools/urless/urless/urless.py | anew -q .tmp/url_extract_uddup.txt 2>>"$LOGFILE" >/dev/null + [ -s ".tmp/url_extract_tmp.txt" ] && cat .tmp/url_extract_tmp.txt | grep "${domain}" | grep -E '^((http|https):\/\/)?([a-zA-Z0-9]([a-zA-Z0-9\-]*[a-zA-Z0-9])?\.)+[a-zA-Z]{1,}(\/.*)?$' | 
grep "=" | qsreplace -a 2>>"$LOGFILE" | grep -aEiv "\.(eot|jpg|jpeg|gif|css|tif|tiff|png|ttf|otf|woff|woff2|ico|pdf|svg|txt|js)$" | anew -q .tmp/url_extract_tmp2.txt + [ -s ".tmp/url_extract_tmp2.txt" ] && cat .tmp/url_extract_tmp2.txt | python3 ${tools}/urless/urless/urless.py | anew -q .tmp/url_extract_uddup.txt 2>>"$LOGFILE" >/dev/null NUMOFLINES=$(cat .tmp/url_extract_uddup.txt 2>>"$LOGFILE" | anew webs/url_extract.txt | sed '/^$/d' | wc -l) notification "${NUMOFLINES} new urls with params" info end_func "Results are saved in $domain/webs/url_extract.txt" ${FUNCNAME[0]} - if [ "$PROXY" = true ] && [ -n "$proxy_url" ] && [[ $(cat webs/url_extract.txt | wc -l) -le $DEEP_LIMIT2 ]]; then + if [[ "$PROXY" = true ]] && [[ -n "$proxy_url" ]] && [[ $(cat webs/url_extract.txt | wc -l) -le $DEEP_LIMIT2 ]]; then notification "Sending urls to proxy" info ffuf -mc all -w webs/url_extract.txt -u FUZZ -replay-proxy $proxy_url 2>>"$LOGFILE" >/dev/null fi fi else - if [ "$URL_CHECK" = false ]; then + if [[ "$URL_CHECK" = false ]]; then printf "\n${yellow} ${FUNCNAME[0]} skipped in this mode or defined in reconftw.cfg ${reset}\n" else printf "${yellow} ${FUNCNAME[0]} is already processed, to force executing ${FUNCNAME[0]} delete\n $called_fn_dir/.${FUNCNAME[0]} ${reset}\n\n" @@ -1327,11 +1592,11 @@ function urlchecks(){ fi } -function url_gf(){ - if { [ ! -f "$called_fn_dir/.${FUNCNAME[0]}" ] || [ "$DIFF" = true ]; } && [ "$URL_GF" = true ]; then +function url_gf() { + if { [[ ! -f "$called_fn_dir/.${FUNCNAME[0]}" ]] || [[ "$DIFF" = true ]]; } && [[ "$URL_GF" = true ]]; then start_func ${FUNCNAME[0]} "Vulnerable Pattern Search" mkdir -p gf - if [ -s "webs/url_extract.txt" ]; then + if [[ -s "webs/url_extract.txt" ]]; then gf xss webs/url_extract.txt | anew -q gf/xss.txt gf ssti webs/url_extract.txt | anew -q gf/ssti.txt gf ssrf webs/url_extract.txt | anew -q gf/ssrf.txt @@ -1339,13 +1604,13 @@ function url_gf(){ gf redirect webs/url_extract.txt | anew -q gf/redirect.txt [ -s "gf/ssrf.txt" ] && cat gf/ssrf.txt | anew -q gf/redirect.txt gf rce webs/url_extract.txt | anew -q gf/rce.txt - gf potential webs/url_extract.txt | cut -d ':' -f3-5 |anew -q gf/potential.txt + gf potential webs/url_extract.txt | cut -d ':' -f3-5 | anew -q gf/potential.txt [ -s ".tmp/url_extract_tmp.txt" ] && cat .tmp/url_extract_tmp.txt | grep -aEiv "\.(eot|jpg|jpeg|gif|css|tif|tiff|png|ttf|otf|woff|woff2|ico|pdf|svg|txt|js)$" | unfurl -u format %s://%d%p 2>>"$LOGFILE" | anew -q gf/endpoints.txt gf lfi webs/url_extract.txt | anew -q gf/lfi.txt fi end_func "Results are saved in $domain/gf folder" ${FUNCNAME[0]} else - if [ "$URL_GF" = false ]; then + if [[ "$URL_GF" = false ]]; then printf "\n${yellow} ${FUNCNAME[0]} skipped in this mode or defined in reconftw.cfg ${reset}\n" else printf "${yellow} ${FUNCNAME[0]} is already processed, to force executing ${FUNCNAME[0]} delete\n $called_fn_dir/.${FUNCNAME[0]} ${reset}\n\n" @@ -1353,23 +1618,23 @@ function url_gf(){ fi } -function url_ext(){ - if { [ ! -f "$called_fn_dir/.${FUNCNAME[0]}" ] || [ "$DIFF" = true ]; } && [ "$URL_EXT" = true ]; then - if [ -s ".tmp/url_extract_tmp.txt" ]; then +function url_ext() { + if { [[ ! 
-f "$called_fn_dir/.${FUNCNAME[0]}" ]] || [[ "$DIFF" = true ]]; } && [[ "$URL_EXT" = true ]]; then + if [[ -s ".tmp/url_extract_tmp.txt" ]]; then start_func ${FUNCNAME[0]} "Urls by extension" ext=("7z" "achee" "action" "adr" "apk" "arj" "ascx" "asmx" "asp" "aspx" "axd" "backup" "bak" "bat" "bin" "bkf" "bkp" "bok" "cab" "cer" "cfg" "cfm" "cfml" "cgi" "cnf" "conf" "config" "cpl" "crt" "csr" "csv" "dat" "db" "dbf" "deb" "dmg" "dmp" "doc" "docx" "drv" "email" "eml" "emlx" "env" "exe" "gadget" "gz" "html" "ica" "inf" "ini" "iso" "jar" "java" "jhtml" "json" "jsp" "key" "log" "lst" "mai" "mbox" "mbx" "md" "mdb" "msg" "msi" "nsf" "ods" "oft" "old" "ora" "ost" "pac" "passwd" "pcf" "pdf" "pem" "pgp" "php" "php3" "php4" "php5" "phtm" "phtml" "pkg" "pl" "plist" "pst" "pwd" "py" "rar" "rb" "rdp" "reg" "rpm" "rtf" "sav" "sh" "shtm" "shtml" "skr" "sql" "swf" "sys" "tar" "tar.gz" "tmp" "toast" "tpl" "txt" "url" "vcd" "vcf" "wml" "wpd" "wsdl" "wsf" "xls" "xlsm" "xlsx" "xml" "xsd" "yaml" "yml" "z" "zip") #echo "" > webs/url_extract.txt for t in "${ext[@]}"; do NUMOFLINES=$(cat .tmp/url_extract_tmp.txt | grep -aEi "\.(${t})($|\/|\?)" | sort -u | sed '/^$/d' | wc -l) if [[ ${NUMOFLINES} -gt 0 ]]; then - echo -e "\n############################\n + ${t} + \n############################\n" >> webs/urls_by_ext.txt - cat .tmp/url_extract_tmp.txt | grep -aEi "\.(${t})($|\/|\?)" >> webs/urls_by_ext.txt + echo -e "\n############################\n + ${t} + \n############################\n" >>webs/urls_by_ext.txt + cat .tmp/url_extract_tmp.txt | grep -aEi "\.(${t})($|\/|\?)" >>webs/urls_by_ext.txt fi done end_func "Results are saved in $domain/webs/urls_by_ext.txt" ${FUNCNAME[0]} fi else - if [ "$URL_EXT" = false ]; then + if [[ "$URL_EXT" = false ]]; then printf "\n${yellow} ${FUNCNAME[0]} skipped in this mode or defined in reconftw.cfg ${reset}\n" else printf "${yellow} ${FUNCNAME[0]} is already processed, to force executing ${FUNCNAME[0]} delete\n $called_fn_dir/.${FUNCNAME[0]} ${reset}\n\n" @@ -1377,48 +1642,48 @@ function url_ext(){ fi } -function jschecks(){ - if { [ ! -f "$called_fn_dir/.${FUNCNAME[0]}" ] || [ "$DIFF" = true ]; } && [ "$JSCHECKS" = true ]; then +function jschecks() { + if { [[ ! -f "$called_fn_dir/.${FUNCNAME[0]}" ]] || [[ "$DIFF" = true ]]; } && [[ "$JSCHECKS" = true ]]; then start_func ${FUNCNAME[0]} "Javascript Scan" - if [ -s ".tmp/url_extract_js.txt" ]; then + if [[ -s ".tmp/url_extract_js.txt" ]]; then printf "${yellow} Running : Fetching Urls 1/5${reset}\n" - if [ ! "$AXIOM" = true ]; then + if [[ ! 
"$AXIOM" = true ]]; then cat .tmp/url_extract_js.txt | subjs -ua "Mozilla/5.0 (X11; Linux x86_64; rv:72.0) Gecko/20100101 Firefox/72.0" -c 40 | grep "$domain" | grep -E '^((http|https):\/\/)?([a-zA-Z0-9]([a-zA-Z0-9\-]*[a-zA-Z0-9])?\.)+[a-zA-Z]{1,}(\/.*)?$' | anew -q .tmp/subjslinks.txt else axiom-scan .tmp/url_extract_js.txt -m subjs -o .tmp/subjslinks.txt $AXIOM_EXTRA_ARGS 2>>"$LOGFILE" >/dev/null fi [ -s ".tmp/subjslinks.txt" ] && cat .tmp/subjslinks.txt | egrep -iv "\.(eot|jpg|jpeg|gif|css|tif|tiff|png|ttf|otf|woff|woff2|ico|pdf|svg|txt|js)" | anew -q js/nojs_links.txt [ -s ".tmp/subjslinks.txt" ] && cat .tmp/subjslinks.txt | grep -iE "\.js($|\?)" | anew -q .tmp/url_extract_js.txt - cat .tmp/url_extract_js.txt | python3 $tools/urless/urless/urless.py | anew -q js/url_extract_js.txt 2>>"$LOGFILE" >/dev/null + cat .tmp/url_extract_js.txt | python3 ${tools}/urless/urless/urless.py | anew -q js/url_extract_js.txt 2>>"$LOGFILE" >/dev/null printf "${yellow} Running : Resolving JS Urls 2/5${reset}\n" - if [ ! "$AXIOM" = true ]; then + if [[ ! "$AXIOM" = true ]]; then [ -s "js/url_extract_js.txt" ] && cat js/url_extract_js.txt | httpx -follow-redirects -random-agent -silent -timeout $HTTPX_TIMEOUT -threads $HTTPX_THREADS -rl $HTTPX_RATELIMIT -status-code -content-type -retries 2 -no-color | grep "[200]" | grep "javascript" | cut -d ' ' -f1 | anew -q js/js_livelinks.txt else [ -s "js/url_extract_js.txt" ] && axiom-scan js/url_extract_js.txt -m httpx -follow-host-redirects -H \"${HEADER}\" -status-code -threads $HTTPX_THREADS -rl $HTTPX_RATELIMIT -timeout $HTTPX_TIMEOUT -silent -content-type -retries 2 -no-color -o .tmp/js_livelinks.txt $AXIOM_EXTRA_ARGS 2>>"$LOGFILE" >/dev/null [ -s ".tmp/js_livelinks.txt" ] && cat .tmp/js_livelinks.txt | anew .tmp/web_full_info.txt | grep "[200]" | grep "javascript" | cut -d ' ' -f1 | anew -q js/js_livelinks.txt fi printf "${yellow} Running : Gathering endpoints 3/5${reset}\n" - [ -s "js/js_livelinks.txt" ] && python3 $tools/xnLinkFinder/xnLinkFinder.py -i js/js_livelinks.txt -sf subdomains/subdomains.txt -d $XNLINKFINDER_DEPTH -o .tmp/js_endpoints.txt 2>>"$LOGFILE" >/dev/null + [ -s "js/js_livelinks.txt" ] && python3 ${tools}/xnLinkFinder/xnLinkFinder.py -i js/js_livelinks.txt -sf subdomains/subdomains.txt -d $XNLINKFINDER_DEPTH -o .tmp/js_endpoints.txt 2>>"$LOGFILE" >/dev/null [ -s "parameters.txt" ] && rm -f parameters.txt 2>>"$LOGFILE" >/dev/null - if [ -s ".tmp/js_endpoints.txt" ]; then + if [[ -s ".tmp/js_endpoints.txt" ]]; then sed -i '/^\//!d' .tmp/js_endpoints.txt cat .tmp/js_endpoints.txt | anew -q js/js_endpoints.txt fi printf "${yellow} Running : Gathering secrets 4/5${reset}\n" - if [ ! "$AXIOM" = true ]; then + if [[ ! 
"$AXIOM" = true ]]; then [ -s "js/js_livelinks.txt" ] && cat js/js_livelinks.txt | Mantra -ua ${HEADER} -s | anew -q js/js_secrets.txt else [ -s "js/js_livelinks.txt" ] && axiom-scan js/js_livelinks.txt -m mantra -ua \"${HEADER}\" -s -o js/js_secrets.txt $AXIOM_EXTRA_ARGS &>/dev/null fi [ -s "js/js_secrets.txt" ] && sed -r "s/\x1B\[([0-9]{1,3}(;[0-9]{1,2};?)?)?[mGK]//g" -i js/js_secrets.txt printf "${yellow} Running : Building wordlist 5/5${reset}\n" - [ -s "js/js_livelinks.txt" ] && interlace -tL js/js_livelinks.txt -threads ${INTERLACE_THREADS} -c "python3 $tools/getjswords.py '_target_' | anew -q webs/dict_words.txt" 2>>"$LOGFILE" >/dev/null + [ -s "js/js_livelinks.txt" ] && interlace -tL js/js_livelinks.txt -threads ${INTERLACE_THREADS} -c "python3 ${tools}/getjswords.py '_target_' | anew -q webs/dict_words.txt" 2>>"$LOGFILE" >/dev/null end_func "Results are saved in $domain/js folder" ${FUNCNAME[0]} else end_func "No JS urls found for $domain, function skipped" ${FUNCNAME[0]} fi else - if [ "$JSCHECKS" = false ]; then + if [[ "$JSCHECKS" = false ]]; then printf "\n${yellow} ${FUNCNAME[0]} skipped in this mode or defined in reconftw.cfg ${reset}\n" else printf "${yellow} ${FUNCNAME[0]} is already processed, to force executing ${FUNCNAME[0]} delete\n $called_fn_dir/.${FUNCNAME[0]} ${reset}\n\n" @@ -1426,10 +1691,10 @@ function jschecks(){ fi } -function wordlist_gen(){ - if { [ ! -f "$called_fn_dir/.${FUNCNAME[0]}" ] || [ "$DIFF" = true ]; } && [ "$WORDLIST" = true ]; then +function wordlist_gen() { + if { [[ ! -f "$called_fn_dir/.${FUNCNAME[0]}" ]] || [[ "$DIFF" = true ]]; } && [[ "$WORDLIST" = true ]]; then start_func ${FUNCNAME[0]} "Wordlist generation" - if [ -s ".tmp/url_extract_tmp.txt" ]; then + if [[ -s ".tmp/url_extract_tmp.txt" ]]; then cat .tmp/url_extract_tmp.txt | unfurl -u keys 2>>"$LOGFILE" | sed 's/[][]//g' | sed 's/[#]//g' | sed 's/[}{]//g' | anew -q webs/dict_params.txt cat .tmp/url_extract_tmp.txt | unfurl -u values 2>>"$LOGFILE" | sed 's/[][]//g' | sed 's/[#]//g' | sed 's/[}{]//g' | anew -q webs/dict_values.txt cat .tmp/url_extract_tmp.txt | tr "[:punct:]" "\n" | anew -q webs/dict_words.txt @@ -1437,12 +1702,12 @@ function wordlist_gen(){ [ -s ".tmp/js_endpoints.txt" ] && cat .tmp/js_endpoints.txt | unfurl -u format %s://%d%p 2>>"$LOGFILE" | anew -q webs/all_paths.txt [ -s ".tmp/url_extract_tmp.txt" ] && cat .tmp/url_extract_tmp.txt | unfurl -u format %s://%d%p 2>>"$LOGFILE" | anew -q webs/all_paths.txt end_func "Results are saved in $domain/webs/dict_[words|paths].txt" ${FUNCNAME[0]} - if [ "$PROXY" = true ] && [ -n "$proxy_url" ] && [[ $(cat webs/all_paths.txt | wc -l) -le $DEEP_LIMIT2 ]]; then + if [[ "$PROXY" = true ]] && [[ -n "$proxy_url" ]] && [[ $(cat webs/all_paths.txt | wc -l) -le $DEEP_LIMIT2 ]]; then notification "Sending urls to proxy" info ffuf -mc all -w webs/all_paths.txt -u FUZZ -replay-proxy $proxy_url 2>>"$LOGFILE" >/dev/null fi else - if [ "$WORDLIST" = false ]; then + if [[ "$WORDLIST" = false ]]; then printf "\n${yellow} ${FUNCNAME[0]} skipped in this mode or defined in reconftw.cfg ${reset}\n" else printf "${yellow} ${FUNCNAME[0]} is already processed, to force executing ${FUNCNAME[0]} delete\n $called_fn_dir/.${FUNCNAME[0]} ${reset}\n\n" @@ -1450,16 +1715,16 @@ function wordlist_gen(){ fi } -function wordlist_gen_roboxtractor(){ - if { [ ! -f "$called_fn_dir/.${FUNCNAME[0]}" ] || [ "$DIFF" = true ]; } && [ "$ROBOTSWORDLIST" = true ]; then +function wordlist_gen_roboxtractor() { + if { [[ ! 
-f "$called_fn_dir/.${FUNCNAME[0]}" ]] || [[ "$DIFF" = true ]]; } && [[ "$ROBOTSWORDLIST" = true ]]; then start_func ${FUNCNAME[0]} "Robots wordlist generation" [ ! -s ".tmp/webs_all.txt" ] && cat webs/webs.txt webs/webs_uncommon_ports.txt 2>/dev/null | anew -q .tmp/webs_all.txt - if [ -s ".tmp/webs_all.txt" ]; then + if [[ -s ".tmp/webs_all.txt" ]]; then cat .tmp/webs_all.txt | roboxtractor -m 1 -wb 2>/dev/null | anew -q webs/robots_wordlist.txt fi end_func "Results are saved in $domain/webs/robots_wordlist.txt" ${FUNCNAME[0]} else - if [ "$ROBOTSWORDLIST" = false ]; then + if [[ "$ROBOTSWORDLIST" = false ]]; then printf "\n${yellow} ${FUNCNAME[0]} skipped in this mode or defined in reconftw.cfg ${reset}\n" else printf "${yellow} ${FUNCNAME[0]} is already processed, to force executing ${FUNCNAME[0]} delete\n $called_fn_dir/.${FUNCNAME[0]} ${reset}\n\n" @@ -1467,14 +1732,14 @@ function wordlist_gen_roboxtractor(){ fi } -function password_dict(){ - if { [ ! -f "$called_fn_dir/.${FUNCNAME[0]}" ] || [ "$DIFF" = true ]; } && [ "$PASSWORD_DICT" = true ]; then +function password_dict() { + if { [[ ! -f "$called_fn_dir/.${FUNCNAME[0]}" ]] || [[ "$DIFF" = true ]]; } && [[ "$PASSWORD_DICT" = true ]]; then start_func ${FUNCNAME[0]} "Password dictionary generation" word=${domain%%.*} - python3 $tools/pydictor/pydictor.py -extend $word --leet 0 1 2 11 21 --len ${PASSWORD_MIN_LENGTH} ${PASSWORD_MAX_LENGTH} -o webs/password_dict.txt 2>>"$LOGFILE" >/dev/null + python3 ${tools}/pydictor/pydictor.py -extend $word --leet 0 1 2 11 21 --len ${PASSWORD_MIN_LENGTH} ${PASSWORD_MAX_LENGTH} -o webs/password_dict.txt 2>>"$LOGFILE" >/dev/null end_func "Results are saved in $domain/webs/password_dict.txt" ${FUNCNAME[0]} else - if [ "$PASSWORD_DICT" = false ]; then + if [[ "$PASSWORD_DICT" = false ]]; then printf "\n${yellow} ${FUNCNAME[0]} skipped in this mode or defined in reconftw.cfg ${reset}\n" else printf "${yellow} ${FUNCNAME[0]} is already processed, to force executing ${FUNCNAME[0]} delete\n $called_fn_dir/.${FUNCNAME[0]} ${reset}\n\n" @@ -1486,13 +1751,13 @@ function password_dict(){ ######################################### VULNERABILITIES ##################################################### ############################################################################################################### -function brokenLinks(){ - if { [ ! -f "$called_fn_dir/.${FUNCNAME[0]}" ] || [ "$DIFF" = true ]; } && [ "$BROKENLINKS" = true ] ; then +function brokenLinks() { + if { [[ ! -f "$called_fn_dir/.${FUNCNAME[0]}" ]] || [[ "$DIFF" = true ]]; } && [[ "$BROKENLINKS" = true ]]; then start_func ${FUNCNAME[0]} "Broken links checks" [ ! -s ".tmp/webs_all.txt" ] && cat webs/webs.txt webs/webs_uncommon_ports.txt 2>/dev/null | anew -q .tmp/webs_all.txt - if [ ! "$AXIOM" = true ]; then - if [ ! -s ".tmp/katana.txt" ]; then - if [ "$DEEP" = true ]; then + if [[ ! "$AXIOM" = true ]]; then + if [[ ! -s ".tmp/katana.txt" ]]; then + if [[ "$DEEP" = true ]]; then [ -s ".tmp/webs_all.txt" ] && katana -silent -list .tmp/webs_all.txt -jc -kf all -c $KATANA_THREADS -d 3 -o .tmp/katana.txt 2>>"$LOGFILE" >/dev/null else [ -s ".tmp/webs_all.txt" ] && katana -silent -list .tmp/webs_all.txt -jc -kf all -c $KATANA_THREADS -d 2 -o .tmp/katana.txt 2>>"$LOGFILE" >/dev/null @@ -1500,8 +1765,8 @@ function brokenLinks(){ fi [ -s ".tmp/katana.txt" ] && sed -i '/^.\{2048\}./d' .tmp/katana.txt else - if [ ! -s ".tmp/katana.txt" ]; then - if [ "$DEEP" = true ]; then + if [[ ! 
-s ".tmp/katana.txt" ]]; then + if [[ "$DEEP" = true ]]; then [ -s ".tmp/webs_all.txt" ] && axiom-scan .tmp/webs_all.txt -m katana -jc -kf all -d 3 -o .tmp/katana.txt $AXIOM_EXTRA_ARGS 2>>"$LOGFILE" >/dev/null else [ -s ".tmp/webs_all.txt" ] && axiom-scan .tmp/webs_all.txt -m katana -jc -kf all -d 2 -o .tmp/katana.txt $AXIOM_EXTRA_ARGS 2>>"$LOGFILE" >/dev/null @@ -1514,7 +1779,7 @@ function brokenLinks(){ notification "${NUMOFLINES} new broken links found" info end_func "Results are saved in vulns/brokenLinks.txt" ${FUNCNAME[0]} else - if [ "$BROKENLINKS" = false ]; then + if [[ "$BROKENLINKS" = false ]]; then printf "\n${yellow} ${FUNCNAME[0]} skipped in this mode or defined in reconftw.cfg ${reset}\n" else printf "${yellow} ${FUNCNAME[0]} is already processed, to force executing ${FUNCNAME[0]} delete\n $called_fn_dir/.${FUNCNAME[0]} ${reset}\n\n" @@ -1522,13 +1787,13 @@ function brokenLinks(){ fi } -function xss(){ - if { [ ! -f "$called_fn_dir/.${FUNCNAME[0]}" ] || [ "$DIFF" = true ]; } && [ "$XSS" = true ] && [ -s "gf/xss.txt" ]; then +function xss() { + if { [[ ! -f "$called_fn_dir/.${FUNCNAME[0]}" ]] || [[ "$DIFF" = true ]]; } && [[ "$XSS" = true ]] && [[ -s "gf/xss.txt" ]]; then start_func ${FUNCNAME[0]} "XSS Analysis" [ -s "gf/xss.txt" ] && cat gf/xss.txt | qsreplace FUZZ | sed '/FUZZ/!d' | Gxss -c 100 -p Xss | qsreplace FUZZ | sed '/FUZZ/!d' | anew -q .tmp/xss_reflected.txt - if [ ! "$AXIOM" = true ]; then - if [ "$DEEP" = true ]; then - if [ -n "$XSS_SERVER" ]; then + if [[ ! "$AXIOM" = true ]]; then + if [[ "$DEEP" = true ]]; then + if [[ -n "$XSS_SERVER" ]]; then [ -s ".tmp/xss_reflected.txt" ] && cat .tmp/xss_reflected.txt | dalfox pipe --silence --no-color --no-spinner --only-poc r --ignore-return 302,404,403 --skip-bav -b ${XSS_SERVER} -w $DALFOX_THREADS 2>>"$LOGFILE" | anew -q vulns/xss.txt else printf "${yellow}\n No XSS_SERVER defined, blind xss skipped\n\n" @@ -1536,7 +1801,7 @@ function xss(){ fi else if [[ $(cat .tmp/xss_reflected.txt | wc -l) -le $DEEP_LIMIT ]]; then - if [ -n "$XSS_SERVER" ]; then + if [[ -n "$XSS_SERVER" ]]; then cat .tmp/xss_reflected.txt | dalfox pipe --silence --no-color --no-spinner --skip-bav --skip-mining-dom --skip-mining-dict --only-poc r --ignore-return 302,404,403 -b ${XSS_SERVER} -w $DALFOX_THREADS 2>>"$LOGFILE" | anew -q vulns/xss.txt else printf "${yellow}\n No XSS_SERVER defined, blind xss skipped\n\n" @@ -1547,8 +1812,8 @@ function xss(){ fi fi else - if [ "$DEEP" = true ]; then - if [ -n "$XSS_SERVER" ]; then + if [[ "$DEEP" = true ]]; then + if [[ -n "$XSS_SERVER" ]]; then [ -s ".tmp/xss_reflected.txt" ] && axiom-scan .tmp/xss_reflected.txt -m dalfox --skip-bav -b ${XSS_SERVER} -w $DALFOX_THREADS -o vulns/xss.txt $AXIOM_EXTRA_ARGS 2>>"$LOGFILE" >/dev/null else printf "${yellow}\n No XSS_SERVER defined, blind xss skipped\n\n" @@ -1556,7 +1821,7 @@ function xss(){ fi else if [[ $(cat .tmp/xss_reflected.txt | wc -l) -le $DEEP_LIMIT ]]; then - if [ -n "$XSS_SERVER" ]; then + if [[ -n "$XSS_SERVER" ]]; then axiom-scan .tmp/xss_reflected.txt -m dalfox --skip-bav --skip-grepping --skip-mining-all --skip-mining-dict -b ${XSS_SERVER} -w $DALFOX_THREADS -o vulns/xss.txt $AXIOM_EXTRA_ARGS 2>>"$LOGFILE" >/dev/null else printf "${yellow}\n No XSS_SERVER defined, blind xss skipped\n\n" @@ -1569,24 +1834,24 @@ function xss(){ fi end_func "Results are saved in vulns/xss.txt" ${FUNCNAME[0]} else - if [ "$XSS" = false ]; then + if [[ "$XSS" = false ]]; then printf "\n${yellow} ${FUNCNAME[0]} skipped in this mode or defined in reconftw.cfg ${reset}\n" 
- elif [ ! -s "gf/xss.txt" ]; then - printf "\n${yellow} ${FUNCNAME[0]} No URLs potentially vulnerables to XSS ${reset}\n\n" + elif [[ ! -s "gf/xss.txt" ]]; then + printf "\n${yellow} ${FUNCNAME[0]} No URLs potentially vulnerables to XSS ${reset}\n\n" else printf "${yellow} ${FUNCNAME[0]} is already processed, to force executing ${FUNCNAME[0]} delete\n $called_fn_dir/.${FUNCNAME[0]} ${reset}\n\n" fi fi } -function cors(){ - if { [ ! -f "$called_fn_dir/.${FUNCNAME[0]}" ] || [ "$DIFF" = true ]; } && [ "$CORS" = true ]; then +function cors() { + if { [[ ! -f "$called_fn_dir/.${FUNCNAME[0]}" ]] || [[ "$DIFF" = true ]]; } && [[ "$CORS" = true ]]; then start_func ${FUNCNAME[0]} "CORS Scan" [ ! -s ".tmp/webs_all.txt" ] && cat webs/webs.txt webs/webs_uncommon_ports.txt 2>/dev/null | anew -q .tmp/webs_all.txt - [ -s ".tmp/webs_all.txt" ] && python3 $tools/Corsy/corsy.py -i .tmp/webs_all.txt -o vulns/cors.txt 2>>"$LOGFILE" >/dev/null + [ -s ".tmp/webs_all.txt" ] && python3 ${tools}/Corsy/corsy.py -i .tmp/webs_all.txt -o vulns/cors.txt 2>>"$LOGFILE" >/dev/null end_func "Results are saved in vulns/cors.txt" ${FUNCNAME[0]} else - if [ "$CORS" = false ]; then + if [[ "$CORS" = false ]]; then printf "\n${yellow} ${FUNCNAME[0]} skipped in this mode or defined in reconftw.cfg ${reset}\n" else printf "${yellow} ${FUNCNAME[0]} is already processed, to force executing ${FUNCNAME[0]} delete\n $called_fn_dir/.${FUNCNAME[0]} ${reset}\n\n" @@ -1594,12 +1859,12 @@ function cors(){ fi } -function open_redirect(){ - if { [ ! -f "$called_fn_dir/.${FUNCNAME[0]}" ] || [ "$DIFF" = true ]; } && [ "$OPEN_REDIRECT" = true ] && [ -s "gf/redirect.txt" ]; then +function open_redirect() { + if { [[ ! -f "$called_fn_dir/.${FUNCNAME[0]}" ]] || [[ "$DIFF" = true ]]; } && [[ "$OPEN_REDIRECT" = true ]] && [[ -s "gf/redirect.txt" ]]; then start_func ${FUNCNAME[0]} "Open redirects checks" - if [ "$DEEP" = true ] || [[ $(cat gf/redirect.txt | wc -l) -le $DEEP_LIMIT ]]; then + if [[ "$DEEP" = true ]] || [[ $(cat gf/redirect.txt | wc -l) -le $DEEP_LIMIT ]]; then cat gf/redirect.txt | qsreplace FUZZ | sed '/FUZZ/!d' | anew -q .tmp/tmp_redirect.txt - python3 $tools/Oralyzer/oralyzer.py -l .tmp/tmp_redirect.txt -p $tools/Oralyzer/payloads.txt > vulns/redirect.txt + python3 ${tools}/Oralyzer/oralyzer.py -l .tmp/tmp_redirect.txt -p ${tools}/Oralyzer/payloads.txt >vulns/redirect.txt sed -r -i "s/\x1B\[([0-9]{1,3}(;[0-9]{1,2})?)?[mGK]//g" vulns/redirect.txt end_func "Results are saved in vulns/redirect.txt" ${FUNCNAME[0]} else @@ -1607,9 +1872,9 @@ function open_redirect(){ printf "${bgreen}#######################################################################${reset}\n" fi else - if [ "$OPEN_REDIRECT" = false ]; then + if [[ "$OPEN_REDIRECT" = false ]]; then printf "\n${yellow} ${FUNCNAME[0]} skipped in this mode or defined in reconftw.cfg ${reset}\n" - elif [ ! -s "gf/redirect.txt" ]; then + elif [[ ! -s "gf/redirect.txt" ]]; then printf "\n${yellow} ${FUNCNAME[0]} No URLs potentially vulnerables to Open Redirect ${reset}\n\n" else printf "${yellow} ${FUNCNAME[0]} is already processed, to force executing ${FUNCNAME[0]} delete\n $called_fn_dir/.${FUNCNAME[0]} ${reset}\n\n" @@ -1617,10 +1882,10 @@ function open_redirect(){ fi } -function ssrf_checks(){ - if { [ ! -f "$called_fn_dir/.${FUNCNAME[0]}" ] || [ "$DIFF" = true ]; } && [ "$SSRF_CHECKS" = true ] && [ -s "gf/ssrf.txt" ]; then +function ssrf_checks() { + if { [[ ! 
-f "$called_fn_dir/.${FUNCNAME[0]}" ]] || [[ "$DIFF" = true ]]; } && [[ "$SSRF_CHECKS" = true ]] && [[ -s "gf/ssrf.txt" ]]; then start_func ${FUNCNAME[0]} "SSRF checks" - if [ -z "$COLLAB_SERVER" ]; then + if [[ -z "$COLLAB_SERVER" ]]; then interactsh-client &>.tmp/ssrf_callback.txt & sleep 2 COLLAB_SERVER_FIX="FFUFHASH.$(cat .tmp/ssrf_callback.txt | tail -n1 | cut -c 16-)" @@ -1630,12 +1895,12 @@ function ssrf_checks(){ COLLAB_SERVER_FIX="FFUFHASH.$(echo ${COLLAB_SERVER} | sed -r "s/https?:\/\///")" INTERACT=false fi - if [ "$DEEP" = true ] || [[ $(cat gf/ssrf.txt | wc -l) -le $DEEP_LIMIT ]]; then + if [[ "$DEEP" = true ]] || [[ $(cat gf/ssrf.txt | wc -l) -le $DEEP_LIMIT ]]; then cat gf/ssrf.txt | qsreplace ${COLLAB_SERVER_FIX} | anew -q .tmp/tmp_ssrf.txt cat gf/ssrf.txt | qsreplace ${COLLAB_SERVER_URL} | anew -q .tmp/tmp_ssrf.txt ffuf -v -H "${HEADER}" -t $FFUF_THREADS -rate $FFUF_RATELIMIT -w .tmp/tmp_ssrf.txt -u FUZZ 2>/dev/null | grep "URL" | sed 's/| URL | //' | anew -q vulns/ssrf_requested_url.txt - ffuf -v -w .tmp/tmp_ssrf.txt:W1,$tools/headers_inject.txt:W2 -H "${HEADER}" -H "W2: ${COLLAB_SERVER_FIX}" -t $FFUF_THREADS -rate $FFUF_RATELIMIT -u W1 2>/dev/null | anew -q vulns/ssrf_requested_headers.txt - ffuf -v -w .tmp/tmp_ssrf.txt:W1,$tools/headers_inject.txt:W2 -H "${HEADER}" -H "W2: ${COLLAB_SERVER_URL}" -t $FFUF_THREADS -rate $FFUF_RATELIMIT -u W1 2>/dev/null | anew -q vulns/ssrf_requested_headers.txt + ffuf -v -w .tmp/tmp_ssrf.txt:W1,${tools}/headers_inject.txt:W2 -H "${HEADER}" -H "W2: ${COLLAB_SERVER_FIX}" -t $FFUF_THREADS -rate $FFUF_RATELIMIT -u W1 2>/dev/null | anew -q vulns/ssrf_requested_headers.txt + ffuf -v -w .tmp/tmp_ssrf.txt:W1,${tools}/headers_inject.txt:W2 -H "${HEADER}" -H "W2: ${COLLAB_SERVER_URL}" -t $FFUF_THREADS -rate $FFUF_RATELIMIT -u W1 2>/dev/null | anew -q vulns/ssrf_requested_headers.txt sleep 5 [ -s ".tmp/ssrf_callback.txt" ] && cat .tmp/ssrf_callback.txt | tail -n+11 | anew -q vulns/ssrf_callback.txt && NUMOFLINES=$(cat .tmp/ssrf_callback.txt | tail -n+12 | sed '/^$/d' | wc -l) [ "$INTERACT" = true ] && notification "SSRF: ${NUMOFLINES} callbacks received" info @@ -1645,28 +1910,28 @@ function ssrf_checks(){ fi pkill -f interactsh-client & else - if [ "$SSRF_CHECKS" = false ]; then + if [[ "$SSRF_CHECKS" = false ]]; then printf "\n${yellow} ${FUNCNAME[0]} skipped in this mode or defined in reconftw.cfg ${reset}\n" - elif [ ! -s "gf/ssrf.txt" ]; then - printf "\n${yellow} ${FUNCNAME[0]} No URLs potentially vulnerables to SSRF ${reset}\n\n" + elif [[ ! -s "gf/ssrf.txt" ]]; then + printf "\n${yellow} ${FUNCNAME[0]} No URLs potentially vulnerables to SSRF ${reset}\n\n" else printf "${yellow} ${FUNCNAME[0]} is already processed, to force executing ${FUNCNAME[0]} delete\n $called_fn_dir/.${FUNCNAME[0]} ${reset}\n\n" fi fi } -function crlf_checks(){ - if { [ ! -f "$called_fn_dir/.${FUNCNAME[0]}" ] || [ "$DIFF" = true ]; } && [ "$CRLF_CHECKS" = true ]; then +function crlf_checks() { + if { [[ ! -f "$called_fn_dir/.${FUNCNAME[0]}" ]] || [[ "$DIFF" = true ]]; } && [[ "$CRLF_CHECKS" = true ]]; then start_func ${FUNCNAME[0]} "CRLF checks" [ ! 
-s ".tmp/webs_all.txt" ] && cat webs/webs.txt webs/webs_uncommon_ports.txt 2>/dev/null | anew -q .tmp/webs_all.txt - if [ "$DEEP" = true ] || [[ $(cat .tmp/webs_all.txt | wc -l) -le $DEEP_LIMIT ]]; then + if [[ "$DEEP" = true ]] || [[ $(cat .tmp/webs_all.txt | wc -l) -le $DEEP_LIMIT ]]; then crlfuzz -l .tmp/webs_all.txt -o vulns/crlf.txt 2>>"$LOGFILE" >/dev/null end_func "Results are saved in vulns/crlf.txt" ${FUNCNAME[0]} else end_func "Skipping CRLF: Too many URLs to test, try with --deep flag" ${FUNCNAME[0]} fi else - if [ "$CRLF_CHECKS" = false ]; then + if [[ "$CRLF_CHECKS" = false ]]; then printf "\n${yellow} ${FUNCNAME[0]} skipped in this mode or defined in reconftw.cfg ${reset}\n" else printf "${yellow} ${FUNCNAME[0]} is already processed, to force executing ${FUNCNAME[0]} delete\n $called_fn_dir/.${FUNCNAME[0]} ${reset}\n\n" @@ -1674,12 +1939,12 @@ function crlf_checks(){ fi } -function lfi(){ - if { [ ! -f "$called_fn_dir/.${FUNCNAME[0]}" ] || [ "$DIFF" = true ]; } && [ "$LFI" = true ] && [ -s "gf/lfi.txt" ]; then +function lfi() { + if { [[ ! -f "$called_fn_dir/.${FUNCNAME[0]}" ]] || [[ "$DIFF" = true ]]; } && [[ "$LFI" = true ]] && [[ -s "gf/lfi.txt" ]]; then start_func ${FUNCNAME[0]} "LFI checks" - if [ -s "gf/lfi.txt" ]; then + if [[ -s "gf/lfi.txt" ]]; then cat gf/lfi.txt | qsreplace FUZZ | sed '/FUZZ/!d' | anew -q .tmp/tmp_lfi.txt - if [ "$DEEP" = true ] || [[ $(cat .tmp/tmp_lfi.txt | wc -l) -le $DEEP_LIMIT ]]; then + if [[ "$DEEP" = true ]] || [[ $(cat .tmp/tmp_lfi.txt | wc -l) -le $DEEP_LIMIT ]]; then interlace -tL .tmp/tmp_lfi.txt -threads ${INTERLACE_THREADS} -c "ffuf -v -r -t ${FFUF_THREADS} -rate ${FFUF_RATELIMIT} -H \"${HEADER}\" -w ${lfi_wordlist} -u \"_target_\" -mr \"root:\" " 2>/dev/null | grep "URL" | sed 's/| URL | //' | anew -q vulns/lfi.txt end_func "Results are saved in vulns/lfi.txt" ${FUNCNAME[0]} else @@ -1687,9 +1952,9 @@ function lfi(){ fi fi else - if [ "$LFI" = false ]; then + if [[ "$LFI" = false ]]; then printf "\n${yellow} ${FUNCNAME[0]} skipped in this mode or defined in reconftw.cfg ${reset}\n" - elif [ ! -s "gf/lfi.txt" ]; then + elif [[ ! -s "gf/lfi.txt" ]]; then printf "\n${yellow} ${FUNCNAME[0]} No URLs potentially vulnerables to LFI ${reset}\n\n" else printf "${yellow} ${FUNCNAME[0]} is already processed, to force executing ${FUNCNAME[0]} delete\n $called_fn_dir/.${FUNCNAME[0]} ${reset}\n\n" @@ -1697,12 +1962,12 @@ function lfi(){ fi } -function ssti(){ - if { [ ! -f "$called_fn_dir/.${FUNCNAME[0]}" ] || [ "$DIFF" = true ]; } && [ "$SSTI" = true ] && [ -s "gf/ssti.txt" ]; then +function ssti() { + if { [[ ! 
-f "$called_fn_dir/.${FUNCNAME[0]}" ]] || [[ "$DIFF" = true ]]; } && [[ "$SSTI" = true ]] && [[ -s "gf/ssti.txt" ]]; then start_func ${FUNCNAME[0]} "SSTI checks" - if [ -s "gf/ssti.txt" ]; then - cat gf/ssti.txt | qsreplace FUZZ | sed '/FUZZ/!d' | anew -q .tmp/tmp_ssti.txt - if [ "$DEEP" = true ] || [[ $(cat .tmp/tmp_ssti.txt | wc -l) -le $DEEP_LIMIT ]]; then + if [[ -s "gf/ssti.txt" ]]; then + cat gf/ssti.txt | qsreplace FUZZ | sed '/FUZZ/!d' | anew -q .tmp/tmp_ssti.txt + if [[ "$DEEP" = true ]] || [[ $(cat .tmp/tmp_ssti.txt | wc -l) -le $DEEP_LIMIT ]]; then interlace -tL .tmp/tmp_ssti.txt -threads ${INTERLACE_THREADS} -c "ffuf -v -r -t ${FFUF_THREADS} -rate ${FFUF_RATELIMIT} -H \"${HEADER}\" -w ${ssti_wordlist} -u \"_target_\" -mr \"ssti49\" " 2>/dev/null | grep "URL" | sed 's/| URL | //' | anew -q vulns/ssti.txt end_func "Results are saved in vulns/ssti.txt" ${FUNCNAME[0]} else @@ -1710,9 +1975,9 @@ function ssti(){ fi fi else - if [ "$SSTI" = false ]; then + if [[ "$SSTI" = false ]]; then printf "\n${yellow} ${FUNCNAME[0]} skipped in this mode or defined in reconftw.cfg ${reset}\n" - elif [ ! -s "gf/ssti.txt" ]; then + elif [[ ! -s "gf/ssti.txt" ]]; then printf "\n${yellow} ${FUNCNAME[0]} No URLs potentially vulnerables to SSTI ${reset}\n\n" else printf "${yellow} ${FUNCNAME[0]} is already processed, to force executing ${FUNCNAME[0]} delete\n $called_fn_dir/.${FUNCNAME[0]} ${reset}\n\n" @@ -1720,16 +1985,16 @@ function ssti(){ fi } -function sqli(){ - if { [ ! -f "$called_fn_dir/.${FUNCNAME[0]}" ] || [ "$DIFF" = true ]; } && [ "$SQLI" = true ] && [ -s "gf/sqli.txt" ]; then +function sqli() { + if { [[ ! -f "$called_fn_dir/.${FUNCNAME[0]}" ]] || [[ "$DIFF" = true ]]; } && [[ "$SQLI" = true ]] && [[ -s "gf/sqli.txt" ]]; then start_func ${FUNCNAME[0]} "SQLi checks" cat gf/sqli.txt | qsreplace FUZZ | sed '/FUZZ/!d' | anew -q .tmp/tmp_sqli.txt - if [ "$DEEP" = true ] || [[ $(cat .tmp/tmp_sqli.txt | wc -l) -le $DEEP_LIMIT ]]; then - if [ "$SQLMAP" = true ];then - python3 $tools/sqlmap/sqlmap.py -m .tmp/tmp_sqli.txt -b -o --smart --batch --disable-coloring --random-agent --output-dir=vulns/sqlmap 2>>"$LOGFILE" >/dev/null + if [[ "$DEEP" = true ]] || [[ $(cat .tmp/tmp_sqli.txt | wc -l) -le $DEEP_LIMIT ]]; then + if [[ "$SQLMAP" = true ]]; then + python3 ${tools}/sqlmap/sqlmap.py -m .tmp/tmp_sqli.txt -b -o --smart --batch --disable-coloring --random-agent --output-dir=vulns/sqlmap 2>>"$LOGFILE" >/dev/null fi - if [ "$GHAURI" = true ];then + if [[ "$GHAURI" = true ]]; then interlace -tL .tmp/tmp_sqli.txt -threads ${INTERLACE_THREADS} -c "ghauri -u _target_ --batch -H \"${HEADER}\" --force-ssl >> vulns/ghauri_log.txt" 2>>"$LOGFILE" >/dev/null fi end_func "Results are saved in vulns/sqlmap folder" ${FUNCNAME[0]} @@ -1737,9 +2002,9 @@ function sqli(){ end_func "Skipping SQLi: Too many URLs to test, try with --deep flag" ${FUNCNAME[0]} fi else - if [ "$SQLI" = false ]; then + if [[ "$SQLI" = false ]]; then printf "\n${yellow} ${FUNCNAME[0]} skipped in this mode or defined in reconftw.cfg ${reset}\n" - elif [ ! -s "gf/sqli.txt" ]; then + elif [[ ! -s "gf/sqli.txt" ]]; then printf "\n${yellow} ${FUNCNAME[0]} No URLs potentially vulnerables to SQLi ${reset}\n\n" else printf "${yellow} ${FUNCNAME[0]} is already processed, to force executing ${FUNCNAME[0]} delete\n $called_fn_dir/.${FUNCNAME[0]} ${reset}\n\n" @@ -1747,13 +2012,13 @@ function sqli(){ fi } -function test_ssl(){ - if { [ ! 
-f "$called_fn_dir/.${FUNCNAME[0]}" ] || [ "$DIFF" = true ]; } && [ "$TEST_SSL" = true ]; then +function test_ssl() { + if { [[ ! -f "$called_fn_dir/.${FUNCNAME[0]}" ]] || [[ "$DIFF" = true ]]; } && [[ "$TEST_SSL" = true ]]; then start_func ${FUNCNAME[0]} "SSL Test" - $tools/testssl.sh/testssl.sh --quiet --color 0 -U -iL hosts/ips.txt 2>>"$LOGFILE" > vulns/testssl.txt + ${tools}/testssl.sh/testssl.sh --quiet --color 0 -U -iL hosts/ips.txt 2>>"$LOGFILE" >vulns/testssl.txt end_func "Results are saved in vulns/testssl.txt" ${FUNCNAME[0]} else - if [ "$TEST_SSL" = false ]; then + if [[ "$TEST_SSL" = false ]]; then printf "\n${yellow} ${FUNCNAME[0]} skipped in this mode or defined in reconftw.cfg ${reset}\n" else printf "${yellow} ${FUNCNAME[0]} is already processed, to force executing ${FUNCNAME[0]} delete\n $called_fn_dir/.${FUNCNAME[0]} ${reset}\n\n" @@ -1761,15 +2026,21 @@ function test_ssl(){ fi } -function spraying(){ - if { [ ! -f "$called_fn_dir/.${FUNCNAME[0]}" ] || [ "$DIFF" = true ]; } && [ "$SPRAY" = true ]; then +function spraying() { + if { [[ ! -f "$called_fn_dir/.${FUNCNAME[0]}" ]] || [[ "$DIFF" = true ]]; } && [[ "$SPRAY" = true ]]; then start_func ${FUNCNAME[0]} "Password spraying" - cd "$tools/brutespray" || { echo "Failed to cd directory in ${FUNCNAME[0]} @ line ${LINENO}"; exit 1; } + cd "${tools}/brutespray" || { + echo "Failed to cd directory in ${FUNCNAME[0]} @ line ${LINENO}" + exit 1 + } python3 brutespray.py --file $dir/hosts/portscan_active.gnmap --threads $BRUTESPRAY_THREADS --hosts $BRUTESPRAY_CONCURRENCE -o $dir/vulns/brutespray 2>>"$LOGFILE" >/dev/null - cd "$dir" || { echo "Failed to cd directory in ${FUNCNAME[0]} @ line ${LINENO}"; exit 1; } + cd "$dir" || { + echo "Failed to cd directory in ${FUNCNAME[0]} @ line ${LINENO}" + exit 1 + } end_func "Results are saved in vulns/brutespray folder" ${FUNCNAME[0]} else - if [ "$SPRAY" = false ]; then + if [[ "$SPRAY" = false ]]; then printf "\n${yellow} ${FUNCNAME[0]} skipped in this mode or defined in reconftw.cfg ${reset}\n" else printf "${yellow} ${FUNCNAME[0]} is already processed, to force executing ${FUNCNAME[0]} delete\n $called_fn_dir/.${FUNCNAME[0]} ${reset}\n\n" @@ -1777,20 +2048,20 @@ function spraying(){ fi } -function command_injection(){ - if { [ ! -f "$called_fn_dir/.${FUNCNAME[0]}" ] || [ "$DIFF" = true ]; } && [ "$COMM_INJ" = true ] && [ -s "gf/rce.txt" ]; then +function command_injection() { + if { [[ ! 
-f "$called_fn_dir/.${FUNCNAME[0]}" ]] || [[ "$DIFF" = true ]]; } && [[ "$COMM_INJ" = true ]] && [[ -s "gf/rce.txt" ]]; then start_func ${FUNCNAME[0]} "Command Injection checks" - [ -s "gf/rce.txt" ] && cat gf/rce.txt | qsreplace FUZZ | sed '/FUZZ/!d' | anew -q .tmp/tmp_rce.txt - if [ "$DEEP" = true ] || [[ $(cat .tmp/tmp_rce.txt | wc -l) -le $DEEP_LIMIT ]]; then - [ -s ".tmp/tmp_rce.txt" ] && python3 $tools/commix/commix.py --batch -m .tmp/tmp_rce.txt --output-dir vulns/command_injection.txt 2>>"$LOGFILE" >/dev/null + [ -s "gf/rce.txt" ] && cat gf/rce.txt | qsreplace FUZZ | sed '/FUZZ/!d' | anew -q .tmp/tmp_rce.txt + if [[ "$DEEP" = true ]] || [[ $(cat .tmp/tmp_rce.txt | wc -l) -le $DEEP_LIMIT ]]; then + [ -s ".tmp/tmp_rce.txt" ] && python3 ${tools}/commix/commix.py --batch -m .tmp/tmp_rce.txt --output-dir vulns/command_injection.txt 2>>"$LOGFILE" >/dev/null end_func "Results are saved in vulns/command_injection folder" ${FUNCNAME[0]} else end_func "Skipping Command injection: Too many URLs to test, try with --deep flag" ${FUNCNAME[0]} fi else - if [ "$COMM_INJ" = false ]; then + if [[ "$COMM_INJ" = false ]]; then printf "\n${yellow} ${FUNCNAME[0]} skipped in this mode or defined in reconftw.cfg ${reset}\n" - elif [ ! -s "gf/rce.txt" ]; then + elif [[ ! -s "gf/rce.txt" ]]; then printf "\n${yellow} ${FUNCNAME[0]} No URLs potentially vulnerables to Command Injection ${reset}\n\n" else printf "${yellow} ${FUNCNAME[0]} is already processed, to force executing ${FUNCNAME[0]} delete\n $called_fn_dir/.${FUNCNAME[0]} ${reset}\n\n" @@ -1798,21 +2069,27 @@ function command_injection(){ fi } -function 4xxbypass(){ - if { [ ! -f "$called_fn_dir/.${FUNCNAME[0]}" ] || [ "$DIFF" = true ]; } && [ "$BYPASSER4XX" = true ]; then - if [[ $(cat fuzzing/fuzzing_full.txt 2>/dev/null | grep -E '^4' | grep -Ev '^404' | cut -d ' ' -f3 | wc -l) -le 1000 ]] || [ "$DEEP" = true ]; then +function 4xxbypass() { + if { [[ ! 
-f "$called_fn_dir/.${FUNCNAME[0]}" ]] || [[ "$DIFF" = true ]]; } && [[ "$BYPASSER4XX" = true ]]; then + if [[ $(cat fuzzing/fuzzing_full.txt 2>/dev/null | grep -E '^4' | grep -Ev '^404' | cut -d ' ' -f3 | wc -l) -le 1000 ]] || [[ "$DEEP" = true ]]; then start_func "403 bypass" - cat $dir/fuzzing/fuzzing_full.txt 2>/dev/null | grep -E '^4' | grep -Ev '^404' | cut -d ' ' -f3 > $dir/.tmp/403test.txt - cd "$tools/byp4xx" || { echo "Failed to cd directory in ${FUNCNAME[0]} @ line ${LINENO}"; exit 1; } - byp4xx -threads $BYP4XX_THREADS $dir/.tmp/403test.txt > $dir/.tmp/byp4xx.txt - cd "$dir" || { echo "Failed to cd directory in ${FUNCNAME[0]} @ line ${LINENO}"; exit 1; } + cat $dir/fuzzing/fuzzing_full.txt 2>/dev/null | grep -E '^4' | grep -Ev '^404' | cut -d ' ' -f3 >$dir/.tmp/403test.txt + cd "${tools}/byp4xx" || { + echo "Failed to cd directory in ${FUNCNAME[0]} @ line ${LINENO}" + exit 1 + } + byp4xx -threads $BYP4XX_THREADS $dir/.tmp/403test.txt >$dir/.tmp/byp4xx.txt + cd "$dir" || { + echo "Failed to cd directory in ${FUNCNAME[0]} @ line ${LINENO}" + exit 1 + } [ -s ".tmp/byp4xx.txt" ] && cat .tmp/byp4xx.txt | anew -q vulns/byp4xx.txt end_func "Results are saved in vulns/byp4xx.txt" ${FUNCNAME[0]} else notification "Too many urls to bypass, skipping" warn fi else - if [ "$BYPASSER4XX" = false ]; then + if [[ "$BYPASSER4XX" = false ]]; then printf "\n${yellow} ${FUNCNAME[0]} skipped in this mode or defined in reconftw.cfg ${reset}\n" else printf "${yellow} ${FUNCNAME[0]} is already processed, to force executing ${FUNCNAME[0]} delete\n $called_fn_dir/.${FUNCNAME[0]} ${reset}\n\n" @@ -1820,10 +2097,10 @@ function 4xxbypass(){ fi } -function prototype_pollution(){ - if { [ ! -f "$called_fn_dir/.${FUNCNAME[0]}" ] || [ "$DIFF" = true ]; } && [ "$PROTO_POLLUTION" = true ] ; then +function prototype_pollution() { + if { [[ ! -f "$called_fn_dir/.${FUNCNAME[0]}" ]] || [[ "$DIFF" = true ]]; } && [[ "$PROTO_POLLUTION" = true ]]; then start_func ${FUNCNAME[0]} "Prototype Pollution checks" - if [ "$DEEP" = true ] || [[ $(cat webs/url_extract.txt | wc -l) -le $DEEP_LIMIT ]]; then + if [[ "$DEEP" = true ]] || [[ $(cat webs/url_extract.txt | wc -l) -le $DEEP_LIMIT ]]; then [ -s "webs/url_extract.txt" ] && ppfuzz -l webs/url_extract.txt -c $PPFUZZ_THREADS 2>/dev/null | anew -q .tmp/prototype_pollution.txt [ -s ".tmp/prototype_pollution.txt" ] && cat .tmp/prototype_pollution.txt | sed -e '1,8d' | sed '/^\[ERR/d' | anew -q vulns/prototype_pollution.txt end_func "Results are saved in vulns/prototype_pollution.txt" ${FUNCNAME[0]} @@ -1831,7 +2108,7 @@ function prototype_pollution(){ end_func "Skipping Prototype Pollution: Too many URLs to test, try with --deep flag" ${FUNCNAME[0]} fi else - if [ "$PROTO_POLLUTION" = false ]; then + if [[ "$PROTO_POLLUTION" = false ]]; then printf "\n${yellow} ${FUNCNAME[0]} skipped in this mode or defined in reconftw.cfg ${reset}\n" else printf "${yellow} ${FUNCNAME[0]} is already processed, to force executing ${FUNCNAME[0]} delete\n $called_fn_dir/.${FUNCNAME[0]} ${reset}\n\n" @@ -1839,21 +2116,27 @@ function prototype_pollution(){ fi } -function smuggling(){ - if { [ ! -f "$called_fn_dir/.${FUNCNAME[0]}" ] || [ "$DIFF" = true ]; } && [ "$SMUGGLING" = true ] ; then +function smuggling() { + if { [[ ! -f "$called_fn_dir/.${FUNCNAME[0]}" ]] || [[ "$DIFF" = true ]]; } && [[ "$SMUGGLING" = true ]]; then start_func ${FUNCNAME[0]} "HTTP Request Smuggling checks" [ ! 
-s ".tmp/webs_all.txt" ] && cat webs/webs.txt webs/webs_uncommon_ports.txt 2>/dev/null | anew -q .tmp/webs_all.txt - if [ "$DEEP" = true ] || [[ $(cat .tmp/webs_all.txt | wc -l) -le $DEEP_LIMIT ]]; then - cd "$tools/smuggler" || { echo "Failed to cd directory in ${FUNCNAME[0]} @ line ${LINENO}"; exit 1; } + if [[ "$DEEP" = true ]] || [[ $(cat .tmp/webs_all.txt | wc -l) -le $DEEP_LIMIT ]]; then + cd "${tools}/smuggler" || { + echo "Failed to cd directory in ${FUNCNAME[0]} @ line ${LINENO}" + exit 1 + } cat $dir/.tmp/webs_all.txt | python3 smuggler.py -q --no-color 2>/dev/null | anew -q $dir/.tmp/smuggling.txt - cd "$dir" || { echo "Failed to cd to $dir in ${FUNCNAME[0]} @ line ${LINENO}"; exit 1; } + cd "$dir" || { + echo "Failed to cd to $dir in ${FUNCNAME[0]} @ line ${LINENO}" + exit 1 + } [ -s ".tmp/smuggling.txt" ] && cat .tmp/smuggling.txt | anew -q vulns/smuggling.txt end_func "Results are saved in vulns/smuggling.txt" ${FUNCNAME[0]} else end_func "Skipping Prototype Pollution: Too many webs to test, try with --deep flag" ${FUNCNAME[0]} fi else - if [ "$SMUGGLING" = false ]; then + if [[ "$SMUGGLING" = false ]]; then printf "\n${yellow} ${FUNCNAME[0]} skipped in this mode or defined in reconftw.cfg ${reset}\n" else printf "${yellow} ${FUNCNAME[0]} is already processed, to force executing ${FUNCNAME[0]} delete\n $called_fn_dir/.${FUNCNAME[0]} ${reset}\n\n" @@ -1861,21 +2144,27 @@ function smuggling(){ fi } -function webcache(){ - if { [ ! -f "$called_fn_dir/.${FUNCNAME[0]}" ] || [ "$DIFF" = true ]; } && [ "$WEBCACHE" = true ] ; then +function webcache() { + if { [[ ! -f "$called_fn_dir/.${FUNCNAME[0]}" ]] || [[ "$DIFF" = true ]]; } && [[ "$WEBCACHE" = true ]]; then start_func ${FUNCNAME[0]} "Web Cache Poisoning checks" [ ! -s ".tmp/webs_all.txt" ] && cat webs/webs.txt webs/webs_uncommon_ports.txt 2>/dev/null | anew -q .tmp/webs_all.txt - if [ "$DEEP" = true ] || [[ $(cat .tmp/webs_all.txt | wc -l) -le $DEEP_LIMIT ]]; then - cd "$tools/Web-Cache-Vulnerability-Scanner" || { echo "Failed to cd directory in ${FUNCNAME[0]} @ line ${LINENO}"; exit 1; } + if [[ "$DEEP" = true ]] || [[ $(cat .tmp/webs_all.txt | wc -l) -le $DEEP_LIMIT ]]; then + cd "${tools}/Web-Cache-Vulnerability-Scanner" || { + echo "Failed to cd directory in ${FUNCNAME[0]} @ line ${LINENO}" + exit 1 + } Web-Cache-Vulnerability-Scanner -u file:$dir/.tmp/webs_all.txt -v 0 2>/dev/null | anew -q $dir/.tmp/webcache.txt - cd "$dir" || { echo "Failed to cd to $dir in ${FUNCNAME[0]} @ line ${LINENO}"; exit 1; } + cd "$dir" || { + echo "Failed to cd to $dir in ${FUNCNAME[0]} @ line ${LINENO}" + exit 1 + } [ -s ".tmp/webcache.txt" ] && cat .tmp/webcache.txt | anew -q vulns/webcache.txt end_func "Results are saved in vulns/webcache.txt" ${FUNCNAME[0]} else end_func "Web Cache Poisoning: Too many webs to test, try with --deep flag" ${FUNCNAME[0]} fi else - if [ "$WEBCACHE" = false ]; then + if [[ "$WEBCACHE" = false ]]; then printf "\n${yellow} ${FUNCNAME[0]} skipped in this mode or defined in reconftw.cfg ${reset}\n" else printf "${yellow} ${FUNCNAME[0]} is already processed, to force executing ${FUNCNAME[0]} delete\n $called_fn_dir/.${FUNCNAME[0]} ${reset}\n\n" @@ -1883,14 +2172,14 @@ function webcache(){ fi } -function fuzzparams(){ - if { [ ! -f "$called_fn_dir/.${FUNCNAME[0]}" ] || [ "$DIFF" = true ]; } && [ "$FUZZPARAMS" = true ] ; then +function fuzzparams() { + if { [[ ! 
-f "$called_fn_dir/.${FUNCNAME[0]}" ]] || [[ "$DIFF" = true ]]; } && [[ "$FUZZPARAMS" = true ]]; then start_func ${FUNCNAME[0]} "Fuzzing params values checks" - if [ "$DEEP" = true ] || [[ $(cat webs/url_extract.txt | wc -l) -le $DEEP_LIMIT2 ]]; then - if [ ! "$AXIOM" = true ]; then + if [[ "$DEEP" = true ]] || [[ $(cat webs/url_extract.txt | wc -l) -le $DEEP_LIMIT2 ]]; then + if [[ ! "$AXIOM" = true ]]; then nuclei -update 2>>"$LOGFILE" >/dev/null - git -C $tools/fuzzing-templates pull - cat webs/url_extract.txt 2>/dev/null | nuclei -silent -retries 3 -rl $NUCLEI_RATELIMIT -t $tools/fuzzing-templates -o .tmp/fuzzparams.txt + git -C ${tools}/fuzzing-templates pull + cat webs/url_extract.txt 2>/dev/null | nuclei -silent -retries 3 -rl $NUCLEI_RATELIMIT -t ${tools}/fuzzing-templates -o .tmp/fuzzparams.txt else axiom-exec "git clone https://github.com/projectdiscovery/fuzzing-templates /home/op/fuzzing-templates" &>/dev/null axiom-scan webs/url_extract.txt -m nuclei -nh -retries 3 -w /home/op/fuzzing-templates -rl $NUCLEI_RATELIMIT -o .tmp/fuzzparams.txt $AXIOM_EXTRA_ARGS 2>>"$LOGFILE" >/dev/null @@ -1901,7 +2190,7 @@ function fuzzparams(){ end_func "Fuzzing params values: Too many entries to test, try with --deep flag" ${FUNCNAME[0]} fi else - if [ "$FUZZPARAMS" = false ]; then + if [[ "$FUZZPARAMS" = false ]]; then printf "\n${yellow} ${FUNCNAME[0]} skipped in this mode or defined in reconftw.cfg ${reset}\n" else printf "${yellow} ${FUNCNAME[0]} is already processed, to force executing ${FUNCNAME[0]} delete\n $called_fn_dir/.${FUNCNAME[0]} ${reset}\n\n" @@ -1913,12 +2202,10 @@ function fuzzparams(){ ########################################## OPTIONS & MGMT ##################################################### ############################################################################################################### -function deleteOutScoped(){ - if [ -s "$1" ]; then - cat $1 | while read outscoped - do - if grep -q "^[*]" <<< $outscoped - then +function deleteOutScoped() { + if [[ -s "$1" ]]; then + cat $1 | while read outscoped; do + if grep -q "^[*]" <<<$outscoped; then outscoped="${outscoped:1}" sed -i /"$outscoped$"/d $2 else @@ -1931,13 +2218,13 @@ function deleteOutScoped(){ function getElapsedTime { runtime="" local T=$2-$1 - local D=$((T/60/60/24)) - local H=$((T/60/60%24)) - local M=$((T/60%60)) - local S=$((T%60)) - (( $D > 0 )) && runtime="$runtime$D days, " - (( $H > 0 )) && runtime="$runtime$H hours, " - (( $M > 0 )) && runtime="$runtime$M minutes, " + local D=$((T / 60 / 60 / 24)) + local H=$((T / 60 / 60 % 24)) + local M=$((T / 60 % 60)) + local S=$((T % 60)) + ((D > 0)) && runtime="$runtime$D days, " + ((H > 0)) && runtime="$runtime$H hours, " + ((M > 0)) && runtime="$runtime$M minutes, " runtime="$runtime$S seconds." } @@ -1947,7 +2234,7 @@ function zipSnedOutputFolder { (cd "$dir" && zip -r "$zip_name" .) 
echo "Sending zip file "${dir}/${zip_name}"" - if [ -s "${dir}/$zip_name" ]; then + if [[ -s "${dir}/$zip_name" ]]; then sendToNotify "$dir/$zip_name" rm -f "${dir}/$zip_name" else @@ -1956,67 +2243,67 @@ function zipSnedOutputFolder { } function isAsciiText { - IS_ASCII="False"; + IS_ASCII="False" if [[ $(file $1 | grep -o 'ASCII text$') == "ASCII text" ]]; then - IS_ASCII="True"; + IS_ASCII="True" else - IS_ASCII="False"; + IS_ASCII="False" fi } -function output(){ +function output() { mkdir -p $dir_output cp -r $dir $dir_output [[ "$(dirname $dir)" != "$dir_output" ]] && rm -rf "$dir" } -function remove_big_files(){ +function remove_big_files() { eval rm -rf .tmp/gotator*.txt 2>>"$LOGFILE" eval rm -rf .tmp/brute_recursive_wordlist.txt 2>>"$LOGFILE" - eval rm -rf .tmp/subs_dns_tko.txt 2>>"$LOGFILE" + eval rm -rf .tmp/subs_dns_tko.txt 2>>"$LOGFILE" eval rm -rf .tmp/subs_no_resolved.txt .tmp/subdomains_dns.txt .tmp/brute_dns_tko.txt .tmp/scrap_subs.txt .tmp/analytics_subs_clean.txt .tmp/gotator1.txt .tmp/gotator2.txt .tmp/passive_recursive.txt .tmp/brute_recursive_wordlist.txt .tmp/gotator1_recursive.txt .tmp/gotator2_recursive.txt 2>>"$LOGFILE" eval find .tmp -type f -size +200M -exec rm -f {} + 2>>"$LOGFILE" } -function notification(){ - if [ -n "$1" ] && [ -n "$2" ]; then +function notification() { + if [[ -n "$1" ]] && [[ -n "$2" ]]; then case $2 in - info) - text="\n${bblue} ${1} ${reset}" - printf "${text}\n" && printf "${text} - ${domain}\n" | $NOTIFY + info) + text="\n${bblue} ${1} ${reset}" + printf "${text}\n" && printf "${text} - ${domain}\n" | $NOTIFY ;; - warn) - text="\n${yellow} ${1} ${reset}" - printf "${text}\n" && printf "${text} - ${domain}\n" | $NOTIFY + warn) + text="\n${yellow} ${1} ${reset}" + printf "${text}\n" && printf "${text} - ${domain}\n" | $NOTIFY ;; - error) - text="\n${bred} ${1} ${reset}" - printf "${text}\n" && printf "${text} - ${domain}\n" | $NOTIFY + error) + text="\n${bred} ${1} ${reset}" + printf "${text}\n" && printf "${text} - ${domain}\n" | $NOTIFY ;; - good) - text="\n${bgreen} ${1} ${reset}" - printf "${text}\n" && printf "${text} - ${domain}\n" | $NOTIFY + good) + text="\n${bgreen} ${1} ${reset}" + printf "${text}\n" && printf "${text} - ${domain}\n" | $NOTIFY ;; esac fi } -function transfer { - if [ $# -eq 0 ]; then - echo "No arguments specified.\nUsage:\n transfer \n ... | transfer ">&2 +function transfer { + if [[ $# -eq 0 ]]; then + echo "No arguments specified.\nUsage:\n transfer \n ... | transfer " >&2 return 1 fi - if tty -s; then + if tty -s; then file="$1" file_name=$(basename "$file") - if [ ! -e "$file" ]; then - echo "$file: No such file or directory">&2 + if [[ ! -e "$file" ]]; then + echo "$file: No such file or directory" >&2 return 1 fi - if [ -d "$file" ]; then + if [[ -d "$file" ]]; then file_name="$file_name.zip" - (cd "$file"&&zip -r -q - .) | curl --progress-bar --upload-file "-" "https://transfer.sh/$file_name" | tee /dev/null - else + (cd "$file" && zip -r -q - .) 
| curl --progress-bar --upload-file "-" "https://transfer.sh/$file_name" | tee /dev/null + else cat "$file" | curl --progress-bar --upload-file "-" "https://transfer.sh/$file_name" | tee /dev/null fi else @@ -2026,121 +2313,121 @@ function transfer { } function sendToNotify { - if [[ -z "$1" ]]; then + if [[ -z $1 ]]; then printf "\n${yellow} no file provided to send ${reset}\n" else - if [[ -z "$NOTIFY_CONFIG" ]]; then + if [[ -z $NOTIFY_CONFIG ]]; then NOTIFY_CONFIG=~/.config/notify/provider-config.yaml fi - if [ -n "$(find "${1}" -prune -size +8000000c)" ]; then - printf '%s is larger than 8MB, sending over transfer.sh\n' "${1}" + if [[ -n "$(find "${1}" -prune -size +8000000c)" ]]; then + printf '%s is larger than 8MB, sending over transfer.sh\n' "${1}" transfer "${1}" | notify return 0 fi - if grep -q '^ telegram\|^telegram\|^ telegram' $NOTIFY_CONFIG ; then + if grep -q '^ telegram\|^telegram\|^ telegram' $NOTIFY_CONFIG; then notification "Sending ${domain} data over Telegram" info telegram_chat_id=$(cat ${NOTIFY_CONFIG} | grep '^ telegram_chat_id\|^telegram_chat_id\|^ telegram_chat_id' | xargs | cut -d' ' -f2) - telegram_key=$(cat ${NOTIFY_CONFIG} | grep '^ telegram_api_key\|^telegram_api_key\|^ telegram_apikey' | xargs | cut -d' ' -f2 ) + telegram_key=$(cat ${NOTIFY_CONFIG} | grep '^ telegram_api_key\|^telegram_api_key\|^ telegram_apikey' | xargs | cut -d' ' -f2) curl -F document=@${1} "https://api.telegram.org/bot${telegram_key}/sendDocument?chat_id=${telegram_chat_id}" 2>>"$LOGFILE" >/dev/null fi - if grep -q '^ discord\|^discord\|^ discord' $NOTIFY_CONFIG ; then + if grep -q '^ discord\|^discord\|^ discord' $NOTIFY_CONFIG; then notification "Sending ${domain} data over Discord" info discord_url=$(cat ${NOTIFY_CONFIG} | grep '^ discord_webhook_url\|^discord_webhook_url\|^ discord_webhook_url' | xargs | cut -d' ' -f2) curl -v -i -H "Accept: application/json" -H "Content-Type: multipart/form-data" -X POST -F file1=@${1} $discord_url 2>>"$LOGFILE" >/dev/null fi - if [[ -n "$slack_channel" ]] && [[ -n "$slack_auth" ]]; then + if [[ -n $slack_channel ]] && [[ -n $slack_auth ]]; then notification "Sending ${domain} data over Slack" info curl -F file=@${1} -F "initial_comment=reconftw zip file" -F channels=${slack_channel} -H "Authorization: Bearer ${slack_auth}" https://slack.com/api/files.upload 2>>"$LOGFILE" >/dev/null fi fi } -function start_func(){ +function start_func() { printf "${bgreen}#######################################################################" notification "${2}" info - echo "[ $(date +"%F %T") ] Start function : ${1} " >> "${LOGFILE}" + echo "[ $(date +"%F %T") ] Start function : ${1} " >>"${LOGFILE}" start=$(date +%s) } -function end_func(){ +function end_func() { touch $called_fn_dir/.${2} end=$(date +%s) getElapsedTime $start $end notification "${2} Finished in ${runtime}" info - echo "[ $(date +"%F %T") ] End function : ${2} " >> "${LOGFILE}" + echo "[ $(date +"%F %T") ] End function : ${2} " >>"${LOGFILE}" printf "${bblue} ${1} ${reset}\n" printf "${bgreen}#######################################################################${reset}\n" } -function start_subfunc(){ +function start_subfunc() { notification "${2}" warn - echo "[ $(date +"%F %T") ] Start subfunction : ${1} " >> "${LOGFILE}" + echo "[ $(date +"%F %T") ] Start subfunction : ${1} " >>"${LOGFILE}" start_sub=$(date +%s) } -function end_subfunc(){ +function end_subfunc() { touch $called_fn_dir/.${2} end_sub=$(date +%s) getElapsedTime $start_sub $end_sub notification "${1} in ${runtime}" good - echo 
"[ $(date +"%F %T") ] End subfunction : ${1} " >> "${LOGFILE}" + echo "[ $(date +"%F %T") ] End subfunction : ${1} " >>"${LOGFILE}" } -function check_inscope(){ - cat $1 | inscope > $1_tmp && cp $1_tmp $1 && rm -f $1_tmp +function check_inscope() { + cat $1 | inscope >$1_tmp && cp $1_tmp $1 && rm -f $1_tmp } -function resolvers_update(){ - if [ "$generate_resolvers" = true ]; then - if [ ! "$AXIOM" = true ]; then - if [ ! -s "$resolvers" ] || [[ $(find "$resolvers" -mtime +1 -print) ]] ; then +function resolvers_update() { + if [[ "$generate_resolvers" = true ]]; then + if [[ ! "$AXIOM" = true ]]; then + if [[ ! -s "$resolvers" ]] || [[ $(find "$resolvers" -mtime +1 -print) ]]; then notification "Resolvers seem older than 1 day\n Generating custom resolvers..." warn eval rm -f $resolvers 2>>"$LOGFILE" dnsvalidator -tL https://public-dns.info/nameservers.txt -threads $DNSVALIDATOR_THREADS -o $resolvers 2>>"$LOGFILE" >/dev/null dnsvalidator -tL https://raw.githubusercontent.com/blechschmidt/massdns/master/lists/resolvers.txt -threads $DNSVALIDATOR_THREADS -o tmp_resolvers 2>>"$LOGFILE" >/dev/null [ -s "tmp_resolvers" ] && cat tmp_resolvers | anew -q $resolvers [ -s "tmp_resolvers" ] && rm -f tmp_resolvers 2>>"$LOGFILE" >/dev/null - [ ! -s "$resolvers" ] && wget -q -O - ${resolvers_url} > $resolvers - [ ! -s "$resolvers_trusted" ] && wget -q -O - ${resolvers_trusted_url} > $resolvers_trusted + [ ! -s "$resolvers" ] && wget -q -O - ${resolvers_url} >$resolvers + [ ! -s "$resolvers_trusted" ] && wget -q -O - ${resolvers_trusted_url} >$resolvers_trusted notification "Updated\n" good - fi + fi else notification "Checking resolvers lists...\n Accurate resolvers are the key to great results\n This may take around 10 minutes if it's not updated" warn # shellcheck disable=SC2016 - axiom-exec 'if [ $(find "/home/op/lists/resolvers.txt" -mtime +1 -print) ] || [ $(cat /home/op/lists/resolvers.txt | wc -l) -le 40 ] ; then dnsvalidator -tL https://public-dns.info/nameservers.txt -threads 200 -o /home/op/lists/resolvers.txt ; fi' &>/dev/null + axiom-exec 'if [[ $(find "/home/op/lists/resolvers.txt" -mtime +1 -print) ]] || [[ $(cat /home/op/lists/resolvers.txt | wc -l) -le 40 ] ; then dnsvalidator -tL https://public-dns.info/nameservers.txt -threads 200 -o /home/op/lists/resolvers.txt ; fi' &>/dev/null axiom-exec "wget -q -O - ${resolvers_url} > /home/op/lists/resolvers.txt" 2>>"$LOGFILE" >/dev/null axiom-exec "wget -q -O - ${resolvers_trusted_url} > /home/op/lists/resolvers_trusted.txt" 2>>"$LOGFILE" >/dev/null notification "Updated\n" good fi generate_resolvers=false else - - if [ ! -s "$resolvers" ] || [[ $(find "$resolvers" -mtime +1 -print) ]] ; then + + if [[ ! -s "$resolvers" ]] || [[ $(find "$resolvers" -mtime +1 -print) ]]; then notification "Resolvers seem older than 1 day\n Downloading new resolvers..." 
warn - wget -q -O - ${resolvers_url} > $resolvers - wget -q -O - ${resolvers_trusted_url} > $resolvers_trusted + wget -q -O - ${resolvers_url} >$resolvers + wget -q -O - ${resolvers_trusted_url} >$resolvers_trusted notification "Resolvers updated\n" good fi fi } -function resolvers_update_quick_local(){ - if [ "$update_resolvers" = true ]; then - wget -q -O - ${resolvers_url} > $resolvers - wget -q -O - ${resolvers_trusted_url} > $resolvers_trusted +function resolvers_update_quick_local() { + if [[ "$update_resolvers" = true ]]; then + wget -q -O - ${resolvers_url} >$resolvers + wget -q -O - ${resolvers_trusted_url} >$resolvers_trusted fi } -function resolvers_update_quick_axiom(){ +function resolvers_update_quick_axiom() { axiom-exec "wget -q -O - ${resolvers_url} > /home/op/lists/resolvers.txt" 2>>"$LOGFILE" >/dev/null axiom-exec "wget -q -O - ${resolvers_trusted_url} > /home/op/lists/resolvers_trusted.txt" 2>>"$LOGFILE" >/dev/null } -function ipcidr_target(){ +function ipcidr_target() { IP_CIDR_REGEX='(((25[0-5]|2[0-4][0-9]|1?[0-9][0-9]?)\.){3}(25[0-5]|2[0-4][0-9]|1?[0-9][0-9]?))(\/([8-9]|[1-2][0-9]|3[0-2]))([^0-9.]|$)|(((25[0-5]|2[0-4][0-9]|1?[0-9][0-9]?)\.){3}(25[0-5]|2[0-4][0-9]|1?[0-9][0-9]?)$)' if [[ $1 =~ ^$IP_CIDR_REGEX ]]; then echo $1 | mapcidr -silent | anew -q target_reconftw_ipcidr.txt - if [ -s "./target_reconftw_ipcidr.txt" ]; then + if [[ -s "./target_reconftw_ipcidr.txt" ]]; then [ "$REVERSE_IP" = true ] && cat ./target_reconftw_ipcidr.txt | hakip2host | cut -d' ' -f 3 | unfurl -u domains 2>/dev/null | sed -e 's/*\.//' -e 's/\.$//' -e '/\./!d' | anew -q ./target_reconftw_ipcidr.txt if [[ $(cat ./target_reconftw_ipcidr.txt | wc -l) -eq 1 ]]; then domain=$(cat ./target_reconftw_ipcidr.txt) @@ -2149,16 +2436,16 @@ function ipcidr_target(){ list=${PWD}/target_reconftw_ipcidr.txt fi fi - if [ -n "$2" ]; then + if [[ -n "$2" ]]; then cat $list | anew -q $2 sed -i '/\/[0-9]*$/d' $2 fi fi } -function axiom_lauch(){ +function axiom_lauch() { # let's fire up a FLEET! - if [ "$AXIOM_FLEET_LAUNCH" = true ] && [ -n "$AXIOM_FLEET_NAME" ] && [ -n "$AXIOM_FLEET_COUNT" ]; then + if [[ "$AXIOM_FLEET_LAUNCH" = true ]] && [[ -n "$AXIOM_FLEET_NAME" ]] && [[ -n "$AXIOM_FLEET_COUNT" ]]; then start_func ${FUNCNAME[0]} "Launching our Axiom fleet" python3 -m pip install --upgrade linode-cli 2>>"$LOGFILE" >/dev/null # Check to see if we have a fleet already, if so, SKIP THIS! 
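As a side note before the next hunk, a minimal worked example of the fleet top-up arithmetic shown below (the fleet name and counts are illustrative assumptions, not values from the patch):

    # Hypothetical run: a fleet of 10 is requested and 4 instances are already up
    AXIOM_FLEET_NAME="reconftw"
    AXIOM_FLEET_COUNT=10
    NUMOFNODES=$(timeout 30 axiom-ls | grep -c "$AXIOM_FLEET_NAME")  # assume this counts 4
    if [[ $NUMOFNODES -eq 0 ]]; then
        startcount=$AXIOM_FLEET_COUNT                    # no fleet yet: launch all 10
    else
        startcount=$((AXIOM_FLEET_COUNT - NUMOFNODES))   # top up only the difference: 10 - 4 = 6
    fi
    axiom-fleet "$AXIOM_FLEET_NAME" -i "$startcount"     # same call the function builds via AXIOM_ARGS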
@@ -2170,7 +2457,7 @@ function axiom_lauch(){ if [[ $NUMOFNODES -eq 0 ]]; then startcount=$AXIOM_FLEET_COUNT else - startcount=$((AXIOM_FLEET_COUNT-NUMOFNODES)) + startcount=$((AXIOM_FLEET_COUNT - NUMOFNODES)) fi AXIOM_ARGS=" -i $startcount" # Temporarily disabled multiple axiom regions @@ -2179,21 +2466,21 @@ function axiom_lauch(){ echo "axiom-fleet ${AXIOM_FLEET_NAME} ${AXIOM_ARGS}" axiom-fleet ${AXIOM_FLEET_NAME} ${AXIOM_ARGS} axiom-select "$AXIOM_FLEET_NAME*" - if [ -n "$AXIOM_POST_START" ]; then + if [[ -n "$AXIOM_POST_START" ]]; then eval "$AXIOM_POST_START" 2>>"$LOGFILE" >/dev/null fi - NUMOFNODES=$(timeout 30 axiom-ls | grep -c "$AXIOM_FLEET_NAME" ) + NUMOFNODES=$(timeout 30 axiom-ls | grep -c "$AXIOM_FLEET_NAME") echo "Axiom fleet $AXIOM_FLEET_NAME launched w/ $NUMOFNODES instances" | $NOTIFY end_func "Axiom fleet $AXIOM_FLEET_NAME launched w/ $NUMOFNODES instances" fi fi } -function axiom_shutdown(){ - if [ "$AXIOM_FLEET_LAUNCH" = true ] && [ "$AXIOM_FLEET_SHUTDOWN" = true ] && [ -n "$AXIOM_FLEET_NAME" ]; then - #if [ "$mode" == "subs_menu" ] || [ "$mode" == "list_recon" ] || [ "$mode" == "passive" ] || [ "$mode" == "all" ]; then - if [ "$mode" == "subs_menu" ] || [ "$mode" == "passive" ] || [ "$mode" == "all" ]; then +function axiom_shutdown() { + if [[ "$AXIOM_FLEET_LAUNCH" = true ]] && [[ "$AXIOM_FLEET_SHUTDOWN" = true ]] && [[ -n "$AXIOM_FLEET_NAME" ]]; then + #if [[ "$mode" == "subs_menu" ]] || [[ "$mode" == "list_recon" ]] || [[ "$mode" == "passive" ]] || [[ "$mode" == "all" ]]; then + if [[ "$mode" == "subs_menu" ]] || [[ "$mode" == "passive" ]] || [[ "$mode" == "all" ]]; then notification "Automatic Axiom fleet shutdown is not enabled in this mode" info return fi @@ -2203,7 +2490,7 @@ function axiom_shutdown(){ fi } -function axiom_selected(){ +function axiom_selected() { if [[ ! $(axiom-ls | tail -n +2 | sed '$ d' | wc -l) -gt 0 ]]; then notification "\n\n${bred} No axiom instances running ${reset}\n\n" error @@ -2216,58 +2503,60 @@ function axiom_selected(){ fi } -function start(){ +function start() { global_start=$(date +%s) - if [ "$NOTIFICATION" = true ]; then + if [[ "$NOTIFICATION" = true ]]; then NOTIFY="notify -silent" else - NOTIFY="" + NOTIFY="" fi - + printf "\n${bgreen}#######################################################################${reset}" notification "Recon succesfully started on ${domain}" good [ "$SOFT_NOTIFICATION" = true ] && echo "Recon succesfully started on ${domain}" | notify -silent printf "${bgreen}#######################################################################${reset}\n" - if [ "$upgrade_before_running" = true ]; then + if [[ "$upgrade_before_running" = true ]]; then ${SCRIPTPATH}/install.sh --tools fi tools_installed #[[ -n "$domain" ]] && ipcidr_target $domain - - if [ -z "$domain" ]; then - if [ -n "$list" ]; then - if [ -z "$domain" ]; then + if [[ -z "$domain" ]]; then + if [[ -n "$list" ]]; then + if [[ -z "$domain" ]]; then domain="Multi" - dir="$SCRIPTPATH/Recon/$domain" + dir="${SCRIPTPATH}/Recon/$domain" called_fn_dir="$dir"/.called_fn fi - if [[ "$list" = /* ]]; then + if [[ $list == /* ]]; then install -D "$list" "$dir"/webs/webs.txt else - install -D "$SCRIPTPATH"/"$list" "$dir"/webs/webs.txt + install -D "${SCRIPTPATH}"/"$list" "$dir"/webs/webs.txt fi fi else - dir="$SCRIPTPATH/Recon/$domain" + dir="${SCRIPTPATH}/Recon/$domain" called_fn_dir="$dir"/.called_fn fi - if [ -z "$domain" ]; then + if [[ -z "$domain" ]]; then notification "\n\n${bred} No domain or list provided ${reset}\n\n" error exit fi - if [ ! 
-d "$called_fn_dir" ]; then + if [[ ! -d "$called_fn_dir" ]]; then mkdir -p "$called_fn_dir" fi mkdir -p "$dir" - cd "$dir" || { echo "Failed to cd directory in ${FUNCNAME[0]} @ line ${LINENO}"; exit 1; } - if [ "$AXIOM" = true ]; then - if [ -n "$domain" ]; then + cd "$dir" || { + echo "Failed to cd directory in ${FUNCNAME[0]} @ line ${LINENO}" + exit 1 + } + if [[ "$AXIOM" = true ]]; then + if [[ -n "$domain" ]]; then echo "$domain" | anew -q target.txt list="${dir}/target.txt" fi @@ -2278,40 +2567,40 @@ function start(){ NOWT=$(date +"%T") LOGFILE="${dir}/.log/${NOW}_${NOWT}.txt" touch .log/${NOW}_${NOWT}.txt - echo "Start ${NOW} ${NOWT}" > "${LOGFILE}" + echo "Start ${NOW} ${NOWT}" >"${LOGFILE}" printf "\n" printf "${bred} Target: ${domain}\n\n" } -function end(){ +function end() { find $dir -type f -empty -print | grep -v '.called_fn' | grep -v '.log' | grep -v '.tmp' | xargs rm -f 2>>"$LOGFILE" >/dev/null find $dir -type d -empty -print -delete 2>>"$LOGFILE" >/dev/null - echo "End $(date +"%F") $(date +"%T")" >> "${LOGFILE}" + echo "End $(date +"%F") $(date +"%T")" >>"${LOGFILE}" - if [ ! "$PRESERVE" = true ]; then + if [[ ! "$PRESERVE" = true ]]; then find $dir -type f -empty | grep -v "called_fn" | xargs rm -f 2>>"$LOGFILE" >/dev/null find $dir -type d -empty | grep -v "called_fn" | xargs rm -rf 2>>"$LOGFILE" >/dev/null fi - if [ "$REMOVETMP" = true ]; then + if [[ "$REMOVETMP" = true ]]; then rm -rf $dir/.tmp fi - if [ "$REMOVELOG" = true ]; then - rm -rf $dir/.log - fi + if [[ "$REMOVELOG" = true ]]; then + rm -rf $dir/.log + fi - if [ -n "$dir_output" ]; then + if [[ -n "$dir_output" ]]; then output finaldir=$dir_output else finaldir=$dir fi #Zip the output folder and send it via tg/discord/slack - if [ "$SENDZIPNOTIFY" = true ]; then + if [[ "$SENDZIPNOTIFY" = true ]]; then zipSnedOutputFolder fi global_end=$(date +%s) @@ -2328,7 +2617,7 @@ function end(){ ########################################### MODES & MENUS ##################################################### ############################################################################################################### -function passive(){ +function passive() { start domain_info ip_info @@ -2345,7 +2634,7 @@ function passive(){ SUBREGEXPERMUTE=false SUB_RECURSIVE_BRUTE=false WEBPROBESIMPLE=false - if [ "$AXIOM" = true ]; then + if [[ "$AXIOM" = true ]]; then axiom_lauch axiom_selected fi @@ -2356,22 +2645,22 @@ function passive(){ cdnprovider PORTSCAN_ACTIVE=false portscan - - if [ "$AXIOM" = true ]; then + + if [[ "$AXIOM" = true ]]; then axiom_shutdown fi end } -function all(){ +function all() { start recon vulns end } -function osint(){ +function osint() { domain_info ip_info emails @@ -2383,8 +2672,8 @@ function osint(){ favicon } -function vulns(){ - if [ "$VULNS_GENERAL" = true ]; then +function vulns() { + if [[ "$VULNS_GENERAL" = true ]]; then cors open_redirect ssrf_checks @@ -2405,19 +2694,19 @@ function vulns(){ fi } -function multi_osint(){ +function multi_osint() { global_start=$(date +%s) - if [ "$NOTIFICATION" = true ]; then + if [[ "$NOTIFICATION" = true ]]; then NOTIFY="notify -silent" else - NOTIFY="" + NOTIFY="" fi #[[ -n "$domain" ]] && ipcidr_target $domain - if [ -s "$list" ]; then + if [[ -s "$list" ]]; then sed -i 's/\r$//' $list targets=$(cat $list) else @@ -2425,28 +2714,37 @@ function multi_osint(){ exit fi - workdir=$SCRIPTPATH/Recon/$multi - mkdir -p $workdir || { echo "Failed to create directory '$workdir' in ${FUNCNAME[0]} @ line ${LINENO}"; exit 1; } - cd "$workdir" || { echo "Failed to cd 
directory '$workdir' in ${FUNCNAME[0]} @ line ${LINENO}"; exit 1; } + workdir=${SCRIPTPATH}/Recon/$multi + mkdir -p $workdir || { + echo "Failed to create directory '$workdir' in ${FUNCNAME[0]} @ line ${LINENO}" + exit 1 + } + cd "$workdir" || { + echo "Failed to cd directory '$workdir' in ${FUNCNAME[0]} @ line ${LINENO}" + exit 1 + } mkdir -p .tmp .called_fn osint subdomains webs hosts vulns NOW=$(date +"%F") NOWT=$(date +"%T") LOGFILE="${workdir}/.log/${NOW}_${NOWT}.txt" touch .log/${NOW}_${NOWT}.txt - echo "Start ${NOW} ${NOWT}" > "${LOGFILE}" + echo "Start ${NOW} ${NOWT}" >"${LOGFILE}" for domain in $targets; do dir=$workdir/targets/$domain called_fn_dir=$dir/.called_fn mkdir -p $dir - cd "$dir" || { echo "Failed to cd directory '$dir' in ${FUNCNAME[0]} @ line ${LINENO}"; exit 1; } + cd "$dir" || { + echo "Failed to cd directory '$dir' in ${FUNCNAME[0]} @ line ${LINENO}" + exit 1 + } mkdir -p .tmp .called_fn osint subdomains webs hosts vulns NOW=$(date +"%F") NOWT=$(date +"%T") LOGFILE="${dir}/.log/${NOW}_${NOWT}.txt" touch .log/${NOW}_${NOWT}.txt - echo "Start ${NOW} ${NOWT}" > "${LOGFILE}" + echo "Start ${NOW} ${NOWT}" >"${LOGFILE}" domain_info ip_info emails @@ -2457,14 +2755,16 @@ function multi_osint(){ zonetransfer favicon done - cd "$workdir" || { echo "Failed to cd directory '$workdir' in ${FUNCNAME[0]} @ line ${LINENO}"; exit 1; } + cd "$workdir" || { + echo "Failed to cd directory '$workdir' in ${FUNCNAME[0]} @ line ${LINENO}" + exit 1 + } dir=$workdir domain=$multi end } - -function recon(){ +function recon() { domain_info ip_info emails @@ -2475,7 +2775,7 @@ function recon(){ zonetransfer favicon - if [ "$AXIOM" = true ]; then + if [[ "$AXIOM" = true ]]; then axiom_lauch axiom_selected fi @@ -2486,7 +2786,7 @@ function recon(){ remove_big_files s3buckets screenshot -# virtualhosts + # virtualhosts cdnprovider portscan waf_checks @@ -2495,7 +2795,7 @@ function recon(){ urlchecks jschecks - if [ "$AXIOM" = true ]; then + if [[ "$AXIOM" = true ]]; then axiom_shutdown fi @@ -2507,52 +2807,60 @@ function recon(){ url_ext } -function multi_recon(){ - +function multi_recon() { global_start=$(date +%s) - if [ "$NOTIFICATION" = true ]; then + if [[ "$NOTIFICATION" = true ]]; then NOTIFY="notify -silent" else - NOTIFY="" + NOTIFY="" fi #[[ -n "$domain" ]] && ipcidr_target $domain - if [ -s "$list" ]; then - sed -i 's/\r$//' $list + if [[ -s "$list" ]]; then + sed -i 's/\r$//' $list targets=$(cat $list) else notification "Target list not provided" error exit fi - workdir=$SCRIPTPATH/Recon/$multi - mkdir -p $workdir || { echo "Failed to create directory '$workdir' in ${FUNCNAME[0]} @ line ${LINENO}"; exit 1; } - cd "$workdir" || { echo "Failed to cd directory '$workdir' in ${FUNCNAME[0]} @ line ${LINENO}"; exit 1; } + workdir=${SCRIPTPATH}/Recon/$multi + mkdir -p $workdir || { + echo "Failed to create directory '$workdir' in ${FUNCNAME[0]} @ line ${LINENO}" + exit 1 + } + cd "$workdir" || { + echo "Failed to cd directory '$workdir' in ${FUNCNAME[0]} @ line ${LINENO}" + exit 1 + } mkdir -p .tmp .log .called_fn osint subdomains webs hosts vulns NOW=$(date +"%F") NOWT=$(date +"%T") LOGFILE="${workdir}/.log/${NOW}_${NOWT}.txt" touch .log/${NOW}_${NOWT}.txt - echo "Start ${NOW} ${NOWT}" > "${LOGFILE}" + echo "Start ${NOW} ${NOWT}" >"${LOGFILE}" - [ -n "$flist" ] && LISTTOTAL=$(cat "$flist" | wc -l ) + [ -n "$flist" ] && LISTTOTAL=$(cat "$flist" | wc -l) for domain in $targets; do dir=$workdir/targets/$domain called_fn_dir=$dir/.called_fn mkdir -p $dir - cd "$dir" || { echo "Failed to cd 
directory '$dir' in ${FUNCNAME[0]} @ line ${LINENO}"; exit 1; } + cd "$dir" || { + echo "Failed to cd directory '$dir' in ${FUNCNAME[0]} @ line ${LINENO}" + exit 1 + } mkdir -p .tmp .log .called_fn osint subdomains webs hosts vulns NOW=$(date +"%F") NOWT=$(date +"%T") LOGFILE="${dir}/.log/${NOW}_${NOWT}.txt" touch .log/${NOW}_${NOWT}.txt - echo "Start ${NOW} ${NOWT}" > "${LOGFILE}" + echo "Start ${NOW} ${NOWT}" >"${LOGFILE}" loopstart=$(date +%s) domain_info @@ -2569,15 +2877,18 @@ function multi_recon(){ getElapsedTime $loopstart $loopend printf "${bgreen}#######################################################################${reset}\n" printf "${bgreen} $domain finished 1st loop in ${runtime} $currently ${reset}\n" - if [ -n "$flist" ]; then + if [[ -n "$flist" ]]; then POSINLIST=$(eval grep -nrE "^$domain$" "$flist" | cut -f1 -d':') printf "\n${yellow} $domain is $POSINLIST of $LISTTOTAL${reset}\n" fi printf "${bgreen}#######################################################################${reset}\n" done - cd "$workdir" || { echo "Failed to cd directory '$workdir' in ${FUNCNAME[0]} @ line ${LINENO}"; exit 1; } + cd "$workdir" || { + echo "Failed to cd directory '$workdir' in ${FUNCNAME[0]} @ line ${LINENO}" + exit 1 + } - if [ "$AXIOM" = true ]; then + if [[ "$AXIOM" = true ]]; then axiom_lauch axiom_selected fi @@ -2586,13 +2897,16 @@ function multi_recon(){ loopstart=$(date +%s) dir=$workdir/targets/$domain called_fn_dir=$dir/.called_fn - cd "$dir" || { echo "Failed to cd directory '$dir' in ${FUNCNAME[0]} @ line ${LINENO}"; exit 1; } + cd "$dir" || { + echo "Failed to cd directory '$dir' in ${FUNCNAME[0]} @ line ${LINENO}" + exit 1 + } subdomains_full webprobe_full subtakeover remove_big_files screenshot -# virtualhosts + # virtualhosts cdnprovider portscan currently=$(date +"%H:%M:%S") @@ -2600,13 +2914,16 @@ function multi_recon(){ getElapsedTime $loopstart $loopend printf "${bgreen}#######################################################################${reset}\n" printf "${bgreen} $domain finished 2nd loop in ${runtime} $currently ${reset}\n" - if [ -n "$flist" ]; then + if [[ -n "$flist" ]]; then POSINLIST=$(eval grep -nrE "^$domain$" "$flist" | cut -f1 -d':') printf "\n${yellow} $domain is $POSINLIST of $LISTTOTAL${reset}\n" fi printf "${bgreen}#######################################################################${reset}\n" done - cd "$workdir" || { echo "Failed to cd directory '$workdir' in ${FUNCNAME[0]} @ line ${LINENO}"; exit 1; } + cd "$workdir" || { + echo "Failed to cd directory '$workdir' in ${FUNCNAME[0]} @ line ${LINENO}" + exit 1 + } notification "############################# Total data ############################" info NUMOFLINES_users_total=$(find . -type f -name 'users.txt' -exec cat {} + | anew osint/users.txt | sed '/^$/d' | wc -l) @@ -2619,9 +2936,9 @@ function multi_recon(){ NUMOFLINES_webs_total=$(find . -type f -name 'webs_uncommon_ports.txt' -exec cat {} + | anew webs/webs_uncommon_ports.txt | sed '/^$/d' | wc -l) NUMOFLINES_ips_total=$(find . -type f -name 'ips.txt' -exec cat {} + | anew hosts/ips.txt | sed '/^$/d' | wc -l) NUMOFLINES_cloudsprov_total=$(find . -type f -name 'cdn_providers.txt' -exec cat {} + | anew hosts/cdn_providers.txt | sed '/^$/d' | wc -l) - find . -type f -name 'portscan_active.txt' -exec cat {} + | tee -a hosts/portscan_active.txt >> "$LOGFILE" 2>&1 >/dev/null + find . -type f -name 'portscan_active.txt' -exec cat {} + | tee -a hosts/portscan_active.txt >>"$LOGFILE" 2>&1 >/dev/null find . 
-type f -name 'portscan_active.gnmap' -exec cat {} + | tee hosts/portscan_active.gnmap 2>>"$LOGFILE" >/dev/null - find . -type f -name 'portscan_passive.txt' -exec cat {} + | tee hosts/portscan_passive.txt 2>&1 >> "$LOGFILE" >/dev/null + find . -type f -name 'portscan_passive.txt' -exec cat {} + | tee hosts/portscan_passive.txt 2>&1 >>"$LOGFILE" >/dev/null notification "- ${NUMOFLINES_users_total} total users found" good notification "- ${NUMOFLINES_pwndb_total} total creds leaked" good @@ -2639,7 +2956,10 @@ function multi_recon(){ loopstart=$(date +%s) dir=$workdir/targets/$domain called_fn_dir=$dir/.called_fn - cd "$dir" || { echo "Failed to cd directory '$dir' in ${FUNCNAME[0]} @ line ${LINENO}"; exit 1; } + cd "$dir" || { + echo "Failed to cd directory '$dir' in ${FUNCNAME[0]} @ line ${LINENO}" + exit 1 + } loopstart=$(date +%s) fuzz urlchecks @@ -2649,22 +2969,25 @@ function multi_recon(){ getElapsedTime $loopstart $loopend printf "${bgreen}#######################################################################${reset}\n" printf "${bgreen} $domain finished 3rd loop in ${runtime} $currently ${reset}\n" - if [ -n "$flist" ]; then + if [[ -n "$flist" ]]; then POSINLIST=$(eval grep -nrE "^$domain$" "$flist" | cut -f1 -d':') printf "\n${yellow} $domain is $POSINLIST of $LISTTOTAL${reset}\n" fi printf "${bgreen}#######################################################################${reset}\n" done - if [ "$AXIOM" = true ]; then + if [[ "$AXIOM" = true ]]; then axiom_shutdown fi for domain in $targets; do loopstart=$(date +%s) dir=$workdir/targets/$domain - called_fn_dir=$dir/.called_fn - cd "$dir" || { echo "Failed to cd directory '$dir' in ${FUNCNAME[0]} @ line ${LINENO}"; exit 1; } + called_fn_dir=$dir/.called_fn + cd "$dir" || { + echo "Failed to cd directory '$dir' in ${FUNCNAME[0]} @ line ${LINENO}" + exit 1 + } cms_scanner url_gf wordlist_gen @@ -2676,22 +2999,25 @@ function multi_recon(){ getElapsedTime $loopstart $loopend printf "${bgreen}#######################################################################${reset}\n" printf "${bgreen} $domain finished final loop in ${runtime} $currently ${reset}\n" - if [ -n "$flist" ]; then + if [[ -n "$flist" ]]; then POSINLIST=$(eval grep -nrE "^$domain$" "$flist" | cut -f1 -d':') printf "\n${yellow} $domain is $POSINLIST of $LISTTOTAL${reset}\n" fi printf "${bgreen}#######################################################################${reset}\n" done - cd "$workdir" || { echo "Failed to cd directory '$workdir' in ${FUNCNAME[0]} @ line ${LINENO}"; exit 1; } + cd "$workdir" || { + echo "Failed to cd directory '$workdir' in ${FUNCNAME[0]} @ line ${LINENO}" + exit 1 + } dir=$workdir domain=$multi end } -function subs_menu(){ +function subs_menu() { start - if [ "$AXIOM" = true ]; then + if [[ "$AXIOM" = true ]]; then axiom_lauch axiom_selected fi @@ -2701,22 +3027,22 @@ function subs_menu(){ subtakeover remove_big_files screenshot -# virtualhosts + # virtualhosts zonetransfer s3buckets - if [ "$AXIOM" = true ]; then + if [[ "$AXIOM" = true ]]; then axiom_shutdown fi end } -function webs_menu(){ +function webs_menu() { subtakeover remove_big_files screenshot -# virtualhosts + # virtualhosts waf_checks nuclei_check cms_scanner @@ -2732,7 +3058,7 @@ function webs_menu(){ end } -function help(){ +function help() { printf "\n Usage: $0 [-d domain.tld] [-m name] [-l list.txt] [-x oos.txt] [-i in.txt] " printf "\n [-r] [-s] [-p] [-a] [-w] [-n] [-i] [-h] [-f] [--deep] [-o OUTPUT]\n\n" printf " ${bblue}TARGET OPTIONS${reset}\n" @@ -2794,23 +3120,25 @@ 
function help(){ # webserver initialization, thanks @lur1el, @d3vchac, @mx61tt and @dd4n1b0y <3 - -function webserver(){ +function webserver() { printf "${bgreen} Web Interface by @lur1el, @d3vchac, @mx61tt and @dd4n1b0y ${reset}\n" ver=$(python3 -V 2>&1 | sed 's/.* \([0-9]\).\([0-9]\).*/\1\2/') - - if [ "$ver" -lt "31" ]; then - echo "The web interface requires python 3.10 or greater" - exit 1 - fi - if [ "$1" == "start" ]; then - ipAddress=$(curl -s ifconfig.me) + if [[ "$ver" -lt "31" ]]; then + echo "The web interface requires python 3.10 or greater" + exit 1 + fi - if [ "$ipAddress" != "" ]; then + if [[ "$1" == "start" ]]; then + ipAddress=$(curl -s ifconfig.me) + + if [[ "$ipAddress" != "" ]]; then printf "\n ${bblue}Starting web server... ${reset}\n" - cd $SCRIPTPATH/web || { echo "Failed to cd to $dir in ${FUNCNAME[0]} @ line ${LINENO}"; exit 1; } - $SUDO source $SCRIPTPATH/web/.venv/bin/activate + cd ${SCRIPTPATH}/web || { + echo "Failed to cd to $dir in ${FUNCNAME[0]} @ line ${LINENO}" + exit 1 + } + $SUDO source ${SCRIPTPATH}/web/.venv/bin/activate $SUDO screen -S ReconftwWebserver -X kill &>/dev/null $SUDO screen -dmS ReconftwWebserver python3 manage.py runserver $ipAddress:8001 &>/dev/null $SUDO service redis-server start &>/dev/null @@ -2824,7 +3152,7 @@ function webserver(){ printf "\n" printf " ${bblue}Check if the server has internet connection.${reset}\n" fi - elif [ "$1" == "stop" ]; then + elif [[ "$1" == "stop" ]]; then printf "\n ${bblue}Stoping web server... ${reset}\n" # $SUDO service postgresql stop $SUDO screen -S ReconftwWebserver -X kill &>/dev/null @@ -2844,180 +3172,189 @@ function webserver(){ ############################################################################################################### # macOS PATH initialization, thanks @0xtavian <3 -if [[ "$OSTYPE" == "darwin"* ]]; then +if [[ $OSTYPE == "darwin"* ]]; then PATH="/usr/local/opt/gnu-getopt/bin:$PATH" PATH="/usr/local/opt/coreutils/libexec/gnubin:$PATH" fi PROGARGS=$(getopt -o 'd:m:l:x:i:o:f:q:c:rspanwvh::' --long 'domain:,list:,recon,subdomains,passive,all,web,osint,deep,web-server,help,vps' -n 'reconFTW' -- "$@") - # Note the quotes around "$PROGARGS": they are essential! 
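# Hedged illustration of the pattern (the invocation below is hypothetical, not from the patch):
#   ./reconftw.sh -d example.com -r --deep
# getopt normalizes those arguments into a single quoted string, roughly:
#   PROGARGS=" -d 'example.com' -r --deep --"
# and the quoted eval that follows re-splits it back into "$1" "$2" ... so the
# while/case loop can consume options with shift; without the quotes, values
# containing spaces or shell metacharacters could be word-split and mangled.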
eval set -- "$PROGARGS" unset PROGARGS while true; do - case "$1" in - '-d'|'--domain') - domain=$2 - ipcidr_target $2 - shift 2 - continue - ;; - '-m') - multi=$2 - shift 2 - continue - ;; - '-l'|'--list') - list=$2 - for t in $(cat $list); do - ipcidr_target $t $list - done - shift 2 - continue - ;; - '-x') - outOfScope_file=$2 - shift 2 - continue - ;; - '-i') - inScope_file=$2 - shift 2 - continue - ;; - - # modes - '-r'|'--recon') - opt_mode='r' - shift - continue - ;; - '-s'|'--subdomains') - opt_mode='s' - shift - continue - ;; - '-p'|'--passive') - opt_mode='p' - shift - continue - ;; - '-a'|'--all') - opt_mode='a' - shift - continue - ;; - '-w'|'--web') - opt_mode='w' - shift - continue - ;; - '-n'|'--osint') - opt_mode='n' - shift - continue - ;; - '-c'|'--custom') - custom_function=$2 - opt_mode='c' - shift 2 - continue - ;; - # extra stuff - '-o') - if [[ "$2" != /* ]]; then - dir_output=$PWD/$2 - else - dir_output=$2 - fi - shift 2 - continue - ;; - '-v'|'--vps') - which axiom-ls &>/dev/null || { printf "\n Axiom is needed for this mode and is not installed \n You have to install it manually \n" && exit; allinstalled=false;} - AXIOM=true - shift - continue - ;; - '-f') - CUSTOM_CONFIG=$2 - shift 2 - continue - ;; - '-q') - rate_limit=$2 - shift 2 - continue - ;; - '--deep') - opt_deep=true - shift - continue - ;; - - '--') - shift - break - ;; - '--web-server') - . ./reconftw.cfg - banner - webserver $3 - exit 1 - ;; - '--help'| '-h'| *) - # echo "Unknown argument: $1" - . ./reconftw.cfg - banner - help - tools_installed - exit 1 - ;; - esac + case "$1" in + '-d' | '--domain') + domain=$2 + ipcidr_target $2 + shift 2 + continue + ;; + '-m') + multi=$2 + shift 2 + continue + ;; + '-l' | '--list') + list=$2 + for t in $(cat $list); do + ipcidr_target $t $list + done + shift 2 + continue + ;; + '-x') + outOfScope_file=$2 + shift 2 + continue + ;; + '-i') + inScope_file=$2 + shift 2 + continue + ;; + + # modes + '-r' | '--recon') + opt_mode='r' + shift + continue + ;; + '-s' | '--subdomains') + opt_mode='s' + shift + continue + ;; + '-p' | '--passive') + opt_mode='p' + shift + continue + ;; + '-a' | '--all') + opt_mode='a' + shift + continue + ;; + '-w' | '--web') + opt_mode='w' + shift + continue + ;; + '-n' | '--osint') + opt_mode='n' + shift + continue + ;; + '-c' | '--custom') + custom_function=$2 + opt_mode='c' + shift 2 + continue + ;; + # extra stuff + '-o') + if [[ $2 != /* ]]; then + dir_output=$PWD/$2 + else + dir_output=$2 + fi + shift 2 + continue + ;; + '-v' | '--vps') + command -v axiom-ls &>/dev/null || { + printf "\n Axiom is needed for this mode and is not installed \n You have to install it manually \n" && exit + allinstalled=false + } + AXIOM=true + shift + continue + ;; + '-f') + CUSTOM_CONFIG=$2 + shift 2 + continue + ;; + '-q') + rate_limit=$2 + shift 2 + continue + ;; + '--deep') + opt_deep=true + shift + continue + ;; + + '--') + shift + break + ;; + '--web-server') + . ./reconftw.cfg + banner + webserver $3 + exit 1 + ;; + '--help' | '-h' | *) + # echo "Unknown argument: $1" + . ./reconftw.cfg + banner + help + tools_installed + exit 1 + ;; + esac done # This is the first thing to do to read in alternate config -SCRIPTPATH="$( cd "$(dirname "$0")" >/dev/null 2>&1 || exit ; pwd -P )" -. "$SCRIPTPATH"/reconftw.cfg || { echo "Error importing reconftw.ctg"; exit 1; } -if [ -s "$CUSTOM_CONFIG" ]; then -# shellcheck source=/home/six2dez/Tools/reconftw/custom_config.cfg -. 
"${CUSTOM_CONFIG}" || { echo "Error importing reconftw.ctg"; exit 1; } +SCRIPTPATH="$( + cd "$(dirname "$0")" >/dev/null 2>&1 || exit + pwd -P +)" +. "${SCRIPTPATH}"/reconftw.cfg || { + echo "Error importing reconftw.ctg" + exit 1 +} +if [[ -s "$CUSTOM_CONFIG" ]]; then + # shellcheck source=/home/six2dez/Tools/reconftw/custom_config.cfg + . "${CUSTOM_CONFIG}" || { + echo "Error importing reconftw.ctg" + exit 1 + } fi -if [ $opt_deep ]; then - DEEP=true +if [[ $opt_deep ]]; then + DEEP=true fi -if [ $rate_limit ]; then - NUCLEI_RATELIMIT=$rate_limit +if [[ $rate_limit ]]; then + NUCLEI_RATELIMIT=$rate_limit FFUF_RATELIMIT=$rate_limit HTTPX_RATELIMIT=$rate_limit fi -if [ -n "$outOfScope_file" ]; then - isAsciiText $outOfScope_file - if [ "False" = "$IS_ASCII" ] - then - printf "\n\n${bred} Out of Scope file is not a text file${reset}\n\n" - exit - fi +if [[ -n "$outOfScope_file" ]]; then + isAsciiText $outOfScope_file + if [[ "False" = "$IS_ASCII" ]]; then + printf "\n\n${bred} Out of Scope file is not a text file${reset}\n\n" + exit + fi fi -if [ -n "$inScope_file" ]; then - isAsciiText $inScope_file - if [ "False" = "$IS_ASCII" ] - then - printf "\n\n${bred} In Scope file is not a text file${reset}\n\n" - exit - fi +if [[ -n "$inScope_file" ]]; then + isAsciiText $inScope_file + if [[ "False" = "$IS_ASCII" ]]; then + printf "\n\n${bred} In Scope file is not a text file${reset}\n\n" + exit + fi fi if [[ $(id -u | grep -o '^0$') == "0" ]]; then - SUDO=" " + SUDO=" " else - SUDO="sudo" + SUDO="sudo" fi startdir=${PWD} @@ -3027,12 +3364,12 @@ banner check_version startdir=${PWD} -if [ -n "$list" ]; then - if [[ "$list" = ./* ]]; then +if [[ -n "$list" ]]; then + if [[ $list == ./* ]]; then flist="${startdir}/${list:2}" - elif [[ "$list" = ~* ]]; then + elif [[ $list == ~* ]]; then flist="${HOME}/${list:2}" - elif [[ "$list" = /* ]]; then + elif [[ $list == /* ]]; then flist=$list else flist="$startdir/$list" @@ -3042,122 +3379,127 @@ else fi case $opt_mode in - 'r') - if [ -n "$multi" ]; then - if [ "$AXIOM" = true ]; then - mode="multi_recon" - fi - multi_recon - exit - fi - if [ -n "$list" ]; then - if [ "$AXIOM" = true ]; then - mode="list_recon" - fi - sed -i 's/\r$//' $list - for domain in $(cat $list); do - start - recon - end - done - else - if [ "$AXIOM" = true ]; then - mode="recon" - fi - start - recon - end - fi - ;; - 's') - if [ -n "$list" ]; then - if [ "$AXIOM" = true ]; then - mode="subs_menu" - fi - sed -i 's/\r$//' $list - for domain in $(cat $list); do - subs_menu - done - else - subs_menu - fi - ;; - 'p') - if [ -n "$list" ]; then - if [ "$AXIOM" = true ]; then - mode="passive" - fi - sed -i 's/\r$//' $list - for domain in $(cat $list); do - passive - done - else - passive - fi - ;; - 'a') - export VULNS_GENERAL=true - if [ -n "$list" ]; then - if [ "$AXIOM" = true ]; then - mode="all" - fi - sed -i 's/\r$//' $list - for domain in $(cat $list); do - all - done - else - all - fi - ;; - 'w') - if [ -n "$list" ]; then - start - if [[ "$list" = /* ]]; then - cp $list $dir/webs/webs.txt - else - cp $SCRIPTPATH/$list $dir/webs/webs.txt - fi - else - printf "\n\n${bred} Web mode needs a website list file as target (./reconftw.sh -l target.txt -w) ${reset}\n\n" - exit - fi - webs_menu - exit - ;; - 'n') - PRESERVE=true - if [ -n "$multi" ]; then - multi_osint - exit - fi - if [ -n "$list" ]; then - sed -i 's/\r$//' $list - while IFS= read -r domain; do - start - osint - end - done - else - start - osint - end - fi - ;; - 'c') - export DIFF=true - dir="$SCRIPTPATH/Recon/$domain" - 
cd $dir || { echo "Failed to cd directory '$dir'"; exit 1; } - LOGFILE="${dir}/.log/${NOW}_${NOWT}.txt" - called_fn_dir=$dir/.called_fn - $custom_function - cd $SCRIPTPATH || { echo "Failed to cd directory '$dir'"; exit 1; } - exit - ;; - # No mode selected. EXIT! - *) - help - tools_installed - exit 1 - ;; +'r') + if [[ -n "$multi" ]]; then + if [[ "$AXIOM" = true ]]; then + mode="multi_recon" + fi + multi_recon + exit + fi + if [[ -n "$list" ]]; then + if [[ "$AXIOM" = true ]]; then + mode="list_recon" + fi + sed -i 's/\r$//' $list + for domain in $(cat $list); do + start + recon + end + done + else + if [[ "$AXIOM" = true ]]; then + mode="recon" + fi + start + recon + end + fi + ;; +'s') + if [[ -n "$list" ]]; then + if [[ "$AXIOM" = true ]]; then + mode="subs_menu" + fi + sed -i 's/\r$//' $list + for domain in $(cat $list); do + subs_menu + done + else + subs_menu + fi + ;; +'p') + if [[ -n "$list" ]]; then + if [[ "$AXIOM" = true ]]; then + mode="passive" + fi + sed -i 's/\r$//' $list + for domain in $(cat $list); do + passive + done + else + passive + fi + ;; +'a') + export VULNS_GENERAL=true + if [[ -n "$list" ]]; then + if [[ "$AXIOM" = true ]]; then + mode="all" + fi + sed -i 's/\r$//' $list + for domain in $(cat $list); do + all + done + else + all + fi + ;; +'w') + if [[ -n "$list" ]]; then + start + if [[ $list == /* ]]; then + cp $list $dir/webs/webs.txt + else + cp ${SCRIPTPATH}/$list $dir/webs/webs.txt + fi + else + printf "\n\n${bred} Web mode needs a website list file as target (./reconftw.sh -l target.txt -w) ${reset}\n\n" + exit + fi + webs_menu + exit + ;; +'n') + PRESERVE=true + if [[ -n "$multi" ]]; then + multi_osint + exit + fi + if [[ -n "$list" ]]; then + sed -i 's/\r$//' $list + while IFS= read -r domain; do + start + osint + end + done + else + start + osint + end + fi + ;; +'c') + export DIFF=true + dir="${SCRIPTPATH}/Recon/$domain" + cd $dir || { + echo "Failed to cd directory '$dir'" + exit 1 + } + LOGFILE="${dir}/.log/${NOW}_${NOWT}.txt" + called_fn_dir=$dir/.called_fn + $custom_function + cd ${SCRIPTPATH} || { + echo "Failed to cd directory '$dir'" + exit 1 + } + exit + ;; + # No mode selected. EXIT! +*) + help + tools_installed + exit 1 + ;; esac - From 04fd3b7d83886b4a69aa60ec21df706aa90b92d1 Mon Sep 17 00:00:00 2001 From: six2dez Date: Fri, 10 Nov 2023 23:57:46 +0100 Subject: [PATCH 07/17] Added spinner and better format --- .gitmodules | 3 + assets/spinny | 1 + reconftw.cfg | 2 + reconftw.sh | 776 +++++++++++++++++++++++++++----------------------- 4 files changed, 422 insertions(+), 360 deletions(-) create mode 100644 .gitmodules create mode 160000 assets/spinny diff --git a/.gitmodules b/.gitmodules new file mode 100644 index 00000000..ad953d50 --- /dev/null +++ b/.gitmodules @@ -0,0 +1,3 @@ +[submodule "assets/spinny"] + path = assets/spinny + url = https://github.com/hschne/spinny diff --git a/assets/spinny b/assets/spinny new file mode 160000 index 00000000..2e0a8cca --- /dev/null +++ b/assets/spinny @@ -0,0 +1 @@ +Subproject commit 2e0a8cca7e49a0d16262939c2a1c5f57719224b5 diff --git a/reconftw.cfg b/reconftw.cfg index 40be4fdc..16886182 100644 --- a/reconftw.cfg +++ b/reconftw.cfg @@ -16,6 +16,8 @@ proxy_url="http://127.0.0.1:8080/" # Proxy url install_golang=true # Set it to false if you already have Golang configured and ready upgrade_tools=true upgrade_before_running=false # Upgrade tools before running +#SPINNY_FRAMES=(. .. ... .... ..... " ...." " ..." " .." " ." 
" " ) +SPINNY_DELAY=0.1 #dir_output=/custom/output/path # Golang Vars (Comment or change on your own) diff --git a/reconftw.sh b/reconftw.sh index 36599bab..e2b37c89 100755 --- a/reconftw.sh +++ b/reconftw.sh @@ -20,11 +20,11 @@ function banner() { function check_version() { timeout 10 git fetch exit_status=$? - if [[ "${exit_status}" -eq 0 ]]; then + if [[ ${exit_status} -eq 0 ]]; then BRANCH=$(git rev-parse --abbrev-ref HEAD) HEADHASH=$(git rev-parse HEAD) UPSTREAMHASH=$(git rev-parse "${BRANCH}"@\{upstream\}) - if [[ "${HEADHASH}" != "${UPSTREAMHASH}" ]]; then + if [[ ${HEADHASH} != "${UPSTREAMHASH}" ]]; then printf "\n${yellow} There is a new version, run ./install.sh to get latest version${reset}\n\n" fi else @@ -33,6 +33,7 @@ function check_version() { } function tools_installed() { + spinny::start printf "\n\n${bgreen}#######################################################################${reset}\n" printf "${bblue} Checking installed tools ${reset}\n\n" @@ -352,7 +353,7 @@ function tools_installed() { allinstalled=false } - if [[ "${allinstalled}" = true ]]; then + if [[ ${allinstalled} == true ]]; then printf "${bgreen} Good! All installed! ${reset}\n\n" else printf "\n${yellow} Try running the installer script again ./install.sh" @@ -363,6 +364,7 @@ function tools_installed() { printf "${bblue} Tools check finished\n" printf "${bgreen}#######################################################################\n${reset}" + spinny::stop } ############################################################################################################### @@ -370,26 +372,29 @@ function tools_installed() { ############################################################################################################### function google_dorks() { - if { [[ ! -f "$called_fn_dir/.${FUNCNAME[0]}" ]] || [[ "$DIFF" = true ]]; } && [[ "$GOOGLE_DORKS" = true ]] && [[ "$OSINT" = true ]]; then + spinny::start + if { [[ ! -f "$called_fn_dir/.${FUNCNAME[0]}" ]] || [[ $DIFF == true ]]; } && [[ $GOOGLE_DORKS == true ]] && [[ $OSINT == true ]]; then python3 ${tools}/dorks_hunter/dorks_hunter.py -d "$domain" -o osint/dorks.txt || { echo "dorks_hunter command failed" exit 1 } end_func "Results are saved in $domain/osint/dorks.txt" "${FUNCNAME[0]}" else - if [[ "$GOOGLE_DORKS" = false ]] || [[ "$OSINT" = false ]]; then + if [[ $GOOGLE_DORKS == false ]] || [[ $OSINT == false ]]; then printf "\n${yellow} ${FUNCNAME[0]} skipped in this mode or defined in reconftw.cfg ${reset}\n" else printf "${yellow} ${FUNCNAME[0]} are already processed, to force executing ${FUNCNAME[0]} delete\n $called_fn_dir/.${FUNCNAME[0]} ${reset}\n\n" fi fi + spinny::stop } function github_dorks() { - if { [[ ! -f "$called_fn_dir/.${FUNCNAME[0]}" ]] || [[ "$DIFF" = true ]]; } && [[ "$GITHUB_DORKS" = true ]] && [[ "$OSINT" = true ]]; then + spinny::start + if { [[ ! 
-f "$called_fn_dir/.${FUNCNAME[0]}" ]] || [[ $DIFF == true ]]; } && [[ $GITHUB_DORKS == true ]] && [[ $OSINT == true ]]; then start_func "${FUNCNAME[0]}" "Github Dorks in process" - if [[ -s "${GITHUB_TOKENS}" ]]; then - if [[ "$DEEP" = true ]]; then + if [[ -s ${GITHUB_TOKENS} ]]; then + if [[ $DEEP == true ]]; then gitdorks_go -gd ${tools}/gitdorks_go/Dorks/medium_dorks.txt -nws 20 -target "$domain" -tf "${GITHUB_TOKENS}" -ew 3 | anew -q osint/gitdorks.txt || { echo "gitdorks_go/anew command failed" exit 1 @@ -405,19 +410,21 @@ function github_dorks() { fi end_func "Results are saved in $domain/osint/gitdorks.txt" "${FUNCNAME[0]}" else - if [[ "$GITHUB_DORKS" = false ]] || [[ "$OSINT" = false ]]; then + if [[ $GITHUB_DORKS == false ]] || [[ $OSINT == false ]]; then printf "\n${yellow} ${FUNCNAME[0]} skipped in this mode or defined in reconftw.cfg ${reset}\n" else printf "${yellow} ${FUNCNAME[0]} is already processed, to force executing ${FUNCNAME[0]} delete\n $called_fn_dir/.${FUNCNAME[0]} ${reset}\n\n" fi fi + spinny::stop } function github_repos() { - if { [[ ! -f "$called_fn_dir/.${FUNCNAME[0]}" ]] || [[ "$DIFF" = true ]]; } && [[ "$GITHUB_REPOS" = true ]] && [[ "$OSINT" = true ]]; then + spinny::start + if { [[ ! -f "$called_fn_dir/.${FUNCNAME[0]}" ]] || [[ $DIFF == true ]]; } && [[ $GITHUB_REPOS == true ]] && [[ $OSINT == true ]]; then start_func "${FUNCNAME[0]}" "Github Repos analysis in process" - if [[ -s "${GITHUB_TOKENS}" ]]; then + if [[ -s ${GITHUB_TOKENS} ]]; then GH_TOKEN=$(cat ${GITHUB_TOKENS} | head -1) echo $domain | unfurl format %r >.tmp/company_name.txt enumerepo -token-string "${GH_TOKEN}" -usernames .tmp/company_name.txt -o .tmp/company_repos.txt 2>>"$LOGFILE" >/dev/null @@ -436,16 +443,18 @@ function github_repos() { fi end_func "Results are saved in $domain/osint/github_company_secrets.json" ${FUNCNAME[0]} else - if [[ "$GITHUB_REPOS" = false ]] || [[ "$OSINT" = false ]]; then + if [[ $GITHUB_REPOS == false ]] || [[ $OSINT == false ]]; then printf "\n${yellow} ${FUNCNAME[0]} skipped in this mode or defined in reconftw.cfg ${reset}\n" else printf "${yellow} ${FUNCNAME[0]} is already processed, to force executing ${FUNCNAME[0]} delete\n $called_fn_dir/.${FUNCNAME[0]} ${reset}\n\n" fi fi + spinny::stop } function metadata() { - if { [[ ! -f "${called_fn_dir}/.${FUNCNAME[0]}" ]] || [[ "${DIFF}" = true ]]; } && [[ "${METADATA}" = true ]] && [[ "${OSINT}" = true ]] && ! [[ ${domain} =~ ^[0-9]+\.[0-9]+\.[0-9]+\.[0-9] ]]; then + spinny::start + if { [[ ! -f "${called_fn_dir}/.${FUNCNAME[0]}" ]] || [[ ${DIFF} == true ]]; } && [[ ${METADATA} == true ]] && [[ ${OSINT} == true ]] && ! 
[[ ${domain} =~ ^[0-9]+\.[0-9]+\.[0-9]+\.[0-9] ]]; then start_func ${FUNCNAME[0]} "Scanning metadata in public files" metafinder -d "$domain" -l $METAFINDER_LIMIT -o osint -go -bi -ba &>>"$LOGFILE" || { echo "metafinder command failed" @@ -455,22 +464,24 @@ function metadata() { rm -rf "osint/${domain}" 2>>"$LOGFILE" end_func "Results are saved in $domain/osint/[software/authors/metadata_results].txt" ${FUNCNAME[0]} else - if [[ "$METADATA" = false ]] || [[ "$OSINT" = false ]]; then + if [[ $METADATA == false ]] || [[ $OSINT == false ]]; then printf "\n${yellow} ${FUNCNAME[0]} skipped in this mode or defined in reconftw.cfg ${reset}\n" elif [[ $domain =~ ^[0-9]+\.[0-9]+\.[0-9]+\.[0-9] ]]; then return else - if [[ "$METADATA" = false ]] || [[ "$OSINT" = false ]]; then + if [[ $METADATA == false ]] || [[ $OSINT == false ]]; then printf "\n${yellow} ${FUNCNAME[0]} skipped in this mode or defined in reconftw.cfg ${reset}\n" else printf "${yellow} ${FUNCNAME[0]} is already processed, to force executing ${FUNCNAME[0]} delete\n $called_fn_dir/.${FUNCNAME[0]} ${reset}\n\n" fi fi fi + spinny::stop } function postleaks() { - if { [[ ! -f "$called_fn_dir/.${FUNCNAME[0]}" ]] || [[ "$DIFF" = true ]]; } && [[ "$POSTMAN_LEAKS" = true ]] && [[ "$OSINT" = true ]] && ! [[ $domain =~ ^[0-9]+\.[0-9]+\.[0-9]+\.[0-9] ]]; then + spinny::start + if { [[ ! -f "$called_fn_dir/.${FUNCNAME[0]}" ]] || [[ $DIFF == true ]]; } && [[ $POSTMAN_LEAKS == true ]] && [[ $OSINT == true ]] && ! [[ $domain =~ ^[0-9]+\.[0-9]+\.[0-9]+\.[0-9] ]]; then start_func ${FUNCNAME[0]} "Scanning for leaks in postman public directory" postleaksNg -k "$domain" >.tmp/postleaks.txt || { @@ -480,22 +491,24 @@ function postleaks() { end_func "Results are saved in $domain/osint/[software/authors/metadata_results].txt" ${FUNCNAME[0]} else - if [[ "$POSTMAN_LEAKS" = false ]] || [[ "$OSINT" = false ]]; then + if [[ $POSTMAN_LEAKS == false ]] || [[ $OSINT == false ]]; then printf "\n${yellow} ${FUNCNAME[0]} skipped in this mode or defined in reconftw.cfg ${reset}\n" elif [[ $domain =~ ^[0-9]+\.[0-9]+\.[0-9]+\.[0-9] ]]; then return else - if [[ "$POSTMAN_LEAKS" = false ]] || [[ "$OSINT" = false ]]; then + if [[ $POSTMAN_LEAKS == false ]] || [[ $OSINT == false ]]; then printf "\n${yellow} ${FUNCNAME[0]} skipped in this mode or defined in reconftw.cfg ${reset}\n" else printf "${yellow} ${FUNCNAME[0]} is already processed, to force executing ${FUNCNAME[0]} delete\n $called_fn_dir/.${FUNCNAME[0]} ${reset}\n\n" fi fi fi + spinny::stop } function emails() { - if { [[ ! -f "$called_fn_dir/.${FUNCNAME[0]}" ]] || [[ "$DIFF" = true ]]; } && [[ "$EMAILS" = true ]] && [[ "$OSINT" = true ]] && ! [[ $domain =~ ^[0-9]+\.[0-9]+\.[0-9]+\.[0-9] ]]; then + spinny::start + if { [[ ! -f "$called_fn_dir/.${FUNCNAME[0]}" ]] || [[ $DIFF == true ]]; } && [[ $EMAILS == true ]] && [[ $OSINT == true ]] && ! 
[[ $domain =~ ^[0-9]+\.[0-9]+\.[0-9]+\.[0-9] ]]; then start_func ${FUNCNAME[0]} "Searching emails/users/passwords leaks" emailfinder -d $domain 2>>"$LOGFILE" | anew -q .tmp/emailfinder.txt || { echo "emailfinder command failed" @@ -505,50 +518,54 @@ function emails() { end_func "Results are saved in $domain/osint/emails.txt" ${FUNCNAME[0]} else - if [[ "$EMAILS" = false ]] || [[ "$OSINT" = false ]]; then + if [[ $EMAILS == false ]] || [[ $OSINT == false ]]; then printf "\n${yellow} ${FUNCNAME[0]} skipped in this mode or defined in reconftw.cfg ${reset}\n" elif [[ $domain =~ ^[0-9]+\.[0-9]+\.[0-9]+\.[0-9] ]]; then return else - if [[ "$EMAILS" = false ]] || [[ "$OSINT" = false ]]; then + if [[ $EMAILS == false ]] || [[ $OSINT == false ]]; then printf "\n${yellow} ${FUNCNAME[0]} skipped in this mode or defined in reconftw.cfg ${reset}\n" else printf "${yellow} ${FUNCNAME[0]} is already processed, to force executing ${FUNCNAME[0]} delete\n $called_fn_dir/.${FUNCNAME[0]} ${reset}\n\n" fi fi fi + spinny::stop } function domain_info() { - if { [[ ! -f "$called_fn_dir/.${FUNCNAME[0]}" ]] || [[ "$DIFF" = true ]]; } && [[ "$DOMAIN_INFO" = true ]] && [[ "$OSINT" = true ]] && ! [[ $domain =~ ^[0-9]+\.[0-9]+\.[0-9]+\.[0-9] ]]; then + spinny::start + if { [[ ! -f "$called_fn_dir/.${FUNCNAME[0]}" ]] || [[ $DIFF == true ]]; } && [[ $DOMAIN_INFO == true ]] && [[ $OSINT == true ]] && ! [[ $domain =~ ^[0-9]+\.[0-9]+\.[0-9]+\.[0-9] ]]; then start_func ${FUNCNAME[0]} "Searching domain info (whois, registrant name/email domains)" whois -H $domain >osint/domain_info_general.txt || { echo "whois command failed"; } - if [[ "$DEEP" = true ]] || [[ "$REVERSE_WHOIS" = true ]]; then - timeout -k 1m ${AMASS_INTEL_TIMEOUT}m amass intel -d ${domain} -whois -timeout $AMASS_INTEL_TIMEOUT -o osint/domain_info_reverse_whois.txt 2>>"$LOGFILE" >> /dev/null + if [[ $DEEP == true ]] || [[ $REVERSE_WHOIS == true ]]; then + timeout -k 1m ${AMASS_INTEL_TIMEOUT}m amass intel -d ${domain} -whois -timeout $AMASS_INTEL_TIMEOUT -o osint/domain_info_reverse_whois.txt 2>>"$LOGFILE" >>/dev/null fi curl -s "https://aadinternals.azurewebsites.net/api/tenantinfo?domainName=${domain}" -H "Origin: https://aadinternals.com" | jq -r .domains[].name >osint/azure_tenant_domains.txt end_func "Results are saved in $domain/osint/domain_info_[general/name/email/ip].txt" ${FUNCNAME[0]} else - if [[ "$DOMAIN_INFO" = false ]] || [[ "$OSINT" = false ]]; then + if [[ $DOMAIN_INFO == false ]] || [[ $OSINT == false ]]; then printf "\n${yellow} ${FUNCNAME[0]} skipped in this mode or defined in reconftw.cfg ${reset}\n" elif [[ $domain =~ ^[0-9]+\.[0-9]+\.[0-9]+\.[0-9] ]]; then return else - if [[ "$DOMAIN_INFO" = false ]] || [[ "$OSINT" = false ]]; then + if [[ $DOMAIN_INFO == false ]] || [[ $OSINT == false ]]; then printf "\n${yellow} ${FUNCNAME[0]} skipped in this mode or defined in reconftw.cfg ${reset}\n" else printf "${yellow} ${FUNCNAME[0]} is already processed, to force executing ${FUNCNAME[0]} delete\n $called_fn_dir/.${FUNCNAME[0]} ${reset}\n\n" fi fi fi + spinny::stop } function ip_info() { - if { [[ ! -f "$called_fn_dir/.${FUNCNAME[0]}" ]] || [[ "$DIFF" = true ]]; } && [[ "$IP_INFO" = true ]] && [[ "$OSINT" = true ]] && [[ $domain =~ ^[0-9]+\.[0-9]+\.[0-9]+\.[0-9] ]]; then + spinny::start + if { [[ ! 
-f "$called_fn_dir/.${FUNCNAME[0]}" ]] || [[ $DIFF == true ]]; } && [[ $IP_INFO == true ]] && [[ $OSINT == true ]] && [[ $domain =~ ^[0-9]+\.[0-9]+\.[0-9]+\.[0-9] ]]; then start_func ${FUNCNAME[0]} "Searching ip info" - if [[ -n "$WHOISXML_API" ]]; then + if [[ -n $WHOISXML_API ]]; then curl "https://reverse-ip.whoisxmlapi.com/api/v1?apiKey=${WHOISXML_API}&ip=${domain}" 2>/dev/null | jq -r '.result[].name' 2>>"$LOGFILE" | sed -e "s/$/ ${domain}/" | anew -q osint/ip_${domain}_relations.txt curl "https://www.whoisxmlapi.com/whoisserver/WhoisService?apiKey=${WHOISXML_API}&domainName=${domain}&outputFormat=json&da=2®istryRawText=1®istrarRawText=1&ignoreRawTexts=1" 2>/dev/null | jq 2>>"$LOGFILE" | anew -q osint/ip_${domain}_whois.txt curl "https://ip-geolocation.whoisxmlapi.com/api/v1?apiKey=${WHOISXML_API}&ipAddress=${domain}" 2>/dev/null | jq -r '.ip,.location' 2>>"$LOGFILE" | anew -q osint/ip_${domain}_location.txt @@ -557,18 +574,19 @@ function ip_info() { printf "\n${yellow} No WHOISXML_API var defined, skipping function ${reset}\n" fi else - if [[ "$IP_INFO" = false ]] || [[ "$OSINT" = false ]]; then + if [[ $IP_INFO == false ]] || [[ $OSINT == false ]]; then printf "\n${yellow} ${FUNCNAME[0]} skipped in this mode or defined in reconftw.cfg ${reset}\n" elif [[ ! $domain =~ ^[0-9]+\.[0-9]+\.[0-9]+\.[0-9] ]]; then return else - if [[ "$IP_INFO" = false ]] || [[ "$OSINT" = false ]]; then + if [[ $IP_INFO == false ]] || [[ $OSINT == false ]]; then printf "\n${yellow} ${FUNCNAME[0]} skipped in this mode or defined in reconftw.cfg ${reset}\n" else printf "${yellow} ${FUNCNAME[0]} is already processed, to force executing ${FUNCNAME[0]} delete\n $called_fn_dir/.${FUNCNAME[0]} ${reset}\n\n" fi fi fi + spinny::stop } ############################################################################################################### @@ -584,13 +602,13 @@ function subdomains_full() { [ -s "subdomains/subdomains.txt" ] && cp subdomains/subdomains.txt .tmp/subdomains_old.txt [ -s "webs/webs.txt" ] && cp webs/webs.txt .tmp/probed_old.txt - if ([[ ! -f "$called_fn_dir/.sub_active" ]] || [[ ! -f "$called_fn_dir/.sub_brute" ]] || [[ ! -f "$called_fn_dir/.sub_permut" ]] || [[ ! -f "$called_fn_dir/.sub_recursive_brute" ]]) || [[ "$DIFF" = true ]]; then + if ([[ ! -f "$called_fn_dir/.sub_active" ]] || [[ ! -f "$called_fn_dir/.sub_brute" ]] || [[ ! -f "$called_fn_dir/.sub_permut" ]] || [[ ! -f "$called_fn_dir/.sub_recursive_brute" ]]) || [[ $DIFF == true ]]; then resolvers_update fi [ -s "${inScope_file}" ] && cat ${inScope_file} | anew -q subdomains/subdomains.txt - if ! [[ $domain =~ ^[0-9]+\.[0-9]+\.[0-9]+\.[0-9] ]] && [[ "$SUBDOMAINS_GENERAL" = true ]]; then + if ! [[ $domain =~ ^[0-9]+\.[0-9]+\.[0-9]+\.[0-9] ]] && [[ $SUBDOMAINS_GENERAL == true ]]; then sub_passive sub_crt sub_active @@ -629,26 +647,27 @@ function subdomains_full() { } function sub_passive() { - if { [[ ! -f "$called_fn_dir/.${FUNCNAME[0]}" ]] || [[ "$DIFF" = true ]]; } && [[ "$SUBPASSIVE" = true ]]; then + spinny::start + if { [[ ! 
-f "$called_fn_dir/.${FUNCNAME[0]}" ]] || [[ $DIFF == true ]]; } && [[ $SUBPASSIVE == true ]]; then start_subfunc ${FUNCNAME[0]} "Running : Passive Subdomain Enumeration" if [[ $RUNAMASS == true ]]; then - timeout -k 1m ${AMASS_ENUM_TIMEOUT} amass enum -passive -d $domain -config $AMASS_CONFIG -timeout $AMASS_ENUM_TIMEOUT -json .tmp/amass_json.json 2>>"$LOGFILE" >> /dev/null + timeout -k 1m ${AMASS_ENUM_TIMEOUT} amass enum -passive -d $domain -config $AMASS_CONFIG -timeout $AMASS_ENUM_TIMEOUT -json .tmp/amass_json.json 2>>"$LOGFILE" >>/dev/null fi [ -s ".tmp/amass_json.json" ] && cat .tmp/amass_json.json | jq -r '.name' | anew -q .tmp/amass_psub.txt [[ $RUNSUBFINDER == true ]] && subfinder -all -d "$domain" -silent -o .tmp/subfinder_psub.txt 2>>"$LOGFILE" >/dev/null - if [[ -s "${GITHUB_TOKENS}" ]]; then - if [[ "$DEEP" = true ]]; then + if [[ -s ${GITHUB_TOKENS} ]]; then + if [[ $DEEP == true ]]; then github-subdomains -d $domain -t $GITHUB_TOKENS -o .tmp/github_subdomains_psub.txt 2>>"$LOGFILE" >/dev/null else github-subdomains -d $domain -k -q -t $GITHUB_TOKENS -o .tmp/github_subdomains_psub.txt 2>>"$LOGFILE" >/dev/null fi fi - if [[ -s "${GITLAB_TOKENS}" ]]; then + if [[ -s ${GITLAB_TOKENS} ]]; then gitlab-subdomains -d "$domain" -t "$GITLAB_TOKENS" 2>>"$LOGFILE" | tee .tmp/gitlab_subdomains_psub.txt >/dev/null fi - if [[ "$INSCOPE" = true ]]; then + if [[ $INSCOPE == true ]]; then check_inscope .tmp/amass_psub.txt 2>>"$LOGFILE" >/dev/null check_inscope .tmp/subfinder_psub.txt 2>>"$LOGFILE" >/dev/null check_inscope .tmp/github_subdomains_psub.txt 2>>"$LOGFILE" >/dev/null @@ -657,36 +676,40 @@ function sub_passive() { NUMOFLINES=$(find .tmp -type f -iname "*_psub.txt" -exec cat {} + | sed "s/*.//" | anew .tmp/passive_subs.txt | sed '/^$/d' | wc -l) end_subfunc "${NUMOFLINES} new subs (passive)" ${FUNCNAME[0]} else - if [[ "$SUBPASSIVE" = false ]]; then + if [[ $SUBPASSIVE == false ]]; then printf "\n${yellow} ${FUNCNAME[0]} skipped in this mode or defined in reconftw.cfg ${reset}\n" else printf "${yellow} ${FUNCNAME[0]} is already processed, to force executing ${FUNCNAME[0]} delete\n $called_fn_dir/.${FUNCNAME[0]} ${reset}\n\n" fi fi + spinny::stop } function sub_crt() { - if { [[ ! -f "$called_fn_dir/.${FUNCNAME[0]}" ]] || [[ "$DIFF" = true ]]; } && [[ "$SUBCRT" = true ]]; then + spinny::start + if { [[ ! -f "$called_fn_dir/.${FUNCNAME[0]}" ]] || [[ $DIFF == true ]]; } && [[ $SUBCRT == true ]]; then start_subfunc ${FUNCNAME[0]} "Running : Crtsh Subdomain Enumeration" crt -s -json -l ${CTR_LIMIT} $domain 2>>"$LOGFILE" | jq -r '.[].subdomain' 2>>"$LOGFILE" | sed -e 's/^\*\.//' | anew -q .tmp/crtsh_subs_tmp.txt 2>>"$LOGFILE" >/dev/null [[ $INSCOPE == true ]] && check_inscope .tmp/crtsh_subs_tmp.txt 2>>"$LOGFILE" >/dev/null NUMOFLINES=$(cat .tmp/crtsh_subs_tmp.txt 2>>"$LOGFILE" | sed 's/\*.//g' | anew .tmp/crtsh_subs.txt | sed '/^$/d' | wc -l) end_subfunc "${NUMOFLINES} new subs (cert transparency)" ${FUNCNAME[0]} else - if [[ "$SUBCRT" = false ]]; then + if [[ $SUBCRT == false ]]; then printf "\n${yellow} ${FUNCNAME[0]} skipped in this mode or defined in reconftw.cfg ${reset}\n" else printf "${yellow} ${FUNCNAME[0]} is already processed, to force executing ${FUNCNAME[0]} delete\n $called_fn_dir/.${FUNCNAME[0]} ${reset}\n\n" fi fi + spinny::stop } function sub_active() { - if [[ ! -f "$called_fn_dir/.${FUNCNAME[0]}" ]] || [[ "$DIFF" = true ]]; then + spinny::start + if [[ ! 
-f "$called_fn_dir/.${FUNCNAME[0]}" ]] || [[ $DIFF == true ]]; then start_subfunc ${FUNCNAME[0]} "Running : Active Subdomain Enumeration" find .tmp -type f -iname "*_subs.txt" -exec cat {} + | anew -q .tmp/subs_no_resolved.txt [ -s "$outOfScope_file" ] && deleteOutScoped $outOfScope_file .tmp/subs_no_resolved.txt - if [[ ! "$AXIOM" = true ]]; then + if [[ $AXIOM != true ]]; then resolvers_update_quick_local [ -s ".tmp/subs_no_resolved.txt" ] && puredns resolve .tmp/subs_no_resolved.txt -w .tmp/subdomains_tmp.txt -r $resolvers --resolvers-trusted $resolvers_trusted -l $PUREDNS_PUBLIC_LIMIT --rate-limit-trusted $PUREDNS_TRUSTED_LIMIT --wildcard-tests $PUREDNS_WILDCARDTEST_LIMIT --wildcard-batch $PUREDNS_WILDCARDBATCH_LIMIT 2>>"$LOGFILE" >/dev/null else @@ -694,7 +717,7 @@ function sub_active() { [ -s ".tmp/subs_no_resolved.txt" ] && axiom-scan .tmp/subs_no_resolved.txt -m puredns-resolve -r /home/op/lists/resolvers.txt --resolvers-trusted /home/op/lists/resolvers_trusted.txt --wildcard-tests $PUREDNS_WILDCARDTEST_LIMIT --wildcard-batch $PUREDNS_WILDCARDBATCH_LIMIT -o .tmp/subdomains_tmp.txt $AXIOM_EXTRA_ARGS 2>>"$LOGFILE" >/dev/null fi echo $domain | dnsx -retry 3 -silent -r $resolvers_trusted 2>>"$LOGFILE" | anew -q .tmp/subdomains_tmp.txt - if [[ "$DEEP" = true ]]; then + if [[ $DEEP == true ]]; then cat .tmp/subdomains_tmp.txt | tlsx -san -cn -silent -ro -c $TLSX_THREADS -p $TLS_PORTS | anew -q .tmp/subdomains_tmp.txt else cat .tmp/subdomains_tmp.txt | tlsx -san -cn -silent -ro -c $TLSX_THREADS | anew -q .tmp/subdomains_tmp.txt @@ -705,14 +728,16 @@ function sub_active() { else printf "${yellow} ${FUNCNAME[0]} is already processed, to force executing ${FUNCNAME[0]} delete\n $called_fn_dir/.${FUNCNAME[0]} ${reset}\n\n" fi + spinny::stop } function sub_noerror() { - if { [[ ! -f "$called_fn_dir/.${FUNCNAME[0]}" ]] || [[ "$DIFF" = true ]]; } && [[ "$SUBNOERROR" = true ]]; then + spinny::start + if { [[ ! -f "$called_fn_dir/.${FUNCNAME[0]}" ]] || [[ $DIFF == true ]]; } && [[ $SUBNOERROR == true ]]; then start_subfunc ${FUNCNAME[0]} "Running : Checking NOERROR DNS response" if [[ $(echo "${RANDOM}thistotallynotexist${RANDOM}.$domain" | dnsx -r $resolvers -rcode noerror,nxdomain -retry 3 -silent | cut -d' ' -f2) == "[NXDOMAIN]" ]]; then resolvers_update_quick_local - if [[ "$DEEP" = true ]]; then + if [[ $DEEP == true ]]; then dnsx -d $domain -r $resolvers -silent -rcode noerror -w $subs_wordlist_big | cut -d' ' -f1 | anew -q .tmp/subs_noerror.txt 2>>"$LOGFILE" >/dev/null else dnsx -d $domain -r $resolvers -silent -rcode noerror -w $subs_wordlist | cut -d' ' -f1 | anew -q .tmp/subs_noerror.txt 2>>"$LOGFILE" >/dev/null @@ -724,18 +749,20 @@ function sub_noerror() { printf "\n${yellow} Detected DNSSEC black lies, skipping this technique ${reset}\n" fi else - if [[ "$SUBBRUTE" = false ]]; then + if [[ $SUBBRUTE == false ]]; then printf "\n${yellow} ${FUNCNAME[0]} skipped in this mode or defined in reconftw.cfg ${reset}\n" else printf "${yellow} ${FUNCNAME[0]} is already processed, to force executing ${FUNCNAME[0]} delete\n $called_fn_dir/.${FUNCNAME[0]} ${reset}\n\n" fi fi + spinny::stop } function sub_dns() { - if [[ ! -f "$called_fn_dir/.${FUNCNAME[0]}" ]] || [[ "$DIFF" = true ]]; then + spinny::start + if [[ ! -f "$called_fn_dir/.${FUNCNAME[0]}" ]] || [[ $DIFF == true ]]; then start_subfunc ${FUNCNAME[0]} "Running : DNS Subdomain Enumeration and PTR search" - if [[ ! 
"$AXIOM" = true ]]; then + if [[ $AXIOM != true ]]; then [ -s "subdomains/subdomains.txt" ] && cat subdomains/subdomains.txt | dnsx -r $resolvers_trusted -a -aaaa -cname -ns -ptr -mx -soa -silent -retry 3 -json -o subdomains/subdomains_dnsregs.json 2>>"$LOGFILE" >/dev/null [ -s "subdomains/subdomains_dnsregs.json" ] && cat subdomains/subdomains_dnsregs.json | jq -r 'try .a[], try .aaaa[], try .cname[], try .ns[], try .ptr[], try .mx[], try .soa[]' 2>/dev/null | grep ".$domain$" | grep -E '^((http|https):\/\/)?([a-zA-Z0-9]([a-zA-Z0-9\-]*[a-zA-Z0-9])?\.)+[a-zA-Z]{1,}(\/.*)?$' | anew -q .tmp/subdomains_dns.txt [ -s "subdomains/subdomains_dnsregs.json" ] && cat subdomains/subdomains_dnsregs.json | jq -r 'try .a[]' | sort -u | hakip2host | cut -d' ' -f 3 | unfurl -u domains | sed -e 's/*\.//' -e 's/\.$//' -e '/\./!d' | grep ".$domain$" | grep -E '^((http|https):\/\/)?([a-zA-Z0-9]([a-zA-Z0-9\-]*[a-zA-Z0-9])?\.)+[a-zA-Z]{1,}(\/.*)?$' | anew -q .tmp/subdomains_dns.txt @@ -757,14 +784,16 @@ function sub_dns() { else printf "${yellow} ${FUNCNAME[0]} is already processed, to force executing ${FUNCNAME[0]} delete\n $called_fn_dir/.${FUNCNAME[0]} ${reset}\n\n" fi + spinny::stop } function sub_brute() { - if { [[ ! -f "$called_fn_dir/.${FUNCNAME[0]}" ]] || [[ "$DIFF" = true ]]; } && [[ "$SUBBRUTE" = true ]]; then + spinny::start + if { [[ ! -f "$called_fn_dir/.${FUNCNAME[0]}" ]] || [[ $DIFF == true ]]; } && [[ $SUBBRUTE == true ]]; then start_subfunc ${FUNCNAME[0]} "Running : Bruteforce Subdomain Enumeration" - if [[ ! "$AXIOM" = true ]]; then + if [[ $AXIOM != true ]]; then resolvers_update_quick_local - if [[ "$DEEP" = true ]]; then + if [[ $DEEP == true ]]; then puredns bruteforce $subs_wordlist_big $domain -w .tmp/subs_brute.txt -r $resolvers --resolvers-trusted $resolvers_trusted -l $PUREDNS_PUBLIC_LIMIT --rate-limit-trusted $PUREDNS_TRUSTED_LIMIT --wildcard-tests $PUREDNS_WILDCARDTEST_LIMIT --wildcard-batch $PUREDNS_WILDCARDBATCH_LIMIT 2>>"$LOGFILE" >/dev/null else puredns bruteforce $subs_wordlist $domain -w .tmp/subs_brute.txt -r $resolvers --resolvers-trusted $resolvers_trusted -l $PUREDNS_PUBLIC_LIMIT --rate-limit-trusted $PUREDNS_TRUSTED_LIMIT --wildcard-tests $PUREDNS_WILDCARDTEST_LIMIT --wildcard-batch $PUREDNS_WILDCARDBATCH_LIMIT 2>>"$LOGFILE" >/dev/null @@ -772,7 +801,7 @@ function sub_brute() { [ -s ".tmp/subs_brute.txt" ] && puredns resolve .tmp/subs_brute.txt -w .tmp/subs_brute_valid.txt -r $resolvers --resolvers-trusted $resolvers_trusted -l $PUREDNS_PUBLIC_LIMIT --rate-limit-trusted $PUREDNS_TRUSTED_LIMIT --wildcard-tests $PUREDNS_WILDCARDTEST_LIMIT --wildcard-batch $PUREDNS_WILDCARDBATCH_LIMIT 2>>"$LOGFILE" >/dev/null else resolvers_update_quick_axiom - if [[ "$DEEP" = true ]]; then + if [[ $DEEP == true ]]; then axiom-scan $subs_wordlist_big -m puredns-single $domain -r /home/op/lists/resolvers.txt --resolvers-trusted /home/op/lists/resolvers_trusted.txt --wildcard-tests $PUREDNS_WILDCARDTEST_LIMIT --wildcard-batch $PUREDNS_WILDCARDBATCH_LIMIT -o .tmp/subs_brute.txt $AXIOM_EXTRA_ARGS 2>>"$LOGFILE" >/dev/null else axiom-scan $subs_wordlist -m puredns-single $domain -r /home/op/lists/resolvers.txt --resolvers-trusted /home/op/lists/resolvers_trusted.txt --wildcard-tests $PUREDNS_WILDCARDTEST_LIMIT --wildcard-batch $PUREDNS_WILDCARDBATCH_LIMIT -o .tmp/subs_brute.txt $AXIOM_EXTRA_ARGS 2>>"$LOGFILE" >/dev/null @@ -783,28 +812,30 @@ function sub_brute() { NUMOFLINES=$(cat .tmp/subs_brute_valid.txt 2>>"$LOGFILE" | sed "s/*.//" | grep ".$domain$" | grep -E 
'^((http|https):\/\/)?([a-zA-Z0-9]([a-zA-Z0-9\-]*[a-zA-Z0-9])?\.)+[a-zA-Z]{1,}(\/.*)?$' | anew subdomains/subdomains.txt | sed '/^$/d' | wc -l) end_subfunc "${NUMOFLINES} new subs (bruteforce)" ${FUNCNAME[0]} else - if [[ "$SUBBRUTE" = false ]]; then + if [[ $SUBBRUTE == false ]]; then printf "\n${yellow} ${FUNCNAME[0]} skipped in this mode or defined in reconftw.cfg ${reset}\n" else printf "${yellow} ${FUNCNAME[0]} is already processed, to force executing ${FUNCNAME[0]} delete\n $called_fn_dir/.${FUNCNAME[0]} ${reset}\n\n" fi fi + spinny::stop } function sub_scraping() { - if { [[ ! -f "$called_fn_dir/.${FUNCNAME[0]}" ]] || [[ "$DIFF" = true ]]; } && [[ "$SUBSCRAPING" = true ]]; then + spinny::start + if { [[ ! -f "$called_fn_dir/.${FUNCNAME[0]}" ]] || [[ $DIFF == true ]]; } && [[ $SUBSCRAPING == true ]]; then start_subfunc ${FUNCNAME[0]} "Running : Source code scraping subdomain search" touch .tmp/scrap_subs.txt if [[ -s "$dir/subdomains/subdomains.txt" ]]; then - if [[ $(cat subdomains/subdomains.txt | wc -l) -le $DEEP_LIMIT ]] || [[ "$DEEP" = true ]]; then - if [[ ! "$AXIOM" = true ]]; then + if [[ $(cat subdomains/subdomains.txt | wc -l) -le $DEEP_LIMIT ]] || [[ $DEEP == true ]]; then + if [[ $AXIOM != true ]]; then resolvers_update_quick_local cat subdomains/subdomains.txt | httpx -follow-host-redirects -status-code -threads $HTTPX_THREADS -rl $HTTPX_RATELIMIT -timeout $HTTPX_TIMEOUT -silent -retries 2 -title -web-server -tech-detect -location -no-color -json -o .tmp/web_full_info1.txt 2>>"$LOGFILE" >/dev/null [ -s ".tmp/web_full_info1.txt" ] && cat .tmp/web_full_info1.txt | jq -r 'try .url' 2>/dev/null | grep "$domain" | grep -E '^((http|https):\/\/)?([a-zA-Z0-9]([a-zA-Z0-9\-]*[a-zA-Z0-9])?\.)+[a-zA-Z]{1,}(\/.*)?$' | sed "s/*.//" | anew .tmp/probed_tmp_scrap.txt | unfurl -u domains 2>>"$LOGFILE" | anew -q .tmp/scrap_subs.txt [ -s ".tmp/probed_tmp_scrap.txt" ] && cat .tmp/probed_tmp_scrap.txt | httpx -tls-grab -tls-probe -csp-probe -status-code -threads $HTTPX_THREADS -rl $HTTPX_RATELIMIT -timeout $HTTPX_TIMEOUT -silent -retries 2 -title -web-server -tech-detect -location -no-color -json -o .tmp/web_full_info2.txt 2>>"$LOGFILE" >/dev/null [ -s ".tmp/web_full_info2.txt" ] && cat .tmp/web_full_info2.txt | jq -r 'try ."tls-grab"."dns_names"[],try .csp.domains[],try .url' 2>/dev/null | grep "$domain" | grep -E '^((http|https):\/\/)?([a-zA-Z0-9]([a-zA-Z0-9\-]*[a-zA-Z0-9])?\.)+[a-zA-Z]{1,}(\/.*)?$' | sed "s/*.//" | sort -u | httpx -silent | anew .tmp/probed_tmp_scrap.txt | unfurl -u domains 2>>"$LOGFILE" | anew -q .tmp/scrap_subs.txt - if [[ "$DEEP" = true ]]; then + if [[ $DEEP == true ]]; then [ -s ".tmp/probed_tmp_scrap.txt" ] && katana -silent -list .tmp/probed_tmp_scrap.txt -jc -kf all -c $KATANA_THREADS -d 3 -fs rdn -o .tmp/katana.txt 2>>"$LOGFILE" >/dev/null else [ -s ".tmp/probed_tmp_scrap.txt" ] && katana -silent -list .tmp/probed_tmp_scrap.txt -jc -kf all -c $KATANA_THREADS -d 2 -fs rdn -o .tmp/katana.txt 2>>"$LOGFILE" >/dev/null @@ -815,7 +846,7 @@ function sub_scraping() { [ -s ".tmp/web_full_info1.txt" ] && cat .tmp/web_full_info1.txt | jq -r 'try .url' 2>/dev/null | grep "$domain" | grep -E '^((http|https):\/\/)?([a-zA-Z0-9]([a-zA-Z0-9\-]*[a-zA-Z0-9])?\.)+[a-zA-Z]{1,}(\/.*)?$' | sed "s/*.//" | anew .tmp/probed_tmp_scrap.txt | unfurl -u domains 2>>"$LOGFILE" | anew -q .tmp/scrap_subs.txt [ -s ".tmp/probed_tmp_scrap.txt" ] && axiom-scan .tmp/probed_tmp_scrap.txt -m httpx -tls-grab -tls-probe -csp-probe -random-agent -status-code -threads $HTTPX_THREADS -rl $HTTPX_RATELIMIT 
-timeout $HTTPX_TIMEOUT -silent -retries 2 -title -web-server -tech-detect -location -no-color -json -o .tmp/web_full_info2.txt $AXIOM_EXTRA_ARGS 2>>"$LOGFILE" >/dev/null [ -s ".tmp/web_full_info2.txt" ] && cat .tmp/web_full_info2.txt | jq -r 'try ."tls-grab"."dns_names"[],try .csp.domains[],try .url' 2>/dev/null | grep "$domain" | grep -E '^((http|https):\/\/)?([a-zA-Z0-9]([a-zA-Z0-9\-]*[a-zA-Z0-9])?\.)+[a-zA-Z]{1,}(\/.*)?$' | sed "s/*.//" | sort -u | httpx -silent | anew .tmp/probed_tmp_scrap.txt | unfurl -u domains 2>>"$LOGFILE" | anew -q .tmp/scrap_subs.txt - if [[ "$DEEP" = true ]]; then + if [[ $DEEP == true ]]; then [ -s ".tmp/probed_tmp_scrap.txt" ] && axiom-scan .tmp/probed_tmp_scrap.txt -m katana -jc -kf all -d 3 -fs rdn -o .tmp/katana.txt $AXIOM_EXTRA_ARGS 2>>"$LOGFILE" >/dev/null else [ -s ".tmp/probed_tmp_scrap.txt" ] && axiom-scan .tmp/probed_tmp_scrap.txt -m katana -jc -kf all -d 2 -fs rdn -o .tmp/katana.txt $AXIOM_EXTRA_ARGS 2>>"$LOGFILE" >/dev/null @@ -824,7 +855,7 @@ function sub_scraping() { sed -i '/^.\{2048\}./d' .tmp/katana.txt [ -s ".tmp/katana.txt" ] && cat .tmp/katana.txt | unfurl -u domains 2>>"$LOGFILE" | grep ".$domain$" | grep -E '^((http|https):\/\/)?([a-zA-Z0-9]([a-zA-Z0-9\-]*[a-zA-Z0-9])?\.)+[a-zA-Z]{1,}(\/.*)?$' | anew -q .tmp/scrap_subs.txt [ -s ".tmp/scrap_subs.txt" ] && puredns resolve .tmp/scrap_subs.txt -w .tmp/scrap_subs_resolved.txt -r $resolvers --resolvers-trusted $resolvers_trusted -l $PUREDNS_PUBLIC_LIMIT --rate-limit-trusted $PUREDNS_TRUSTED_LIMIT --wildcard-tests $PUREDNS_WILDCARDTEST_LIMIT --wildcard-batch $PUREDNS_WILDCARDBATCH_LIMIT 2>>"$LOGFILE" >/dev/null - if [[ "$INSCOPE" = true ]]; then + if [[ $INSCOPE == true ]]; then check_inscope .tmp/scrap_subs_resolved.txt 2>>"$LOGFILE" >/dev/null fi NUMOFLINES=$(cat .tmp/scrap_subs_resolved.txt 2>>"$LOGFILE" | grep "\.$domain$\|^$domain$" | grep -E '^((http|https):\/\/)?([a-zA-Z0-9]([a-zA-Z0-9\-]*[a-zA-Z0-9])?\.)+[a-zA-Z]{1,}(\/.*)?$' | anew subdomains/subdomains.txt | tee .tmp/diff_scrap.txt | sed '/^$/d' | wc -l) @@ -839,23 +870,25 @@ function sub_scraping() { end_subfunc "No subdomains to search (code scraping)" ${FUNCNAME[0]} fi else - if [[ "$SUBSCRAPING" = false ]]; then + if [[ $SUBSCRAPING == false ]]; then printf "\n${yellow} ${FUNCNAME[0]} skipped in this mode or defined in reconftw.cfg ${reset}\n" else printf "${yellow} ${FUNCNAME[0]} is already processed, to force executing ${FUNCNAME[0]} delete\n $called_fn_dir/.${FUNCNAME[0]} ${reset}\n\n" fi fi + spinny::stop } function sub_analytics() { - if { [[ ! -f "$called_fn_dir/.${FUNCNAME[0]}" ]] || [[ "$DIFF" = true ]]; } && [[ "$SUBANALYTICS" = true ]]; then + spinny::start + if { [[ ! -f "$called_fn_dir/.${FUNCNAME[0]}" ]] || [[ $DIFF == true ]]; } && [[ $SUBANALYTICS == true ]]; then start_subfunc ${FUNCNAME[0]} "Running : Analytics Subdomain Enumeration" if [[ -s ".tmp/probed_tmp_scrap.txt" ]]; then mkdir -p .tmp/output_analytics/ analyticsrelationships -ch <.tmp/probed_tmp_scrap.txt >>.tmp/analytics_subs_tmp.txt 2>>"$LOGFILE" [ -s ".tmp/analytics_subs_tmp.txt" ] && cat .tmp/analytics_subs_tmp.txt | grep "\.$domain$\|^$domain$" | grep -E '^((http|https):\/\/)?([a-zA-Z0-9]([a-zA-Z0-9\-]*[a-zA-Z0-9])?\.)+[a-zA-Z]{1,}(\/.*)?$' | sed "s/|__ //" | anew -q .tmp/analytics_subs_clean.txt - if [[ ! 
"$AXIOM" = true ]]; then + if [[ $AXIOM != true ]]; then resolvers_update_quick_local [ -s ".tmp/analytics_subs_clean.txt" ] && puredns resolve .tmp/analytics_subs_clean.txt -w .tmp/analytics_subs_resolved.txt -r $resolvers --resolvers-trusted $resolvers_trusted -l $PUREDNS_PUBLIC_LIMIT --rate-limit-trusted $PUREDNS_TRUSTED_LIMIT --wildcard-tests $PUREDNS_WILDCARDTEST_LIMIT --wildcard-batch $PUREDNS_WILDCARDBATCH_LIMIT 2>>"$LOGFILE" >/dev/null else @@ -867,25 +900,27 @@ function sub_analytics() { NUMOFLINES=$(cat .tmp/analytics_subs_resolved.txt 2>>"$LOGFILE" | anew subdomains/subdomains.txt | sed '/^$/d' | wc -l) end_subfunc "${NUMOFLINES} new subs (analytics relationship)" ${FUNCNAME[0]} else - if [[ "$SUBANALYTICS" = false ]]; then + if [[ $SUBANALYTICS == false ]]; then printf "\n${yellow} ${FUNCNAME[0]} skipped in this mode or defined in reconftw.cfg ${reset}\n" else printf "${yellow} ${FUNCNAME[0]} is already processed, to force executing ${FUNCNAME[0]} delete\n $called_fn_dir/.${FUNCNAME[0]} ${reset}\n\n" fi fi + spinny::stop } function sub_permut() { - if { [[ ! -f "$called_fn_dir/.${FUNCNAME[0]}" ]] || [[ "$DIFF" = true ]]; } && [[ "$SUBPERMUTE" = true ]]; then + spinny::start + if { [[ ! -f "$called_fn_dir/.${FUNCNAME[0]}" ]] || [[ $DIFF == true ]]; } && [[ $SUBPERMUTE == true ]]; then start_subfunc ${FUNCNAME[0]} "Running : Permutations Subdomain Enumeration" - if [[ "$DEEP" = true ]] || [[ "$(cat subdomains/subdomains.txt | wc -l)" -le $DEEP_LIMIT ]]; then - if [[ "$PERMUTATIONS_OPTION" = "gotator" ]]; then + if [[ $DEEP == true ]] || [[ "$(cat subdomains/subdomains.txt | wc -l)" -le $DEEP_LIMIT ]]; then + if [[ $PERMUTATIONS_OPTION == "gotator" ]]; then [ -s "subdomains/subdomains.txt" ] && gotator -sub subdomains/subdomains.txt -perm ${tools}/permutations_list.txt $GOTATOR_FLAGS -silent 2>>"$LOGFILE" | head -c $PERMUTATIONS_LIMIT >.tmp/gotator1.txt else [ -s "subdomains/subdomains.txt" ] && ripgen -d subdomains/subdomains.txt -w ${tools}/permutations_list.txt 2>>"$LOGFILE" | head -c $PERMUTATIONS_LIMIT >.tmp/gotator1.txt fi elif [[ "$(cat .tmp/subs_no_resolved.txt | wc -l)" -le $DEEP_LIMIT2 ]]; then - if [[ "$PERMUTATIONS_OPTION" = "gotator" ]]; then + if [[ $PERMUTATIONS_OPTION == "gotator" ]]; then [ -s ".tmp/subs_no_resolved.txt" ] && gotator -sub .tmp/subs_no_resolved.txt -perm ${tools}/permutations_list.txt $GOTATOR_FLAGS -silent 2>>"$LOGFILE" | head -c $PERMUTATIONS_LIMIT >.tmp/gotator1.txt else [ -s ".tmp/subs_no_resolved.txt" ] && ripgen -d .tmp/subs_no_resolved.txt -w ${tools}/permutations_list.txt 2>>"$LOGFILE" | head -c $PERMUTATIONS_LIMIT >.tmp/gotator1.txt @@ -894,7 +929,7 @@ function sub_permut() { end_subfunc "Skipping Permutations: Too Many Subdomains" ${FUNCNAME[0]} return 1 fi - if [[ ! 
"$AXIOM" = true ]]; then + if [[ $AXIOM != true ]]; then resolvers_update_quick_local [ -s ".tmp/gotator1.txt" ] && puredns resolve .tmp/gotator1.txt -w .tmp/permute1.txt -r $resolvers --resolvers-trusted $resolvers_trusted -l $PUREDNS_PUBLIC_LIMIT --rate-limit-trusted $PUREDNS_TRUSTED_LIMIT --wildcard-tests $PUREDNS_WILDCARDTEST_LIMIT --wildcard-batch $PUREDNS_WILDCARDBATCH_LIMIT 2>>"$LOGFILE" >/dev/null else @@ -902,13 +937,13 @@ function sub_permut() { [ -s ".tmp/gotator1.txt" ] && axiom-scan .tmp/gotator1.txt -m puredns-resolve -r /home/op/lists/resolvers.txt --resolvers-trusted /home/op/lists/resolvers_trusted.txt --wildcard-tests $PUREDNS_WILDCARDTEST_LIMIT --wildcard-batch $PUREDNS_WILDCARDBATCH_LIMIT -o .tmp/permute1.txt $AXIOM_EXTRA_ARGS 2>>"$LOGFILE" >/dev/null fi - if [[ "$PERMUTATIONS_OPTION" = "gotator" ]]; then + if [[ $PERMUTATIONS_OPTION == "gotator" ]]; then [ -s ".tmp/permute1.txt" ] && gotator -sub .tmp/permute1.txt -perm ${tools}/permutations_list.txt $GOTATOR_FLAGS -silent 2>>"$LOGFILE" | head -c $PERMUTATIONS_LIMIT >.tmp/gotator2.txt else [ -s ".tmp/permute1.txt" ] && ripgen -d .tmp/permute1.txt -w ${tools}/permutations_list.txt 2>>"$LOGFILE" | head -c $PERMUTATIONS_LIMIT >.tmp/gotator2.txt fi - if [[ ! "$AXIOM" = true ]]; then + if [[ $AXIOM != true ]]; then [ -s ".tmp/gotator2.txt" ] && puredns resolve .tmp/gotator2.txt -w .tmp/permute2.txt -r $resolvers --resolvers-trusted $resolvers_trusted -l $PUREDNS_PUBLIC_LIMIT --rate-limit-trusted $PUREDNS_TRUSTED_LIMIT --wildcard-tests $PUREDNS_WILDCARDTEST_LIMIT --wildcard-batch $PUREDNS_WILDCARDBATCH_LIMIT 2>>"$LOGFILE" >/dev/null else [ -s ".tmp/gotator2.txt" ] && axiom-scan .tmp/gotator2.txt -m puredns-resolve -r /home/op/lists/resolvers.txt --resolvers-trusted /home/op/lists/resolvers_trusted.txt --wildcard-tests $PUREDNS_WILDCARDTEST_LIMIT --wildcard-batch $PUREDNS_WILDCARDBATCH_LIMIT -o .tmp/permute2.txt $AXIOM_EXTRA_ARGS 2>>"$LOGFILE" >/dev/null @@ -924,16 +959,18 @@ function sub_permut() { fi end_subfunc "${NUMOFLINES} new subs (permutations)" ${FUNCNAME[0]} else - if [[ "$SUBPERMUTE" = false ]]; then + if [[ $SUBPERMUTE == false ]]; then printf "\n${yellow} ${FUNCNAME[0]} skipped in this mode or defined in reconftw.cfg ${reset}\n" else printf "${yellow} ${FUNCNAME[0]} is already processed, to force executing ${FUNCNAME[0]} delete\n $called_fn_dir/.${FUNCNAME[0]} ${reset}\n\n" fi fi + spinny::stop } function sub_regex_permut() { - if { [[ ! -f "$called_fn_dir/.${FUNCNAME[0]}" ]] || [[ "$DIFF" = true ]]; } && [[ "$SUBREGEXPERMUTE" = true ]]; then + spinny::start + if { [[ ! -f "$called_fn_dir/.${FUNCNAME[0]}" ]] || [[ $DIFF == true ]]; } && [[ $SUBREGEXPERMUTE == true ]]; then start_subfunc ${FUNCNAME[0]} "Running : Permutations by regex analysis" cd "${tools}/regulator" || { echo "Failed to cd directory in ${FUNCNAME[0]} @ line ${LINENO}" @@ -945,7 +982,7 @@ function sub_regex_permut() { exit 1 } - if [[ ! 
"$AXIOM" = true ]]; then + if [[ $AXIOM != true ]]; then resolvers_update_quick_local [ -s ".tmp/${domain}.brute" ] && puredns resolve .tmp/${domain}.brute -w .tmp/regulator.txt -r $resolvers --resolvers-trusted $resolvers_trusted -l $PUREDNS_PUBLIC_LIMIT --rate-limit-trusted $PUREDNS_TRUSTED_LIMIT --wildcard-tests $PUREDNS_WILDCARDTEST_LIMIT --wildcard-batch $PUREDNS_WILDCARDBATCH_LIMIT 2>>"$LOGFILE" >/dev/null else @@ -962,20 +999,22 @@ function sub_regex_permut() { fi end_subfunc "${NUMOFLINES} new subs (permutations by regex)" ${FUNCNAME[0]} else - if [[ "$SUBREGEXPERMUTE" = false ]]; then + if [[ $SUBREGEXPERMUTE == false ]]; then printf "\n${yellow} ${FUNCNAME[0]} skipped in this mode or defined in reconftw.cfg ${reset}\n" else printf "${yellow} ${FUNCNAME[0]} is already processed, to force executing ${FUNCNAME[0]} delete\n $called_fn_dir/.${FUNCNAME[0]} ${reset}\n\n" fi fi + spinny::stop } function sub_recursive_passive() { - if { [[ ! -f "$called_fn_dir/.${FUNCNAME[0]}" ]] || [[ "$DIFF" = true ]]; } && [[ "$SUB_RECURSIVE_PASSIVE" = true ]] && [[ -s "subdomains/subdomains.txt" ]]; then + spinny::start + if { [[ ! -f "$called_fn_dir/.${FUNCNAME[0]}" ]] || [[ $DIFF == true ]]; } && [[ $SUB_RECURSIVE_PASSIVE == true ]] && [[ -s "subdomains/subdomains.txt" ]]; then start_subfunc ${FUNCNAME[0]} "Running : Subdomains recursive search passive" # Passive recursive [ -s "subdomains/subdomains.txt" ] && dsieve -if subdomains/subdomains.txt -f 3 -top $DEEP_RECURSIVE_PASSIVE >.tmp/subdomains_recurs_top.txt - if [[ ! "$AXIOM" = true ]]; then + if [[ $AXIOM != true ]]; then resolvers_update_quick_local [ -s ".tmp/subdomains_recurs_top.txt" ] && timeout -k 1m ${AMASS_ENUM_TIMEOUT}m amass enum -passive -df .tmp/subdomains_recurs_top.txt -nf subdomains/subdomains.txt -config $AMASS_CONFIG -timeout $AMASS_ENUM_TIMEOUT 2>>"$LOGFILE" | anew -q .tmp/passive_recursive.txt [ -s ".tmp/passive_recursive.txt" ] && puredns resolve .tmp/passive_recursive.txt -w .tmp/passive_recurs_tmp.txt -r $resolvers --resolvers-trusted $resolvers_trusted -l $PUREDNS_PUBLIC_LIMIT --rate-limit-trusted $PUREDNS_TRUSTED_LIMIT --wildcard-tests $PUREDNS_WILDCARDTEST_LIMIT --wildcard-batch $PUREDNS_WILDCARDBATCH_LIMIT 2>>"$LOGFILE" >/dev/null @@ -989,21 +1028,23 @@ function sub_recursive_passive() { NUMOFLINES=$(cat .tmp/passive_recurs_tmp.txt 2>>"$LOGFILE" | grep "\.$domain$\|^$domain$" | grep -E '^((http|https):\/\/)?([a-zA-Z0-9]([a-zA-Z0-9\-]*[a-zA-Z0-9])?\.)+[a-zA-Z]{1,}(\/.*)?$' | sed '/^$/d' | anew subdomains/subdomains.txt | wc -l) end_subfunc "${NUMOFLINES} new subs (recursive)" ${FUNCNAME[0]} else - if [[ "$SUB_RECURSIVE_PASSIVE" = false ]]; then + if [[ $SUB_RECURSIVE_PASSIVE == false ]]; then printf "\n${yellow} ${FUNCNAME[0]} skipped in this mode or defined in reconftw.cfg ${reset}\n" else printf "${yellow} ${FUNCNAME[0]} is already processed, to force executing ${FUNCNAME[0]} delete\n $called_fn_dir/.${FUNCNAME[0]} ${reset}\n\n" fi fi + spinny::stop } function sub_recursive_brute() { - if { [[ ! -f "$called_fn_dir/.${FUNCNAME[0]}" ]] || [[ "$DIFF" = true ]]; } && [[ "$SUB_RECURSIVE_BRUTE" = true ]] && [[ -s "subdomains/subdomains.txt" ]]; then + spinny::start + if { [[ ! -f "$called_fn_dir/.${FUNCNAME[0]}" ]] || [[ $DIFF == true ]]; } && [[ $SUB_RECURSIVE_BRUTE == true ]] && [[ -s "subdomains/subdomains.txt" ]]; then start_subfunc ${FUNCNAME[0]} "Running : Subdomains recursive search active" if [[ $(cat subdomains/subdomains.txt | wc -l) -le $DEEP_LIMIT ]]; then [ ! 
-s ".tmp/subdomains_recurs_top.txt" ] && dsieve -if subdomains/subdomains.txt -f 3 -top $DEEP_RECURSIVE_PASSIVE >.tmp/subdomains_recurs_top.txt ripgen -d .tmp/subdomains_recurs_top.txt -w $subs_wordlist >.tmp/brute_recursive_wordlist.txt - if [[ ! "$AXIOM" = true ]]; then + if [[ $AXIOM != true ]]; then resolvers_update_quick_local [ -s ".tmp/brute_recursive_wordlist.txt" ] && puredns resolve .tmp/brute_recursive_wordlist.txt -r $resolvers --resolvers-trusted $resolvers_trusted -l $PUREDNS_PUBLIC_LIMIT --rate-limit-trusted $PUREDNS_TRUSTED_LIMIT --wildcard-tests $PUREDNS_WILDCARDTEST_LIMIT --wildcard-batch $PUREDNS_WILDCARDBATCH_LIMIT -w .tmp/brute_recursive_result.txt 2>>"$LOGFILE" >/dev/null else @@ -1012,25 +1053,25 @@ function sub_recursive_brute() { fi [ -s ".tmp/brute_recursive_result.txt" ] && cat .tmp/brute_recursive_result.txt | anew -q .tmp/brute_recursive.txt - if [[ "$PERMUTATIONS_OPTION" = "gotator" ]]; then + if [[ $PERMUTATIONS_OPTION == "gotator" ]]; then [ -s ".tmp/brute_recursive.txt" ] && gotator -sub .tmp/brute_recursive.txt -perm ${tools}/permutations_list.txt $GOTATOR_FLAGS -silent 2>>"$LOGFILE" | head -c $PERMUTATIONS_LIMIT >.tmp/gotator1_recursive.txt else [ -s ".tmp/brute_recursive.txt" ] && ripgen -d .tmp/brute_recursive.txt -w ${tools}/permutations_list.txt 2>>"$LOGFILE" | head -c $PERMUTATIONS_LIMIT >.tmp/gotator1_recursive.txt fi - if [[ ! "$AXIOM" = true ]]; then + if [[ $AXIOM != true ]]; then [ -s ".tmp/gotator1_recursive.txt" ] && puredns resolve .tmp/gotator1_recursive.txt -w .tmp/permute1_recursive.txt -r $resolvers --resolvers-trusted $resolvers_trusted -l $PUREDNS_PUBLIC_LIMIT --rate-limit-trusted $PUREDNS_TRUSTED_LIMIT --wildcard-tests $PUREDNS_WILDCARDTEST_LIMIT --wildcard-batch $PUREDNS_WILDCARDBATCH_LIMIT 2>>"$LOGFILE" >/dev/null else [ -s ".tmp/gotator1_recursive.txt" ] && axiom-scan .tmp/gotator1_recursive.txt -m puredns-resolve -r /home/op/lists/resolvers.txt --resolvers-trusted /home/op/lists/resolvers_trusted.txt --wildcard-tests $PUREDNS_WILDCARDTEST_LIMIT --wildcard-batch $PUREDNS_WILDCARDBATCH_LIMIT -o .tmp/permute1_recursive.txt $AXIOM_EXTRA_ARGS 2>>"$LOGFILE" >/dev/null fi - if [[ "$PERMUTATIONS_OPTION" = "gotator" ]]; then + if [[ $PERMUTATIONS_OPTION == "gotator" ]]; then [ -s ".tmp/permute1_recursive.txt" ] && gotator -sub .tmp/permute1_recursive.txt -perm ${tools}/permutations_list.txt $GOTATOR_FLAGS -silent 2>>"$LOGFILE" | head -c $PERMUTATIONS_LIMIT >.tmp/gotator2_recursive.txt else [ -s ".tmp/permute1_recursive.txt" ] && ripgen -d .tmp/permute1_recursive.txt -w ${tools}/permutations_list.txt 2>>"$LOGFILE" | head -c $PERMUTATIONS_LIMIT >.tmp/gotator2_recursive.txt fi - if [[ ! 
"$AXIOM" = true ]]; then + if [[ $AXIOM != true ]]; then [ -s ".tmp/gotator2_recursive.txt" ] && puredns resolve .tmp/gotator2_recursive.txt -w .tmp/permute2_recursive.txt -r $resolvers --resolvers-trusted $resolvers_trusted -l $PUREDNS_PUBLIC_LIMIT --rate-limit-trusted $PUREDNS_TRUSTED_LIMIT --wildcard-tests $PUREDNS_WILDCARDTEST_LIMIT --wildcard-batch $PUREDNS_WILDCARDBATCH_LIMIT 2>>"$LOGFILE" >/dev/null else [ -s ".tmp/gotator2_recursive.txt" ] && axiom-scan .tmp/gotator2_recursive.txt -m puredns-resolve -r /home/op/lists/resolvers.txt --resolvers-trusted /home/op/lists/resolvers_trusted.txt --wildcard-tests $PUREDNS_WILDCARDTEST_LIMIT --wildcard-batch $PUREDNS_WILDCARDBATCH_LIMIT -o .tmp/permute2_recursive.txt $AXIOM_EXTRA_ARGS 2>>"$LOGFILE" >/dev/null @@ -1039,14 +1080,14 @@ function sub_recursive_brute() { else end_subfunc "skipped in this mode or defined in reconftw.cfg" ${FUNCNAME[0]} fi - if [[ "$INSCOPE" = true ]]; then + if [[ $INSCOPE == true ]]; then check_inscope .tmp/permute_recursive.txt 2>>"$LOGFILE" >/dev/null check_inscope .tmp/brute_recursive.txt 2>>"$LOGFILE" >/dev/null fi # Last validation cat .tmp/permute_recursive.txt .tmp/brute_recursive.txt 2>>"$LOGFILE" | anew -q .tmp/brute_perm_recursive.txt - if [[ ! "$AXIOM" = true ]]; then + if [[ $AXIOM != true ]]; then [ -s ".tmp/brute_recursive.txt" ] && puredns resolve .tmp/brute_perm_recursive.txt -w .tmp/brute_perm_recursive_final.txt -r $resolvers --resolvers-trusted $resolvers_trusted -l $PUREDNS_PUBLIC_LIMIT --rate-limit-trusted $PUREDNS_TRUSTED_LIMIT --wildcard-tests $PUREDNS_WILDCARDTEST_LIMIT --wildcard-batch $PUREDNS_WILDCARDBATCH_LIMIT 2>>"$LOGFILE" >/dev/null else [ -s ".tmp/brute_recursive.txt" ] && axiom-scan .tmp/brute_perm_recursive.txt -m puredns-resolve -r /home/op/lists/resolvers.txt --resolvers-trusted /home/op/lists/resolvers_trusted.txt --wildcard-tests $PUREDNS_WILDCARDTEST_LIMIT --wildcard-batch $PUREDNS_WILDCARDBATCH_LIMIT -o .tmp/brute_perm_recursive_final.txt $AXIOM_EXTRA_ARGS 2>>"$LOGFILE" >/dev/null @@ -1055,20 +1096,22 @@ function sub_recursive_brute() { NUMOFLINES=$(cat .tmp/brute_perm_recursive_final.txt 2>>"$LOGFILE" | grep "\.$domain$\|^$domain$" | grep -E '^((http|https):\/\/)?([a-zA-Z0-9]([a-zA-Z0-9\-]*[a-zA-Z0-9])?\.)+[a-zA-Z]{1,}(\/.*)?$' | sed '/^$/d' | anew subdomains/subdomains.txt | wc -l) end_subfunc "${NUMOFLINES} new subs (recursive active)" ${FUNCNAME[0]} else - if [[ "$SUB_RECURSIVE_BRUTE" = false ]]; then + if [[ $SUB_RECURSIVE_BRUTE == false ]]; then printf "\n${yellow} ${FUNCNAME[0]} skipped in this mode or defined in reconftw.cfg ${reset}\n" else printf "${yellow} ${FUNCNAME[0]} is already processed, to force executing ${FUNCNAME[0]} delete\n $called_fn_dir/.${FUNCNAME[0]} ${reset}\n\n" fi fi + spinny::stop } function subtakeover() { - if { [[ ! -f "$called_fn_dir/.${FUNCNAME[0]}" ]] || [[ "$DIFF" = true ]]; } && [[ "$SUBTAKEOVER" = true ]]; then + spinny::start + if { [[ ! -f "$called_fn_dir/.${FUNCNAME[0]}" ]] || [[ $DIFF == true ]]; } && [[ $SUBTAKEOVER == true ]]; then start_func ${FUNCNAME[0]} "Looking for possible subdomain and DNS takeover" touch .tmp/tko.txt [ ! -s ".tmp/webs_all.txt" ] && cat webs/webs.txt webs/webs_uncommon_ports.txt 2>/dev/null | anew -q .tmp/webs_all.txt - if [[ ! 
"$AXIOM" = true ]]; then + if [[ $AXIOM != true ]]; then nuclei -update 2>>"$LOGFILE" >/dev/null cat subdomains/subdomains.txt .tmp/webs_all.txt 2>/dev/null | nuclei -silent -nh -tags takeover -severity info,low,medium,high,critical -retries 3 -rl $NUCLEI_RATELIMIT -t ${NUCLEI_TEMPLATES_PATH} -o .tmp/tko.txt else @@ -1083,21 +1126,23 @@ function subtakeover() { sed -i '/^$/d' .tmp/tko.txt NUMOFLINES=$(cat .tmp/tko.txt 2>>"$LOGFILE" | anew webs/takeover.txt | sed '/^$/d' | wc -l) - if [[ "$NUMOFLINES" -gt 0 ]]; then + if [[ $NUMOFLINES -gt 0 ]]; then notification "${NUMOFLINES} new possible takeovers found" info fi end_func "Results are saved in $domain/webs/takeover.txt" ${FUNCNAME[0]} else - if [[ "$SUBTAKEOVER" = false ]]; then + if [[ $SUBTAKEOVER == false ]]; then printf "\n${yellow} ${FUNCNAME[0]} skipped in this mode or defined in reconftw.cfg ${reset}\n" else printf "${yellow} ${FUNCNAME[0]} is already processed, to force executing ${FUNCNAME[0]} delete\n $called_fn_dir/.${FUNCNAME[0]} ${reset}\n\n" fi fi + spinny::stop } function zonetransfer() { - if { [[ ! -f "$called_fn_dir/.${FUNCNAME[0]}" ]] || [[ "$DIFF" = true ]]; } && [[ "$ZONETRANSFER" = true ]] && ! [[ $domain =~ ^[0-9]+\.[0-9]+\.[0-9]+\.[0-9] ]]; then + spinny::start + if { [[ ! -f "$called_fn_dir/.${FUNCNAME[0]}" ]] || [[ $DIFF == true ]]; } && [[ $ZONETRANSFER == true ]] && ! [[ $domain =~ ^[0-9]+\.[0-9]+\.[0-9]+\.[0-9] ]]; then start_func ${FUNCNAME[0]} "Zone transfer check" for ns in $(dig +short ns "$domain"); do dig axfr "$domain" @"$ns" >>subdomains/zonetransfer.txt; done if [[ -s "subdomains/zonetransfer.txt" ]]; then @@ -1105,25 +1150,27 @@ function zonetransfer() { fi end_func "Results are saved in $domain/subdomains/zonetransfer.txt" ${FUNCNAME[0]} else - if [[ "$ZONETRANSFER" = false ]]; then + if [[ $ZONETRANSFER == false ]]; then printf "\n${yellow} ${FUNCNAME[0]} skipped in this mode or defined in reconftw.cfg ${reset}\n" elif [[ $domain =~ ^[0-9]+\.[0-9]+\.[0-9]+\.[0-9] ]]; then return else - if [[ "$ZONETRANSFER" = false ]]; then + if [[ $ZONETRANSFER == false ]]; then printf "\n${yellow} ${FUNCNAME[0]} skipped in this mode or defined in reconftw.cfg ${reset}\n" else printf "${yellow} ${FUNCNAME[0]} is already processed, to force executing ${FUNCNAME[0]} delete\n $called_fn_dir/.${FUNCNAME[0]} ${reset}\n\n" fi fi fi + spinny::stop } function s3buckets() { - if { [[ ! -f "$called_fn_dir/.${FUNCNAME[0]}" ]] || [[ "$DIFF" = true ]]; } && [[ "$S3BUCKETS" = true ]] && ! [[ $domain =~ ^[0-9]+\.[0-9]+\.[0-9]+\.[0-9] ]]; then + spinny::start + if { [[ ! -f "$called_fn_dir/.${FUNCNAME[0]}" ]] || [[ $DIFF == true ]]; } && [[ $S3BUCKETS == true ]] && ! [[ $domain =~ ^[0-9]+\.[0-9]+\.[0-9]+\.[0-9] ]]; then start_func ${FUNCNAME[0]} "AWS S3 buckets search" # S3Scanner - if [[ ! 
"$AXIOM" = true ]]; then + if [[ $AXIOM != true ]]; then [ -s "subdomains/subdomains.txt" ] && s3scanner scan -f subdomains/subdomains.txt 2>>"$LOGFILE" | anew -q .tmp/s3buckets.txt else axiom-scan subdomains/subdomains.txt -m s3scanner -o .tmp/s3buckets_tmp.txt $AXIOM_EXTRA_ARGS 2>>"$LOGFILE" >/dev/null @@ -1134,28 +1181,29 @@ function s3buckets() { python3 ~/Tools/cloud_enum/cloud_enum.py -k $keyword -qs -l .tmp/output_cloud.txt 2>>"$LOGFILE" >/dev/null NUMOFLINES1=$(cat .tmp/output_cloud.txt 2>>"$LOGFILE" | sed '/^#/d' | sed '/^$/d' | anew subdomains/cloud_assets.txt | wc -l) - if [[ "$NUMOFLINES1" -gt 0 ]]; then + if [[ $NUMOFLINES1 -gt 0 ]]; then notification "${NUMOFLINES1} new cloud assets found" info fi NUMOFLINES2=$(cat .tmp/s3buckets.txt 2>>"$LOGFILE" | grep -aiv "not_exist" | grep -aiv "Warning:" | grep -aiv "invalid_name" | grep -aiv "^http" | awk 'NF' | anew subdomains/s3buckets.txt | sed '/^$/d' | wc -l) - if [[ "$NUMOFLINES2" -gt 0 ]]; then + if [[ $NUMOFLINES2 -gt 0 ]]; then notification "${NUMOFLINES2} new S3 buckets found" info fi end_func "Results are saved in subdomains/s3buckets.txt and subdomains/cloud_assets.txt" ${FUNCNAME[0]} else - if [[ "$S3BUCKETS" = false ]]; then + if [[ $S3BUCKETS == false ]]; then printf "\n${yellow} ${FUNCNAME[0]} skipped in this mode or defined in reconftw.cfg ${reset}\n" elif [[ $domain =~ ^[0-9]+\.[0-9]+\.[0-9]+\.[0-9] ]]; then return else - if [[ "$S3BUCKETS" = false ]]; then + if [[ $S3BUCKETS == false ]]; then printf "\n${yellow} ${FUNCNAME[0]} skipped in this mode or defined in reconftw.cfg ${reset}\n" else printf "${yellow} ${FUNCNAME[0]} is already processed, to force executing ${FUNCNAME[0]} delete\n $called_fn_dir/.${FUNCNAME[0]} ${reset}\n\n" fi fi fi + spinny::stop } ############################################################################################################### @@ -1163,9 +1211,10 @@ function s3buckets() { ############################################################################################################### function webprobe_simple() { - if { [[ ! -f "$called_fn_dir/.${FUNCNAME[0]}" ]] || [[ "$DIFF" = true ]]; } && [[ "$WEBPROBESIMPLE" = true ]]; then + spinny::start + if { [[ ! -f "$called_fn_dir/.${FUNCNAME[0]}" ]] || [[ $DIFF == true ]]; } && [[ $WEBPROBESIMPLE == true ]]; then start_subfunc ${FUNCNAME[0]} "Running : Http probing $domain" - if [[ ! 
"$AXIOM" = true ]]; then + if [[ $AXIOM != true ]]; then cat subdomains/subdomains.txt | httpx ${HTTPX_FLAGS} -no-color -json -random-agent -threads $HTTPX_THREADS -rl $HTTPX_RATELIMIT -retries 2 -timeout $HTTPX_TIMEOUT -o .tmp/web_full_info_probe.txt 2>>"$LOGFILE" >/dev/null else axiom-scan subdomains/subdomains.txt -m httpx ${HTTPX_FLAGS} -no-color -json -random-agent -threads $HTTPX_THREADS -rl $HTTPX_RATELIMIT -retries 2 -timeout $HTTPX_TIMEOUT -o .tmp/web_full_info_probe.txt $AXIOM_EXTRA_ARGS 2>>"$LOGFILE" >/dev/null @@ -1177,24 +1226,26 @@ function webprobe_simple() { NUMOFLINES=$(cat .tmp/probed_tmp.txt 2>>"$LOGFILE" | anew webs/webs.txt | sed '/^$/d' | wc -l) cat webs/webs.txt webs/webs_uncommon_ports.txt 2>/dev/null | anew -q .tmp/webs_all.txt end_subfunc "${NUMOFLINES} new websites resolved" ${FUNCNAME[0]} - if [[ "$PROXY" = true ]] && [[ -n "$proxy_url" ]] && [[ $(cat webs/webs.txt | wc -l) -le $DEEP_LIMIT2 ]]; then + if [[ $PROXY == true ]] && [[ -n $proxy_url ]] && [[ $(cat webs/webs.txt | wc -l) -le $DEEP_LIMIT2 ]]; then notification "Sending websites to proxy" info ffuf -mc all -w webs/webs.txt -u FUZZ -replay-proxy $proxy_url 2>>"$LOGFILE" >/dev/null fi else - if [[ "$WEBPROBESIMPLE" = false ]]; then + if [[ $WEBPROBESIMPLE == false ]]; then printf "\n${yellow} ${FUNCNAME[0]} skipped in this mode or defined in reconftw.cfg ${reset}\n" else printf "${yellow} ${FUNCNAME[0]} is already processed, to force executing ${FUNCNAME[0]} delete\n $called_fn_dir/.${FUNCNAME[0]} ${reset}\n\n" fi fi + spinny::stop } function webprobe_full() { - if { [[ ! -f "$called_fn_dir/.${FUNCNAME[0]}" ]] || [[ "$DIFF" = true ]]; } && [[ "$WEBPROBEFULL" = true ]]; then + spinny::start + if { [[ ! -f "$called_fn_dir/.${FUNCNAME[0]}" ]] || [[ $DIFF == true ]]; } && [[ $WEBPROBEFULL == true ]]; then start_func ${FUNCNAME[0]} "Http probing non standard ports" if [[ -s "subdomains/subdomains.txt" ]]; then - if [[ ! "$AXIOM" = true ]]; then + if [[ $AXIOM != true ]]; then if [[ -s "subdomains/subdomains.txt" ]]; then cat subdomains/subdomains.txt | httpx -follow-host-redirects -random-agent -status-code -p $UNCOMMON_PORTS_WEB -threads $HTTPX_UNCOMMONPORTS_THREADS -timeout $HTTPX_UNCOMMONPORTS_TIMEOUT -silent -retries 2 -title -web-server -tech-detect -location -no-color -json -o .tmp/web_full_info_uncommon.txt 2>>"$LOGFILE" >/dev/null fi @@ -1218,44 +1269,48 @@ function webprobe_full() { [ -s "webs/webs_uncommon_ports.txt" ] && cat webs/webs_uncommon_ports.txt cat webs/webs.txt webs/webs_uncommon_ports.txt 2>/dev/null | anew -q .tmp/webs_all.txt end_func "Results are saved in $domain/webs/webs_uncommon_ports.txt" ${FUNCNAME[0]} - if [[ "$PROXY" = true ]] && [[ -n "$proxy_url" ]] && [[ $(cat webs/webs_uncommon_ports.txt | wc -l) -le $DEEP_LIMIT2 ]]; then + if [[ $PROXY == true ]] && [[ -n $proxy_url ]] && [[ $(cat webs/webs_uncommon_ports.txt | wc -l) -le $DEEP_LIMIT2 ]]; then notification "Sending websites with uncommon ports to proxy" info ffuf -mc all -w webs/webs_uncommon_ports.txt -u FUZZ -replay-proxy $proxy_url 2>>"$LOGFILE" >/dev/null fi else - if [[ "$WEBPROBEFULL" = false ]]; then + if [[ $WEBPROBEFULL == false ]]; then printf "\n${yellow} ${FUNCNAME[0]} skipped in this mode or defined in reconftw.cfg ${reset}\n" else printf "${yellow} ${FUNCNAME[0]} is already processed, to force executing ${FUNCNAME[0]} delete\n $called_fn_dir/.${FUNCNAME[0]} ${reset}\n\n" fi fi + spinny::stop } function screenshot() { - if { [[ ! 
-f "$called_fn_dir/.${FUNCNAME[0]}" ]] || [[ "$DIFF" = true ]]; } && [[ "$WEBSCREENSHOT" = true ]]; then + spinny::start + if { [[ ! -f "$called_fn_dir/.${FUNCNAME[0]}" ]] || [[ $DIFF == true ]]; } && [[ $WEBSCREENSHOT == true ]]; then start_func ${FUNCNAME[0]} "Web Screenshots" [ ! -s ".tmp/webs_all.txt" ] && cat webs/webs.txt webs/webs_uncommon_ports.txt 2>/dev/null | anew -q .tmp/webs_all.txt num_lines=$(wc -l <.tmp/webs_all.txt) dynamic_gowitness_timeout=$(expr $num_lines \* $GOWITNESS_TIMEOUT_PER_SITE) - if [[ ! "$AXIOM" = true ]]; then + if [[ $AXIOM != true ]]; then [ -s ".tmp/webs_all.txt" ] && timeout -k 1m ${dynamic_gowitness_timeout}s gowitness file -f .tmp/webs_all.txt -t $GOWITNESS_THREADS $GOWITNESS_FLAGS 2>>"$LOGFILE" else timeout -k 1m ${dynamic_gowitness_timeout}s axiom-scan .tmp/webs_all.txt -m gowitness -t $GOWITNESS_THREADS $GOWITNESS_FLAGS -o screenshots $AXIOM_EXTRA_ARGS 2>>"$LOGFILE" >/dev/null fi end_func "Results are saved in $domain/screenshots folder" ${FUNCNAME[0]} else - if [[ "$WEBSCREENSHOT" = false ]]; then + if [[ $WEBSCREENSHOT == false ]]; then printf "\n${yellow} ${FUNCNAME[0]} skipped in this mode or defined in reconftw.cfg ${reset}\n" else printf "${yellow} ${FUNCNAME[0]} is already processed, to force executing ${FUNCNAME[0]} delete\n $called_fn_dir/.${FUNCNAME[0]} ${reset}\n\n" fi fi + spinny::stop } function virtualhosts() { - if { [[ ! -f "$called_fn_dir/.${FUNCNAME[0]}" ]] || [[ "$DIFF" = true ]]; } && [[ "$VIRTUALHOSTS" = true ]]; then + spinny::start + if { [[ ! -f "$called_fn_dir/.${FUNCNAME[0]}" ]] || [[ $DIFF == true ]]; } && [[ $VIRTUALHOSTS == true ]]; then start_func ${FUNCNAME[0]} "Virtual Hosts dicovery" [ ! -s ".tmp/webs_all.txt" ] && cat webs/webs.txt webs/webs_uncommon_ports.txt 2>/dev/null | anew -q .tmp/webs_all.txt if [[ -s ".tmp/webs_all.txt" ]]; then @@ -1271,12 +1326,13 @@ function virtualhosts() { end_func "No $domain/web/webs.txts file found, virtualhosts skipped " ${FUNCNAME[0]} fi else - if [[ "$VIRTUALHOSTS" = false ]]; then + if [[ $VIRTUALHOSTS == false ]]; then printf "\n${yellow} ${FUNCNAME[0]} skipped in this mode or defined in reconftw.cfg ${reset}\n" else printf "${yellow} ${FUNCNAME[0]} is already processed, to force executing ${FUNCNAME[0]} delete\n $called_fn_dir/.${FUNCNAME[0]} ${reset}\n\n" fi fi + spinny::stop } ############################################################################################################### @@ -1284,7 +1340,8 @@ function virtualhosts() { ############################################################################################################### function favicon() { - if { [[ ! -f "$called_fn_dir/.${FUNCNAME[0]}" ]] || [[ "$DIFF" = true ]]; } && [[ "$FAVICON" = true ]] && ! [[ $domain =~ ^[0-9]+\.[0-9]+\.[0-9]+\.[0-9] ]]; then + spinny::start + if { [[ ! -f "$called_fn_dir/.${FUNCNAME[0]}" ]] || [[ $DIFF == true ]]; } && [[ $FAVICON == true ]] && ! 
[[ $domain =~ ^[0-9]+\.[0-9]+\.[0-9]+\.[0-9] ]]; then start_func ${FUNCNAME[0]} "Favicon Ip Lookup" cd "${tools}/fav-up" || { echo "Failed to cd to $dir in ${FUNCNAME[0]} @ line ${LINENO}" @@ -1304,22 +1361,24 @@ function favicon() { } end_func "Results are saved in hosts/favicontest.txt" ${FUNCNAME[0]} else - if [[ "$FAVICON" = false ]]; then + if [[ $FAVICON == false ]]; then printf "\n${yellow} ${FUNCNAME[0]} skipped in this mode or defined in reconftw.cfg ${reset}\n" elif [[ $domain =~ ^[0-9]+\.[0-9]+\.[0-9]+\.[0-9] ]]; then return else - if [[ "$FAVICON" = false ]]; then + if [[ $FAVICON == false ]]; then printf "\n${yellow} ${FUNCNAME[0]} skipped in this mode or defined in reconftw.cfg ${reset}\n" else printf "${yellow} ${FUNCNAME[0]} is already processed, to force executing ${FUNCNAME[0]} delete\n $called_fn_dir/.${FUNCNAME[0]} ${reset}\n\n" fi fi fi + spinny::stop } function portscan() { - if { [[ ! -f "$called_fn_dir/.${FUNCNAME[0]}" ]] || [[ "$DIFF" = true ]]; } && [[ "$PORTSCANNER" = true ]]; then + spinny::start + if { [[ ! -f "$called_fn_dir/.${FUNCNAME[0]}" ]] || [[ $DIFF == true ]]; } && [[ $PORTSCANNER == true ]]; then start_func ${FUNCNAME[0]} "Port scan" if ! [[ $domain =~ ^[0-9]+\.[0-9]+\.[0-9]+\.[0-9] ]]; then [ -s "subdomains/subdomains_dnsregs.json" ] && cat subdomains/subdomains_dnsregs.json | jq -r 'try . | "\(.host) \(.a[0])"' | anew -q .tmp/subs_ips.txt @@ -1333,11 +1392,11 @@ function portscan() { printf "${bblue}\n Resolved IP addresses (No CDN) ${reset}\n\n" [ -s ".tmp/ips_nocdn.txt" ] && cat .tmp/ips_nocdn.txt | sort printf "${bblue}\n Scanning ports... ${reset}\n\n" - if [[ "$PORTSCAN_PASSIVE" = true ]] && [[ ! -f "hosts/portscan_passive.txt" ]] && [[ -s ".tmp/ips_nocdn.txt" ]]; then + if [[ $PORTSCAN_PASSIVE == true ]] && [[ ! -f "hosts/portscan_passive.txt" ]] && [[ -s ".tmp/ips_nocdn.txt" ]]; then smap -iL .tmp/ips_nocdn.txt >hosts/portscan_passive.txt fi - if [[ "$PORTSCAN_ACTIVE" = true ]]; then - if [[ ! "$AXIOM" = true ]]; then + if [[ $PORTSCAN_ACTIVE == true ]]; then + if [[ $AXIOM != true ]]; then [ -s ".tmp/ips_nocdn.txt" ] && $SUDO nmap --top-ports 200 -sV -n --max-retries 2 -Pn --open --script vulners -iL .tmp/ips_nocdn.txt -oA hosts/portscan_active 2>>"$LOGFILE" >/dev/null else [ -s ".tmp/ips_nocdn.txt" ] && axiom-scan .tmp/ips_nocdn.txt -m nmapx --top-ports 200 -sV -n -Pn --open --max-retries 2 --script vulners -oA hosts/portscan_active $AXIOM_EXTRA_ARGS 2>>"$LOGFILE" >/dev/null @@ -1345,27 +1404,30 @@ function portscan() { fi end_func "Results are saved in hosts/portscan_[passive|active].txt" ${FUNCNAME[0]} else - if [[ "$PORTSCANNER" = false ]]; then + if [[ $PORTSCANNER == false ]]; then printf "\n${yellow} ${FUNCNAME[0]} skipped in this mode or defined in reconftw.cfg ${reset}\n" else printf "${yellow} ${FUNCNAME[0]} is already processed, to force executing ${FUNCNAME[0]} delete\n $called_fn_dir/.${FUNCNAME[0]} ${reset}\n\n" fi fi + spinny::stop } function cdnprovider() { - if { [[ ! -f "$called_fn_dir/.${FUNCNAME[0]}" ]] || [[ "$DIFF" = true ]]; } && [[ "$CDN_IP" = true ]]; then + spinny::start + if { [[ ! -f "$called_fn_dir/.${FUNCNAME[0]}" ]] || [[ $DIFF == true ]]; } && [[ $CDN_IP == true ]]; then start_func ${FUNCNAME[0]} "CDN provider check" [ -s "subdomains/subdomains_dnsregs.json" ] && cat subdomains/subdomains_dnsregs.json | jq -r 'try . | .a[]' | grep -aEiv "^(127|10|169\.154|172\.1[6789]|172\.2[0-9]|172\.3[01]|192\.168)\." 
| grep -oE "\b([0-9]{1,3}\.){3}[0-9]{1,3}\b" | sort -u >.tmp/ips_cdn.txt [ -s ".tmp/ips_cdn.txt" ] && cat .tmp/ips_cdn.txt | cdncheck -silent -resp -nc | anew -q $dir/hosts/cdn_providers.txt end_func "Results are saved in hosts/cdn_providers.txt" ${FUNCNAME[0]} else - if [[ "$CDN_IP" = false ]]; then + if [[ $CDN_IP == false ]]; then printf "\n${yellow} ${FUNCNAME[0]} skipped in this mode or defined in reconftw.cfg ${reset}\n" else printf "${yellow} ${FUNCNAME[0]} is already processed, to force executing ${FUNCNAME[0]} delete\n $called_fn_dir/.${FUNCNAME[0]} ${reset}\n\n" fi fi + spinny::stop } ############################################################################################################### @@ -1373,11 +1435,12 @@ function cdnprovider() { ############################################################################################################### function waf_checks() { - if { [[ ! -f "$called_fn_dir/.${FUNCNAME[0]}" ]] || [[ "$DIFF" = true ]]; } && [[ "$WAF_DETECTION" = true ]]; then + spinny::start + if { [[ ! -f "$called_fn_dir/.${FUNCNAME[0]}" ]] || [[ $DIFF == true ]]; } && [[ $WAF_DETECTION == true ]]; then start_func ${FUNCNAME[0]} "Website's WAF detection" [ ! -s ".tmp/webs_all.txt" ] && cat webs/webs.txt webs/webs_uncommon_ports.txt 2>/dev/null | anew -q .tmp/webs_all.txt if [[ -s ".tmp/webs_all.txt" ]]; then - if [[ ! "$AXIOM" = true ]]; then + if [[ $AXIOM != true ]]; then wafw00f -i .tmp/webs_all.txt -o .tmp/wafs.txt 2>>"$LOGFILE" >/dev/null else axiom-scan .tmp/webs_all.txt -m wafw00f -o .tmp/wafs.txt $AXIOM_EXTRA_ARGS 2>>"$LOGFILE" >/dev/null @@ -1394,22 +1457,24 @@ function waf_checks() { end_func "No websites to scan" ${FUNCNAME[0]} fi else - if [[ "$WAF_DETECTION" = false ]]; then + if [[ $WAF_DETECTION == false ]]; then printf "\n${yellow} ${FUNCNAME[0]} skipped in this mode or defined in reconftw.cfg ${reset}\n" else printf "${yellow} ${FUNCNAME[0]} is already processed, to force executing ${FUNCNAME[0]} delete\n $called_fn_dir/.${FUNCNAME[0]} ${reset}\n\n" fi fi + spinny::stop } function nuclei_check() { - if { [[ ! -f "$called_fn_dir/.${FUNCNAME[0]}" ]] || [[ "$DIFF" = true ]]; } && [[ "$NUCLEICHECK" = true ]]; then + spinny::start + if { [[ ! -f "$called_fn_dir/.${FUNCNAME[0]}" ]] || [[ $DIFF == true ]]; } && [[ $NUCLEICHECK == true ]]; then start_func ${FUNCNAME[0]} "Templates based web scanner" nuclei -update 2>>"$LOGFILE" >/dev/null mkdir -p nuclei_output [ ! -s ".tmp/webs_all.txt" ] && cat webs/webs.txt webs/webs_uncommon_ports.txt 2>/dev/null | anew -q .tmp/webs_all.txt [ ! -s ".tmp/webs_subs.txt" ] && cat subdomains/subdomains.txt .tmp/webs_all.txt 2>>"$LOGFILE" | anew -q .tmp/webs_subs.txt - if [[ ! "$AXIOM" = true ]]; then # avoid globbing (expansion of *). + if [[ $AXIOM != true ]]; then # avoid globbing (expansion of *). IFS=',' read -ra severity_array <<<"$NUCLEI_SEVERITY" for crit in "${severity_array[@]}"; do printf "${yellow}\n Running : Nuclei $crit ${reset}\n\n" @@ -1429,21 +1494,23 @@ function nuclei_check() { fi end_func "Results are saved in $domain/nuclei_output folder" ${FUNCNAME[0]} else - if [[ "$NUCLEICHECK" = false ]]; then + if [[ $NUCLEICHECK == false ]]; then printf "\n${yellow} ${FUNCNAME[0]} skipped in this mode or defined in reconftw.cfg ${reset}\n" else printf "${yellow} ${FUNCNAME[0]} is already processed, to force executing ${FUNCNAME[0]} delete\n $called_fn_dir/.${FUNCNAME[0]} ${reset}\n\n" fi fi + spinny::stop } function fuzz() { - if { [[ ! 
-f "$called_fn_dir/.${FUNCNAME[0]}" ]] || [[ "$DIFF" = true ]]; } && [[ "$FUZZ" = true ]]; then + spinny::start + if { [[ ! -f "$called_fn_dir/.${FUNCNAME[0]}" ]] || [[ $DIFF == true ]]; } && [[ $FUZZ == true ]]; then start_func ${FUNCNAME[0]} "Web directory fuzzing" [ ! -s ".tmp/webs_all.txt" ] && cat webs/webs.txt webs/webs_uncommon_ports.txt 2>/dev/null | anew -q .tmp/webs_all.txt if [[ -s ".tmp/webs_all.txt" ]]; then mkdir -p $dir/fuzzing $dir/.tmp/fuzzing - if [[ ! "$AXIOM" = true ]]; then + if [[ $AXIOM != true ]]; then interlace -tL .tmp/webs_all.txt -threads ${INTERLACE_THREADS} -c "ffuf ${FFUF_FLAGS} -t ${FFUF_THREADS} -rate ${FFUF_RATELIMIT} -H \"${HEADER}\" -w ${fuzz_wordlist} -maxtime ${FFUF_MAXTIME} -u _target_/FUZZ -o _output_/_cleantarget_.json" -o $dir/.tmp/fuzzing 2>>"$LOGFILE" >/dev/null for sub in $(cat .tmp/webs_all.txt); do sub_out=$(echo $sub | sed -e 's|^[^/]*//||' -e 's|/.*$||') @@ -1466,16 +1533,18 @@ function fuzz() { end_func "No $domain/web/webs.txts file found, fuzzing skipped " ${FUNCNAME[0]} fi else - if [[ "$FUZZ" = false ]]; then + if [[ $FUZZ == false ]]; then printf "\n${yellow} ${FUNCNAME[0]} skipped in this mode or defined in reconftw.cfg ${reset}\n" else printf "${yellow} ${FUNCNAME[0]} is already processed, to force executing ${FUNCNAME[0]} delete\n $called_fn_dir/.${FUNCNAME[0]} ${reset}\n\n" fi fi + spinny::stop } function cms_scanner() { - if { [[ ! -f "$called_fn_dir/.${FUNCNAME[0]}" ]] || [[ "$DIFF" = true ]]; } && [[ "$CMS_SCANNER" = true ]]; then + spinny::start + if { [[ ! -f "$called_fn_dir/.${FUNCNAME[0]}" ]] || [[ $DIFF == true ]]; } && [[ $CMS_SCANNER == true ]]; then start_func ${FUNCNAME[0]} "CMS Scanner" mkdir -p $dir/cms && rm -rf $dir/cms/* [ ! -s ".tmp/webs_all.txt" ] && cat webs/webs.txt webs/webs_uncommon_ports.txt 2>/dev/null | anew -q .tmp/webs_all.txt @@ -1483,11 +1552,11 @@ function cms_scanner() { tr '\n' ',' <.tmp/webs_all.txt >.tmp/cms.txt 2>>"$LOGFILE" timeout -k 1m ${CMSSCAN_TIMEOUT}s python3 ${tools}/CMSeeK/cmseek.py -l .tmp/cms.txt --batch -r &>>"$LOGFILE" exit_status=$? - if [[ "${exit_status}" -eq 125 ]]; then + if [[ ${exit_status} -eq 125 ]]; then echo "TIMEOUT cmseek.py - investigate manually for $dir" >>"$LOGFILE" end_func "TIMEOUT cmseek.py - investigate manually for $dir" ${FUNCNAME[0]} return - elif [[ "${exit_status}" -ne 0 ]]; then + elif [[ ${exit_status} -ne 0 ]]; then echo "ERROR cmseek.py - investigate manually for $dir" >>"$LOGFILE" end_func "ERROR cmseek.py - investigate manually for $dir" ${FUNCNAME[0]} return @@ -1495,7 +1564,7 @@ function cms_scanner() { for sub in $(cat .tmp/webs_all.txt); do sub_out=$(echo $sub | sed -e 's|^[^/]*//||' -e 's|/.*$||') cms_id=$(cat ${tools}/CMSeeK/Result/${sub_out}/cms.json 2>/dev/null | jq -r 'try .cms_id') - if [[ -z "$cms_id" ]]; then + if [[ -z $cms_id ]]; then rm -rf ${tools}/CMSeeK/Result/${sub_out} else mv -f ${tools}/CMSeeK/Result/${sub_out} $dir/cms/ 2>>"$LOGFILE" @@ -1506,37 +1575,39 @@ function cms_scanner() { end_func "No $domain/web/webs.txts file found, cms scanner skipped" ${FUNCNAME[0]} fi else - if [[ "$CMS_SCANNER" = false ]]; then + if [[ $CMS_SCANNER == false ]]; then printf "\n${yellow} ${FUNCNAME[0]} skipped in this mode or defined in reconftw.cfg ${reset}\n" else printf "${yellow} ${FUNCNAME[0]} is already processed, to force executing ${FUNCNAME[0]} delete\n $called_fn_dir/.${FUNCNAME[0]} ${reset}\n\n" fi fi + spinny::stop } function urlchecks() { - if { [[ ! 
-f "$called_fn_dir/.${FUNCNAME[0]}" ]] || [[ "$DIFF" = true ]]; } && [[ "$URL_CHECK" = true ]]; then + spinny::start + if { [[ ! -f "$called_fn_dir/.${FUNCNAME[0]}" ]] || [[ $DIFF == true ]]; } && [[ $URL_CHECK == true ]]; then start_func ${FUNCNAME[0]} "URL Extraction" mkdir -p js [ ! -s ".tmp/webs_all.txt" ] && cat webs/webs.txt webs/webs_uncommon_ports.txt 2>/dev/null | anew -q .tmp/webs_all.txt if [[ -s ".tmp/webs_all.txt" ]]; then - if [[ ! "$AXIOM" = true ]]; then - if [[ "$URL_CHECK_PASSIVE" = true ]]; then - if [[ "$DEEP" = true ]]; then + if [[ $AXIOM != true ]]; then + if [[ $URL_CHECK_PASSIVE == true ]]; then + if [[ $DEEP == true ]]; then cat .tmp/webs_all.txt | unfurl -u domains >.tmp/waymore_input.txt python3 ${tools}/waymore/waymore.py -i .tmp/waymore_input.txt -mode U -f -oU .tmp/url_extract_tmp.txt 2>>"$LOGFILE" >/dev/null else cat .tmp/webs_all.txt | gau --threads $GAU_THREADS | anew -q .tmp/url_extract_tmp.txt fi - if [[ -s "${GITHUB_TOKENS}" ]]; then + if [[ -s ${GITHUB_TOKENS} ]]; then github-endpoints -q -k -d $domain -t ${GITHUB_TOKENS} -o .tmp/github-endpoints.txt 2>>"$LOGFILE" >/dev/null [ -s ".tmp/github-endpoints.txt" ] && cat .tmp/github-endpoints.txt | anew -q .tmp/url_extract_tmp.txt fi fi diff_webs=$(diff <(sort -u .tmp/probed_tmp.txt 2>>"$LOGFILE") <(sort -u .tmp/webs_all.txt 2>>"$LOGFILE") | wc -l) if [[ $diff_webs != "0" ]] || [[ ! -s ".tmp/katana.txt" ]]; then - if [[ "$URL_CHECK_ACTIVE" = true ]]; then - if [[ "$DEEP" = true ]]; then + if [[ $URL_CHECK_ACTIVE == true ]]; then + if [[ $DEEP == true ]]; then katana -silent -list .tmp/webs_all.txt -jc -kf all -c $KATANA_THREADS -d 3 -fs rdn -o .tmp/katana.txt 2>>"$LOGFILE" >/dev/null else katana -silent -list .tmp/webs_all.txt -jc -kf all -c $KATANA_THREADS -d 2 -fs rdn -o .tmp/katana.txt 2>>"$LOGFILE" >/dev/null @@ -1544,22 +1615,22 @@ function urlchecks() { fi fi else - if [[ "$URL_CHECK_PASSIVE" = true ]]; then - if [[ "$DEEP" = true ]]; then + if [[ $URL_CHECK_PASSIVE == true ]]; then + if [[ $DEEP == true ]]; then cat .tmp/webs_all.txt | unfurl -u domains >.tmp/waymore_input.txt axiom-scan .tmp/waymore_input.txt -m waymore -o .tmp/url_extract_tmp.txt $AXIOM_EXTRA_ARGS 2>>"$LOGFILE" >/dev/null else axiom-scan .tmp/webs_all.txt -m gau -o .tmp/url_extract_tmp.txt $AXIOM_EXTRA_ARGS 2>>"$LOGFILE" >/dev/null fi - if [[ -s "${GITHUB_TOKENS}" ]]; then + if [[ -s ${GITHUB_TOKENS} ]]; then github-endpoints -q -k -d $domain -t ${GITHUB_TOKENS} -o .tmp/github-endpoints.txt 2>>"$LOGFILE" >/dev/null [ -s ".tmp/github-endpoints.txt" ] && cat .tmp/github-endpoints.txt | anew -q .tmp/url_extract_tmp.txt fi fi diff_webs=$(diff <(sort -u .tmp/probed_tmp.txt) <(sort -u .tmp/webs_all.txt) | wc -l) if [[ $diff_webs != "0" ]] || [[ ! 
-s ".tmp/katana.txt" ]]; then - if [[ "$URL_CHECK_ACTIVE" = true ]]; then - if [[ "$DEEP" = true ]]; then + if [[ $URL_CHECK_ACTIVE == true ]]; then + if [[ $DEEP == true ]]; then axiom-scan .tmp/webs_all.txt -m katana -jc -kf all -d 3 -fs rdn -fs rdn -o .tmp/katana.txt $AXIOM_EXTRA_ARGS 2>>"$LOGFILE" >/dev/null else axiom-scan .tmp/webs_all.txt -m katana -jc -kf all -d 2 -fs rdn -fs rdn -o .tmp/katana.txt $AXIOM_EXTRA_ARGS 2>>"$LOGFILE" >/dev/null @@ -1570,7 +1641,7 @@ function urlchecks() { [ -s ".tmp/katana.txt" ] && sed -i '/^.\{2048\}./d' .tmp/katana.txt [ -s ".tmp/katana.txt" ] && cat .tmp/katana.txt | anew -q .tmp/url_extract_tmp.txt [ -s ".tmp/url_extract_tmp.txt" ] && cat .tmp/url_extract_tmp.txt | grep "${domain}" | grep -E '^((http|https):\/\/)?([a-zA-Z0-9]([a-zA-Z0-9\-]*[a-zA-Z0-9])?\.)+[a-zA-Z]{1,}(\/.*)?$' | grep -aEi "\.(js)" | anew -q .tmp/url_extract_js.txt - if [[ "$DEEP" = true ]]; then + if [[ $DEEP == true ]]; then [ -s ".tmp/url_extract_js.txt" ] && interlace -tL .tmp/url_extract_js.txt -threads 10 -c "python3 ${tools}/JSA/jsa.py -f target | anew -q .tmp/url_extract_tmp.txt" &>/dev/null fi [ -s ".tmp/url_extract_tmp.txt" ] && cat .tmp/url_extract_tmp.txt | grep "${domain}" | grep -E '^((http|https):\/\/)?([a-zA-Z0-9]([a-zA-Z0-9\-]*[a-zA-Z0-9])?\.)+[a-zA-Z]{1,}(\/.*)?$' | grep "=" | qsreplace -a 2>>"$LOGFILE" | grep -aEiv "\.(eot|jpg|jpeg|gif|css|tif|tiff|png|ttf|otf|woff|woff2|ico|pdf|svg|txt|js)$" | anew -q .tmp/url_extract_tmp2.txt @@ -1578,22 +1649,24 @@ function urlchecks() { NUMOFLINES=$(cat .tmp/url_extract_uddup.txt 2>>"$LOGFILE" | anew webs/url_extract.txt | sed '/^$/d' | wc -l) notification "${NUMOFLINES} new urls with params" info end_func "Results are saved in $domain/webs/url_extract.txt" ${FUNCNAME[0]} - if [[ "$PROXY" = true ]] && [[ -n "$proxy_url" ]] && [[ $(cat webs/url_extract.txt | wc -l) -le $DEEP_LIMIT2 ]]; then + if [[ $PROXY == true ]] && [[ -n $proxy_url ]] && [[ $(cat webs/url_extract.txt | wc -l) -le $DEEP_LIMIT2 ]]; then notification "Sending urls to proxy" info ffuf -mc all -w webs/url_extract.txt -u FUZZ -replay-proxy $proxy_url 2>>"$LOGFILE" >/dev/null fi fi else - if [[ "$URL_CHECK" = false ]]; then + if [[ $URL_CHECK == false ]]; then printf "\n${yellow} ${FUNCNAME[0]} skipped in this mode or defined in reconftw.cfg ${reset}\n" else printf "${yellow} ${FUNCNAME[0]} is already processed, to force executing ${FUNCNAME[0]} delete\n $called_fn_dir/.${FUNCNAME[0]} ${reset}\n\n" fi fi + spinny::stop } function url_gf() { - if { [[ ! -f "$called_fn_dir/.${FUNCNAME[0]}" ]] || [[ "$DIFF" = true ]]; } && [[ "$URL_GF" = true ]]; then + spinny::start + if { [[ ! -f "$called_fn_dir/.${FUNCNAME[0]}" ]] || [[ $DIFF == true ]]; } && [[ $URL_GF == true ]]; then start_func ${FUNCNAME[0]} "Vulnerable Pattern Search" mkdir -p gf if [[ -s "webs/url_extract.txt" ]]; then @@ -1610,16 +1683,18 @@ function url_gf() { fi end_func "Results are saved in $domain/gf folder" ${FUNCNAME[0]} else - if [[ "$URL_GF" = false ]]; then + if [[ $URL_GF == false ]]; then printf "\n${yellow} ${FUNCNAME[0]} skipped in this mode or defined in reconftw.cfg ${reset}\n" else printf "${yellow} ${FUNCNAME[0]} is already processed, to force executing ${FUNCNAME[0]} delete\n $called_fn_dir/.${FUNCNAME[0]} ${reset}\n\n" fi fi + spinny::stop } function url_ext() { - if { [[ ! -f "$called_fn_dir/.${FUNCNAME[0]}" ]] || [[ "$DIFF" = true ]]; } && [[ "$URL_EXT" = true ]]; then + spinny::start + if { [[ ! 
-f "$called_fn_dir/.${FUNCNAME[0]}" ]] || [[ $DIFF == true ]]; } && [[ $URL_EXT == true ]]; then if [[ -s ".tmp/url_extract_tmp.txt" ]]; then start_func ${FUNCNAME[0]} "Urls by extension" ext=("7z" "achee" "action" "adr" "apk" "arj" "ascx" "asmx" "asp" "aspx" "axd" "backup" "bak" "bat" "bin" "bkf" "bkp" "bok" "cab" "cer" "cfg" "cfm" "cfml" "cgi" "cnf" "conf" "config" "cpl" "crt" "csr" "csv" "dat" "db" "dbf" "deb" "dmg" "dmp" "doc" "docx" "drv" "email" "eml" "emlx" "env" "exe" "gadget" "gz" "html" "ica" "inf" "ini" "iso" "jar" "java" "jhtml" "json" "jsp" "key" "log" "lst" "mai" "mbox" "mbx" "md" "mdb" "msg" "msi" "nsf" "ods" "oft" "old" "ora" "ost" "pac" "passwd" "pcf" "pdf" "pem" "pgp" "php" "php3" "php4" "php5" "phtm" "phtml" "pkg" "pl" "plist" "pst" "pwd" "py" "rar" "rb" "rdp" "reg" "rpm" "rtf" "sav" "sh" "shtm" "shtml" "skr" "sql" "swf" "sys" "tar" "tar.gz" "tmp" "toast" "tpl" "txt" "url" "vcd" "vcf" "wml" "wpd" "wsdl" "wsf" "xls" "xlsm" "xlsx" "xml" "xsd" "yaml" "yml" "z" "zip") @@ -1634,20 +1709,22 @@ function url_ext() { end_func "Results are saved in $domain/webs/urls_by_ext.txt" ${FUNCNAME[0]} fi else - if [[ "$URL_EXT" = false ]]; then + if [[ $URL_EXT == false ]]; then printf "\n${yellow} ${FUNCNAME[0]} skipped in this mode or defined in reconftw.cfg ${reset}\n" else printf "${yellow} ${FUNCNAME[0]} is already processed, to force executing ${FUNCNAME[0]} delete\n $called_fn_dir/.${FUNCNAME[0]} ${reset}\n\n" fi fi + spinny::stop } function jschecks() { - if { [[ ! -f "$called_fn_dir/.${FUNCNAME[0]}" ]] || [[ "$DIFF" = true ]]; } && [[ "$JSCHECKS" = true ]]; then + spinny::start + if { [[ ! -f "$called_fn_dir/.${FUNCNAME[0]}" ]] || [[ $DIFF == true ]]; } && [[ $JSCHECKS == true ]]; then start_func ${FUNCNAME[0]} "Javascript Scan" if [[ -s ".tmp/url_extract_js.txt" ]]; then printf "${yellow} Running : Fetching Urls 1/5${reset}\n" - if [[ ! "$AXIOM" = true ]]; then + if [[ $AXIOM != true ]]; then cat .tmp/url_extract_js.txt | subjs -ua "Mozilla/5.0 (X11; Linux x86_64; rv:72.0) Gecko/20100101 Firefox/72.0" -c 40 | grep "$domain" | grep -E '^((http|https):\/\/)?([a-zA-Z0-9]([a-zA-Z0-9\-]*[a-zA-Z0-9])?\.)+[a-zA-Z]{1,}(\/.*)?$' | anew -q .tmp/subjslinks.txt else axiom-scan .tmp/url_extract_js.txt -m subjs -o .tmp/subjslinks.txt $AXIOM_EXTRA_ARGS 2>>"$LOGFILE" >/dev/null @@ -1656,7 +1733,7 @@ function jschecks() { [ -s ".tmp/subjslinks.txt" ] && cat .tmp/subjslinks.txt | grep -iE "\.js($|\?)" | anew -q .tmp/url_extract_js.txt cat .tmp/url_extract_js.txt | python3 ${tools}/urless/urless/urless.py | anew -q js/url_extract_js.txt 2>>"$LOGFILE" >/dev/null printf "${yellow} Running : Resolving JS Urls 2/5${reset}\n" - if [[ ! "$AXIOM" = true ]]; then + if [[ $AXIOM != true ]]; then [ -s "js/url_extract_js.txt" ] && cat js/url_extract_js.txt | httpx -follow-redirects -random-agent -silent -timeout $HTTPX_TIMEOUT -threads $HTTPX_THREADS -rl $HTTPX_RATELIMIT -status-code -content-type -retries 2 -no-color | grep "[200]" | grep "javascript" | cut -d ' ' -f1 | anew -q js/js_livelinks.txt else [ -s "js/url_extract_js.txt" ] && axiom-scan js/url_extract_js.txt -m httpx -follow-host-redirects -H \"${HEADER}\" -status-code -threads $HTTPX_THREADS -rl $HTTPX_RATELIMIT -timeout $HTTPX_TIMEOUT -silent -content-type -retries 2 -no-color -o .tmp/js_livelinks.txt $AXIOM_EXTRA_ARGS 2>>"$LOGFILE" >/dev/null @@ -1670,7 +1747,7 @@ function jschecks() { cat .tmp/js_endpoints.txt | anew -q js/js_endpoints.txt fi printf "${yellow} Running : Gathering secrets 4/5${reset}\n" - if [[ ! 
"$AXIOM" = true ]]; then + if [[ $AXIOM != true ]]; then [ -s "js/js_livelinks.txt" ] && cat js/js_livelinks.txt | Mantra -ua ${HEADER} -s | anew -q js/js_secrets.txt else [ -s "js/js_livelinks.txt" ] && axiom-scan js/js_livelinks.txt -m mantra -ua \"${HEADER}\" -s -o js/js_secrets.txt $AXIOM_EXTRA_ARGS &>/dev/null @@ -1683,16 +1760,18 @@ function jschecks() { end_func "No JS urls found for $domain, function skipped" ${FUNCNAME[0]} fi else - if [[ "$JSCHECKS" = false ]]; then + if [[ $JSCHECKS == false ]]; then printf "\n${yellow} ${FUNCNAME[0]} skipped in this mode or defined in reconftw.cfg ${reset}\n" else printf "${yellow} ${FUNCNAME[0]} is already processed, to force executing ${FUNCNAME[0]} delete\n $called_fn_dir/.${FUNCNAME[0]} ${reset}\n\n" fi fi + spinny::stop } function wordlist_gen() { - if { [[ ! -f "$called_fn_dir/.${FUNCNAME[0]}" ]] || [[ "$DIFF" = true ]]; } && [[ "$WORDLIST" = true ]]; then + spinny::start + if { [[ ! -f "$called_fn_dir/.${FUNCNAME[0]}" ]] || [[ $DIFF == true ]]; } && [[ $WORDLIST == true ]]; then start_func ${FUNCNAME[0]} "Wordlist generation" if [[ -s ".tmp/url_extract_tmp.txt" ]]; then cat .tmp/url_extract_tmp.txt | unfurl -u keys 2>>"$LOGFILE" | sed 's/[][]//g' | sed 's/[#]//g' | sed 's/[}{]//g' | anew -q webs/dict_params.txt @@ -1702,21 +1781,23 @@ function wordlist_gen() { [ -s ".tmp/js_endpoints.txt" ] && cat .tmp/js_endpoints.txt | unfurl -u format %s://%d%p 2>>"$LOGFILE" | anew -q webs/all_paths.txt [ -s ".tmp/url_extract_tmp.txt" ] && cat .tmp/url_extract_tmp.txt | unfurl -u format %s://%d%p 2>>"$LOGFILE" | anew -q webs/all_paths.txt end_func "Results are saved in $domain/webs/dict_[words|paths].txt" ${FUNCNAME[0]} - if [[ "$PROXY" = true ]] && [[ -n "$proxy_url" ]] && [[ $(cat webs/all_paths.txt | wc -l) -le $DEEP_LIMIT2 ]]; then + if [[ $PROXY == true ]] && [[ -n $proxy_url ]] && [[ $(cat webs/all_paths.txt | wc -l) -le $DEEP_LIMIT2 ]]; then notification "Sending urls to proxy" info ffuf -mc all -w webs/all_paths.txt -u FUZZ -replay-proxy $proxy_url 2>>"$LOGFILE" >/dev/null fi else - if [[ "$WORDLIST" = false ]]; then + if [[ $WORDLIST == false ]]; then printf "\n${yellow} ${FUNCNAME[0]} skipped in this mode or defined in reconftw.cfg ${reset}\n" else printf "${yellow} ${FUNCNAME[0]} is already processed, to force executing ${FUNCNAME[0]} delete\n $called_fn_dir/.${FUNCNAME[0]} ${reset}\n\n" fi fi + spinny::stop } function wordlist_gen_roboxtractor() { - if { [[ ! -f "$called_fn_dir/.${FUNCNAME[0]}" ]] || [[ "$DIFF" = true ]]; } && [[ "$ROBOTSWORDLIST" = true ]]; then + spinny::start + if { [[ ! -f "$called_fn_dir/.${FUNCNAME[0]}" ]] || [[ $DIFF == true ]]; } && [[ $ROBOTSWORDLIST == true ]]; then start_func ${FUNCNAME[0]} "Robots wordlist generation" [ ! -s ".tmp/webs_all.txt" ] && cat webs/webs.txt webs/webs_uncommon_ports.txt 2>/dev/null | anew -q .tmp/webs_all.txt if [[ -s ".tmp/webs_all.txt" ]]; then @@ -1724,27 +1805,30 @@ function wordlist_gen_roboxtractor() { fi end_func "Results are saved in $domain/webs/robots_wordlist.txt" ${FUNCNAME[0]} else - if [[ "$ROBOTSWORDLIST" = false ]]; then + if [[ $ROBOTSWORDLIST == false ]]; then printf "\n${yellow} ${FUNCNAME[0]} skipped in this mode or defined in reconftw.cfg ${reset}\n" else printf "${yellow} ${FUNCNAME[0]} is already processed, to force executing ${FUNCNAME[0]} delete\n $called_fn_dir/.${FUNCNAME[0]} ${reset}\n\n" fi fi + spinny::stop } function password_dict() { - if { [[ ! 
-f "$called_fn_dir/.${FUNCNAME[0]}" ]] || [[ "$DIFF" = true ]]; } && [[ "$PASSWORD_DICT" = true ]]; then + spinny::start + if { [[ ! -f "$called_fn_dir/.${FUNCNAME[0]}" ]] || [[ $DIFF == true ]]; } && [[ $PASSWORD_DICT == true ]]; then start_func ${FUNCNAME[0]} "Password dictionary generation" word=${domain%%.*} python3 ${tools}/pydictor/pydictor.py -extend $word --leet 0 1 2 11 21 --len ${PASSWORD_MIN_LENGTH} ${PASSWORD_MAX_LENGTH} -o webs/password_dict.txt 2>>"$LOGFILE" >/dev/null end_func "Results are saved in $domain/webs/password_dict.txt" ${FUNCNAME[0]} else - if [[ "$PASSWORD_DICT" = false ]]; then + if [[ $PASSWORD_DICT == false ]]; then printf "\n${yellow} ${FUNCNAME[0]} skipped in this mode or defined in reconftw.cfg ${reset}\n" else printf "${yellow} ${FUNCNAME[0]} is already processed, to force executing ${FUNCNAME[0]} delete\n $called_fn_dir/.${FUNCNAME[0]} ${reset}\n\n" fi fi + spinny::stop } ############################################################################################################### @@ -1752,12 +1836,13 @@ function password_dict() { ############################################################################################################### function brokenLinks() { - if { [[ ! -f "$called_fn_dir/.${FUNCNAME[0]}" ]] || [[ "$DIFF" = true ]]; } && [[ "$BROKENLINKS" = true ]]; then + spinny::start + if { [[ ! -f "$called_fn_dir/.${FUNCNAME[0]}" ]] || [[ $DIFF == true ]]; } && [[ $BROKENLINKS == true ]]; then start_func ${FUNCNAME[0]} "Broken links checks" [ ! -s ".tmp/webs_all.txt" ] && cat webs/webs.txt webs/webs_uncommon_ports.txt 2>/dev/null | anew -q .tmp/webs_all.txt - if [[ ! "$AXIOM" = true ]]; then + if [[ $AXIOM != true ]]; then if [[ ! -s ".tmp/katana.txt" ]]; then - if [[ "$DEEP" = true ]]; then + if [[ $DEEP == true ]]; then [ -s ".tmp/webs_all.txt" ] && katana -silent -list .tmp/webs_all.txt -jc -kf all -c $KATANA_THREADS -d 3 -o .tmp/katana.txt 2>>"$LOGFILE" >/dev/null else [ -s ".tmp/webs_all.txt" ] && katana -silent -list .tmp/webs_all.txt -jc -kf all -c $KATANA_THREADS -d 2 -o .tmp/katana.txt 2>>"$LOGFILE" >/dev/null @@ -1766,7 +1851,7 @@ function brokenLinks() { [ -s ".tmp/katana.txt" ] && sed -i '/^.\{2048\}./d' .tmp/katana.txt else if [[ ! -s ".tmp/katana.txt" ]]; then - if [[ "$DEEP" = true ]]; then + if [[ $DEEP == true ]]; then [ -s ".tmp/webs_all.txt" ] && axiom-scan .tmp/webs_all.txt -m katana -jc -kf all -d 3 -o .tmp/katana.txt $AXIOM_EXTRA_ARGS 2>>"$LOGFILE" >/dev/null else [ -s ".tmp/webs_all.txt" ] && axiom-scan .tmp/webs_all.txt -m katana -jc -kf all -d 2 -o .tmp/katana.txt $AXIOM_EXTRA_ARGS 2>>"$LOGFILE" >/dev/null @@ -1779,21 +1864,23 @@ function brokenLinks() { notification "${NUMOFLINES} new broken links found" info end_func "Results are saved in vulns/brokenLinks.txt" ${FUNCNAME[0]} else - if [[ "$BROKENLINKS" = false ]]; then + if [[ $BROKENLINKS == false ]]; then printf "\n${yellow} ${FUNCNAME[0]} skipped in this mode or defined in reconftw.cfg ${reset}\n" else printf "${yellow} ${FUNCNAME[0]} is already processed, to force executing ${FUNCNAME[0]} delete\n $called_fn_dir/.${FUNCNAME[0]} ${reset}\n\n" fi fi + spinny::stop } function xss() { - if { [[ ! -f "$called_fn_dir/.${FUNCNAME[0]}" ]] || [[ "$DIFF" = true ]]; } && [[ "$XSS" = true ]] && [[ -s "gf/xss.txt" ]]; then + spinny::start + if { [[ ! 
-f "$called_fn_dir/.${FUNCNAME[0]}" ]] || [[ $DIFF == true ]]; } && [[ $XSS == true ]] && [[ -s "gf/xss.txt" ]]; then start_func ${FUNCNAME[0]} "XSS Analysis" [ -s "gf/xss.txt" ] && cat gf/xss.txt | qsreplace FUZZ | sed '/FUZZ/!d' | Gxss -c 100 -p Xss | qsreplace FUZZ | sed '/FUZZ/!d' | anew -q .tmp/xss_reflected.txt - if [[ ! "$AXIOM" = true ]]; then - if [[ "$DEEP" = true ]]; then - if [[ -n "$XSS_SERVER" ]]; then + if [[ $AXIOM != true ]]; then + if [[ $DEEP == true ]]; then + if [[ -n $XSS_SERVER ]]; then [ -s ".tmp/xss_reflected.txt" ] && cat .tmp/xss_reflected.txt | dalfox pipe --silence --no-color --no-spinner --only-poc r --ignore-return 302,404,403 --skip-bav -b ${XSS_SERVER} -w $DALFOX_THREADS 2>>"$LOGFILE" | anew -q vulns/xss.txt else printf "${yellow}\n No XSS_SERVER defined, blind xss skipped\n\n" @@ -1801,7 +1888,7 @@ function xss() { fi else if [[ $(cat .tmp/xss_reflected.txt | wc -l) -le $DEEP_LIMIT ]]; then - if [[ -n "$XSS_SERVER" ]]; then + if [[ -n $XSS_SERVER ]]; then cat .tmp/xss_reflected.txt | dalfox pipe --silence --no-color --no-spinner --skip-bav --skip-mining-dom --skip-mining-dict --only-poc r --ignore-return 302,404,403 -b ${XSS_SERVER} -w $DALFOX_THREADS 2>>"$LOGFILE" | anew -q vulns/xss.txt else printf "${yellow}\n No XSS_SERVER defined, blind xss skipped\n\n" @@ -1812,8 +1899,8 @@ function xss() { fi fi else - if [[ "$DEEP" = true ]]; then - if [[ -n "$XSS_SERVER" ]]; then + if [[ $DEEP == true ]]; then + if [[ -n $XSS_SERVER ]]; then [ -s ".tmp/xss_reflected.txt" ] && axiom-scan .tmp/xss_reflected.txt -m dalfox --skip-bav -b ${XSS_SERVER} -w $DALFOX_THREADS -o vulns/xss.txt $AXIOM_EXTRA_ARGS 2>>"$LOGFILE" >/dev/null else printf "${yellow}\n No XSS_SERVER defined, blind xss skipped\n\n" @@ -1821,7 +1908,7 @@ function xss() { fi else if [[ $(cat .tmp/xss_reflected.txt | wc -l) -le $DEEP_LIMIT ]]; then - if [[ -n "$XSS_SERVER" ]]; then + if [[ -n $XSS_SERVER ]]; then axiom-scan .tmp/xss_reflected.txt -m dalfox --skip-bav --skip-grepping --skip-mining-all --skip-mining-dict -b ${XSS_SERVER} -w $DALFOX_THREADS -o vulns/xss.txt $AXIOM_EXTRA_ARGS 2>>"$LOGFILE" >/dev/null else printf "${yellow}\n No XSS_SERVER defined, blind xss skipped\n\n" @@ -1834,7 +1921,7 @@ function xss() { fi end_func "Results are saved in vulns/xss.txt" ${FUNCNAME[0]} else - if [[ "$XSS" = false ]]; then + if [[ $XSS == false ]]; then printf "\n${yellow} ${FUNCNAME[0]} skipped in this mode or defined in reconftw.cfg ${reset}\n" elif [[ ! -s "gf/xss.txt" ]]; then printf "\n${yellow} ${FUNCNAME[0]} No URLs potentially vulnerables to XSS ${reset}\n\n" @@ -1842,27 +1929,31 @@ function xss() { printf "${yellow} ${FUNCNAME[0]} is already processed, to force executing ${FUNCNAME[0]} delete\n $called_fn_dir/.${FUNCNAME[0]} ${reset}\n\n" fi fi + spinny::stop } function cors() { - if { [[ ! -f "$called_fn_dir/.${FUNCNAME[0]}" ]] || [[ "$DIFF" = true ]]; } && [[ "$CORS" = true ]]; then + spinny::start + if { [[ ! -f "$called_fn_dir/.${FUNCNAME[0]}" ]] || [[ $DIFF == true ]]; } && [[ $CORS == true ]]; then start_func ${FUNCNAME[0]} "CORS Scan" [ ! 
-s ".tmp/webs_all.txt" ] && cat webs/webs.txt webs/webs_uncommon_ports.txt 2>/dev/null | anew -q .tmp/webs_all.txt [ -s ".tmp/webs_all.txt" ] && python3 ${tools}/Corsy/corsy.py -i .tmp/webs_all.txt -o vulns/cors.txt 2>>"$LOGFILE" >/dev/null end_func "Results are saved in vulns/cors.txt" ${FUNCNAME[0]} else - if [[ "$CORS" = false ]]; then + if [[ $CORS == false ]]; then printf "\n${yellow} ${FUNCNAME[0]} skipped in this mode or defined in reconftw.cfg ${reset}\n" else printf "${yellow} ${FUNCNAME[0]} is already processed, to force executing ${FUNCNAME[0]} delete\n $called_fn_dir/.${FUNCNAME[0]} ${reset}\n\n" fi fi + spinny::stop } function open_redirect() { - if { [[ ! -f "$called_fn_dir/.${FUNCNAME[0]}" ]] || [[ "$DIFF" = true ]]; } && [[ "$OPEN_REDIRECT" = true ]] && [[ -s "gf/redirect.txt" ]]; then + spinny::start + if { [[ ! -f "$called_fn_dir/.${FUNCNAME[0]}" ]] || [[ $DIFF == true ]]; } && [[ $OPEN_REDIRECT == true ]] && [[ -s "gf/redirect.txt" ]]; then start_func ${FUNCNAME[0]} "Open redirects checks" - if [[ "$DEEP" = true ]] || [[ $(cat gf/redirect.txt | wc -l) -le $DEEP_LIMIT ]]; then + if [[ $DEEP == true ]] || [[ $(cat gf/redirect.txt | wc -l) -le $DEEP_LIMIT ]]; then cat gf/redirect.txt | qsreplace FUZZ | sed '/FUZZ/!d' | anew -q .tmp/tmp_redirect.txt python3 ${tools}/Oralyzer/oralyzer.py -l .tmp/tmp_redirect.txt -p ${tools}/Oralyzer/payloads.txt >vulns/redirect.txt sed -r -i "s/\x1B\[([0-9]{1,3}(;[0-9]{1,2})?)?[mGK]//g" vulns/redirect.txt @@ -1872,7 +1963,7 @@ function open_redirect() { printf "${bgreen}#######################################################################${reset}\n" fi else - if [[ "$OPEN_REDIRECT" = false ]]; then + if [[ $OPEN_REDIRECT == false ]]; then printf "\n${yellow} ${FUNCNAME[0]} skipped in this mode or defined in reconftw.cfg ${reset}\n" elif [[ ! -s "gf/redirect.txt" ]]; then printf "\n${yellow} ${FUNCNAME[0]} No URLs potentially vulnerables to Open Redirect ${reset}\n\n" @@ -1880,12 +1971,14 @@ function open_redirect() { printf "${yellow} ${FUNCNAME[0]} is already processed, to force executing ${FUNCNAME[0]} delete\n $called_fn_dir/.${FUNCNAME[0]} ${reset}\n\n" fi fi + spinny::stop } function ssrf_checks() { - if { [[ ! -f "$called_fn_dir/.${FUNCNAME[0]}" ]] || [[ "$DIFF" = true ]]; } && [[ "$SSRF_CHECKS" = true ]] && [[ -s "gf/ssrf.txt" ]]; then + spinny::start + if { [[ ! 
-f "$called_fn_dir/.${FUNCNAME[0]}" ]] || [[ $DIFF == true ]]; } && [[ $SSRF_CHECKS == true ]] && [[ -s "gf/ssrf.txt" ]]; then start_func ${FUNCNAME[0]} "SSRF checks" - if [[ -z "$COLLAB_SERVER" ]]; then + if [[ -z $COLLAB_SERVER ]]; then interactsh-client &>.tmp/ssrf_callback.txt & sleep 2 COLLAB_SERVER_FIX="FFUFHASH.$(cat .tmp/ssrf_callback.txt | tail -n1 | cut -c 16-)" @@ -1895,7 +1988,7 @@ function ssrf_checks() { COLLAB_SERVER_FIX="FFUFHASH.$(echo ${COLLAB_SERVER} | sed -r "s/https?:\/\///")" INTERACT=false fi - if [[ "$DEEP" = true ]] || [[ $(cat gf/ssrf.txt | wc -l) -le $DEEP_LIMIT ]]; then + if [[ $DEEP == true ]] || [[ $(cat gf/ssrf.txt | wc -l) -le $DEEP_LIMIT ]]; then cat gf/ssrf.txt | qsreplace ${COLLAB_SERVER_FIX} | anew -q .tmp/tmp_ssrf.txt cat gf/ssrf.txt | qsreplace ${COLLAB_SERVER_URL} | anew -q .tmp/tmp_ssrf.txt ffuf -v -H "${HEADER}" -t $FFUF_THREADS -rate $FFUF_RATELIMIT -w .tmp/tmp_ssrf.txt -u FUZZ 2>/dev/null | grep "URL" | sed 's/| URL | //' | anew -q vulns/ssrf_requested_url.txt @@ -1910,7 +2003,7 @@ function ssrf_checks() { fi pkill -f interactsh-client & else - if [[ "$SSRF_CHECKS" = false ]]; then + if [[ $SSRF_CHECKS == false ]]; then printf "\n${yellow} ${FUNCNAME[0]} skipped in this mode or defined in reconftw.cfg ${reset}\n" elif [[ ! -s "gf/ssrf.txt" ]]; then printf "\n${yellow} ${FUNCNAME[0]} No URLs potentially vulnerables to SSRF ${reset}\n\n" @@ -1918,33 +2011,37 @@ function ssrf_checks() { printf "${yellow} ${FUNCNAME[0]} is already processed, to force executing ${FUNCNAME[0]} delete\n $called_fn_dir/.${FUNCNAME[0]} ${reset}\n\n" fi fi + spinny::stop } function crlf_checks() { - if { [[ ! -f "$called_fn_dir/.${FUNCNAME[0]}" ]] || [[ "$DIFF" = true ]]; } && [[ "$CRLF_CHECKS" = true ]]; then + spinny::start + if { [[ ! -f "$called_fn_dir/.${FUNCNAME[0]}" ]] || [[ $DIFF == true ]]; } && [[ $CRLF_CHECKS == true ]]; then start_func ${FUNCNAME[0]} "CRLF checks" [ ! -s ".tmp/webs_all.txt" ] && cat webs/webs.txt webs/webs_uncommon_ports.txt 2>/dev/null | anew -q .tmp/webs_all.txt - if [[ "$DEEP" = true ]] || [[ $(cat .tmp/webs_all.txt | wc -l) -le $DEEP_LIMIT ]]; then + if [[ $DEEP == true ]] || [[ $(cat .tmp/webs_all.txt | wc -l) -le $DEEP_LIMIT ]]; then crlfuzz -l .tmp/webs_all.txt -o vulns/crlf.txt 2>>"$LOGFILE" >/dev/null end_func "Results are saved in vulns/crlf.txt" ${FUNCNAME[0]} else end_func "Skipping CRLF: Too many URLs to test, try with --deep flag" ${FUNCNAME[0]} fi else - if [[ "$CRLF_CHECKS" = false ]]; then + if [[ $CRLF_CHECKS == false ]]; then printf "\n${yellow} ${FUNCNAME[0]} skipped in this mode or defined in reconftw.cfg ${reset}\n" else printf "${yellow} ${FUNCNAME[0]} is already processed, to force executing ${FUNCNAME[0]} delete\n $called_fn_dir/.${FUNCNAME[0]} ${reset}\n\n" fi fi + spinny::stop } function lfi() { - if { [[ ! -f "$called_fn_dir/.${FUNCNAME[0]}" ]] || [[ "$DIFF" = true ]]; } && [[ "$LFI" = true ]] && [[ -s "gf/lfi.txt" ]]; then + spinny::start + if { [[ ! 
-f "$called_fn_dir/.${FUNCNAME[0]}" ]] || [[ $DIFF == true ]]; } && [[ $LFI == true ]] && [[ -s "gf/lfi.txt" ]]; then start_func ${FUNCNAME[0]} "LFI checks" if [[ -s "gf/lfi.txt" ]]; then cat gf/lfi.txt | qsreplace FUZZ | sed '/FUZZ/!d' | anew -q .tmp/tmp_lfi.txt - if [[ "$DEEP" = true ]] || [[ $(cat .tmp/tmp_lfi.txt | wc -l) -le $DEEP_LIMIT ]]; then + if [[ $DEEP == true ]] || [[ $(cat .tmp/tmp_lfi.txt | wc -l) -le $DEEP_LIMIT ]]; then interlace -tL .tmp/tmp_lfi.txt -threads ${INTERLACE_THREADS} -c "ffuf -v -r -t ${FFUF_THREADS} -rate ${FFUF_RATELIMIT} -H \"${HEADER}\" -w ${lfi_wordlist} -u \"_target_\" -mr \"root:\" " 2>/dev/null | grep "URL" | sed 's/| URL | //' | anew -q vulns/lfi.txt end_func "Results are saved in vulns/lfi.txt" ${FUNCNAME[0]} else @@ -1952,7 +2049,7 @@ function lfi() { fi fi else - if [[ "$LFI" = false ]]; then + if [[ $LFI == false ]]; then printf "\n${yellow} ${FUNCNAME[0]} skipped in this mode or defined in reconftw.cfg ${reset}\n" elif [[ ! -s "gf/lfi.txt" ]]; then printf "\n${yellow} ${FUNCNAME[0]} No URLs potentially vulnerables to LFI ${reset}\n\n" @@ -1960,14 +2057,16 @@ function lfi() { printf "${yellow} ${FUNCNAME[0]} is already processed, to force executing ${FUNCNAME[0]} delete\n $called_fn_dir/.${FUNCNAME[0]} ${reset}\n\n" fi fi + spinny::stop } function ssti() { - if { [[ ! -f "$called_fn_dir/.${FUNCNAME[0]}" ]] || [[ "$DIFF" = true ]]; } && [[ "$SSTI" = true ]] && [[ -s "gf/ssti.txt" ]]; then + spinny::start + if { [[ ! -f "$called_fn_dir/.${FUNCNAME[0]}" ]] || [[ $DIFF == true ]]; } && [[ $SSTI == true ]] && [[ -s "gf/ssti.txt" ]]; then start_func ${FUNCNAME[0]} "SSTI checks" if [[ -s "gf/ssti.txt" ]]; then cat gf/ssti.txt | qsreplace FUZZ | sed '/FUZZ/!d' | anew -q .tmp/tmp_ssti.txt - if [[ "$DEEP" = true ]] || [[ $(cat .tmp/tmp_ssti.txt | wc -l) -le $DEEP_LIMIT ]]; then + if [[ $DEEP == true ]] || [[ $(cat .tmp/tmp_ssti.txt | wc -l) -le $DEEP_LIMIT ]]; then interlace -tL .tmp/tmp_ssti.txt -threads ${INTERLACE_THREADS} -c "ffuf -v -r -t ${FFUF_THREADS} -rate ${FFUF_RATELIMIT} -H \"${HEADER}\" -w ${ssti_wordlist} -u \"_target_\" -mr \"ssti49\" " 2>/dev/null | grep "URL" | sed 's/| URL | //' | anew -q vulns/ssti.txt end_func "Results are saved in vulns/ssti.txt" ${FUNCNAME[0]} else @@ -1975,7 +2074,7 @@ function ssti() { fi fi else - if [[ "$SSTI" = false ]]; then + if [[ $SSTI == false ]]; then printf "\n${yellow} ${FUNCNAME[0]} skipped in this mode or defined in reconftw.cfg ${reset}\n" elif [[ ! -s "gf/ssti.txt" ]]; then printf "\n${yellow} ${FUNCNAME[0]} No URLs potentially vulnerables to SSTI ${reset}\n\n" @@ -1983,18 +2082,20 @@ function ssti() { printf "${yellow} ${FUNCNAME[0]} is already processed, to force executing ${FUNCNAME[0]} delete\n $called_fn_dir/.${FUNCNAME[0]} ${reset}\n\n" fi fi + spinny::stop } function sqli() { - if { [[ ! -f "$called_fn_dir/.${FUNCNAME[0]}" ]] || [[ "$DIFF" = true ]]; } && [[ "$SQLI" = true ]] && [[ -s "gf/sqli.txt" ]]; then + spinny::start + if { [[ ! 
-f "$called_fn_dir/.${FUNCNAME[0]}" ]] || [[ $DIFF == true ]]; } && [[ $SQLI == true ]] && [[ -s "gf/sqli.txt" ]]; then start_func ${FUNCNAME[0]} "SQLi checks" cat gf/sqli.txt | qsreplace FUZZ | sed '/FUZZ/!d' | anew -q .tmp/tmp_sqli.txt - if [[ "$DEEP" = true ]] || [[ $(cat .tmp/tmp_sqli.txt | wc -l) -le $DEEP_LIMIT ]]; then - if [[ "$SQLMAP" = true ]]; then + if [[ $DEEP == true ]] || [[ $(cat .tmp/tmp_sqli.txt | wc -l) -le $DEEP_LIMIT ]]; then + if [[ $SQLMAP == true ]]; then python3 ${tools}/sqlmap/sqlmap.py -m .tmp/tmp_sqli.txt -b -o --smart --batch --disable-coloring --random-agent --output-dir=vulns/sqlmap 2>>"$LOGFILE" >/dev/null fi - if [[ "$GHAURI" = true ]]; then + if [[ $GHAURI == true ]]; then interlace -tL .tmp/tmp_sqli.txt -threads ${INTERLACE_THREADS} -c "ghauri -u _target_ --batch -H \"${HEADER}\" --force-ssl >> vulns/ghauri_log.txt" 2>>"$LOGFILE" >/dev/null fi end_func "Results are saved in vulns/sqlmap folder" ${FUNCNAME[0]} @@ -2002,7 +2103,7 @@ function sqli() { end_func "Skipping SQLi: Too many URLs to test, try with --deep flag" ${FUNCNAME[0]} fi else - if [[ "$SQLI" = false ]]; then + if [[ $SQLI == false ]]; then printf "\n${yellow} ${FUNCNAME[0]} skipped in this mode or defined in reconftw.cfg ${reset}\n" elif [[ ! -s "gf/sqli.txt" ]]; then printf "\n${yellow} ${FUNCNAME[0]} No URLs potentially vulnerables to SQLi ${reset}\n\n" @@ -2010,15 +2111,16 @@ function sqli() { printf "${yellow} ${FUNCNAME[0]} is already processed, to force executing ${FUNCNAME[0]} delete\n $called_fn_dir/.${FUNCNAME[0]} ${reset}\n\n" fi fi + spinny::stop } function test_ssl() { - if { [[ ! -f "$called_fn_dir/.${FUNCNAME[0]}" ]] || [[ "$DIFF" = true ]]; } && [[ "$TEST_SSL" = true ]]; then + if { [[ ! -f "$called_fn_dir/.${FUNCNAME[0]}" ]] || [[ $DIFF == true ]]; } && [[ $TEST_SSL == true ]]; then start_func ${FUNCNAME[0]} "SSL Test" ${tools}/testssl.sh/testssl.sh --quiet --color 0 -U -iL hosts/ips.txt 2>>"$LOGFILE" >vulns/testssl.txt end_func "Results are saved in vulns/testssl.txt" ${FUNCNAME[0]} else - if [[ "$TEST_SSL" = false ]]; then + if [[ $TEST_SSL == false ]]; then printf "\n${yellow} ${FUNCNAME[0]} skipped in this mode or defined in reconftw.cfg ${reset}\n" else printf "${yellow} ${FUNCNAME[0]} is already processed, to force executing ${FUNCNAME[0]} delete\n $called_fn_dir/.${FUNCNAME[0]} ${reset}\n\n" @@ -2027,7 +2129,8 @@ function test_ssl() { } function spraying() { - if { [[ ! -f "$called_fn_dir/.${FUNCNAME[0]}" ]] || [[ "$DIFF" = true ]]; } && [[ "$SPRAY" = true ]]; then + spinny::start + if { [[ ! -f "$called_fn_dir/.${FUNCNAME[0]}" ]] || [[ $DIFF == true ]]; } && [[ $SPRAY == true ]]; then start_func ${FUNCNAME[0]} "Password spraying" cd "${tools}/brutespray" || { echo "Failed to cd directory in ${FUNCNAME[0]} @ line ${LINENO}" @@ -2040,26 +2143,28 @@ function spraying() { } end_func "Results are saved in vulns/brutespray folder" ${FUNCNAME[0]} else - if [[ "$SPRAY" = false ]]; then + if [[ $SPRAY == false ]]; then printf "\n${yellow} ${FUNCNAME[0]} skipped in this mode or defined in reconftw.cfg ${reset}\n" else printf "${yellow} ${FUNCNAME[0]} is already processed, to force executing ${FUNCNAME[0]} delete\n $called_fn_dir/.${FUNCNAME[0]} ${reset}\n\n" fi fi + spinny::stop } function command_injection() { - if { [[ ! -f "$called_fn_dir/.${FUNCNAME[0]}" ]] || [[ "$DIFF" = true ]]; } && [[ "$COMM_INJ" = true ]] && [[ -s "gf/rce.txt" ]]; then + spinny::start + if { [[ ! 
-f "$called_fn_dir/.${FUNCNAME[0]}" ]] || [[ $DIFF == true ]]; } && [[ $COMM_INJ == true ]] && [[ -s "gf/rce.txt" ]]; then start_func ${FUNCNAME[0]} "Command Injection checks" [ -s "gf/rce.txt" ] && cat gf/rce.txt | qsreplace FUZZ | sed '/FUZZ/!d' | anew -q .tmp/tmp_rce.txt - if [[ "$DEEP" = true ]] || [[ $(cat .tmp/tmp_rce.txt | wc -l) -le $DEEP_LIMIT ]]; then + if [[ $DEEP == true ]] || [[ $(cat .tmp/tmp_rce.txt | wc -l) -le $DEEP_LIMIT ]]; then [ -s ".tmp/tmp_rce.txt" ] && python3 ${tools}/commix/commix.py --batch -m .tmp/tmp_rce.txt --output-dir vulns/command_injection.txt 2>>"$LOGFILE" >/dev/null end_func "Results are saved in vulns/command_injection folder" ${FUNCNAME[0]} else end_func "Skipping Command injection: Too many URLs to test, try with --deep flag" ${FUNCNAME[0]} fi else - if [[ "$COMM_INJ" = false ]]; then + if [[ $COMM_INJ == false ]]; then printf "\n${yellow} ${FUNCNAME[0]} skipped in this mode or defined in reconftw.cfg ${reset}\n" elif [[ ! -s "gf/rce.txt" ]]; then printf "\n${yellow} ${FUNCNAME[0]} No URLs potentially vulnerables to Command Injection ${reset}\n\n" @@ -2067,11 +2172,13 @@ function command_injection() { printf "${yellow} ${FUNCNAME[0]} is already processed, to force executing ${FUNCNAME[0]} delete\n $called_fn_dir/.${FUNCNAME[0]} ${reset}\n\n" fi fi + spinny::stop } function 4xxbypass() { - if { [[ ! -f "$called_fn_dir/.${FUNCNAME[0]}" ]] || [[ "$DIFF" = true ]]; } && [[ "$BYPASSER4XX" = true ]]; then - if [[ $(cat fuzzing/fuzzing_full.txt 2>/dev/null | grep -E '^4' | grep -Ev '^404' | cut -d ' ' -f3 | wc -l) -le 1000 ]] || [[ "$DEEP" = true ]]; then + spinny::start + if { [[ ! -f "$called_fn_dir/.${FUNCNAME[0]}" ]] || [[ $DIFF == true ]]; } && [[ $BYPASSER4XX == true ]]; then + if [[ $(cat fuzzing/fuzzing_full.txt 2>/dev/null | grep -E '^4' | grep -Ev '^404' | cut -d ' ' -f3 | wc -l) -le 1000 ]] || [[ $DEEP == true ]]; then start_func "403 bypass" cat $dir/fuzzing/fuzzing_full.txt 2>/dev/null | grep -E '^4' | grep -Ev '^404' | cut -d ' ' -f3 >$dir/.tmp/403test.txt cd "${tools}/byp4xx" || { @@ -2089,18 +2196,20 @@ function 4xxbypass() { notification "Too many urls to bypass, skipping" warn fi else - if [[ "$BYPASSER4XX" = false ]]; then + if [[ $BYPASSER4XX == false ]]; then printf "\n${yellow} ${FUNCNAME[0]} skipped in this mode or defined in reconftw.cfg ${reset}\n" else printf "${yellow} ${FUNCNAME[0]} is already processed, to force executing ${FUNCNAME[0]} delete\n $called_fn_dir/.${FUNCNAME[0]} ${reset}\n\n" fi fi + spinny::stop } function prototype_pollution() { - if { [[ ! -f "$called_fn_dir/.${FUNCNAME[0]}" ]] || [[ "$DIFF" = true ]]; } && [[ "$PROTO_POLLUTION" = true ]]; then + spinny::start + if { [[ ! 
-f "$called_fn_dir/.${FUNCNAME[0]}" ]] || [[ $DIFF == true ]]; } && [[ $PROTO_POLLUTION == true ]]; then start_func ${FUNCNAME[0]} "Prototype Pollution checks" - if [[ "$DEEP" = true ]] || [[ $(cat webs/url_extract.txt | wc -l) -le $DEEP_LIMIT ]]; then + if [[ $DEEP == true ]] || [[ $(cat webs/url_extract.txt | wc -l) -le $DEEP_LIMIT ]]; then [ -s "webs/url_extract.txt" ] && ppfuzz -l webs/url_extract.txt -c $PPFUZZ_THREADS 2>/dev/null | anew -q .tmp/prototype_pollution.txt [ -s ".tmp/prototype_pollution.txt" ] && cat .tmp/prototype_pollution.txt | sed -e '1,8d' | sed '/^\[ERR/d' | anew -q vulns/prototype_pollution.txt end_func "Results are saved in vulns/prototype_pollution.txt" ${FUNCNAME[0]} @@ -2108,19 +2217,21 @@ function prototype_pollution() { end_func "Skipping Prototype Pollution: Too many URLs to test, try with --deep flag" ${FUNCNAME[0]} fi else - if [[ "$PROTO_POLLUTION" = false ]]; then + if [[ $PROTO_POLLUTION == false ]]; then printf "\n${yellow} ${FUNCNAME[0]} skipped in this mode or defined in reconftw.cfg ${reset}\n" else printf "${yellow} ${FUNCNAME[0]} is already processed, to force executing ${FUNCNAME[0]} delete\n $called_fn_dir/.${FUNCNAME[0]} ${reset}\n\n" fi fi + spinny::stop } function smuggling() { - if { [[ ! -f "$called_fn_dir/.${FUNCNAME[0]}" ]] || [[ "$DIFF" = true ]]; } && [[ "$SMUGGLING" = true ]]; then + spinny::start + if { [[ ! -f "$called_fn_dir/.${FUNCNAME[0]}" ]] || [[ $DIFF == true ]]; } && [[ $SMUGGLING == true ]]; then start_func ${FUNCNAME[0]} "HTTP Request Smuggling checks" [ ! -s ".tmp/webs_all.txt" ] && cat webs/webs.txt webs/webs_uncommon_ports.txt 2>/dev/null | anew -q .tmp/webs_all.txt - if [[ "$DEEP" = true ]] || [[ $(cat .tmp/webs_all.txt | wc -l) -le $DEEP_LIMIT ]]; then + if [[ $DEEP == true ]] || [[ $(cat .tmp/webs_all.txt | wc -l) -le $DEEP_LIMIT ]]; then cd "${tools}/smuggler" || { echo "Failed to cd directory in ${FUNCNAME[0]} @ line ${LINENO}" exit 1 @@ -2136,19 +2247,21 @@ function smuggling() { end_func "Skipping Prototype Pollution: Too many webs to test, try with --deep flag" ${FUNCNAME[0]} fi else - if [[ "$SMUGGLING" = false ]]; then + if [[ $SMUGGLING == false ]]; then printf "\n${yellow} ${FUNCNAME[0]} skipped in this mode or defined in reconftw.cfg ${reset}\n" else printf "${yellow} ${FUNCNAME[0]} is already processed, to force executing ${FUNCNAME[0]} delete\n $called_fn_dir/.${FUNCNAME[0]} ${reset}\n\n" fi fi + spinny::stop } function webcache() { - if { [[ ! -f "$called_fn_dir/.${FUNCNAME[0]}" ]] || [[ "$DIFF" = true ]]; } && [[ "$WEBCACHE" = true ]]; then + spinny::start + if { [[ ! -f "$called_fn_dir/.${FUNCNAME[0]}" ]] || [[ $DIFF == true ]]; } && [[ $WEBCACHE == true ]]; then start_func ${FUNCNAME[0]} "Web Cache Poisoning checks" [ ! 
-s ".tmp/webs_all.txt" ] && cat webs/webs.txt webs/webs_uncommon_ports.txt 2>/dev/null | anew -q .tmp/webs_all.txt - if [[ "$DEEP" = true ]] || [[ $(cat .tmp/webs_all.txt | wc -l) -le $DEEP_LIMIT ]]; then + if [[ $DEEP == true ]] || [[ $(cat .tmp/webs_all.txt | wc -l) -le $DEEP_LIMIT ]]; then cd "${tools}/Web-Cache-Vulnerability-Scanner" || { echo "Failed to cd directory in ${FUNCNAME[0]} @ line ${LINENO}" exit 1 @@ -2164,19 +2277,21 @@ function webcache() { end_func "Web Cache Poisoning: Too many webs to test, try with --deep flag" ${FUNCNAME[0]} fi else - if [[ "$WEBCACHE" = false ]]; then + if [[ $WEBCACHE == false ]]; then printf "\n${yellow} ${FUNCNAME[0]} skipped in this mode or defined in reconftw.cfg ${reset}\n" else printf "${yellow} ${FUNCNAME[0]} is already processed, to force executing ${FUNCNAME[0]} delete\n $called_fn_dir/.${FUNCNAME[0]} ${reset}\n\n" fi fi + spinny::stop } function fuzzparams() { - if { [[ ! -f "$called_fn_dir/.${FUNCNAME[0]}" ]] || [[ "$DIFF" = true ]]; } && [[ "$FUZZPARAMS" = true ]]; then + spinny::start + if { [[ ! -f "$called_fn_dir/.${FUNCNAME[0]}" ]] || [[ $DIFF == true ]]; } && [[ $FUZZPARAMS == true ]]; then start_func ${FUNCNAME[0]} "Fuzzing params values checks" - if [[ "$DEEP" = true ]] || [[ $(cat webs/url_extract.txt | wc -l) -le $DEEP_LIMIT2 ]]; then - if [[ ! "$AXIOM" = true ]]; then + if [[ $DEEP == true ]] || [[ $(cat webs/url_extract.txt | wc -l) -le $DEEP_LIMIT2 ]]; then + if [[ $AXIOM != true ]]; then nuclei -update 2>>"$LOGFILE" >/dev/null git -C ${tools}/fuzzing-templates pull cat webs/url_extract.txt 2>/dev/null | nuclei -silent -retries 3 -rl $NUCLEI_RATELIMIT -t ${tools}/fuzzing-templates -o .tmp/fuzzparams.txt @@ -2190,12 +2305,13 @@ function fuzzparams() { end_func "Fuzzing params values: Too many entries to test, try with --deep flag" ${FUNCNAME[0]} fi else - if [[ "$FUZZPARAMS" = false ]]; then + if [[ $FUZZPARAMS == false ]]; then printf "\n${yellow} ${FUNCNAME[0]} skipped in this mode or defined in reconftw.cfg ${reset}\n" else printf "${yellow} ${FUNCNAME[0]} is already processed, to force executing ${FUNCNAME[0]} delete\n $called_fn_dir/.${FUNCNAME[0]} ${reset}\n\n" fi fi + spinny::stop } ############################################################################################################### @@ -2203,7 +2319,7 @@ function fuzzparams() { ############################################################################################################### function deleteOutScoped() { - if [[ -s "$1" ]]; then + if [[ -s $1 ]]; then cat $1 | while read outscoped; do if grep -q "^[*]" <<<$outscoped; then outscoped="${outscoped:1}" @@ -2231,7 +2347,7 @@ function getElapsedTime { function zipSnedOutputFolder { zip_name1=$(date +"%Y_%m_%d-%H.%M.%S") zip_name="${zip_name1}_${domain}.zip" 2>>"$LOGFILE" >/dev/null - (cd "$dir" && zip -r "$zip_name" .) + (cd "$dir" && zip -r "$zip_name" .) 2>>"$LOGFILE" >/dev/null echo "Sending zip file "${dir}/${zip_name}"" if [[ -s "${dir}/$zip_name" ]]; then @@ -2266,7 +2382,7 @@ function remove_big_files() { } function notification() { - if [[ -n "$1" ]] && [[ -n "$2" ]]; then + if [[ -n $1 ]] && [[ -n $2 ]]; then case $2 in info) text="\n${bblue} ${1} ${reset}" @@ -2296,11 +2412,11 @@ function transfer { if tty -s; then file="$1" file_name=$(basename "$file") - if [[ ! -e "$file" ]]; then + if [[ ! -e $file ]]; then echo "$file: No such file or directory" >&2 return 1 fi - if [[ -d "$file" ]]; then + if [[ -d $file ]]; then file_name="$file_name.zip" (cd "$file" && zip -r -q - .) 
| curl --progress-bar --upload-file "-" "https://transfer.sh/$file_name" | tee /dev/null else @@ -2378,9 +2494,10 @@ function check_inscope() { } function resolvers_update() { - if [[ "$generate_resolvers" = true ]]; then - if [[ ! "$AXIOM" = true ]]; then - if [[ ! -s "$resolvers" ]] || [[ $(find "$resolvers" -mtime +1 -print) ]]; then + spinny::start + if [[ $generate_resolvers == true ]]; then + if [[ $AXIOM != true ]]; then + if [[ ! -s $resolvers ]] || [[ $(find "$resolvers" -mtime +1 -print) ]]; then notification "Resolvers seem older than 1 day\n Generating custom resolvers..." warn eval rm -f $resolvers 2>>"$LOGFILE" dnsvalidator -tL https://public-dns.info/nameservers.txt -threads $DNSVALIDATOR_THREADS -o $resolvers 2>>"$LOGFILE" >/dev/null @@ -2402,17 +2519,18 @@ function resolvers_update() { generate_resolvers=false else - if [[ ! -s "$resolvers" ]] || [[ $(find "$resolvers" -mtime +1 -print) ]]; then + if [[ ! -s $resolvers ]] || [[ $(find "$resolvers" -mtime +1 -print) ]]; then notification "Resolvers seem older than 1 day\n Downloading new resolvers..." warn wget -q -O - ${resolvers_url} >$resolvers wget -q -O - ${resolvers_trusted_url} >$resolvers_trusted notification "Resolvers updated\n" good fi fi + spinny::stop } function resolvers_update_quick_local() { - if [[ "$update_resolvers" = true ]]; then + if [[ $update_resolvers == true ]]; then wget -q -O - ${resolvers_url} >$resolvers wget -q -O - ${resolvers_trusted_url} >$resolvers_trusted fi @@ -2436,7 +2554,7 @@ function ipcidr_target() { list=${PWD}/target_reconftw_ipcidr.txt fi fi - if [[ -n "$2" ]]; then + if [[ -n $2 ]]; then cat $list | anew -q $2 sed -i '/\/[0-9]*$/d' $2 fi @@ -2445,7 +2563,7 @@ function ipcidr_target() { function axiom_lauch() { # let's fire up a FLEET! - if [[ "$AXIOM_FLEET_LAUNCH" = true ]] && [[ -n "$AXIOM_FLEET_NAME" ]] && [[ -n "$AXIOM_FLEET_COUNT" ]]; then + if [[ $AXIOM_FLEET_LAUNCH == true ]] && [[ -n $AXIOM_FLEET_NAME ]] && [[ -n $AXIOM_FLEET_COUNT ]]; then start_func ${FUNCNAME[0]} "Launching our Axiom fleet" python3 -m pip install --upgrade linode-cli 2>>"$LOGFILE" >/dev/null # Check to see if we have a fleet already, if so, SKIP THIS! 
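The "check to see if we have a fleet already" comment above describes a guard that is not visible in this hunk. A minimal sketch of such a pre-launch check, assuming axiom-ls lists running instances one per line (illustrative only, not part of this patch; the instance-counting logic and the timeout value are assumptions), could look like:

    # Hypothetical guard: count instances whose name starts with the fleet prefix
    # and reuse them instead of launching a new fleet (axiom-ls output format assumed).
    existing=$(timeout 30 axiom-ls 2>/dev/null | grep -c "^${AXIOM_FLEET_NAME}" || true)
    if [[ ${existing} -ge ${AXIOM_FLEET_COUNT} ]]; then
        notification "Fleet ${AXIOM_FLEET_NAME} already has ${existing} instances, skipping launch" info
        axiom-select "${AXIOM_FLEET_NAME}*"
    else
        axiom-fleet ${AXIOM_FLEET_NAME} ${AXIOM_ARGS}
        axiom-select "${AXIOM_FLEET_NAME}*"
    fi

The next hunk continues with the unconditional axiom-fleet launch that such a guard would wrap.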
@@ -2466,7 +2584,7 @@ function axiom_lauch() { echo "axiom-fleet ${AXIOM_FLEET_NAME} ${AXIOM_ARGS}" axiom-fleet ${AXIOM_FLEET_NAME} ${AXIOM_ARGS} axiom-select "$AXIOM_FLEET_NAME*" - if [[ -n "$AXIOM_POST_START" ]]; then + if [[ -n $AXIOM_POST_START ]]; then eval "$AXIOM_POST_START" 2>>"$LOGFILE" >/dev/null fi @@ -2478,9 +2596,9 @@ function axiom_lauch() { } function axiom_shutdown() { - if [[ "$AXIOM_FLEET_LAUNCH" = true ]] && [[ "$AXIOM_FLEET_SHUTDOWN" = true ]] && [[ -n "$AXIOM_FLEET_NAME" ]]; then + if [[ $AXIOM_FLEET_LAUNCH == true ]] && [[ $AXIOM_FLEET_SHUTDOWN == true ]] && [[ -n $AXIOM_FLEET_NAME ]]; then #if [[ "$mode" == "subs_menu" ]] || [[ "$mode" == "list_recon" ]] || [[ "$mode" == "passive" ]] || [[ "$mode" == "all" ]]; then - if [[ "$mode" == "subs_menu" ]] || [[ "$mode" == "passive" ]] || [[ "$mode" == "all" ]]; then + if [[ $mode == "subs_menu" ]] || [[ $mode == "passive" ]] || [[ $mode == "all" ]]; then notification "Automatic Axiom fleet shutdown is not enabled in this mode" info return fi @@ -2507,7 +2625,7 @@ function start() { global_start=$(date +%s) - if [[ "$NOTIFICATION" = true ]]; then + if [[ $NOTIFICATION == true ]]; then NOTIFY="notify -silent" else NOTIFY="" @@ -2517,16 +2635,16 @@ function start() { notification "Recon succesfully started on ${domain}" good [ "$SOFT_NOTIFICATION" = true ] && echo "Recon succesfully started on ${domain}" | notify -silent printf "${bgreen}#######################################################################${reset}\n" - if [[ "$upgrade_before_running" = true ]]; then + if [[ $upgrade_before_running == true ]]; then ${SCRIPTPATH}/install.sh --tools fi tools_installed #[[ -n "$domain" ]] && ipcidr_target $domain - if [[ -z "$domain" ]]; then - if [[ -n "$list" ]]; then - if [[ -z "$domain" ]]; then + if [[ -z $domain ]]; then + if [[ -n $list ]]; then + if [[ -z $domain ]]; then domain="Multi" dir="${SCRIPTPATH}/Recon/$domain" called_fn_dir="$dir"/.called_fn @@ -2542,12 +2660,12 @@ function start() { called_fn_dir="$dir"/.called_fn fi - if [[ -z "$domain" ]]; then + if [[ -z $domain ]]; then notification "\n\n${bred} No domain or list provided ${reset}\n\n" error exit fi - if [[ ! -d "$called_fn_dir" ]]; then + if [[ ! -d $called_fn_dir ]]; then mkdir -p "$called_fn_dir" fi mkdir -p "$dir" @@ -2555,8 +2673,8 @@ function start() { echo "Failed to cd directory in ${FUNCNAME[0]} @ line ${LINENO}" exit 1 } - if [[ "$AXIOM" = true ]]; then - if [[ -n "$domain" ]]; then + if [[ $AXIOM == true ]]; then + if [[ -n $domain ]]; then echo "$domain" | anew -q target.txt list="${dir}/target.txt" fi @@ -2580,27 +2698,27 @@ function end() { echo "End $(date +"%F") $(date +"%T")" >>"${LOGFILE}" - if [[ ! 
"$PRESERVE" = true ]]; then + if [[ $PRESERVE != true ]]; then find $dir -type f -empty | grep -v "called_fn" | xargs rm -f 2>>"$LOGFILE" >/dev/null find $dir -type d -empty | grep -v "called_fn" | xargs rm -rf 2>>"$LOGFILE" >/dev/null fi - if [[ "$REMOVETMP" = true ]]; then + if [[ $REMOVETMP == true ]]; then rm -rf $dir/.tmp fi - if [[ "$REMOVELOG" = true ]]; then + if [[ $REMOVELOG == true ]]; then rm -rf $dir/.log fi - if [[ -n "$dir_output" ]]; then + if [[ -n $dir_output ]]; then output finaldir=$dir_output else finaldir=$dir fi #Zip the output folder and send it via tg/discord/slack - if [[ "$SENDZIPNOTIFY" = true ]]; then + if [[ $SENDZIPNOTIFY == true ]]; then zipSnedOutputFolder fi global_end=$(date +%s) @@ -2634,7 +2752,7 @@ function passive() { SUBREGEXPERMUTE=false SUB_RECURSIVE_BRUTE=false WEBPROBESIMPLE=false - if [[ "$AXIOM" = true ]]; then + if [[ $AXIOM == true ]]; then axiom_lauch axiom_selected fi @@ -2646,7 +2764,7 @@ function passive() { PORTSCAN_ACTIVE=false portscan - if [[ "$AXIOM" = true ]]; then + if [[ $AXIOM == true ]]; then axiom_shutdown fi @@ -2673,7 +2791,7 @@ function osint() { } function vulns() { - if [[ "$VULNS_GENERAL" = true ]]; then + if [[ $VULNS_GENERAL == true ]]; then cors open_redirect ssrf_checks @@ -2698,7 +2816,7 @@ function multi_osint() { global_start=$(date +%s) - if [[ "$NOTIFICATION" = true ]]; then + if [[ $NOTIFICATION == true ]]; then NOTIFY="notify -silent" else NOTIFY="" @@ -2706,7 +2824,7 @@ function multi_osint() { #[[ -n "$domain" ]] && ipcidr_target $domain - if [[ -s "$list" ]]; then + if [[ -s $list ]]; then sed -i 's/\r$//' $list targets=$(cat $list) else @@ -2775,7 +2893,7 @@ function recon() { zonetransfer favicon - if [[ "$AXIOM" = true ]]; then + if [[ $AXIOM == true ]]; then axiom_lauch axiom_selected fi @@ -2795,7 +2913,7 @@ function recon() { urlchecks jschecks - if [[ "$AXIOM" = true ]]; then + if [[ $AXIOM == true ]]; then axiom_shutdown fi @@ -2811,7 +2929,7 @@ function multi_recon() { global_start=$(date +%s) - if [[ "$NOTIFICATION" = true ]]; then + if [[ $NOTIFICATION == true ]]; then NOTIFY="notify -silent" else NOTIFY="" @@ -2819,7 +2937,7 @@ function multi_recon() { #[[ -n "$domain" ]] && ipcidr_target $domain - if [[ -s "$list" ]]; then + if [[ -s $list ]]; then sed -i 's/\r$//' $list targets=$(cat $list) else @@ -2877,7 +2995,7 @@ function multi_recon() { getElapsedTime $loopstart $loopend printf "${bgreen}#######################################################################${reset}\n" printf "${bgreen} $domain finished 1st loop in ${runtime} $currently ${reset}\n" - if [[ -n "$flist" ]]; then + if [[ -n $flist ]]; then POSINLIST=$(eval grep -nrE "^$domain$" "$flist" | cut -f1 -d':') printf "\n${yellow} $domain is $POSINLIST of $LISTTOTAL${reset}\n" fi @@ -2888,7 +3006,7 @@ function multi_recon() { exit 1 } - if [[ "$AXIOM" = true ]]; then + if [[ $AXIOM == true ]]; then axiom_lauch axiom_selected fi @@ -2914,7 +3032,7 @@ function multi_recon() { getElapsedTime $loopstart $loopend printf "${bgreen}#######################################################################${reset}\n" printf "${bgreen} $domain finished 2nd loop in ${runtime} $currently ${reset}\n" - if [[ -n "$flist" ]]; then + if [[ -n $flist ]]; then POSINLIST=$(eval grep -nrE "^$domain$" "$flist" | cut -f1 -d':') printf "\n${yellow} $domain is $POSINLIST of $LISTTOTAL${reset}\n" fi @@ -2969,14 +3087,14 @@ function multi_recon() { getElapsedTime $loopstart $loopend printf 
"${bgreen}#######################################################################${reset}\n" printf "${bgreen} $domain finished 3rd loop in ${runtime} $currently ${reset}\n" - if [[ -n "$flist" ]]; then + if [[ -n $flist ]]; then POSINLIST=$(eval grep -nrE "^$domain$" "$flist" | cut -f1 -d':') printf "\n${yellow} $domain is $POSINLIST of $LISTTOTAL${reset}\n" fi printf "${bgreen}#######################################################################${reset}\n" done - if [[ "$AXIOM" = true ]]; then + if [[ $AXIOM == true ]]; then axiom_shutdown fi @@ -2999,7 +3117,7 @@ function multi_recon() { getElapsedTime $loopstart $loopend printf "${bgreen}#######################################################################${reset}\n" printf "${bgreen} $domain finished final loop in ${runtime} $currently ${reset}\n" - if [[ -n "$flist" ]]; then + if [[ -n $flist ]]; then POSINLIST=$(eval grep -nrE "^$domain$" "$flist" | cut -f1 -d':') printf "\n${yellow} $domain is $POSINLIST of $LISTTOTAL${reset}\n" fi @@ -3017,7 +3135,7 @@ function multi_recon() { function subs_menu() { start - if [[ "$AXIOM" = true ]]; then + if [[ $AXIOM == true ]]; then axiom_lauch axiom_selected fi @@ -3031,7 +3149,7 @@ function subs_menu() { zonetransfer s3buckets - if [[ "$AXIOM" = true ]]; then + if [[ $AXIOM == true ]]; then axiom_shutdown fi @@ -3106,78 +3224,21 @@ function help() { printf " \n" printf " ${byellow}Run custom function:${reset}\n" printf " ./reconftw.sh -d example.com -c nuclei_check \n" - printf " \n" - printf " ${byellow}Start the web server:${reset}\n" - printf " ./reconftw.sh --web-server start\n" - printf " \n" - printf " ${byellow}Stop the web server:${reset}\n" - printf " ./reconftw.sh --web-server stop\n" -} - -############################################################################################################### -############################################# WEB SERVER ###################################################### -############################################################################################################### - -# webserver initialization, thanks @lur1el, @d3vchac, @mx61tt and @dd4n1b0y <3 - -function webserver() { - printf "${bgreen} Web Interface by @lur1el, @d3vchac, @mx61tt and @dd4n1b0y ${reset}\n" - ver=$(python3 -V 2>&1 | sed 's/.* \([0-9]\).\([0-9]\).*/\1\2/') - - if [[ "$ver" -lt "31" ]]; then - echo "The web interface requires python 3.10 or greater" - exit 1 - fi - - if [[ "$1" == "start" ]]; then - ipAddress=$(curl -s ifconfig.me) - - if [[ "$ipAddress" != "" ]]; then - printf "\n ${bblue}Starting web server... ${reset}\n" - cd ${SCRIPTPATH}/web || { - echo "Failed to cd to $dir in ${FUNCNAME[0]} @ line ${LINENO}" - exit 1 - } - $SUDO source ${SCRIPTPATH}/web/.venv/bin/activate - $SUDO screen -S ReconftwWebserver -X kill &>/dev/null - $SUDO screen -dmS ReconftwWebserver python3 manage.py runserver $ipAddress:8001 &>/dev/null - $SUDO service redis-server start &>/dev/null - $SUDO screen -S ReconftwCelery -X kill &>/dev/null - $SUDO screen -dmS ReconftwCelery python3 -m celery -A web worker -l info -P prefork -Q run_scans,default &>/dev/null - printf " ${bblue}Web server started! ${reset}\n" - printf " ${bblue}Service Address: http://$ipAddress:8001${reset}\n" - else - printf "\n" - printf " ${red}Server IP address not found.${reset}\n" - printf "\n" - printf " ${bblue}Check if the server has internet connection.${reset}\n" - fi - elif [[ "$1" == "stop" ]]; then - printf "\n ${bblue}Stoping web server... 
${reset}\n" - # $SUDO service postgresql stop - $SUDO screen -S ReconftwWebserver -X kill &>/dev/null - $SUDO service redis-server stop &>/dev/null - $SUDO screen -S ReconftwCelery -X kill &>/dev/null - printf " ${bblue}Web server stoped! ${reset}\n" - else - printf "\n" - printf " ${red}Invalid action${reset}\n" - printf "\n" - printf " ${bblue}Valid actions: start/stop${reset}\n" - fi } ############################################################################################################### ########################################### START SCRIPT ##################################################### ############################################################################################################### +source assets/spinny/spinny.sh + # macOS PATH initialization, thanks @0xtavian <3 if [[ $OSTYPE == "darwin"* ]]; then PATH="/usr/local/opt/gnu-getopt/bin:$PATH" PATH="/usr/local/opt/coreutils/libexec/gnubin:$PATH" fi -PROGARGS=$(getopt -o 'd:m:l:x:i:o:f:q:c:rspanwvh::' --long 'domain:,list:,recon,subdomains,passive,all,web,osint,deep,web-server,help,vps' -n 'reconFTW' -- "$@") +PROGARGS=$(getopt -o 'd:m:l:x:i:o:f:q:c:rspanvh::' --long 'domain:,list:,recon,subdomains,passive,all,web,osint,deep,help,vps' -n 'reconFTW' -- "$@") # Note the quotes around "$PROGARGS": they are essential! eval set -- "$PROGARGS" @@ -3291,12 +3352,6 @@ while true; do shift break ;; - '--web-server') - . ./reconftw.cfg - banner - webserver $3 - exit 1 - ;; '--help' | '-h' | *) # echo "Unknown argument: $1" . ./reconftw.cfg @@ -3317,7 +3372,8 @@ SCRIPTPATH="$( echo "Error importing reconftw.ctg" exit 1 } -if [[ -s "$CUSTOM_CONFIG" ]]; then + +if [[ -s $CUSTOM_CONFIG ]]; then # shellcheck source=/home/six2dez/Tools/reconftw/custom_config.cfg . "${CUSTOM_CONFIG}" || { echo "Error importing reconftw.ctg" @@ -3335,17 +3391,17 @@ if [[ $rate_limit ]]; then HTTPX_RATELIMIT=$rate_limit fi -if [[ -n "$outOfScope_file" ]]; then +if [[ -n $outOfScope_file ]]; then isAsciiText $outOfScope_file - if [[ "False" = "$IS_ASCII" ]]; then + if [[ "False" == "$IS_ASCII" ]]; then printf "\n\n${bred} Out of Scope file is not a text file${reset}\n\n" exit fi fi -if [[ -n "$inScope_file" ]]; then +if [[ -n $inScope_file ]]; then isAsciiText $inScope_file - if [[ "False" = "$IS_ASCII" ]]; then + if [[ "False" == "$IS_ASCII" ]]; then printf "\n\n${bred} In Scope file is not a text file${reset}\n\n" exit fi @@ -3364,7 +3420,7 @@ banner check_version startdir=${PWD} -if [[ -n "$list" ]]; then +if [[ -n $list ]]; then if [[ $list == ./* ]]; then flist="${startdir}/${list:2}" elif [[ $list == ~* ]]; then @@ -3380,15 +3436,15 @@ fi case $opt_mode in 'r') - if [[ -n "$multi" ]]; then - if [[ "$AXIOM" = true ]]; then + if [[ -n $multi ]]; then + if [[ $AXIOM == true ]]; then mode="multi_recon" fi multi_recon exit fi - if [[ -n "$list" ]]; then - if [[ "$AXIOM" = true ]]; then + if [[ -n $list ]]; then + if [[ $AXIOM == true ]]; then mode="list_recon" fi sed -i 's/\r$//' $list @@ -3398,7 +3454,7 @@ case $opt_mode in end done else - if [[ "$AXIOM" = true ]]; then + if [[ $AXIOM == true ]]; then mode="recon" fi start @@ -3407,8 +3463,8 @@ case $opt_mode in fi ;; 's') - if [[ -n "$list" ]]; then - if [[ "$AXIOM" = true ]]; then + if [[ -n $list ]]; then + if [[ $AXIOM == true ]]; then mode="subs_menu" fi sed -i 's/\r$//' $list @@ -3420,8 +3476,8 @@ case $opt_mode in fi ;; 'p') - if [[ -n "$list" ]]; then - if [[ "$AXIOM" = true ]]; then + if [[ -n $list ]]; then + if [[ $AXIOM == true ]]; then mode="passive" fi sed -i 's/\r$//' $list @@ 
-3434,8 +3490,8 @@ case $opt_mode in ;; 'a') export VULNS_GENERAL=true - if [[ -n "$list" ]]; then - if [[ "$AXIOM" = true ]]; then + if [[ -n $list ]]; then + if [[ $AXIOM == true ]]; then mode="all" fi sed -i 's/\r$//' $list @@ -3447,7 +3503,7 @@ case $opt_mode in fi ;; 'w') - if [[ -n "$list" ]]; then + if [[ -n $list ]]; then start if [[ $list == /* ]]; then cp $list $dir/webs/webs.txt @@ -3463,11 +3519,11 @@ case $opt_mode in ;; 'n') PRESERVE=true - if [[ -n "$multi" ]]; then + if [[ -n $multi ]]; then multi_osint exit fi - if [[ -n "$list" ]]; then + if [[ -n $list ]]; then sed -i 's/\r$//' $list while IFS= read -r domain; do start From 37e2c165edcfb7ef3baaa9930a29bb8cec2904e2 Mon Sep 17 00:00:00 2001 From: six2dez Date: Tue, 14 Nov 2023 11:26:36 +0100 Subject: [PATCH 08/17] push --- .gitignore | 1 + .gitmodules | 3 --- 2 files changed, 1 insertion(+), 3 deletions(-) delete mode 100644 .gitmodules diff --git a/.gitignore b/.gitignore index ecd020f9..72333214 100644 --- a/.gitignore +++ b/.gitignore @@ -2,6 +2,7 @@ Recon/ output/ .obsidian/ +test/ #Ignoring compressed files *.tar diff --git a/.gitmodules b/.gitmodules deleted file mode 100644 index ad953d50..00000000 --- a/.gitmodules +++ /dev/null @@ -1,3 +0,0 @@ -[submodule "assets/spinny"] - path = assets/spinny - url = https://github.com/hschne/spinny From cd2e3ce34c45f57c6e7f2a1ca562b51f7332bcc6 Mon Sep 17 00:00:00 2001 From: six2dez Date: Tue, 14 Nov 2023 11:29:11 +0100 Subject: [PATCH 09/17] push --- assets/spinny | 1 - 1 file changed, 1 deletion(-) delete mode 160000 assets/spinny diff --git a/assets/spinny b/assets/spinny deleted file mode 160000 index 2e0a8cca..00000000 --- a/assets/spinny +++ /dev/null @@ -1 +0,0 @@ -Subproject commit 2e0a8cca7e49a0d16262939c2a1c5f57719224b5 From 98ebb907673e7a77f99c987c46885fff90c6ca7f Mon Sep 17 00:00:00 2001 From: six2dez Date: Tue, 14 Nov 2023 11:29:57 +0100 Subject: [PATCH 10/17] fix spinny --- assets/spinny/spinny.sh | 88 +++++++++++++++++++++++++++++++++++++++++ 1 file changed, 88 insertions(+) create mode 100755 assets/spinny/spinny.sh diff --git a/assets/spinny/spinny.sh b/assets/spinny/spinny.sh new file mode 100755 index 00000000..9e83f7a7 --- /dev/null +++ b/assets/spinny/spinny.sh @@ -0,0 +1,88 @@ +#!/usr/bin/env bash + +declare __spinny__spinner_pid + +declare -a __spinny__frames=() + +spinny::start() { + tput civis + spinny::_spinner & + __spinny__spinner_pid=$! +} + +spinny::stop() { + [[ -z "$__spinny__spinner_pid" ]] && return 0 + + kill -9 "$__spinny__spinner_pid" + # Use conditional to avoid exiting the program immediatly + wait "$__spinny__spinner_pid" 2>/dev/null || true +} + +spinny::_spinner() { + local delay=${SPINNY_DELAY:-0.3} + spinny::_load_frames + spinny::_pad_frames + while : + do + for frame in "${__spinny__frames[@]}" + do + # After rendering each frame the cursor is reset to + # the previous position so that the next frame can + # overwrite it + tput sc + printf "%b" "$frame" + tput rc + sleep "$delay" + done + done +} + +spinny::_pad_frames() { + # Frames with different lengths need to be padded + # for a smooth animation. We calculate the maximum + # size of all frames and pad all smaller ones with + # white space. 
+ local max_length + max_length=$(spinny::_max_framelength) + local array_length=${#__spinny__frames[@]} + for (( i=0; c max)) && max=$len + done + echo "$max" +} + +spinny::_load_frames() { + # Load custom frames if any or fall back on the default animation + if [[ -z $SPINNY_FRAMES ]]; then + __spinny__frames=(- "\\" "|" /) + else + __spinny__frames=("${SPINNY_FRAMES[@]}") + fi +} + +spinny::_finish(){ + # Make sure to remove variables and make the cursor visible again + unset __spinny__spinner_pid + unset __spinny__frames + tput cnorm +} + +trap spinny::_finish EXIT + + From 0502caa244e6cc62bcc4dabad65f5128af00d912 Mon Sep 17 00:00:00 2001 From: six2dez Date: Thu, 16 Nov 2023 10:47:08 +0100 Subject: [PATCH 11/17] Spinny fix --- assets/spinny/spinny.sh | 11 +++-------- 1 file changed, 3 insertions(+), 8 deletions(-) diff --git a/assets/spinny/spinny.sh b/assets/spinny/spinny.sh index 9e83f7a7..a174f6ce 100755 --- a/assets/spinny/spinny.sh +++ b/assets/spinny/spinny.sh @@ -5,7 +5,6 @@ declare __spinny__spinner_pid declare -a __spinny__frames=() spinny::start() { - tput civis spinny::_spinner & __spinny__spinner_pid=$! } @@ -16,6 +15,7 @@ spinny::stop() { kill -9 "$__spinny__spinner_pid" # Use conditional to avoid exiting the program immediatly wait "$__spinny__spinner_pid" 2>/dev/null || true + printf "\r\033[K" } spinny::_spinner() { @@ -26,12 +26,7 @@ spinny::_spinner() { do for frame in "${__spinny__frames[@]}" do - # After rendering each frame the cursor is reset to - # the previous position so that the next frame can - # overwrite it - tput sc - printf "%b" "$frame" - tput rc + printf "\r\033[K%s" "$frame" sleep "$delay" done done @@ -45,7 +40,7 @@ spinny::_pad_frames() { local max_length max_length=$(spinny::_max_framelength) local array_length=${#__spinny__frames[@]} - for (( i=0; c Date: Sun, 3 Dec 2023 22:10:26 +0100 Subject: [PATCH 12/17] Get geoinfo of IPs --- reconftw.sh | 68 +++++++++++++++++++++++++++++++++++++++++++++++++++++ 1 file changed, 68 insertions(+) diff --git a/reconftw.sh b/reconftw.sh index e2b37c89..1335debc 100755 --- a/reconftw.sh +++ b/reconftw.sh @@ -13,6 +13,13 @@ function banner() { printf "\n ${reconftw_version} by @six2dez${reset}\n" } +function test_connectivity(){ + if nc -zw1 google.com 443 2>/dev/null; then + echo -e "${lgray}Connection: ${lgreen}OK${reset}" + else + echo -e "${lred}[!] Please check your internet connection and then try again...${reset}";exit 1 + fi +} ############################################################################################################### ################################################### TOOLS ##################################################### ############################################################################################################### @@ -1206,6 +1213,67 @@ function s3buckets() { spinny::stop } + +############################################################################################################### +############################################# GEOLOCALIZATION INFO ####################################################### +############################################################################################################### + + +function geo_info(){ + if { [ ! -f "$called_fn_dir/.${FUNCNAME[0]}" ] || [ "$DIFF" = true ]; }; then + start_func ${FUNCNAME[0]} "Running: ipinfo via ipapi.co" + ips_file="${dir}/hosts/ips.txt" + if [ ! -f $ips_file ]; then + echo "File ${dir}/hosts/ips.txt does not exist." 
+ else + for ip in $(cat "$ips_file"); do + json_output=$(curl -s https://ipapi.co/$ip/json) + echo $json_output >> ${dir}/hosts/geoip.json + ip=$(echo $json_output| jq '.ip' | tr -d '''"''') + network=$(echo $json_output| jq '.network' | tr -d '''"''') + city=$(echo $json_output| jq '.city' | tr -d '''"''') + region=$(echo $json_output| jq '.region' | tr -d '''"''') + country=$(echo $json_output| jq '.country' | tr -d '''"''') + country_name=$(echo $json_output| jq '.country_name' | tr -d '''"''') + country_code=$(echo $json_output| jq '.country_code' | tr -d '''"''') + country_code_iso3=$(echo $json_output| jq '.country_code_iso3' | tr -d '''"''') + country_tld=$(echo $json_output| jq '.country_tld' | tr -d '''"''') + continent_code=$(echo $json_output| jq '.continent_code' | tr -d '''"''') + latitude=$(echo $json_output| jq '.latitude' | tr -d '''"''') + longitude=$(echo $json_output| jq '.longitude' | tr -d '''"''') + timezone=$(echo $json_output| jq '.timezone' | tr -d '''"''') + utc_offset=$(echo $json_output| jq '.utc_offset' | tr -d '''"''') + asn=$(echo $json_output| jq '.asn' | tr -d '''"''') + org=$(echo $json_output| jq '.org' | tr -d '''"''') + + echo "IP: $ip" >> ${dir}/hosts/geoip.txt + echo "Network: $network" >> ${dir}/hosts/geoip.txt + echo "City: $city" >> ${dir}/hosts/geoip.txt + echo "Region: $region" >> ${dir}/hosts/geoip.txt + echo "Country: $country" >> ${dir}/hosts/geoip.txt + echo "Country Name: $country_name" >> ${dir}/hosts/geoip.txt + echo "Country Code: $country_code" >> ${dir}/hosts/geoip.txt + echo "Country Code ISO3: $country_code_iso3" >> ${dir}/hosts/geoip.txt + echo "Country tld: $country_tld" >> ${dir}/hosts/geoip.txt + echo "Continent Code: $continent_code" >> ${dir}/hosts/geoip.txt + echo "Latitude: $latitude" >> ${dir}/hosts/geoip.txt + echo "Longitude: $longitude" >> ${dir}/hosts/geoip.txt + echo "Timezone: $timezone" >> ${dir}/hosts/geoip.txt + echo "UTC Offset: $utc_offset" >> ${dir}/hosts/geoip.txt + echo "ASN: $asn" >> ${dir}/hosts/geoip.txt + echo "ORG: $org" >> ${dir}/hosts/geoip.txt + echo -e "------------------------------\n" >> ${dir}/hosts/geoip.txt + done + fi + end_func "Results are saved in hosts/geoip.txt and hosts/geoip.json" ${FUNCNAME[0]} + else + printf "${yellow} ${FUNCNAME[0]} is already processed, to force executing ${FUNCNAME[0]} delete\n $called_fn_dir/.${FUNCNAME[0]} ${reset}\n\n" + fi +} + + + + ############################################################################################################### ########################################### WEB DETECTION ##################################################### ############################################################################################################### From 875f3b9ef38fb38ba33ae5083b1ef77c443413e4 Mon Sep 17 00:00:00 2001 From: Lorenzo Camilli <80099484+lorenzocamilli@users.noreply.github.com> Date: Sun, 3 Dec 2023 22:16:07 +0100 Subject: [PATCH 13/17] geo_info function call --- reconftw.sh | 1 + 1 file changed, 1 insertion(+) diff --git a/reconftw.sh b/reconftw.sh index 1335debc..b594e6f1 100755 --- a/reconftw.sh +++ b/reconftw.sh @@ -1459,6 +1459,7 @@ function portscan() { [ -s "hosts/ips.txt" ] && comm -23 <(cat hosts/ips.txt | sort -u) <(cat hosts/cdn_providers.txt | cut -d'[' -f1 | sed 's/[[:space:]]*$//' | sort -u) | grep -aEiv "^(127|10|169\.154|172\.1[6789]|172\.2[0-9]|172\.3[01]|192\.168)\." 
| grep -oE "\b([0-9]{1,3}\.){3}[0-9]{1,3}\b" | sort -u | anew -q .tmp/ips_nocdn.txt printf "${bblue}\n Resolved IP addresses (No CDN) ${reset}\n\n" [ -s ".tmp/ips_nocdn.txt" ] && cat .tmp/ips_nocdn.txt | sort + geo_info printf "${bblue}\n Scanning ports... ${reset}\n\n" if [[ $PORTSCAN_PASSIVE == true ]] && [[ ! -f "hosts/portscan_passive.txt" ]] && [[ -s ".tmp/ips_nocdn.txt" ]]; then smap -iL .tmp/ips_nocdn.txt >hosts/portscan_passive.txt From 1d4833efb98f7dad30f48b9702c4f810c1e42bcf Mon Sep 17 00:00:00 2001 From: Lorenzo Camilli <80099484+lorenzocamilli@users.noreply.github.com> Date: Thu, 7 Dec 2023 10:49:36 +0100 Subject: [PATCH 14/17] Get vulns and open prots using shodan db --- reconftw.sh | 24 ++++++++++++++++++++++++ 1 file changed, 24 insertions(+) diff --git a/reconftw.sh b/reconftw.sh index b594e6f1..01e29362 100755 --- a/reconftw.sh +++ b/reconftw.sh @@ -1461,6 +1461,30 @@ function portscan() { [ -s ".tmp/ips_nocdn.txt" ] && cat .tmp/ips_nocdn.txt | sort geo_info printf "${bblue}\n Scanning ports... ${reset}\n\n" + ips_file="${dir}/hosts/ips.txt" + if [ "$PORTSCAN_PASSIVE" = true ] ; then + if [ ! -f $ips_file ]; then + echo "File $ips_file does not exist." + else + start_subfunc "Running : Shodan to check for open ports " + for cip in $(cat "$ips_file"); do + json_result=$(curl -s https://internetdb.shodan.io/${cip}) + json_array+=("$json_result") + done + formatted_json="[" + for ((i=0; i<${#json_array[@]}; i++)); do + formatted_json+="$(echo ${json_array[i]} | tr -d '\n')" + if [ $i -lt $((${#json_array[@]}-1)) ]; then + formatted_json+=", " + fi + done + formatted_json+="]" + echo "$formatted_json" > "${dir}/hosts/shodan_results.json" + fi + else + printf "${yellow} ${FUNCNAME[0]} is already processed, to force executing ${FUNCNAME[0]} delete\n $called_fn_dir/.${FUNCNAME[0]} ${reset}\n\n" + fi + end_func "Results are saved in hosts/shodan_results.json" ${FUNCNAME[0]} if [[ $PORTSCAN_PASSIVE == true ]] && [[ ! 
-f "hosts/portscan_passive.txt" ]] && [[ -s ".tmp/ips_nocdn.txt" ]]; then smap -iL .tmp/ips_nocdn.txt >hosts/portscan_passive.txt fi From 863f2d22cff34c8cb44e65961e44243921d4060c Mon Sep 17 00:00:00 2001 From: Krishna Agarwal <85845881+Kr1shna4garwal@users.noreply.github.com> Date: Mon, 18 Dec 2023 21:41:45 +0530 Subject: [PATCH 15/17] Update install.sh Mantra -> mantra (Causing error in installation) --- install.sh | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/install.sh b/install.sh index 9a7090f1..c04f60f7 100755 --- a/install.sh +++ b/install.sh @@ -69,7 +69,7 @@ gotools["subfinder"]="go install -v github.com/projectdiscovery/subfinder/v2/cmd gotools["byp4xx"]="go install -v github.com/lobuhi/byp4xx@latest" gotools["hakip2host"]="go install -v github.com/hakluke/hakip2host@latest" gotools["gau"]="go install -v github.com/lc/gau/v2/cmd/gau@latest" -gotools["Mantra"]="go install -v github.com/MrEmpy/Mantra@latest" +gotools["Mantra"]="go install -v github.com/MrEmpy/mantra@latest" gotools["crt"]="go install -v github.com/cemulus/crt@latest" gotools["s3scanner"]="go install -v github.com/sa7mon/s3scanner@latest" From a9e819ad6e0d6d11434d36af3627f8d373018433 Mon Sep 17 00:00:00 2001 From: six2dez Date: Wed, 17 Jan 2024 12:57:38 +0100 Subject: [PATCH 16/17] New dev version no web --- assets/potential.json | 55 - assets/spinny/spinny.sh | 108 +- install.sh | 698 +++--- reconftw.sh | 117 +- requirements.txt | 2 +- web/Makefile | 13 - web/README.md | 87 - web/apikeys/__init__.py | 0 .../__pycache__/__init__.cpython-310.pyc | Bin 131 -> 0 bytes web/apikeys/__pycache__/apps.cpython-310.pyc | Bin 412 -> 0 bytes .../__pycache__/config.cpython-310.pyc | Bin 4546 -> 0 bytes .../__pycache__/models.cpython-310.pyc | Bin 169 -> 0 bytes web/apikeys/__pycache__/urls.cpython-310.pyc | Bin 296 -> 0 bytes web/apikeys/__pycache__/views.cpython-310.pyc | Bin 6162 -> 0 bytes web/apikeys/apps.py | 6 - web/apikeys/config.py | 204 -- web/apikeys/migrations/__init__.py | 0 .../__pycache__/__init__.cpython-310.pyc | Bin 142 -> 0 bytes web/apikeys/models.py | 3 - web/apikeys/tests.py | 3 - web/apikeys/urls.py | 10 - web/apikeys/views.py | 144 -- web/editprofile/__init__.py | 0 .../__pycache__/__init__.cpython-310.pyc | Bin 135 -> 0 bytes .../__pycache__/apps.cpython-310.pyc | Bin 451 -> 0 bytes .../__pycache__/forms.cpython-310.pyc | Bin 159 -> 0 bytes .../__pycache__/imgUser.cpython-310.pyc | Bin 471 -> 0 bytes .../__pycache__/models.cpython-310.pyc | Bin 575 -> 0 bytes .../__pycache__/urls.cpython-310.pyc | Bin 296 -> 0 bytes .../__pycache__/views.cpython-310.pyc | Bin 1830 -> 0 bytes web/editprofile/apps.py | 6 - web/editprofile/imgUser.py | 13 - web/editprofile/migrations/__init__.py | 0 .../__pycache__/__init__.cpython-310.pyc | Bin 173 -> 0 bytes web/editprofile/models.py | 8 - web/editprofile/tests.py | 3 - web/editprofile/urls.py | 10 - web/editprofile/views.py | 103 - web/manage.py | 22 - web/projects/__init__.py | 0 .../__pycache__/__init__.cpython-310.pyc | Bin 132 -> 0 bytes web/projects/__pycache__/apps.cpython-310.pyc | Bin 415 -> 0 bytes .../__pycache__/models.cpython-310.pyc | Bin 1168 -> 0 bytes web/projects/__pycache__/urls.cpython-310.pyc | Bin 402 -> 0 bytes .../__pycache__/views.cpython-310.pyc | Bin 4637 -> 0 bytes web/projects/apps.py | 6 - web/projects/migrations/0001_initial.py | 26 - .../migrations/0002_project_scan_mode.py | 18 - web/projects/migrations/__init__.py | 0 .../__pycache__/0001_initial.cpython-310.pyc | Bin 1029 -> 0 bytes 
.../0002_project_scan_mode.cpython-310.pyc | Bin 593 -> 0 bytes .../__pycache__/__init__.cpython-310.pyc | Bin 143 -> 0 bytes web/projects/models.py | 22 - web/projects/tests.py | 3 - web/projects/urls.py | 13 - web/projects/views.py | 249 --- web/requirements.txt | 39 - web/scans/__init__.py | 0 .../__pycache__/__init__.cpython-310.pyc | Bin 129 -> 0 bytes web/scans/__pycache__/apps.cpython-310.pyc | Bin 406 -> 0 bytes web/scans/__pycache__/models.cpython-310.pyc | Bin 14402 -> 0 bytes web/scans/__pycache__/tasks.cpython-310.pyc | Bin 2266 -> 0 bytes web/scans/__pycache__/urls.cpython-310.pyc | Bin 331 -> 0 bytes web/scans/__pycache__/utils.cpython-310.pyc | Bin 31044 -> 0 bytes web/scans/__pycache__/views.cpython-310.pyc | Bin 11827 -> 0 bytes web/scans/apps.py | 6 - web/scans/migrations/0001_initial.py | 391 ---- .../migrations/0002_screenshots_hostname.py | 18 - ...ommonports_webs_uncommon_ports_and_more.py | 27 - ..._secrets_cms_cms_cms_subdomain_and_more.py | 46 - ...takeover_subtakeover_subdomain_and_more.py | 30 - ...ssets_azure_cloudassets_google_and_more.py | 33 - ...founcommon_webfullinfouncommon_and_more.py | 37 - web/scans/migrations/0008_screenshots_port.py | 18 - ...name_address_portscanactive_ip_and_more.py | 32 - ...0_alter_subdomainsdns_a_record_and_more.py | 58 - ...llinfo_a_alter_webfullinfo_technologies.py | 23 - ..._webfullinfo_port_alter_webfullinfo_url.py | 23 - web/scans/migrations/__init__.py | 0 .../__pycache__/0001_initial.cpython-310.pyc | Bin 7151 -> 0 bytes .../0002_screenshots_hostname.cpython-310.pyc | Bin 585 -> 0 bytes ...bs_uncommon_ports_and_more.cpython-310.pyc | Bin 745 -> 0 bytes ...cms_cms_subdomain_and_more.cpython-310.pyc | Bin 1351 -> 0 bytes ...akeover_subdomain_and_more.cpython-310.pyc | Bin 851 -> 0 bytes ...loudassets_google_and_more.cpython-310.pyc | Bin 810 -> 0 bytes ...bfullinfouncommon_and_more.cpython-310.pyc | Bin 851 -> 0 bytes .../0008_screenshots_port.cpython-310.pyc | Bin 625 -> 0 bytes ...portscanactive_ip_and_more.cpython-310.pyc | Bin 829 -> 0 bytes ...mainsdns_a_record_and_more.cpython-310.pyc | Bin 1053 -> 0 bytes ...r_webfullinfo_technologies.cpython-310.pyc | Bin 721 -> 0 bytes ...port_alter_webfullinfo_url.cpython-310.pyc | Bin 694 -> 0 bytes .../__pycache__/__init__.cpython-310.pyc | Bin 140 -> 0 bytes web/scans/models.py | 390 ---- web/scans/tasks.py | 116 - web/scans/templatetags/__init__.py | 0 .../__pycache__/__init__.cpython-310.pyc | Bin 142 -> 0 bytes .../__pycache__/dict_handler.cpython-310.pyc | Bin 351 -> 0 bytes web/scans/templatetags/dict_handler.py | 6 - web/scans/tests.py | 38 - web/scans/urls.py | 11 - web/scans/utils.py | 1473 ------------- web/scans/views.py | 448 ---- web/schedules/__init__.py | 0 .../__pycache__/__init__.cpython-310.pyc | Bin 133 -> 0 bytes .../__pycache__/urls.cpython-310.pyc | Bin 412 -> 0 bytes .../__pycache__/views.cpython-310.pyc | Bin 7635 -> 0 bytes web/schedules/admin.py | 3 - web/schedules/apps.py | 6 - web/schedules/migrations/__init__.py | 0 web/schedules/models.py | 3 - web/schedules/urls.py | 13 - web/schedules/views.py | 138 -- web/static/css/bootstrap.min.css | 7 - web/static/css/bootstrap.min.css.map | 1 - web/static/css/recon.css | 968 --------- web/static/css/scan_all.css | 1225 ----------- web/static/css/subdomains.css | 1000 --------- web/static/fa/css/all.min.css | 6 - web/static/fa/webfonts/fa-brands-400.ttf | Bin 181260 -> 0 bytes web/static/fa/webfonts/fa-brands-400.woff2 | Bin 105204 -> 0 bytes web/static/fa/webfonts/fa-regular-400.ttf | Bin 60208 -> 0 bytes 
web/static/fa/webfonts/fa-regular-400.woff2 | Bin 23760 -> 0 bytes web/static/fa/webfonts/fa-solid-900.ttf | Bin 298276 -> 0 bytes web/static/fa/webfonts/fa-solid-900.woff2 | Bin 125064 -> 0 bytes web/static/fa/webfonts/fa-v4compatibility.ttf | Bin 10552 -> 0 bytes .../fa/webfonts/fa-v4compatibility.woff2 | Bin 5016 -> 0 bytes web/static/img/background.png | Bin 394004 -> 0 bytes web/static/img/favicon.ico | Bin 4286 -> 0 bytes web/static/img/reconftw_logo.png | Bin 13296 -> 0 bytes web/static/img/ss_template.png | Bin 6364 -> 0 bytes web/static/img/unknown.ico | Bin 4286 -> 0 bytes web/static/imgUsers/Defult.png | Bin 43442 -> 0 bytes web/static/js/bootstrap.bundle.min.js | 7 - web/static/js/bootstrap.bundle.min.js.map | 1 - web/static/js/bootstrap.min.js | 7 - web/static/js/bootstrap.min.js.map | 1 - web/static/js/scans_all.js | 469 ---- web/templates/apikeys_settings.html | 1869 ---------------- web/templates/base.html | 234 -- web/templates/edit_profile.html | 1151 ---------- web/templates/logged_out.html | 6 - web/templates/login.html | 196 -- web/templates/modal.html | 449 ---- web/templates/projects.html | 1585 -------------- web/templates/scans.html | 1516 ------------- web/templates/scans_all_js.html | 472 ---- web/templates/scans_osint.html | 1710 --------------- web/templates/scans_passive.html | 1677 --------------- web/templates/scans_recon.html | 1239 ----------- web/templates/scans_subdomains.html | 390 ---- web/templates/scans_web.html | 1889 ----------------- web/web/__init__.py | 3 - web/web/__pycache__/__init__.cpython-310.pyc | Bin 195 -> 0 bytes web/web/__pycache__/__init__.cpython-39.pyc | Bin 232 -> 0 bytes web/web/__pycache__/celery.cpython-310.pyc | Bin 380 -> 0 bytes web/web/__pycache__/celery.cpython-39.pyc | Bin 417 -> 0 bytes web/web/__pycache__/settings.cpython-310.pyc | Bin 2651 -> 0 bytes web/web/__pycache__/settings.cpython-39.pyc | Bin 2276 -> 0 bytes web/web/__pycache__/urls.cpython-310.pyc | Bin 856 -> 0 bytes web/web/__pycache__/urls.cpython-39.pyc | Bin 685 -> 0 bytes web/web/__pycache__/wsgi.cpython-310.pyc | Bin 522 -> 0 bytes web/web/__pycache__/wsgi.cpython-39.pyc | Bin 537 -> 0 bytes web/web/asgi.py | 16 - web/web/celery.py | 9 - web/web/settings.py | 227 -- web/web/urls.py | 21 - web/web/wsgi.py | 16 - 167 files changed, 408 insertions(+), 23337 deletions(-) delete mode 100644 assets/potential.json delete mode 100644 web/Makefile delete mode 100644 web/README.md delete mode 100644 web/apikeys/__init__.py delete mode 100644 web/apikeys/__pycache__/__init__.cpython-310.pyc delete mode 100644 web/apikeys/__pycache__/apps.cpython-310.pyc delete mode 100644 web/apikeys/__pycache__/config.cpython-310.pyc delete mode 100644 web/apikeys/__pycache__/models.cpython-310.pyc delete mode 100644 web/apikeys/__pycache__/urls.cpython-310.pyc delete mode 100644 web/apikeys/__pycache__/views.cpython-310.pyc delete mode 100644 web/apikeys/apps.py delete mode 100644 web/apikeys/config.py delete mode 100644 web/apikeys/migrations/__init__.py delete mode 100644 web/apikeys/migrations/__pycache__/__init__.cpython-310.pyc delete mode 100644 web/apikeys/models.py delete mode 100644 web/apikeys/tests.py delete mode 100644 web/apikeys/urls.py delete mode 100644 web/apikeys/views.py delete mode 100644 web/editprofile/__init__.py delete mode 100644 web/editprofile/__pycache__/__init__.cpython-310.pyc delete mode 100644 web/editprofile/__pycache__/apps.cpython-310.pyc delete mode 100644 web/editprofile/__pycache__/forms.cpython-310.pyc delete mode 100644 
web/editprofile/__pycache__/imgUser.cpython-310.pyc delete mode 100644 web/editprofile/__pycache__/models.cpython-310.pyc delete mode 100644 web/editprofile/__pycache__/urls.cpython-310.pyc delete mode 100644 web/editprofile/__pycache__/views.cpython-310.pyc delete mode 100644 web/editprofile/apps.py delete mode 100644 web/editprofile/imgUser.py delete mode 100644 web/editprofile/migrations/__init__.py delete mode 100644 web/editprofile/migrations/__pycache__/__init__.cpython-310.pyc delete mode 100644 web/editprofile/models.py delete mode 100644 web/editprofile/tests.py delete mode 100644 web/editprofile/urls.py delete mode 100644 web/editprofile/views.py delete mode 100644 web/manage.py delete mode 100644 web/projects/__init__.py delete mode 100644 web/projects/__pycache__/__init__.cpython-310.pyc delete mode 100644 web/projects/__pycache__/apps.cpython-310.pyc delete mode 100644 web/projects/__pycache__/models.cpython-310.pyc delete mode 100644 web/projects/__pycache__/urls.cpython-310.pyc delete mode 100644 web/projects/__pycache__/views.cpython-310.pyc delete mode 100644 web/projects/apps.py delete mode 100644 web/projects/migrations/0001_initial.py delete mode 100644 web/projects/migrations/0002_project_scan_mode.py delete mode 100644 web/projects/migrations/__init__.py delete mode 100644 web/projects/migrations/__pycache__/0001_initial.cpython-310.pyc delete mode 100644 web/projects/migrations/__pycache__/0002_project_scan_mode.cpython-310.pyc delete mode 100644 web/projects/migrations/__pycache__/__init__.cpython-310.pyc delete mode 100644 web/projects/models.py delete mode 100644 web/projects/tests.py delete mode 100644 web/projects/urls.py delete mode 100644 web/projects/views.py delete mode 100644 web/requirements.txt delete mode 100644 web/scans/__init__.py delete mode 100644 web/scans/__pycache__/__init__.cpython-310.pyc delete mode 100644 web/scans/__pycache__/apps.cpython-310.pyc delete mode 100644 web/scans/__pycache__/models.cpython-310.pyc delete mode 100644 web/scans/__pycache__/tasks.cpython-310.pyc delete mode 100644 web/scans/__pycache__/urls.cpython-310.pyc delete mode 100644 web/scans/__pycache__/utils.cpython-310.pyc delete mode 100644 web/scans/__pycache__/views.cpython-310.pyc delete mode 100644 web/scans/apps.py delete mode 100644 web/scans/migrations/0001_initial.py delete mode 100644 web/scans/migrations/0002_screenshots_hostname.py delete mode 100644 web/scans/migrations/0003_remove_websuncommonports_webs_uncommon_ports_and_more.py delete mode 100644 web/scans/migrations/0004_rename_github_secrets_cms_cms_cms_subdomain_and_more.py delete mode 100644 web/scans/migrations/0005_remove_subtakeover_takeover_subtakeover_subdomain_and_more.py delete mode 100644 web/scans/migrations/0006_cloudassets_azure_cloudassets_google_and_more.py delete mode 100644 web/scans/migrations/0007_remove_webfullinfouncommon_webfullinfouncommon_and_more.py delete mode 100644 web/scans/migrations/0008_screenshots_port.py delete mode 100644 web/scans/migrations/0009_rename_address_portscanactive_ip_and_more.py delete mode 100644 web/scans/migrations/0010_alter_subdomainsdns_a_record_and_more.py delete mode 100644 web/scans/migrations/0011_alter_webfullinfo_a_alter_webfullinfo_technologies.py delete mode 100644 web/scans/migrations/0012_alter_webfullinfo_port_alter_webfullinfo_url.py delete mode 100644 web/scans/migrations/__init__.py delete mode 100644 web/scans/migrations/__pycache__/0001_initial.cpython-310.pyc delete mode 100644 
web/scans/migrations/__pycache__/0002_screenshots_hostname.cpython-310.pyc delete mode 100644 web/scans/migrations/__pycache__/0003_remove_websuncommonports_webs_uncommon_ports_and_more.cpython-310.pyc delete mode 100644 web/scans/migrations/__pycache__/0004_rename_github_secrets_cms_cms_cms_subdomain_and_more.cpython-310.pyc delete mode 100644 web/scans/migrations/__pycache__/0005_remove_subtakeover_takeover_subtakeover_subdomain_and_more.cpython-310.pyc delete mode 100644 web/scans/migrations/__pycache__/0006_cloudassets_azure_cloudassets_google_and_more.cpython-310.pyc delete mode 100644 web/scans/migrations/__pycache__/0007_remove_webfullinfouncommon_webfullinfouncommon_and_more.cpython-310.pyc delete mode 100644 web/scans/migrations/__pycache__/0008_screenshots_port.cpython-310.pyc delete mode 100644 web/scans/migrations/__pycache__/0009_rename_address_portscanactive_ip_and_more.cpython-310.pyc delete mode 100644 web/scans/migrations/__pycache__/0010_alter_subdomainsdns_a_record_and_more.cpython-310.pyc delete mode 100644 web/scans/migrations/__pycache__/0011_alter_webfullinfo_a_alter_webfullinfo_technologies.cpython-310.pyc delete mode 100644 web/scans/migrations/__pycache__/0012_alter_webfullinfo_port_alter_webfullinfo_url.cpython-310.pyc delete mode 100644 web/scans/migrations/__pycache__/__init__.cpython-310.pyc delete mode 100644 web/scans/models.py delete mode 100644 web/scans/tasks.py delete mode 100644 web/scans/templatetags/__init__.py delete mode 100644 web/scans/templatetags/__pycache__/__init__.cpython-310.pyc delete mode 100644 web/scans/templatetags/__pycache__/dict_handler.cpython-310.pyc delete mode 100644 web/scans/templatetags/dict_handler.py delete mode 100644 web/scans/tests.py delete mode 100644 web/scans/urls.py delete mode 100644 web/scans/utils.py delete mode 100644 web/scans/views.py delete mode 100644 web/schedules/__init__.py delete mode 100644 web/schedules/__pycache__/__init__.cpython-310.pyc delete mode 100644 web/schedules/__pycache__/urls.cpython-310.pyc delete mode 100644 web/schedules/__pycache__/views.cpython-310.pyc delete mode 100644 web/schedules/admin.py delete mode 100644 web/schedules/apps.py delete mode 100644 web/schedules/migrations/__init__.py delete mode 100644 web/schedules/models.py delete mode 100644 web/schedules/urls.py delete mode 100644 web/schedules/views.py delete mode 100644 web/static/css/bootstrap.min.css delete mode 100644 web/static/css/bootstrap.min.css.map delete mode 100644 web/static/css/recon.css delete mode 100644 web/static/css/scan_all.css delete mode 100644 web/static/css/subdomains.css delete mode 100644 web/static/fa/css/all.min.css delete mode 100644 web/static/fa/webfonts/fa-brands-400.ttf delete mode 100644 web/static/fa/webfonts/fa-brands-400.woff2 delete mode 100644 web/static/fa/webfonts/fa-regular-400.ttf delete mode 100644 web/static/fa/webfonts/fa-regular-400.woff2 delete mode 100644 web/static/fa/webfonts/fa-solid-900.ttf delete mode 100644 web/static/fa/webfonts/fa-solid-900.woff2 delete mode 100644 web/static/fa/webfonts/fa-v4compatibility.ttf delete mode 100644 web/static/fa/webfonts/fa-v4compatibility.woff2 delete mode 100644 web/static/img/background.png delete mode 100644 web/static/img/favicon.ico delete mode 100644 web/static/img/reconftw_logo.png delete mode 100644 web/static/img/ss_template.png delete mode 100644 web/static/img/unknown.ico delete mode 100644 web/static/imgUsers/Defult.png delete mode 100644 web/static/js/bootstrap.bundle.min.js delete mode 100644 
web/static/js/bootstrap.bundle.min.js.map delete mode 100644 web/static/js/bootstrap.min.js delete mode 100644 web/static/js/bootstrap.min.js.map delete mode 100644 web/static/js/scans_all.js delete mode 100644 web/templates/apikeys_settings.html delete mode 100644 web/templates/base.html delete mode 100644 web/templates/edit_profile.html delete mode 100644 web/templates/logged_out.html delete mode 100644 web/templates/login.html delete mode 100644 web/templates/modal.html delete mode 100644 web/templates/projects.html delete mode 100644 web/templates/scans.html delete mode 100644 web/templates/scans_all_js.html delete mode 100644 web/templates/scans_osint.html delete mode 100644 web/templates/scans_passive.html delete mode 100644 web/templates/scans_recon.html delete mode 100644 web/templates/scans_subdomains.html delete mode 100644 web/templates/scans_web.html delete mode 100644 web/web/__init__.py delete mode 100644 web/web/__pycache__/__init__.cpython-310.pyc delete mode 100644 web/web/__pycache__/__init__.cpython-39.pyc delete mode 100644 web/web/__pycache__/celery.cpython-310.pyc delete mode 100644 web/web/__pycache__/celery.cpython-39.pyc delete mode 100644 web/web/__pycache__/settings.cpython-310.pyc delete mode 100644 web/web/__pycache__/settings.cpython-39.pyc delete mode 100644 web/web/__pycache__/urls.cpython-310.pyc delete mode 100644 web/web/__pycache__/urls.cpython-39.pyc delete mode 100644 web/web/__pycache__/wsgi.cpython-310.pyc delete mode 100644 web/web/__pycache__/wsgi.cpython-39.pyc delete mode 100644 web/web/asgi.py delete mode 100644 web/web/celery.py delete mode 100644 web/web/settings.py delete mode 100644 web/web/urls.py delete mode 100644 web/web/wsgi.py diff --git a/assets/potential.json b/assets/potential.json deleted file mode 100644 index 26aefd03..00000000 --- a/assets/potential.json +++ /dev/null @@ -1,55 +0,0 @@ -{ - "flags" : "-HanrE", - "patterns" : [ - "callback=", - "jsonp=", - "api_key=", - "api=", - "password=", - "email=", - "emailto=", - "token=", - "username=", - "csrf_token=", - "unsubscribe_token=", - "p=", - "q=", - "query=", - "search=", - "id=", - "item=", - "page_id=", - "secret=", - "url=", - "from_url=", - "load_url=", - "file_url=", - "page_url=", - "file_name=", - "page=", - "folder=", - "folder_urllogin_url=", - "img_url=", - "return_url=", - "return_to=", - "next=", - "redirect=", - "redirect_to=", - "logout=", - "checkout=", - "checkout_url=", - "goto=", - "next_page=", - "file=", - "load_file=", - "cmd=", - "ip=", - "ping=", - "lang=", - "edit=", - "LoginId=", - "size=", - "signature=", - "passinfo=" - ] - } \ No newline at end of file diff --git a/assets/spinny/spinny.sh b/assets/spinny/spinny.sh index a174f6ce..4a594bf7 100755 --- a/assets/spinny/spinny.sh +++ b/assets/spinny/spinny.sh @@ -1,83 +1,79 @@ -#!/usr/bin/env bash +#!/usr/bin/env bash declare __spinny__spinner_pid declare -a __spinny__frames=() spinny::start() { - spinny::_spinner & - __spinny__spinner_pid=$! + spinny::_spinner & + __spinny__spinner_pid=$! 
} spinny::stop() { - [[ -z "$__spinny__spinner_pid" ]] && return 0 + [[ -z $__spinny__spinner_pid ]] && return 0 - kill -9 "$__spinny__spinner_pid" - # Use conditional to avoid exiting the program immediatly - wait "$__spinny__spinner_pid" 2>/dev/null || true - printf "\r\033[K" + kill -9 "$__spinny__spinner_pid" + # Use conditional to avoid exiting the program immediatly + wait "$__spinny__spinner_pid" 2>/dev/null || true + printf "\r\033[K" } spinny::_spinner() { - local delay=${SPINNY_DELAY:-0.3} - spinny::_load_frames - spinny::_pad_frames - while : - do - for frame in "${__spinny__frames[@]}" - do - printf "\r\033[K%s" "$frame" - sleep "$delay" - done - done + local delay=${SPINNY_DELAY:-0.3} + spinny::_load_frames + spinny::_pad_frames + while :; do + for frame in "${__spinny__frames[@]}"; do + printf "\r\033[K%s" "$frame" + sleep "$delay" + done + done } spinny::_pad_frames() { - # Frames with different lengths need to be padded - # for a smooth animation. We calculate the maximum - # size of all frames and pad all smaller ones with - # white space. - local max_length - max_length=$(spinny::_max_framelength) - local array_length=${#__spinny__frames[@]} - for (( i=0; i max)) && max=$len - done - echo "$max" + local max=${#__spinny__frames[0]} + for frame in "${__spinny__frames[@]}"; do + local len=${#frame} + ((len > max)) && max=$len + done + echo "$max" } spinny::_load_frames() { - # Load custom frames if any or fall back on the default animation - if [[ -z $SPINNY_FRAMES ]]; then - __spinny__frames=(- "\\" "|" /) - else - __spinny__frames=("${SPINNY_FRAMES[@]}") - fi + # Load custom frames if any or fall back on the default animation + if [[ -z $SPINNY_FRAMES ]]; then + # trunk-ignore(shellcheck/SC1003) + __spinny__frames=(- '\' "|" /) + else + __spinny__frames=("${SPINNY_FRAMES[@]}") + fi } -spinny::_finish(){ - # Make sure to remove variables and make the cursor visible again - unset __spinny__spinner_pid - unset __spinny__frames - tput cnorm +spinny::_finish() { + # Make sure to remove variables and make the cursor visible again + unset __spinny__spinner_pid + unset __spinny__frames + tput cnorm } trap spinny::_finish EXIT - - diff --git a/install.sh b/install.sh index c04f60f7..97e49876 100755 --- a/install.sh +++ b/install.sh @@ -8,25 +8,25 @@ double_check=false # ARM Detection ARCH=$(uname -m) case $ARCH in - amd64|x86_64) IS_ARM="False" ;; - arm64|armv6l) - IS_ARM="True" - RPI_4=$([[ $ARCH == "arm64" ]] && echo "True" || echo "False") - RPI_3=$([[ $ARCH == "arm64" ]] && echo "False" || echo "True") - ;; +amd64 | x86_64) IS_ARM="False" ;; +arm64 | armv6l) + IS_ARM="True" + RPI_4=$([[ $ARCH == "arm64" ]] && echo "True" || echo "False") + RPI_3=$([[ $ARCH == "arm64" ]] && echo "False" || echo "True") + ;; esac #Mac Osx Detecting -IS_MAC=$([[ "$OSTYPE" == "darwin"* ]] && echo "True" || echo "False") +IS_MAC=$([[ $OSTYPE == "darwin"* ]] && echo "True" || echo "False") BASH_VERSION=$(bash --version | awk 'NR==1{print $4}' | cut -d'.' 
-f1) -if [ "${BASH_VERSION}" -lt 4 ]; then - printf "${bred} Your Bash version is lower than 4, please update${reset}\n" - printf "%s Your Bash version is lower than 4, please update%s\n" "${bred}" "${reset}" >&2 - if [ "True" = "$IS_MAC" ]; then - printf "${yellow} For MacOS run 'brew install bash' and rerun installer in a new terminal${reset}\n\n" - exit 1; - fi +if [[ ${BASH_VERSION} -lt 4 ]]; then + printf "${bred} Your Bash version is lower than 4, please update${reset}\n" + printf "%s Your Bash version is lower than 4, please update%s\n" "${bred}" "${reset}" >&2 + if [[ "True" == "$IS_MAC" ]]; then + printf "${yellow} For MacOS run 'brew install bash' and rerun installer in a new terminal${reset}\n\n" + exit 1 + fi fi # Declaring Go tools and their installation commands @@ -37,7 +37,7 @@ gotools["amass"]="go install -v github.com/owasp-amass/amass/v3/...@master" gotools["ffuf"]="go install -v github.com/ffuf/ffuf/v2@latest" gotools["github-subdomains"]="go install -v github.com/gwen001/github-subdomains@latest" gotools["gitlab-subdomains"]="go install -v github.com/gwen001/gitlab-subdomains@latest" -gotools["nuclei"]="go install -v github.com/projectdiscovery/nuclei/v2/cmd/nuclei@latest" +gotools["nuclei"]="go install -v github.com/projectdiscovery/nuclei/v3/cmd/nuclei@latest" gotools["anew"]="go install -v github.com/tomnomnom/anew@latest" gotools["notify"]="go install -v github.com/projectdiscovery/notify/cmd/notify@latest" gotools["unfurl"]="go install -v github.com/tomnomnom/unfurl@v0.3.0" @@ -66,17 +66,15 @@ gotools["inscope"]="go install -v github.com/tomnomnom/hacks/inscope@latest" gotools["enumerepo"]="go install -v github.com/trickest/enumerepo@latest" gotools["Web-Cache-Vulnerability-Scanner"]="go install -v github.com/Hackmanit/Web-Cache-Vulnerability-Scanner@latest" gotools["subfinder"]="go install -v github.com/projectdiscovery/subfinder/v2/cmd/subfinder@latest" -gotools["byp4xx"]="go install -v github.com/lobuhi/byp4xx@latest" gotools["hakip2host"]="go install -v github.com/hakluke/hakip2host@latest" gotools["gau"]="go install -v github.com/lc/gau/v2/cmd/gau@latest" -gotools["Mantra"]="go install -v github.com/MrEmpy/mantra@latest" +gotools["mantra"]="go install -v github.com/MrEmpy/mantra@latest" gotools["crt"]="go install -v github.com/cemulus/crt@latest" -gotools["s3scanner"]="go install -v github.com/sa7mon/s3scanner@latest" +gotools["s3scanner"]="go install -v github.com/sa7mon/s3scanner@latest" # Declaring repositories and their paths declare -A repos repos["dorks_hunter"]="six2dez/dorks_hunter" -repos["pwndb"]="davidtavarez/pwndb" repos["dnsvalidator"]="vortexau/dnsvalidator" repos["interlace"]="codingo/Interlace" repos["brutespray"]="x90skysn3k/brutespray" @@ -101,14 +99,13 @@ repos["urless"]="xnl-h4ck3r/urless" repos["smuggler"]="defparam/smuggler" repos["Web-Cache-Vulnerability-Scanner"]="Hackmanit/Web-Cache-Vulnerability-Scanner" repos["regulator"]="cramppet/regulator" -repos["byp4xx"]="lobuhi/byp4xx" repos["ghauri"]="r0oth3x49/ghauri" repos["gitleaks"]="gitleaks/gitleaks" repos["trufflehog"]="trufflesecurity/trufflehog" +repos["dontgo403"]="devploit/dontgo403" - -function banner_web(){ - tput clear +function banner() { + tput clear printf "\n${bgreen}" printf " ██▀███ ▓█████ ▄████▄ ▒█████ ███▄ █ █████▒▄▄▄█████▓ █ █░\n" printf " ▓██ ▒ ██▒▓█ ▀ ▒██▀ ▀█ ▒██▒ ██▒ ██ ▀█ █ ▓██ ▒ ▓ ██▒ ▓▒▓█░ █ ░█░\n" @@ -120,321 +117,198 @@ function banner_web(){ printf " ░░ ░ ░ ░ ░ ░ ░ ▒ ░ ░ ░ ░ ░ ░ ░ ░ \n" printf " ░ ░ ░░ ░ ░ ░ ░ ░ \n" printf " ░ \n" - printf " ${reconftw_version} 
by @six2dez\n" + printf " ${reconftw_version} by @six2dez\n" } function install_ppfuzz() { - local url=$1 - local tar_file=$2 - - wget -N -c "$url" $DEBUG_STD - eval $SUDO tar -C /usr/local/bin/ -xzf "$tar_file" $DEBUG_STD - eval $SUDO rm -rf "$tar_file" $DEBUG_STD + local url=$1 + local tar_file=$2 + + eval wget -N -c "$url" $DEBUG_STD + eval $SUDO tar -C /usr/local/bin/ -xzf "$tar_file" $DEBUG_STD + eval $SUDO rm -rf "$tar_file" $DEBUG_STD } # This function installs various tools and repositories as per the configuration. -function install_tools(){ - - eval pip3 install -I -r requirements.txt $DEBUG_STD - - printf "${bblue} Running: Installing Golang tools (${#gotools[@]})${reset}\n\n" - go env -w GO111MODULE=auto - go_step=0 - for gotool in "${!gotools[@]}"; do - go_step=$((go_step + 1)) - if [ "$upgrade_tools" = "false" ]; then - res=$(command -v "$gotool") && { - echo -e "[${yellow}SKIPPING${reset}] $gotool already installed in...${blue}${res}${reset}" - continue - } - fi - eval ${gotools[$gotool]} $DEBUG_STD - exit_status=$? - if [ $exit_status -eq 0 ] - then - printf "${yellow} $gotool installed (${go_step}/${#gotools[@]})${reset}\n" - else - printf "${red} Unable to install $gotool, try manually (${go_step}/${#gotools[@]})${reset}\n" - double_check=true - fi - done - - printf "${bblue}\n Running: Installing repositories (${#repos[@]})${reset}\n\n" - - # Repos with special configs - eval git clone https://github.com/projectdiscovery/nuclei-templates ${NUCLEI_TEMPLATES_PATH} $DEBUG_STD - eval git clone https://github.com/geeknik/the-nuclei-templates.git ${NUCLEI_TEMPLATES_PATH}/extra_templates $DEBUG_STD - eval git clone https://github.com/projectdiscovery/fuzzing-templates $tools/fuzzing-templates $DEBUG_STD - eval nuclei -update-templates update-template-dir ${NUCLEI_TEMPLATES_PATH} $DEBUG_STD - cd "$dir" || { echo "Failed to cd to $dir in ${FUNCNAME[0]} @ line ${LINENO}"; exit 1; } - eval git clone --depth 1 https://github.com/sqlmapproject/sqlmap.git $dir/sqlmap $DEBUG_STD - eval git clone --depth 1 https://github.com/drwetter/testssl.sh.git $dir/testssl.sh $DEBUG_STD - eval $SUDO git clone https://gitlab.com/exploit-database/exploitdb /opt/exploitdb $DEBUG_STD - - # Standard repos installation - repos_step=0 - for repo in "${!repos[@]}"; do - repos_step=$((repos_step + 1)) - if [ "$upgrade_tools" = "false" ]; then - unset is_installed - unset is_need_dl - [[ $repo == "Gf-Patterns" ]] && is_need_dl=1 - [[ $repo == "gf" ]] && is_need_dl=1 - res=$(command -v "$repo") && is_installed=1 - [[ -z $is_need_dl ]] && [[ -n $is_installed ]] && { - # HERE: not installed yet. - echo -e "[${yellow}SKIPPING${reset}] $repo already installed in...${blue}${res}${reset}" - continue - } - fi - eval git clone https://github.com/${repos[$repo]} $dir/$repo $DEBUG_STD - eval cd $dir/$repo $DEBUG_STD - eval git pull $DEBUG_STD - exit_status=$? 
- if [ $exit_status -eq 0 ] - then - printf "${yellow} $repo installed (${repos_step}/${#repos[@]})${reset}\n" - else - printf "${red} Unable to install $repo, try manually (${repos_step}/${#repos[@]})${reset}\n" - double_check=true - fi - if ( [ -z $is_installed ] && [ "$upgrade_tools" = "false" ] ) || [ "$upgrade_tools" = "true" ] ; then - if [ -s "requirements.txt" ]; then +function install_tools() { + + eval pip3 install -I -r requirements.txt $DEBUG_STD + + printf "${bblue} Running: Installing Golang tools (${#gotools[@]})${reset}\n\n" + go env -w GO111MODULE=auto + go_step=0 + for gotool in "${!gotools[@]}"; do + go_step=$((go_step + 1)) + if [[ $upgrade_tools == "false" ]]; then + res=$(command -v "$gotool") && { + echo -e "[${yellow}SKIPPING${reset}] $gotool already installed in...${blue}${res}${reset}" + continue + } + fi + eval ${gotools[$gotool]} $DEBUG_STD + exit_status=$? + if [[ $exit_status -eq 0 ]]; then + printf "${yellow} $gotool installed (${go_step}/${#gotools[@]})${reset}\n" + else + printf "${red} Unable to install $gotool, try manually (${go_step}/${#gotools[@]})${reset}\n" + double_check=true + fi + done + + printf "${bblue}\n Running: Installing repositories (${#repos[@]})${reset}\n\n" + + # Repos with special configs + eval git clone https://github.com/projectdiscovery/nuclei-templates ${NUCLEI_TEMPLATES_PATH} $DEBUG_STD + eval git clone https://github.com/geeknik/the-nuclei-templates.git ${NUCLEI_TEMPLATES_PATH}/extra_templates $DEBUG_STD + eval git clone https://github.com/projectdiscovery/fuzzing-templates ${tools}/fuzzing-templates $DEBUG_STD + eval nuclei -update-templates update-template-dir ${NUCLEI_TEMPLATES_PATH} $DEBUG_STD + cd "${dir}" || { + echo "Failed to cd to $dir in ${FUNCNAME[0]} @ line ${LINENO}" + exit 1 + } + eval git clone --depth 1 https://github.com/sqlmapproject/sqlmap.git "${dir}"/sqlmap $DEBUG_STD + eval git clone --depth 1 https://github.com/drwetter/testssl.sh.git "${dir}"/testssl.sh $DEBUG_STD + eval $SUDO git clone https://gitlab.com/exploit-database/exploitdb /opt/exploitdb $DEBUG_STD + + # Standard repos installation + repos_step=0 + for repo in "${!repos[@]}"; do + repos_step=$((repos_step + 1)) + if [[ $upgrade_tools == "false" ]]; then + unset is_installed + unset is_need_dl + [[ $repo == "Gf-Patterns" ]] && is_need_dl=1 + [[ $repo == "gf" ]] && is_need_dl=1 + res=$(command -v "$repo") && is_installed=1 + [[ -z $is_need_dl ]] && [[ -n $is_installed ]] && { + # HERE: not installed yet. + echo -e "[${yellow}SKIPPING${reset}] $repo already installed in...${blue}${res}${reset}" + continue + } + fi + eval git clone https://github.com/${repos[$repo]} "${dir}"/$repo $DEBUG_STD + eval cd "${dir}"/$repo $DEBUG_STD + eval git pull $DEBUG_STD + exit_status=$? + if [[ $exit_status -eq 0 ]]; then + printf "${yellow} $repo installed (${repos_step}/${#repos[@]})${reset}\n" + else + printf "${red} Unable to install $repo, try manually (${repos_step}/${#repos[@]})${reset}\n" + double_check=true + fi + if ([[ -z $is_installed ]] && [[ $upgrade_tools == "false" ]]) || [[ $upgrade_tools == "true" ]]; then + if [[ -s "requirements.txt" ]]; then eval $SUDO pip3 install -r requirements.txt $DEBUG_STD fi - if [ -s "setup.py" ]; then + if [[ -s "setup.py" ]]; then eval $SUDO pip3 install . 
$DEBUG_STD fi - if [ "massdns" = "$repo" ]; then + if [[ "massdns" == "$repo" ]]; then eval make $DEBUG_STD && strip -s bin/massdns && eval $SUDO cp bin/massdns /usr/local/bin/ $DEBUG_ERROR fi - if [ "gitleaks" = "$repo" ]; then + if [[ "gitleaks" == "$repo" ]]; then eval make build $DEBUG_STD && eval $SUDO cp ./gitleaks /usr/local/bin/ $DEBUG_ERROR fi + if [[ "dontgo403" == "$repo" ]]; then + eval go get $DEBUG_STD && eval go build $DEBUG_STD && eval chmod +x ./dontgo403 $DEBUG_STD + fi fi - if [ "gf" = "$repo" ]; then + if [[ "gf" == "$repo" ]]; then eval cp -r examples ~/.gf $DEBUG_ERROR - elif [ "Gf-Patterns" = "$repo" ]; then + elif [[ "Gf-Patterns" == "$repo" ]]; then eval mv ./*.json ~/.gf $DEBUG_ERROR fi - cd "$dir" || { echo "Failed to cd to $dir in ${FUNCNAME[0]} @ line ${LINENO}"; exit 1; } - done - - if [ "True" = "$IS_ARM" ]; then - if [ "True" = "$RPI_3" ]; then + cd "${dir}" || { + echo "Failed to cd to $dir in ${FUNCNAME[0]} @ line ${LINENO}" + exit 1 + } + done + + if [[ "True" == "$IS_ARM" ]]; then + if [[ "True" == "$RPI_3" ]]; then install_ppfuzz "https://github.com/dwisiswant0/ppfuzz/releases/download/v1.0.1/ppfuzz-v1.0.1-armv7-unknown-linux-gnueabihf.tar.gz" "ppfuzz-v1.0.1-armv7-unknown-linux-gnueabihf.tar.gz" - elif [ "True" = "$RPI_4" ]; then + elif [[ "True" == "$RPI_4" ]]; then install_ppfuzz "https://github.com/dwisiswant0/ppfuzz/releases/download/v1.0.1/ppfuzz-v1.0.1-aarch64-unknown-linux-gnueabihf.tar.gz" "ppfuzz-v1.0.1-aarch64-unknown-linux-gnueabihf.tar.gz" fi - elif [ "True" = "$IS_MAC" ]; then - if [ "True" = "$IS_ARM" ]; then - install_ppfuzz "https://github.com/dwisiswant0/ppfuzz/releases/download/v1.0.1/ppfuzz-v1.0.1-armv7-unknown-linux-gnueabihf.tar.gz" "ppfuzz-v1.0.1-armv7-unknown-linux-gnueabihf.tar.gz" - else - install_ppfuzz "https://github.com/dwisiswant0/ppfuzz/releases/download/v1.0.1/ppfuzz-v1.0.1-x86_64-apple-darwin.tar.gz" "ppfuzz-v1.0.1-x86_64-apple-darwin.tar.gz" - fi - else - install_ppfuzz "https://github.com/dwisiswant0/ppfuzz/releases/download/v1.0.1/ppfuzz-v1.0.1-x86_64-unknown-linux-musl.tar.gz" "ppfuzz-v1.0.1-x86_64-unknown-linux-musl.tar.gz" - fi - eval $SUDO chmod 755 /usr/local/bin/ppfuzz - eval $SUDO strip -s /usr/local/bin/ppfuzz $DEBUG_STD - eval notify $DEBUG_STD - eval subfinder $DEBUG_STD - eval subfinder $DEBUG_STD -} - -install_webserver(){ - printf "${bblue} Running: Installing web reconftw ${reset}\n\n" - - printf "${yellow} Installing python libraries...${reset}\n\n" - - # Install venv - printf "${yellow} python virtualenv install...${reset}\n\n" - $SUDO rm -rf /web/.venv/ - $SUDO pip3 install virtualenv &>/dev/null - $SUDO virtualenv web/.venv/ &>/dev/null - if [ $? -eq 0 ]; then - printf "${yellow} Activating virtualenv...${reset}\n\n" - $SUDO source web/.venv/bin/activate - $SUDO pip3 install --upgrade pip &>/dev/null - else - printf '[ERROR] Failed to create virtualenv. Please install requirements mentioned in Documentation.' - exit 1 - fi - - printf "${yellow} Installing Requirements...${reset}\n\n" - $SUDO pip3 install -r $SCRIPTPATH/web/requirements.txt &>/dev/null - - printf "${yellow} Installing tools...${reset}\n\n" - if command -v apt > /dev/null; then - $SUDO apt install redis-server -y &>/dev/null - elif command -v yum > /dev/null; then - $SUDO yum install redis -y &>/dev/null - else - printf '[ERROR] Unable to find a supported package manager. 
Please install redis manually.\n' - exit 1 - fi - - printf "${yellow} Creating WEB User...${reset}\n\n" - $SUDO rm $SCRIPTPATH/web/db.sqlite3 &>/dev/null - $SUDO python3 $SCRIPTPATH/web/manage.py makemigrations &>/dev/null - $SUDO python3 $SCRIPTPATH/web/manage.py migrate &>/dev/null - $SUDO python3 $SCRIPTPATH/web/manage.py createsuperuser - printf "\n\n" + elif [[ "True" == "$IS_MAC" ]]; then + if [[ "True" == "$IS_ARM" ]]; then + install_ppfuzz "https://github.com/dwisiswant0/ppfuzz/releases/download/v1.0.1/ppfuzz-v1.0.1-armv7-unknown-linux-gnueabihf.tar.gz" "ppfuzz-v1.0.1-armv7-unknown-linux-gnueabihf.tar.gz" + else + install_ppfuzz "https://github.com/dwisiswant0/ppfuzz/releases/download/v1.0.1/ppfuzz-v1.0.1-x86_64-apple-darwin.tar.gz" "ppfuzz-v1.0.1-x86_64-apple-darwin.tar.gz" + fi + else + install_ppfuzz "https://github.com/dwisiswant0/ppfuzz/releases/download/v1.0.1/ppfuzz-v1.0.1-x86_64-unknown-linux-musl.tar.gz" "ppfuzz-v1.0.1-x86_64-unknown-linux-musl.tar.gz" + fi + eval $SUDO chmod 755 /usr/local/bin/ppfuzz + eval $SUDO strip -s /usr/local/bin/ppfuzz $DEBUG_STD + eval notify $DEBUG_STD + eval subfinder $DEBUG_STD + eval subfinder $DEBUG_STD } - -banner_web +banner printf "\n${bgreen} reconFTW installer/updater script ${reset}\n\n" - -if [[ -d $dir && -d ~/.gf && -d ~/.config/notify/ && -d ~/.config/amass/ && -d ~/.config/nuclei/ && -f $dir/.github_tokens ]]; then - rftw_installed=true -else - rftw_installed=false +if [[ -d $dir && -d ~/.gf && -d ~/.config/notify/ && -d ~/.config/amass/ && -d ~/.config/nuclei/ && -f "${dir}"/.github_tokens ]]; then + rftw_installed=true +else + rftw_installed=false fi - -# Display menu and wait for user input -display_menu(){ - while true; do - printf "${bblue} Choose one of the following options: ${reset}\n\n" - - if $rftw_installed; then - printf "${bblue} 1. Install/Update ReconFTW (without Web Interface)${reset}\n\n" - printf "${bblue} 2. Install/Update ReconFTW + Install Web Interface${reset}\n\n" - printf "${bblue} 3. Setup Web Interface${reset} ${yellow}(User Interaction needed!)${reset}\n\n" - printf "${bblue} 4. Exit${reset}\n\n" - printf "${bgreen}#######################################################################${reset}\n\n" - read -p "${bblue}Insert option: ${reset}" option - printf "\n\n${bgreen}#######################################################################${reset}\n\n" - - option=$(echo "$option" | tr -d '[:space:]') - if ! [[ "$option" =~ ^[1-4]$ ]]; then - printf "${bred} Invalid option. Please try again.${reset}\n\n" - continue - fi - - case $option in - 1) - web=false - break - ;; - 2) - web=true - break - ;; - 3) - install_webserver - exit 1 - ;; - 4) - printf "${bblue} Exiting...${reset}\n\n" - exit 1 - ;; - *) - printf "${bblue} Invalid option. Exiting...${reset}\n\n" - exit 1 - ;; - esac - - else - printf "${bblue} 1. Install/Update ReconFTW${reset}\n\n" - printf "${bblue} 2. Install/Update ReconFTW + Install Web Interface${reset} ${yellow}(User Interaction needed!)${reset}\n\n" - printf "${bred} 3. Can't setup Web Interface without ReconFTW${reset}\n\n" - printf "${bblue} 4. 
Exit${reset}\n\n" - printf "${bgreen}#######################################################################${reset}\n\n" - read -p "$(echo -e ${bblue} "Insert option: "${reset})" option - printf "\n${bgreen}#######################################################################${reset}\n\n" - - case $option in - 1) - web=false - break - ;; - 2) - web=true - break - ;; - 4) - printf "${bblue} Exiting...${reset}\n\n" - exit 1 - ;; - *) - printf "${bred} Invalid option. Please try again.${reset}\n\n" - exit 1 - ;; - esac - fi - done -} - -case "$1" in - --tools) - install_tools - ;; - --auto) - # possibly some other actions - ;; - *) - echo "$1" - display_menu - ;; -esac - printf "${yellow} This may take time. So, go grab a coffee! ${reset}\n\n" if [[ $(id -u | grep -o '^0$') == "0" ]]; then - SUDO="" + SUDO="" else - if sudo -n false 2>/dev/null; then - printf "${bred} Is strongly recommended to add your user to sudoers${reset}\n" - printf "${bred} This will avoid prompts for sudo password in the middle of the installation${reset}\n" - printf "${bred} And more important, in the middle of the scan (needed for nmap SYN scan)${reset}\n\n" - printf "${bred} echo \"${USERNAME} ALL=(ALL:ALL) NOPASSWD: ALL\" > /etc/sudoers.d/reconFTW${reset}\n\n" - fi - SUDO="sudo" + if sudo -n false 2>/dev/null; then + printf "${bred} Is strongly recommended to add your user to sudoers${reset}\n" + printf "${bred} This will avoid prompts for sudo password in the middle of the installation${reset}\n" + printf "${bred} And more important, in the middle of the scan (needed for nmap SYN scan)${reset}\n\n" + printf "${bred} echo \"${USERNAME} ALL=(ALL:ALL) NOPASSWD: ALL\" > /etc/sudoers.d/reconFTW${reset}\n\n" + fi + SUDO="sudo" fi -install_apt(){ - eval $SUDO apt update -y $DEBUG_STD - eval $SUDO DEBIAN_FRONTEND="noninteractive" apt install chromium-browser -y $DEBUG_STD - eval $SUDO DEBIAN_FRONTEND="noninteractive" apt install chromium -y $DEBUG_STD - eval $SUDO DEBIAN_FRONTEND="noninteractive" apt install python3 python3-pip python3-virtualenv build-essential gcc cmake ruby whois git curl libpcap-dev wget zip python3-dev pv dnsutils libssl-dev libffi-dev libxml2-dev libxslt1-dev zlib1g-dev nmap jq apt-transport-https lynx medusa xvfb libxml2-utils procps bsdmainutils libdata-hexdump-perl -y $DEBUG_STD - curl https://sh.rustup.rs -sSf | sh -s -- -y >/dev/null 2>&1 - eval source "$HOME/.cargo/env $DEBUG_STD" - eval cargo install ripgen $DEBUG_STD - eval source "$HOME/.cargo/env $DEBUG_STD" +install_apt() { + eval $SUDO apt update -y $DEBUG_STD + eval $SUDO DEBIAN_FRONTEND="noninteractive" apt install chromium-browser -y $DEBUG_STD + eval $SUDO DEBIAN_FRONTEND="noninteractive" apt install chromium -y $DEBUG_STD + eval $SUDO DEBIAN_FRONTEND="noninteractive" apt install python3 python3-pip python3-virtualenv build-essential gcc cmake ruby whois git curl libpcap-dev wget zip python3-dev pv dnsutils libssl-dev libffi-dev libxml2-dev libxslt1-dev zlib1g-dev nmap jq apt-transport-https lynx medusa xvfb libxml2-utils procps bsdmainutils libdata-hexdump-perl -y $DEBUG_STD + curl https://sh.rustup.rs -sSf | sh -s -- -y >/dev/null 2>&1 + eval source "${HOME}/.cargo/env $DEBUG_STD" + eval cargo install ripgen $DEBUG_STD + eval source "${HOME}/.cargo/env $DEBUG_STD" } -install_brew(){ - if brew --version &>/dev/null; then - printf "${bgreen} brew is already installed ${reset}\n\n" - else - /bin/bash -c "$(curl -fsSL https://raw.githubusercontent.com/Homebrew/install/HEAD/install.sh)" - fi - eval brew update -$DEBUG_STD - eval 
brew install --cask chromium $DEBUG_STD - eval brew install bash coreutils python massdns jq gcc cmake ruby git curl libpcap-dev wget zip python3-dev pv dnsutils whois libssl-dev libffi-dev libxml2-dev libxslt1-dev zlib1g-dev nmap jq apt-transport-https lynx medusa xvfb libxml2-utils libdata-hexdump-perl gnu-getopt $DEBUG_STD - export PATH="/opt/homebrew/opt/gnu-getopt/bin:$PATH" - echo 'export PATH="/opt/homebrew/opt/gnu-getopt/bin:$PATH"' >> ~/.zshrc - brew install rustup - rustup-init - eval cargo install ripgen $DEBUG_STD +install_brew() { + if brew --version &>/dev/null; then + printf "${bgreen} brew is already installed ${reset}\n\n" + else + /bin/bash -c "$(curl -fsSL https://raw.githubusercontent.com/Homebrew/install/HEAD/install.sh)" + fi + eval brew update -$DEBUG_STD + eval brew install --cask chromium $DEBUG_STD + eval brew install bash coreutils python massdns jq gcc cmake ruby git curl libpcap-dev wget zip python3-dev pv dnsutils whois libssl-dev libffi-dev libxml2-dev libxslt1-dev zlib1g-dev nmap jq apt-transport-https lynx medusa xvfb libxml2-utils libdata-hexdump-perl gnu-getopt $DEBUG_STD + export PATH="/opt/homebrew/opt/gnu-getopt/bin:$PATH" + echo 'export PATH="/opt/homebrew/opt/gnu-getopt/bin:$PATH"' >>~/.zshrc + brew install rustup + rustup-init + eval cargo install ripgen $DEBUG_STD } -install_yum(){ - eval $SUDO yum groupinstall "Development Tools" -y $DEBUG_STD - eval $SUDO yum install python3 python3-pip gcc cmake ruby git curl libpcap-dev wget whois zip python3-devel pv bind-utils libopenssl-devel libffi-devel libxml2-devel libxslt-devel zlib-devel nmap jq lynx medusa xorg-x11-server-xvfb -y $DEBUG_STD - curl https://sh.rustup.rs -sSf | sh -s -- -y >/dev/null 2>&1 - eval source "$HOME/.cargo/env $DEBUG_STD" - eval cargo install ripgen $DEBUG_STD +install_yum() { + eval $SUDO yum groupinstall "Development Tools" -y $DEBUG_STD + eval $SUDO yum install python3 python3-pip gcc cmake ruby git curl libpcap-dev wget whois zip python3-devel pv bind-utils libopenssl-devel libffi-devel libxml2-devel libxslt-devel zlib-devel nmap jq lynx medusa xorg-x11-server-xvfb -y $DEBUG_STD + curl https://sh.rustup.rs -sSf | sh -s -- -y >/dev/null 2>&1 + eval source "${HOME}/.cargo/env $DEBUG_STD" + eval cargo install ripgen $DEBUG_STD } -install_pacman(){ - eval $SUDO pacman -Sy install python python-pip base-devel gcc cmake ruby git curl libpcap whois wget zip pv bind openssl libffi libxml2 libxslt zlib nmap jq lynx medusa xorg-server-xvfb -y $DEBUG_STD - curl https://sh.rustup.rs -sSf | sh -s -- -y >/dev/null 2>&1 - eval source "$HOME/.cargo/env $DEBUG_STD" - eval cargo install ripgen $DEBUG_STD +install_pacman() { + eval $SUDO pacman -Sy install python python-pip base-devel gcc cmake ruby git curl libpcap whois wget zip pv bind openssl libffi libxml2 libxslt zlib nmap jq lynx medusa xorg-server-xvfb -y $DEBUG_STD + curl https://sh.rustup.rs -sSf | sh -s -- -y >/dev/null 2>&1 + eval source "${HOME}/.cargo/env $DEBUG_STD" + eval cargo install ripgen $DEBUG_STD } eval git config --global --unset http.proxy $DEBUG_STD @@ -443,86 +317,95 @@ eval git config --global --unset https.proxy $DEBUG_STD printf "${bblue} Running: Looking for new reconFTW version${reset}\n\n" if ! eval git fetch $DEBUG_STD; then - echo "Failed to fetch updates." - exit 1 + echo "Failed to fetch updates." 
+ exit 1 fi BRANCH=$(git rev-parse --abbrev-ref HEAD) HEADHASH=$(git rev-parse HEAD) UPSTREAMHASH=$(git rev-parse "${BRANCH}@{upstream}") -if [ "$HEADHASH" != "$UPSTREAMHASH" ] -then - printf "${yellow} There is a new version, updating...${reset}\n\n" - if git status --porcelain | grep -q 'reconftw.cfg$'; then - mv reconftw.cfg reconftw.cfg_bck - printf "${yellow} reconftw.cfg has been backed up in reconftw.cfg_bck${reset}\n\n" - fi - eval git reset --hard $DEBUG_STD - eval git pull $DEBUG_STD - printf "${bgreen} Updated! Running new installer version...${reset}\n\n" +if [[ $HEADHASH != "$UPSTREAMHASH" ]]; then + printf "${yellow} There is a new version, updating...${reset}\n\n" + if git status --porcelain | grep -q 'reconftw.cfg$'; then + mv reconftw.cfg reconftw.cfg_bck + printf "${yellow} reconftw.cfg has been backed up in reconftw.cfg_bck${reset}\n\n" + fi + eval git reset --hard $DEBUG_STD + eval git pull $DEBUG_STD + printf "${bgreen} Updated! Running new installer version...${reset}\n\n" else - printf "${bgreen} reconFTW is already up to date!${reset}\n\n" + printf "${bgreen} reconFTW is already up to date!${reset}\n\n" fi printf "${bblue} Running: Installing system packages ${reset}\n\n" -if [ -f /etc/debian_version ]; then install_apt; -elif [ -f /etc/redhat-release ]; then install_yum; -elif [ -f /etc/arch-release ]; then install_pacman; -elif [ "True" = "$IS_MAC" ]; then install_brew; -elif [ -f /etc/os-release ]; then install_yum; #/etc/os-release fall in yum for some RedHat and Amazon Linux instances +if [[ -f /etc/debian_version ]]; then + install_apt +elif [[ -f /etc/redhat-release ]]; then + install_yum +elif [[ -f /etc/arch-release ]]; then + install_pacman +elif [[ "True" == "$IS_MAC" ]]; then + install_brew +elif [[ -f /etc/os-release ]]; then + install_yum #/etc/os-release fall in yum for some RedHat and Amazon Linux instances fi # Installing latest Golang version version=$(curl -L -s https://golang.org/VERSION?m=text | head -1) -[[ $version = g* ]] || version="go1.20.7" +[[ $version == g* ]] || version="go1.20.7" printf "${bblue} Running: Installing/Updating Golang ${reset}\n\n" -if [ "$install_golang" = "true" ]; then - if [[ $(eval type go $DEBUG_ERROR | grep -o 'go is') == "go is" ]] && [[ "$version" = $(go version | cut -d " " -f3) ]] - then - printf "${bgreen} Golang is already installed and updated ${reset}\n\n" - else - eval $SUDO rm -rf /usr/local/go $DEBUG_STD - if [ "True" = "$IS_ARM" ]; then - if [ "True" = "$RPI_3" ]; then - eval wget "https://dl.google.com/go/${version}.linux-armv6l.tar.gz" -O /tmp/${version}.linux-armv6l.tar.gz $DEBUG_STD - eval $SUDO tar -C /usr/local -xzf /tmp/"${version}.linux-armv6l.tar.gz" $DEBUG_STD - elif [ "True" = "$RPI_4" ]; then - eval wget "https://dl.google.com/go/${version}.linux-arm64.tar.gz" -O /tmp/${version}.linux-arm64.tar.gz $DEBUG_STD - eval $SUDO tar -C /usr/local -xzf /tmp/"${version}.linux-arm64.tar.gz" $DEBUG_STD - fi - elif [ "True" = "$IS_MAC" ]; then - if [ "True" = "$IS_ARM" ]; then - eval wget "https://dl.google.com/go/${version}.darwin-arm64.tar.gz" -O /tmp/${version}.darwin-arm64.tar.gz $DEBUG_STD - eval $SUDO tar -C /usr/local -xzf /tmp/"${version}.darwin-arm64.tar.gz" $DEBUG_STD - else - eval wget "https://dl.google.com/go/${version}.darwin-amd64.tar.gz" -O /tmp/${version}.darwin-amd64.tar.gz $DEBUG_STD - eval $SUDO tar -C /usr/local -xzf /tmp/"${version}.darwin-amd64.tar.gz" $DEBUG_STD - fi +if [[ $install_golang == "true" ]]; then + if [[ $(eval type go $DEBUG_ERROR | grep -o 'go is') == "go is" ]] && 
[[ $version == $(go version | cut -d " " -f3) ]]; then + printf "${bgreen} Golang is already installed and updated ${reset}\n\n" + else + eval $SUDO rm -rf /usr/local/go $DEBUG_STD + if [[ "True" == "$IS_ARM" ]]; then + if [[ "True" == "$RPI_3" ]]; then + eval wget "https://dl.google.com/go/${version}.linux-armv6l.tar.gz" -O /tmp/${version}.linux-armv6l.tar.gz $DEBUG_STD + eval $SUDO tar -C /usr/local -xzf /tmp/"${version}.linux-armv6l.tar.gz" $DEBUG_STD + elif [[ "True" == "$RPI_4" ]]; then + eval wget "https://dl.google.com/go/${version}.linux-arm64.tar.gz" -O /tmp/${version}.linux-arm64.tar.gz $DEBUG_STD + eval $SUDO tar -C /usr/local -xzf /tmp/"${version}.linux-arm64.tar.gz" $DEBUG_STD + fi + elif [[ "True" == "$IS_MAC" ]]; then + if [[ "True" == "$IS_ARM" ]]; then + eval wget "https://dl.google.com/go/${version}.darwin-arm64.tar.gz" -O /tmp/${version}.darwin-arm64.tar.gz $DEBUG_STD + eval $SUDO tar -C /usr/local -xzf /tmp/"${version}.darwin-arm64.tar.gz" $DEBUG_STD else - eval wget "https://dl.google.com/go/${version}.linux-amd64.tar.gz" -O /tmp/${version}.linux-amd64.tar.gz $DEBUG_STD - eval $SUDO tar -C /usr/local -xzf /tmp/"${version}.linux-amd64.tar.gz" $DEBUG_STD + eval wget "https://dl.google.com/go/${version}.darwin-amd64.tar.gz" -O /tmp/${version}.darwin-amd64.tar.gz $DEBUG_STD + eval $SUDO tar -C /usr/local -xzf /tmp/"${version}.darwin-amd64.tar.gz" $DEBUG_STD fi - eval $SUDO ln -sf /usr/local/go/bin/go /usr/local/bin/ - #rm -rf $version* - export GOROOT=/usr/local/go - export GOPATH=$HOME/go - export PATH=$GOPATH/bin:$GOROOT/bin:$HOME/.local/bin:$PATH -cat << EOF >> ~/"${profile_shell}" + else + eval wget "https://dl.google.com/go/${version}.linux-amd64.tar.gz" -O /tmp/${version}.linux-amd64.tar.gz $DEBUG_STD + eval $SUDO tar -C /usr/local -xzf /tmp/"${version}.linux-amd64.tar.gz" $DEBUG_STD + fi + eval $SUDO ln -sf /usr/local/go/bin/go /usr/local/bin/ + #rm -rf $version* + export GOROOT=/usr/local/go + export GOPATH=${HOME}/go + export PATH=$GOPATH/bin:$GOROOT/bin:${HOME}/.local/bin:$PATH + cat <>~/"${profile_shell}" # Golang vars export GOROOT=/usr/local/go export GOPATH=\$HOME/go export PATH=\$GOPATH/bin:\$GOROOT/bin:\$HOME/.local/bin:\$PATH EOF -fi + fi else - printf "${byellow} Golang will not be configured according to the user's prefereneces (reconftw.cfg install_golang var)${reset}\n"; + printf "${byellow} Golang will not be configured according to the user's prefereneces (reconftw.cfg install_golang var)${reset}\n" fi -[ -n "$GOPATH" ] || { printf "${bred} GOPATH env var not detected, add Golang env vars to your \$HOME/.bashrc or \$HOME/.zshrc:\n\n export GOROOT=/usr/local/go\n export GOPATH=\$HOME/go\n export PATH=\$GOPATH/bin:\$GOROOT/bin:\$PATH\n\n"; exit 1; } -[ -n "$GOROOT" ] || { printf "${bred} GOROOT env var not detected, add Golang env vars to your \$HOME/.bashrc or \$HOME/.zshrc:\n\n export GOROOT=/usr/local/go\n export GOPATH=\$HOME/go\n export PATH=\$GOPATH/bin:\$GOROOT/bin:\$PATH\n\n"; exit 1; } +[ -n "$GOPATH" ] || { + printf "${bred} GOPATH env var not detected, add Golang env vars to your \$HOME/.bashrc or \$HOME/.zshrc:\n\n export GOROOT=/usr/local/go\n export GOPATH=\$HOME/go\n export PATH=\$GOPATH/bin:\$GOROOT/bin:\$PATH\n\n" + exit 1 +} +[ -n "$GOROOT" ] || { + printf "${bred} GOROOT env var not detected, add Golang env vars to your \$HOME/.bashrc or \$HOME/.zshrc:\n\n export GOROOT=/usr/local/go\n export GOPATH=\$HOME/go\n export PATH=\$GOPATH/bin:\$GOROOT/bin:\$PATH\n\n" + exit 1 +} printf "${bblue} Running: Installing requirements 
${reset}\n\n" @@ -531,8 +414,8 @@ mkdir -p $tools mkdir -p ~/.config/notify/ mkdir -p ~/.config/amass/ mkdir -p ~/.config/nuclei/ -touch $dir/.github_tokens -touch $dir/.gitlab_tokens +touch "${dir}"/.github_tokens +touch "${dir}"/.gitlab_tokens eval wget -N -c https://bootstrap.pypa.io/get-pip.py $DEBUG_STD && eval python3 get-pip.py $DEBUG_STD eval rm -f get-pip.py $DEBUG_STD @@ -541,74 +424,80 @@ install_tools printf "${bblue}\n Running: Downloading required files ${reset}\n\n" ## Downloads -[ ! -f ~/.config/amass/config.ini ] && wget -q -O ~/.config/amass/config.ini https://raw.githubusercontent.com/owasp-amass/amass/master/examples/config.ini -[ ! -f ~/.config/notify/provider-config.yaml ] && wget -q -O ~/.config/notify/provider-config.yaml https://gist.githubusercontent.com/six2dez/23a996bca189a11e88251367e6583053/raw -#wget -q -O - https://raw.githubusercontent.com/devanshbatham/ParamSpider/master/gf_profiles/potential.json > ~/.gf/potential.json - Removed -wget -q -O - https://raw.githubusercontent.com/m4ll0k/Bug-Bounty-Toolz/master/getjswords.py > ${tools}/getjswords.py -wget -q -O - https://raw.githubusercontent.com/n0kovo/n0kovo_subdomains/main/n0kovo_subdomains_huge.txt > ${subs_wordlist_big} -wget -q -O - https://raw.githubusercontent.com/six2dez/resolvers_reconftw/main/resolvers_trusted.txt > ${resolvers_trusted} -wget -q -O - https://raw.githubusercontent.com/trickest/resolvers/main/resolvers.txt > ${resolvers} -wget -q -O - https://gist.github.com/six2dez/a307a04a222fab5a57466c51e1569acf/raw > ${subs_wordlist} -wget -q -O - https://gist.github.com/six2dez/ffc2b14d283e8f8eff6ac83e20a3c4b4/raw > ${tools}/permutations_list.txt -wget -q -O - https://raw.githubusercontent.com/six2dez/OneListForAll/main/onelistforallmicro.txt > ${fuzz_wordlist} -wget -q -O - https://gist.githubusercontent.com/six2dez/a89a0c7861d49bb61a09822d272d5395/raw > ${lfi_wordlist} -wget -q -O - https://gist.githubusercontent.com/six2dez/ab5277b11da7369bf4e9db72b49ad3c1/raw > ${ssti_wordlist} -wget -q -O - https://gist.github.com/six2dez/d62ab8f8ffd28e1c206d401081d977ae/raw > ${tools}/headers_inject.txt -wget -q -O - https://gist.githubusercontent.com/six2dez/6e2d9f4932fd38d84610eb851014b26e/raw > ${tools}/axiom_config.sh -eval $SUDO chmod +x $tools/axiom_config.sh +[[ ! -f ~/.config/amass/config.ini ]] && wget -q -O ~/.config/amass/config.ini https://raw.githubusercontent.com/owasp-amass/amass/master/examples/config.ini +[[ ! 
-f ~/.config/notify/provider-config.yaml ]] && wget -q -O ~/.config/notify/provider-config.yaml https://gist.githubusercontent.com/six2dez/23a996bca189a11e88251367e6583053/raw +#wget -q -O - https://raw.githubusercontent.com/devanshbatham/ParamSpider/master/gf_profiles/potential.json > ~/.gf/potential.json - Removed +wget -q -O - https://raw.githubusercontent.com/m4ll0k/Bug-Bounty-Toolz/master/getjswords.py >${tools}/getjswords.py +wget -q -O - https://raw.githubusercontent.com/n0kovo/n0kovo_subdomains/main/n0kovo_subdomains_huge.txt >${subs_wordlist_big} +wget -q -O - https://raw.githubusercontent.com/six2dez/resolvers_reconftw/main/resolvers_trusted.txt >${resolvers_trusted} +wget -q -O - https://raw.githubusercontent.com/trickest/resolvers/main/resolvers.txt >${resolvers} +wget -q -O - https://gist.github.com/six2dez/a307a04a222fab5a57466c51e1569acf/raw >${subs_wordlist} +wget -q -O - https://gist.github.com/six2dez/ffc2b14d283e8f8eff6ac83e20a3c4b4/raw >${tools}/permutations_list.txt +wget -q -O - https://raw.githubusercontent.com/six2dez/OneListForAll/main/onelistforallmicro.txt >${fuzz_wordlist} +wget -q -O - https://gist.githubusercontent.com/six2dez/a89a0c7861d49bb61a09822d272d5395/raw >${lfi_wordlist} +wget -q -O - https://gist.githubusercontent.com/six2dez/ab5277b11da7369bf4e9db72b49ad3c1/raw >${ssti_wordlist} +wget -q -O - https://gist.github.com/six2dez/d62ab8f8ffd28e1c206d401081d977ae/raw >${tools}/headers_inject.txt +wget -q -O - https://gist.githubusercontent.com/six2dez/6e2d9f4932fd38d84610eb851014b26e/raw >${tools}/axiom_config.sh +eval $SUDO chmod +x ${tools}/axiom_config.sh eval $SUDO mv $SCRIPTPATH/assets/potential.json ~/.gf/potential.json ## Last check -if [ "$double_check" = "true" ]; then - printf "${bblue} Running: Double check for installed tools ${reset}\n\n" - go_step=0 - for gotool in "${!gotools[@]}"; do - go_step=$((go_step + 1)) - eval type -P $gotool $DEBUG_STD || { eval ${gotools[$gotool]} $DEBUG_STD; } - exit_status=$? - done - repos_step=0 - for repo in "${!repos[@]}"; do - repos_step=$((repos_step + 1)) - eval cd $dir/$repo $DEBUG_STD || { eval git clone https://github.com/${repos[$repo]} $dir/$repo $DEBUG_STD && cd $dir/$repo || { echo "Failed to cd directory '$dir'"; exit 1; }; } - eval git pull $DEBUG_STD - exit_status=$? - if [ -s "setup.py" ]; then +if [[ $double_check == "true" ]]; then + printf "${bblue} Running: Double check for installed tools ${reset}\n\n" + go_step=0 + for gotool in "${!gotools[@]}"; do + go_step=$((go_step + 1)) + eval type -P $gotool $DEBUG_STD || { eval ${gotools[$gotool]} $DEBUG_STD; } + exit_status=$? + done + repos_step=0 + for repo in "${!repos[@]}"; do + repos_step=$((repos_step + 1)) + eval cd "${dir}"/$repo $DEBUG_STD || { eval git clone https://github.com/${repos[$repo]} "${dir}"/$repo $DEBUG_STD && cd "${dir}"/$repo || { + echo "Failed to cd directory '$dir'" + exit 1 + }; } + eval git pull $DEBUG_STD + exit_status=$? 
+ if [[ -s "setup.py" ]]; then eval $SUDO python3 setup.py install $DEBUG_STD fi - if [ "massdns" = "$repo" ]; then + if [[ "massdns" == "$repo" ]]; then eval make $DEBUG_STD && strip -s bin/massdns && eval $SUDO cp bin/massdns /usr/local/bin/ $DEBUG_ERROR - elif [ "gf" = "$repo" ]; then + elif [[ "gf" == "$repo" ]]; then eval cp -r examples ~/.gf $DEBUG_ERROR - elif [ "Gf-Patterns" = "$repo" ]; then + elif [[ "Gf-Patterns" == "$repo" ]]; then eval mv ./*.json ~/.gf $DEBUG_ERROR - elif [ "trufflehog" = "$repo" ]; then + elif [[ "trufflehog" == "$repo" ]]; then eval go install $DEBUG_STD fi - cd "$dir" || { echo "Failed to cd to $dir in ${FUNCNAME[0]} @ line ${LINENO}"; exit 1; } - done + cd "${dir}" || { + echo "Failed to cd to $dir in ${FUNCNAME[0]} @ line ${LINENO}" + exit 1 + } + done fi printf "${bblue} Running: Performing last configurations ${reset}\n\n" ## Last steps -if [ "$generate_resolvers" = true ]; then - if [ ! -s "$resolvers" ] || [[ $(find "$resolvers" -mtime +1 -print) ]] ; then +if [[ $generate_resolvers == true ]]; then + if [[ ! -s $resolvers ]] || [[ $(find "$resolvers" -mtime +1 -print) ]]; then printf "${reset}\n\nChecking resolvers lists...\n Accurate resolvers are the key to great results\n This may take around 10 minutes if it's not updated\n\n" - eval rm -f $resolvers 2>>"$LOGFILE" - dnsvalidator -tL https://public-dns.info/nameservers.txt -threads $DNSVALIDATOR_THREADS -o $resolvers &>/dev/null - dnsvalidator -tL https://raw.githubusercontent.com/blechschmidt/massdns/master/lists/resolvers.txt -threads $DNSVALIDATOR_THREADS -o tmp_resolvers &>/dev/null - [ -s "tmp_resolvers" ] && cat tmp_resolvers | anew -q $resolvers - [ -s "tmp_resolvers" ] && rm -f tmp_resolvers &>/dev/null - [ ! -s "$resolvers" ] && wget -q -O - https://raw.githubusercontent.com/trickest/resolvers/main/resolvers.txt > ${resolvers} - [ ! -s "$resolvers_trusted" ] && wget -q -O - https://raw.githubusercontent.com/six2dez/resolvers_reconftw/main/resolvers_trusted.txt > ${resolvers_trusted} + eval rm -f $resolvers 2>>"${LOGFILE}" + dnsvalidator -tL https://public-dns.info/nameservers.txt -threads $DNSVALIDATOR_THREADS -o $resolvers &>/dev/null + dnsvalidator -tL https://raw.githubusercontent.com/blechschmidt/massdns/master/lists/resolvers.txt -threads $DNSVALIDATOR_THREADS -o tmp_resolvers &>/dev/null + [[ -s "tmp_resolvers" ]] && cat tmp_resolvers | anew -q $resolvers + [[ -s "tmp_resolvers" ]] && rm -f tmp_resolvers &>/dev/null + [[ ! -s $resolvers ]] && wget -q -O - https://raw.githubusercontent.com/trickest/resolvers/main/resolvers.txt >${resolvers} + [[ ! -s $resolvers_trusted ]] && wget -q -O - https://raw.githubusercontent.com/six2dez/resolvers_reconftw/main/resolvers_trusted.txt >${resolvers_trusted} printf "${yellow} Resolvers updated\n ${reset}\n\n" fi generate_resolvers=false else - [ ! -s "$resolvers" ] || if [[ $(find "$resolvers" -mtime +1 -print) ]] ; then - ${reset}"\n\nChecking resolvers lists...\n Accurate resolvers are the key to great results\n Downloading new resolvers ${reset}\n\n" - wget -q -O - https://raw.githubusercontent.com/trickest/resolvers/main/resolvers.txt > ${resolvers} - wget -q -O - https://raw.githubusercontent.com/six2dez/resolvers_reconftw/main/resolvers_trusted.txt > ${resolvers_trusted} + [[ ! 
-s $resolvers ]] || if [[ $(find "$resolvers" -mtime +1 -print) ]]; then + ${reset}"\n\nChecking resolvers lists...\n Accurate resolvers are the key to great results\n Downloading new resolvers ${reset}\n\n" + wget -q -O - https://raw.githubusercontent.com/trickest/resolvers/main/resolvers.txt >${resolvers} + wget -q -O - https://raw.githubusercontent.com/six2dez/resolvers_reconftw/main/resolvers_trusted.txt >${resolvers_trusted} printf "${yellow} Resolvers updated\n ${reset}\n\n" fi fi @@ -618,9 +507,6 @@ eval strip -s "$HOME"/go/bin/* $DEBUG_STD eval $SUDO cp "$HOME"/go/bin/* /usr/local/bin/ $DEBUG_STD -if [ "$web" = true ]; then - printf "\n${bgreen} Web server is installed, to set it up run ./install.sh and select option 3 ${reset}\n\n" -fi printf "${yellow} Remember set your api keys:\n - amass (~/.config/amass/config.ini)\n - subfinder (~/.config/subfinder/provider-config.yaml)\n - GitLab (~/Tools/.gitlab_tokens)\n - SSRF Server (COLLAB_SERVER in reconftw.cfg or env var) \n - Blind XSS Server (XSS_SERVER in reconftw.cfg or env var) \n - notify (~/.config/notify/provider-config.yaml) \n - WHOISXML API (WHOISXML_API in reconftw.cfg or env var)\n\n${reset}" printf "${bgreen} Finished!${reset}\n\n" diff --git a/reconftw.sh b/reconftw.sh index 01e29362..abb076ff 100755 --- a/reconftw.sh +++ b/reconftw.sh @@ -1,5 +1,17 @@ #!/usr/bin/env bash +# Welcome to reconFTW main script +# ██▀███ ▓█████ ▄████▄ ▒█████ ███▄ █ █████▒▄▄▄█████▓ █ █░ +# ▓██ ▒ ██▒▓█ ▀ ▒██▀ ▀█ ▒██▒ ██▒ ██ ▀█ █ ▓██ ▒ ▓ ██▒ ▓▒▓█░ █ ░█░ +# ▓██ ░▄█ ▒▒███ ▒▓█ ▄ ▒██░ ██▒▓██ ▀█ ██▒▒████ ░ ▒ ▓██░ ▒░▒█░ █ ░█ +# ▒██▀▀█▄ ▒▓█ ▄ ▒▓▓▄ ▄██▒▒██ ██░▓██▒ ▐▌██▒░▓█▒ ░ ░ ▓██▓ ░ ░█░ █ ░█ +# ░██▓ ▒██▒░▒████▒▒ ▓███▀ ░░ ████▓▒░▒██░ ▓██░░▒█░ ▒██▒ ░ ░░██▒██▓ +# ░ ▒▓ ░▒▓░░░ ▒░ ░░ ░▒ ▒ ░░ ▒░▒░▒░ ░ ▒░ ▒ ▒ ▒ ░ ▒ ░░ ░ ▓░▒ ▒ +# ░▒ ░ ▒░ ░ ░ ░ ░ ▒ ░ ▒ ▒░ ░ ░░ ░ ▒░ ░ ░ ▒ ░ ░ +# ░░ ░ ░ ░ ░ ░ ░ ▒ ░ ░ ░ ░ ░ ░ ░ ░ +# ░ ░ ░░ ░ ░ ░ ░ ░ +# + function banner_graber() { source "${SCRIPTPATH}"/banners.txt randx=$(shuf -i 1-23 -n 1) @@ -13,11 +25,12 @@ function banner() { printf "\n ${reconftw_version} by @six2dez${reset}\n" } -function test_connectivity(){ +function test_connectivity() { if nc -zw1 google.com 443 2>/dev/null; then - echo -e "${lgray}Connection: ${lgreen}OK${reset}" + echo -e "Connection: ${bgreen}OK${reset}" else - echo -e "${lred}[!] Please check your internet connection and then try again...${reset}";exit 1 + echo -e "${bred}[!] Please check your internet connection and then try again...${reset}" + exit 1 fi } ############################################################################################################### @@ -491,8 +504,8 @@ function postleaks() { if { [[ ! -f "$called_fn_dir/.${FUNCNAME[0]}" ]] || [[ $DIFF == true ]]; } && [[ $POSTMAN_LEAKS == true ]] && [[ $OSINT == true ]] && ! 
[[ $domain =~ ^[0-9]+\.[0-9]+\.[0-9]+\.[0-9] ]]; then start_func ${FUNCNAME[0]} "Scanning for leaks in postman public directory" - postleaksNg -k "$domain" >.tmp/postleaks.txt || { - echo "postleaksNg command failed" + porch-pirate -s "$domain" --dump >osint/postman_leaks.txt || { + echo "porch-pirate command failed" exit 1 } @@ -756,7 +769,7 @@ function sub_noerror() { printf "\n${yellow} Detected DNSSEC black lies, skipping this technique ${reset}\n" fi else - if [[ $SUBBRUTE == false ]]; then + if [[ $SUBNOERROR == false ]]; then printf "\n${yellow} ${FUNCNAME[0]} skipped in this mode or defined in reconftw.cfg ${reset}\n" else printf "${yellow} ${FUNCNAME[0]} is already processed, to force executing ${FUNCNAME[0]} delete\n $called_fn_dir/.${FUNCNAME[0]} ${reset}\n\n" @@ -1213,56 +1226,54 @@ function s3buckets() { spinny::stop } - ############################################################################################################### ############################################# GEOLOCALIZATION INFO ####################################################### ############################################################################################################### - -function geo_info(){ +function geo_info() { if { [ ! -f "$called_fn_dir/.${FUNCNAME[0]}" ] || [ "$DIFF" = true ]; }; then - start_func ${FUNCNAME[0]} "Running: ipinfo via ipapi.co" + start_func ${FUNCNAME[0]} "Running: ipinfo via ipapi.co" ips_file="${dir}/hosts/ips.txt" if [ ! -f $ips_file ]; then echo "File ${dir}/hosts/ips.txt does not exist." else for ip in $(cat "$ips_file"); do json_output=$(curl -s https://ipapi.co/$ip/json) - echo $json_output >> ${dir}/hosts/geoip.json - ip=$(echo $json_output| jq '.ip' | tr -d '''"''') - network=$(echo $json_output| jq '.network' | tr -d '''"''') - city=$(echo $json_output| jq '.city' | tr -d '''"''') - region=$(echo $json_output| jq '.region' | tr -d '''"''') - country=$(echo $json_output| jq '.country' | tr -d '''"''') - country_name=$(echo $json_output| jq '.country_name' | tr -d '''"''') - country_code=$(echo $json_output| jq '.country_code' | tr -d '''"''') - country_code_iso3=$(echo $json_output| jq '.country_code_iso3' | tr -d '''"''') - country_tld=$(echo $json_output| jq '.country_tld' | tr -d '''"''') - continent_code=$(echo $json_output| jq '.continent_code' | tr -d '''"''') - latitude=$(echo $json_output| jq '.latitude' | tr -d '''"''') - longitude=$(echo $json_output| jq '.longitude' | tr -d '''"''') - timezone=$(echo $json_output| jq '.timezone' | tr -d '''"''') - utc_offset=$(echo $json_output| jq '.utc_offset' | tr -d '''"''') - asn=$(echo $json_output| jq '.asn' | tr -d '''"''') - org=$(echo $json_output| jq '.org' | tr -d '''"''') - - echo "IP: $ip" >> ${dir}/hosts/geoip.txt - echo "Network: $network" >> ${dir}/hosts/geoip.txt - echo "City: $city" >> ${dir}/hosts/geoip.txt - echo "Region: $region" >> ${dir}/hosts/geoip.txt - echo "Country: $country" >> ${dir}/hosts/geoip.txt - echo "Country Name: $country_name" >> ${dir}/hosts/geoip.txt - echo "Country Code: $country_code" >> ${dir}/hosts/geoip.txt - echo "Country Code ISO3: $country_code_iso3" >> ${dir}/hosts/geoip.txt - echo "Country tld: $country_tld" >> ${dir}/hosts/geoip.txt - echo "Continent Code: $continent_code" >> ${dir}/hosts/geoip.txt - echo "Latitude: $latitude" >> ${dir}/hosts/geoip.txt - echo "Longitude: $longitude" >> ${dir}/hosts/geoip.txt - echo "Timezone: $timezone" >> ${dir}/hosts/geoip.txt - echo "UTC Offset: $utc_offset" >> ${dir}/hosts/geoip.txt - echo "ASN: $asn" >> 
${dir}/hosts/geoip.txt - echo "ORG: $org" >> ${dir}/hosts/geoip.txt - echo -e "------------------------------\n" >> ${dir}/hosts/geoip.txt + echo $json_output >>${dir}/hosts/geoip.json + ip=$(echo $json_output | jq '.ip' | tr -d '''"''') + network=$(echo $json_output | jq '.network' | tr -d '''"''') + city=$(echo $json_output | jq '.city' | tr -d '''"''') + region=$(echo $json_output | jq '.region' | tr -d '''"''') + country=$(echo $json_output | jq '.country' | tr -d '''"''') + country_name=$(echo $json_output | jq '.country_name' | tr -d '''"''') + country_code=$(echo $json_output | jq '.country_code' | tr -d '''"''') + country_code_iso3=$(echo $json_output | jq '.country_code_iso3' | tr -d '''"''') + country_tld=$(echo $json_output | jq '.country_tld' | tr -d '''"''') + continent_code=$(echo $json_output | jq '.continent_code' | tr -d '''"''') + latitude=$(echo $json_output | jq '.latitude' | tr -d '''"''') + longitude=$(echo $json_output | jq '.longitude' | tr -d '''"''') + timezone=$(echo $json_output | jq '.timezone' | tr -d '''"''') + utc_offset=$(echo $json_output | jq '.utc_offset' | tr -d '''"''') + asn=$(echo $json_output | jq '.asn' | tr -d '''"''') + org=$(echo $json_output | jq '.org' | tr -d '''"''') + + echo "IP: $ip" >>${dir}/hosts/geoip.txt + echo "Network: $network" >>${dir}/hosts/geoip.txt + echo "City: $city" >>${dir}/hosts/geoip.txt + echo "Region: $region" >>${dir}/hosts/geoip.txt + echo "Country: $country" >>${dir}/hosts/geoip.txt + echo "Country Name: $country_name" >>${dir}/hosts/geoip.txt + echo "Country Code: $country_code" >>${dir}/hosts/geoip.txt + echo "Country Code ISO3: $country_code_iso3" >>${dir}/hosts/geoip.txt + echo "Country tld: $country_tld" >>${dir}/hosts/geoip.txt + echo "Continent Code: $continent_code" >>${dir}/hosts/geoip.txt + echo "Latitude: $latitude" >>${dir}/hosts/geoip.txt + echo "Longitude: $longitude" >>${dir}/hosts/geoip.txt + echo "Timezone: $timezone" >>${dir}/hosts/geoip.txt + echo "UTC Offset: $utc_offset" >>${dir}/hosts/geoip.txt + echo "ASN: $asn" >>${dir}/hosts/geoip.txt + echo "ORG: $org" >>${dir}/hosts/geoip.txt + echo -e "------------------------------\n" >>${dir}/hosts/geoip.txt done fi end_func "Results are saved in hosts/geoip.txt and hosts/geoip.json" ${FUNCNAME[0]} @@ -1271,9 +1282,6 @@ function geo_info(){ fi } - - - ############################################################################################################### ########################################### WEB DETECTION ##################################################### ############################################################################################################### @@ -1455,14 +1463,14 @@ function portscan() { else echo $domain | grep -aEiv "^(127|10|169\.154|172\.1[6789]|172\.2[0-9]|172\.3[01]|192\.168)\." | grep -oE "\b([0-9]{1,3}\.){3}[0-9]{1,3}\b" | anew -q hosts/ips.txt fi - [ ! -s "hosts/cdn_providers.txt" ] && cat hosts/ips.txt 2>/dev/null | cdncheck -silent -resp -nc 2>/dev/null >hosts/cdn_providers.txt + [ ! -s "hosts/cdn_providers.txt" ] && cat hosts/ips.txt 2>/dev/null | cdncheck -silent -resp -cdn -waf -nc 2>/dev/null >hosts/cdn_providers.txt [ -s "hosts/ips.txt" ] && comm -23 <(cat hosts/ips.txt | sort -u) <(cat hosts/cdn_providers.txt | cut -d'[' -f1 | sed 's/[[:space:]]*$//' | sort -u) | grep -aEiv "^(127|10|169\.154|172\.1[6789]|172\.2[0-9]|172\.3[01]|192\.168)\." 
| grep -oE "\b([0-9]{1,3}\.){3}[0-9]{1,3}\b" | sort -u | anew -q .tmp/ips_nocdn.txt printf "${bblue}\n Resolved IP addresses (No CDN) ${reset}\n\n" [ -s ".tmp/ips_nocdn.txt" ] && cat .tmp/ips_nocdn.txt | sort geo_info printf "${bblue}\n Scanning ports... ${reset}\n\n" - ips_file="${dir}/hosts/ips.txt" - if [ "$PORTSCAN_PASSIVE" = true ] ; then + ips_file="${dir}/hosts/ips.txt" + if [ "$PORTSCAN_PASSIVE" = true ]; then if [ ! -f $ips_file ]; then echo "File $ips_file does not exist." else @@ -1472,14 +1480,14 @@ function portscan() { json_array+=("$json_result") done formatted_json="[" - for ((i=0; i<${#json_array[@]}; i++)); do + for ((i = 0; i < ${#json_array[@]}; i++)); do formatted_json+="$(echo ${json_array[i]} | tr -d '\n')" - if [ $i -lt $((${#json_array[@]}-1)) ]; then + if [ $i -lt $((${#json_array[@]} - 1)) ]; then formatted_json+=", " fi done formatted_json+="]" - echo "$formatted_json" > "${dir}/hosts/shodan_results.json" + echo "$formatted_json" >"${dir}/hosts/shodan_results.json" fi else printf "${yellow} ${FUNCNAME[0]} is already processed, to force executing ${FUNCNAME[0]} delete\n $called_fn_dir/.${FUNCNAME[0]} ${reset}\n\n" @@ -2160,6 +2168,7 @@ function ssti() { if [[ -s "gf/ssti.txt" ]]; then cat gf/ssti.txt | qsreplace FUZZ | sed '/FUZZ/!d' | anew -q .tmp/tmp_ssti.txt if [[ $DEEP == true ]] || [[ $(cat .tmp/tmp_ssti.txt | wc -l) -le $DEEP_LIMIT ]]; then + #TInjA url -u "file://.tmp/tmp_ssti.txt" --csti --reportpath "vulns/" interlace -tL .tmp/tmp_ssti.txt -threads ${INTERLACE_THREADS} -c "ffuf -v -r -t ${FFUF_THREADS} -rate ${FFUF_RATELIMIT} -H \"${HEADER}\" -w ${ssti_wordlist} -u \"_target_\" -mr \"ssti49\" " 2>/dev/null | grep "URL" | sed 's/| URL | //' | anew -q vulns/ssti.txt end_func "Results are saved in vulns/ssti.txt" ${FUNCNAME[0]} else diff --git a/requirements.txt b/requirements.txt index 758b2755..ccc8cfbd 100644 --- a/requirements.txt +++ b/requirements.txt @@ -34,4 +34,4 @@ tldextract # dorks_hunter tqdm # multiple ujson # multiple urllib3 # multiple -postleaksNg # Tool +porch-pirate # Tool diff --git a/web/Makefile b/web/Makefile deleted file mode 100644 index 61af4a94..00000000 --- a/web/Makefile +++ /dev/null @@ -1,13 +0,0 @@ -setup: ## Setup - rm db.sqlite3 dump.rdb celerybeat-schedule.db -rf - rm static/img/target_icon/* -f - python3 manage.py makemigrations - python3 manage.py migrate - -up: ## Start - eval "$(redis-server)" - eval "$(python3.9 -m celery -A web worker -l info -P solo)" - python3 manage.py runserver 0.0.0.0:8000 - -user: ## Generate Username - python3 manage.py createsuperuser diff --git a/web/README.md b/web/README.md deleted file mode 100644 index bb5badaf..00000000 --- a/web/README.md +++ /dev/null @@ -1,87 +0,0 @@ -

- reconftw
- ReconFTW WEB Interface
-
- Summary

- -Over the past year, our team has been dedicated to the meticulous development of the web interface for ReconFTW. Our unwavering efforts have been singularly focused on enhancing its intuitiveness, user-friendliness, and visual aesthetics, all while ensuring that it strikes the right balance between functionality and simplicity. This has been a challenging endeavor as we aimed to create an interface that caters to the diverse needs of both advanced users and beginners alike. - -One of the key hurdles we encountered was reconciling the intricate functionality required by advanced users with the need for a straightforward and accessible interface for those who are new to the platform. Through meticulous design and rigorous testing, we have successfully incorporated several features that streamline the process of creating scans and obtaining results in a prompt and efficient manner. The ultimate goal has been to elevate the overall user experience and provide a seamless and satisfying interaction with ReconFTW. - -So, without further ado, we invite you to explore the enhanced web interface of ReconFTW and experience the fruits of our labor firsthand. Don't hesitate - seize the opportunity to leverage our powerful yet user-friendly tool now! - -**So, what are you waiting for? Go! Go! Go! :boom: :boom: :boom:** - ---- - -# :video_camera: Demo: -### WEB: -![WEB](https://media1.giphy.com/media/7ikMiEv5bTUP943Gnp/giphy.gif) - -### Mobile: -![MOBILE](https://media3.giphy.com/media/Ec5SOPpVRlh79Vy1uR/giphy.gif) - ---- - -# 💿 Installation: - -**:bangbang: We strictly recommend installing the web interface on a VPS such as DigitalOcean.:bangbang:** - -### Run the following command to install the WEB GUI Interface -```bash -./install.sh #(Option 2) -``` -![ReconFTW Install Web GUI](https://i.imgur.com/675L89x.png) ---- - -### Run the following after install to setup the WEB GUI Interface -```bash -./install.sh #(Option 3) -``` -![ReconFTW Install Web GUI](https://i.imgur.com/675L89x.png) ---- - -# :robot: Usage: - -### To start the Web Service: -```bash -./reconftw.sh --web-server start -``` -![ReconFTW Start Web GUI](https://i.imgur.com/gHi7Bj5.png) - -### To stop the Web Service: -```bash -./reconftw.sh --web-server stop -``` -![ReconFTW Stop Web GUI](https://i.imgur.com/bWklRCY.pngg) ---- - -# :fire: Features: - -- New Scan -- Schedule Scan -- API Keys Config -- Edit Profile -- Delete Scan - ---- - -# :drop_of_blood: How to contribute: - -We tried to make the structure as simple as possible to be understood, with the intention of new collaborators being able to easily read the code and add their knowledge. - -We currently have some problems, for example: we cannot load all the information from the database without the server being taken down. - -We need to finalize features such as: cancel scan, backup scan, api key config. - -We need help! if you have an idea or want to improve our code, please do not hesitate to contact us or contribute directly. We know that a lot can be improved and we have the humbleness to recognize this. - -**Let's make history!** - ->"The best way to find yourself is to lose yourself in the service of others." 
- Mahatma Gandhi - ---- diff --git a/web/apikeys/__init__.py b/web/apikeys/__init__.py deleted file mode 100644 index e69de29b..00000000 diff --git a/web/apikeys/__pycache__/__init__.cpython-310.pyc b/web/apikeys/__pycache__/__init__.cpython-310.pyc deleted file mode 100644 index bb9f958ab1feb6b32380401527bb7e7f07636c49..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 131 zcmd1j<>g`k0%P94$sqbMh(HF6K#l_t7qb9~6oz01O-8?!3`HPe1o2BvzbHSyM87CC zIX^G0q+Gu|HAz3QATv9)vRFSpJ~J<~BtBlRpz;=nO>TZlX-=vgNPjUCkYHf|0D_wv A4gdfE diff --git a/web/apikeys/__pycache__/apps.cpython-310.pyc b/web/apikeys/__pycache__/apps.cpython-310.pyc deleted file mode 100644 index e339d2b3a93fafcaeae30409bacf1fc4ec79ef64..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 412 zcmYjNy-ve05I)B)gtl2wS0u!if#d}cYC(dn3tJW|%{fsZsY9HoDl0qzZ^0{hW$G&+ zA#rXh(Ub1~^LOXdK`;iA>)<#0B>(M_V-b|hNzEl01Y{M|EMW{D0X^g&fLyV6o_Hvp zARO%o1tGI%i>A5P>%1&7S2-3&$(+>8$S6n{0=;GlM=ue`iAC68(UdQ0YX*w93w%!3 zg^q9))f!c0qPwzK^qqbvRfYC69ni~gWPK^uX|1HRV<`#TS1z7P`P!$|p>vKZPy4Eq zDN)FL&mmmz3tD%Lz4^I{TdljeRT=%G+r%5SiVuUxHzsPfEhn_QkU1gagJ3?}*Ono- glf%*u+j@@Hq5lUA(S&N&zZo{Yb+<8&a0)a22e+bP!T2^hr`?_7#HYhFm)bwm!kJJT??9=VWquO0lJTLZ(1K(wLEwm1bLulD( z9Tp>KIcOabN6~W88Wqo>&yoOe&`pb<@>UIce0AQKsxxEL&~m zEvJ#}hMh=xy)aEz5-C)*70C?lnGE4D7q+6=Bw19}TrwA`fZXEyQI=#hoA$!aLb#kN zH(QCbERxDyOOj5s8YyQr?270?v!;S#?{epw-g&rh(H(By5b=CW2@DbU`*eLH;E z(kr=xsE7JqX64p8>sRv1z{cflAp6`yG;h=pjSX`H=w5n97*j^h#r{Pue0K4vzH;O0)Sb7k(}?o#Oij&9UApy+OSe?*!j0?K&%ZJ3 zrtPq`IMZ4PyWOa*{GuB!XA4aSxL-&_*j3)jLK3GBI&D>Xkfs3tePA|dCGB=NTlT7Y zA%#JditbR5wSD8`6EbQg-MMUKyfrtkm|Wv$j<50K{wDkM+Ws@g@9g|L?mq?`jA_Nr zD0WPFTXSnwv`&=QCOh8rlq;hia*veVPF9eQmGq*ns>mo5?YJAIk|U7vS7e+;Ggkrn5yo7+e$VLU&#ihyi3SLTP zWNg4UjCCeJ1frHEkO5emf`d*r=w#(a<^XyxWeyQem>`{9wk;xH7bUxI&x3Tr>AS-1 zd$=mO1&W;N`@$mv0kWVbqMQ~L0N)iAkYOQr25#;XNkAI^HKXt4wubovManQXs0hp> zKXcKNVyjO%417@)E{IaW{@72`<@2B!MNUCRUj#G{x(wUMifxVeS&kk`!S6wJm+pvC z)xQ>>)aOgvgB0p zwu8Ky2YBY{(hoD1n zauhc7w|Cyem|YaH9lQSNlk7SyFOA8y(TVW_z7t^wr`Q&j<8CZJ3slJyWQeoMDd2ut zzJT6ZO@vuElP1fu6{X|Ta-3Y4FDJH}%CLijIP8JsLYeKTD^JlhQBi)G%xf^sy8H^Yze?tHGH1w~g;BPSp)7hDZ8LeE zCNGmYr#&O~5Zr>`f0*0aWcRE=;z#HY_w9&2WQ7$K`KMG%XOrjL%+2JnOS;u!#2)x`aPD`|l_+0oP zm)xCg5f6pJE6=^^B#YXu$&nU8H4#a1*pA5*cFZ;~N& zPB(pdl{$1r$gjgFGlq5>B2K1)pn{*xrNGnlwK|MsHQ_6S$O%MqRNi?>Y(D|Oju{{KO9P{ zZ)f&BCwE{`^MAxq)fwtKgpd9Qw?_#aS4(i-_yXB$rK4$-NEEjN9pWbAij3yK-Cgi{Ay8u z5cw0v(V~NEf%99uFQ(tq*fZbLZ~pK5A!)~^jHO?Y)l{fWt{p9@uyCpq#_bs#vZRTJ zI)ch_QJ_gF6lGj%1=1Fq>~(xK9Ftejeg@XJ%eIySS{Q6=g1rde)&zwBIESFh(v-W2 z<};7}pvbyF^Y7r5-hye6NC%dwG?P3ZDRn@Avm6Ed$%y5#qs(Fa-;G*;dC%0n_pCpA zVO;Hg`k0zKZp$(BI+F^Gc(44TX@fuanW zjJMcw^HWlDiv2X1ZgHk$CFZ5)>!l@ptsOEcnO!bq<|Hu=$OX;r@00Ta;nRlEs$UsclEIN^T*-7M(quAw8 zK>02P-W5d?$rMQsQ&o7;?OH=>= diff --git a/web/apikeys/__pycache__/views.cpython-310.pyc b/web/apikeys/__pycache__/views.cpython-310.pyc deleted file mode 100644 index f924eeeff0aeb5dc727d6e2951933a801e8db5a6..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 6162 zcmZ{o2Y4LC6~}Mq+TPu}qGqr$U^=2m?%057rWYZQ3`9Vby_-4RT6?$W**(eUNR*iL zm=uzbLOP~*_)@?0Mk?uL(jf)XJNX<4d9%ASce*&zcf0fZ&+MD}rn&iiHim!e^3PW` z9u|u|C87H-gK!4^yzB5mW7KeBbz{mfWSBUl9-oQ_;kc8iC#RA@IN_w~>8W(xoHFZ~ zsZ2dPm96Kdau}DSDVnAx&Co2(QK0$T%qd`TTA;-}1})LPn_|0-sk~PD=|G?q=-@=-XJ<)O9*;aG9$?ds)#U)CsJZy?=Gt7oI zJYiCM#;#yiBGvTDwB8g+ZqM2~gc+E|GKplx_A0Ij40Tz?cEP-q&?@grj|L4jiVc(r zo2?pombHb+rB_t_d6Ak4hW|4!;GclWF8>fo zui5sPNQEvjoDm6%c12?R(uqltb zV2QWHQRu^8;4Mw8H~I=rj@Yvq-&nx$WyG-GV%y`}6Viih^d3J+<1|5&d&n&YO(CT< 
znMfH;S)`mMK+0<>AQd&0{1ole)Q>cvX%J~h(=wz3G#!YvT+<4qgEXx~I#|;oNQY`# zg>;yv!;y~AbR^PIim<*%BOjxf9*Z=r={Th0HJyNTqNWj~QBA9n)@WLbv`*7{qz#%j zBAujZ6Vhf)CnKGr=~SdGnodJHUDFvzXKFeNX{)BQkVCR(q)=1N1D(yiL_1A6-ZZVx(ewGh)^rWhwVGaxG_C0+NS3BD zQbiL*Vw$Q*Gn#5hdISeGwo%!x<7bf^O?9M(CKsuxX$KP5yh4|>5WKl(sYY&(l={*3({LP-HO!lqqiY-M%mku^eCh6KCpW;^neaMs6!8hAzUXPrjOiK+-uqtnGa($a&z9`JsT-)1OccOSxu_*2IylGq_=NOOTGN^IRuInM5(jS3nRJQhl`@+HKJ`>SfPv-1XwlBs$OI( z6_O}fj?Ef#R?G3Dm~2N8`7)csO{6McIzrK3wr8m2TNTG`(dawq)Dz>IP8&0o@Y#VEoqPpBztCV#r%{ry5SIT;&Y*5MurEFBnMqEJb zhQ-^A&L}s@b`&AQGg0D%42yg?`Kyu?)fA$Hd^nS63}*$6;dG#IpxarLs61#=dKE59 za(Pj4ecZ)K$;XjcQA=8`OA-UX9V|z=i!;-IT6W~YZn>pa*Fjz7pfr(*eyw6dMB0k|HOK#plZ_I^W$d~& z!^SPSXN#^Na9bEQ(ypOiWu)fUo#C8F)tQg`(%|BNCuDCXg61bS4K7H+tCFP`B%ENn z;W$4|j({iSH)*`#@Yzm3xD*K=qf$@Wat1gHgWh#`sgsSH1`vXm8sr6h{|Dmac}S4w zAW8lWDY6et@-N7cXCX`e2|4l)0P=UplfOZM{1uAiFHj^ko+Eo z$nRho`7Inko`D0&)3BWU23C+?!$IU%u#)@|4kk~*A>>Iol>7o#k)Ok1Ea)HP(}jT#Tejd^O_r7ee@CT{}0etbX- R3@}z2%i<=<8s?^g@n1p`=yL!7 diff --git a/web/apikeys/apps.py b/web/apikeys/apps.py deleted file mode 100644 index 45ed3ab9..00000000 --- a/web/apikeys/apps.py +++ /dev/null @@ -1,6 +0,0 @@ -from django.apps import AppConfig - - -class ApikeysConfig(AppConfig): - default_auto_field = 'django.db.models.BigAutoField' - name = 'apikeys' diff --git a/web/apikeys/config.py b/web/apikeys/config.py deleted file mode 100644 index bf2e5b30..00000000 --- a/web/apikeys/config.py +++ /dev/null @@ -1,204 +0,0 @@ -from pathlib import Path -import yaml - -params = {"censyssecret": ["censys", "secret"], "circlusername":["circl", "username"], "circlpassword":["circl", "password"], "digicertusername": ["certcentral", "username"], "facebooksecret":["facebook", "secret"], "fofausername": ["fofa", "username"], "rikiqusername":["passivetotal", "username"], "spamhaususername":["spamhaus", "username"], "spamhauspassword":["spamhaus", "password"], "twittersecret":["twitter", "secret"], "zoomeyeusername":["zoomeye", "username"], "zoomeyepassword":["zoomeye", "password"], "yandexusername":["yandex", "username"]} - -def ReconConfig(name, key=None, get=None): - name = name.replace("_", "").lower() - - names = { - "shodan":"SHODAN_API_KEY", - "whoisxml":"WHOISXML_API", - "xssserver":"XSS_SERVER", - "collabserver":"COLLAB_SERVER", - "slackchanel":"slack_channel", - "slackauth":"slack_auth", - } - - - if name in names: - name = names[name] - file = "../reconftw.cfg" - - lines = open(file, "r").readlines() - - subs = {} - - if key != None: - for line in lines: - if name in line and key != "": - subs[line] = name+'="'+key+'"\n' - - break - elif name in line and key == "": - subs[line] = '#'+name+'="XXXXXXXXXXXXX"\n' - break - - for sub in subs: - replace = Path(file) - replace.write_text(replace.read_text().replace(sub, subs[sub], 1)) - - elif get == True: - result = "" - for line in lines: - if name in line: - result = line.split("=")[1].replace(" ", "") - break - - if "XXXXXXXX" in result or "XXX-XXX-XXX" in result: - return "" - else: - return result.replace('"', '') - - -#https://ddaniboy.github.io/sariel.html -def amassConfig(name, key=None, get=None): - file = str(Path.home())+"/.config/amass/config.ini" - name = name.lower() - - lines = open(file, "r").readlines() - - if name in params: - param = params[name][1] - name = params[name][0] - else: - param = "apikey" - - - conf = [] - cont = False - - sub = "" - apikey = "" - - for line in lines: - - if "data_sources."+name in line.lower(): - cont = True - - if cont == True: - conf.append(line) - sub += line - if param in line: - cont = 
False - - if len(line.split("=")) > 1: - apikey = line.split("=")[1].replace("\n", "") - else: - apikey = "" - break - - - - if get == True: - return apikey.replace(" ", "") - else: - apikey = apikey.replace(" ", "") - key = key.replace(" ", "") - if apikey != key and key != "": - final = "" - for con in conf: - if con != "": - if con[0] == "#": - con = con.replace("#", "", 1) - while con[0] == " ": - con = con.replace(" ", "", 1) - - if param in con.lower(): - con = param + " = "+key+"\n" - - - final += con - - replace = Path(file) - replace.write_text(replace.read_text().replace(sub, final, 1)) - - - - elif apikey != "" and key == "": - final = "" - for con in conf: - if con != "": - con = "#"+con - if param in con.lower(): - con = "#"+param+" =\n" - - - final += con - - - replace = Path(file) - replace.write_text(replace.read_text().replace(sub, final, 1)) - - - - -def GithubConfig(number, key=None, get=None): - file = str(Path.home())+"/Tools/.github_tokens" - number = int(number)-1 - lines = open(file, "r").readlines() - - if len(lines) <= 5: - - lines = open(file, "w") - for i in range(0, 6): - lines.write("\n") - lines.close() - - - if key != None: - if key != "": - lines[number] = key+"\n" - elif key == "" and lines[number] != key: - lines[number] = "\n" - - gitTokens = open(file, "w") - for item in lines: - gitTokens.write(item) - gitTokens.close() - - - if get == True: - - lines = open(file, "r").readlines() - - result = lines[number] - - return result - -def theHarvesterConfig(name, key=None, get=None): - namefile = str(Path.home())+"/Tools/theHarvester/api-keys.yaml" - listOfNames = {"chaos":"projectDiscovery"} - - if name.lower() in listOfNames: - name = listOfNames[name.lower()] - - if name == "censys": - var = "secret" - else: - var = "key" - - - with open(namefile) as file: - if key != None: - data = yaml.load(file, Loader=yaml.FullLoader) - - if key != data["apikeys"][name][var] and key != "": - data["apikeys"][name][var] = key - - elif key == "" and data["apikeys"][name][var] != None: - data["apikeys"][name][var] = None - - - with open(namefile, "w") as comp: - yaml.dump(data, comp) - - elif get == True: - data = yaml.load(file, Loader=yaml.FullLoader) - - result = data["apikeys"][name][var] - - if result == None: - return '' - else: - return result \ No newline at end of file diff --git a/web/apikeys/migrations/__init__.py b/web/apikeys/migrations/__init__.py deleted file mode 100644 index e69de29b..00000000 diff --git a/web/apikeys/migrations/__pycache__/__init__.cpython-310.pyc b/web/apikeys/migrations/__pycache__/__init__.cpython-310.pyc deleted file mode 100644 index 3b553616dfa02a24c858a494c681e6a8b1b1314c..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 142 zcmd1j<>g`k0v+DJ$sqbMh(HF6K#l_t7qb9~6oz01O-8?!3`HPe1o6vIzbHSyM87CC zIX^G0q+Gu|HAz3QATv9)vRFSiGrcIWBr`v+SU)~KGcU6wK3=b&@)n0pZhlH>PO2Tq LsA47{!NLFl^`#$v diff --git a/web/apikeys/models.py b/web/apikeys/models.py deleted file mode 100644 index 71a83623..00000000 --- a/web/apikeys/models.py +++ /dev/null @@ -1,3 +0,0 @@ -from django.db import models - -# Create your models here. diff --git a/web/apikeys/tests.py b/web/apikeys/tests.py deleted file mode 100644 index 7ce503c2..00000000 --- a/web/apikeys/tests.py +++ /dev/null @@ -1,3 +0,0 @@ -from django.test import TestCase - -# Create your tests here. 
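For context on the `web/apikeys/` code removed above: each helper in the deleted `config.py` (`ReconConfig`, `amassConfig`, `GithubConfig`, `theHarvesterConfig`) follows the same idea of locating the line that stores a given API key in one of the tool config files (reconftw.cfg, amass's config.ini, `~/Tools/.github_tokens`, theHarvester's api-keys.yaml) and rewriting it in place. The sketch below is a minimal, hypothetical illustration of that rewrite pattern for a shell-style config file; it is not code from this patch, and the path and variable name used are examples only.

```python
# Minimal sketch (not from the patch): rewrite a NAME="value" assignment in place,
# the same pattern the removed ReconConfig() helper applied to reconftw.cfg.
from pathlib import Path


def set_cfg_var(cfg_path: str, name: str, value: str) -> None:
    """Replace (or append) a NAME="value" line in a shell-style config file."""
    cfg = Path(cfg_path)
    lines = cfg.read_text().splitlines()
    new_line = f'{name}="{value}"'
    for i, line in enumerate(lines):
        # Also match commented-out placeholders such as '#SHODAN_API_KEY="XXXX"'.
        if line.lstrip("#").strip().startswith(f"{name}="):
            lines[i] = new_line
            break
    else:
        # Variable not present yet: append it.
        lines.append(new_line)
    cfg.write_text("\n".join(lines) + "\n")


# Hypothetical usage (example path and key, not values taken from the patch):
# set_cfg_var("reconftw.cfg", "SHODAN_API_KEY", "example-token")
```

With the web UI removed, these config files are expected to be edited by hand, which matches the reminder install.sh prints at the end of its run (amass, subfinder, GitLab tokens, notify, WHOISXML, etc.).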
diff --git a/web/apikeys/urls.py b/web/apikeys/urls.py deleted file mode 100644 index 1c182653..00000000 --- a/web/apikeys/urls.py +++ /dev/null @@ -1,10 +0,0 @@ -from django.urls import path -from . import views - -# Namespace name -app_name = 'apikeys_settings' - -# Be careful setting the name to just /login use userlogin instead! -urlpatterns=[ - path('', views.index, name='index'), -] diff --git a/web/apikeys/views.py b/web/apikeys/views.py deleted file mode 100644 index 92326a13..00000000 --- a/web/apikeys/views.py +++ /dev/null @@ -1,144 +0,0 @@ -from django.shortcuts import render -from django.contrib.auth.decorators import login_required -from editprofile.imgUser import imgUser -from apikeys.config import amassConfig, ReconConfig, GithubConfig -# Create your views here. - -otherNames = {'passivedns': '360PassiveDNS', 'digicert': 'CertCentral', 'psbdmp':'Pastebin', 'rikiq':'PassiveTotal', 'quake360':'quake', 'cisco':'Umbrella', 'leaklookup_priv':'leak-lookup_priv', 'leaklookup_pub':'leak-lookup_pub'} - -@login_required(login_url='/login/') -def conf(request): - keys = dict(request.POST) - del keys["csrfmiddlewaretoken"] - if 'UserPicture' in keys: - del keys['UserPicture'] - - if keys["type"][0] == "amass": - del keys["type"] - - for key in keys: - name = key - key = key=keys[key][0] - if name in otherNames: - name = otherNames[name] - - amassConfig(name, key=key) - - - elif keys["type"][0] == "reconftw": - del keys["type"] - - for key in keys: - name = key - key = key=keys[key][0] - ReconConfig(name, key=key) - - - elif keys["type"][0] == "github": - del keys["type"] - - for key in keys: - number = int(key[-1]) - key = key=keys[key][0] - GithubConfig(number, key=key) - - -# elif keys["type"][0] == "TheHarvester": -# del keys["type"] -# -# for key in keys: -# name = key -# key = key=keys[key][0] -# if name != "spyse": -# theHarvesterConfig(name, key=key) - -@login_required(login_url='/login/') -def index(request): - - if request.method == "POST": - conf(request) - - imagePath = imgUser(request.user.id) - - context = { - 'shodan_value': ReconConfig('shodan', get=True), - 'whoisxml_value': ReconConfig('whoisxml', get=True), - 'xss_server_value': ReconConfig('xssserver', get=True), - 'collab_server_value': ReconConfig('collabserver', get=True), - 'slack_channel_value': ReconConfig('slackchanel', get=True), - 'slack_auth_value': ReconConfig('slackauth', get=True), - - 'passivedns_value': amassConfig("360PassiveDNS", get=True), - 'asnlookup_value': amassConfig("asnlookup", get=True), - 'ahrefs_value': amassConfig("ahrefs", get=True), - 'alienvault_value': amassConfig("alienvault", get=True), - 'bevigil_value': amassConfig("bevigil", get=True), - 'bigdatacloud_value': amassConfig("bigdatacloud", get=True), - 'bufferover_value': amassConfig("bufferover", get=True), - 'builtwith_value': amassConfig("builtwith", get=True), - 'c99_value': amassConfig("c99", get=True), - 'censys_value': amassConfig("censys", get=True), - 'censysSecret_value': amassConfig("censysSecret", get=True), - 'chaos_value': amassConfig("chaos", get=True), - 'circlUsername_value': amassConfig("circlUsername", get=True), - 'circlPassword_value': amassConfig("circlPassword", get=True), - 'cloudflare_value': amassConfig("cloudflare", get=True), - 'digicert_value': amassConfig("CertCentral", get=True), - 'digicertUsername_value': amassConfig("digicertUsername", get=True), - 'dnsdb_value': amassConfig("dnsdb", get=True), - 'dnslytics_value': amassConfig("dnslytics", get=True), - 'dnsrepo_value': amassConfig("dnsrepo", 
get=True), - 'deepinfo_value': amassConfig("deepinfo", get=True), - 'detectify_value': amassConfig("detectify", get=True), - 'facebook_value': amassConfig("facebook", get=True), - 'facebookSecret_value': amassConfig("facebookSecret", get=True), - 'fofa_value': amassConfig("fofa", get=True), - 'fofaUsername_value': amassConfig("fofaUsername", get=True), - 'fullhunt_value': amassConfig("fullhunt", get=True), - 'github_value': amassConfig("github", get=True), - 'hackertarget_value': amassConfig("hackertarget", get=True), - 'hunter_value': amassConfig("hunter", get=True), - 'intelx_value': amassConfig("intelx", get=True), - 'ipdata_value': amassConfig("ipdata", get=True), - 'ipinfo_value': amassConfig("ipinfo", get=True), - 'leakix_value': amassConfig("leakix", get=True), - 'netlas_value': amassConfig("netlas", get=True), - 'networksdb_value': amassConfig("networksdb", get=True), - 'onyphe_value': amassConfig("onyphe", get=True), - 'psbdmp_value': amassConfig("Pastebin", get=True), - 'rikiq_value': amassConfig("PassiveTotal", get=True), - 'rikiqUsername_value': amassConfig("rikiqUsername", get=True), - 'pentesttools_value': amassConfig("pentesttools", get=True), - 'quake360_value': amassConfig("quake", get=True), - 'socradar_value': amassConfig("socradar", get=True), - 'securitytrails_value': amassConfig("SecurityTrails", get=True), - 'shodan2_value': amassConfig("shodan", get=True), - 'spamhausUsername_value': amassConfig("spamhausUsername", get=True), - 'spamhausPassword_value': amassConfig("spamhausPassword", get=True), - 'spyse_value': amassConfig("spyse", get=True), - 'threatbook_value': amassConfig("threatbook", get=True), - 'twitter_value': amassConfig("twitter", get=True), - 'twitterSecret_value': amassConfig("twitterSecret", get=True), - 'cisco_value': amassConfig("Umbrella", get=True), - 'urlscan_value': amassConfig("urlscan", get=True), - 'virustotal_value': amassConfig("virustotal", get=True), - 'whoisxmlapi_value': amassConfig("whoisxmlapi", get=True), - 'zetalytics_value': amassConfig("zetalytics", get=True), - 'zoomeyeUsername_value': amassConfig("zoomeyeUsername", get=True), - 'zoomeyePassword_value': amassConfig("zoomeyePassword", get=True), - 'yandex_value': amassConfig("yandex", get=True), - 'yandexUsername_value': amassConfig("yandexUsername", get=True), - - 'token_1_value': GithubConfig('1', get=True), - 'token_2_value': GithubConfig('2', get=True), - 'token_3_value': GithubConfig('3', get=True), - 'token_4_value': GithubConfig('4', get=True), - 'token_5_value': GithubConfig('5', get=True), - 'token_6_value': GithubConfig('6', get=True), - - "imagePath": imagePath, - "apikeys_settings": "API Keys Settings", - } - - - return render(request, "apikeys_settings.html", context) \ No newline at end of file diff --git a/web/editprofile/__init__.py b/web/editprofile/__init__.py deleted file mode 100644 index e69de29b..00000000 diff --git a/web/editprofile/__pycache__/__init__.cpython-310.pyc b/web/editprofile/__pycache__/__init__.cpython-310.pyc deleted file mode 100644 index 42a2993477c15d99f4437d9be65943272b31adc0..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 135 zcmd1j<>g`kg4aBMlR@-j5P=LBfgA@QE@lA|DGb33nv8xc8Hzx{2;!HPeo=mYiGERP za(-S~Nx6P`YLb3xN@htxQGQxxPO5%C-&o>- z|3t#x4_1+owx=^|Z%vV@<-$ddg`zV>t;PtFBoqjC(}aPS_`s0GZ_n;Q)wVR5(sD=h zdI8U(SegjZC^sN=72T-itf|d|k{a59Jao*%UK?;;h+J~s_BqFfMmu}R`AZ}8K64H- z6OFF9KpLKPJig0)lG0tJgWp-aGP#UF6e=~__)b>q+Ss_13*4{XOjcqo)Fc>eb=8OYJ8OWkHn`d?a_vr7Il>_s 
Fvw!g`kg1b44$sqbMh(HF6K#l_t7qb9~6oz01O-8?!3`HPe1o6vBKO;XkRX-&$ zFEc5>Qr{)DIJ+djK))z8IX^G0q+B;6F*`9+H#adePrp1hNk26uv!tLXKP@vSRX;7i cD7RRzpz;=nO>TZlX-=vg$n0V!Ai=@_0DbZ$&j0`b diff --git a/web/editprofile/__pycache__/imgUser.cpython-310.pyc b/web/editprofile/__pycache__/imgUser.cpython-310.pyc deleted file mode 100644 index 2fdbdc25c94d1d9bad822c47c41e5be54763b99b..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 471 zcmZ8dO-lnY5S?VR+pV-BC=}6?w>{YX1Ac+xu?XU&x7wsNy4zinDb=z)=?`g-{-s>S zgMXo*IPr_(z`V(1-kUs9G@ENc@!ELr?=yfG->e3w=7iGj5=f9NgYI`_T_(8sz z2q@-aL7I6eYs8DU!7Dv0sduKE7JI1ssm_O2RvGILQ_(I*!}IA*`hR$)2ICCdQGV9_{USSQLJ-ndBJYm3yO&Xo&onQ2V8<6649&U-_3L9Aym4N@%|*9B1mt+qV%x3^8l)PhHP$uK)l5 diff --git a/web/editprofile/__pycache__/models.cpython-310.pyc b/web/editprofile/__pycache__/models.cpython-310.pyc deleted file mode 100644 index 498bc7c4b3bbe0ba9235fe382bb8044ece6d6623..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 575 zcmYjPJ5B>J5ViN0O$Y@I2cU~YvKK%oQV>+6q1(oCG7h_mcQ@D@@ev)gR7l)`qabeJ zmMT|32r;t(l(FV{#^ZT2{#3iW9ftD!_BDCO`GcHo3D6m$+5-e&z%?s)$~ik@;DP_h zz*l_kJ@}~)!3FDuYuv|r+(ktNO4}*9+v1}$M74(qilrP_>M4I700-}hr(t9JjU7P% z;Ui015FvHTwdJbL#L6nOe!*<4(kbMP(5k$wv-LMFO*z7!kWD&5wGqNSn=>ThW6TUy zA#;sJV3nS|GSkKlpQ9wJ3YCB?^GS7^oG81ft7T%;v?^!ybw85}nfEcXOs>@=QIOY5 zQ_XU%lFfMs%UjnH!q!HJ7S;6-B7V2`-}>M;I_||T7NV4e62f(aK!TMfyDP-iO6pC- z5P@rtGHFh8rNPB?eu^35H#-E>`xelX8oFpw8zaQ5%W-(k>bMuUHq2#tSq)&aDG;91 ab2Z^DqK22)Tzs@?@0c#s(y!>UeeVZqyM#*s diff --git a/web/editprofile/__pycache__/urls.cpython-310.pyc b/web/editprofile/__pycache__/urls.cpython-310.pyc deleted file mode 100644 index 47a62a974a2938ad93102ffa2bcfe71e47dc0ccb..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 296 zcmYjN!A`?43{9G(Ev(}+ka}4p&Is`ZT)6czmDwAe+9paf3Ln9T@Jr#y0qqx%5ZGb| zEcw|_diJyImdkTN@mYOu-l={t`5%VJE#-7Y00Lw$2H8j{AOqEBgK8A@y3r`XpxPVA zp?Fn0+2l~(i~8(HbE1|>`!+rvlc?8V)&wCP-!L{hEcLV7ed;c|;83^d>^ZI)<-H8=Q+a;Kq&HINKQ?EE}JE cE$?3WmsDO44h}uu@R1Y0r4p6Y>Oz-4zY~>9&Hw-a diff --git a/web/editprofile/__pycache__/views.cpython-310.pyc b/web/editprofile/__pycache__/views.cpython-310.pyc deleted file mode 100644 index d399fd1837d591711f0adb6a7287e65eaf154a8f..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 1830 zcmZuy&2Jk;6yKSh{qTD2ByAz2I`Gy@-~m36W-z7r_LoAc6fN?^fq3BPp3zWFGazH1k}ySNm?9B@!KMm zf&+*Bs;s1wC#T|go+z~}B?`74rI|RoP+H_ix|GR8ydsUcAk8PZ>R^P7*H0S&gRu`o zoq>dtnpDorsi`J2I&&+^>2p%Mz+*MzZpF?V`BCN0yb5MJ&Z!GCiz@fnFKBo>4iCZFDPVsM2WuE~YOivyg37xlGHo=0g3c+GV~~Um z?;3Jdc)V4Ug7T;)ybb$rfydu@tRp<%tbIwg9p2e?YG22*&Q-Va!R9j^Uu_^F4zA#B z@EGXLOQ#O2@M^2_tKgc}E#9^JehJ>Xu)huP{?b}@*X%|&b|bL+(%J@Oq~hCaN>xoIDWgz%*cVAb?Zc^~wQKOUSv@F&nQta|{JQiquZ?%2n=Z(Trmc?Yxw6TZ| z(~(|CvAU}?m>CUjYD*zlvHrZ6nAXoCFVDo|d}3%>^zImj=aE=!CMZv4Vx*OECqf%` z_|wrZ_K*_G(R4>sF*?1iY7;A=-^{#6kAM9B$hb;NLsJgMm0|NlPmM3mQw2t$anQ5$ z(HK+7ndr4mfZZdMHlY-1UKX$y8&;p6;DNwI2^M2mB0s=&ett2ARx)&~Oo#|GVOnVp zEp-UqUO2J`dVha5kY%X{Qeaf|at zq1z=92^rG&yiZ9;BDzU7LBeR{-y<=Nfa8-GbYgPH&ONVET=a=LOtw~J_5BPvMg}!%Z47!_e*Y-Vha?Nyr@>zIE zp#MNtF}N4l9}Ie@RXNbr{-q05>>tQ?!H3*Hf@ke9`8j+{h%r9ETE+~^kAbYVFd)Y_ zaq3TXp7kOKyU^2;+vCZ*sEY+7?;&{yiCrHedVqui=>>8RG3L{nmY*Q9n$~*&xzXZW z+l)2j3(zoL3K#784*=DDAdwFlaT)0{pT?BBl;G?C69@7u5cj%flh2?<%f6bHQjZo| zS^wt48#cO;=}A9X=xLurMaTs1U?uT)V(!09DKELmRMQj>-li~3)Kq{8%%R!2)sUi( z)usBXx#VrFOBg`k0`J`)lR)%i5P=LBfgA@QE@lA|DGb33nv8xc8Hzx{2;x_Oenx(7s(wmh zUS?8$rM^pQadt_5fqqeHa(-S~Nx5!DVs>JtZf;^`o_=|1l74DRW=TO&ep+Tus(x-} 
qdQoCYW`16=etdjpUS>&ryk0@&Ee@O9{FKt1R6CGO#Y{kgg#iFz4J-Zt diff --git a/web/editprofile/models.py b/web/editprofile/models.py deleted file mode 100644 index a186bc67..00000000 --- a/web/editprofile/models.py +++ /dev/null @@ -1,8 +0,0 @@ -from django.db import models - -class auth_user(models.Model): - username = models.CharField(max_length=150) - email = models.CharField(max_length=254) - - def __str__(self): - return self.email \ No newline at end of file diff --git a/web/editprofile/tests.py b/web/editprofile/tests.py deleted file mode 100644 index 7ce503c2..00000000 --- a/web/editprofile/tests.py +++ /dev/null @@ -1,3 +0,0 @@ -from django.test import TestCase - -# Create your tests here. diff --git a/web/editprofile/urls.py b/web/editprofile/urls.py deleted file mode 100644 index 4abb2d3e..00000000 --- a/web/editprofile/urls.py +++ /dev/null @@ -1,10 +0,0 @@ -from django.urls import path -from . import views - -# Namespace name -app_name = 'edit_profile' - -# Be careful setting the name to just /login use userlogin instead! -urlpatterns=[ - path('', views.index, name='index'), -] diff --git a/web/editprofile/views.py b/web/editprofile/views.py deleted file mode 100644 index 6a268ec3..00000000 --- a/web/editprofile/views.py +++ /dev/null @@ -1,103 +0,0 @@ -from django.shortcuts import render -from django.contrib.auth.decorators import login_required -from django.contrib.auth.models import User -from django.contrib.auth.hashers import make_password -from django.core.files.storage import FileSystemStorage -from .imgUser import imgUser -import os - -def edit(request): - response = "change made!" - - username = request.user - dbUser = User.objects.get(username=username) - - post = request.POST - - - if 'username' in post: - if dbUser.username != post['username']: - dbUser.username = post['username'] - - - - if 'email' in post: - if dbUser.email != post['email']: - dbUser.email = post['email'] - - - - - if 'CurrentPassword' in post and 'NewPassword' in post and 'ConfirmPassword' in post: - if post['CurrentPassword'] != "" and post['NewPassword'] != "" and post['ConfirmPassword'] != "": - CurrentPassword = post['CurrentPassword'] - - if dbUser.check_password(CurrentPassword): - if post['NewPassword'] == post['ConfirmPassword']: - dbUser.set_password(post['NewPassword']) - else: - response = "new password and confirmation password are different!" - else: - response = "Current password wrong!" 
- - - - - - if 'UserPicture' in request.FILES: - path = "static/imgUsers/img"+str(request.user.id)+".png" - - if os.path.exists(path): - os.remove(path) - - myfile = request.FILES['UserPicture'] - fs = FileSystemStorage() - filename = fs.save(path, myfile) - - - - if 'RemoveImg' in post: - if post['RemoveImg'] == 'on': - imagePath = "static/imgUsers/img" + str(dbUser.id) + ".png" - - if os.path.exists(imagePath): - os.remove(imagePath) - - - - - dbUser.save() - - - return post['username'], response - - -@login_required(login_url='/login/') -def index(request): - - - - - if request.method == "POST": - username, response = edit(request) - - else: - username = request.user - response = "" - - - - dbUser = User.objects.get(username=username) - - email = dbUser.email - - imagePath = imgUser(request.user.id) - - context = { - "imagePath": imagePath, - 'UserName': username, - 'email': email, - 'response': response, - } - - return render(request, "edit_profile.html", context) \ No newline at end of file diff --git a/web/manage.py b/web/manage.py deleted file mode 100644 index 19be6dd3..00000000 --- a/web/manage.py +++ /dev/null @@ -1,22 +0,0 @@ -#!/usr/bin/env python -"""Django's command-line utility for administrative tasks.""" -import os -import sys - - -def main(): - """Run administrative tasks.""" - os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'web.settings') - try: - from django.core.management import execute_from_command_line - except ImportError as exc: - raise ImportError( - "Couldn't import Django. Are you sure it's installed and " - "available on your PYTHONPATH environment variable? Did you " - "forget to activate a virtual environment?" - ) from exc - execute_from_command_line(sys.argv) - - -if __name__ == '__main__': - main() diff --git a/web/projects/__init__.py b/web/projects/__init__.py deleted file mode 100644 index e69de29b..00000000 diff --git a/web/projects/__pycache__/__init__.cpython-310.pyc b/web/projects/__pycache__/__init__.cpython-310.pyc deleted file mode 100644 index caf5c74cd25cb66d273d8ce322a809683d4803ec..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 132 zcmd1j<>g`k0x{md$sqbMh(HF6K#l_t7qb9~6oz01O-8?!3`HPe1o2BK0Y%qvm`!Vub}c4hfQvNN@-529ms%UCLqDW005TM B8)g6i diff --git a/web/projects/__pycache__/apps.cpython-310.pyc b/web/projects/__pycache__/apps.cpython-310.pyc deleted file mode 100644 index 1bea93fa26b7b28c617fd118fcae2cdc69592599..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 415 zcmYjN%}T^D5S}z$srw@+dldvvy|gbNva6uCMex>3=yuZ5rEO}{vglPG!I$ura`ogZ z2!fNg;ttH0`T6oqGHS7y0@>-}CwZsy(YK;zXTedAAzCqxyOQV25@g4kVQ)hoY)$r77yB=)?=dk3zC;?;# diff --git a/web/projects/__pycache__/models.cpython-310.pyc b/web/projects/__pycache__/models.cpython-310.pyc deleted file mode 100644 index 54d4e13be62e8ca5a300cc9d2a85b4636b95e8f6..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 1168 zcmZWp&2QT_6elHFk{#Jj(>80jVJHgtAYhB`!!R^(o5qK_1?-`iE(9Yowmf~LB(-eD zZtH14fZlp-kNuZ;?J57lfB}0?N(W=5z^C^~{@#aF4~GK+>tOg#_SPljBMQ6A1K|;D z{Sb&Cf)=EtDW&8o5sq+Q6X8m_asG5uS9l8&_pZT@#MJs_Eo7lH6nB>o!Xw!FFCdbn zRFKq>?#2^TI9D|70q+YqO!TgtGyo|Sz6ik52Yw(z(T8Vn<)(XLFO6DzBqK2tBhU?7 zd3@uwf90j4&opCkOB}o=>Atuv?!fz&Z~^#T8@y2UMrP)k0y4-T5b~Tig_&;;wc#eu zlDu3chO3n{EYIpHc2o~sZNIrFYA%?m^KT&*LI)-PjTN$5nf1vsc5TQTQ?shRw7s)} zSHD=VYKkIuW5G6C%n?D6E<5iVcE7YFM=Cj4=lgGg2Rj$`kfHrt}JX@k`@RH{S zJg@t3zZxT6R|SYA$kuh9N!=pL-yj^Xv-nMwziOmvL$K)?=J{zM5B(=o*!mkFe~=AP zeWdTGpzrCT)d24xAG`DI{zN2`GMW7R;JXKt)9)wGwxjxw%X(oodbu673ayP 
z5sjSf7dH(&`k&utiDI&Cq8N-RLPB(oLo4c%=ao7HeG9E4_d&D)V}PHr+aPEMh)=iU zPdDD!;sYBpR`F6YW(SM`_NG8SV(eAJi_W4j+TMRrYiQ!Ha#8Z4m{mruq&mrEA*^3D z<(X7=Fe~{=b~+Cy&i1Ek-Z`nS@Q~qSZsZb5+B&H_s2eSo$IG7^Pk`G<2_EU$EeSB-9qW0;IfDs^ZqkcNP##qga)?nJ zVn`tU5=0qM0@Ewah*HP{?M0Qr{E01#GkLNl_t8+h*E5X|5Wlr}vOXJ(K49alXFhHf z;CpCPb&sEmb*tP%3mXa8p+o7DF}bNUwQ8`V%9w1IQ=h}3R(riVn$DUD4KqoQh{R|0 zFzkZ42?zUf^xIu`@cHPnv-JC%3}|78M%iy>7QrOQ@4YlyJ87`d>cySp37_Tv6*5d+ c4Ie&R_Zp5JZo;{?B$=~0k3<=jyyV&C4{8`~8vp7&-72erhlFMIF6sdpPjAJEM9Fs;6poN;WQ6f1O;7Eof$2F6bpg1du^zM?L z9a=KGYz?S6q<{h6(*P3COYQ}F>@mHxr=GT#0s#v6)=LA_XpnwymXzfby_Lewym|B9 zyti-P->8$#rZoIESH5jlE@;}5SI8W4LC`RIuU1 z+;}anU=wtST0+4w$8wXkq=Ms4%1zhO3Qjl~H(Sdp*m832SZ!=blR2eHI(c`zHm>j~ zr%)?EX3CkUO{lq8E2_CvE2(+1HiaU6SK1M) z&7ES#c|l?3Pcaj`s4(YvYD-%zJ;l3gi+VV|XUlqLr)@VeiTaz@-VEcy_P8w;S(vyb zIz?5v_Wi78^rM>xG}I~mYXsc#E#e$Ow6>T7Q~LnPBj0WzzA5N^Vn~UK~m-ejmLxJ zk+C)0f1-<*+BnhPzY~~pTF%hwh}|=uF!47zJ6hwMPY0k3vf$1^=M>J&jdUj2$E6?^ z+K_1XJpz}N(jGyR(0{AoFFO2wu1hgqc+VCuP5~CfmXZu2s zBR3pOfNu=D6QkW=0($*$Pz(&v6oX>B%=3?RWxcg!awVezmq1_(_Q{bLb)}Ms+jTw?M z)h^R{gXv(JPhp;k-jibuO@JW^J`=sm6XyT9|ET{n`O@)4J{z4jMRuMIX0i7%p9A&F z^-Iun9=p>cO=b2-gAYs{FPvcZ-U?<(`d|)p=Rh}mO7}|pVldZ!m7nJqc=@q0m=EUL zm-$Qgbojup>FoAGFe4X%eeH;W>f&RA{D!|A%tSux<<97EiQYkZ-%#=$%{y;u@|E_L z_UmfAx{H$^{+>ZAB%J5kFMOZ#uI8}=?T+?@-JcwuPyy$kB2IWN>2`@^-BmOPq}3}U zt%wWjS7{$`0`O&mU-){J!8v&KIr8ej;*0ne)(bv&J~)rBI5Q{(R#2)}gVM3GCeC^d zHY~NPWJP{O`Oc}2K-$0X{QZALFxQYGUtbU7yyG@nUO&ERH#=TcP}=F2C_$BZqug!y zvQbuBOXWo)9P38e(yw=VvfGnkvgJ1RY$6Ynqm1bHvm>&~zH}YrHRGQB@ENN<{SLo= zYU}-#jSrUB)|blt@rAor7u>50yu5H@Y2kyVg{^*ayAGjl7rU9=me+9VydiCfXJU+$ zi*?%)D_yO^xYu)cY|+m}bG_N=d1^aMJ5avf+;4b$wwNT&!q)Q54>xX?udl5y->k1J zZ?AsTH$H)!iJ`s`H`R)za>y#r%u1`-=}$o&*=kmi&8xB@sG0!nRZ&_@0;0py`%-q7 zs?~#ogGzt$3`@_u=XDM|byQ(}d854g)3vSb)$-bV8|$kOo+6-pd-LW}dHX4qb(|ab zrLY@rm>`?N)KC9NcvKF*@xboq={=U$S2j-1D6FhqU)@|?$FVD`+7qU%q|osdAw7+Ddu-_6P5-lKgb} zqv(6X)Qi^yy<%wgP!~hJ#f62XKuOcWL{y{w&}cYL7=O@kdbS_N)E5;QqOj`qT@>ef zJxmDO?>GU`j!AX^OY-viQUEna*WV` z05MBim|nxR1=VrtEAHdPB;FHsK1`FV^nAD;8j=_hB*032NSLw@n>HkMJU<+Juj6@W zMzB~Fq9elOjqUAQ5t!bz8+<$RlrT5)yh2+I}cU-g{kMU7-r4}+fd&_8ICnXbHDWfy!a?SY;KJZVSWUip2u3D$+~48J1xwHmzIuD*#Hdf{|wy z%VVEb8JhLHo&%?47J)CCW!;Pybp0RsqQ!E$#pao%mvx=#tia-mHx5n<=M?meIn8p| z&j4>engumU)W3~CDwqYPf0u^_pfoJ(<1C%{FY4yMGl`60ekM2+Z|WFEKMGWeUDbsC z7-5bU6gP`|y7)pesRxYfX#8|(1bW*Yg;!~S40U*+!LZ29U6y+hhhe(ZIpy; zYtQQlyN=}35q^J6rG%yOf?vLhq4I2$5Bw$Nl#8=r+_mL?hYOOSGOxHo5alWH1_laf z*KHsVq6|@P3e(Mo*R-9wk6BcSjuM4h(!#b-tWm4GPiJ1y^xxP?RFm6;x*b# zY-~lz%A)K-k%;gI8EGwErZaw|&WNs3B1xDyhDA%h$s3K+Z=V>fr z5AzDEf_#W!whG{AXcA{G+8Ocw7C4_?KZS@vv|&sk4k;Gr*(5Xd45But;xfm|%zC7M zS1=Gky7{#T!1h9{P#&k6redWA;51gQNL<_6xK_a<*g&UP85rE)=AMqLS_~JUIQiDN6gqy?6if~|n(*<8DomA~26_jwu+lU< ze@fv=(b}mrdUC(Qk)TCGA|gbB4$lmU$U%M8bA)65@V@R>RN_`X7f#^PFX3h+sxrL4 zM=eVRxxV7tQntK3KXUu~q=brx9}x5t8r~vt3GzRuwG*{1DZjUaBb&DGN5-bys+>mI t<%}hFXhV6Y^%ksveXM z#1G)ajlX1x6Q}+Jka&zYMXfDcGxN=Op4OdClhNvQe#hTE#(p^F=JJVoL8o{`7ck%{ z%lLq2Vjwts%|L+niGhcF;(hc69{5MByL3sqtSf9YQ;CtK(s^NPnFdUY*g-efGBGdc z6kq5g}DoN99Q9zdsmxgD9c&t^7 zMHQu4b)g2a&N7*U`s@Pq%o+(R#H>3TLGe`e7%5Dl=q}rOR8=U7Y%_{zdS)pS-HxJn zGnqPF;sOe&q`KNR;1qMfJXYjYIESshsu6G9+!>l}X4@}SvP&&Ga4ziTp)Sby439FI zroFtx1kD0!_A;5^f^O`M<(0UzPmMiR*%i{%Q@k4QZ_I4FeBAZxX=t=ALxa?)QTaZ6 zkHc`fn3f{E{u1HN&d!tQDuM0kxp9B3MyGg4SDpESwlDaaSmrC-oejRq#ZUiF{cok) or;%&Ggu1G?VOS+yfjN5cOAfov2$O4&@&p?WI{bBeVz!PV@q&Tr6+K-6lBpVrNx2(`zNfzu+(V z%89>#RF!eMr5$NTe)Ft(Z*0Fx!%=jD&5TKR}w4noQ7$xrs zFyL1NxS>7&&KnNmBUu)I@Ge=hR1MuW=&|!IrrF@Z*w|+3aYzJf%(%j0NJD`%4Ct0N z+=#x&0%q3FK|p~yyXG*z=EzErC{?EN!@ULy%3biiX~&q3R;$$u)wyo$v`zCvKb5i7 
zPGu{fx*xdjlBZ&0wfl@-^;cz$+m07WxV^PVRg-7s^kA@;YjNHg3umY+)znuiRVokM zTZ88#rM~R7&37{bvoj8iYdaGI!v&Lk1Mn`-2t32`?S&2#PL_tD_3nJUtS`-`zF)I`c;FfreyI+G`oHGs?pw%zIKzxju}H$WxD^GXj2=^QBX1}E oY$;L+y>?rF3Y*F6;hZF2-EI9EpOM6@@r0f5e>JV0egFUf diff --git a/web/projects/migrations/__pycache__/__init__.cpython-310.pyc b/web/projects/migrations/__pycache__/__init__.cpython-310.pyc deleted file mode 100644 index 9f7d4333065b7ff39c37444f76ec17c58442e7df..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 143 zcmd1j<>g`k0s-E?$sqbMh(HF6K#l_t7qb9~6oz01O-8?!3`HPe1o6vAzbHSyM87CC zIX^G0q+Gu|HA%mqC_gJTxujS>H#5B`u_QA;uUJ1mJ~J<~BtBlRpz;=nO>TZlX-=vg M$gE-}Ai=@_01iMO*8l(j diff --git a/web/projects/models.py b/web/projects/models.py deleted file mode 100644 index 0d0ba10e..00000000 --- a/web/projects/models.py +++ /dev/null @@ -1,22 +0,0 @@ -from django.db import models - -class Project(models.Model): - number = models.PositiveSmallIntegerField(default=1) - icon = models.ImageField(default=None, upload_to = 'static/img/target_icon') - domain = models.CharField(max_length=300) - last_change = models.DateTimeField(auto_now=False, blank=True,null=True,) - STATUS_CHOICES = (("SCANNING", "Scanning"),("FINISHED", "Finished"),("WAITING","Waiting")) - status = models.CharField(max_length=9, choices=STATUS_CHOICES, default='WAITING') - command = models.CharField(max_length=400, unique=False, blank=True, null=True) - scan_mode = models.CharField(max_length=400, unique=False, blank=True, null=True) - - def get_last_change(self): - if self.last_change: - return self.last_change.strftime('%d/%m/%Y - %H:%M') - - return self.last_change.strftime('%d/%m/%Y %H:%M') - - def __str__(self): - return self.domain - - \ No newline at end of file diff --git a/web/projects/tests.py b/web/projects/tests.py deleted file mode 100644 index 7ce503c2..00000000 --- a/web/projects/tests.py +++ /dev/null @@ -1,3 +0,0 @@ -from django.test import TestCase - -# Create your tests here. diff --git a/web/projects/urls.py b/web/projects/urls.py deleted file mode 100644 index 6f720165..00000000 --- a/web/projects/urls.py +++ /dev/null @@ -1,13 +0,0 @@ -from django.urls import path -from . import views - -# Namespace name -app_name = 'projects' - -# Be careful setting the name to just /login use userlogin instead! 
-urlpatterns=[ - path('', views.index, name='index'), - path('/delete/', views.delete_project,name='delete'), - path('/cancel/', views.cancel_scan,name='cancel'), - path('/backup/', views.DownloadBackup, name='backup'), -] diff --git a/web/projects/views.py b/web/projects/views.py deleted file mode 100644 index 7eee0fc5..00000000 --- a/web/projects/views.py +++ /dev/null @@ -1,249 +0,0 @@ -from django.shortcuts import get_object_or_404, render -from django.http import HttpResponse -from projects.models import Project -from django.core.files.base import ContentFile -from django.shortcuts import redirect -from django.contrib.auth.decorators import login_required -from django.utils import timezone -from schedules.views import deleteScheduleFromId -from editprofile.imgUser import imgUser -from schedules.views import timezone -from web.settings import BASE_DIR -import shutil, os, time, requests, favicon -from pathlib import Path -from subprocess import Popen -import zipfile - -# Main Projects Page -@login_required(login_url='/login/') -def index(request): - - imagePath = imgUser(request.user.id) - - timezones = timezone() - - projects_output = Project.objects.all() - db_projects_count = Project.objects.values('domain').count() - - path = Path(__file__).resolve().parent.parent.parent / "Recon/" - - files_sorted_by_date = [] - final_date = [] - number_count = 0 - - print("Path da pasta Recon: " + str(path)) - - ''' - Case 'Recon' folder is not created, just load the page without any data. - ''' - - if not path.exists(): - context = {'projects_output':projects_output, "imagePath": imagePath, "timezones":timezones} - return render(request, 'projects.html',context) - elif path.exists(): - # Sort Archives by Creation Date - archives_parsed = sorted(Path(path).iterdir(), key=os.path.getmtime) - - print(archives_parsed) - - for archives in archives_parsed: - archives_by_date = str(archives).split('/')[-1] - files_sorted_by_date.append(archives_by_date) - print(files_sorted_by_date) - - # Get Domain and Creation Date - if db_projects_count != int(len(files_sorted_by_date)): - for i in range(len(files_sorted_by_date)): - sgdomain = files_sorted_by_date[i] - print("SGDOMAIN: "+str(sgdomain)) - - ti_m = os.path.getmtime(path / sgdomain) - m_ti = time.ctime(ti_m) - t_obj = time.strptime(m_ti) - T_stamp = time.strftime("%Y-%m-%d %H:%M:%S", t_obj) - - print("T_stamp: "+str(T_stamp)) - - final_date.append(T_stamp) - - print("final_date: "+str(final_date)) - - pjtfor = Project.objects.filter(domain=sgdomain) - # print("pjt: "+str(pjtfor)) - - - # Save Domain - for pjt in pjtfor: - if not projects_output.filter(domain=pjt, number=pjt.number).exists(): - print("number_count: " +str(number_count)) - # pjt.save() - - # Creation Date - if not projects_output.filter(last_change = final_date[i], number=pjt.number): - # pjt.last_change = final_date[i] - print("SALVOU FINAL_DATE["+str(i)+"]: "+str(final_date[i])) - - # pjt.save() - - - # Number of Projects - project_count_obj = pjt - - print("project_count_obj: "+str(project_count_obj)) - - - # GET ICONS - for i in range(len(files_sorted_by_date)): - sgdomain = files_sorted_by_date[i] - - target_iconfor = Project.objects.filter(domain=sgdomain) - for target_icon in target_iconfor: - puredomain = str(target_icon).split('.')[0]+str(target_icon.number) - - name_icon = puredomain+".ico" - - if not Project.objects.filter(icon = "static/img/target_icon/"+puredomain+".ico", number=target_icon.number).exists(): - try: - try: - icon_url = 
favicon.get('http://www.'+str(target_icon)) - if not icon_url: - target_icon.icon.name = 'static/img/unknown.ico' - print("NAO EXISTE ICONE: " +str(puredomain)) - print() - target_icon.save() - - else: - icon = icon_url[0] - print("ICON URL: "+str(icon_url)) - - response = requests.get(icon.url, stream=True, timeout=10) - - - - if response.status_code == 200: - target_icon.icon.save(name_icon, ContentFile(response.content), save=True) - print("SALVANDO ICONE: "+name_icon) - else: - target_icon.icon.name = 'static/img/unknown.ico' - print("DIFERENTE DE 200: " +puredomain) - target_icon.save() - - except (requests.exceptions.ConnectionError,requests.exceptions.HTTPError): - target_icon.icon.name = 'static/img/unknown.ico' - print("ERROR: " +puredomain) - target_icon.save() - except (requests.exceptions.ReadTimeout): - print(target_icon.icon.name) - - else: - print("ICON ALREADY EXISTS: "+ name_icon) - print("----------------------------------------------------------------------") - - else: - print("EVERYTHING UP") - - # Project Number - for u in range(len(files_sorted_by_date)): - sgdomain = files_sorted_by_date[u] - number_count = number_count + 1 - - project_count_objfor = Project.objects.filter(domain=sgdomain) - - for project_count_obj in project_count_objfor: - project_count_obj.project_number = number_count - project_count_obj.save() - - print("ID NUMBER: "+ str(project_count_obj.project_number)+" [DOMAIN]: "+str(project_count_obj)) - - - context = {'projects_output':projects_output, "imagePath": imagePath, "timezones":timezones} - - - return render(request, 'projects.html',context) - - -# Delete Projects Function -@login_required(login_url='/login/') -def delete_project(request, id): - if request.method == "POST": - project = get_object_or_404(Project, id=id) - puredomain = str(project.icon).split('.')[0] - - command = str(project.command).split("'") - del command[0::2] - - if project.status != 'FINISHED': - cancel_scan(request, id) - - if os.path.exists(command[-1]): - path_projects_delete = command[-1] - elif os.path.exists(f"{BASE_DIR.parent}/Recon/{str(project)}"): - path_projects_delete = f"{BASE_DIR.parent}/Recon/{str(project)}" - else: - path_projects_delete = "xxx" - - path_icon_delete = str(puredomain)+".ico" - - try: - shutil.rmtree(path_projects_delete, ignore_errors=True) - os.remove(path_icon_delete) - print(path_icon_delete) - except OSError as e: - print("Error: %s - %s." 
% (e.filename, e.strerror)) - - Project.objects.filter(id=id).delete() - deleteScheduleFromId(request, id=id) - - return redirect('projects:index') - -@login_required(login_url='/login/') -def DownloadBackup(requests, id): - - project = Project.objects.get(id=id) - if project.status == "FINISHED": - command = str(project.command).split("'") - del command[0::2] - - tempFolder = "/tmp" - folderPath = command[-1].rsplit("/",1)[0] - folderName = command[-1].rsplit("/",1)[1] - - if "/" in folderName: - tmp = folderName.rsplit("/", 1) - - folderPath = tmp[0] - folderName = tmp[1] - - if os.path.exists(tempFolder+"/Backup-"+folderName+".zip"): - os.remove(tempFolder+"/Backup-"+folderName+".zip") - - os.chdir(folderPath) - with zipfile.ZipFile(tempFolder+"/Backup-"+folderName+".zip", "w") as zf: - for item in Path(folderName).rglob("*"): - zf.write(item) - zf.close() - - backupFileName = "Backup-"+folderName+".zip" - - file = open(tempFolder+"/"+backupFileName, "rb") - - response = HttpResponse(file, content_type='application/force-download') - response['Content-Disposition'] = 'attachment; filename='+backupFileName - return response - else: - return HttpResponse('Scanning is not completed, please wait.') - -# TODO: Cancel Scan Function -@login_required(login_url='/login/') -def cancel_scan(request, id): - if request.method == "POST": - project = Project.objects.get(id=id) - domain = project.domain - - cancel_cmd = ['pkill', '-f'] - - Popen(cancel_cmd+[str(domain)]).wait() - - Popen(cancel_cmd+['/Tools/']).wait() - - return redirect('projects:index') diff --git a/web/requirements.txt b/web/requirements.txt deleted file mode 100644 index 06900364..00000000 --- a/web/requirements.txt +++ /dev/null @@ -1,39 +0,0 @@ -bcrypt -amqp -virtualenv -django-celery-beat -colorlog -asgiref -async-timeout -beautifulsoup4 -billiard -celery -certifi -charset-normalizer -click -click-didyoumean -click-plugins -click-repl -decorator -Deprecated -Django -favicon -idna -kombu -packaging -Pillow -prompt-toolkit -pyparsing -pytz -redis -requests -six -soupsieve -sqlparse -urllib3>=1.26.4 -validators -vine -wcwidth -wrapt -untangle -PyYAML diff --git a/web/scans/__init__.py b/web/scans/__init__.py deleted file mode 100644 index e69de29b..00000000 diff --git a/web/scans/__pycache__/__init__.cpython-310.pyc b/web/scans/__pycache__/__init__.cpython-310.pyc deleted file mode 100644 index e067f00169a864e470cfa64ec74128aaccf2c485..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 129 zcmd1j<>g`k0yf^i$sqbMh(HF6K#l_t7qb9~6oz01O-8?!3`HPe1o2BnzbHSyM87CC yIX^G0q+Gu|HA%lXIWezTKR!M)FS8^*Uaz3?7Kcr4eoARhsvSsoF%ytrVE_OuVHyDd diff --git a/web/scans/__pycache__/apps.cpython-310.pyc b/web/scans/__pycache__/apps.cpython-310.pyc deleted file mode 100644 index 8af61a5b5daba64082af07a36e43b4b7058f5ba0..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 406 zcmYjNy-ve05I)B#3jJA7SER04k{3Xz1qrq;-LhD2&WVEDIK+u6SYhTpcqOk)yaE!U z=d=<%=|11j=g;Rev)LHP{-(eA2c2I&Ik%9KCAnP?ARyDQV;N)c2pAxL2jq&q@hre_ z1M%paSP(OJxoVnwQx{d4`^vd7N|xkyPM{!T2y~le9D^)GPAZ|BY;sxKqrmGmKFPW? 
diff --git a/web/scans/__pycache__/models.cpython-310.pyc b/web/scans/__pycache__/models.cpython-310.pyc
deleted file mode 100644
index 046e689e9dc0eb9855d40b8875f3ea17d8da4671..0000000000000000000000000000000000000000
GIT binary patch
literal 0
HcmV?d00001
literal 14402
z)L`yo!>Mn$c3L%%{-lMhb$$W2S_-IaxPt3x~86EQZQ zMJ#NFHOK5beAytq3w=WvHqJZ9ImmA$G|AWyK9$DaL=`z5^ zF}LgK6aGxEkMjd`I2{waEf2*Be<}tath+tow_+eXIyWA_zXI1x;S2 zxU!zbF_rbf>Vmt~H=Rc-!5Ey7<@h|}-?_tDiMe>h4Aj8E81(P!S%-fD2Od%<pwi^5^#1y7Ta{0xT;9od` ze^4lXK>r^k|DQk}_~zHCOUsnd!901?i<|UBCwKS;CHE+yg9x1V;rRnfXy=vFmL#W* zIZiuN{JWHVLCKetP%N2KV3R+lgaS&O!YrJY{hSt*oK^swyj1CeadJmEP0Diq;9t=b zjTTNS!imtsCL3S79{fAZo8V4H5G{H}&$e={6Rjq$dP{FrTiOwS6l^hX9R-Jo>ponvd7=Cedb)gb5gciv4iK&=D*o5iR1ik zz{8IV@HU409uspI$(=FTCB&?0PRH)9Yu9e>js0EUuDv`Mhr1!xJ`3{bV{pm72vOcPXeC-UIpF*J_XzYJ`G#|p8@vS z8jxgX7aqPwte5LYgi(0qTMy!jRzxMs=F$B)R36(nM9ypf*tun^@^RyQ?YaDO{+8Wo zSEQosu|v8D*oFGcB2*`qZT2`2yh1FaDiXg{k^Ndjt*|FxL*pJXaf1GnqjuF8t+Ecg z$euiO7jYGjPO+z8?{8HwIz2j5g=!75v-b$jJbegG-gJsJ&r$0Ke-LMp5N95%l`1jX zZaSYi%v~hkCh*z5XJqE6jnTQvspvjs&#-4QP+jH~BRVr_H)U8x?GQm^DWyi3m zGZE#8XX1R`L<(tsqP7 zsnp6Y=eZ`EU0(;qbUfffG>wr-o=VlvZl%RO*I`zU$7#WI!`8^SWPE->s#FcLt>Jio zOQm9;tNzgV^vz-uZ&Nu2cBxe-P8tV;dT-virPSoLEgYT4+gCp7bw0WA;cK0bk4;r| zsSczg)+Uvbwf;1r3kjfQ4b%M+W&gWr@VV!ySS-+Imw4ghu7;| z=&&4^oJbw)4RghX4*1=n1opFXS}5TgcY{eP&?KU83EhfrQJba*oKdab%iVT=}-GB)H{onAU1qQ}FN&`&SdqSL)&~JIIT4UzAF1{&<$(4xF}=L1MJtKnfpT~ zUNwiwM0s zSi3LEO2Id+C7Xj(@|x!{sI!^zoQnfta;BR`Ah}Zbhn4rM&UBJoqFzir-&g%24vR}> vx@xX@Cfb&C*3gHHH-r_&10B5%Mbn(WY2t3(Xu@doi||_~u@{s0gEsjet@%-# diff --git a/web/scans/__pycache__/urls.cpython-310.pyc b/web/scans/__pycache__/urls.cpython-310.pyc deleted file mode 100644 index d0e55ce104a3c533e2861f0ca967a5b0a6faadb8..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 331 zcmYk1K~BRk5Jm0SNllw%#R0m_rrx8f;sk8iWmBb{p`=Re$W91w2M)rOyk*rZAb}Wn z#Yq14lfVCrZBZ;46)%2{YtGnLg#VQh+|%+=fC2AV&uh*ZBp|w^7qy_XtR-?tQJhoA zVDTY{6j25$(kZDIaP!Ej<%LpM#e*E0@%cgxBcv}a&OW66*cktQ>(Ec#==HWWVgdEU%6L?wOs(Ee_xjsiZ1*HGlW?^!(G))Bm&o?rr6A=?MO=XWyAF|4t9MTfYvjWmV zR%8Q6hggXXA{}N!Y#8YX8(|xeZeSbPDAJ8=6Wfe*l#Q`1NH?*q><*-x**M#Vbc}6h zJCJT+JK3E`x3as~E~Iy`-K>msob6$EBi+XKvVBOmv;C}sbO*bK-HUW5yN?|}dM7)` z?ninTdw@NNbQgPwJ&bfWJH#GAT4smYqe%C#$Ji01ceBUYQKWm>``9t0``B@I0_lEs zl0AX6!rsqLA-#t^$v%MeUiOqqKgcFmi}NFQL& zvyUKsko`0}gY+Tx0y~TJVfG^XDAGeLbvja>`~iL6a@0s&hPfa~=u-2Sz zvD(BmVR8QA$i#3Q!(sj`0MnMVuEz9pQGUdV%xm*{H^QP$_oVMz%=D5`wUuZS!R*R3D)77RqZdz4q(X5##pT%1xSZkqL zZ*qDI+c zy=2FyTZ>H#eJ*g@iE6uDYce}#)fZ}Z+-%fp?Q)!Ffys`UR;!INYKxt!HX5hpJuxb| z7U7#wX=Pi5w^~+(*QQ&|8S7%@Vr{A-?x-wU^@h2>{j!}@Z$(JWF@TtE=y@%x#WX`t zBhBJG0T|PZ|ATKqg=wO8=a2GH9OAQO@c}J-xXb9$F1{u{p3dLUA{M%q$}C-_QI*D2 z8dqsTrAd_99&iRJuW>8&x{0(oHJe ztkN-+Zc*u0mENJ!ag}b9=^I+Jjb4eOSAGw@vEJ4@t#;l*wH{A-6iI?N7fzF6?Mv@wN&CjwVo}u&G*Sk)yI=vk1H>X8T|2bpZ84b?paK`#=9@|d;gZ>So zbsOyT&6b|(YYhd#YSvt@wt+!=SMqD=KNt(6zIhD`N)-BKJfY|~& zuJRf^U)C;G7p=J#H+v;R3pJ}s{lIJHV#89HiZkd7uyd+uIAt=OgCDKtZJGyg=V1BYSt_caWYfms!c#*RnOI07o3tjBuJaLrfTXj zqd1>gY&1lxAu3x75GV5Hjksh^x@GMacOG5z-Xt1V#Biw9n&w=~sWT|*LzGn^elIxR z!6|H`-EQf?G;5+w7wQb-Nj^J^CtR$~sIyXYak^2fw-&ATqJ!U3 z?b;;CUptKkljxtUGa;!ZAn|Kw1c6Litw}bu3kBxe0EVtZ7#r*S&uVFXH5?G~vDM0Z zWl#kA)WDThaiX=WmiKVpTrR8gZ86HC3<j$Lw{op$OR=J0ojhwnTs2$Pc)^PYMb<| z;8;a{BTCXJPy7CL&G=vXJ7e$YuNbAQCb0NuJVn@W2pZpXyJ`GWipH}&8V^y^`@GcD zO@R6zT}l$&Ux@Hec9YB5s}b;+6et~L%Y3eD5JzEYq@Z}znP%Ch#;80#N;UIWHL8JJ z0XfuJ&R>oAu2Px*YJ?T9#6jz)K+6Lz4VNZ5cZgx>i1;x1@)PJwLB0G5%Dtb!DFROd zbVfa7TX8jo{njPR*1C31Y7Ucv0!(4#K6`RS+q%qeXKeyxME689-;2}l;xK8<2ZwrE8@p~q4KdhD+E)DQ zTH`C9vTQfG0pd%X(RG7LF-&u^G(;w|UNRy45TkTo!WvjEbrV7mra(N$ zmtb4X4|dbb5Mv+#9YY6&NdCM3`$aJ4u2R zBC(T^o}+!_U+WlF!`wH^t5fBSe9Wu#81k(Nl>9Q~K0)9$0-pr16@p1ZU=xB#LLf=_ zu29FUPQ=5Kh%?XErcOesK84r6Su(cM7diZelNR)EDe+`B$!qOKbsA;}H8Mt`5TdD8 zJ7>qm4Sbs3fDs_d=j3m@mXT!dGdETtd^=7OuPx~x;usnR3`BkUK0A7GrfzE&gKXdc 
zkpE8{=HmcfHUN!RAA#I6AnEeZ)r;EL-(-`d32*nwJ}C(y0e_Uua>5p34kZ1A z1anyazm7TF^&T*X2pbZzk_dd*CGe4n&Yr+Hc8%0qru6YuDBZ7Em5!p{q`7dkDxyrP1j*e}vGz225oSn1#T8+W)CbKvj$Pvfts=dby#>Y%nU%d2|G}S9T-z~AapX=Zwc6eR4YL?p|As!LVy_% zp}%USgdHe##59StknnrVEBu6x_#x(DjtK z4qH)NUkqJOi|c9Vl%#{c7P_7h*E1|Hu74qPJ$wEHo(~8?ob4LjR5#ttbhGHO!WA(= zhj|dP5sMRRsE=b_^-(yMCWU9o6dGje8Q#LkwrXreKQu0cT_<(4-5x)Q*$&nzoW`U| zb+$HsbXrb-#E*6sdCg8d(Q3^$YUPAof|Vi>Ogg$LOucq{hVF+^Bk!NAI`>!Q{j=hJ zK{e%^ojFDhucPGfGH-g^yILj8GqS3LbSdYgVDY-pR*@@8p`5PTTHVfiUxEKJ1}lkL z@!r&KeibGBR|o{iWgp5OCvtfJnFMBLdd8KbaSmxTXR{hi=$oRkKjnnOM=)exIt269 zmvBN8Z0|Y2Jax+iqs?c!2@F5uica&rYWStr8h)dd7CdcMMS4Leg)_3=rm+XF8w_}w znsZ9!aLC%(}^hUe4md@A6G;8!`sF0zJIHq4|B1}x-bx+-glN#RtB6eby?>xBbO4nCOlnn7#|ws zdzv^cSqR#KIl3Qzo#I=F2ZKOPlyoR>oj#0V;Z?j3f&F_Ju z=DP+NNA%ukYZ7n1bqSM1^M!5(##2U^8k7?zjwq9+3zbQO1?Gp)Q~s4Z;K$Y)@S7{7 z@S7FK9}ahoaEQ~8@9f-tmVNBry$6=g?(dr^camp!@87fkY}wYv`7dHT39G)N?=DN9 z8d)TE{26lE3&UhZ-#d=E6FRqJ&sJvvEdh@`bHeN-2p$^mr1p+aw`+$g)kAg+8M71p z=-DQ88Swz&tNVxaC~_A`IvlVI;u-LXKO=CbeM#Ke|_?Ns2noDp4 zYZ2U7kl^IB)c)D#LbWYOhrfyM!@mL0(Z_eMXyp|x#eYkjRw{}FY=g*_eB68xSL_ry zcEz}q)KIYV4mBJV(_Zg_7z7D4sVy!YIQmL(POUEmMB8Rn4*OV@F)kX*f0uCn3u?CB z3W>4%QT`301M-ad>A=uR80{sD{+Mvv&MA&BWeXXNH8ln$GRlY5ZTKu|es+6J>vZZsEU47wK%hFFUAS z)|pRM@0-H;r0<=LC+gOF@!Gwa>wa%;8%|?_uYV_A_&>U@PiMZ@Pou!mLQN=qw?_;8 zxZ-FhdmN1v6>v401O(3lXDKfkD1(Z!oWXQmoI#`$Hxph@Z&_-UGVnN3P()quzAm^H zO~!#E!7KwWN|P?T3Maa>DksXU#fe5MYQEY@g^!VjfnYv@7eVXIs#@Zzq=USjIVq>Q z$t!nj)bJ8Mm1HP_zX;`5%_;fj&-;#%v}_OgLovgIW=vAFpky*p-U|8H9>D|uk{AjR zO^~4=6b(#d2>%eNFo?GQ6<7(~;TvA>f|YFY+OfSkO+{qsUWvn5(!II8TkFsud*O4` z!>i0l@bA!tfYop}%F7o6TH z^m^w61y!xHZ-za6Bc5}^u~Q57%r^AJ<8IH;*?N0sK=q7hjH8z%bhh3Xc2pB5woQ$IoO3S5fb-_r(<%b)R%VAherKQi#d(4t zjDEd<`64_r*bR5##hKd;bhh4ZD5>wR;`vlBWYwv!6Ea6-%-?&^2fpyZzFm-1tqAtQ zF1+^zw-@May}htW^@0<_h?p7*VpP3R3>2Hruqb>`#XNpU#yqZ*DTG2O`}U0Cv?$m$ zyU`h+al3}j*4s5cXSp}%(itk=GN)s*XFSoF>+BVOq-WnQ(K{h(*^k$M)9nm8TW@EK zD8t$l)vCH7<0*1_A#<`9{JPFg@Ws*g>jVBnykc-flu`Eg-ADElTxfJyOve&a;W$qZrozofR3RO(|=|)ih<)N_3-$vq@r_@@31w*udFk)HxU9 zACqUaJPMXBtYCz?MFkGr{pBd4_4U`YBKn7>B$HlCRRod#l-Ev0pn!@VLi@<42bVi= zC9&pAJrRBm}V=9X!OKxNE$;X53`m6C&wsoS|5Q&c$BxNO9lF|S38Es zn?dTj2X9L8#nQN?v-Q5W;!1efB&`Zi0w#%bIu&ouQ*bso^Lg&_fqo6=In)leo8n*p z#%(v9`PxnXdy)-;?mvS2Z;M>`bt@%9SrzqvU7jt)XECe#E80>56IJkiffGETd|n3V zH}z$`8^?57*UEHbf;Y%g3VXkUrs~MU1#f)@_Z8uV$E2H;r}ek+#TTw3;2iP;etqHj zdLhI?E_gXeGfsEY1Ro*_F*ne~lQ5}v1pcxjWd2qPOs0-*~T>D(dt;Zv=WoT?CcBt zI=&)*g#gWx@dAy7B7p${B?5y0h&`43pPofcup2g&{-{ULf=<_}*punFb z=!dB8vci=O;G#&|)p0F_a3LO7;JT)8cS21z3yw@FveS< zSz_rJ#wZR0M+yg>XK-NY_EHW<9ylp@0esmF2g`Ny-2yo6W?E(>IP97M3en2!%u4c- zDP--6QBgU;FjoxIwCaMJrYcUhgF-$-g&itXS;IWv5H+b#WmmEs3>xcpgfn3!ht=)9 z(@v643!inzs5rS?8^L2ut6psg^m#;0c+YcKVc%{cd`Pslldt&BSCWkx*;b>Xa^y5#>wJO12Im58ua{sYVOY z;5}D*Q6RU~n#BO0PhuG(@pwK;a)g*Tzmsxz5!gjwH-R#NJp}G1u$RC-0s%wlQ6RXF zSp45XrpMyP1&c3=(1)>~=&!_LhW_r(((1mg0?W;wC1N{86m|?^gi{-RLWJIdRD2;s zicq6(ix7$OPXvTWlz-Au#*`45QDiNJ$mmi+ZfOFdhA8PxPhQq;AXZ-Tk-5&Y5Gzs? 
zd{$xiiUML9{l&^BL%0kj3VuW3lA=JGi-ao`Qucm5gd?Ryz;_f5uLvL<{l&>2hH#~& z2;eIUmsbD~uKwcWuR^#oF8klEuzA@(VY`ty84Kady6pc+h0n|W3Ez#x$?*`roXh^X z!WU%!Hxeg}5WYOyti;JLDSTtMAWnWEgs;H1D16^i__hY{6>lJ({#giLk=>#2{kg(7 z9>6zn1APBEgl~Xt6ZmvqQuww9@L@SpAMvi6*$}=#wnO1Nr10$w;2XLDzJnorL+nn0 zFUplv=Px&;Tl3r z;a;{2OR0)%AEiqp^+^ zeuzC;kFZ1C1F{R@8$rHcJqMm>lOfyb&G(P;DW)hSj2 zB|gau=SU=u<1~)9HUi9Io1hmB;q=5R4GPBjdHuY8I&w{)kc(4ER$tR?eg8eSb}68J z*y+RMWutwm9{VDckhci^#z<#tZx4q3hiTPZ!+Z=Gr>ZpCX$1=33eYjIUR6INn=})ggc1bQRlrh_>d)OX{B589AcyDhMGYWf`H(7%@Ue zwo8GJjW*lC5u?7bo%4NPV8M5kS~Y3kc2a#Hdm!`??7>yK851W?7u$x@V(H5yswofK z@ek8xBQw&wBm(!i;D2~5fHYAuu+cT)E?rYF2d5#<5cqmU60{2m|_8)9itr+ z?C6Cm=O^j<69fWQ%;RYNr^t$V02jkn%qDWg_pxHgMGyNXdL#ShX04W3?7gc=+qGp8 zR6Y(HEFaye<#im<-P(Axr0v)CXt7uF(X_txPto>m{-CKSn!DI@5Jbc z>2J%!csuf{@k361=;QST6u4H_edyTRVP)|z-0C!FDD*QYL)NVy?q^%17t z%|Sb(-5rWDaP<6zZbqznk);$ck8QoMD@G&Qm?|qTy#RRU#eHu#HI__=@4sF z5o+qEAlo?Hs4h&g>SGVU18tHV&PO_Xz?4?q0Lm+sTh2m`gmTFqcpmTTtgIDr{5QjT1DjX)-S`x<#4S$K#n==Q*&36)wjgBo!AVDX{A=9#CYl{wh{qA6d@|AF~^xVVG9EyaBFSR}1yAr=^m z(&`r4@M#kut$7(i)IVmx|00$Z_xe6CdGKA3Si&1=_3DG2_58`a5Ahy$W6f%iYK$|4$X( z0by7W-ckT>e?R|kuYxzF+$n#i@Je?I;T;a(?eC=dlU4Ah*@!TOG+t1cHv~+f)g3j` zQ=jr-B$t+4P}lQGN7%;Dtl#Kb%rY*RWv4*fwZRF>s|!*oFIEC2zQO9XSvbitet>|W zkArSboYEXVCoXf^p7DHa zbs{_2PWQS+Y^W>SZRdPg1m7(eZ3E2yIPLp)Gd#ZUq^+x7ORlb#D(2il6Huai7Xo_p zOEeIx1f~d3zj8((z{ZbYgdRhmnP+`$-0|0!l%|^y{`#9c^J|akDE9ffZv1;rPx5`< zP`JvqH5YabOYH{AJa(QE+c6SpU~x;vQzSkJ;#Sg_N;x5G-OMNHiCuT*;}9z7`?)tb^{xnATn;w^0RJVzbOi zSd((MOx2$`U!ff0SRsGx_{1|#&3D#sV@HVyh17gxm5`b%N&qG(&;@}u6jF0fFQkU{hr^EK^TKLK${$n>=GP9Re2!Tm-;s+f_>CurArglc$e~4P0+#NcZL3 zLQnr1wF1M_yWCvu4bs~t*3Uxh-UXM3%MGZ!aW+ZOUGTDq8QL*=v0tOd1jyqtlzjuw zHxD7x)5=$iu%XSUbnH)uNSjy2evC+)3?gl!h%vlw{1;Ame1zr1Fq0`}{uUnpUJ@4V z5B-^lg{>2jYDm*absLGV~%1_ z0~#i2?wCqgB@$=OM;bcIFC~Fl1LlGc^P38@H~xjD@kn=H9rG7LmJ2;c4^l zE6l;EP}1I4$Na4jX4*~`jtCK_L8KYGMbhjfAO1svP{EhHIe!NxZh44riE^o+WfKq4#aN}a{U!J`Nw^mD|-}cr+Q~&1n;}`JYUbFaRlu~2(Cy?n3c^)PDiym`}Pr!R02s~iL zeZU6K1DxI$vO8uEuJV?4p;su{W0!gb&i#@f4%o)2XG~gvw3a4Iyg=@^>1T>|wAy^x z6D}=uTHz2@3xs?7Uh=-spCII;L`+%>%rmjvy>O#;Wf(9@Zd)QY;==( z5}6(clv_^|9~|)op4{k+{1)Jb|5f1heLPY#+=%_D+vl&?=T|h~!KgT@#{Z8xyg``A z|6@^y;M>@c%@bco8+a!J+77WYLQ3q!E0}q@;>piFMC)xu zd8XnyaV=fPSa&R6d7i5+v>UCLYsy1`Ksnzze(0xp4)JetDlu2_p27_H9#Lvs?hTxf z4@H2SGTLcNYX=#2I#q}yhr*^PPoF+D@$7T7t?jd5)@bd)xYMMRR!$V=_;+Fyvm*k>nW_ew&3v@htJiGR~V_TNwqtufEAJJp7M`!1Pa{_kYUivYL z6ZkUHfd8dnZ~2yP{Sg}dUaH0JK9e8yivj*oM+lJ0`{H(NqzvaNtVl%qVTKBEK}$x+ z`kU~oi(oj2JOjg_jrF2Xz#b+Aj7qT)bjm5EvT}M2w$q7M@WUAuNMl+TXGd`&s8leV za^8*}`{3z!YghF?tlFn%qa_R+>?=`873S%aS4L(&u*2ON{(JQ)GpLc&ZffaiG@)V0{g`2#D5 zyZzTvVmB;UCiG(#WD}8@Zx!78lj{Cfpe#lE+=!=Q53i*pV$M?-QZf7}MjG4k;iuBX zwtQGw$YMB8BF!SzuwR#)AjK~fVaVi2k0I>rYZeZ1zk)knlGsEgPb(b`~o?O)xTS{0!?rEZS=m8v5-6hNXdRu?ZHv zjA&Jmy_cw(*i6zz(xTx;n+e3QoF%3u@B?bJU(H=O_Tm^98+OXyKcSg={&lp>H}J^L zaR=T{a1tDN7;@&4j#B)d$+8%B!nY&y;+OBJ9LqxW`Q&A7*`Q%6hDAyZ3;Kl=3{wot zC=CJr0Z~6=t#&;th9_pm#uT<)*D*eYKSqp|ptSETKy}B% z6&ep@565U(F#V_=CP+7F2gQ-bqEC#0-@wy+V*uy9pVs?|GCO*Vhg=(0O!mKIMWQ4g zDp9P)Qzsd!V}wf7IPlz+4%Ja?$Wtd9szb|vU1lj-sK$inF2$-mCB@KP1EG>qsAMoy zG88Hq4wa0AN;ZT_Hik+@LnWI+C7VMfW1*5Qp^~kkk~=~riYHAJOrPf@dJ|%xINQVoD6c#&0zqo3(T8M!o83G3l1fCR( z^pkoI9F(kYn(z%1*h1hqfjtEF5+E0z*lAtt9xe7D7JHJ4EgShxx_T#pM+ndkWPF^! 
z9R#)!*g=3~Am2q`Hvx(s;bj690tX4)L*O9-wA@oH*Wr&-j$%&4JffIM6SF_UQ!N}c z!j$2pp$n}|NKnB-MYnmrmf+69m1rDBw=Fhm6ly4Kuc3$`cro$gEGP!z3{7ERi^?PD%zfjks_D2g-%`ZPd`pm`F4=Al4e)TZtC z|1-NgJ44zsTm#)DXU_Tl@BHU7=ltiJ*|GC^O~EIX|7GQej-vdGGQm$AnHTXd@8Mu5 zhH5Ewbxu_&9kXKf_*}f6m`iXzZYArfxfG`pR=S><%WyhrWp5BaWodI7&PglB`Lw0i z^K*ITq^!bR0k(`aP%q9E8OvIO^`W^TRWVbBX5>Cl4c*B9nsP^-8}1?nV}OwnBQ>i` z6(69L%9QHH%gyFg%uNZiVVJ_r2-Bzvv*N&1uAR5LqN5@ zcvG4JN!_YCrYKt_bG2+CpS0|G9Re-22ew$ zoiUWwzKS0vd?U7o9|Nx zcQnM++L!>S{~OHJYTJEo9^A2hE@~?P9q&gsa_t;@_*3?ftL^YT41l{c#MRq6bAMzM z8?m-tn}E+j@E)V{xI!iGs7Mc;P>cl5!~S{1KX3ETqyBlje;zwQV>U3ZoKxO>d?Rm+ zx6zJO^{y)N?YvQ3j#CO(D{1T)M-BIOVT|*1TveEpG)9xkxMJ))ro@#K%F55dEFh1O zfqgrAmhzC=ZfYZHnb_!gK5k63^X%z3YD=KpV_f5Ll-U;adskiA1x)957SYaKez_T- z!y)ErU|DAF+9swZx3=|+vD3l zgl#NnL%Y|U$NR4(ID2sA_D0ahfj$vI_X5SL>dyTqjVJEYm3|WV7Xmoe1kf)4eJX-J z1@!423f|K|rg{j?exOr8_eIcsK)={ld47En>1QI0XMpaHp!*KN$oP_R*f?@O$!l|M;^N%vW9pN8k{_z0+H<|w|^M5D8f0p^r2l#)?{A0}jD8fI+ z{1-4D&jrTAyFMp?9*>|Wfj-}lZWP-^f3*G?wH0eGaBV-O+IXCRa6PBct`76xnR$Yl z{}EuGVCLyAGcME2JPBr8XaVL)X1*9;PB8NnGanE2wOBjF{4;3(=|KAr87~=UjdR9% zMSR?VkInVK7CSmTd;9-q*wf+T!$;rM!Tu<;uW!C* zY@t_B{_nZAe#P>MdkL=8*1O0f#q*7!_7Jbkq{wp%hH9_+vz+35RB`=Z>Xp3#y^<8B zuUC-TLaz`fq*v}HSIz?KD3+vXq8D2$mZbP?U$Gz+P%M-SE0*NS8nCcpNs8|P?N=-AA}#^!P4vuJ>nd9kb z;c0`E_yx6M3vG~=3NuL?q(wZy3~Ph5+{R4O25B)CV1~6pTJB>eX@j&l7#f35uMN`j zI{bOsAT4foYmRDzv|MB+X@j(=1(;!Nke1MY@JZSrE#3<-!`dJ%e~X!<4bp;DAdlr> zEvyaFlJp}Rp0u0!1^y; z>owo{M!@=SUF(;9>s-M4UtQ~U-};rVH6wCe>#T46YQVa~vpP4>>YJ>S-V9i$x@BMU z%f1z`9;@Qso=e`svxX6))E>d8WyJY5(&I`)#YiTNw{b69_AS?8DR(XNuvB~t${Llf z#ek*bt==BF_qtKKPmh8+e%2T(GuH9&HO4B0`D30D-!M$98nr!wb}R&1{-NLN!|3%J z-0MZ`hotJ3x{1EIuUa*4htLEdv8*#7!a6r%V13haM+Rc+)?=JLlfiO^&O2^ zns~ZvjYcd@9P3)65la&ny4GmK(mL@Ut1lX{G{H{+5O;@b(TJr9>3QBXVrk-6yY)sR zmL~3Q8nHC-`&&dTP5jvw5la)_-y&jZ;%_4nOA|kdL@Z6Xk%*;%eZ7v<7J8jHVZEMP*$J$p*K^_(%=4}FdJen0_ky{ z%3K#aX{~&$GS}sP?BN47Qs%li9N~wRxh~H$pOm>SUXAd>%3POkGM|*WE-DdzSefgx z#e7ocy7-j{zo*PUPKR&xG)|X)1n-{W*2QKDJ#hsUv%p}E~7kdNDu;R|kqs%15 zofppqm|?}8mzS7HiaRf^1ejsPotGtMlH$&b@+OKqdY2V_VpaE432Qh%J=LI}*kV;< zD(Pmc_41;5t?Vqh!%o$)%#vZ(%lLK9DT_tZar6Dz0a+ar!~D__DWU59rk@y8w+-6(QMGKdTucYHp+F=E!eW!fKOB4 zykNV<11o|1}NkoJPJRu*Q7d`Nes zKVPcYt%l={hF$ykc(74^OMTN&uudNv=+^85WBpyAzWA1jeoM0~<(e%F&$_+5#IbVRu!7x43IZlSzdt=J96ln%<@THGi_L`fj3 za1AuJ&!e+}Jq#kQBLxHf}5<5>?{Phra959_N_&dtpW zd)aL8gjp|my}&?sdeA_GZu)DwGngDgI)w7N#)UXP^%arf4pYPtI+7wq`LsPx-9q;{ zrq6cc_qmx_ys2SXZd^LTO}3g%lpEwC{)o6q*~A+d?%+Az7cL^2zU+tY)I!z5s~K*> zDx>svDy18?SL_91es=gnNYI zcMycxh9z>wi;x4I{53y7>Sxc*z^5M{({Ou>!ef4hBi>an;JFFAVcixlU~oj14kzev zk`AZna2f|!!)9^bk6N^GAZTSS&7S3$s!19L>>;_5z5=?V7fo#{BlHFok|-Gpx5o5Umc#%oEZC&Bik zu8--#F17zVhViEF2!F?5r5;i{gI}-xqp)RSbZt7{iP#Xh)aMEAC#A-?^pLtM_DMB; z)-mX{GuZ!$_)zTQuAcY-=3#{dZ0AE;6YbCZg5C@)cs&pAH{#{AKcpA+?!}B4Ub0j1 zfSc;#-KUP8#C>=8Wrj^9fg z8a)JWTmqT{dfG>IMo$CH1HI&<1xAkm9RPaMM~jS30UZSTwvP@mx(nzq&}APTVRRVi zHlV9MI?8Cq*beMnZq*ne=+QX$C{xR}gj+eUQS&Cd@;`w5rmH@Z7|; ztFt#;jXZlH+Eov_y64pc*7LJo9e%bjJ>88}({gD&w%>U0KlrTca;@rAmiBWX6ZF#N z`k)W>w4ivM@Pd!~Do@N2sQDmNjIe_M0-|`6fKCmpn?#+UoL|T=&E|S)|9(DqJgqh* znzy>X|0I1*;WJsD$GycQZpB2aG1)NJIMSEA{`z;!ZrU&x%D6wJcT}-?v`kk$s> zSCjS<+N1EQEw*QObc=@NV7G4L7VdVV!W-sPN_<3~bAg%R=KFf%?P|G-X_?|SM7msd zb5xn%S>A%Y!}rNkrchB_z-)oV`s?Tq@ps>eX35*$S;KCcY6R59cnWrc)%Exi=c1|7!3ZM7p$=QIgf0Ga z0Ll^kva7~ZDVkbMlhmq4(yJMwHPWkkBSo`oV`3eeTg#I)K3OtGayxomBYB#8yzoQ{ zQUTVYu+q|6c`dhASsN!sQWB#Y8B*TS9!^W1N+Qqr$UGB?Jd-2yOeON{8<}T1k!SzN zJeMW%T#oEcAiJ+2uiep*?JFYv0C^p`awh}i^#jON1IQZ`^2TV^)e3o2guGcH*F?x$ zE-8j0c`JDvdHY3;%My?#exEbml&*DC2jKg^1{ov+Ao~4m_Zb{_1HLB+Bxw%d1%VcFIjWcsn 
z)%tQ&>!${g+Xs--14wlMIWvHq9YBU)D#*d7gtB)q~d~9T% zyAyc~@Jx<$P55{s&)%MAf_F;t)>JOL-On08hGln*5JLC#s(PXi4aM7xDx_Oc2hfYd z&TFeNYZ0=pkZxT$mz3LhFKZ?G<0|H^Ki&Y+22#Ma;+(8;zfT*UmEKjFC9WQ*C^|~o zP|~K7&MRq4NyApo$NI{4;>w>;JfBq3r<8P$lI~T~u=V13{&eDc7cPk>-pB4s3CUSr2l6c~o*h%F1a#V*0$XDbY z%rLLwY!4-}Jv_2kk0kPZb!48eCGzx6#-+yfXd=(o|IaLaBa!W!BS-hGM4oSt%=4W@ zo{J;%d^eHjdn5BamdG=DZ9ksK^8|J+es})l9{u}~z8u}X537;*iTpw0YCjw~`X43o zJT(f>>OS&gJXt=?3(MU{$}@Pt%(s1SjamWY1;eP@J_`(kmsdTP2EOUCI`z3`ljB@l zTAa~&+3W8mKWkQ(BoKl$*p@`5;?yP&<5CL;icF} zp6U3`?qNK9&uIQ6{@{t-Ja0JEZ3Io8Hn%e?bMw1@5cW)H&kz3y;p7$xSebgP_ z$CzI?e0mPtC@KT_^u$&VkyCSgLai_r*_6%r=Hl-e9)CDfQi>KZI>;s zRJks;SEPe^4hHTL?8i(xERL)tD?V=`J;)jkoQa>VQfQs;ibdx2V3&2_X*fuI*K|-m z3hb?Ki1jCX?ikF%iAR#V4jRN|s1sQ@)A2KHw4IJ`I{raXJjEUZFRHnWm#bbq*ff1A ztFgi&4O@cZELG7&s9SA%B85yU3<@t)4;}4T>lARMCIuwa#;)=rdQa2n;EY6d7mCH| zA)`B^$PO9dB*rIX2S7xpP?z=wnh4I2D0!ZhZNR=Z4OtJ&2IG?w!C2ICIC#7}dmhz; z$vKk{>=6oA?3dh@qTFaX6B{Pae!*)~S0a4;WOZrf%vs^BjDL(`%N@qEq1R*0>xGV* zicSq;qOG>$ZBZPtyf`m7ATF=0bS#J3Cp$sAgB<8Gc^Svs6itHIofZnnZZvt`^6kLJ z$>0;qG%$%7n5QX2n?Z2sCK5ndgh>9D>OzxRIFaQwW;lp1JfGvS@-@~%Ypl>QmQm~! zIP*)VWLRjMcH0Qc8T3x{rgEYOZZN?4a4HEqJZA>26gc=JAF^dn;ko zqUXcc0xVbxV$^E=Ys7Arj(4`VaezhRsUB0tjzzl>QfDPC zA77N`Qta1?xN=QKfr+F?*RtM-rBi%7ZnxWfs%JexU5XRJr{hB!5jfEsC;i}4@lmo+ zcxGX_%1aB5*CF$aiJb2f4Ghf;J(3*?tDNV}c3WJT2%lhq=bH^`n3e?~;N^wI6!m;XmKM@Gz`3S?l@H36d1<2j_HWnhnBNl z6dWQf6N;iI)U|BNd`)i2g1kXk7O$&)d66m~wHsK{J1(x>yl@6bClrgvJ)hbQ_g36N zjFC0dZFpQGHJ+DOe$lsc3-i^5`NdI)n|l0ncf%sNcI=B zik8-2(f_GEqaP0E6w~1cYySsp|5m@bAMe`Ze=+uXjQxdvck)=+{{{P#x*OV~xE|3r z`t%3&dxSoj9*_Acd=~SyaSd6~U)KMj-Iuyo-yffux}g0H*p+**zXH2v5B6tZ*M_SU z%XP-C$Jj* zR@g$$CtzC-ZNRYGSm#k-EIwYXvjdCFk=mpG${ENAEPYjyx&WpZB`&LFGO8wtKQHmV zs*(6}67Q=Xi9aiGSw-Edl=w3ezfSnt$Nxu(-_Rcy{Q14a=LhiLNPKAk|GC6(AHaVq z@w*1_rzCC)d@?OQ$ehd&)_zRd_T)Oub}!%EJxE>1@5s|RGsX*H+n6Oa*?q!>bAo$?kQBQ2I46o!-TL;4Q_c(I($Md6}AJ2gu?X(4Fc#g-$HE~y~x+yI^OL;9Dv zcFNSfK#(4p1n~iP;NzY6-XrCXj;4&_@9eJm#TomPz+or?E6Q*}1sL#-^}Oc2s0C-A z7zmKp3?%ZM{3&Y*>I<6}w=~PoCRyR+M2u1zF93)dKj2 z*AmsPNCY_AjX^;HqZ%2qqANmUh-tQ5E>Cr9+o3gGd=VOtXv5YG zA(mSg25Wl!dJ9CCzFBw1UXW7rU3X|V!*(tSC9OIQMwM%E-eL!5#8~RQuXUViO>>tW z(t4)#w@cHd*@OW1XaQ~0V(@~PV)-86V>-jj1ScQ1#%CO-ZjXgMuqRe9%z z%46f~`EXTT;kwEOtLIy-$m$z?;7~sqTNTAc%eN1%-_h^FLgIKSj^%$5Sex1a diff --git a/web/scans/migrations/__pycache__/0003_remove_websuncommonports_webs_uncommon_ports_and_more.cpython-310.pyc b/web/scans/migrations/__pycache__/0003_remove_websuncommonports_webs_uncommon_ports_and_more.cpython-310.pyc deleted file mode 100644 index ad117f1ebfbac011e5410c29679cb7a1f055468b..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 745 zcmZWnO>fjN5cOBKNxCb6kdVq5A+3a(6Xicrqyl4aSFOO3Co^yCnR(AKlSxi+{TN@@-xwj^LvVKq432QSCuo2G?MP2c z+Ov{T@`eBd{)GTnbjdGy$w9m$)8q=zk}30f-!`@yTBDsG^#)Yu>X5p-G7OGzyHhlZ zloUwGfHt(`N-UGm0K|r&KSDoId&?9?um|aeLAD8J0-oRC`#ZehWd>s$X5zEAyD4y_ z#MKo}b1<9Do=I0*rLAF?U;7@|M8XdxVvJ`eLiv OOB9s2(DfdB$o~SUPtHOB diff --git a/web/scans/migrations/__pycache__/0004_rename_github_secrets_cms_cms_cms_subdomain_and_more.cpython-310.pyc b/web/scans/migrations/__pycache__/0004_rename_github_secrets_cms_cms_cms_subdomain_and_more.cpython-310.pyc deleted file mode 100644 index d1fc2fc1adf3c1aa898584523ae2759d6d711472..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 1351 zcmaJ>&2HO95GI#Dq9odpUB_+Ow0B)3BMH!=2#TmmT@*!psC&|7OL0at6>?YErQ+Jx z_NoWTWAxT%*lSNlfgXAckf5_9C8s~=61$xFW;r|a%`D~KUeBTNNBk!JC3KwMtn%*g zNqIt3k7xl0+`=i{nOm|MbFIz_Z{~mPfCv814*2LUgP*)v0PNfu1~>GLGi0V$=9x;W 
zTncT%QUVtGEVZ_G#~~?CXzD*{L1*TIGh^Ta%h=3Ae;L>_Xa*arg(QX8Sw_%-2)Y{v zy%z67+~Rxx!1w=Se;4k+fbbsq>A(RT!h0Kk!FIln$ot)mN62ye^%1<^BHMFrkq`d0 z=N=(_>l@?c-m zr}|_we(fe$%vZ2!`cw zYS=eVhrWrEbtO3kTpRi;m6wUS;?MEQbTz6xDe`MH@k>;5sj&|G+B+LhD5`-yumiYA z)cbOs8f2da$j=s1=5Ouukx9c~ZlXF|x{;%a9w=Gw=9;|KN~|BrOU4&COt@3Pna+vxY&=P)$gCg%~%YepOD q?(M))-RF9=WkWr%Y1?>i323Cx2s6>+rgmSrq-pwJIs+ED$KGGmnv)6u diff --git a/web/scans/migrations/__pycache__/0005_remove_subtakeover_takeover_subtakeover_subdomain_and_more.cpython-310.pyc b/web/scans/migrations/__pycache__/0005_remove_subtakeover_takeover_subtakeover_subdomain_and_more.cpython-310.pyc deleted file mode 100644 index 9ccfb7d5668a5f7b9ab98f7a05fb33cf16bf3726..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 851 zcmZuvy>rtr6qhVHv6H4Sb%K>UGt?Pi;Ev;jC%o_y zkF#?I9*D0D1oFA~A`$`el12V4?PZZ?dqp-iy2+}twwI{=U?kLnK9K6d8;bf60Xgew8PmD}Yy{r`4CIDo;5Pi4}BL!$P;h8G)7 zM9{BOP264#hr?&eV5tjK)2x{-$EwEEU{kBK*hRe@LsjUkRJsIJR0eII)*5|<)kico z*i@}{r&qaOTG{4sd;HyJF^W%bffPr=`r}-eSGEf{(aXFc*YqcqV>xYRkuV``GR`FH zPcqEG4w{<X3A5qZOq+G5{~B{_@WQFJy=sAfFZ>%bj~sQE zeobOiRZVO#t;$Jr9be-(Zmo}ZFC?Z*cutq#K-HeBTdDsnZoS-I#qr|CxYydDsGm{_ zm?z}D^gZrz`CCf&J>>Fky|Ss~eeknAH;SYUVXn(*bqwRygH~+Y+-X@?kLmw5c)51- N6?c#h`=NIv{s5l_k+1tw)+1JTc`8i_jOGvgYrsRO!-KPKyc+DDK z@J1AZvlk2mL?0PM$XC&asE8nb#iq$6?PXKpvu0UY)h&&7e$*Ji+Ld8x+d}&Ta(7Ar zS;2u70(ixX2;)@}8sKQP12H5px)88)v8ktjsi(Am{EvF}XT6GxG3*lIi66h*FeTcF zD-|(*I-AWNN{fv-Md|u^r%sTdmDgaqNEl3`mRc$evN0C@ZdscilylfQpQ!h}#n+cW zV}>5{TImx~NS(_Xb=4hDBg%qCQmY1iJj7(N#2WmlGN!80r>boirq|$ax7Ku6b_miv znD^xgc1P39r*b$grOzahfWAh;6Di;Jstz}X46sEFsLLffD~OLz4*{NsPL}=v^W$g7 z$_}~D4(kp#*tGcTIX^Lbmv#=}`((B3Mmi5 zoA3J+fPg1S z?PXr=XTC>X5b%M1B0z<=q#tQUfxRY4u)$oC_$IEaQs-Tzn$`?z1wyukg|5m74@ao& zXEcIj9+1ojukpR=8R6$;|(Ae2{pk?=FM9(1ToKh*&(0#DVL%t zyG25Eh)Fs^V}f}lBp9|*x-+a3#bWc-#%ZtRG>J{fT^r6#%yA~@aXfBDoWJXHXQXHV z)y%t-6lq8ui|OUFNRiLST)RH}8lJ9=T`+sF`k?Lm9H6#O(1gUN>^BR1&-d6b#_V_B yW9Qq*O=@j;nOXbhn^k4PIBA7OaB1kR_QYU diff --git a/web/scans/migrations/__pycache__/0008_screenshots_port.cpython-310.pyc b/web/scans/migrations/__pycache__/0008_screenshots_port.cpython-310.pyc deleted file mode 100644 index 3e2203482f7cf8edcc45056f90b24a67028ff95c..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 625 zcmZuvJ#QN^4As}!XV(FW49Pzz;Kk>y9gHGHfp%(^X1YZo#aaArk&F@Kcs7Z zj@C|@yA=ph>e3<}LV$#xNbvDdaz{sFM)rGhU*B@ZekE{dioh9pctrsiaKl<&@>Z0B zvyTh}$R7+O@~ym zcIEcAc5UnIvv-vR-8zr)MW{WZ4I9^msCLeG7k6nzssGYc_L5Xp-?Txy?lx0NDCyF+ zYB8!lW~&AboYQoam%GK!}cC}=Hj^np5^#Laf!eR|^o zc{rw!Ga=Q3$^;jD!qt8L(C5#mDvn@V*|nR&vY)e`u1`Kq@_0I>J~3ZBDt=4fiiO1S HQk=-Yz+J4z diff --git a/web/scans/migrations/__pycache__/0009_rename_address_portscanactive_ip_and_more.cpython-310.pyc b/web/scans/migrations/__pycache__/0009_rename_address_portscanactive_ip_and_more.cpython-310.pyc deleted file mode 100644 index 6aa34dd2032d69bb797df902d224bf8920f3e30f..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 829 zcmYjPJ8u**5caOUcYAx6BNWg>fwWCFq5=t_L`YDPf=9EBDV%qItWJQ{ zDI~L`<|r0fC;$Soi@*lN1~9xx2@eV9m@h^!f&#`cx!|x9qYCm&Z#@rgJTp;`VVBx_ z=tu8w`l2Q?+bHVq(fpL((GL}XL2G^5u1Zc5hN)z) zmgsZc!4~y+wm}7+JELNbGBrVJFJkb(I_1{((|3MU9bN_pWy_8`)qn9rz!SCZ9A#JM zvcrDn<|U7}%`~LQPFn21=W}gc$aeXhPLIlo&t)ttr7xtU*47;oo=Ew1t-25y7~l$f zz`kkGnl$7af56~;8?zEw>eDUB4*}w_p~u1e=*?W2O;~Z*IlRGoh1>u9iXrXiH%fmex@=cc^*{p|+*^G!;Y|)`+dVQl zOS$+bvNQ>wxZWV~SBSu;wYx15ViLs+1)0kP~Z#~q+Gh;SP7w4NN}POg413sXU9o4?ApQJw1q2N^F#P2 z`~xIkIpxT;QU&A9XNAH@9(nf7c%I*QRkxcEwBMcEeBLJHmkV|+z~Bs*d4>)M(2`U% 
zqm`F=l)NF}f&YzwFX+Vo>SsO#?@2FQU@qx-HmQn{=5?W@v8_shC{6C5T|2_y441h< zCrCztWFF8F&3q9|Lf1gFCT$2Hgch{tK1B0167NNly~x1|=|Fex?VNSEx8mMj5?U`}^>9RHg?in9nFqG*q7{C2GtN*9`(qiQGpe*ctdA$cVj2hhS8HdVTIFwg7a zQm|smxCB-yEo|4!2B0clNCQ|Najeaih7Vu-i}ZZ)12yjXHXM{(eza{6Lq02OoD;uh zrI4d~jA+tKin*3HXnZ&n#Aa*Kn`xX(KWq2@v~ZaLx`=pw@Fxg8>d`|Q-0t6X{)=vFlRJ6Q&!*o+ oFR(F8xE!ey7&LM;5;WlUy35^DaU9mWVQuSx7TWv}{sX^c zTPj-0lTI+}nfKn>&Sn`w`#ZfUKQcmohvL*CEH2R7Cm4VL)uf>XZCJr5 zc};);|3QE&y5-+_!6AA_=J6lAOXkdHOA%Yl2Fy3=W_P3q%*vVkBXW_hJMFLZVF!Sl#+uaBxZcB+6o-P*W z3n6u@skA6_z+24$_w!d8mD6ck1{E zKZ$!7wuNy0A$QhxdEjl<-Bpf!^FgNk?$vS>?EEO$i5@7|LB!LuQ*LLwuP*#KBlPw$ zhJ-L4{flBo8NElNn?p9mEV&+TF6WU?VJl5#&tNsM8kh{tJJg5l5l-~U^MgJwP)5>* JZTHy&{vWwP#ozz{ diff --git a/web/scans/migrations/__pycache__/0012_alter_webfullinfo_port_alter_webfullinfo_url.cpython-310.pyc b/web/scans/migrations/__pycache__/0012_alter_webfullinfo_port_alter_webfullinfo_url.cpython-310.pyc deleted file mode 100644 index 2f33ac2214ee124b13254947d5e7ffd8b0c0f08b..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 694 zcmZuvJ8u&~5Z>20cNa&ZLLgBiU6V`F&>$3q6e`j{5nZ!d->lE)tar`cS%Dhb{0Cb8 z(zaBt=p#iidyZun9%)ANp800iot)$Z>F@ZlzF~y?iN-z&G%k_5*C>DhHDp06x?mNf z74)I6$kN|6v-NMNx?$CXdAEk*4ZwM77n!O>KNLm6pc&d?h1t_6$Mf; zpbf3K7V{(y(4+Yg2uNUb&*12O>rbEj+1B5%Duo;uI1bsT?POdd>1u@=yg55NKbOk% z+RHmVnXZg!?bJziz}M?~W}R_OtGjT#mmS2?`TolqaNF2=VwAl_pz516+BW^H;Almt z(yE0HVvxzS)dm7zd9(h7mDfcUQaNNu8FDGHVr4YyV=2F`l!;Z*0=?80w5@RukMBc5 z{yqY@iv2u~j_kusZS0V{rGC2iZ{lBIbPx|WrFX6`y{?^|_ID-XDTkYsy9X-qIu{2n z5#b(iBa+kQcOSpU2)TQOA|;HAeW|J1u<}GOr5}GMFpdxKo%6cm@nZaJTP9 Vdx;%G`gU`fcQg)()1I;C{6DQ9!7=~< diff --git a/web/scans/migrations/__pycache__/__init__.cpython-310.pyc b/web/scans/migrations/__pycache__/__init__.cpython-310.pyc deleted file mode 100644 index 730a3e9b760a5637882556d211ce7b5554de5f99..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 140 zcmd1j<>g`kfg`kf`2@JlR@-j5P=LBfgA@QE@lA|DGb33nv8xc8Hzx{2;!Haeo=mYiGERP za(-S~Nx6P`YLb3&a$;Vweo1O>K~7>xYDr>xv3`7fW?p7Ve7s&kI^f`tJ9V2U6= diff --git a/web/scans/templatetags/__pycache__/dict_handler.cpython-310.pyc b/web/scans/templatetags/__pycache__/dict_handler.cpython-310.pyc deleted file mode 100644 index ac7d1362eced22d5160e8113bf8ca57500e21717..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 351 zcmYjL!Ait15X~grZjr8^A@pKS-b4hivKMc?NVb`+UE8K4k)^OV#qZcX`b)Wb@)tzG zNm*eZyf=Bno6MNS;*1e~^55#6>Sr{k5M*wN?t(-!4K3@S1Yihzeqg0Qu}L)7;*GsR zIn&7lD`sDmWd(%HV>h(QW5r|Vi1<#3cSM(y?AV^|K*NF4Fkgl^Dr)ov5A#k9L(|tm zJmdJm?407?SJE2erNzqhYrmCSTuE1{-pT)cUe!+Ort*(Zs@E;r%VD%}XQYb<qE@do1Wv6Zt75`GSU D#A;7a diff --git a/web/scans/templatetags/dict_handler.py b/web/scans/templatetags/dict_handler.py deleted file mode 100644 index f524dbb2..00000000 --- a/web/scans/templatetags/dict_handler.py +++ /dev/null @@ -1,6 +0,0 @@ -from django import template -register = template.Library() - -@register.filter -def get(mapping, key): - return mapping.get(key, '') \ No newline at end of file diff --git a/web/scans/tests.py b/web/scans/tests.py deleted file mode 100644 index 0503a808..00000000 --- a/web/scans/tests.py +++ /dev/null @@ -1,38 +0,0 @@ -from django.test import TestCase -from scans.tasks import * -from projects.models import Project -from scans.models import * -from time import sleep -from web.settings import BASE_DIR -import os - -class TestCase(TestCase): - def setUp(self): - Project.objects.create(number=3, domain='target.com.br', last_change=timezone.now(), command='') - 
Project.objects.create(number=4, domain='target.com.br', - last_change=timezone.now(), - command="('../reconftw', '-d','target.com.br', '-s', '-o', '/home/ubuntu/reconftw-hakai/Recon/target.com.br_v2')") - print('#######################') - - def test(self): - # print('aaaaaaaaaaaa') - # pobj = Project.objects.filter(domain='target.com.br')[0] - # print(f"==> PK: {pobj.number}") - # # print(pobj[0].id) - # # d = pobj[0].id - # c = pobj.command.split("'") - # del c[0::2] - # print(f"{c[-1]}/{pobj.domain}") - - # celery_task = subdomains_dns_f2db.delay(pobj.pk) - - # sleep(10) - - # sub_scan = SubdomainsDNS.objects.filter(pk=pobj.pk) - # print(sub_scan) - - l = os.listdir(f"{BASE_DIR.parent}/Recon/target.com.br/subdomains") - if 'subdomains.txt' in l: - print("has file") - - diff --git a/web/scans/urls.py b/web/scans/urls.py deleted file mode 100644 index 2778c0c6..00000000 --- a/web/scans/urls.py +++ /dev/null @@ -1,11 +0,0 @@ -from django.urls import path -from . import views - -# Namespace name -app_name = 'scans' - -# Be careful setting the name to just /login use userlogin instead! -urlpatterns=[ - path('results/', views.index, name='index'), - path('new', views.new_scan, name='new'), -] diff --git a/web/scans/utils.py b/web/scans/utils.py deleted file mode 100644 index 57bb4860..00000000 --- a/web/scans/utils.py +++ /dev/null @@ -1,1473 +0,0 @@ -from re import search, compile, sub -from .models import * -from pathlib import Path -from os import listdir -from itertools import groupby -from ast import literal_eval -from base64 import b64encode -import json -import time - - -def monitor(domain): - '''function to monitor scans statuses''' - stop = False - - while stop != True: - scans= Project.objects.filter(domain=domain) - if scans.count() >= 1: - allStatus = [] - - for scan in scans: - if scan.status == "SCANNING": - allStatus.append("SCANNING") - else: - allStatus.append("FINISHED") - - if "SCANNING" not in allStatus: - stop = True - else: - time.sleep(0.5) - else: - stop = True - - # print(stop) - - - -# FUNCTIONS THAT SCRAP FILES AND SAVE TO THE DATABASE -def files_to_db(type_scan, project_id): - """function to call files scrapers' functions and save to database""" - print('[+] saving to db [+]') - - if type_scan == '-r': # RECON - domaininfogeneral_f2db(project_id) - domaininfoname_f2db(project_id) - domaininfoemail_f2db(project_id) - domaininfoip_f2db(project_id) - emails_f2db(project_id) - dorks_f2db(project_id) - gitdorks_f2db(project_id) - softwareinfo_f2db(project_id) - authorsinfo_f2db(project_id) - metadataresults_f2db(project_id) - favicontest_f2db(project_id) - subdomains_dns_f2db(project_id) - subdomains_f2db(project_id) - s3buckets_f2db(project_id) - cloudasset_f2db(project_id) - zonetransfer_f2db(project_id) - subtakeover_f2db(project_id) - webprobes_f2db(project_id) - webfullinfo_uncommon_f2db(project_id) - webs_uncommon_ports_f2db(project_id) - webfullinfo_f2db(project_id) - screenshots_f2db(project_id) - portscanactive_f2db(project_id) - portscanpassive_f2db(project_id) - cdnproviders_f2db(project_id) - webwafs_f2db(project_id) - nucleioutputs_f2db(project_id) - cms_f2db(project_id) - fuzzingfull_f2db(project_id) - urlextract_f2db(project_id) - urlgf_f2db(project_id) - jschecks_f2db(project_id) - webdicts_f2db(project_id) - - elif type_scan == '-s': # SUBDOMAINS - subdomains_dns_f2db(project_id) - subdomains_f2db(project_id) - s3buckets_f2db(project_id) - cloudasset_f2db(project_id) - zonetransfer_f2db(project_id) - subtakeover_f2db(project_id) - 
webprobes_f2db(project_id) - webfullinfo_uncommon_f2db(project_id) - webfullinfo_f2db(project_id) - webs_uncommon_ports_f2db(project_id) - screenshots_f2db(project_id) - - - - elif type_scan == '-p': # PASSIVE - domaininfogeneral_f2db(project_id) - domaininfoname_f2db(project_id) - domaininfoemail_f2db(project_id) - domaininfoip_f2db(project_id) - emails_f2db(project_id) - dorks_f2db(project_id) - gitdorks_f2db(project_id) - softwareinfo_f2db(project_id) - authorsinfo_f2db(project_id) - metadataresults_f2db(project_id) - favicontest_f2db(project_id) - subdomains_f2db(project_id) - subdomains_dns_f2db(project_id) - portscanpassive_f2db(project_id) - cdnproviders_f2db(project_id) - webfullinfo_f2db(project_id) - - - - elif type_scan == '-w': # WEB - s3buckets_f2db(project_id) - cloudasset_f2db(project_id) - subtakeover_f2db(project_id) - webwafs_f2db(project_id) - nucleioutputs_f2db(project_id) - cms_f2db(project_id) - fuzzingfull_f2db(project_id) - urlextract_f2db(project_id) - urlgf_f2db(project_id) - jschecks_f2db(project_id) - webdicts_f2db(project_id) - vulns_f2db(project_id) - - elif type_scan == '-n': # OSINT - domaininfoemail_f2db(project_id) - domaininfogeneral_f2db(project_id) - domaininfoip_f2db(project_id) - domaininfoname_f2db(project_id) - ipsinfos_f2db(project_id) - emails_f2db(project_id) - dorks_f2db(project_id) - gitdorks_f2db(project_id) - metadataresults_f2db(project_id) - zonetransfer_f2db(project_id) - favicontest_f2db(project_id) - - elif type_scan == '-a': # ALL - domaininfogeneral_f2db(project_id) - domaininfoname_f2db(project_id) - domaininfoemail_f2db(project_id) - domaininfoip_f2db(project_id) - emails_f2db(project_id) - dorks_f2db(project_id) - gitdorks_f2db(project_id) - softwareinfo_f2db(project_id) - authorsinfo_f2db(project_id) - metadataresults_f2db(project_id) - favicontest_f2db(project_id) - subdomains_dns_f2db(project_id) - subdomains_f2db(project_id) - s3buckets_f2db(project_id) - cloudasset_f2db(project_id) - zonetransfer_f2db(project_id) - subtakeover_f2db(project_id) - webprobes_f2db(project_id) - webs_uncommon_ports_f2db(project_id) - webfullinfo_f2db(project_id) - webfullinfo_uncommon_f2db(project_id) - screenshots_f2db(project_id) - portscanactive_f2db(project_id) - portscanpassive_f2db(project_id) - cdnproviders_f2db(project_id) - webwafs_f2db(project_id) - nucleioutputs_f2db(project_id) - cms_f2db(project_id) - fuzzingfull_f2db(project_id) - urlextract_f2db(project_id) - urlgf_f2db(project_id) - jschecks_f2db(project_id) - webdicts_f2db(project_id) - vulns_f2db(project_id) - - print('[+] finished saving to db [+]') - - - -def subdomains_dns_f2db(project_id): - print("[+] subdomains_dns: saving to db [+]") - subdomains_save = SubdomainsDNS.objects - project_obj = Project.objects.filter(pk=project_id) - - path = project_obj[0].command.split("'") - del path[0::2] - - file_path = f"{path[-1]}/{project_obj[0].domain}/subdomains/subdomains_dnsregs.json" - - if Path(file_path).is_file(): - with open(file_path) as f: - subs = f.readlines() - - for index, s in enumerate(subs): - print(f"[+] saving {index} of {len(subs)}") - j = json.loads(s.rstrip()) - subdomains_save.create(host=j['host'], - resolver=j['resolver'], - cname=j.get('cname', 'N/A'), - a_record=j.get('a', 'N/A'), - aaaa_record=j.get('aaaa', 'N/A'), - mx_record=j.get('mx', 'N/A'), - soa_record=j.get('soa', 'N/A'), - ns_record=j.get('ns', 'N/A'), - internal_ips_record=j.get('internal_ips', 'N/A'), - project_id=project_id - ) - else: - print("does not exist") - print("[+] subdomains_dns: 
finished saving!! [+]") - - - -def s3buckets_f2db(project_id): - print("[+] s3buckets: saving to db [+]") - s3buckets_save = S3Buckets.objects - project_obj = Project.objects.filter(pk=project_id) - - path = project_obj[0].command.split("'") - del path[0::2] - - file_path = f"{path[-1]}/{project_obj[0].domain}/subdomains/s3buckets.txt" - - if Path(file_path).is_file(): - with open(file_path) as f: - s3 = f.readlines() - - for s in s3: - if '|' in s: - j = s.rstrip().split('|') - s3buckets_save.create(url=j[0], - bucket_exists=(True if 'bucket_exists' in j[1] else False), - auth_users=j[2].split(',')[0].split(':')[1].lstrip(), - all_users=j[2].split(',')[1].split(':')[1].lstrip(), - project_id=project_id - ) - print("[+] s3buckets: finished saving!! [+]") - - - -def webfullinfo_f2db(project_id): - print("[+] webfullinfo: saving to db [+]") - webfullinfo_save = WebFullInfo.objects - project_obj = Project.objects.filter(pk=project_id) - - path = project_obj[0].command.split("'") - del path[0::2] - - file_path = f"{path[-1]}/{project_obj[0].domain}/webs/web_full_info.txt" - - filep = Path(file_path) - - if filep.is_file(): - - filep.write_text(filep.read_text().replace('}\n{', '},{')) - - with open(file_path, 'r+') as f: - content = f.read() - f.seek(0) - f.write('['+content) - c2 = f.read() - f.write(c2+']') - - f = open(file_path).read() - wfi = json.loads(f) - - for w in wfi: - webfullinfo_save.create(url=w.get('url', 'N/A').split('/')[2].split(':')[0], - port=w.get('port', 'N/A'), - technologies=w.get('technologies', 'N/A'), - a=w.get('a', 'N/A'), - location=w.get('location', 'N/A'), - webserver=w.get('webserver', 'N/A'), - method=w.get('method', 'N/A'), - host_ip=w.get('host_ip', 'N/A'), - status_code=w.get('status-code', 'N/A'), - tls_grab=w.get('tls-grab', 'N/A'), - project_id=project_id - ) - print("[+] webfullinfo: finished saving!! [+]") - - - -def webfullinfo_uncommon_f2db(project_id): - print("[+] webfullinfo_uncommon: saving to db [+]") - webfullinfo_save = WebFullInfoUncommon.objects - project_obj = Project.objects.filter(pk=project_id) - - path = project_obj[0].command.split("'") - del path[0::2] - - file_path = f"{path[-1]}/{project_obj[0].domain}/webs/web_full_info_uncommon.txt" - filep = Path(file_path) - - if filep.is_file(): - - filep.write_text(filep.read_text().replace('}\n{', '},{')) - - with open(file_path, 'r+') as f: - content = f.read() - f.seek(0) - f.write('['+content) - c2 = f.read() - f.write(c2+']') - - f = open(file_path).read() - wfi = json.loads(f) - - for w in wfi: - webfullinfo_save.create(url=w.get('url', 'N/A').split('/')[2].split(':')[0], - port=w.get('port', 'N/A'), - tech=w.get('tech', 'N/A'), - ip=w.get('host', 'N/A'), - project_id=project_id) - print("[+] webfullinfo_uncommon: finished saving!! 
[+]") - - - -def cloudasset_f2db(project_id): - - cloudasset_save = CloudAssets.objects - project_obj = Project.objects.filter(pk=project_id) - - path = project_obj[0].command.split("'") - del path[0::2] - - file_path = f"{path[-1]}/{project_obj[0].domain}/subdomains/cloud_assets.txt" - - if Path(file_path).is_file(): - with open(file_path) as f: - ca = f.readlines() - - for i in ca: - if 'Protected' in i: - protected_s3 = i.split(': ')[-1].strip() - else: - protected_s3 = 'N/A' - - if 'App Found' in i: - appfound = i.split(': ')[-1].strip() - else: - appfound = 'N/A' - - if 'Storage Account' in i: - storage_acc = i.split(': ')[-1].strip() - else: - storage_acc = 'N/A' - - if 'Azure' in i: - azure = i.split(': ')[-1].strip() - else: - azure = 'N/A' - - if 'Google' in i: - google = i.split(': ')[-1].strip() - else: - google = 'N/A' - - - cloudasset_save.create(protected_s3bucket=protected_s3, appfound=appfound, storage_account=storage_acc, azure=azure, google=google, project_id=project_id) - - - -def domaininfoip_f2db(project_id): - - domaininfoip_save = DomainInfoIP.objects - project_obj = Project.objects.filter(pk=project_id) - - path = project_obj[0].command.split("'") - del path[0::2] - - file_path = f"{path[-1]}/{project_obj[0].domain}/osint/domain_info_ip.txt" - - if Path(file_path).is_file(): - with open(file_path) as f: - dip = f.read() - - domaininfoip_save.create(domain_info_ip=dip, project_id=project_id) - - - -def portscanpassive_f2db(project_id): - - portscanpassive_save = PortscanPassive.objects - project_obj = Project.objects.filter(pk=project_id) - - path = project_obj[0].command.split("'") - del path[0::2] - - file_path = f"{path[-1]}/{project_obj[0].domain}/hosts/portscan_passive.txt" - - if Path(file_path).is_file(): - with open(file_path) as f: - passive = f.readlines() - - d = [] - dc = {} - for i in passive: - if search(r'(\d{1,3}\.\d{1,3}\.\d{1,3}\.\d{1,3})', i): - if search(r"\(.*.\)", i): - l = i.split(' ') - dc['ip'] = l[0].rstrip() - dc['host'] = l[1].rstrip().strip('()') - else: - dc['ip'] = i.rstrip() - dc['host'] = 'N/A' - - elif 'Ports' in i: - dc['ports'] = i.strip().split(':')[1].split(', ') - - elif 'Tags' in i: - dc['tags'] = i.strip().split(':')[1].strip() - - elif 'CPEs' in i: - dc['cpes'] = (', '+i.strip().split('CPEs: ')[1]).split(', cpe:/a:')[1::] - - elif search(r'^\n', i): - d.append(dc) - dc = {} - - for j in d: - portscanpassive_save.create(ip=j.get('ip', 'N/A'), - host=j.get('host', 'N/A'), - ports=j.get('ports', 'N/A'), - tags=j.get('tags', 'N/A'), - project_id=project_id - ) - - - -def portscanactive_f2db(project_id): - - portscanactive_save = PortscanActive.objects - project_obj = Project.objects.filter(pk=project_id) - - path = project_obj[0].command.split("'") - del path[0::2] - - file_path = f"{path[-1]}/{project_obj[0].domain}/hosts/portscan_active.gnmap" - - if Path(file_path).is_file(): - with open(file_path) as f: - psa = f.readlines() - - for i in psa[1:-1:2]: - addr = i.strip().split(': ')[1].split(' ')[0] - status = i.strip().split(': ')[2] - hostname = i.split(' (')[1].split(')')[0] - openports = [] - for op in psa[psa.index(i)+1].split('\t')[1].split(': ')[1].split(', '): - openports.append(op.strip('/').split('//')) - - portscanactive_save.create(ip=addr, hostname=hostname, status=status, - openports=openports, project_id=project_id) - - - -def dorks_f2db(project_id): - - dorks_save = Dorks.objects - project_obj = Project.objects.filter(pk=project_id) - - path = project_obj[0].command.split("'") - del path[0::2] - - file_path = 
f"{path[-1]}/{project_obj[0].domain}/osint/dorks.txt" - - if Path(file_path).is_file(): - with open(file_path) as f: - dorks = f.read() - - dorks_save.create(dorks=dorks, project_id=project_id) - - - -def gitdorks_f2db(project_id): - - gitdorks_save = GitDorks.objects - project_obj = Project.objects.filter(pk=project_id) - - path = project_obj[0].command.split("'") - del path[0::2] - - file_path = f"{path[-1]}/{project_obj[0].domain}/osint/gitdorks.txt" - - if Path(file_path).is_file(): - with open(file_path) as f: - gitdorks = f.read() - - gitdorks_save.create(git_dorks=gitdorks, project_id=project_id) - - - -def fuzzingfull_f2db(project_id): - - fuzzingfull_save = FuzzingFull.objects - project_obj = Project.objects.filter(pk=project_id) - - path = project_obj[0].command.split("'") - del path[0::2] - - file_path = f"{path[-1]}/{project_obj[0].domain}/fuzzing/fuzzing_full.txt" - - if Path(file_path).is_file(): - with open(file_path) as f: - fuzzinfull = f.readlines() - - fuzz_list = [] - - for i in fuzzinfull[:-1:]: - row = [] - row.append(i.split(' ')[0]) - row.append(i.split(' ')[1]) - row.append(i.split(' ')[2].strip()) - fuzz_list.append(row) - - fuzzingfull_save.create(fuzzing_full=fuzz_list, project_id=project_id) - - - -def subdomains_f2db(project_id): - - subdomains_save = Subdomains.objects - project_obj = Project.objects.filter(pk=project_id) - - path = project_obj[0].command.split("'") - del path[0::2] - - file_path = f"{path[-1]}/{project_obj[0].domain}/subdomains/subdomains.txt" - - if Path(file_path).is_file(): - with open(file_path) as f: - subdomains = f.read() - - subdomains_save.create(subdomains=subdomains, project_id=project_id) - - - -def domaininfoname_f2db(project_id): - - domaininfo_name_save = DomainInfoName.objects - project_obj = Project.objects.filter(pk=project_id) - - path = project_obj[0].command.split("'") - del path[0::2] - - file_path = f"{path[-1]}/{project_obj[0].domain}/osint/domain_info_name.txt" - - if Path(file_path).is_file(): - with open(file_path) as f: - din = f.read() - - domaininfo_name_save.create(domain_info_name=din, project_id=project_id) - - - -def domaininfogeneral_f2db(project_id): - - domaininfo_general_save = DomainInfoGeneral.objects - project_obj = Project.objects.filter(pk=project_id) - - path = project_obj[0].command.split("'") - del path[0::2] - - file_path = f"{path[-1]}/{project_obj[0].domain}/osint/domain_info_general.txt" - - if Path(file_path).is_file(): - with open(file_path) as f: - dig = f.read() - - domaininfo_general_save.create(domain_info_general=dig, project_id=project_id) - - - -def domaininfoemail_f2db(project_id): - - domaininfo_email_save = DomainInfoEmail.objects - project_obj = Project.objects.filter(pk=project_id) - - path = project_obj[0].command.split("'") - del path[0::2] - - file_path = f"{path[-1]}/{project_obj[0].domain}/osint/domain_info_email.txt" - - if Path(file_path).is_file(): - with open(file_path) as f: - die = f.read() - - domaininfo_email_save.create(domain_info_email=die, project_id=project_id) - - - -def emails_f2db(project_id): - - emails_save = Emails.objects - project_obj = Project.objects.filter(pk=project_id) - - path = project_obj[0].command.split("'") - del path[0::2] - - file_path = f"{path[-1]}/{project_obj[0].domain}/osint/emails.txt" - - if Path(file_path).is_file(): - with open(file_path) as f: - emails = f.read() - - emails_save.create(emails=emails, project_id=project_id) - - - -def softwareinfo_f2db(project_id): - - softwareinfo_save = SoftwareInfo.objects - project_obj = 
Project.objects.filter(pk=project_id) - - path = project_obj[0].command.split("'") - del path[0::2] - - file_path = f"{path[-1]}/{project_obj[0].domain}/osint/software.txt" - - if Path(file_path).is_file(): - with open(file_path) as f: - soft = f.read() - - softwareinfo_save.create(software_info=soft, project_id=project_id) - - - -def authorsinfo_f2db(project_id): - - authorsinfo_save = AuthorsInfo.objects - project_obj = Project.objects.filter(pk=project_id) - - path = project_obj[0].command.split("'") - del path[0::2] - - file_path = f"{path[-1]}/{project_obj[0].domain}/osint/authors.txt" - - if Path(file_path).is_file(): - with open(file_path) as f: - authors = f.read() - - authorsinfo_save.create(authors_info=authors, project_id=project_id) - - - - -def metadataresults_f2db(project_id): - - metadataresults_save = MetadataResults.objects - project_obj = Project.objects.filter(pk=project_id) - - path = project_obj[0].command.split("'") - del path[0::2] - - file_path = f"{path[-1]}/{project_obj[0].domain}/osint/metadata_result.txt" - - if Path(file_path).is_file(): - with open(file_path) as f: - metadata = f.read() - - metadataresults_save.create(metadata_results=metadata, project_id=project_id) - - - -def zonetransfer_f2db(project_id): - - zonetransfer_save = Zonetransfer.objects - project_obj = Project.objects.filter(pk=project_id) - - path = project_obj[0].command.split("'") - del path[0::2] - - file_path = f"{path[-1]}/{project_obj[0].domain}/subdomains/zonetransfer.txt" - - if Path(file_path).is_file(): - with open(file_path) as f: - zt = f.read() - - zonetransfer_save.create(zonetransfer=zt, project_id=project_id) - - - -def favicontest_f2db(project_id): - - favicontest_save = Favicontest.objects - project_obj = Project.objects.filter(pk=project_id) - - path = project_obj[0].command.split("'") - del path[0::2] - - file_path = f"{path[-1]}/{project_obj[0].domain}/hosts/favicontest.txt" - - if Path(file_path).is_file(): - with open(file_path) as f: - favicontest = f.read() - - favicontest_save.create(favicontest=favicontest, project_id=project_id) - - - -def subtakeover_f2db(project_id): - - subtakeover_save = SubTakeover.objects - project_obj = Project.objects.filter(pk=project_id) - - path = project_obj[0].command.split("'") - del path[0::2] - - file_path = f"{path[-1]}/{project_obj[0].domain}/webs/takeover.txt" - - if Path(file_path).is_file(): - with open(file_path) as f: - subtakeover = f.readlines() - - for s in subtakeover: - type_takeover = s.split('] ')[1].strip('[') - subdomain = s.split('] ')[-1] - subtakeover_save.create(type_takeover=type_takeover, subdomain=subdomain, project_id=project_id) - - - -def screenshots_f2db(project_id): - - project_obj = Project.objects.filter(pk=project_id) - - path = project_obj[0].command.split("'") - del path[0::2] - - ss_path = f"{path[-1]}/{project_obj[0].domain}/screenshots" - - if Path(ss_path).is_dir() and len(listdir(ss_path)) > 0: - ss_list = listdir(ss_path) - - for s in ss_list: - with open(f"{ss_path}/{s}", 'rb') as f: - img = f.read() - - hn = sub(r'https?-', '',s.replace('.png','')) - - i = hn.rfind('-') - - if '-' in hn and i > 0 and hn[i+1::].isnumeric(): - hn = f"{hn[:i:]}:{hn[i+1::]}" - - ScreenShots.objects.create(hostname=hn, screenshot=img, project_id=project_id) - - - -def webprobes_f2db(project_id): - - webprobes_save = WebProbes.objects - project_obj = Project.objects.filter(pk=project_id) - - path = project_obj[0].command.split("'") - del path[0::2] - - file_path = f"{path[-1]}/{project_obj[0].domain}/webs/webs.txt" 
- - if Path(file_path).is_file(): - with open(file_path) as f: - webs = f.read() - - webprobes_save.create(webprobes=webs, project_id=project_id) - - - -def webwafs_f2db(project_id): - - webwafs_save = WebProbes.objects - project_obj = Project.objects.filter(pk=project_id) - - path = project_obj[0].command.split("'") - del path[0::2] - - file_path = f"{path[-1]}/{project_obj[0].domain}/webs/web_wafs.txt" - - if Path(file_path).is_file(): - with open(file_path) as f: - webwafs = f.read() - - webwafs_save.create(webwafs=webwafs, project_id=project_id) - - - -def nucleioutputs_f2db(project_id): - - nucleioutputs_save = NucleiOutputs.objects - project_obj = Project.objects.filter(pk=project_id) - - path = project_obj[0].command.split("'") - del path[0::2] - - nuclei_path = f"{path[-1]}/{project_obj[0].domain}/nuclei_output" - - if Path(nuclei_path).is_dir() and len(listdir(nuclei_path)) > 0: - - ld = listdir(nuclei_path) - - severities = ['info','low','medium','high','critical'] - - j = {} - - for s in severities: - sev_list = [] - if f'{s}.txt' in ld: - with open (f"{nuclei_path}/{s}.txt") as f: - raw_list = f.readlines() - - for i in raw_list: - sev_list.append(sub(r'\[', '', i.strip()).split(']')) - else: - sev_list.append('N/A') - - j[f'{s}'] = sev_list - - nucleioutputs_save.create(info=j.get('info', 'N/A'), low=j.get('low', 'N/A'), - medium=j.get('medium', 'N/A'), high=j.get('high', 'N/A'), - critical=j.get('critical', 'N/A'), project_id=project_id) - - - -def urlgf_f2db(project_id): - - urlgf_save = URLgf.objects - project_obj = Project.objects.filter(pk=project_id) - - path = project_obj[0].command.split("'") - del path[0::2] - - gf_path = f"{path[-1]}/{project_obj[0].domain}/gf" - - if Path(gf_path).is_dir() and len(listdir(gf_path)) > 0: - - ld = listdir(gf_path) - - if 'xss.txt' in ld: - with open(f"{gf_path}/xss.txt") as f: - xss = f.readlines() - f.close() - else: - xss = 'N/A' - - if 'ssti.txt' in ld: - with open(f"{gf_path}/ssti.txt") as f: - ssti = f.readlines() - f.close() - else: - ssti = 'N/A' - - if 'ssrf.txt' in ld: - with open(f"{gf_path}/ssrf.txt") as f: - ssrf = f.readlines() - f.close() - else: - ssrf = 'N/A' - - if 'sqli.txt' in ld: - with open(f"{gf_path}/sqli.txt") as f: - sqli = f.readlines() - f.close() - else: - sqli = 'N/A' - - if 'redirect.txt' in ld: - with open(f"{gf_path}/redirect.txt") as f: - redirect = f.readlines() - f.close() - else: - redirect = 'N/A' - - if 'rce.txt' in ld: - with open(f"{gf_path}/rce.txt") as f: - rce = f.readlines() - f.close() - else: - rce = 'N/A' - - if 'potential.txt' in ld: - with open(f"{gf_path}/potential.txt") as f: - potential = f.readlines() - f.close() - else: - potential = 'N/A' - - if 'endpoints.txt' in ld: - with open(f"{gf_path}/endpoints.txt") as f: - endpoints = f.readlines() - f.close() - else: - endpoints = 'N/A' - - if 'lfi.txt' in ld: - with open(f"{gf_path}/lfi.txt") as f: - lfi = f.read() - f.close() - else: - lfi = 'N/A' - else: - xss = ssti = ssrf = sqli = redirect = rce = potential = endpoints = lfi = 'N/A' - - urlgf_save.create(xss=xss, ssti=ssti, ssrf=ssrf, sqli=sqli, - redirect=redirect, rce=rce, potential=potential, endpoints=endpoints, - lfi=lfi, project_id=project_id) - - - -def vulns_f2db(project_id): - - vulns_save = Vulns.objects - project_obj = Project.objects.filter(pk=project_id) - - path = project_obj[0].command.split("'") - del path[0::2] - - vulns_path = f"{path[-1]}/{project_obj[0].domain}/vulns" - - if Path(vulns_path).is_dir() and len(listdir(vulns_path)) > 0: - - ld = listdir(vulns_path) 
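For readability, the per-severity parsing done by nucleioutputs_f2db above can also be read as a standalone function. The bracketed-field layout assumed for nuclei's text output is inferred from the sub()/split(']') calls in the removed code, so treat this as a sketch rather than the canonical nuclei format.

from pathlib import Path
from re import sub

SEVERITIES = ("info", "low", "medium", "high", "critical")


def parse_nuclei_outputs(nuclei_dir: str) -> dict:
    """Map each severity to a list of findings split into their bracketed fields."""
    results = {}
    base = Path(nuclei_dir)
    for severity in SEVERITIES:
        sev_file = base / f"{severity}.txt"
        if sev_file.is_file():
            results[severity] = [
                # "[tpl] [sev] [url]" -> ["tpl", " sev", " url", ""]
                sub(r"\[", "", line.strip()).split("]")
                for line in sev_file.read_text().splitlines()
            ]
        else:
            results[severity] = ["N/A"]
    return results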
- - if 'brokenLinks.txt' in ld: - with open(f"{vulns_path}/brokenLinks.txt") as f: - brokenlinks = [x[:-1] for x in f.readlines()[:-1:]] - f.close() - else: - brokenlinks = 'N/A' - - if 'xss.txt' in ld: - with open(f"{vulns_path}/xss.txt") as f: - xss = f.read() - f.close() - else: - xss = 'N/A' - - if 'cors.txt' in ld: - with open(f"{vulns_path}/cors.txt") as f: - cors = f.read() - else: - cors = 'N/A' - - if 'redirect.txt' in ld: - with open(f"{vulns_path}/redirect.txt") as f: - redirect = f.read() - f.close() - else: - redirect = 'N/A' - - if 'ssrf_requested_url.txt' in ld: - with open(f"{vulns_path}/ssrf_requested_url.txt") as f: - ssrf_requested_url = f.read() - f.close() - else: - ssrf_requested_url = 'N/A' - - if 'ssrf_requested_headers.txt' in ld: - with open(f"{vulns_path}/ssrf_requested_headers.txt") as f: - ssrf_requested_headers = f.read() - f.close() - else: - ssrf_requested_headers = 'N/A' - - if 'ssrf_callback.txt' in ld: - with open(f"{vulns_path}/ssrf_callback.txt") as f: - ssrf_callback = f.read() - f.close() - else: - ssrf_callback = 'N/A' - - if 'crlf.txt' in ld: - with open(f"{vulns_path}/crlf.txt") as f: - crlf = f.read() - f.close() - else: - crlf = 'N/A' - - if 'lfi.txt' in ld: - with open(f"{vulns_path}/lfi.txt") as f: - lfi = f.read() - f.close() - else: - lfi = 'N/A' - - if 'ssti.txt' in ld: - with open(f"{vulns_path}/ssti.txt") as f: - ssti = f.read() - f.close() - else: - ssti = 'N/A' - - if 'testssl.txt' in ld: - with open(f"{vulns_path}/testssl.txt") as f: - testssl = f.read() - f.close() - else: - testssl = 'N/A' - - if 'command_injection.txt' in ld: - with open(f"{vulns_path}/command_injection.txt") as f: - rcommand_injectionce = f.read() - f.close() - else: - command_injection = 'N/A' - - if 'prototype_pollution.txt' in ld: - with open(f"{vulns_path}/prototype_pollution.txt") as f: - prototype_pollution = f.read() - f.close() - else: - prototype_pollution = 'N/A' - - if 'smuggling.txt' in ld: - with open(f"{vulns_path}/smuggling.txt") as f: - - urls = {'method': '', 'endpoint': '', 'cookies': ''} - - url = '' - - for line in f.readlines(): - if "[+] url" in line.lower(): - url = line.split(":", 1)[-1].replace("\n", "").replace(" ", "") - urls[url] ={} - - - if "[+] method" in line.lower(): - urls['method'] = line.split(":", 1)[-1].replace("\n", "").replace(" ", "") - - if "[+] endpoint" in line.lower(): - urls['endpoint'] = line.split(":", 1)[-1].replace("\n", "").replace(" ", "") - - - if "[+] cookies" in line.lower(): - urls['cookies'] = line.split(":", 1)[-1].replace("\n", "").replace(" ", "") - - - elif " ok " in line.lower() or "DISCONNECTED".lower() in line.lower(): - if line.replace(" ", "") != "": - var = line.split(" ")[0].replace(":", "").replace("[", "").replace("]", "").replace("\n", "") - urls[url][var] = line.split(":", 1)[-1].replace(" ", "", 1).replace("\n", "") - - - smuggling = str(json.dumps(urls)) - - f.close() - else: - smuggling = 'N/A' - - if 'webcache.txt' in ld: - with open(f"{vulns_path}/webcache.txt") as f: - webcache = f.read() - f.close() - else: - webcache = 'N/A' - - - vulns_save.create(brokenlinks=brokenlinks, xss=xss, cors=cors, redirect=redirect, - ssrf_requested_url=ssrf_requested_url, ssrf_requested_headers=ssrf_requested_headers, - ssrf_callback=ssrf_callback, crlf=crlf, lfi=lfi, ssti=ssti, testssl=testssl, - command_injection=command_injection, prototype_pollution=prototype_pollution, - smuggling=smuggling, webcache=webcache, project_id=project_id) - - - -def webs_uncommon_ports_f2db(project_id): - - webs_unc_p_save = 
WebsUncommonPorts.objects - project_obj = Project.objects.filter(pk=project_id) - - path = project_obj[0].command.split("'") - del path[0::2] - - file_path = f"{path[-1]}/{project_obj[0].domain}/webs/webs_uncommon_ports.txt" - - if Path(file_path).is_file(): - with open(file_path) as f: - wup = f.readlines() - - w = [] - for i in wup[:-1:]: - w.append(sub(r'https?:\/\/', '', i.strip())) - - w.sort() - keyf = lambda text: text.split(":")[0] - sorted_list = [list(items) for gr, items in groupby(sorted(w), key=keyf)] - - for s in sorted_list: - ports = [] - for sn in s: - ports.append(sn.split(':')[1]) - - host = s[0].split(':')[0] - webs_unc_p_save.create(host=host, ports=ports, project_id=project_id) - - - -def webdicts_f2db(project_id): - - webdicts_save = WebDicts.objects - project_obj = Project.objects.filter(pk=project_id) - - path = project_obj[0].command.split("'") - del path[0::2] - - webdicts_path = f"{path[-1]}/{project_obj[0].domain}/webs" - - if Path(webdicts_path).is_dir() and len(listdir(webdicts_path)) > 0: - - ld = listdir(webdicts_path) - - if 'dict_params.txt' in ld: - with open(f"{webdicts_path}/dict_params.txt") as f: - dict_params = [x[:-1] for x in f.readlines()] - f.close() - else: - dict_params = ['N/A'] - - if 'dict_values.txt' in ld: - with open(f"{webdicts_path}/dict_values.txt") as f: - dict_values = [x[:-1] for x in f.readlines()] - f.close() - else: - dict_values = ['N/A'] - - if 'dict_words.txt' in ld: - with open(f"{webdicts_path}/dict_words.txt") as f: - dict_words = [x[:-1] for x in f.readlines()] - f.close() - else: - dict_words = ['N/A'] - - if 'all_paths.txt' in ld: - with open(f"{webdicts_path}/all_paths.txt") as f: - all_paths = [x[:-1] for x in f.readlines()] - f.close() - else: - all_paths = ['N/A'] - - if 'password_dict.txt' in ld: - with open(f"{webdicts_path}/password_dict.txt") as f: - password_dict = f.read() - f.close() - else: - password_dict = 'N/A' - - - webdicts_save.create(dict_params=dict_params, dict_values=dict_values, dict_words=dict_words, - all_paths=all_paths, password_dict=password_dict, project_id=project_id) - - - -def urlextract_f2db(project_id): - - urlextract_save = URLExtract.objects - project_obj = Project.objects.filter(pk=project_id) - - path = project_obj[0].command.split("'") - del path[0::2] - - file_path = f"{path[-1]}/{project_obj[0].domain}/webs/url_extract.txt" - - if Path(file_path).is_file(): - with open(file_path) as f: - urle = f.read() - - urlextract_save.create(url_extract=urle, project_id=project_id) - - - -def urlextract_f2db(project_id): - - urlextract_save = URLExtract.objects - project_obj = Project.objects.filter(pk=project_id) - - path = project_obj[0].command.split("'") - del path[0::2] - - file_path = f"{path[-1]}/{project_obj[0].domain}/webs/url_extract.txt" - - if Path(file_path).is_file(): - with open(file_path) as f: - urle = f.read() - - urlextract_save.create(url_extract=urle, project_id=project_id) - - - -def cdnproviders_f2db(project_id): - - cdnprov_save = CDNProviders.objects - project_obj = Project.objects.filter(pk=project_id) - - path = project_obj[0].command.split("'") - del path[0::2] - - file_path = f"{path[-1]}/{project_obj[0].domain}/hosts/cdn_providers.txt" - - if Path(file_path).is_file(): - with open(file_path) as f: - cdnp = f.read() - - cdnprov_save.create(cdn_providers=cdnp, project_id=project_id) - - - -def jschecks_f2db(project_id): - - jschecks_save = JSChecks.objects - project_obj = Project.objects.filter(pk=project_id) - - path = project_obj[0].command.split("'") - del 
path[0::2] - - jschecks_path = f"{path[-1]}/{project_obj[0].domain}/js" - - if Path(jschecks_path).is_dir() and len(listdir(jschecks_path)) > 0: - - ld = listdir(jschecks_path) - - files = ['js_livelinks','url_extract_js','js_endpoints','js_secrets'] - - j = {} - - for s in files: - js_list = [] - if f'{s}.txt' in ld: - with open (f"{jschecks_path}/{s}.txt") as f: - raw_list = f.readlines() - - if 'secrets' not in s: - for i in raw_list: - js_list.append(i.strip()) - else: - for i in raw_list: - js_list.append(sub(r'\[', '', i.strip()).split('] ')) - - else: - js_list.append('N/A') - - j[f'{s}'] = js_list - - - jschecks_save.create(js_livelinks=j.get('js_livelinks','N/A'), url_extract_js=j.get('url_extract_js','N/A'), - js_endpoints=j.get('js_endpoints','N/A'), js_secrets=j.get('js_secrets','N/A'), project_id=project_id) - - - -def ipsinfos_f2db(project_id): - - ipsinfos_save = IPsInfos.objects - project_obj = Project.objects.filter(pk=project_id) - - path = project_obj[0].command.split("'") - del path[0::2] - - ipsinfos_path = f"{path[-1]}/{project_obj[0].domain}/osint" - - c1 = compile(r'ip_.*._relations\.txt') - c2 = compile(r'ip_.*.whois\.txt') - c3 = compile(r'ip_.*._location\.txt') - - if Path(ipsinfos_path).is_dir() and len(listdir(ipsinfos_path)) > 0: - - ld = listdir(ipsinfos_path) - - if any(c1.search(i) for i in ld): - with open(f"{ipsinfos_path}/ip_domain_relations.txt") as f: - ip_domain_relations = f.read() - f.close() - else: - ip_domain_relations = 'N/A' - - if any(c2.search(i) for i in ld): - with open(f"{ipsinfos_path}/ip_domain_whois.txt") as f: - ip_domain_whois = f.read() - f.close() - else: - ip_domain_whois = 'N/A' - - if any(c3.search(i) for i in ld): - with open(f"{ipsinfos_path}/ip_domain_location.txt") as f: - ip_domain_location = f.read() - f.close() - else: - ip_domain_location = 'N/A' - - ipsinfos_save.create(ip_domain_relations=ip_domain_relations, ip_domain_whois=ip_domain_whois, - ip_domain_location=ip_domain_location, project_id=project_id) - - - -def osintusersinfo_f2db(project_id): - - osintusers_save = OSINTUsersInfo.objects - project_obj = Project.objects.filter(pk=project_id) - - path = project_obj[0].command.split("'") - del path[0::2] - - osintusers_path = f"{path[-1]}/{project_obj[0].domain}/js" - - if Path(osintusers_path).is_dir() and len(listdir(osintusers_path)) > 0: - - ld = listdir(osintusers_path) - - if 'emails.txt' in ld: - with open(f"{osintusers_path}/emails.txt") as f: - emails = f.read() - f.close() - else: - emails = 'N/A' - - if 'users.txt' in ld: - with open(f"{osintusers_path}/users.txt") as f: - users = f.read() - f.close() - else: - users = 'N/A' - - if 'passwords.txt' in ld: - with open(f"{osintusers_path}/passwords.txt") as f: - passwords = f.read() - f.close() - else: - passwords = 'N/A' - - if 'employees.txt' in ld: - with open(f"{osintusers_path}/employees.txt") as f: - employees = f.read() - f.close() - else: - employees = 'N/A' - - if 'linkedin.txt' in ld: - with open(f"{osintusers_path}/linkedin.txt") as f: - linkedin = f.read() - f.close() - else: - linkedin = 'N/A' - - - osintusers_save.create(emails=emails, users=users, passwords=passwords, employees=employees, - linkedin=linkedin, project_id=project_id) - - - -def githubsecrets_f2db(project_id): - - githubsecrets_save = GithubCompanySecrets.objects - project_obj = Project.objects.filter(pk=project_id) - - path = project_obj[0].command.split("'") - del path[0::2] - - file_path = f"{path[-1]}/{project_obj[0].domain}/osint/github_company_secrets.json" - - if 
Path(file_path).is_file(): - with open(file_path) as f: - ghs = f.read() - - githubsecrets_save.create(github_secrets=ghs, project_id=project_id) - - - -def cms_f2db(project_id): - - cms_save = CMS.objects - project_obj = Project.objects.filter(pk=project_id) - - path = project_obj[0].command.split("'") - del path[0::2] - - cms_path = f"{path[-1]}/{project_obj[0].domain}/cms/" - - if Path(cms_path).is_dir() and len(listdir(cms_path)) > 0: - cms_files = listdir(cms_path) - - for s in cms_files: - with open(s) as f: - cms = f.read() - - cms_save.create(subdomain=s, cms=cms, project_id=project_id) - - else: - cms_save.create(subdomain='N/A', cms='N/A', project_id=project_id) - - - -def subdomains_context(project_id): - - subs_context = [] - - for s in SubdomainsDNS.objects.filter(project_id=project_id).order_by('host'): - j = {} - - subd = s.host - ipaddr = s.a_record - ports = [] - if WebsUncommonPorts.objects.filter(project_id=project_id, host=subd).exists(): - ports += literal_eval(WebsUncommonPorts.objects.filter(project_id=project_id, host=subd).values('ports').get()['ports']) - if WebFullInfo.objects.filter(project_id=project_id, url=subd).exists(): - ports.append(str(literal_eval(WebFullInfo.objects.filter(project_id=project_id, url=subd).values('port').first()['port']))) - if SubTakeover.objects.filter(project_id=project_id, subdomain=subd).exists(): - subtakeover = SubTakeover.objects.filter(project_id=project_id, subdomain=subd).values('type_takeover').get()['type_takeover'] - else: - subtakeover = 'NO' - - j['subdomain'] = subd - j['ip_address'] = ipaddr - j['ports'] = ports - j['subtakeover'] = subtakeover - - subs_context.append(j) - - return subs_context - - - -def screenshots_context(number): - ss = [] - for i in ScreenShots.objects.filter(project_id=number): - s = [] - s.append(i.hostname) - s.append(i.port) - s.append(b64encode(i.screenshot).decode('utf-8')) - if WebFullInfo.objects.filter(project_id=number, url=i.hostname, port=i.port).exists(): - s.append(literal_eval(WebFullInfo.objects.filter(project_id=number, url=i.hostname, port=i.port).values('technologies').get()['technologies'])[0]) - elif WebFullInfoUncommon.objects.filter(project_id=number, url=i.hostname, port=i.port).exists(): - s.append(literal_eval(WebFullInfoUncommon.objects.filter(project_id=number, url=i.hostname, port=i.port).values('tech').get()['tech'])[0]) - else: - s.append('N/A') - ss.append(s) - - return ss - - -def delete_results(project_id): - SubdomainsDNS.objects.filter(project_id=project_id).delete() - S3Buckets.objects.filter(project_id=project_id).delete() - WebFullInfo.objects.filter(project_id=project_id).delete() - CloudAssets.objects.filter(project_id=project_id).delete() - PortscanActive.objects.filter(project_id=project_id).delete() - PortscanPassive.objects.filter(project_id=project_id).delete() - GitDorks.objects.filter(project_id=project_id).delete() - Dorks.objects.filter(project_id=project_id).delete() - FuzzingFull.objects.filter(project_id=project_id).delete() - Subdomains.objects.filter(project_id=project_id).delete() - DomainInfoEmail.objects.filter(project_id=project_id).delete() - DomainInfoGeneral.objects.filter(project_id=project_id).delete() - DomainInfoIP.objects.filter(project_id=project_id).delete() - DomainInfoName.objects.filter(project_id=project_id).delete() - Emails.objects.filter(project_id=project_id).delete() - SoftwareInfo.objects.filter(project_id=project_id).delete() - AuthorsInfo.objects.filter(project_id=project_id).delete() - 
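The screenshot handling removed above has two halves: screenshots_f2db derives a hostname (and optional port) from the image filename, and screenshots_context base64-encodes the stored PNG bytes for the template. Below is a compact sketch of both transformations; the https-host-port.png naming convention is taken from the regex and rfind('-') logic in the deleted code, and the helper names are illustrative.

from base64 import b64encode
from re import sub


def hostname_from_screenshot(filename: str) -> str:
    """'https-example.com-8443.png' -> 'example.com:8443'; plain hosts pass through."""
    host = sub(r"https?-", "", filename.replace(".png", ""))
    i = host.rfind("-")
    if "-" in host and i > 0 and host[i + 1:].isnumeric():
        host = f"{host[:i]}:{host[i + 1:]}"
    return host


def encode_screenshot(png_bytes: bytes) -> str:
    """Base64 text the template can embed, e.g. inside an <img> data URI."""
    return b64encode(png_bytes).decode("utf-8")


print(hostname_from_screenshot("https-example.com-8443.png"))  # example.com:8443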
MetadataResults.objects.filter(project_id=project_id).delete() - Zonetransfer.objects.filter(project_id=project_id).delete() - Favicontest.objects.filter(project_id=project_id).delete() - SubTakeover.objects.filter(project_id=project_id).delete() - ScreenShots.objects.filter(project_id=project_id).delete() - WebProbes.objects.filter(project_id=project_id).delete() - WebFullInfoUncommon.objects.filter(project_id=project_id).delete() - WebWafs.objects.filter(project_id=project_id).delete() - NucleiOutputs.objects.filter(project_id=project_id).delete() - URLgf.objects.filter(project_id=project_id).delete() - Vulns.objects.filter(project_id=project_id).delete() - WebsUncommonPorts.objects.filter(project_id=project_id).delete() - WebDicts.objects.filter(project_id=project_id).delete() - URLExtract.objects.filter(project_id=project_id).delete() - CDNProviders.objects.filter(project_id=project_id).delete() - JSChecks.objects.filter(project_id=project_id).delete() - IPsInfos.objects.filter(project_id=project_id).delete() - OSINTUsersInfo.objects.filter(project_id=project_id).delete() - GithubCompanySecrets.objects.filter(project_id=project_id).delete() - CMS.objects.filter(project_id=project_id).delete() \ No newline at end of file diff --git a/web/scans/views.py b/web/scans/views.py deleted file mode 100644 index bc99cdb8..00000000 --- a/web/scans/views.py +++ /dev/null @@ -1,448 +0,0 @@ -# Create your views here. -from web.celery import app -from django.shortcuts import render,redirect -from django.contrib.auth.decorators import login_required -from projects.models import Project -from scans.models import * -import validators -from scans.tasks import * -from editprofile.imgUser import imgUser -import base64 -from ast import literal_eval -from json import loads - - -@login_required(login_url='/login/') -def index(request, number): - - - imagePath = imgUser(request.user.id) - - target = Project.objects.get(id=number) - scan_subdomains = SubdomainsDNS.objects.all() - - context = { - "imagePath": imagePath, - "title_domain_target": str(target).upper(), - "domain_target": str(target), - "scan_subdomains": scan_subdomains, - "status":target.status, - } - - command = str(target.command).split("'") - del command[0::2] - - type_scan = command[3] - - - if type_scan == '-r': # RECON - domain_info_general = DomainInfoGeneral.objects.filter(project_id=number).last() - context['domain_info_general'] = [] if domain_info_general == None else domain_info_general.domain_info_general.splitlines() - # context['domain_info_email'] = DomainInfoEmail.objects.filter(project_id=number).last() - # context['domain_info_ip'] = DomainInfoIP.objects.filter(project_id=number).last() - context['domain_info_name'] = DomainInfoName.objects.filter(project_id=number).last() - context['osintusersinfo'] = OSINTUsersInfo.objects.filter(project_id=number) - metadatas = MetadataResults.objects.filter(project_id=number).last() - context['metadata_results'] = [] if metadatas == None else metadatas.metadata_results.splitlines() - emails = Emails.objects.filter(project_id=number).last() - context['emails'] = [] if emails == None else emails.emails.splitlines() - context['google_dorks'] = Dorks.objects.filter(project_id=number).last() - git_dorks = GitDorks.objects.filter(project_id=number).last() - context['git_dorks'] = [] if git_dorks == None else git_dorks.git_dorks.splitlines() - software_infos = SoftwareInfo.objects.filter(project_id=number).last() - context['software_infos'] = "" if software_infos == None else 
software_infos.software_info - context['software_infos_count'] = len(context['software_infos'].splitlines()) - context['metadata_results_count'] = 0 - context['domain_info_general_count'] = 0 - context['google_dorks_count'] = 0 - context['git_dorks_count'] = 0 - context['osintusersinfouser_count'] = 0 - context['osintusersinfopassword_count'] = 0 - - for line in context['git_dorks']: - if "Too many errors, auto stop" not in git_dorks.git_dorks: - if line != "" and str(context['title_domain_target']).lower() in line.lower(): - context['git_dorks_count'] += 1 - - for line in [] if context['google_dorks'] == None else context['google_dorks'].dorks.splitlines(): - if line != "": - if "http" in line and line[0] != "#": - context['google_dorks_count'] += 1 - - for line in context['metadata_results']: - if "URL: " in line: - context['metadata_results_count'] += 1 - - for info in context['domain_info_general']: - if info != "": - if info[0] != "%" and info[0] != ";": - context['domain_info_general_count'] += 1 - - for info in context['osintusersinfo']: - if info.users != "": - context['osintusersinfouser_count'] += 1 - if info.passwords != "": - context['osintusersinfopassword_count'] += 1 - - context['authors_infos'] = AuthorsInfo.objects.filter(project_id=number).last() - context['zonetransfer'] = Zonetransfer.objects.filter(project_id=number).last() - context['favicontest'] = Favicontest.objects.filter(project_id=number).last() - context['subdomains_dns'] = SubdomainsDNS.objects.filter(project_id=number).order_by('host') - context['subdomains'] = Subdomains.objects.filter(project_id=number).last() - context['s3buckets'] = S3Buckets.objects.filter(project_id=number).last() - context['cloud_assets'] = CloudAssets.objects.filter(project_id=number) - context['web_probes'] = WebProbes.objects.filter(project_id=number).last() - context['web_uncommon_ports'] = WebsUncommonPorts.objects.filter(project_id=number).last() - context['screenshots'] = screenshots_context(number) - context['portscan_active'] = PortscanActive.objects.filter(project_id=number).last() - context['portscan_passive'] = PortscanPassive.objects.filter(project_id=number).last() - context['cdn_providers'] = CDNProviders.objects.filter(project_id=number).last() - context['web_wafs'] = WebWafs.objects.filter(project_id=number).last() - nuclei_outputs = NucleiOutputs.objects.filter(project_id=number).only('info', 'low', 'medium', 'high', 'critical').last() - context['nuclei_outputs_info'] = [['N/A']*5] if nuclei_outputs == None or 'N/A' in nuclei_outputs.info else literal_eval(nuclei_outputs.info) - context['nuclei_outputs_low'] = [['N/A']*5] if nuclei_outputs == None or 'N/A' in nuclei_outputs.low else literal_eval(nuclei_outputs.low) - context['nuclei_outputs_medium'] = [['N/A']*5] if nuclei_outputs == None or 'N/A' in nuclei_outputs.medium else literal_eval(nuclei_outputs.medium) - context['nuclei_outputs_high'] = [['N/A']*5] if nuclei_outputs == None or 'N/A' in nuclei_outputs.high else literal_eval(nuclei_outputs.high) - context['nuclei_outputs_critical'] = [['N/A']*5] if nuclei_outputs == None or 'N/A' in nuclei_outputs.critical else literal_eval(nuclei_outputs.critical) - # fuzzing_paths = FuzzingFull.objects.filter(project_id=number).values('fuzzing_full').last() - # context['fuzzing_full'] = [['N/A']*3] if fuzzing_paths == None or 'N/A' in fuzzing_paths.fuzzing_full else literal_eval(fuzzing_paths.fuzzing_full) - context['url_extract'] = URLExtract.objects.filter(project_id=number).last() - context['url_gf'] = 
URLgf.objects.filter(project_id=number).last() - jschecks = JSChecks.objects.filter(project_id=number).last() - context['js_checks_livelinks'] = ['N/A'] if jschecks == None or 'N/A' in jschecks.js_livelinks else literal_eval(jschecks.js_livelinks) - context['js_checks_url_extract_js'] = ['N/A'] if jschecks == None or 'N/A' in jschecks.url_extract_js else literal_eval(jschecks.url_extract_js) - context['js_checks_js_endpoints'] = ['N/A'] if jschecks == None or 'N/A' in jschecks.js_endpoints else literal_eval(jschecks.js_endpoints) - context['js_checks_js_secrets'] = [['N/A']*5] if jschecks == None or 'N/A' in jschecks.js_secrets else literal_eval(jschecks.js_secrets) - web_dicts = WebDicts.objects.filter(project_id=number).only('dict_params', 'dict_values', 'dict_words', 'all_paths', 'password_dict').last() - context['web_dicts_params'] = ['N/A'] if web_dicts == None or 'N/A' in web_dicts.dict_params else literal_eval(web_dicts.dict_params) - context['web_dicts_values'] = ['N/A'] if web_dicts == None or 'N/A' in web_dicts.dict_values else literal_eval(web_dicts.dict_values) - context['web_dicts_words'] = ['N/A'] if web_dicts == None or 'N/A' in web_dicts.dict_words else literal_eval(web_dicts.dict_words) - context['web_dicts_paths'] = ['N/A'] if web_dicts == None or 'N/A' in web_dicts.all_paths else literal_eval(web_dicts.all_paths) - context['web_dicts_passwords'] = 'N/A' if web_dicts == None or 'N/A' in web_dicts.password_dict else web_dicts.password_dict.splitlines() - context['cms_scanners'] = CMS.objects.filter(project_id=number) - context['subdomains_table'] = subdomains_context(project_id=number) - return render(request, "scans_recon.html", context) - - elif type_scan == '-s': # SUBDOMAINS - context['subdomains_dns'] = SubdomainsDNS.objects.filter(project_id=number).order_by('host') - context['subdomains'] = Subdomains.objects.filter(project_id=number).last() - context['s3buckets'] = S3Buckets.objects.filter(project_id=number).last() - context['cloud_assets'] = CloudAssets.objects.filter(project_id=number) - context['zonetransfer'] = Zonetransfer.objects.filter(project_id=number).last() - context['subdomain_takeover'] = SubTakeover.objects.filter(project_id=number).last() - context['web_probes'] = WebProbes.objects.filter(project_id=number).last() - context['web_uncommon_ports'] = WebsUncommonPorts.objects.filter(project_id=number) - context['screenshots'] = screenshots_context(number) - context['subdomains_table'] = subdomains_context(project_id=number) - return render(request, "scans_subdomains.html", context) - - elif type_scan == '-p': # PASSIVE - context['domain_info_email'] = DomainInfoEmail.objects.filter(project_id=number).last() - domain_info_general = DomainInfoGeneral.objects.filter(project_id=number).last() - context['domain_info_general'] = [] if domain_info_general == None else domain_info_general.domain_info_general.splitlines() - context['domain_info_ip'] = DomainInfoIP.objects.filter(project_id=number).last() - context['domain_info_name'] = DomainInfoName.objects.filter(project_id=number).last() - emails = Emails.objects.filter(project_id=number).last() - context['emails'] = [] if emails == None else emails.emails.splitlines() - context['google_dorks'] = Dorks.objects.filter(project_id=number).last() - git_dorks = GitDorks.objects.filter(project_id=number).last() - context['git_dorks'] = [] if git_dorks == None else git_dorks.git_dorks.splitlines() - software_infos = SoftwareInfo.objects.filter(project_id=number).last() - context['software_infos'] = "" if 
software_infos == None else software_infos.software_info - context['authors_infos'] = AuthorsInfo.objects.filter(project_id=number).last() - metadatas = MetadataResults.objects.filter(project_id=number).last() - context['metadata_results'] = [] if metadatas == None else metadatas.metadata_results.splitlines() - context['favicontest'] = Favicontest.objects.filter(project_id=number).last() - context['subdomains_dns'] = SubdomainsDNS.objects.filter(project_id=number) - context['subdomains'] = Subdomains.objects.filter(project_id=number).last() - context['portscan_passive'] = PortscanPassive.objects.filter(project_id=number).last() - cdn_providers = CDNProviders.objects.filter(project_id=number).last() - context['cdn_providers'] = [] if cdn_providers == None else cdn_providers.cdn_providers.splitlines() - context['osintusersinfo'] = OSINTUsersInfo.objects.filter(project_id=number) - context['software_infos_count'] = len(context['software_infos'].splitlines()) - context['metadata_results_count'] = 0 - context['domain_info_general_count'] = 0 - context['google_dorks_count'] = 0 - context['git_dorks_count'] = 0 - context['osintusersinfouser_count'] = 0 - context['osintusersinfopassword_count'] = 0 - - for line in context['git_dorks']: - if "Too many errors, auto stop" not in git_dorks.git_dorks: - if line != "" and str(context['title_domain_target']).lower() in line.lower(): - context['git_dorks_count'] += 1 - - for line in [] if context['google_dorks'] == None else context['google_dorks'].dorks.splitlines(): - if line != "": - if "http" in line and line[0] != "#": - context['google_dorks_count'] += 1 - - for line in context['metadata_results']: - if "URL: " in line: - context['metadata_results_count'] += 1 - - for info in context['domain_info_general']: - if info != "": - if info[0] != "%" and info[0] != ";": - context['domain_info_general_count'] += 1 - - for info in context['osintusersinfo']: - if info.users != "": - context['osintusersinfouser_count'] += 1 - if info.passwords != "": - context['osintusersinfopassword_count'] += 1 - context['subdomains_table'] = subdomains_context(project_id=number) - return render(request, "scans_passive.html", context) - - elif type_scan == '-w': # WEB - context['s3buckets'] = S3Buckets.objects.filter(project_id=number).last() - context['cloud_assets'] = CloudAssets.objects.filter(project_id=number) - context['subdomain_takeover'] = SubTakeover.objects.filter(project_id=number).last() - context['web_wafs'] = WebWafs.objects.filter(project_id=number).last() - nuclei_outputs = NucleiOutputs.objects.filter(project_id=number).only('info', 'low', 'medium', 'high', 'critical').last() - context['nuclei_outputs_info'] = [['N/A']*5] if nuclei_outputs == None or 'N/A' in nuclei_outputs.info else literal_eval(nuclei_outputs.info) - context['nuclei_outputs_low'] = [['N/A']*5] if nuclei_outputs == None or 'N/A' in nuclei_outputs.low else literal_eval(nuclei_outputs.low) - context['nuclei_outputs_medium'] = [['N/A']*5] if nuclei_outputs == None or 'N/A' in nuclei_outputs.medium else literal_eval(nuclei_outputs.medium) - context['nuclei_outputs_high'] = [['N/A']*5] if nuclei_outputs == None or 'N/A' in nuclei_outputs.high else literal_eval(nuclei_outputs.high) - context['nuclei_outputs_critical'] = [['N/A']*5] if nuclei_outputs == None or 'N/A' in nuclei_outputs.critical else literal_eval(nuclei_outputs.critical) - context['cms_scanners'] = CMS.objects.filter(project_id=number) - # fuzzing_paths = FuzzingFull.objects.filter(project_id=number).values('fuzzing_full').last() - 
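The dork, metadata and WHOIS counters above are duplicated verbatim in the -r, -p and -a branches of index(). The same counting rules can be read as a few small pure functions (illustrative names, identical logic to the removed loops):

def count_git_dorks(lines, target):
    """Lines mentioning the target domain, unless the tool aborted early."""
    if any("Too many errors, auto stop" in line for line in lines):
        return 0
    return sum(1 for line in lines if line and target.lower() in line.lower())


def count_google_dorks(lines):
    """Non-comment dork lines that actually carry a URL."""
    return sum(1 for line in lines if line and "http" in line and line[0] != "#")


def count_metadata_urls(lines):
    return sum(1 for line in lines if "URL: " in line)


def count_whois_lines(lines):
    """Non-empty WHOIS lines that are not '%' or ';' comments."""
    return sum(1 for line in lines if line and line[0] not in ("%", ";"))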
# context['fuzzing_full'] = [['N/A']*3] if fuzzing_paths == None or 'N/A' in fuzzing_paths.fuzzing_full else literal_eval(fuzzing_paths.fuzzing_full) - context['url_extract'] = URLExtract.objects.filter(project_id=number).values("url_extract").last() - context['url_gf'] = URLgf.objects.filter(project_id=number).last() - jschecks = JSChecks.objects.filter(project_id=number).last() - context['js_checks_livelinks'] = ['N/A'] if jschecks == None or 'N/A' in jschecks.js_livelinks else literal_eval(jschecks.js_livelinks) - context['js_checks_url_extract_js'] = ['N/A'] if jschecks == None or 'N/A' in jschecks.url_extract_js else literal_eval(jschecks.url_extract_js) - context['js_checks_js_endpoints'] = ['N/A'] if jschecks == None or 'N/A' in jschecks.js_endpoints else literal_eval(jschecks.js_endpoints) - context['js_checks_js_secrets'] = [['N/A']*5] if jschecks == None or 'N/A' in jschecks.js_secrets else literal_eval(jschecks.js_secrets) - web_dicts = WebDicts.objects.filter(project_id=number).only('dict_params', 'dict_values', 'dict_words', 'all_paths', 'password_dict').last() - context['web_dicts_params'] = ['N/A'] if web_dicts == None or 'N/A' in web_dicts.dict_params else literal_eval(web_dicts.dict_params) - context['web_dicts_values'] = ['N/A'] if web_dicts == None or 'N/A' in web_dicts.dict_values else literal_eval(web_dicts.dict_values) - context['web_dicts_words'] = ['N/A'] if web_dicts == None or 'N/A' in web_dicts.dict_words else literal_eval(web_dicts.dict_words) - context['web_dicts_paths'] = ['N/A'] if web_dicts == None or 'N/A' in web_dicts.all_paths else literal_eval(web_dicts.all_paths) - context['web_dicts_passwords'] = 'N/A' if web_dicts == None or 'N/A' in web_dicts.password_dict else web_dicts.password_dict.splitlines() - vulns = Vulns.objects.filter(project_id=number).last() - context['redirect'] = ["N/A"] if vulns == None else vulns.redirect.splitlines() - context['crlf'] = ["N/A"] if vulns == None else vulns.crlf.splitlines() - context['xss'] = ["N/A"] if vulns == None else vulns.xss.splitlines() - context['lfi'] = ["N/A"] if vulns == None else vulns.lfi.splitlines() - context['ssrf'] = ["N/A"] if vulns == None else vulns.ssrf_requested_url.splitlines() - context['ssti'] = ["N/A"] if vulns == None else vulns.ssti.splitlines() - context['cors'] = ["N/A"] if vulns == None else loads(vulns.cors) - context['command_injection'] = ["N/A"] if vulns == None else vulns.command_injection.splitlines() - smuggling = {} if vulns == None else loads(vulns.smuggling) - context['smuggling_Method'] = smuggling['method'] if "method" in smuggling else "N/A" - context['smuggling_Endpoint'] = smuggling['endpoint'] if "endpoint" in smuggling else "N/A" - context['smuggling_Cookies'] = smuggling['cookies'] if "cookies" in smuggling else "N/A" - if "method" in smuggling: smuggling.pop("method") - if "endpoint" in smuggling: smuggling.pop("endpoint") - if "cookies" in smuggling: smuggling.pop("cookies") - context['smuggling'] = [2*"N/A"] if vulns == None else smuggling - context['brokenlinks'] = ["N/A"] if vulns == None else literal_eval(vulns.brokenlinks) - return render(request, "scans_web.html", context) - - elif type_scan == '-n': # OSINT - context['domain_info_email'] = DomainInfoEmail.objects.filter(project_id=number).last() - context['domain_info_general'] = DomainInfoGeneral.objects.filter(project_id=number).last() - context['domain_info_ip'] = DomainInfoIP.objects.filter(project_id=number).last() - context['domain_info_name'] = DomainInfoName.objects.filter(project_id=number).last() - 
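The smuggling field unpacked above is the JSON blob that vulns_f2db builds from smuggling.txt: top-level method/endpoint/cookies keys plus one nested dict of mutation results per URL, which the view then pops apart. Here is a self-contained sketch of that parser, assuming smuggler-style output with "[+] URL/Method/Endpoint/Cookies" header lines followed by per-mutation status lines; it mirrors the removed code, not an official output specification.

import json


def parse_smuggling(lines):
    results = {"method": "", "endpoint": "", "cookies": ""}
    url = ""
    for line in lines:
        lower = line.lower()
        if "[+] url" in lower:
            url = line.split(":", 1)[-1].replace("\n", "").replace(" ", "")
            results[url] = {}
        elif "[+] method" in lower:
            results["method"] = line.split(":", 1)[-1].replace("\n", "").replace(" ", "")
        elif "[+] endpoint" in lower:
            results["endpoint"] = line.split(":", 1)[-1].replace("\n", "").replace(" ", "")
        elif "[+] cookies" in lower:
            results["cookies"] = line.split(":", 1)[-1].replace("\n", "").replace(" ", "")
        elif (" ok " in lower or "disconnected" in lower) and line.strip():
            # e.g. "[CL.TE]: OK (...)": key is the mutation name, value the status text
            key = line.split(" ")[0].replace(":", "").strip("[]").replace("\n", "")
            results.setdefault(url, {})[key] = line.split(":", 1)[-1].replace(" ", "", 1).replace("\n", "")
    return json.dumps(results)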
context['ips_infos'] = IPsInfos.objects.filter(project_id=number).last() - context['emails'] = Emails.objects.filter(project_id=number).last() - context['google_dorks'] = Dorks.objects.filter(project_id=number).last() - context['git_dorks'] = GitDorks.objects.filter(project_id=number).last() - metadatas = MetadataResults.objects.filter(project_id=number).last() - context['metadata_results'] = [] if metadatas == None else metadatas.metadata_results.splitlines() - context['zonetransfer'] = Zonetransfer.objects.filter(project_id=number).last() - context['favicontest'] = Favicontest.objects.filter(project_id=number).last() - return render(request, "scans_osint.html", context) - - elif type_scan == '-a': # ALL - domain_info_general = DomainInfoGeneral.objects.filter(project_id=number).last() - software_infos = SoftwareInfo.objects.filter(project_id=number).last() - emails = Emails.objects.filter(project_id=number).last() - metadatas = MetadataResults.objects.filter(project_id=number).last() - git_dorks = GitDorks.objects.filter(project_id=number).last() - vulns = Vulns.objects.filter(project_id=number).last() - #not in use # context['domain_info_email'] = DomainInfoEmail.objects.filter(project_id=number).last() - context['domain_info_general'] = [] if domain_info_general == None else domain_info_general.domain_info_general.splitlines() - #not in use # context['domain_info_ip'] = DomainInfoIP.objects.filter(project_id=number).last() - context['osintusersinfo'] = OSINTUsersInfo.objects.filter(project_id=number) - context['domain_info_name'] = DomainInfoName.objects.filter(project_id=number).last() - context['emails'] = [] if emails == None else emails.emails.splitlines() - context['google_dorks'] = Dorks.objects.filter(project_id=number).last() - context['git_dorks'] = [] if git_dorks == None else git_dorks.git_dorks.splitlines() - context['software_infos'] = "" if software_infos == None else software_infos.software_info - context['authors_infos'] = AuthorsInfo.objects.filter(project_id=number).last() - context['metadata_results'] = [] if metadatas == None else metadatas.metadata_results.splitlines() - context['zonetransfer'] = Zonetransfer.objects.filter(project_id=number).last() - #not in use # context['favicontest'] = Favicontest.objects.filter(project_id=number).last() - context['subdomains_dns'] = SubdomainsDNS.objects.filter(project_id=number).order_by('host') - context['subdomains'] = Subdomains.objects.filter(project_id=number).last() - context['s3buckets'] = S3Buckets.objects.filter(project_id=number).last() - context['cloud_assets'] = CloudAssets.objects.filter(project_id=number) - #not in use # context['web_probes'] = WebProbes.objects.filter(project_id=number).last() - context['redirect'] = ["N/A"] if vulns == None else vulns.redirect.splitlines() - context['crlf'] = ["N/A"] if vulns == None else vulns.crlf.splitlines() - context['xss'] = ["N/A"] if vulns == None else vulns.xss.splitlines() - context['lfi'] = ["N/A"] if vulns == None else vulns.lfi.splitlines() - context['ssrf'] = ["N/A"] if vulns == None else vulns.ssrf_requested_url.splitlines() - context['ssti'] = ["N/A"] if vulns == None else vulns.ssti.splitlines() - context['cors'] = ["N/A"] if vulns == None else loads(vulns.cors) - context['command_injection'] = ["N/A"] if vulns == None else vulns.command_injection.splitlines() - smuggling = {} if vulns == None else loads(vulns.smuggling) - context['smuggling_Method'] = smuggling['method'] if "method" in smuggling else "N/A" - context['smuggling_Endpoint'] = 
smuggling['endpoint'] if "endpoint" in smuggling else "N/A" - context['smuggling_Cookies'] = smuggling['cookies'] if "cookies" in smuggling else "N/A" - if "method" in smuggling: smuggling.pop("method") - if "endpoint" in smuggling: smuggling.pop("endpoint") - if "cookies" in smuggling: smuggling.pop("cookies") - context['smuggling'] = [2*"N/A"] if vulns == None else smuggling - context['brokenlinks'] = ["N/A"] if vulns == None else literal_eval(vulns.brokenlinks) - context['software_infos_count'] = len(context['software_infos'].splitlines()) - context['metadata_results_count'] = 0 - context['domain_info_general_count'] = 0 - context['google_dorks_count'] = 0 - context['git_dorks_count'] = 0 - context['osintusersinfouser_count'] = 0 - context['osintusersinfopassword_count'] = 0 - - for line in context['git_dorks']: - if "Too many errors, auto stop" not in git_dorks.git_dorks: - if line != "" and str(context['title_domain_target']).lower() in line.lower(): - context['git_dorks_count'] += 1 - - for line in [] if context['google_dorks'] == None else context['google_dorks'].dorks.splitlines(): - if line != "": - if "http" in line and line[0] != "#": - context['google_dorks_count'] += 1 - - for line in context['metadata_results']: - if "URL: " in line: - context['metadata_results_count'] += 1 - - for info in context['domain_info_general']: - if info != "": - if info[0] != "%" and info[0] != ";": - context['domain_info_general_count'] += 1 - - for info in context['osintusersinfo']: - if info.users != "": - context['osintusersinfouser_count'] += 1 - if info.passwords != "": - context['osintusersinfopassword_count'] += 1 - - context['screenshots'] = screenshots_context(number) - context['portscan_active'] = PortscanActive.objects.filter(project_id=number).last() - context['portscan_passive'] = PortscanPassive.objects.filter(project_id=number).last() - context['cdn_providers'] = CDNProviders.objects.filter(project_id=number).last() - context['web_wafs'] = WebWafs.objects.filter(project_id=number).last() - nuclei_outputs = NucleiOutputs.objects.filter(project_id=number).only('info', 'low', 'medium', 'high', 'critical').last() - context['nuclei_outputs_info'] = [['N/A']*5] if nuclei_outputs == None or 'N/A' in nuclei_outputs.info else literal_eval(nuclei_outputs.info) - context['nuclei_outputs_low'] = [['N/A']*5] if nuclei_outputs == None or 'N/A' in nuclei_outputs.low else literal_eval(nuclei_outputs.low) - context['nuclei_outputs_medium'] = [['N/A']*5] if nuclei_outputs == None or 'N/A' in nuclei_outputs.medium else literal_eval(nuclei_outputs.medium) - context['nuclei_outputs_high'] = [['N/A']*5] if nuclei_outputs == None or 'N/A' in nuclei_outputs.high else literal_eval(nuclei_outputs.high) - context['nuclei_outputs_critical'] = [['N/A']*5] if nuclei_outputs == None or 'N/A' in nuclei_outputs.critical else literal_eval(nuclei_outputs.critical) - # fuzzing_paths = FuzzingFull.objects.filter(project_id=number).values('fuzzing_full').last() - #context['fuzzing_full'] = [['N/A']*3] if fuzzing_paths == None or 'N/A' in fuzzing_paths.fuzzing_full else literal_eval(fuzzing_paths.fuzzing_full) - context['url_extract'] = URLExtract.objects.filter(project_id=number).values("url_extract").last() - #context['url_gf'] = URLgf.objects.filter(project_id=number).last() - jschecks = JSChecks.objects.filter(project_id=number).last() - context['js_checks_livelinks'] = ['N/A'] if jschecks == None or 'N/A' in jschecks.js_livelinks else literal_eval(jschecks.js_livelinks) - context['js_checks_url_extract_js'] = 
['N/A'] if jschecks == None or 'N/A' in jschecks.url_extract_js else literal_eval(jschecks.url_extract_js) - context['js_checks_js_endpoints'] = ['N/A'] if jschecks == None or 'N/A' in jschecks.js_endpoints else literal_eval(jschecks.js_endpoints) - context['js_checks_js_secrets'] = [['N/A']*5] if jschecks == None or 'N/A' in jschecks.js_secrets else literal_eval(jschecks.js_secrets) - web_dicts = WebDicts.objects.filter(project_id=number).only('dict_params', 'dict_values', 'dict_words', 'all_paths', 'password_dict').last() - context['web_dicts_params'] = ['N/A'] if web_dicts == None or 'N/A' in web_dicts.dict_params else literal_eval(web_dicts.dict_params) - context['web_dicts_values'] = ['N/A'] if web_dicts == None or 'N/A' in web_dicts.dict_values else literal_eval(web_dicts.dict_values) - context['web_dicts_words'] = ['N/A'] if web_dicts == None or 'N/A' in web_dicts.dict_words else literal_eval(web_dicts.dict_words) - context['web_dicts_paths'] = ['N/A'] if web_dicts == None or 'N/A' in web_dicts.all_paths else literal_eval(web_dicts.all_paths) - context['web_dicts_passwords'] = 'N/A' if web_dicts == None or 'N/A' in web_dicts.password_dict else web_dicts.password_dict.splitlines() - context['cms_scanners'] = CMS.objects.filter(project_id=number) - context['subdomains_table'] = subdomains_context(project_id=number) - return render(request, "scans.html", context) - - return render(request, "scans.html", context) - - -@login_required(login_url='/login/') -def new_scan(request): - ''' - type_domain = 0 -> single domain scan - type_domain = 1 -> list domain scan - ''' - if request.method == "POST": - type_domain = request.POST.get('typeDomain') - - if type_domain == "0": - single_domain = request.POST.get('singleDomain') - print("Single Domain") - - if validators.domain(single_domain): - command = ['../reconftw.sh','-d',single_domain] - - req_params = list(request.POST) - - # MODE OPTIONS - if req_params[4] == 'switch-recon': - command.append('-r') - elif req_params[4] == 'switch-subdomains': - command.append('-s') - elif req_params[4] == 'switch-passive': - command.append('-p') - elif req_params[4] == 'switch-all': - command.append('-a') - elif req_params[4] == 'switch-web': - command.append('-w') - elif req_params[4] == 'switch-osint': - command.append('-n') - - # GENERAL OPTIONS - if 'switch-deep' in req_params: - command.append('--deep') - if 'switch-vps' in req_params: - command.append('-v') - - # RUN new_scan_single_domain TASK - print("=====>>>> about to run new_scan_single_domain") - celery_task = new_scan_single_domain.apply_async(command, queue="default") - - - elif type_domain == "1": - list_domain = request.POST.get('listDomain') - print("List Domain") - list_domain = list(map(str.strip, list_domain.split("\n"))) - - for single_domain in list_domain: - if validators.domain(single_domain): - command = ['../reconftw.sh','-d',single_domain] - - req_params = list(request.POST) - - # MODE OPTIONS - if req_params[4] == 'switch-recon': - command.append('-r') - elif req_params[4] == 'switch-subdomains': - command.append('-s') - elif req_params[4] == 'switch-passive': - command.append('-p') - elif req_params[4] == 'switch-all': - command.append('-a') - elif req_params[4] == 'switch-web': - command.append('-w') - elif req_params[4] == 'switch-osint': - command.append('-n') - - # GENERAL OPTIONS - if 'switch-deep' in req_params: - command.append('--deep') - if 'switch-vps' in req_params: - command.append('-v') - - # RUN new_scan_single_domain TASK - print("=====>>>> about to run 
new_scan_single_domain") - celery_task = new_scan_single_domain.apply_async(command, queue="default") - - else: - print("Wrong!!") - - return redirect('projects:index') diff --git a/web/schedules/__init__.py b/web/schedules/__init__.py deleted file mode 100644 index e69de29b..00000000 diff --git a/web/schedules/__pycache__/__init__.cpython-310.pyc b/web/schedules/__pycache__/__init__.cpython-310.pyc deleted file mode 100644 index 7ff027593da67046141f850e2fafbb71306218ca..0000000000000000000000000000000000000000 GIT binary patch [compiled .pyc contents omitted] diff --git a/web/schedules/__pycache__/urls.cpython-310.pyc b/web/schedules/__pycache__/urls.cpython-310.pyc deleted file mode 100644 index 14bb3671b64d413adb720ed2f09c55fdbabdca92..0000000000000000000000000000000000000000 GIT binary patch [compiled .pyc contents omitted] diff --git a/web/schedules/__pycache__/views.cpython-310.pyc b/web/schedules/__pycache__/views.cpython-310.pyc deleted file mode 100644 index fdc46532ff3300f367e6f33ce7009b172eb4246a..0000000000000000000000000000000000000000 GIT binary patch [compiled .pyc contents omitted] diff --git a/web/schedules/admin.py b/web/schedules/admin.py deleted file mode 100644 index 8c38f3f3..00000000 --- a/web/schedules/admin.py +++ /dev/null @@ -1,3 +0,0 @@ -from django.contrib import admin - -# Register your models here.
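Stepping back to the new_scan view deleted just above: it translates the submitted form into the reconftw.sh argument list handed to the new_scan_single_domain Celery task. A trimmed-down sketch of that mapping follows; build_command and its keyword arguments are illustrative, while the switch-* form field names and the flags themselves come from the removed view.

import validators  # third-party "validators" package, as imported by the removed view

MODE_FLAGS = {
    "switch-recon": "-r",
    "switch-subdomains": "-s",
    "switch-passive": "-p",
    "switch-all": "-a",
    "switch-web": "-w",
    "switch-osint": "-n",
}


def build_command(domain: str, mode_switch: str, deep: bool = False, vps: bool = False):
    """Return the argv list passed to the new_scan_single_domain Celery task."""
    if not validators.domain(domain):
        raise ValueError(f"not a valid domain: {domain}")
    command = ["../reconftw.sh", "-d", domain, MODE_FLAGS[mode_switch]]
    if deep:
        command.append("--deep")
    if vps:
        command.append("-v")
    return command


# build_command("example.com", "switch-recon", deep=True)
# -> ['../reconftw.sh', '-d', 'example.com', '-r', '--deep']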
diff --git a/web/schedules/apps.py b/web/schedules/apps.py
deleted file mode 100644
index 8020b121..00000000
--- a/web/schedules/apps.py
+++ /dev/null
@@ -1,6 +0,0 @@
-from django.apps import AppConfig
-
-
-class SchedulesConfig(AppConfig):
-    default_auto_field = 'django.db.models.BigAutoField'
-    name = 'schedules'
diff --git a/web/schedules/migrations/__init__.py b/web/schedules/migrations/__init__.py
deleted file mode 100644
index e69de29b..00000000
diff --git a/web/schedules/models.py b/web/schedules/models.py
deleted file mode 100644
index 71a83623..00000000
--- a/web/schedules/models.py
+++ /dev/null
@@ -1,3 +0,0 @@
-from django.db import models
-
-# Create your models here.
diff --git a/web/schedules/urls.py b/web/schedules/urls.py
deleted file mode 100644
index 6e129cff..00000000
--- a/web/schedules/urls.py
+++ /dev/null
@@ -1,13 +0,0 @@
-from django.urls import path
-from . import views
-
-# Namespace name
-app_name = 'schedules'
-
-# Be careful setting the name to just /login use userlogin instead!
-urlpatterns=[
-    path('timezone', views.define_timezone, name='timezone'),
-    path('new', views.schedule_scan, name='new'),
-    path('get', views.getSchedules, name='get'),
-    path('delete', views.deleteSchedule, name='delete'),
-]
diff --git a/web/schedules/views.py b/web/schedules/views.py
deleted file mode 100644
index 6d5e655f..00000000
--- a/web/schedules/views.py
+++ /dev/null
@@ -1,138 +0,0 @@
-import json
-from django.shortcuts import render,redirect
-from django.contrib.auth.decorators import login_required
-import json
-from django.http import HttpResponse
-# Create your views here.
-from web.settings import TIME_ZONE
-import urllib
-from pathlib import Path
-from datetime import datetime
-
-from projects.models import Project
-from django_celery_beat.models import CrontabSchedule, PeriodicTask
-
-
-def timezone():
-    timezones = {'Etc/GMT+12': '(GMT-12:00) International Date Line West', 'Pacific/Midway': '(GMT-11:00) Midway Island, Samoa', 'Pacific/Honolulu': '(GMT-10:00) Hawaii', 'US/Alaska': '(GMT-09:00) Alaska', 'America/Los_Angeles': '(GMT-08:00) Pacific Time (US & Canada)', 'America/Tijuana': '(GMT-08:00) Tijuana, Baja California', 'US/Arizona': '(GMT-07:00) Arizona', 'America/Chihuahua': '(GMT-07:00) Chihuahua, La Paz, Mazatlan', 'US/Mountain': '(GMT-07:00) Mountain Time (US & Canada)', 'America/Managua': '(GMT-06:00) Central America', 'US/Central': '(GMT-06:00) Central Time (US & Canada)', 'America/Mexico_City': '(GMT-06:00) Guadalajara, Mexico City, Monterrey', 'Canada/Saskatchewan': '(GMT-06:00) Saskatchewan', 'America/Bogota': '(GMT-05:00) Bogota, Lima, Quito, Rio Branco', 'US/Eastern': '(GMT-05:00) Eastern Time (US & Canada)', 'US/East-Indiana': '(GMT-05:00) Indiana (East)', 'Canada/Atlantic': '(GMT-04:00) Atlantic Time (Canada)', 'America/Caracas': '(GMT-04:00) Caracas, La Paz', 'America/Manaus': '(GMT-04:00) Manaus', 'America/Santiago': '(GMT-04:00) Santiago', 'Canada/Newfoundland': '(GMT-03:30) Newfoundland', 'America/Sao_Paulo': '(GMT-03:00) Brasilia', 'America/Argentina/Buenos_Aires': '(GMT-03:00) Buenos Aires, Georgetown', 'America/Godthab': '(GMT-03:00) Greenland', 'America/Montevideo': '(GMT-03:00) Montevideo', 'America/Noronha': '(GMT-02:00) Mid-Atlantic', 'Atlantic/Cape_Verde': '(GMT-01:00) Cape Verde Is.', 'Atlantic/Azores': '(GMT-01:00) Azores', 'Africa/Casablanca': '(GMT+00:00) Casablanca, Monrovia, Reykjavik', 'Etc/Greenwich': '(GMT+00:00) Greenwich Mean Time : Dublin, Edinburgh, Lisbon, London', 'Europe/Amsterdam': '(GMT+01:00) Amsterdam, Berlin, Bern, Rome, Stockholm, Vienna', 'Europe/Belgrade': '(GMT+01:00) Belgrade, Bratislava, Budapest, Ljubljana, Prague', 'Europe/Brussels': '(GMT+01:00) Brussels, Copenhagen, Madrid, Paris', 'Europe/Sarajevo': '(GMT+01:00) Sarajevo, Skopje, Warsaw, Zagreb', 'Africa/Lagos': '(GMT+01:00) West Central Africa', 'Asia/Amman': '(GMT+02:00) Amman', 'Europe/Athens': '(GMT+02:00) Athens, Bucharest, Istanbul', 'Asia/Beirut': '(GMT+02:00) Beirut', 'Africa/Cairo': '(GMT+02:00) Cairo', 'Africa/Harare': '(GMT+02:00) Harare, Pretoria', 'Europe/Helsinki': '(GMT+02:00) Helsinki, Kyiv, Riga, Sofia, Tallinn, Vilnius', 'Asia/Jerusalem': '(GMT+02:00) Jerusalem', 'Europe/Minsk': '(GMT+02:00) Minsk', 'Africa/Windhoek': '(GMT+02:00) Windhoek', 'Asia/Kuwait': '(GMT+03:00) Kuwait, Riyadh, Baghdad', 'Europe/Moscow': '(GMT+03:00) Moscow, St. Petersburg, Volgograd', 'Africa/Nairobi': '(GMT+03:00) Nairobi', 'Asia/Tbilisi': '(GMT+03:00) Tbilisi', 'Asia/Tehran': '(GMT+03:30) Tehran', 'Asia/Muscat': '(GMT+04:00) Abu Dhabi, Muscat', 'Asia/Baku': '(GMT+04:00) Baku', 'Asia/Yerevan': '(GMT+04:00) Yerevan', 'Asia/Kabul': '(GMT+04:30) Kabul', 'Asia/Yekaterinburg': '(GMT+05:00) Yekaterinburg', 'Asia/Karachi': '(GMT+05:00) Islamabad, Karachi, Tashkent', 'Asia/Calcutta': '(GMT+05:30) Sri Jayawardenapura', 'Asia/Katmandu': '(GMT+05:45) Kathmandu', 'Asia/Almaty': '(GMT+06:00) Almaty, Novosibirsk', 'Asia/Dhaka': '(GMT+06:00) Astana, Dhaka', 'Asia/Rangoon': '(GMT+06:30) Yangon (Rangoon)', 'Asia/Bangkok': '(GMT+07:00) Bangkok, Hanoi, Jakarta', 'Asia/Krasnoyarsk': '(GMT+07:00) Krasnoyarsk', 'Asia/Hong_Kong': '(GMT+08:00) Beijing, Chongqing, Hong Kong, Urumqi', 'Asia/Kuala_Lumpur': '(GMT+08:00) Kuala Lumpur, Singapore', 'Asia/Irkutsk': '(GMT+08:00) Irkutsk, Ulaan Bataar', 'Australia/Perth': '(GMT+08:00) Perth', 'Asia/Taipei': '(GMT+08:00) Taipei', 'Asia/Tokyo': '(GMT+09:00) Osaka, Sapporo, Tokyo', 'Asia/Seoul': '(GMT+09:00) Seoul', 'Asia/Yakutsk': '(GMT+09:00) Yakutsk', 'Australia/Adelaide': '(GMT+09:30) Adelaide', 'Australia/Darwin': '(GMT+09:30) Darwin', 'Australia/Brisbane': '(GMT+10:00) Brisbane', 'Australia/Canberra': '(GMT+10:00) Canberra, Melbourne, Sydney', 'Australia/Hobart': '(GMT+10:00) Hobart', 'Pacific/Guam': '(GMT+10:00) Guam, Port Moresby', 'Asia/Vladivostok': '(GMT+10:00) Vladivostok', 'Asia/Magadan': '(GMT+11:00) Magadan, Solomon Is., New Caledonia', 'Pacific/Auckland': '(GMT+12:00) Auckland, Wellington', 'Pacific/Fiji': '(GMT+12:00) Fiji, Kamchatka, Marshall Is.', 'Pacific/Tongatapu': "(GMT+13:00) Nuku'alofa"}
-
-    response = []
-
-    for item in timezones:
-        if item == TIME_ZONE:
-            response.append([urllib.parse.quote_plus(item), timezones[item], 'true'])
-        else:
-            response.append([urllib.parse.quote_plus(item), timezones[item], ''])
-
-    return response
-
-
-@login_required
-def define_timezone(request):
-
-    if request.method == "POST":
-        post = request.POST
-
-        file = Path('web/settings.py')
-        file.write_text(file.read_text().replace(TIME_ZONE, post['timezone_offset'], 1))
-
-
-    return redirect('projects:index')
-
-@login_required
-def schedule_scan(request):
-
-    if request.method == "POST":
-
-        post = request.POST
-
-        days = ["Monday", "Tuesday", "Wednesday", "Thursday", "Friday", "Saturday", "Sunday"]
-        daylist = ""
-
-        for item in days:
-            if item in post:
-                daylist += item+", "
-
-        daylist = daylist[:-2]
-
-
-        id = post['id']
-        hours = post["hours"] if post["hours"] != "" else "00"
-        minutes = post["minutes"] if post["minutes"] != "" else "00"
-        nome = Project.objects.get(id=id).domain
-        command = Project.objects.get(id=id).command.split("'")
-        del command[0::2]
-
-
-        if len(daylist) != 0:
-            schedule = CrontabSchedule.objects.create(
-                hour=hours,
-                minute=minutes,
-                day_of_week=daylist
-            )
-
-            task = PeriodicTask.objects.create(
-                name=id+"-"+str(datetime.now()),
-                task='new_scan_single_domain',
-                crontab=schedule,
-                args=json.dumps(command)
-            )
-
-            task.enabled = True
-            task.save()
-
-
-    return redirect('projects:index')
-
-@login_required
-def getSchedules(request):
-
-    reqid = request.POST["projectId"]
-
-    get = PeriodicTask.objects.all()
-
-    schedules = {}
-
-    for item in get:
-        if item.name != "":
-            if item.name.split("-", 1)[0] == reqid:
-                if item.id not in schedules:
-                    schedules[item.id] = {}
-
-                schedules[item.id]["name"] = item.name
-                schedules[item.id]["contrabId"] = int(item.crontab_id)
-                schedules[item.id]["hours"] = int(CrontabSchedule.objects.get(id=item.crontab_id).hour)
-                schedules[item.id]["minutes"] = int(CrontabSchedule.objects.get(id=item.crontab_id).minute)
-                schedules[item.id]["days"] = CrontabSchedule.objects.get(id=item.crontab_id).day_of_week.split(", ")
-
-    print(schedules)
-
-    return HttpResponse(json.dumps(schedules), content_type="application/json")
-
-@login_required
-def deleteSchedule(request):
-
-    scheduleName = request.POST['schedule-name']
-    crontabId = request.POST['crontab-id']
-
-    PeriodicTask.objects.get(name=scheduleName).delete()
-    CrontabSchedule.objects.get(id=crontabId).delete()
-
-    return redirect('projects:index')
-
-@login_required
-def deleteScheduleFromId(request, id):
-
-    get = PeriodicTask.objects.all()
-
-    for item in get:
-        if item.name != "":
-            if item.name.split("-", 1)[0] == str(id):
-
-                scheduleName = item.name
-                crontabId = int(item.crontab_id)
-
-                PeriodicTask.objects.get(name=scheduleName).delete()
-                CrontabSchedule.objects.get(id=crontabId).delete()
-
-    return redirect('projects:index')
\ No newline at end of file
diff --git a/web/static/css/bootstrap.min.css b/web/static/css/bootstrap.min.css
deleted file mode 100644
index 1472dec0..00000000
--- a/web/static/css/bootstrap.min.css
+++ /dev/null
@@ -1,7 +0,0 @@
-@charset "UTF-8";/*!
- * Bootstrap v5.1.3 (https://getbootstrap.com/)
- * Copyright 2011-2021 The Bootstrap Authors
- * Copyright 2011-2021 Twitter, Inc.
- * Licensed under MIT (https://github.com/twbs/bootstrap/blob/main/LICENSE) - */:root{--bs-blue:#0d6efd;--bs-indigo:#6610f2;--bs-purple:#6f42c1;--bs-pink:#d63384;--bs-red:#dc3545;--bs-orange:#fd7e14;--bs-yellow:#ffc107;--bs-green:#198754;--bs-teal:#20c997;--bs-cyan:#0dcaf0;--bs-white:#fff;--bs-gray:#6c757d;--bs-gray-dark:#343a40;--bs-gray-100:#f8f9fa;--bs-gray-200:#e9ecef;--bs-gray-300:#dee2e6;--bs-gray-400:#ced4da;--bs-gray-500:#adb5bd;--bs-gray-600:#6c757d;--bs-gray-700:#495057;--bs-gray-800:#343a40;--bs-gray-900:#212529;--bs-primary:#0d6efd;--bs-secondary:#6c757d;--bs-success:#198754;--bs-info:#0dcaf0;--bs-warning:#ffc107;--bs-danger:#dc3545;--bs-light:#f8f9fa;--bs-dark:#212529;--bs-primary-rgb:13,110,253;--bs-secondary-rgb:108,117,125;--bs-success-rgb:25,135,84;--bs-info-rgb:13,202,240;--bs-warning-rgb:255,193,7;--bs-danger-rgb:220,53,69;--bs-light-rgb:248,249,250;--bs-dark-rgb:33,37,41;--bs-white-rgb:255,255,255;--bs-black-rgb:0,0,0;--bs-body-color-rgb:33,37,41;--bs-body-bg-rgb:255,255,255;--bs-font-sans-serif:system-ui,-apple-system,"Segoe UI",Roboto,"Helvetica Neue",Arial,"Noto Sans","Liberation Sans",sans-serif,"Apple Color Emoji","Segoe UI Emoji","Segoe UI Symbol","Noto Color Emoji";--bs-font-monospace:SFMono-Regular,Menlo,Monaco,Consolas,"Liberation Mono","Courier New",monospace;--bs-gradient:linear-gradient(180deg, rgba(255, 255, 255, 0.15), rgba(255, 255, 255, 0));--bs-body-font-family:var(--bs-font-sans-serif);--bs-body-font-size:1rem;--bs-body-font-weight:400;--bs-body-line-height:1.5;--bs-body-color:#212529;--bs-body-bg:#fff}*,::after,::before{box-sizing:border-box}@media (prefers-reduced-motion:no-preference){:root{scroll-behavior:smooth}}body{margin:0;font-family:var(--bs-body-font-family);font-size:var(--bs-body-font-size);font-weight:var(--bs-body-font-weight);line-height:var(--bs-body-line-height);color:var(--bs-body-color);text-align:var(--bs-body-text-align);background-color:var(--bs-body-bg);-webkit-text-size-adjust:100%;-webkit-tap-highlight-color:transparent}hr{margin:1rem 0;color:inherit;background-color:currentColor;border:0;opacity:.25}hr:not([size]){height:1px}.h1,.h2,.h3,.h4,.h5,.h6,h1,h2,h3,h4,h5,h6{margin-top:0;margin-bottom:.5rem;font-weight:500;line-height:1.2}.h1,h1{font-size:calc(1.375rem + 1.5vw)}@media (min-width:1200px){.h1,h1{font-size:2.5rem}}.h2,h2{font-size:calc(1.325rem + .9vw)}@media (min-width:1200px){.h2,h2{font-size:2rem}}.h3,h3{font-size:calc(1.3rem + .6vw)}@media (min-width:1200px){.h3,h3{font-size:1.75rem}}.h4,h4{font-size:calc(1.275rem + .3vw)}@media (min-width:1200px){.h4,h4{font-size:1.5rem}}.h5,h5{font-size:1.25rem}.h6,h6{font-size:1rem}p{margin-top:0;margin-bottom:1rem}abbr[data-bs-original-title],abbr[title]{-webkit-text-decoration:underline dotted;text-decoration:underline dotted;cursor:help;-webkit-text-decoration-skip-ink:none;text-decoration-skip-ink:none}address{margin-bottom:1rem;font-style:normal;line-height:inherit}ol,ul{padding-left:2rem}dl,ol,ul{margin-top:0;margin-bottom:1rem}ol ol,ol ul,ul ol,ul ul{margin-bottom:0}dt{font-weight:700}dd{margin-bottom:.5rem;margin-left:0}blockquote{margin:0 0 
1rem}b,strong{font-weight:bolder}.small,small{font-size:.875em}.mark,mark{padding:.2em;background-color:#fcf8e3}sub,sup{position:relative;font-size:.75em;line-height:0;vertical-align:baseline}sub{bottom:-.25em}sup{top:-.5em}a{color:#0d6efd;text-decoration:underline}a:hover{color:#0a58ca}a:not([href]):not([class]),a:not([href]):not([class]):hover{color:inherit;text-decoration:none}code,kbd,pre,samp{font-family:var(--bs-font-monospace);font-size:1em;direction:ltr;unicode-bidi:bidi-override}pre{display:block;margin-top:0;margin-bottom:1rem;overflow:auto;font-size:.875em}pre code{font-size:inherit;color:inherit;word-break:normal}code{font-size:.875em;color:#d63384;word-wrap:break-word}a>code{color:inherit}kbd{padding:.2rem .4rem;font-size:.875em;color:#fff;background-color:#212529;border-radius:.2rem}kbd kbd{padding:0;font-size:1em;font-weight:700}figure{margin:0 0 1rem}img,svg{vertical-align:middle}table{caption-side:bottom;border-collapse:collapse}caption{padding-top:.5rem;padding-bottom:.5rem;color:#6c757d;text-align:left}th{text-align:inherit;text-align:-webkit-match-parent}tbody,td,tfoot,th,thead,tr{border-color:inherit;border-style:solid;border-width:0}label{display:inline-block}button{border-radius:0}button:focus:not(:focus-visible){outline:0}button,input,optgroup,select,textarea{margin:0;font-family:inherit;font-size:inherit;line-height:inherit}button,select{text-transform:none}[role=button]{cursor:pointer}select{word-wrap:normal}select:disabled{opacity:1}[list]::-webkit-calendar-picker-indicator{display:none}[type=button],[type=reset],[type=submit],button{-webkit-appearance:button}[type=button]:not(:disabled),[type=reset]:not(:disabled),[type=submit]:not(:disabled),button:not(:disabled){cursor:pointer}::-moz-focus-inner{padding:0;border-style:none}textarea{resize:vertical}fieldset{min-width:0;padding:0;margin:0;border:0}legend{float:left;width:100%;padding:0;margin-bottom:.5rem;font-size:calc(1.275rem + .3vw);line-height:inherit}@media (min-width:1200px){legend{font-size:1.5rem}}legend+*{clear:left}::-webkit-datetime-edit-day-field,::-webkit-datetime-edit-fields-wrapper,::-webkit-datetime-edit-hour-field,::-webkit-datetime-edit-minute,::-webkit-datetime-edit-month-field,::-webkit-datetime-edit-text,::-webkit-datetime-edit-year-field{padding:0}::-webkit-inner-spin-button{height:auto}[type=search]{outline-offset:-2px;-webkit-appearance:textfield}::-webkit-search-decoration{-webkit-appearance:none}::-webkit-color-swatch-wrapper{padding:0}::-webkit-file-upload-button{font:inherit}::file-selector-button{font:inherit}::-webkit-file-upload-button{font:inherit;-webkit-appearance:button}output{display:inline-block}iframe{border:0}summary{display:list-item;cursor:pointer}progress{vertical-align:baseline}[hidden]{display:none!important}.lead{font-size:1.25rem;font-weight:300}.display-1{font-size:calc(1.625rem + 4.5vw);font-weight:300;line-height:1.2}@media (min-width:1200px){.display-1{font-size:5rem}}.display-2{font-size:calc(1.575rem + 3.9vw);font-weight:300;line-height:1.2}@media (min-width:1200px){.display-2{font-size:4.5rem}}.display-3{font-size:calc(1.525rem + 3.3vw);font-weight:300;line-height:1.2}@media (min-width:1200px){.display-3{font-size:4rem}}.display-4{font-size:calc(1.475rem + 2.7vw);font-weight:300;line-height:1.2}@media (min-width:1200px){.display-4{font-size:3.5rem}}.display-5{font-size:calc(1.425rem + 2.1vw);font-weight:300;line-height:1.2}@media (min-width:1200px){.display-5{font-size:3rem}}.display-6{font-size:calc(1.375rem + 1.5vw);font-weight:300;line-height:1.2}@media 
(min-width:1200px){.display-6{font-size:2.5rem}}.list-unstyled{padding-left:0;list-style:none}.list-inline{padding-left:0;list-style:none}.list-inline-item{display:inline-block}.list-inline-item:not(:last-child){margin-right:.5rem}.initialism{font-size:.875em;text-transform:uppercase}.blockquote{margin-bottom:1rem;font-size:1.25rem}.blockquote>:last-child{margin-bottom:0}.blockquote-footer{margin-top:-1rem;margin-bottom:1rem;font-size:.875em;color:#6c757d}.blockquote-footer::before{content:"— "}.img-fluid{max-width:100%;height:auto}.img-thumbnail{padding:.25rem;background-color:#fff;border:1px solid #dee2e6;border-radius:.25rem;max-width:100%;height:auto}.figure{display:inline-block}.figure-img{margin-bottom:.5rem;line-height:1}.figure-caption{font-size:.875em;color:#6c757d}.container,.container-fluid,.container-lg,.container-md,.container-sm,.container-xl,.container-xxl{width:100%;padding-right:var(--bs-gutter-x,.75rem);padding-left:var(--bs-gutter-x,.75rem);margin-right:auto;margin-left:auto}@media (min-width:576px){.container,.container-sm{max-width:540px}}@media (min-width:768px){.container,.container-md,.container-sm{max-width:720px}}@media (min-width:992px){.container,.container-lg,.container-md,.container-sm{max-width:960px}}@media (min-width:1200px){.container,.container-lg,.container-md,.container-sm,.container-xl{max-width:1140px}}@media (min-width:1400px){.container,.container-lg,.container-md,.container-sm,.container-xl,.container-xxl{max-width:1320px}}.row{--bs-gutter-x:1.5rem;--bs-gutter-y:0;display:flex;flex-wrap:wrap;margin-top:calc(-1 * var(--bs-gutter-y));margin-right:calc(-.5 * var(--bs-gutter-x));margin-left:calc(-.5 * var(--bs-gutter-x))}.row>*{flex-shrink:0;width:100%;max-width:100%;padding-right:calc(var(--bs-gutter-x) * .5);padding-left:calc(var(--bs-gutter-x) * .5);margin-top:var(--bs-gutter-y)}.col{flex:1 0 0%}.row-cols-auto>*{flex:0 0 auto;width:auto}.row-cols-1>*{flex:0 0 auto;width:100%}.row-cols-2>*{flex:0 0 auto;width:50%}.row-cols-3>*{flex:0 0 auto;width:33.3333333333%}.row-cols-4>*{flex:0 0 auto;width:25%}.row-cols-5>*{flex:0 0 auto;width:20%}.row-cols-6>*{flex:0 0 auto;width:16.6666666667%}.col-auto{flex:0 0 auto;width:auto}.col-1{flex:0 0 auto;width:8.33333333%}.col-2{flex:0 0 auto;width:16.66666667%}.col-3{flex:0 0 auto;width:25%}.col-4{flex:0 0 auto;width:33.33333333%}.col-5{flex:0 0 auto;width:41.66666667%}.col-6{flex:0 0 auto;width:50%}.col-7{flex:0 0 auto;width:58.33333333%}.col-8{flex:0 0 auto;width:66.66666667%}.col-9{flex:0 0 auto;width:75%}.col-10{flex:0 0 auto;width:83.33333333%}.col-11{flex:0 0 auto;width:91.66666667%}.col-12{flex:0 0 auto;width:100%}.offset-1{margin-left:8.33333333%}.offset-2{margin-left:16.66666667%}.offset-3{margin-left:25%}.offset-4{margin-left:33.33333333%}.offset-5{margin-left:41.66666667%}.offset-6{margin-left:50%}.offset-7{margin-left:58.33333333%}.offset-8{margin-left:66.66666667%}.offset-9{margin-left:75%}.offset-10{margin-left:83.33333333%}.offset-11{margin-left:91.66666667%}.g-0,.gx-0{--bs-gutter-x:0}.g-0,.gy-0{--bs-gutter-y:0}.g-1,.gx-1{--bs-gutter-x:0.25rem}.g-1,.gy-1{--bs-gutter-y:0.25rem}.g-2,.gx-2{--bs-gutter-x:0.5rem}.g-2,.gy-2{--bs-gutter-y:0.5rem}.g-3,.gx-3{--bs-gutter-x:1rem}.g-3,.gy-3{--bs-gutter-y:1rem}.g-4,.gx-4{--bs-gutter-x:1.5rem}.g-4,.gy-4{--bs-gutter-y:1.5rem}.g-5,.gx-5{--bs-gutter-x:3rem}.g-5,.gy-5{--bs-gutter-y:3rem}@media (min-width:576px){.col-sm{flex:1 0 0%}.row-cols-sm-auto>*{flex:0 0 auto;width:auto}.row-cols-sm-1>*{flex:0 0 auto;width:100%}.row-cols-sm-2>*{flex:0 0 
auto;width:50%}.row-cols-sm-3>*{flex:0 0 auto;width:33.3333333333%}.row-cols-sm-4>*{flex:0 0 auto;width:25%}.row-cols-sm-5>*{flex:0 0 auto;width:20%}.row-cols-sm-6>*{flex:0 0 auto;width:16.6666666667%}.col-sm-auto{flex:0 0 auto;width:auto}.col-sm-1{flex:0 0 auto;width:8.33333333%}.col-sm-2{flex:0 0 auto;width:16.66666667%}.col-sm-3{flex:0 0 auto;width:25%}.col-sm-4{flex:0 0 auto;width:33.33333333%}.col-sm-5{flex:0 0 auto;width:41.66666667%}.col-sm-6{flex:0 0 auto;width:50%}.col-sm-7{flex:0 0 auto;width:58.33333333%}.col-sm-8{flex:0 0 auto;width:66.66666667%}.col-sm-9{flex:0 0 auto;width:75%}.col-sm-10{flex:0 0 auto;width:83.33333333%}.col-sm-11{flex:0 0 auto;width:91.66666667%}.col-sm-12{flex:0 0 auto;width:100%}.offset-sm-0{margin-left:0}.offset-sm-1{margin-left:8.33333333%}.offset-sm-2{margin-left:16.66666667%}.offset-sm-3{margin-left:25%}.offset-sm-4{margin-left:33.33333333%}.offset-sm-5{margin-left:41.66666667%}.offset-sm-6{margin-left:50%}.offset-sm-7{margin-left:58.33333333%}.offset-sm-8{margin-left:66.66666667%}.offset-sm-9{margin-left:75%}.offset-sm-10{margin-left:83.33333333%}.offset-sm-11{margin-left:91.66666667%}.g-sm-0,.gx-sm-0{--bs-gutter-x:0}.g-sm-0,.gy-sm-0{--bs-gutter-y:0}.g-sm-1,.gx-sm-1{--bs-gutter-x:0.25rem}.g-sm-1,.gy-sm-1{--bs-gutter-y:0.25rem}.g-sm-2,.gx-sm-2{--bs-gutter-x:0.5rem}.g-sm-2,.gy-sm-2{--bs-gutter-y:0.5rem}.g-sm-3,.gx-sm-3{--bs-gutter-x:1rem}.g-sm-3,.gy-sm-3{--bs-gutter-y:1rem}.g-sm-4,.gx-sm-4{--bs-gutter-x:1.5rem}.g-sm-4,.gy-sm-4{--bs-gutter-y:1.5rem}.g-sm-5,.gx-sm-5{--bs-gutter-x:3rem}.g-sm-5,.gy-sm-5{--bs-gutter-y:3rem}}@media (min-width:768px){.col-md{flex:1 0 0%}.row-cols-md-auto>*{flex:0 0 auto;width:auto}.row-cols-md-1>*{flex:0 0 auto;width:100%}.row-cols-md-2>*{flex:0 0 auto;width:50%}.row-cols-md-3>*{flex:0 0 auto;width:33.3333333333%}.row-cols-md-4>*{flex:0 0 auto;width:25%}.row-cols-md-5>*{flex:0 0 auto;width:20%}.row-cols-md-6>*{flex:0 0 auto;width:16.6666666667%}.col-md-auto{flex:0 0 auto;width:auto}.col-md-1{flex:0 0 auto;width:8.33333333%}.col-md-2{flex:0 0 auto;width:16.66666667%}.col-md-3{flex:0 0 auto;width:25%}.col-md-4{flex:0 0 auto;width:33.33333333%}.col-md-5{flex:0 0 auto;width:41.66666667%}.col-md-6{flex:0 0 auto;width:50%}.col-md-7{flex:0 0 auto;width:58.33333333%}.col-md-8{flex:0 0 auto;width:66.66666667%}.col-md-9{flex:0 0 auto;width:75%}.col-md-10{flex:0 0 auto;width:83.33333333%}.col-md-11{flex:0 0 auto;width:91.66666667%}.col-md-12{flex:0 0 auto;width:100%}.offset-md-0{margin-left:0}.offset-md-1{margin-left:8.33333333%}.offset-md-2{margin-left:16.66666667%}.offset-md-3{margin-left:25%}.offset-md-4{margin-left:33.33333333%}.offset-md-5{margin-left:41.66666667%}.offset-md-6{margin-left:50%}.offset-md-7{margin-left:58.33333333%}.offset-md-8{margin-left:66.66666667%}.offset-md-9{margin-left:75%}.offset-md-10{margin-left:83.33333333%}.offset-md-11{margin-left:91.66666667%}.g-md-0,.gx-md-0{--bs-gutter-x:0}.g-md-0,.gy-md-0{--bs-gutter-y:0}.g-md-1,.gx-md-1{--bs-gutter-x:0.25rem}.g-md-1,.gy-md-1{--bs-gutter-y:0.25rem}.g-md-2,.gx-md-2{--bs-gutter-x:0.5rem}.g-md-2,.gy-md-2{--bs-gutter-y:0.5rem}.g-md-3,.gx-md-3{--bs-gutter-x:1rem}.g-md-3,.gy-md-3{--bs-gutter-y:1rem}.g-md-4,.gx-md-4{--bs-gutter-x:1.5rem}.g-md-4,.gy-md-4{--bs-gutter-y:1.5rem}.g-md-5,.gx-md-5{--bs-gutter-x:3rem}.g-md-5,.gy-md-5{--bs-gutter-y:3rem}}@media (min-width:992px){.col-lg{flex:1 0 0%}.row-cols-lg-auto>*{flex:0 0 auto;width:auto}.row-cols-lg-1>*{flex:0 0 auto;width:100%}.row-cols-lg-2>*{flex:0 0 auto;width:50%}.row-cols-lg-3>*{flex:0 0 
auto;width:33.3333333333%}.row-cols-lg-4>*{flex:0 0 auto;width:25%}.row-cols-lg-5>*{flex:0 0 auto;width:20%}.row-cols-lg-6>*{flex:0 0 auto;width:16.6666666667%}.col-lg-auto{flex:0 0 auto;width:auto}.col-lg-1{flex:0 0 auto;width:8.33333333%}.col-lg-2{flex:0 0 auto;width:16.66666667%}.col-lg-3{flex:0 0 auto;width:25%}.col-lg-4{flex:0 0 auto;width:33.33333333%}.col-lg-5{flex:0 0 auto;width:41.66666667%}.col-lg-6{flex:0 0 auto;width:50%}.col-lg-7{flex:0 0 auto;width:58.33333333%}.col-lg-8{flex:0 0 auto;width:66.66666667%}.col-lg-9{flex:0 0 auto;width:75%}.col-lg-10{flex:0 0 auto;width:83.33333333%}.col-lg-11{flex:0 0 auto;width:91.66666667%}.col-lg-12{flex:0 0 auto;width:100%}.offset-lg-0{margin-left:0}.offset-lg-1{margin-left:8.33333333%}.offset-lg-2{margin-left:16.66666667%}.offset-lg-3{margin-left:25%}.offset-lg-4{margin-left:33.33333333%}.offset-lg-5{margin-left:41.66666667%}.offset-lg-6{margin-left:50%}.offset-lg-7{margin-left:58.33333333%}.offset-lg-8{margin-left:66.66666667%}.offset-lg-9{margin-left:75%}.offset-lg-10{margin-left:83.33333333%}.offset-lg-11{margin-left:91.66666667%}.g-lg-0,.gx-lg-0{--bs-gutter-x:0}.g-lg-0,.gy-lg-0{--bs-gutter-y:0}.g-lg-1,.gx-lg-1{--bs-gutter-x:0.25rem}.g-lg-1,.gy-lg-1{--bs-gutter-y:0.25rem}.g-lg-2,.gx-lg-2{--bs-gutter-x:0.5rem}.g-lg-2,.gy-lg-2{--bs-gutter-y:0.5rem}.g-lg-3,.gx-lg-3{--bs-gutter-x:1rem}.g-lg-3,.gy-lg-3{--bs-gutter-y:1rem}.g-lg-4,.gx-lg-4{--bs-gutter-x:1.5rem}.g-lg-4,.gy-lg-4{--bs-gutter-y:1.5rem}.g-lg-5,.gx-lg-5{--bs-gutter-x:3rem}.g-lg-5,.gy-lg-5{--bs-gutter-y:3rem}}@media (min-width:1200px){.col-xl{flex:1 0 0%}.row-cols-xl-auto>*{flex:0 0 auto;width:auto}.row-cols-xl-1>*{flex:0 0 auto;width:100%}.row-cols-xl-2>*{flex:0 0 auto;width:50%}.row-cols-xl-3>*{flex:0 0 auto;width:33.3333333333%}.row-cols-xl-4>*{flex:0 0 auto;width:25%}.row-cols-xl-5>*{flex:0 0 auto;width:20%}.row-cols-xl-6>*{flex:0 0 auto;width:16.6666666667%}.col-xl-auto{flex:0 0 auto;width:auto}.col-xl-1{flex:0 0 auto;width:8.33333333%}.col-xl-2{flex:0 0 auto;width:16.66666667%}.col-xl-3{flex:0 0 auto;width:25%}.col-xl-4{flex:0 0 auto;width:33.33333333%}.col-xl-5{flex:0 0 auto;width:41.66666667%}.col-xl-6{flex:0 0 auto;width:50%}.col-xl-7{flex:0 0 auto;width:58.33333333%}.col-xl-8{flex:0 0 auto;width:66.66666667%}.col-xl-9{flex:0 0 auto;width:75%}.col-xl-10{flex:0 0 auto;width:83.33333333%}.col-xl-11{flex:0 0 auto;width:91.66666667%}.col-xl-12{flex:0 0 auto;width:100%}.offset-xl-0{margin-left:0}.offset-xl-1{margin-left:8.33333333%}.offset-xl-2{margin-left:16.66666667%}.offset-xl-3{margin-left:25%}.offset-xl-4{margin-left:33.33333333%}.offset-xl-5{margin-left:41.66666667%}.offset-xl-6{margin-left:50%}.offset-xl-7{margin-left:58.33333333%}.offset-xl-8{margin-left:66.66666667%}.offset-xl-9{margin-left:75%}.offset-xl-10{margin-left:83.33333333%}.offset-xl-11{margin-left:91.66666667%}.g-xl-0,.gx-xl-0{--bs-gutter-x:0}.g-xl-0,.gy-xl-0{--bs-gutter-y:0}.g-xl-1,.gx-xl-1{--bs-gutter-x:0.25rem}.g-xl-1,.gy-xl-1{--bs-gutter-y:0.25rem}.g-xl-2,.gx-xl-2{--bs-gutter-x:0.5rem}.g-xl-2,.gy-xl-2{--bs-gutter-y:0.5rem}.g-xl-3,.gx-xl-3{--bs-gutter-x:1rem}.g-xl-3,.gy-xl-3{--bs-gutter-y:1rem}.g-xl-4,.gx-xl-4{--bs-gutter-x:1.5rem}.g-xl-4,.gy-xl-4{--bs-gutter-y:1.5rem}.g-xl-5,.gx-xl-5{--bs-gutter-x:3rem}.g-xl-5,.gy-xl-5{--bs-gutter-y:3rem}}@media (min-width:1400px){.col-xxl{flex:1 0 0%}.row-cols-xxl-auto>*{flex:0 0 auto;width:auto}.row-cols-xxl-1>*{flex:0 0 auto;width:100%}.row-cols-xxl-2>*{flex:0 0 auto;width:50%}.row-cols-xxl-3>*{flex:0 0 auto;width:33.3333333333%}.row-cols-xxl-4>*{flex:0 0 
auto;width:25%}.row-cols-xxl-5>*{flex:0 0 auto;width:20%}.row-cols-xxl-6>*{flex:0 0 auto;width:16.6666666667%}.col-xxl-auto{flex:0 0 auto;width:auto}.col-xxl-1{flex:0 0 auto;width:8.33333333%}.col-xxl-2{flex:0 0 auto;width:16.66666667%}.col-xxl-3{flex:0 0 auto;width:25%}.col-xxl-4{flex:0 0 auto;width:33.33333333%}.col-xxl-5{flex:0 0 auto;width:41.66666667%}.col-xxl-6{flex:0 0 auto;width:50%}.col-xxl-7{flex:0 0 auto;width:58.33333333%}.col-xxl-8{flex:0 0 auto;width:66.66666667%}.col-xxl-9{flex:0 0 auto;width:75%}.col-xxl-10{flex:0 0 auto;width:83.33333333%}.col-xxl-11{flex:0 0 auto;width:91.66666667%}.col-xxl-12{flex:0 0 auto;width:100%}.offset-xxl-0{margin-left:0}.offset-xxl-1{margin-left:8.33333333%}.offset-xxl-2{margin-left:16.66666667%}.offset-xxl-3{margin-left:25%}.offset-xxl-4{margin-left:33.33333333%}.offset-xxl-5{margin-left:41.66666667%}.offset-xxl-6{margin-left:50%}.offset-xxl-7{margin-left:58.33333333%}.offset-xxl-8{margin-left:66.66666667%}.offset-xxl-9{margin-left:75%}.offset-xxl-10{margin-left:83.33333333%}.offset-xxl-11{margin-left:91.66666667%}.g-xxl-0,.gx-xxl-0{--bs-gutter-x:0}.g-xxl-0,.gy-xxl-0{--bs-gutter-y:0}.g-xxl-1,.gx-xxl-1{--bs-gutter-x:0.25rem}.g-xxl-1,.gy-xxl-1{--bs-gutter-y:0.25rem}.g-xxl-2,.gx-xxl-2{--bs-gutter-x:0.5rem}.g-xxl-2,.gy-xxl-2{--bs-gutter-y:0.5rem}.g-xxl-3,.gx-xxl-3{--bs-gutter-x:1rem}.g-xxl-3,.gy-xxl-3{--bs-gutter-y:1rem}.g-xxl-4,.gx-xxl-4{--bs-gutter-x:1.5rem}.g-xxl-4,.gy-xxl-4{--bs-gutter-y:1.5rem}.g-xxl-5,.gx-xxl-5{--bs-gutter-x:3rem}.g-xxl-5,.gy-xxl-5{--bs-gutter-y:3rem}}.table{--bs-table-bg:transparent;--bs-table-accent-bg:transparent;--bs-table-striped-color:#212529;--bs-table-striped-bg:rgba(0, 0, 0, 0.05);--bs-table-active-color:#212529;--bs-table-active-bg:rgba(0, 0, 0, 0.1);--bs-table-hover-color:#212529;--bs-table-hover-bg:rgba(0, 0, 0, 0.075);width:100%;margin-bottom:1rem;color:#212529;vertical-align:top;border-color:#dee2e6}.table>:not(caption)>*>*{padding:.5rem .5rem;background-color:var(--bs-table-bg);border-bottom-width:1px;box-shadow:inset 0 0 0 9999px var(--bs-table-accent-bg)}.table>tbody{vertical-align:inherit}.table>thead{vertical-align:bottom}.table>:not(:first-child){border-top:2px solid currentColor}.caption-top{caption-side:top}.table-sm>:not(caption)>*>*{padding:.25rem .25rem}.table-bordered>:not(caption)>*{border-width:1px 0}.table-bordered>:not(caption)>*>*{border-width:0 
1px}.table-borderless>:not(caption)>*>*{border-bottom-width:0}.table-borderless>:not(:first-child){border-top-width:0}.table-striped>tbody>tr:nth-of-type(odd)>*{--bs-table-accent-bg:var(--bs-table-striped-bg);color:var(--bs-table-striped-color)}.table-active{--bs-table-accent-bg:var(--bs-table-active-bg);color:var(--bs-table-active-color)}.table-hover>tbody>tr:hover>*{--bs-table-accent-bg:var(--bs-table-hover-bg);color:var(--bs-table-hover-color)}.table-primary{--bs-table-bg:#cfe2ff;--bs-table-striped-bg:#c5d7f2;--bs-table-striped-color:#000;--bs-table-active-bg:#bacbe6;--bs-table-active-color:#000;--bs-table-hover-bg:#bfd1ec;--bs-table-hover-color:#000;color:#000;border-color:#bacbe6}.table-secondary{--bs-table-bg:#e2e3e5;--bs-table-striped-bg:#d7d8da;--bs-table-striped-color:#000;--bs-table-active-bg:#cbccce;--bs-table-active-color:#000;--bs-table-hover-bg:#d1d2d4;--bs-table-hover-color:#000;color:#000;border-color:#cbccce}.table-success{--bs-table-bg:#d1e7dd;--bs-table-striped-bg:#c7dbd2;--bs-table-striped-color:#000;--bs-table-active-bg:#bcd0c7;--bs-table-active-color:#000;--bs-table-hover-bg:#c1d6cc;--bs-table-hover-color:#000;color:#000;border-color:#bcd0c7}.table-info{--bs-table-bg:#cff4fc;--bs-table-striped-bg:#c5e8ef;--bs-table-striped-color:#000;--bs-table-active-bg:#badce3;--bs-table-active-color:#000;--bs-table-hover-bg:#bfe2e9;--bs-table-hover-color:#000;color:#000;border-color:#badce3}.table-warning{--bs-table-bg:#fff3cd;--bs-table-striped-bg:#f2e7c3;--bs-table-striped-color:#000;--bs-table-active-bg:#e6dbb9;--bs-table-active-color:#000;--bs-table-hover-bg:#ece1be;--bs-table-hover-color:#000;color:#000;border-color:#e6dbb9}.table-danger{--bs-table-bg:#f8d7da;--bs-table-striped-bg:#eccccf;--bs-table-striped-color:#000;--bs-table-active-bg:#dfc2c4;--bs-table-active-color:#000;--bs-table-hover-bg:#e5c7ca;--bs-table-hover-color:#000;color:#000;border-color:#dfc2c4}.table-light{--bs-table-bg:#f8f9fa;--bs-table-striped-bg:#ecedee;--bs-table-striped-color:#000;--bs-table-active-bg:#dfe0e1;--bs-table-active-color:#000;--bs-table-hover-bg:#e5e6e7;--bs-table-hover-color:#000;color:#000;border-color:#dfe0e1}.table-dark{--bs-table-bg:#212529;--bs-table-striped-bg:#2c3034;--bs-table-striped-color:#fff;--bs-table-active-bg:#373b3e;--bs-table-active-color:#fff;--bs-table-hover-bg:#323539;--bs-table-hover-color:#fff;color:#fff;border-color:#373b3e}.table-responsive{overflow-x:auto;-webkit-overflow-scrolling:touch}@media (max-width:575.98px){.table-responsive-sm{overflow-x:auto;-webkit-overflow-scrolling:touch}}@media (max-width:767.98px){.table-responsive-md{overflow-x:auto;-webkit-overflow-scrolling:touch}}@media (max-width:991.98px){.table-responsive-lg{overflow-x:auto;-webkit-overflow-scrolling:touch}}@media (max-width:1199.98px){.table-responsive-xl{overflow-x:auto;-webkit-overflow-scrolling:touch}}@media (max-width:1399.98px){.table-responsive-xxl{overflow-x:auto;-webkit-overflow-scrolling:touch}}.form-label{margin-bottom:.5rem}.col-form-label{padding-top:calc(.375rem + 1px);padding-bottom:calc(.375rem + 1px);margin-bottom:0;font-size:inherit;line-height:1.5}.col-form-label-lg{padding-top:calc(.5rem + 1px);padding-bottom:calc(.5rem + 1px);font-size:1.25rem}.col-form-label-sm{padding-top:calc(.25rem + 1px);padding-bottom:calc(.25rem + 1px);font-size:.875rem}.form-text{margin-top:.25rem;font-size:.875em;color:#6c757d}.form-control{display:block;width:100%;padding:.375rem 
.75rem;font-size:1rem;font-weight:400;line-height:1.5;color:#212529;background-color:#fff;background-clip:padding-box;border:1px solid #ced4da;-webkit-appearance:none;-moz-appearance:none;appearance:none;border-radius:.25rem;transition:border-color .15s ease-in-out,box-shadow .15s ease-in-out}@media (prefers-reduced-motion:reduce){.form-control{transition:none}}.form-control[type=file]{overflow:hidden}.form-control[type=file]:not(:disabled):not([readonly]){cursor:pointer}.form-control:focus{color:#212529;background-color:#fff;border-color:#86b7fe;outline:0;box-shadow:0 0 0 .25rem rgba(13,110,253,.25)}.form-control::-webkit-date-and-time-value{height:1.5em}.form-control::-moz-placeholder{color:#6c757d;opacity:1}.form-control::placeholder{color:#6c757d;opacity:1}.form-control:disabled,.form-control[readonly]{background-color:#e9ecef;opacity:1}.form-control::-webkit-file-upload-button{padding:.375rem .75rem;margin:-.375rem -.75rem;-webkit-margin-end:.75rem;margin-inline-end:.75rem;color:#212529;background-color:#e9ecef;pointer-events:none;border-color:inherit;border-style:solid;border-width:0;border-inline-end-width:1px;border-radius:0;-webkit-transition:color .15s ease-in-out,background-color .15s ease-in-out,border-color .15s ease-in-out,box-shadow .15s ease-in-out;transition:color .15s ease-in-out,background-color .15s ease-in-out,border-color .15s ease-in-out,box-shadow .15s ease-in-out}.form-control::file-selector-button{padding:.375rem .75rem;margin:-.375rem -.75rem;-webkit-margin-end:.75rem;margin-inline-end:.75rem;color:#212529;background-color:#e9ecef;pointer-events:none;border-color:inherit;border-style:solid;border-width:0;border-inline-end-width:1px;border-radius:0;transition:color .15s ease-in-out,background-color .15s ease-in-out,border-color .15s ease-in-out,box-shadow .15s ease-in-out}@media (prefers-reduced-motion:reduce){.form-control::-webkit-file-upload-button{-webkit-transition:none;transition:none}.form-control::file-selector-button{transition:none}}.form-control:hover:not(:disabled):not([readonly])::-webkit-file-upload-button{background-color:#dde0e3}.form-control:hover:not(:disabled):not([readonly])::file-selector-button{background-color:#dde0e3}.form-control::-webkit-file-upload-button{padding:.375rem .75rem;margin:-.375rem -.75rem;-webkit-margin-end:.75rem;margin-inline-end:.75rem;color:#212529;background-color:#e9ecef;pointer-events:none;border-color:inherit;border-style:solid;border-width:0;border-inline-end-width:1px;border-radius:0;-webkit-transition:color .15s ease-in-out,background-color .15s ease-in-out,border-color .15s ease-in-out,box-shadow .15s ease-in-out;transition:color .15s ease-in-out,background-color .15s ease-in-out,border-color .15s ease-in-out,box-shadow .15s ease-in-out}@media (prefers-reduced-motion:reduce){.form-control::-webkit-file-upload-button{-webkit-transition:none;transition:none}}.form-control:hover:not(:disabled):not([readonly])::-webkit-file-upload-button{background-color:#dde0e3}.form-control-plaintext{display:block;width:100%;padding:.375rem 0;margin-bottom:0;line-height:1.5;color:#212529;background-color:transparent;border:solid transparent;border-width:1px 0}.form-control-plaintext.form-control-lg,.form-control-plaintext.form-control-sm{padding-right:0;padding-left:0}.form-control-sm{min-height:calc(1.5em + .5rem + 2px);padding:.25rem .5rem;font-size:.875rem;border-radius:.2rem}.form-control-sm::-webkit-file-upload-button{padding:.25rem .5rem;margin:-.25rem 
-.5rem;-webkit-margin-end:.5rem;margin-inline-end:.5rem}.form-control-sm::file-selector-button{padding:.25rem .5rem;margin:-.25rem -.5rem;-webkit-margin-end:.5rem;margin-inline-end:.5rem}.form-control-sm::-webkit-file-upload-button{padding:.25rem .5rem;margin:-.25rem -.5rem;-webkit-margin-end:.5rem;margin-inline-end:.5rem}.form-control-lg{min-height:calc(1.5em + 1rem + 2px);padding:.5rem 1rem;font-size:1.25rem;border-radius:.3rem}.form-control-lg::-webkit-file-upload-button{padding:.5rem 1rem;margin:-.5rem -1rem;-webkit-margin-end:1rem;margin-inline-end:1rem}.form-control-lg::file-selector-button{padding:.5rem 1rem;margin:-.5rem -1rem;-webkit-margin-end:1rem;margin-inline-end:1rem}.form-control-lg::-webkit-file-upload-button{padding:.5rem 1rem;margin:-.5rem -1rem;-webkit-margin-end:1rem;margin-inline-end:1rem}textarea.form-control{min-height:calc(1.5em + .75rem + 2px)}textarea.form-control-sm{min-height:calc(1.5em + .5rem + 2px)}textarea.form-control-lg{min-height:calc(1.5em + 1rem + 2px)}.form-control-color{width:3rem;height:auto;padding:.375rem}.form-control-color:not(:disabled):not([readonly]){cursor:pointer}.form-control-color::-moz-color-swatch{height:1.5em;border-radius:.25rem}.form-control-color::-webkit-color-swatch{height:1.5em;border-radius:.25rem}.form-select{display:block;width:100%;padding:.375rem 2.25rem .375rem .75rem;-moz-padding-start:calc(0.75rem - 3px);font-size:1rem;font-weight:400;line-height:1.5;color:#212529;background-color:#fff;background-image:url("data:image/svg+xml,%3csvg xmlns='http://www.w3.org/2000/svg' viewBox='0 0 16 16'%3e%3cpath fill='none' stroke='%23343a40' stroke-linecap='round' stroke-linejoin='round' stroke-width='2' d='M2 5l6 6 6-6'/%3e%3c/svg%3e");background-repeat:no-repeat;background-position:right .75rem center;background-size:16px 12px;border:1px solid #ced4da;border-radius:.25rem;transition:border-color .15s ease-in-out,box-shadow .15s ease-in-out;-webkit-appearance:none;-moz-appearance:none;appearance:none}@media (prefers-reduced-motion:reduce){.form-select{transition:none}}.form-select:focus{border-color:#86b7fe;outline:0;box-shadow:0 0 0 .25rem rgba(13,110,253,.25)}.form-select[multiple],.form-select[size]:not([size="1"]){padding-right:.75rem;background-image:none}.form-select:disabled{background-color:#e9ecef}.form-select:-moz-focusring{color:transparent;text-shadow:0 0 0 #212529}.form-select-sm{padding-top:.25rem;padding-bottom:.25rem;padding-left:.5rem;font-size:.875rem;border-radius:.2rem}.form-select-lg{padding-top:.5rem;padding-bottom:.5rem;padding-left:1rem;font-size:1.25rem;border-radius:.3rem}.form-check{display:block;min-height:1.5rem;padding-left:1.5em;margin-bottom:.125rem}.form-check .form-check-input{float:left;margin-left:-1.5em}.form-check-input{width:1em;height:1em;margin-top:.25em;vertical-align:top;background-color:#fff;background-repeat:no-repeat;background-position:center;background-size:contain;border:1px solid rgba(0,0,0,.25);-webkit-appearance:none;-moz-appearance:none;appearance:none;-webkit-print-color-adjust:exact;color-adjust:exact}.form-check-input[type=checkbox]{border-radius:.25em}.form-check-input[type=radio]{border-radius:50%}.form-check-input:active{filter:brightness(90%)}.form-check-input:focus{border-color:#86b7fe;outline:0;box-shadow:0 0 0 .25rem rgba(13,110,253,.25)}.form-check-input:checked{background-color:#0d6efd;border-color:#0d6efd}.form-check-input:checked[type=checkbox]{background-image:url("data:image/svg+xml,%3csvg xmlns='http://www.w3.org/2000/svg' viewBox='0 0 20 20'%3e%3cpath fill='none' 
stroke='%23fff' stroke-linecap='round' stroke-linejoin='round' stroke-width='3' d='M6 10l3 3l6-6'/%3e%3c/svg%3e")}.form-check-input:checked[type=radio]{background-image:url("data:image/svg+xml,%3csvg xmlns='http://www.w3.org/2000/svg' viewBox='-4 -4 8 8'%3e%3ccircle r='2' fill='%23fff'/%3e%3c/svg%3e")}.form-check-input[type=checkbox]:indeterminate{background-color:#0d6efd;border-color:#0d6efd;background-image:url("data:image/svg+xml,%3csvg xmlns='http://www.w3.org/2000/svg' viewBox='0 0 20 20'%3e%3cpath fill='none' stroke='%23fff' stroke-linecap='round' stroke-linejoin='round' stroke-width='3' d='M6 10h8'/%3e%3c/svg%3e")}.form-check-input:disabled{pointer-events:none;filter:none;opacity:.5}.form-check-input:disabled~.form-check-label,.form-check-input[disabled]~.form-check-label{opacity:.5}.form-switch{padding-left:2.5em}.form-switch .form-check-input{width:2em;margin-left:-2.5em;background-image:url("data:image/svg+xml,%3csvg xmlns='http://www.w3.org/2000/svg' viewBox='-4 -4 8 8'%3e%3ccircle r='3' fill='rgba%280, 0, 0, 0.25%29'/%3e%3c/svg%3e");background-position:left center;border-radius:2em;transition:background-position .15s ease-in-out}@media (prefers-reduced-motion:reduce){.form-switch .form-check-input{transition:none}}.form-switch .form-check-input:focus{background-image:url("data:image/svg+xml,%3csvg xmlns='http://www.w3.org/2000/svg' viewBox='-4 -4 8 8'%3e%3ccircle r='3' fill='%2386b7fe'/%3e%3c/svg%3e")}.form-switch .form-check-input:checked{background-position:right center;background-image:url("data:image/svg+xml,%3csvg xmlns='http://www.w3.org/2000/svg' viewBox='-4 -4 8 8'%3e%3ccircle r='3' fill='%23fff'/%3e%3c/svg%3e")}.form-check-inline{display:inline-block;margin-right:1rem}.btn-check{position:absolute;clip:rect(0,0,0,0);pointer-events:none}.btn-check:disabled+.btn,.btn-check[disabled]+.btn{pointer-events:none;filter:none;opacity:.65}.form-range{width:100%;height:1.5rem;padding:0;background-color:transparent;-webkit-appearance:none;-moz-appearance:none;appearance:none}.form-range:focus{outline:0}.form-range:focus::-webkit-slider-thumb{box-shadow:0 0 0 1px #fff,0 0 0 .25rem rgba(13,110,253,.25)}.form-range:focus::-moz-range-thumb{box-shadow:0 0 0 1px #fff,0 0 0 .25rem rgba(13,110,253,.25)}.form-range::-moz-focus-outer{border:0}.form-range::-webkit-slider-thumb{width:1rem;height:1rem;margin-top:-.25rem;background-color:#0d6efd;border:0;border-radius:1rem;-webkit-transition:background-color .15s ease-in-out,border-color .15s ease-in-out,box-shadow .15s ease-in-out;transition:background-color .15s ease-in-out,border-color .15s ease-in-out,box-shadow .15s ease-in-out;-webkit-appearance:none;appearance:none}@media (prefers-reduced-motion:reduce){.form-range::-webkit-slider-thumb{-webkit-transition:none;transition:none}}.form-range::-webkit-slider-thumb:active{background-color:#b6d4fe}.form-range::-webkit-slider-runnable-track{width:100%;height:.5rem;color:transparent;cursor:pointer;background-color:#dee2e6;border-color:transparent;border-radius:1rem}.form-range::-moz-range-thumb{width:1rem;height:1rem;background-color:#0d6efd;border:0;border-radius:1rem;-moz-transition:background-color .15s ease-in-out,border-color .15s ease-in-out,box-shadow .15s ease-in-out;transition:background-color .15s ease-in-out,border-color .15s ease-in-out,box-shadow .15s ease-in-out;-moz-appearance:none;appearance:none}@media 
(prefers-reduced-motion:reduce){.form-range::-moz-range-thumb{-moz-transition:none;transition:none}}.form-range::-moz-range-thumb:active{background-color:#b6d4fe}.form-range::-moz-range-track{width:100%;height:.5rem;color:transparent;cursor:pointer;background-color:#dee2e6;border-color:transparent;border-radius:1rem}.form-range:disabled{pointer-events:none}.form-range:disabled::-webkit-slider-thumb{background-color:#adb5bd}.form-range:disabled::-moz-range-thumb{background-color:#adb5bd}.form-floating{position:relative}.form-floating>.form-control,.form-floating>.form-select{height:calc(3.5rem + 2px);line-height:1.25}.form-floating>label{position:absolute;top:0;left:0;height:100%;padding:1rem .75rem;pointer-events:none;border:1px solid transparent;transform-origin:0 0;transition:opacity .1s ease-in-out,transform .1s ease-in-out}@media (prefers-reduced-motion:reduce){.form-floating>label{transition:none}}.form-floating>.form-control{padding:1rem .75rem}.form-floating>.form-control::-moz-placeholder{color:transparent}.form-floating>.form-control::placeholder{color:transparent}.form-floating>.form-control:not(:-moz-placeholder-shown){padding-top:1.625rem;padding-bottom:.625rem}.form-floating>.form-control:focus,.form-floating>.form-control:not(:placeholder-shown){padding-top:1.625rem;padding-bottom:.625rem}.form-floating>.form-control:-webkit-autofill{padding-top:1.625rem;padding-bottom:.625rem}.form-floating>.form-select{padding-top:1.625rem;padding-bottom:.625rem}.form-floating>.form-control:not(:-moz-placeholder-shown)~label{opacity:.65;transform:scale(.85) translateY(-.5rem) translateX(.15rem)}.form-floating>.form-control:focus~label,.form-floating>.form-control:not(:placeholder-shown)~label,.form-floating>.form-select~label{opacity:.65;transform:scale(.85) translateY(-.5rem) translateX(.15rem)}.form-floating>.form-control:-webkit-autofill~label{opacity:.65;transform:scale(.85) translateY(-.5rem) translateX(.15rem)}.input-group{position:relative;display:flex;flex-wrap:wrap;align-items:stretch;width:100%}.input-group>.form-control,.input-group>.form-select{position:relative;flex:1 1 auto;width:1%;min-width:0}.input-group>.form-control:focus,.input-group>.form-select:focus{z-index:3}.input-group .btn{position:relative;z-index:2}.input-group .btn:focus{z-index:3}.input-group-text{display:flex;align-items:center;padding:.375rem .75rem;font-size:1rem;font-weight:400;line-height:1.5;color:#212529;text-align:center;white-space:nowrap;background-color:#e9ecef;border:1px solid #ced4da;border-radius:.25rem}.input-group-lg>.btn,.input-group-lg>.form-control,.input-group-lg>.form-select,.input-group-lg>.input-group-text{padding:.5rem 1rem;font-size:1.25rem;border-radius:.3rem}.input-group-sm>.btn,.input-group-sm>.form-control,.input-group-sm>.form-select,.input-group-sm>.input-group-text{padding:.25rem 
.5rem;font-size:.875rem;border-radius:.2rem}.input-group-lg>.form-select,.input-group-sm>.form-select{padding-right:3rem}.input-group:not(.has-validation)>.dropdown-toggle:nth-last-child(n+3),.input-group:not(.has-validation)>:not(:last-child):not(.dropdown-toggle):not(.dropdown-menu){border-top-right-radius:0;border-bottom-right-radius:0}.input-group.has-validation>.dropdown-toggle:nth-last-child(n+4),.input-group.has-validation>:nth-last-child(n+3):not(.dropdown-toggle):not(.dropdown-menu){border-top-right-radius:0;border-bottom-right-radius:0}.input-group>:not(:first-child):not(.dropdown-menu):not(.valid-tooltip):not(.valid-feedback):not(.invalid-tooltip):not(.invalid-feedback){margin-left:-1px;border-top-left-radius:0;border-bottom-left-radius:0}.valid-feedback{display:none;width:100%;margin-top:.25rem;font-size:.875em;color:#198754}.valid-tooltip{position:absolute;top:100%;z-index:5;display:none;max-width:100%;padding:.25rem .5rem;margin-top:.1rem;font-size:.875rem;color:#fff;background-color:rgba(25,135,84,.9);border-radius:.25rem}.is-valid~.valid-feedback,.is-valid~.valid-tooltip,.was-validated :valid~.valid-feedback,.was-validated :valid~.valid-tooltip{display:block}.form-control.is-valid,.was-validated .form-control:valid{border-color:#198754;padding-right:calc(1.5em + .75rem);background-image:url("data:image/svg+xml,%3csvg xmlns='http://www.w3.org/2000/svg' viewBox='0 0 8 8'%3e%3cpath fill='%23198754' d='M2.3 6.73L.6 4.53c-.4-1.04.46-1.4 1.1-.8l1.1 1.4 3.4-3.8c.6-.63 1.6-.27 1.2.7l-4 4.6c-.43.5-.8.4-1.1.1z'/%3e%3c/svg%3e");background-repeat:no-repeat;background-position:right calc(.375em + .1875rem) center;background-size:calc(.75em + .375rem) calc(.75em + .375rem)}.form-control.is-valid:focus,.was-validated .form-control:valid:focus{border-color:#198754;box-shadow:0 0 0 .25rem rgba(25,135,84,.25)}.was-validated textarea.form-control:valid,textarea.form-control.is-valid{padding-right:calc(1.5em + .75rem);background-position:top calc(.375em + .1875rem) right calc(.375em + .1875rem)}.form-select.is-valid,.was-validated .form-select:valid{border-color:#198754}.form-select.is-valid:not([multiple]):not([size]),.form-select.is-valid:not([multiple])[size="1"],.was-validated .form-select:valid:not([multiple]):not([size]),.was-validated .form-select:valid:not([multiple])[size="1"]{padding-right:4.125rem;background-image:url("data:image/svg+xml,%3csvg xmlns='http://www.w3.org/2000/svg' viewBox='0 0 16 16'%3e%3cpath fill='none' stroke='%23343a40' stroke-linecap='round' stroke-linejoin='round' stroke-width='2' d='M2 5l6 6 6-6'/%3e%3c/svg%3e"),url("data:image/svg+xml,%3csvg xmlns='http://www.w3.org/2000/svg' viewBox='0 0 8 8'%3e%3cpath fill='%23198754' d='M2.3 6.73L.6 4.53c-.4-1.04.46-1.4 1.1-.8l1.1 1.4 3.4-3.8c.6-.63 1.6-.27 1.2.7l-4 4.6c-.43.5-.8.4-1.1.1z'/%3e%3c/svg%3e");background-position:right .75rem center,center right 2.25rem;background-size:16px 12px,calc(.75em + .375rem) calc(.75em + .375rem)}.form-select.is-valid:focus,.was-validated .form-select:valid:focus{border-color:#198754;box-shadow:0 0 0 .25rem rgba(25,135,84,.25)}.form-check-input.is-valid,.was-validated .form-check-input:valid{border-color:#198754}.form-check-input.is-valid:checked,.was-validated .form-check-input:valid:checked{background-color:#198754}.form-check-input.is-valid:focus,.was-validated .form-check-input:valid:focus{box-shadow:0 0 0 .25rem rgba(25,135,84,.25)}.form-check-input.is-valid~.form-check-label,.was-validated .form-check-input:valid~.form-check-label{color:#198754}.form-check-inline 
.form-check-input~.valid-feedback{margin-left:.5em}.input-group .form-control.is-valid,.input-group .form-select.is-valid,.was-validated .input-group .form-control:valid,.was-validated .input-group .form-select:valid{z-index:1}.input-group .form-control.is-valid:focus,.input-group .form-select.is-valid:focus,.was-validated .input-group .form-control:valid:focus,.was-validated .input-group .form-select:valid:focus{z-index:3}.invalid-feedback{display:none;width:100%;margin-top:.25rem;font-size:.875em;color:#dc3545}.invalid-tooltip{position:absolute;top:100%;z-index:5;display:none;max-width:100%;padding:.25rem .5rem;margin-top:.1rem;font-size:.875rem;color:#fff;background-color:rgba(220,53,69,.9);border-radius:.25rem}.is-invalid~.invalid-feedback,.is-invalid~.invalid-tooltip,.was-validated :invalid~.invalid-feedback,.was-validated :invalid~.invalid-tooltip{display:block}.form-control.is-invalid,.was-validated .form-control:invalid{border-color:#dc3545;padding-right:calc(1.5em + .75rem);background-image:url("data:image/svg+xml,%3csvg xmlns='http://www.w3.org/2000/svg' viewBox='0 0 12 12' width='12' height='12' fill='none' stroke='%23dc3545'%3e%3ccircle cx='6' cy='6' r='4.5'/%3e%3cpath stroke-linejoin='round' d='M5.8 3.6h.4L6 6.5z'/%3e%3ccircle cx='6' cy='8.2' r='.6' fill='%23dc3545' stroke='none'/%3e%3c/svg%3e");background-repeat:no-repeat;background-position:right calc(.375em + .1875rem) center;background-size:calc(.75em + .375rem) calc(.75em + .375rem)}.form-control.is-invalid:focus,.was-validated .form-control:invalid:focus{border-color:#dc3545;box-shadow:0 0 0 .25rem rgba(220,53,69,.25)}.was-validated textarea.form-control:invalid,textarea.form-control.is-invalid{padding-right:calc(1.5em + .75rem);background-position:top calc(.375em + .1875rem) right calc(.375em + .1875rem)}.form-select.is-invalid,.was-validated .form-select:invalid{border-color:#dc3545}.form-select.is-invalid:not([multiple]):not([size]),.form-select.is-invalid:not([multiple])[size="1"],.was-validated .form-select:invalid:not([multiple]):not([size]),.was-validated .form-select:invalid:not([multiple])[size="1"]{padding-right:4.125rem;background-image:url("data:image/svg+xml,%3csvg xmlns='http://www.w3.org/2000/svg' viewBox='0 0 16 16'%3e%3cpath fill='none' stroke='%23343a40' stroke-linecap='round' stroke-linejoin='round' stroke-width='2' d='M2 5l6 6 6-6'/%3e%3c/svg%3e"),url("data:image/svg+xml,%3csvg xmlns='http://www.w3.org/2000/svg' viewBox='0 0 12 12' width='12' height='12' fill='none' stroke='%23dc3545'%3e%3ccircle cx='6' cy='6' r='4.5'/%3e%3cpath stroke-linejoin='round' d='M5.8 3.6h.4L6 6.5z'/%3e%3ccircle cx='6' cy='8.2' r='.6' fill='%23dc3545' stroke='none'/%3e%3c/svg%3e");background-position:right .75rem center,center right 2.25rem;background-size:16px 12px,calc(.75em + .375rem) calc(.75em + .375rem)}.form-select.is-invalid:focus,.was-validated .form-select:invalid:focus{border-color:#dc3545;box-shadow:0 0 0 .25rem rgba(220,53,69,.25)}.form-check-input.is-invalid,.was-validated .form-check-input:invalid{border-color:#dc3545}.form-check-input.is-invalid:checked,.was-validated .form-check-input:invalid:checked{background-color:#dc3545}.form-check-input.is-invalid:focus,.was-validated .form-check-input:invalid:focus{box-shadow:0 0 0 .25rem rgba(220,53,69,.25)}.form-check-input.is-invalid~.form-check-label,.was-validated .form-check-input:invalid~.form-check-label{color:#dc3545}.form-check-inline .form-check-input~.invalid-feedback{margin-left:.5em}.input-group .form-control.is-invalid,.input-group 
.form-select.is-invalid,.was-validated .input-group .form-control:invalid,.was-validated .input-group .form-select:invalid{z-index:2}.input-group .form-control.is-invalid:focus,.input-group .form-select.is-invalid:focus,.was-validated .input-group .form-control:invalid:focus,.was-validated .input-group .form-select:invalid:focus{z-index:3}.btn{display:inline-block;font-weight:400;line-height:1.5;color:#212529;text-align:center;text-decoration:none;vertical-align:middle;cursor:pointer;-webkit-user-select:none;-moz-user-select:none;user-select:none;background-color:transparent;border:1px solid transparent;padding:.375rem .75rem;font-size:1rem;border-radius:.25rem;transition:color .15s ease-in-out,background-color .15s ease-in-out,border-color .15s ease-in-out,box-shadow .15s ease-in-out}@media (prefers-reduced-motion:reduce){.btn{transition:none}}.btn:hover{color:#212529}.btn-check:focus+.btn,.btn:focus{outline:0;box-shadow:0 0 0 .25rem rgba(13,110,253,.25)}.btn.disabled,.btn:disabled,fieldset:disabled .btn{pointer-events:none;opacity:.65}.btn-primary{color:#fff;background-color:#0d6efd;border-color:#0d6efd}.btn-primary:hover{color:#fff;background-color:#0b5ed7;border-color:#0a58ca}.btn-check:focus+.btn-primary,.btn-primary:focus{color:#fff;background-color:#0b5ed7;border-color:#0a58ca;box-shadow:0 0 0 .25rem rgba(49,132,253,.5)}.btn-check:active+.btn-primary,.btn-check:checked+.btn-primary,.btn-primary.active,.btn-primary:active,.show>.btn-primary.dropdown-toggle{color:#fff;background-color:#0a58ca;border-color:#0a53be}.btn-check:active+.btn-primary:focus,.btn-check:checked+.btn-primary:focus,.btn-primary.active:focus,.btn-primary:active:focus,.show>.btn-primary.dropdown-toggle:focus{box-shadow:0 0 0 .25rem rgba(49,132,253,.5)}.btn-primary.disabled,.btn-primary:disabled{color:#fff;background-color:#0d6efd;border-color:#0d6efd}.btn-secondary{color:#fff;background-color:#6c757d;border-color:#6c757d}.btn-secondary:hover{color:#fff;background-color:#5c636a;border-color:#565e64}.btn-check:focus+.btn-secondary,.btn-secondary:focus{color:#fff;background-color:#5c636a;border-color:#565e64;box-shadow:0 0 0 .25rem rgba(130,138,145,.5)}.btn-check:active+.btn-secondary,.btn-check:checked+.btn-secondary,.btn-secondary.active,.btn-secondary:active,.show>.btn-secondary.dropdown-toggle{color:#fff;background-color:#565e64;border-color:#51585e}.btn-check:active+.btn-secondary:focus,.btn-check:checked+.btn-secondary:focus,.btn-secondary.active:focus,.btn-secondary:active:focus,.show>.btn-secondary.dropdown-toggle:focus{box-shadow:0 0 0 .25rem rgba(130,138,145,.5)}.btn-secondary.disabled,.btn-secondary:disabled{color:#fff;background-color:#6c757d;border-color:#6c757d}.btn-success{color:#fff;background-color:#198754;border-color:#198754}.btn-success:hover{color:#fff;background-color:#157347;border-color:#146c43}.btn-check:focus+.btn-success,.btn-success:focus{color:#fff;background-color:#157347;border-color:#146c43;box-shadow:0 0 0 .25rem rgba(60,153,110,.5)}.btn-check:active+.btn-success,.btn-check:checked+.btn-success,.btn-success.active,.btn-success:active,.show>.btn-success.dropdown-toggle{color:#fff;background-color:#146c43;border-color:#13653f}.btn-check:active+.btn-success:focus,.btn-check:checked+.btn-success:focus,.btn-success.active:focus,.btn-success:active:focus,.show>.btn-success.dropdown-toggle:focus{box-shadow:0 0 0 .25rem 
rgba(60,153,110,.5)}.btn-success.disabled,.btn-success:disabled{color:#fff;background-color:#198754;border-color:#198754}.btn-info{color:#000;background-color:#0dcaf0;border-color:#0dcaf0}.btn-info:hover{color:#000;background-color:#31d2f2;border-color:#25cff2}.btn-check:focus+.btn-info,.btn-info:focus{color:#000;background-color:#31d2f2;border-color:#25cff2;box-shadow:0 0 0 .25rem rgba(11,172,204,.5)}.btn-check:active+.btn-info,.btn-check:checked+.btn-info,.btn-info.active,.btn-info:active,.show>.btn-info.dropdown-toggle{color:#000;background-color:#3dd5f3;border-color:#25cff2}.btn-check:active+.btn-info:focus,.btn-check:checked+.btn-info:focus,.btn-info.active:focus,.btn-info:active:focus,.show>.btn-info.dropdown-toggle:focus{box-shadow:0 0 0 .25rem rgba(11,172,204,.5)}.btn-info.disabled,.btn-info:disabled{color:#000;background-color:#0dcaf0;border-color:#0dcaf0}.btn-warning{color:#000;background-color:#ffc107;border-color:#ffc107}.btn-warning:hover{color:#000;background-color:#ffca2c;border-color:#ffc720}.btn-check:focus+.btn-warning,.btn-warning:focus{color:#000;background-color:#ffca2c;border-color:#ffc720;box-shadow:0 0 0 .25rem rgba(217,164,6,.5)}.btn-check:active+.btn-warning,.btn-check:checked+.btn-warning,.btn-warning.active,.btn-warning:active,.show>.btn-warning.dropdown-toggle{color:#000;background-color:#ffcd39;border-color:#ffc720}.btn-check:active+.btn-warning:focus,.btn-check:checked+.btn-warning:focus,.btn-warning.active:focus,.btn-warning:active:focus,.show>.btn-warning.dropdown-toggle:focus{box-shadow:0 0 0 .25rem rgba(217,164,6,.5)}.btn-warning.disabled,.btn-warning:disabled{color:#000;background-color:#ffc107;border-color:#ffc107}.btn-danger{color:#fff;background-color:#dc3545;border-color:#dc3545}.btn-danger:hover{color:#fff;background-color:#bb2d3b;border-color:#b02a37}.btn-check:focus+.btn-danger,.btn-danger:focus{color:#fff;background-color:#bb2d3b;border-color:#b02a37;box-shadow:0 0 0 .25rem rgba(225,83,97,.5)}.btn-check:active+.btn-danger,.btn-check:checked+.btn-danger,.btn-danger.active,.btn-danger:active,.show>.btn-danger.dropdown-toggle{color:#fff;background-color:#b02a37;border-color:#a52834}.btn-check:active+.btn-danger:focus,.btn-check:checked+.btn-danger:focus,.btn-danger.active:focus,.btn-danger:active:focus,.show>.btn-danger.dropdown-toggle:focus{box-shadow:0 0 0 .25rem rgba(225,83,97,.5)}.btn-danger.disabled,.btn-danger:disabled{color:#fff;background-color:#dc3545;border-color:#dc3545}.btn-light{color:#000;background-color:#f8f9fa;border-color:#f8f9fa}.btn-light:hover{color:#000;background-color:#f9fafb;border-color:#f9fafb}.btn-check:focus+.btn-light,.btn-light:focus{color:#000;background-color:#f9fafb;border-color:#f9fafb;box-shadow:0 0 0 .25rem rgba(211,212,213,.5)}.btn-check:active+.btn-light,.btn-check:checked+.btn-light,.btn-light.active,.btn-light:active,.show>.btn-light.dropdown-toggle{color:#000;background-color:#f9fafb;border-color:#f9fafb}.btn-check:active+.btn-light:focus,.btn-check:checked+.btn-light:focus,.btn-light.active:focus,.btn-light:active:focus,.show>.btn-light.dropdown-toggle:focus{box-shadow:0 0 0 .25rem rgba(211,212,213,.5)}.btn-light.disabled,.btn-light:disabled{color:#000;background-color:#f8f9fa;border-color:#f8f9fa}.btn-dark{color:#fff;background-color:#212529;border-color:#212529}.btn-dark:hover{color:#fff;background-color:#1c1f23;border-color:#1a1e21}.btn-check:focus+.btn-dark,.btn-dark:focus{color:#fff;background-color:#1c1f23;border-color:#1a1e21;box-shadow:0 0 0 .25rem 
rgba(66,70,73,.5)}.btn-check:active+.btn-dark,.btn-check:checked+.btn-dark,.btn-dark.active,.btn-dark:active,.show>.btn-dark.dropdown-toggle{color:#fff;background-color:#1a1e21;border-color:#191c1f}.btn-check:active+.btn-dark:focus,.btn-check:checked+.btn-dark:focus,.btn-dark.active:focus,.btn-dark:active:focus,.show>.btn-dark.dropdown-toggle:focus{box-shadow:0 0 0 .25rem rgba(66,70,73,.5)}.btn-dark.disabled,.btn-dark:disabled{color:#fff;background-color:#212529;border-color:#212529}.btn-outline-primary{color:#0d6efd;border-color:#0d6efd}.btn-outline-primary:hover{color:#fff;background-color:#0d6efd;border-color:#0d6efd}.btn-check:focus+.btn-outline-primary,.btn-outline-primary:focus{box-shadow:0 0 0 .25rem rgba(13,110,253,.5)}.btn-check:active+.btn-outline-primary,.btn-check:checked+.btn-outline-primary,.btn-outline-primary.active,.btn-outline-primary.dropdown-toggle.show,.btn-outline-primary:active{color:#fff;background-color:#0d6efd;border-color:#0d6efd}.btn-check:active+.btn-outline-primary:focus,.btn-check:checked+.btn-outline-primary:focus,.btn-outline-primary.active:focus,.btn-outline-primary.dropdown-toggle.show:focus,.btn-outline-primary:active:focus{box-shadow:0 0 0 .25rem rgba(13,110,253,.5)}.btn-outline-primary.disabled,.btn-outline-primary:disabled{color:#0d6efd;background-color:transparent}.btn-outline-secondary{color:#6c757d;border-color:#6c757d}.btn-outline-secondary:hover{color:#fff;background-color:#6c757d;border-color:#6c757d}.btn-check:focus+.btn-outline-secondary,.btn-outline-secondary:focus{box-shadow:0 0 0 .25rem rgba(108,117,125,.5)}.btn-check:active+.btn-outline-secondary,.btn-check:checked+.btn-outline-secondary,.btn-outline-secondary.active,.btn-outline-secondary.dropdown-toggle.show,.btn-outline-secondary:active{color:#fff;background-color:#6c757d;border-color:#6c757d}.btn-check:active+.btn-outline-secondary:focus,.btn-check:checked+.btn-outline-secondary:focus,.btn-outline-secondary.active:focus,.btn-outline-secondary.dropdown-toggle.show:focus,.btn-outline-secondary:active:focus{box-shadow:0 0 0 .25rem rgba(108,117,125,.5)}.btn-outline-secondary.disabled,.btn-outline-secondary:disabled{color:#6c757d;background-color:transparent}.btn-outline-success{color:#198754;border-color:#198754}.btn-outline-success:hover{color:#fff;background-color:#198754;border-color:#198754}.btn-check:focus+.btn-outline-success,.btn-outline-success:focus{box-shadow:0 0 0 .25rem rgba(25,135,84,.5)}.btn-check:active+.btn-outline-success,.btn-check:checked+.btn-outline-success,.btn-outline-success.active,.btn-outline-success.dropdown-toggle.show,.btn-outline-success:active{color:#fff;background-color:#198754;border-color:#198754}.btn-check:active+.btn-outline-success:focus,.btn-check:checked+.btn-outline-success:focus,.btn-outline-success.active:focus,.btn-outline-success.dropdown-toggle.show:focus,.btn-outline-success:active:focus{box-shadow:0 0 0 .25rem rgba(25,135,84,.5)}.btn-outline-success.disabled,.btn-outline-success:disabled{color:#198754;background-color:transparent}.btn-outline-info{color:#0dcaf0;border-color:#0dcaf0}.btn-outline-info:hover{color:#000;background-color:#0dcaf0;border-color:#0dcaf0}.btn-check:focus+.btn-outline-info,.btn-outline-info:focus{box-shadow:0 0 0 .25rem 
rgba(13,202,240,.5)}.btn-check:active+.btn-outline-info,.btn-check:checked+.btn-outline-info,.btn-outline-info.active,.btn-outline-info.dropdown-toggle.show,.btn-outline-info:active{color:#000;background-color:#0dcaf0;border-color:#0dcaf0}.btn-check:active+.btn-outline-info:focus,.btn-check:checked+.btn-outline-info:focus,.btn-outline-info.active:focus,.btn-outline-info.dropdown-toggle.show:focus,.btn-outline-info:active:focus{box-shadow:0 0 0 .25rem rgba(13,202,240,.5)}.btn-outline-info.disabled,.btn-outline-info:disabled{color:#0dcaf0;background-color:transparent}.btn-outline-warning{color:#ffc107;border-color:#ffc107}.btn-outline-warning:hover{color:#000;background-color:#ffc107;border-color:#ffc107}.btn-check:focus+.btn-outline-warning,.btn-outline-warning:focus{box-shadow:0 0 0 .25rem rgba(255,193,7,.5)}.btn-check:active+.btn-outline-warning,.btn-check:checked+.btn-outline-warning,.btn-outline-warning.active,.btn-outline-warning.dropdown-toggle.show,.btn-outline-warning:active{color:#000;background-color:#ffc107;border-color:#ffc107}.btn-check:active+.btn-outline-warning:focus,.btn-check:checked+.btn-outline-warning:focus,.btn-outline-warning.active:focus,.btn-outline-warning.dropdown-toggle.show:focus,.btn-outline-warning:active:focus{box-shadow:0 0 0 .25rem rgba(255,193,7,.5)}.btn-outline-warning.disabled,.btn-outline-warning:disabled{color:#ffc107;background-color:transparent}.btn-outline-danger{color:#dc3545;border-color:#dc3545}.btn-outline-danger:hover{color:#fff;background-color:#dc3545;border-color:#dc3545}.btn-check:focus+.btn-outline-danger,.btn-outline-danger:focus{box-shadow:0 0 0 .25rem rgba(220,53,69,.5)}.btn-check:active+.btn-outline-danger,.btn-check:checked+.btn-outline-danger,.btn-outline-danger.active,.btn-outline-danger.dropdown-toggle.show,.btn-outline-danger:active{color:#fff;background-color:#dc3545;border-color:#dc3545}.btn-check:active+.btn-outline-danger:focus,.btn-check:checked+.btn-outline-danger:focus,.btn-outline-danger.active:focus,.btn-outline-danger.dropdown-toggle.show:focus,.btn-outline-danger:active:focus{box-shadow:0 0 0 .25rem rgba(220,53,69,.5)}.btn-outline-danger.disabled,.btn-outline-danger:disabled{color:#dc3545;background-color:transparent}.btn-outline-light{color:#f8f9fa;border-color:#f8f9fa}.btn-outline-light:hover{color:#000;background-color:#f8f9fa;border-color:#f8f9fa}.btn-check:focus+.btn-outline-light,.btn-outline-light:focus{box-shadow:0 0 0 .25rem rgba(248,249,250,.5)}.btn-check:active+.btn-outline-light,.btn-check:checked+.btn-outline-light,.btn-outline-light.active,.btn-outline-light.dropdown-toggle.show,.btn-outline-light:active{color:#000;background-color:#f8f9fa;border-color:#f8f9fa}.btn-check:active+.btn-outline-light:focus,.btn-check:checked+.btn-outline-light:focus,.btn-outline-light.active:focus,.btn-outline-light.dropdown-toggle.show:focus,.btn-outline-light:active:focus{box-shadow:0 0 0 .25rem rgba(248,249,250,.5)}.btn-outline-light.disabled,.btn-outline-light:disabled{color:#f8f9fa;background-color:transparent}.btn-outline-dark{color:#212529;border-color:#212529}.btn-outline-dark:hover{color:#fff;background-color:#212529;border-color:#212529}.btn-check:focus+.btn-outline-dark,.btn-outline-dark:focus{box-shadow:0 0 0 .25rem 
rgba(33,37,41,.5)}.btn-check:active+.btn-outline-dark,.btn-check:checked+.btn-outline-dark,.btn-outline-dark.active,.btn-outline-dark.dropdown-toggle.show,.btn-outline-dark:active{color:#fff;background-color:#212529;border-color:#212529}.btn-check:active+.btn-outline-dark:focus,.btn-check:checked+.btn-outline-dark:focus,.btn-outline-dark.active:focus,.btn-outline-dark.dropdown-toggle.show:focus,.btn-outline-dark:active:focus{box-shadow:0 0 0 .25rem rgba(33,37,41,.5)}.btn-outline-dark.disabled,.btn-outline-dark:disabled{color:#212529;background-color:transparent}.btn-link{font-weight:400;color:#0d6efd;text-decoration:underline}.btn-link:hover{color:#0a58ca}.btn-link.disabled,.btn-link:disabled{color:#6c757d}.btn-group-lg>.btn,.btn-lg{padding:.5rem 1rem;font-size:1.25rem;border-radius:.3rem}.btn-group-sm>.btn,.btn-sm{padding:.25rem .5rem;font-size:.875rem;border-radius:.2rem}.fade{transition:opacity .15s linear}@media (prefers-reduced-motion:reduce){.fade{transition:none}}.fade:not(.show){opacity:0}.collapse:not(.show){display:none}.collapsing{height:0;overflow:hidden;transition:height .35s ease}@media (prefers-reduced-motion:reduce){.collapsing{transition:none}}.collapsing.collapse-horizontal{width:0;height:auto;transition:width .35s ease}@media (prefers-reduced-motion:reduce){.collapsing.collapse-horizontal{transition:none}}.dropdown,.dropend,.dropstart,.dropup{position:relative}.dropdown-toggle{white-space:nowrap}.dropdown-toggle::after{display:inline-block;margin-left:.255em;vertical-align:.255em;content:"";border-top:.3em solid;border-right:.3em solid transparent;border-bottom:0;border-left:.3em solid transparent}.dropdown-toggle:empty::after{margin-left:0}.dropdown-menu{position:absolute;z-index:1000;display:none;min-width:10rem;padding:.5rem 0;margin:0;font-size:1rem;color:#212529;text-align:left;list-style:none;background-color:#fff;background-clip:padding-box;border:1px solid rgba(0,0,0,.15);border-radius:.25rem}.dropdown-menu[data-bs-popper]{top:100%;left:0;margin-top:.125rem}.dropdown-menu-start{--bs-position:start}.dropdown-menu-start[data-bs-popper]{right:auto;left:0}.dropdown-menu-end{--bs-position:end}.dropdown-menu-end[data-bs-popper]{right:0;left:auto}@media (min-width:576px){.dropdown-menu-sm-start{--bs-position:start}.dropdown-menu-sm-start[data-bs-popper]{right:auto;left:0}.dropdown-menu-sm-end{--bs-position:end}.dropdown-menu-sm-end[data-bs-popper]{right:0;left:auto}}@media (min-width:768px){.dropdown-menu-md-start{--bs-position:start}.dropdown-menu-md-start[data-bs-popper]{right:auto;left:0}.dropdown-menu-md-end{--bs-position:end}.dropdown-menu-md-end[data-bs-popper]{right:0;left:auto}}@media (min-width:992px){.dropdown-menu-lg-start{--bs-position:start}.dropdown-menu-lg-start[data-bs-popper]{right:auto;left:0}.dropdown-menu-lg-end{--bs-position:end}.dropdown-menu-lg-end[data-bs-popper]{right:0;left:auto}}@media (min-width:1200px){.dropdown-menu-xl-start{--bs-position:start}.dropdown-menu-xl-start[data-bs-popper]{right:auto;left:0}.dropdown-menu-xl-end{--bs-position:end}.dropdown-menu-xl-end[data-bs-popper]{right:0;left:auto}}@media (min-width:1400px){.dropdown-menu-xxl-start{--bs-position:start}.dropdown-menu-xxl-start[data-bs-popper]{right:auto;left:0}.dropdown-menu-xxl-end{--bs-position:end}.dropdown-menu-xxl-end[data-bs-popper]{right:0;left:auto}}.dropup .dropdown-menu[data-bs-popper]{top:auto;bottom:100%;margin-top:0;margin-bottom:.125rem}.dropup 
.dropdown-toggle::after{display:inline-block;margin-left:.255em;vertical-align:.255em;content:"";border-top:0;border-right:.3em solid transparent;border-bottom:.3em solid;border-left:.3em solid transparent}.dropup .dropdown-toggle:empty::after{margin-left:0}.dropend .dropdown-menu[data-bs-popper]{top:0;right:auto;left:100%;margin-top:0;margin-left:.125rem}.dropend .dropdown-toggle::after{display:inline-block;margin-left:.255em;vertical-align:.255em;content:"";border-top:.3em solid transparent;border-right:0;border-bottom:.3em solid transparent;border-left:.3em solid}.dropend .dropdown-toggle:empty::after{margin-left:0}.dropend .dropdown-toggle::after{vertical-align:0}.dropstart .dropdown-menu[data-bs-popper]{top:0;right:100%;left:auto;margin-top:0;margin-right:.125rem}.dropstart .dropdown-toggle::after{display:inline-block;margin-left:.255em;vertical-align:.255em;content:""}.dropstart .dropdown-toggle::after{display:none}.dropstart .dropdown-toggle::before{display:inline-block;margin-right:.255em;vertical-align:.255em;content:"";border-top:.3em solid transparent;border-right:.3em solid;border-bottom:.3em solid transparent}.dropstart .dropdown-toggle:empty::after{margin-left:0}.dropstart .dropdown-toggle::before{vertical-align:0}.dropdown-divider{height:0;margin:.5rem 0;overflow:hidden;border-top:1px solid rgba(0,0,0,.15)}.dropdown-item{display:block;width:100%;padding:.25rem 1rem;clear:both;font-weight:400;color:#212529;text-align:inherit;text-decoration:none;white-space:nowrap;background-color:transparent;border:0}.dropdown-item:focus,.dropdown-item:hover{color:#1e2125;background-color:#e9ecef}.dropdown-item.active,.dropdown-item:active{color:#fff;text-decoration:none;background-color:#0d6efd}.dropdown-item.disabled,.dropdown-item:disabled{color:#adb5bd;pointer-events:none;background-color:transparent}.dropdown-menu.show{display:block}.dropdown-header{display:block;padding:.5rem 1rem;margin-bottom:0;font-size:.875rem;color:#6c757d;white-space:nowrap}.dropdown-item-text{display:block;padding:.25rem 1rem;color:#212529}.dropdown-menu-dark{color:#dee2e6;background-color:#343a40;border-color:rgba(0,0,0,.15)}.dropdown-menu-dark .dropdown-item{color:#dee2e6}.dropdown-menu-dark .dropdown-item:focus,.dropdown-menu-dark .dropdown-item:hover{color:#fff;background-color:rgba(255,255,255,.15)}.dropdown-menu-dark .dropdown-item.active,.dropdown-menu-dark .dropdown-item:active{color:#fff;background-color:#0d6efd}.dropdown-menu-dark .dropdown-item.disabled,.dropdown-menu-dark .dropdown-item:disabled{color:#adb5bd}.dropdown-menu-dark .dropdown-divider{border-color:rgba(0,0,0,.15)}.dropdown-menu-dark .dropdown-item-text{color:#dee2e6}.dropdown-menu-dark .dropdown-header{color:#adb5bd}.btn-group,.btn-group-vertical{position:relative;display:inline-flex;vertical-align:middle}.btn-group-vertical>.btn,.btn-group>.btn{position:relative;flex:1 1 auto}.btn-group-vertical>.btn-check:checked+.btn,.btn-group-vertical>.btn-check:focus+.btn,.btn-group-vertical>.btn.active,.btn-group-vertical>.btn:active,.btn-group-vertical>.btn:focus,.btn-group-vertical>.btn:hover,.btn-group>.btn-check:checked+.btn,.btn-group>.btn-check:focus+.btn,.btn-group>.btn.active,.btn-group>.btn:active,.btn-group>.btn:focus,.btn-group>.btn:hover{z-index:1}.btn-toolbar{display:flex;flex-wrap:wrap;justify-content:flex-start}.btn-toolbar 
.input-group{width:auto}.btn-group>.btn-group:not(:first-child),.btn-group>.btn:not(:first-child){margin-left:-1px}.btn-group>.btn-group:not(:last-child)>.btn,.btn-group>.btn:not(:last-child):not(.dropdown-toggle){border-top-right-radius:0;border-bottom-right-radius:0}.btn-group>.btn-group:not(:first-child)>.btn,.btn-group>.btn:nth-child(n+3),.btn-group>:not(.btn-check)+.btn{border-top-left-radius:0;border-bottom-left-radius:0}.dropdown-toggle-split{padding-right:.5625rem;padding-left:.5625rem}.dropdown-toggle-split::after,.dropend .dropdown-toggle-split::after,.dropup .dropdown-toggle-split::after{margin-left:0}.dropstart .dropdown-toggle-split::before{margin-right:0}.btn-group-sm>.btn+.dropdown-toggle-split,.btn-sm+.dropdown-toggle-split{padding-right:.375rem;padding-left:.375rem}.btn-group-lg>.btn+.dropdown-toggle-split,.btn-lg+.dropdown-toggle-split{padding-right:.75rem;padding-left:.75rem}.btn-group-vertical{flex-direction:column;align-items:flex-start;justify-content:center}.btn-group-vertical>.btn,.btn-group-vertical>.btn-group{width:100%}.btn-group-vertical>.btn-group:not(:first-child),.btn-group-vertical>.btn:not(:first-child){margin-top:-1px}.btn-group-vertical>.btn-group:not(:last-child)>.btn,.btn-group-vertical>.btn:not(:last-child):not(.dropdown-toggle){border-bottom-right-radius:0;border-bottom-left-radius:0}.btn-group-vertical>.btn-group:not(:first-child)>.btn,.btn-group-vertical>.btn~.btn{border-top-left-radius:0;border-top-right-radius:0}.nav{display:flex;flex-wrap:wrap;padding-left:0;margin-bottom:0;list-style:none}.nav-link{display:block;padding:.5rem 1rem;color:#0d6efd;text-decoration:none;transition:color .15s ease-in-out,background-color .15s ease-in-out,border-color .15s ease-in-out}@media (prefers-reduced-motion:reduce){.nav-link{transition:none}}.nav-link:focus,.nav-link:hover{color:#0a58ca}.nav-link.disabled{color:#6c757d;pointer-events:none;cursor:default}.nav-tabs{border-bottom:1px solid #dee2e6}.nav-tabs .nav-link{margin-bottom:-1px;background:0 0;border:1px solid transparent;border-top-left-radius:.25rem;border-top-right-radius:.25rem}.nav-tabs .nav-link:focus,.nav-tabs .nav-link:hover{border-color:#e9ecef #e9ecef #dee2e6;isolation:isolate}.nav-tabs .nav-link.disabled{color:#6c757d;background-color:transparent;border-color:transparent}.nav-tabs .nav-item.show .nav-link,.nav-tabs .nav-link.active{color:#495057;background-color:#fff;border-color:#dee2e6 #dee2e6 #fff}.nav-tabs .dropdown-menu{margin-top:-1px;border-top-left-radius:0;border-top-right-radius:0}.nav-pills .nav-link{background:0 0;border:0;border-radius:.25rem}.nav-pills .nav-link.active,.nav-pills .show>.nav-link{color:#fff;background-color:#0d6efd}.nav-fill .nav-item,.nav-fill>.nav-link{flex:1 1 auto;text-align:center}.nav-justified .nav-item,.nav-justified>.nav-link{flex-basis:0;flex-grow:1;text-align:center}.nav-fill .nav-item .nav-link,.nav-justified .nav-item 
.nav-link{width:100%}.tab-content>.tab-pane{display:none}.tab-content>.active{display:block}.navbar{position:relative;display:flex;flex-wrap:wrap;align-items:center;justify-content:space-between;padding-top:.5rem;padding-bottom:.5rem}.navbar>.container,.navbar>.container-fluid,.navbar>.container-lg,.navbar>.container-md,.navbar>.container-sm,.navbar>.container-xl,.navbar>.container-xxl{display:flex;flex-wrap:inherit;align-items:center;justify-content:space-between}.navbar-brand{padding-top:.3125rem;padding-bottom:.3125rem;margin-right:1rem;font-size:1.25rem;text-decoration:none;white-space:nowrap}.navbar-nav{display:flex;flex-direction:column;padding-left:0;margin-bottom:0;list-style:none}.navbar-nav .nav-link{padding-right:0;padding-left:0}.navbar-nav .dropdown-menu{position:static}.navbar-text{padding-top:.5rem;padding-bottom:.5rem}.navbar-collapse{flex-basis:100%;flex-grow:1;align-items:center}.navbar-toggler{padding:.25rem .75rem;font-size:1.25rem;line-height:1;background-color:transparent;border:1px solid transparent;border-radius:.25rem;transition:box-shadow .15s ease-in-out}@media (prefers-reduced-motion:reduce){.navbar-toggler{transition:none}}.navbar-toggler:hover{text-decoration:none}.navbar-toggler:focus{text-decoration:none;outline:0;box-shadow:0 0 0 .25rem}.navbar-toggler-icon{display:inline-block;width:1.5em;height:1.5em;vertical-align:middle;background-repeat:no-repeat;background-position:center;background-size:100%}.navbar-nav-scroll{max-height:var(--bs-scroll-height,75vh);overflow-y:auto}@media (min-width:576px){.navbar-expand-sm{flex-wrap:nowrap;justify-content:flex-start}.navbar-expand-sm .navbar-nav{flex-direction:row}.navbar-expand-sm .navbar-nav .dropdown-menu{position:absolute}.navbar-expand-sm .navbar-nav .nav-link{padding-right:.5rem;padding-left:.5rem}.navbar-expand-sm .navbar-nav-scroll{overflow:visible}.navbar-expand-sm .navbar-collapse{display:flex!important;flex-basis:auto}.navbar-expand-sm .navbar-toggler{display:none}.navbar-expand-sm .offcanvas-header{display:none}.navbar-expand-sm .offcanvas{position:inherit;bottom:0;z-index:1000;flex-grow:1;visibility:visible!important;background-color:transparent;border-right:0;border-left:0;transition:none;transform:none}.navbar-expand-sm .offcanvas-bottom,.navbar-expand-sm .offcanvas-top{height:auto;border-top:0;border-bottom:0}.navbar-expand-sm .offcanvas-body{display:flex;flex-grow:0;padding:0;overflow-y:visible}}@media (min-width:768px){.navbar-expand-md{flex-wrap:nowrap;justify-content:flex-start}.navbar-expand-md .navbar-nav{flex-direction:row}.navbar-expand-md .navbar-nav .dropdown-menu{position:absolute}.navbar-expand-md .navbar-nav .nav-link{padding-right:.5rem;padding-left:.5rem}.navbar-expand-md .navbar-nav-scroll{overflow:visible}.navbar-expand-md .navbar-collapse{display:flex!important;flex-basis:auto}.navbar-expand-md .navbar-toggler{display:none}.navbar-expand-md .offcanvas-header{display:none}.navbar-expand-md .offcanvas{position:inherit;bottom:0;z-index:1000;flex-grow:1;visibility:visible!important;background-color:transparent;border-right:0;border-left:0;transition:none;transform:none}.navbar-expand-md .offcanvas-bottom,.navbar-expand-md .offcanvas-top{height:auto;border-top:0;border-bottom:0}.navbar-expand-md .offcanvas-body{display:flex;flex-grow:0;padding:0;overflow-y:visible}}@media (min-width:992px){.navbar-expand-lg{flex-wrap:nowrap;justify-content:flex-start}.navbar-expand-lg .navbar-nav{flex-direction:row}.navbar-expand-lg .navbar-nav .dropdown-menu{position:absolute}.navbar-expand-lg 
.navbar-nav .nav-link{padding-right:.5rem;padding-left:.5rem}.navbar-expand-lg .navbar-nav-scroll{overflow:visible}.navbar-expand-lg .navbar-collapse{display:flex!important;flex-basis:auto}.navbar-expand-lg .navbar-toggler{display:none}.navbar-expand-lg .offcanvas-header{display:none}.navbar-expand-lg .offcanvas{position:inherit;bottom:0;z-index:1000;flex-grow:1;visibility:visible!important;background-color:transparent;border-right:0;border-left:0;transition:none;transform:none}.navbar-expand-lg .offcanvas-bottom,.navbar-expand-lg .offcanvas-top{height:auto;border-top:0;border-bottom:0}.navbar-expand-lg .offcanvas-body{display:flex;flex-grow:0;padding:0;overflow-y:visible}}@media (min-width:1200px){.navbar-expand-xl{flex-wrap:nowrap;justify-content:flex-start}.navbar-expand-xl .navbar-nav{flex-direction:row}.navbar-expand-xl .navbar-nav .dropdown-menu{position:absolute}.navbar-expand-xl .navbar-nav .nav-link{padding-right:.5rem;padding-left:.5rem}.navbar-expand-xl .navbar-nav-scroll{overflow:visible}.navbar-expand-xl .navbar-collapse{display:flex!important;flex-basis:auto}.navbar-expand-xl .navbar-toggler{display:none}.navbar-expand-xl .offcanvas-header{display:none}.navbar-expand-xl .offcanvas{position:inherit;bottom:0;z-index:1000;flex-grow:1;visibility:visible!important;background-color:transparent;border-right:0;border-left:0;transition:none;transform:none}.navbar-expand-xl .offcanvas-bottom,.navbar-expand-xl .offcanvas-top{height:auto;border-top:0;border-bottom:0}.navbar-expand-xl .offcanvas-body{display:flex;flex-grow:0;padding:0;overflow-y:visible}}@media (min-width:1400px){.navbar-expand-xxl{flex-wrap:nowrap;justify-content:flex-start}.navbar-expand-xxl .navbar-nav{flex-direction:row}.navbar-expand-xxl .navbar-nav .dropdown-menu{position:absolute}.navbar-expand-xxl .navbar-nav .nav-link{padding-right:.5rem;padding-left:.5rem}.navbar-expand-xxl .navbar-nav-scroll{overflow:visible}.navbar-expand-xxl .navbar-collapse{display:flex!important;flex-basis:auto}.navbar-expand-xxl .navbar-toggler{display:none}.navbar-expand-xxl .offcanvas-header{display:none}.navbar-expand-xxl .offcanvas{position:inherit;bottom:0;z-index:1000;flex-grow:1;visibility:visible!important;background-color:transparent;border-right:0;border-left:0;transition:none;transform:none}.navbar-expand-xxl .offcanvas-bottom,.navbar-expand-xxl .offcanvas-top{height:auto;border-top:0;border-bottom:0}.navbar-expand-xxl .offcanvas-body{display:flex;flex-grow:0;padding:0;overflow-y:visible}}.navbar-expand{flex-wrap:nowrap;justify-content:flex-start}.navbar-expand .navbar-nav{flex-direction:row}.navbar-expand .navbar-nav .dropdown-menu{position:absolute}.navbar-expand .navbar-nav .nav-link{padding-right:.5rem;padding-left:.5rem}.navbar-expand .navbar-nav-scroll{overflow:visible}.navbar-expand .navbar-collapse{display:flex!important;flex-basis:auto}.navbar-expand .navbar-toggler{display:none}.navbar-expand .offcanvas-header{display:none}.navbar-expand .offcanvas{position:inherit;bottom:0;z-index:1000;flex-grow:1;visibility:visible!important;background-color:transparent;border-right:0;border-left:0;transition:none;transform:none}.navbar-expand .offcanvas-bottom,.navbar-expand .offcanvas-top{height:auto;border-top:0;border-bottom:0}.navbar-expand .offcanvas-body{display:flex;flex-grow:0;padding:0;overflow-y:visible}.navbar-light .navbar-brand{color:rgba(0,0,0,.9)}.navbar-light .navbar-brand:focus,.navbar-light .navbar-brand:hover{color:rgba(0,0,0,.9)}.navbar-light .navbar-nav .nav-link{color:rgba(0,0,0,.55)}.navbar-light .navbar-nav 
.nav-link:focus,.navbar-light .navbar-nav .nav-link:hover{color:rgba(0,0,0,.7)}.navbar-light .navbar-nav .nav-link.disabled{color:rgba(0,0,0,.3)}.navbar-light .navbar-nav .nav-link.active,.navbar-light .navbar-nav .show>.nav-link{color:rgba(0,0,0,.9)}.navbar-light .navbar-toggler{color:rgba(0,0,0,.55);border-color:rgba(0,0,0,.1)}.navbar-light .navbar-toggler-icon{background-image:url("data:image/svg+xml,%3csvg xmlns='http://www.w3.org/2000/svg' viewBox='0 0 30 30'%3e%3cpath stroke='rgba%280, 0, 0, 0.55%29' stroke-linecap='round' stroke-miterlimit='10' stroke-width='2' d='M4 7h22M4 15h22M4 23h22'/%3e%3c/svg%3e")}.navbar-light .navbar-text{color:rgba(0,0,0,.55)}.navbar-light .navbar-text a,.navbar-light .navbar-text a:focus,.navbar-light .navbar-text a:hover{color:rgba(0,0,0,.9)}.navbar-dark .navbar-brand{color:#fff}.navbar-dark .navbar-brand:focus,.navbar-dark .navbar-brand:hover{color:#fff}.navbar-dark .navbar-nav .nav-link{color:rgba(255,255,255,.55)}.navbar-dark .navbar-nav .nav-link:focus,.navbar-dark .navbar-nav .nav-link:hover{color:rgba(255,255,255,.75)}.navbar-dark .navbar-nav .nav-link.disabled{color:rgba(255,255,255,.25)}.navbar-dark .navbar-nav .nav-link.active,.navbar-dark .navbar-nav .show>.nav-link{color:#fff}.navbar-dark .navbar-toggler{color:rgba(255,255,255,.55);border-color:rgba(255,255,255,.1)}.navbar-dark .navbar-toggler-icon{background-image:url("data:image/svg+xml,%3csvg xmlns='http://www.w3.org/2000/svg' viewBox='0 0 30 30'%3e%3cpath stroke='rgba%28255, 255, 255, 0.55%29' stroke-linecap='round' stroke-miterlimit='10' stroke-width='2' d='M4 7h22M4 15h22M4 23h22'/%3e%3c/svg%3e")}.navbar-dark .navbar-text{color:rgba(255,255,255,.55)}.navbar-dark .navbar-text a,.navbar-dark .navbar-text a:focus,.navbar-dark .navbar-text a:hover{color:#fff}.card{position:relative;display:flex;flex-direction:column;min-width:0;word-wrap:break-word;background-color:#fff;background-clip:border-box;border:1px solid rgba(0,0,0,.125);border-radius:.25rem}.card>hr{margin-right:0;margin-left:0}.card>.list-group{border-top:inherit;border-bottom:inherit}.card>.list-group:first-child{border-top-width:0;border-top-left-radius:calc(.25rem - 1px);border-top-right-radius:calc(.25rem - 1px)}.card>.list-group:last-child{border-bottom-width:0;border-bottom-right-radius:calc(.25rem - 1px);border-bottom-left-radius:calc(.25rem - 1px)}.card>.card-header+.list-group,.card>.list-group+.card-footer{border-top:0}.card-body{flex:1 1 auto;padding:1rem 1rem}.card-title{margin-bottom:.5rem}.card-subtitle{margin-top:-.25rem;margin-bottom:0}.card-text:last-child{margin-bottom:0}.card-link+.card-link{margin-left:1rem}.card-header{padding:.5rem 1rem;margin-bottom:0;background-color:rgba(0,0,0,.03);border-bottom:1px solid rgba(0,0,0,.125)}.card-header:first-child{border-radius:calc(.25rem - 1px) calc(.25rem - 1px) 0 0}.card-footer{padding:.5rem 1rem;background-color:rgba(0,0,0,.03);border-top:1px solid rgba(0,0,0,.125)}.card-footer:last-child{border-radius:0 0 calc(.25rem - 1px) calc(.25rem - 1px)}.card-header-tabs{margin-right:-.5rem;margin-bottom:-.5rem;margin-left:-.5rem;border-bottom:0}.card-header-pills{margin-right:-.5rem;margin-left:-.5rem}.card-img-overlay{position:absolute;top:0;right:0;bottom:0;left:0;padding:1rem;border-radius:calc(.25rem - 1px)}.card-img,.card-img-bottom,.card-img-top{width:100%}.card-img,.card-img-top{border-top-left-radius:calc(.25rem - 1px);border-top-right-radius:calc(.25rem - 1px)}.card-img,.card-img-bottom{border-bottom-right-radius:calc(.25rem - 1px);border-bottom-left-radius:calc(.25rem 
- 1px)}.card-group>.card{margin-bottom:.75rem}@media (min-width:576px){.card-group{display:flex;flex-flow:row wrap}.card-group>.card{flex:1 0 0%;margin-bottom:0}.card-group>.card+.card{margin-left:0;border-left:0}.card-group>.card:not(:last-child){border-top-right-radius:0;border-bottom-right-radius:0}.card-group>.card:not(:last-child) .card-header,.card-group>.card:not(:last-child) .card-img-top{border-top-right-radius:0}.card-group>.card:not(:last-child) .card-footer,.card-group>.card:not(:last-child) .card-img-bottom{border-bottom-right-radius:0}.card-group>.card:not(:first-child){border-top-left-radius:0;border-bottom-left-radius:0}.card-group>.card:not(:first-child) .card-header,.card-group>.card:not(:first-child) .card-img-top{border-top-left-radius:0}.card-group>.card:not(:first-child) .card-footer,.card-group>.card:not(:first-child) .card-img-bottom{border-bottom-left-radius:0}}.accordion-button{position:relative;display:flex;align-items:center;width:100%;padding:1rem 1.25rem;font-size:1rem;color:#212529;text-align:left;background-color:#fff;border:0;border-radius:0;overflow-anchor:none;transition:color .15s ease-in-out,background-color .15s ease-in-out,border-color .15s ease-in-out,box-shadow .15s ease-in-out,border-radius .15s ease}@media (prefers-reduced-motion:reduce){.accordion-button{transition:none}}.accordion-button:not(.collapsed){color:#0c63e4;background-color:#e7f1ff;box-shadow:inset 0 -1px 0 rgba(0,0,0,.125)}.accordion-button:not(.collapsed)::after{background-image:url("data:image/svg+xml,%3csvg xmlns='http://www.w3.org/2000/svg' viewBox='0 0 16 16' fill='%230c63e4'%3e%3cpath fill-rule='evenodd' d='M1.646 4.646a.5.5 0 0 1 .708 0L8 10.293l5.646-5.647a.5.5 0 0 1 .708.708l-6 6a.5.5 0 0 1-.708 0l-6-6a.5.5 0 0 1 0-.708z'/%3e%3c/svg%3e");transform:rotate(-180deg)}.accordion-button::after{flex-shrink:0;width:1.25rem;height:1.25rem;margin-left:auto;content:"";background-image:url("data:image/svg+xml,%3csvg xmlns='http://www.w3.org/2000/svg' viewBox='0 0 16 16' fill='%23212529'%3e%3cpath fill-rule='evenodd' d='M1.646 4.646a.5.5 0 0 1 .708 0L8 10.293l5.646-5.647a.5.5 0 0 1 .708.708l-6 6a.5.5 0 0 1-.708 0l-6-6a.5.5 0 0 1 0-.708z'/%3e%3c/svg%3e");background-repeat:no-repeat;background-size:1.25rem;transition:transform .2s ease-in-out}@media (prefers-reduced-motion:reduce){.accordion-button::after{transition:none}}.accordion-button:hover{z-index:2}.accordion-button:focus{z-index:3;border-color:#86b7fe;outline:0;box-shadow:0 0 0 .25rem rgba(13,110,253,.25)}.accordion-header{margin-bottom:0}.accordion-item{background-color:#fff;border:1px solid rgba(0,0,0,.125)}.accordion-item:first-of-type{border-top-left-radius:.25rem;border-top-right-radius:.25rem}.accordion-item:first-of-type .accordion-button{border-top-left-radius:calc(.25rem - 1px);border-top-right-radius:calc(.25rem - 1px)}.accordion-item:not(:first-of-type){border-top:0}.accordion-item:last-of-type{border-bottom-right-radius:.25rem;border-bottom-left-radius:.25rem}.accordion-item:last-of-type .accordion-button.collapsed{border-bottom-right-radius:calc(.25rem - 1px);border-bottom-left-radius:calc(.25rem - 1px)}.accordion-item:last-of-type .accordion-collapse{border-bottom-right-radius:.25rem;border-bottom-left-radius:.25rem}.accordion-body{padding:1rem 1.25rem}.accordion-flush .accordion-collapse{border-width:0}.accordion-flush .accordion-item{border-right:0;border-left:0;border-radius:0}.accordion-flush .accordion-item:first-child{border-top:0}.accordion-flush .accordion-item:last-child{border-bottom:0}.accordion-flush 
.accordion-item .accordion-button{border-radius:0}.breadcrumb{display:flex;flex-wrap:wrap;padding:0 0;margin-bottom:1rem;list-style:none}.breadcrumb-item+.breadcrumb-item{padding-left:.5rem}.breadcrumb-item+.breadcrumb-item::before{float:left;padding-right:.5rem;color:#6c757d;content:var(--bs-breadcrumb-divider, "/")}.breadcrumb-item.active{color:#6c757d}.pagination{display:flex;padding-left:0;list-style:none}.page-link{position:relative;display:block;color:#0d6efd;text-decoration:none;background-color:#fff;border:1px solid #dee2e6;transition:color .15s ease-in-out,background-color .15s ease-in-out,border-color .15s ease-in-out,box-shadow .15s ease-in-out}@media (prefers-reduced-motion:reduce){.page-link{transition:none}}.page-link:hover{z-index:2;color:#0a58ca;background-color:#e9ecef;border-color:#dee2e6}.page-link:focus{z-index:3;color:#0a58ca;background-color:#e9ecef;outline:0;box-shadow:0 0 0 .25rem rgba(13,110,253,.25)}.page-item:not(:first-child) .page-link{margin-left:-1px}.page-item.active .page-link{z-index:3;color:#fff;background-color:#0d6efd;border-color:#0d6efd}.page-item.disabled .page-link{color:#6c757d;pointer-events:none;background-color:#fff;border-color:#dee2e6}.page-link{padding:.375rem .75rem}.page-item:first-child .page-link{border-top-left-radius:.25rem;border-bottom-left-radius:.25rem}.page-item:last-child .page-link{border-top-right-radius:.25rem;border-bottom-right-radius:.25rem}.pagination-lg .page-link{padding:.75rem 1.5rem;font-size:1.25rem}.pagination-lg .page-item:first-child .page-link{border-top-left-radius:.3rem;border-bottom-left-radius:.3rem}.pagination-lg .page-item:last-child .page-link{border-top-right-radius:.3rem;border-bottom-right-radius:.3rem}.pagination-sm .page-link{padding:.25rem .5rem;font-size:.875rem}.pagination-sm .page-item:first-child .page-link{border-top-left-radius:.2rem;border-bottom-left-radius:.2rem}.pagination-sm .page-item:last-child .page-link{border-top-right-radius:.2rem;border-bottom-right-radius:.2rem}.badge{display:inline-block;padding:.35em .65em;font-size:.75em;font-weight:700;line-height:1;color:#fff;text-align:center;white-space:nowrap;vertical-align:baseline;border-radius:.25rem}.badge:empty{display:none}.btn .badge{position:relative;top:-1px}.alert{position:relative;padding:1rem 1rem;margin-bottom:1rem;border:1px solid transparent;border-radius:.25rem}.alert-heading{color:inherit}.alert-link{font-weight:700}.alert-dismissible{padding-right:3rem}.alert-dismissible .btn-close{position:absolute;top:0;right:0;z-index:2;padding:1.25rem 1rem}.alert-primary{color:#084298;background-color:#cfe2ff;border-color:#b6d4fe}.alert-primary .alert-link{color:#06357a}.alert-secondary{color:#41464b;background-color:#e2e3e5;border-color:#d3d6d8}.alert-secondary .alert-link{color:#34383c}.alert-success{color:#0f5132;background-color:#d1e7dd;border-color:#badbcc}.alert-success .alert-link{color:#0c4128}.alert-info{color:#055160;background-color:#cff4fc;border-color:#b6effb}.alert-info .alert-link{color:#04414d}.alert-warning{color:#664d03;background-color:#fff3cd;border-color:#ffecb5}.alert-warning .alert-link{color:#523e02}.alert-danger{color:#842029;background-color:#f8d7da;border-color:#f5c2c7}.alert-danger .alert-link{color:#6a1a21}.alert-light{color:#636464;background-color:#fefefe;border-color:#fdfdfe}.alert-light .alert-link{color:#4f5050}.alert-dark{color:#141619;background-color:#d3d3d4;border-color:#bcbebf}.alert-dark .alert-link{color:#101214}@-webkit-keyframes progress-bar-stripes{0%{background-position-x:1rem}}@keyframes 
progress-bar-stripes{0%{background-position-x:1rem}}.progress{display:flex;height:1rem;overflow:hidden;font-size:.75rem;background-color:#e9ecef;border-radius:.25rem}.progress-bar{display:flex;flex-direction:column;justify-content:center;overflow:hidden;color:#fff;text-align:center;white-space:nowrap;background-color:#0d6efd;transition:width .6s ease}@media (prefers-reduced-motion:reduce){.progress-bar{transition:none}}.progress-bar-striped{background-image:linear-gradient(45deg,rgba(255,255,255,.15) 25%,transparent 25%,transparent 50%,rgba(255,255,255,.15) 50%,rgba(255,255,255,.15) 75%,transparent 75%,transparent);background-size:1rem 1rem}.progress-bar-animated{-webkit-animation:1s linear infinite progress-bar-stripes;animation:1s linear infinite progress-bar-stripes}@media (prefers-reduced-motion:reduce){.progress-bar-animated{-webkit-animation:none;animation:none}}.list-group{display:flex;flex-direction:column;padding-left:0;margin-bottom:0;border-radius:.25rem}.list-group-numbered{list-style-type:none;counter-reset:section}.list-group-numbered>li::before{content:counters(section, ".") ". ";counter-increment:section}.list-group-item-action{width:100%;color:#495057;text-align:inherit}.list-group-item-action:focus,.list-group-item-action:hover{z-index:1;color:#495057;text-decoration:none;background-color:#f8f9fa}.list-group-item-action:active{color:#212529;background-color:#e9ecef}.list-group-item{position:relative;display:block;padding:.5rem 1rem;color:#212529;text-decoration:none;background-color:#fff;border:1px solid rgba(0,0,0,.125)}.list-group-item:first-child{border-top-left-radius:inherit;border-top-right-radius:inherit}.list-group-item:last-child{border-bottom-right-radius:inherit;border-bottom-left-radius:inherit}.list-group-item.disabled,.list-group-item:disabled{color:#6c757d;pointer-events:none;background-color:#fff}.list-group-item.active{z-index:2;color:#fff;background-color:#0d6efd;border-color:#0d6efd}.list-group-item+.list-group-item{border-top-width:0}.list-group-item+.list-group-item.active{margin-top:-1px;border-top-width:1px}.list-group-horizontal{flex-direction:row}.list-group-horizontal>.list-group-item:first-child{border-bottom-left-radius:.25rem;border-top-right-radius:0}.list-group-horizontal>.list-group-item:last-child{border-top-right-radius:.25rem;border-bottom-left-radius:0}.list-group-horizontal>.list-group-item.active{margin-top:0}.list-group-horizontal>.list-group-item+.list-group-item{border-top-width:1px;border-left-width:0}.list-group-horizontal>.list-group-item+.list-group-item.active{margin-left:-1px;border-left-width:1px}@media (min-width:576px){.list-group-horizontal-sm{flex-direction:row}.list-group-horizontal-sm>.list-group-item:first-child{border-bottom-left-radius:.25rem;border-top-right-radius:0}.list-group-horizontal-sm>.list-group-item:last-child{border-top-right-radius:.25rem;border-bottom-left-radius:0}.list-group-horizontal-sm>.list-group-item.active{margin-top:0}.list-group-horizontal-sm>.list-group-item+.list-group-item{border-top-width:1px;border-left-width:0}.list-group-horizontal-sm>.list-group-item+.list-group-item.active{margin-left:-1px;border-left-width:1px}}@media 
(min-width:768px){.list-group-horizontal-md{flex-direction:row}.list-group-horizontal-md>.list-group-item:first-child{border-bottom-left-radius:.25rem;border-top-right-radius:0}.list-group-horizontal-md>.list-group-item:last-child{border-top-right-radius:.25rem;border-bottom-left-radius:0}.list-group-horizontal-md>.list-group-item.active{margin-top:0}.list-group-horizontal-md>.list-group-item+.list-group-item{border-top-width:1px;border-left-width:0}.list-group-horizontal-md>.list-group-item+.list-group-item.active{margin-left:-1px;border-left-width:1px}}@media (min-width:992px){.list-group-horizontal-lg{flex-direction:row}.list-group-horizontal-lg>.list-group-item:first-child{border-bottom-left-radius:.25rem;border-top-right-radius:0}.list-group-horizontal-lg>.list-group-item:last-child{border-top-right-radius:.25rem;border-bottom-left-radius:0}.list-group-horizontal-lg>.list-group-item.active{margin-top:0}.list-group-horizontal-lg>.list-group-item+.list-group-item{border-top-width:1px;border-left-width:0}.list-group-horizontal-lg>.list-group-item+.list-group-item.active{margin-left:-1px;border-left-width:1px}}@media (min-width:1200px){.list-group-horizontal-xl{flex-direction:row}.list-group-horizontal-xl>.list-group-item:first-child{border-bottom-left-radius:.25rem;border-top-right-radius:0}.list-group-horizontal-xl>.list-group-item:last-child{border-top-right-radius:.25rem;border-bottom-left-radius:0}.list-group-horizontal-xl>.list-group-item.active{margin-top:0}.list-group-horizontal-xl>.list-group-item+.list-group-item{border-top-width:1px;border-left-width:0}.list-group-horizontal-xl>.list-group-item+.list-group-item.active{margin-left:-1px;border-left-width:1px}}@media (min-width:1400px){.list-group-horizontal-xxl{flex-direction:row}.list-group-horizontal-xxl>.list-group-item:first-child{border-bottom-left-radius:.25rem;border-top-right-radius:0}.list-group-horizontal-xxl>.list-group-item:last-child{border-top-right-radius:.25rem;border-bottom-left-radius:0}.list-group-horizontal-xxl>.list-group-item.active{margin-top:0}.list-group-horizontal-xxl>.list-group-item+.list-group-item{border-top-width:1px;border-left-width:0}.list-group-horizontal-xxl>.list-group-item+.list-group-item.active{margin-left:-1px;border-left-width:1px}}.list-group-flush{border-radius:0}.list-group-flush>.list-group-item{border-width:0 0 
1px}.list-group-flush>.list-group-item:last-child{border-bottom-width:0}.list-group-item-primary{color:#084298;background-color:#cfe2ff}.list-group-item-primary.list-group-item-action:focus,.list-group-item-primary.list-group-item-action:hover{color:#084298;background-color:#bacbe6}.list-group-item-primary.list-group-item-action.active{color:#fff;background-color:#084298;border-color:#084298}.list-group-item-secondary{color:#41464b;background-color:#e2e3e5}.list-group-item-secondary.list-group-item-action:focus,.list-group-item-secondary.list-group-item-action:hover{color:#41464b;background-color:#cbccce}.list-group-item-secondary.list-group-item-action.active{color:#fff;background-color:#41464b;border-color:#41464b}.list-group-item-success{color:#0f5132;background-color:#d1e7dd}.list-group-item-success.list-group-item-action:focus,.list-group-item-success.list-group-item-action:hover{color:#0f5132;background-color:#bcd0c7}.list-group-item-success.list-group-item-action.active{color:#fff;background-color:#0f5132;border-color:#0f5132}.list-group-item-info{color:#055160;background-color:#cff4fc}.list-group-item-info.list-group-item-action:focus,.list-group-item-info.list-group-item-action:hover{color:#055160;background-color:#badce3}.list-group-item-info.list-group-item-action.active{color:#fff;background-color:#055160;border-color:#055160}.list-group-item-warning{color:#664d03;background-color:#fff3cd}.list-group-item-warning.list-group-item-action:focus,.list-group-item-warning.list-group-item-action:hover{color:#664d03;background-color:#e6dbb9}.list-group-item-warning.list-group-item-action.active{color:#fff;background-color:#664d03;border-color:#664d03}.list-group-item-danger{color:#842029;background-color:#f8d7da}.list-group-item-danger.list-group-item-action:focus,.list-group-item-danger.list-group-item-action:hover{color:#842029;background-color:#dfc2c4}.list-group-item-danger.list-group-item-action.active{color:#fff;background-color:#842029;border-color:#842029}.list-group-item-light{color:#636464;background-color:#fefefe}.list-group-item-light.list-group-item-action:focus,.list-group-item-light.list-group-item-action:hover{color:#636464;background-color:#e5e5e5}.list-group-item-light.list-group-item-action.active{color:#fff;background-color:#636464;border-color:#636464}.list-group-item-dark{color:#141619;background-color:#d3d3d4}.list-group-item-dark.list-group-item-action:focus,.list-group-item-dark.list-group-item-action:hover{color:#141619;background-color:#bebebf}.list-group-item-dark.list-group-item-action.active{color:#fff;background-color:#141619;border-color:#141619}.btn-close{box-sizing:content-box;width:1em;height:1em;padding:.25em .25em;color:#000;background:transparent url("data:image/svg+xml,%3csvg xmlns='http://www.w3.org/2000/svg' viewBox='0 0 16 16' fill='%23000'%3e%3cpath d='M.293.293a1 1 0 011.414 0L8 6.586 14.293.293a1 1 0 111.414 1.414L9.414 8l6.293 6.293a1 1 0 01-1.414 1.414L8 9.414l-6.293 6.293a1 1 0 01-1.414-1.414L6.586 8 .293 1.707a1 1 0 010-1.414z'/%3e%3c/svg%3e") center/1em auto no-repeat;border:0;border-radius:.25rem;opacity:.5}.btn-close:hover{color:#000;text-decoration:none;opacity:.75}.btn-close:focus{outline:0;box-shadow:0 0 0 .25rem rgba(13,110,253,.25);opacity:1}.btn-close.disabled,.btn-close:disabled{pointer-events:none;-webkit-user-select:none;-moz-user-select:none;user-select:none;opacity:.25}.btn-close-white{filter:invert(1) grayscale(100%) 
brightness(200%)}.toast{width:350px;max-width:100%;font-size:.875rem;pointer-events:auto;background-color:rgba(255,255,255,.85);background-clip:padding-box;border:1px solid rgba(0,0,0,.1);box-shadow:0 .5rem 1rem rgba(0,0,0,.15);border-radius:.25rem}.toast.showing{opacity:0}.toast:not(.show){display:none}.toast-container{width:-webkit-max-content;width:-moz-max-content;width:max-content;max-width:100%;pointer-events:none}.toast-container>:not(:last-child){margin-bottom:.75rem}.toast-header{display:flex;align-items:center;padding:.5rem .75rem;color:#6c757d;background-color:rgba(255,255,255,.85);background-clip:padding-box;border-bottom:1px solid rgba(0,0,0,.05);border-top-left-radius:calc(.25rem - 1px);border-top-right-radius:calc(.25rem - 1px)}.toast-header .btn-close{margin-right:-.375rem;margin-left:.75rem}.toast-body{padding:.75rem;word-wrap:break-word}.modal{position:fixed;top:0;left:0;z-index:1055;display:none;width:100%;height:100%;overflow-x:hidden;overflow-y:auto;outline:0}.modal-dialog{position:relative;width:auto;margin:.5rem;pointer-events:none}.modal.fade .modal-dialog{transition:transform .3s ease-out;transform:translate(0,-50px)}@media (prefers-reduced-motion:reduce){.modal.fade .modal-dialog{transition:none}}.modal.show .modal-dialog{transform:none}.modal.modal-static .modal-dialog{transform:scale(1.02)}.modal-dialog-scrollable{height:calc(100% - 1rem)}.modal-dialog-scrollable .modal-content{max-height:100%;overflow:hidden}.modal-dialog-scrollable .modal-body{overflow-y:auto}.modal-dialog-centered{display:flex;align-items:center;min-height:calc(100% - 1rem)}.modal-content{position:relative;display:flex;flex-direction:column;width:100%;pointer-events:auto;background-color:#fff;background-clip:padding-box;border:1px solid rgba(0,0,0,.2);border-radius:.3rem;outline:0}.modal-backdrop{position:fixed;top:0;left:0;z-index:1050;width:100vw;height:100vh;background-color:#000}.modal-backdrop.fade{opacity:0}.modal-backdrop.show{opacity:.5}.modal-header{display:flex;flex-shrink:0;align-items:center;justify-content:space-between;padding:1rem 1rem;border-bottom:1px solid #dee2e6;border-top-left-radius:calc(.3rem - 1px);border-top-right-radius:calc(.3rem - 1px)}.modal-header .btn-close{padding:.5rem .5rem;margin:-.5rem -.5rem -.5rem auto}.modal-title{margin-bottom:0;line-height:1.5}.modal-body{position:relative;flex:1 1 auto;padding:1rem}.modal-footer{display:flex;flex-wrap:wrap;flex-shrink:0;align-items:center;justify-content:flex-end;padding:.75rem;border-top:1px solid #dee2e6;border-bottom-right-radius:calc(.3rem - 1px);border-bottom-left-radius:calc(.3rem - 1px)}.modal-footer>*{margin:.25rem}@media (min-width:576px){.modal-dialog{max-width:500px;margin:1.75rem auto}.modal-dialog-scrollable{height:calc(100% - 3.5rem)}.modal-dialog-centered{min-height:calc(100% - 3.5rem)}.modal-sm{max-width:300px}}@media (min-width:992px){.modal-lg,.modal-xl{max-width:800px}}@media (min-width:1200px){.modal-xl{max-width:1140px}}.modal-fullscreen{width:100vw;max-width:none;height:100%;margin:0}.modal-fullscreen .modal-content{height:100%;border:0;border-radius:0}.modal-fullscreen .modal-header{border-radius:0}.modal-fullscreen .modal-body{overflow-y:auto}.modal-fullscreen .modal-footer{border-radius:0}@media (max-width:575.98px){.modal-fullscreen-sm-down{width:100vw;max-width:none;height:100%;margin:0}.modal-fullscreen-sm-down .modal-content{height:100%;border:0;border-radius:0}.modal-fullscreen-sm-down .modal-header{border-radius:0}.modal-fullscreen-sm-down 
.modal-body{overflow-y:auto}.modal-fullscreen-sm-down .modal-footer{border-radius:0}}@media (max-width:767.98px){.modal-fullscreen-md-down{width:100vw;max-width:none;height:100%;margin:0}.modal-fullscreen-md-down .modal-content{height:100%;border:0;border-radius:0}.modal-fullscreen-md-down .modal-header{border-radius:0}.modal-fullscreen-md-down .modal-body{overflow-y:auto}.modal-fullscreen-md-down .modal-footer{border-radius:0}}@media (max-width:991.98px){.modal-fullscreen-lg-down{width:100vw;max-width:none;height:100%;margin:0}.modal-fullscreen-lg-down .modal-content{height:100%;border:0;border-radius:0}.modal-fullscreen-lg-down .modal-header{border-radius:0}.modal-fullscreen-lg-down .modal-body{overflow-y:auto}.modal-fullscreen-lg-down .modal-footer{border-radius:0}}@media (max-width:1199.98px){.modal-fullscreen-xl-down{width:100vw;max-width:none;height:100%;margin:0}.modal-fullscreen-xl-down .modal-content{height:100%;border:0;border-radius:0}.modal-fullscreen-xl-down .modal-header{border-radius:0}.modal-fullscreen-xl-down .modal-body{overflow-y:auto}.modal-fullscreen-xl-down .modal-footer{border-radius:0}}@media (max-width:1399.98px){.modal-fullscreen-xxl-down{width:100vw;max-width:none;height:100%;margin:0}.modal-fullscreen-xxl-down .modal-content{height:100%;border:0;border-radius:0}.modal-fullscreen-xxl-down .modal-header{border-radius:0}.modal-fullscreen-xxl-down .modal-body{overflow-y:auto}.modal-fullscreen-xxl-down .modal-footer{border-radius:0}}.tooltip{position:absolute;z-index:1080;display:block;margin:0;font-family:var(--bs-font-sans-serif);font-style:normal;font-weight:400;line-height:1.5;text-align:left;text-align:start;text-decoration:none;text-shadow:none;text-transform:none;letter-spacing:normal;word-break:normal;word-spacing:normal;white-space:normal;line-break:auto;font-size:.875rem;word-wrap:break-word;opacity:0}.tooltip.show{opacity:.9}.tooltip .tooltip-arrow{position:absolute;display:block;width:.8rem;height:.4rem}.tooltip .tooltip-arrow::before{position:absolute;content:"";border-color:transparent;border-style:solid}.bs-tooltip-auto[data-popper-placement^=top],.bs-tooltip-top{padding:.4rem 0}.bs-tooltip-auto[data-popper-placement^=top] .tooltip-arrow,.bs-tooltip-top .tooltip-arrow{bottom:0}.bs-tooltip-auto[data-popper-placement^=top] .tooltip-arrow::before,.bs-tooltip-top .tooltip-arrow::before{top:-1px;border-width:.4rem .4rem 0;border-top-color:#000}.bs-tooltip-auto[data-popper-placement^=right],.bs-tooltip-end{padding:0 .4rem}.bs-tooltip-auto[data-popper-placement^=right] .tooltip-arrow,.bs-tooltip-end .tooltip-arrow{left:0;width:.4rem;height:.8rem}.bs-tooltip-auto[data-popper-placement^=right] .tooltip-arrow::before,.bs-tooltip-end .tooltip-arrow::before{right:-1px;border-width:.4rem .4rem .4rem 0;border-right-color:#000}.bs-tooltip-auto[data-popper-placement^=bottom],.bs-tooltip-bottom{padding:.4rem 0}.bs-tooltip-auto[data-popper-placement^=bottom] .tooltip-arrow,.bs-tooltip-bottom .tooltip-arrow{top:0}.bs-tooltip-auto[data-popper-placement^=bottom] .tooltip-arrow::before,.bs-tooltip-bottom .tooltip-arrow::before{bottom:-1px;border-width:0 .4rem .4rem;border-bottom-color:#000}.bs-tooltip-auto[data-popper-placement^=left],.bs-tooltip-start{padding:0 .4rem}.bs-tooltip-auto[data-popper-placement^=left] .tooltip-arrow,.bs-tooltip-start .tooltip-arrow{right:0;width:.4rem;height:.8rem}.bs-tooltip-auto[data-popper-placement^=left] .tooltip-arrow::before,.bs-tooltip-start .tooltip-arrow::before{left:-1px;border-width:.4rem 0 .4rem 
.4rem;border-left-color:#000}.tooltip-inner{max-width:200px;padding:.25rem .5rem;color:#fff;text-align:center;background-color:#000;border-radius:.25rem}.popover{position:absolute;top:0;left:0;z-index:1070;display:block;max-width:276px;font-family:var(--bs-font-sans-serif);font-style:normal;font-weight:400;line-height:1.5;text-align:left;text-align:start;text-decoration:none;text-shadow:none;text-transform:none;letter-spacing:normal;word-break:normal;word-spacing:normal;white-space:normal;line-break:auto;font-size:.875rem;word-wrap:break-word;background-color:#fff;background-clip:padding-box;border:1px solid rgba(0,0,0,.2);border-radius:.3rem}.popover .popover-arrow{position:absolute;display:block;width:1rem;height:.5rem}.popover .popover-arrow::after,.popover .popover-arrow::before{position:absolute;display:block;content:"";border-color:transparent;border-style:solid}.bs-popover-auto[data-popper-placement^=top]>.popover-arrow,.bs-popover-top>.popover-arrow{bottom:calc(-.5rem - 1px)}.bs-popover-auto[data-popper-placement^=top]>.popover-arrow::before,.bs-popover-top>.popover-arrow::before{bottom:0;border-width:.5rem .5rem 0;border-top-color:rgba(0,0,0,.25)}.bs-popover-auto[data-popper-placement^=top]>.popover-arrow::after,.bs-popover-top>.popover-arrow::after{bottom:1px;border-width:.5rem .5rem 0;border-top-color:#fff}.bs-popover-auto[data-popper-placement^=right]>.popover-arrow,.bs-popover-end>.popover-arrow{left:calc(-.5rem - 1px);width:.5rem;height:1rem}.bs-popover-auto[data-popper-placement^=right]>.popover-arrow::before,.bs-popover-end>.popover-arrow::before{left:0;border-width:.5rem .5rem .5rem 0;border-right-color:rgba(0,0,0,.25)}.bs-popover-auto[data-popper-placement^=right]>.popover-arrow::after,.bs-popover-end>.popover-arrow::after{left:1px;border-width:.5rem .5rem .5rem 0;border-right-color:#fff}.bs-popover-auto[data-popper-placement^=bottom]>.popover-arrow,.bs-popover-bottom>.popover-arrow{top:calc(-.5rem - 1px)}.bs-popover-auto[data-popper-placement^=bottom]>.popover-arrow::before,.bs-popover-bottom>.popover-arrow::before{top:0;border-width:0 .5rem .5rem .5rem;border-bottom-color:rgba(0,0,0,.25)}.bs-popover-auto[data-popper-placement^=bottom]>.popover-arrow::after,.bs-popover-bottom>.popover-arrow::after{top:1px;border-width:0 .5rem .5rem .5rem;border-bottom-color:#fff}.bs-popover-auto[data-popper-placement^=bottom] .popover-header::before,.bs-popover-bottom .popover-header::before{position:absolute;top:0;left:50%;display:block;width:1rem;margin-left:-.5rem;content:"";border-bottom:1px solid #f0f0f0}.bs-popover-auto[data-popper-placement^=left]>.popover-arrow,.bs-popover-start>.popover-arrow{right:calc(-.5rem - 1px);width:.5rem;height:1rem}.bs-popover-auto[data-popper-placement^=left]>.popover-arrow::before,.bs-popover-start>.popover-arrow::before{right:0;border-width:.5rem 0 .5rem .5rem;border-left-color:rgba(0,0,0,.25)}.bs-popover-auto[data-popper-placement^=left]>.popover-arrow::after,.bs-popover-start>.popover-arrow::after{right:1px;border-width:.5rem 0 .5rem .5rem;border-left-color:#fff}.popover-header{padding:.5rem 1rem;margin-bottom:0;font-size:1rem;background-color:#f0f0f0;border-bottom:1px solid rgba(0,0,0,.2);border-top-left-radius:calc(.3rem - 1px);border-top-right-radius:calc(.3rem - 1px)}.popover-header:empty{display:none}.popover-body{padding:1rem 
1rem;color:#212529}.carousel{position:relative}.carousel.pointer-event{touch-action:pan-y}.carousel-inner{position:relative;width:100%;overflow:hidden}.carousel-inner::after{display:block;clear:both;content:""}.carousel-item{position:relative;display:none;float:left;width:100%;margin-right:-100%;-webkit-backface-visibility:hidden;backface-visibility:hidden;transition:transform .6s ease-in-out}@media (prefers-reduced-motion:reduce){.carousel-item{transition:none}}.carousel-item-next,.carousel-item-prev,.carousel-item.active{display:block}.active.carousel-item-end,.carousel-item-next:not(.carousel-item-start){transform:translateX(100%)}.active.carousel-item-start,.carousel-item-prev:not(.carousel-item-end){transform:translateX(-100%)}.carousel-fade .carousel-item{opacity:0;transition-property:opacity;transform:none}.carousel-fade .carousel-item-next.carousel-item-start,.carousel-fade .carousel-item-prev.carousel-item-end,.carousel-fade .carousel-item.active{z-index:1;opacity:1}.carousel-fade .active.carousel-item-end,.carousel-fade .active.carousel-item-start{z-index:0;opacity:0;transition:opacity 0s .6s}@media (prefers-reduced-motion:reduce){.carousel-fade .active.carousel-item-end,.carousel-fade .active.carousel-item-start{transition:none}}.carousel-control-next,.carousel-control-prev{position:absolute;top:0;bottom:0;z-index:1;display:flex;align-items:center;justify-content:center;width:15%;padding:0;color:#fff;text-align:center;background:0 0;border:0;opacity:.5;transition:opacity .15s ease}@media (prefers-reduced-motion:reduce){.carousel-control-next,.carousel-control-prev{transition:none}}.carousel-control-next:focus,.carousel-control-next:hover,.carousel-control-prev:focus,.carousel-control-prev:hover{color:#fff;text-decoration:none;outline:0;opacity:.9}.carousel-control-prev{left:0}.carousel-control-next{right:0}.carousel-control-next-icon,.carousel-control-prev-icon{display:inline-block;width:2rem;height:2rem;background-repeat:no-repeat;background-position:50%;background-size:100% 100%}.carousel-control-prev-icon{background-image:url("data:image/svg+xml,%3csvg xmlns='http://www.w3.org/2000/svg' viewBox='0 0 16 16' fill='%23fff'%3e%3cpath d='M11.354 1.646a.5.5 0 0 1 0 .708L5.707 8l5.647 5.646a.5.5 0 0 1-.708.708l-6-6a.5.5 0 0 1 0-.708l6-6a.5.5 0 0 1 .708 0z'/%3e%3c/svg%3e")}.carousel-control-next-icon{background-image:url("data:image/svg+xml,%3csvg xmlns='http://www.w3.org/2000/svg' viewBox='0 0 16 16' fill='%23fff'%3e%3cpath d='M4.646 1.646a.5.5 0 0 1 .708 0l6 6a.5.5 0 0 1 0 .708l-6 6a.5.5 0 0 1-.708-.708L10.293 8 4.646 2.354a.5.5 0 0 1 0-.708z'/%3e%3c/svg%3e")}.carousel-indicators{position:absolute;right:0;bottom:0;left:0;z-index:2;display:flex;justify-content:center;padding:0;margin-right:15%;margin-bottom:1rem;margin-left:15%;list-style:none}.carousel-indicators [data-bs-target]{box-sizing:content-box;flex:0 1 auto;width:30px;height:3px;padding:0;margin-right:3px;margin-left:3px;text-indent:-999px;cursor:pointer;background-color:#fff;background-clip:padding-box;border:0;border-top:10px solid transparent;border-bottom:10px solid transparent;opacity:.5;transition:opacity .6s ease}@media (prefers-reduced-motion:reduce){.carousel-indicators [data-bs-target]{transition:none}}.carousel-indicators .active{opacity:1}.carousel-caption{position:absolute;right:15%;bottom:1.25rem;left:15%;padding-top:1.25rem;padding-bottom:1.25rem;color:#fff;text-align:center}.carousel-dark .carousel-control-next-icon,.carousel-dark .carousel-control-prev-icon{filter:invert(1) grayscale(100)}.carousel-dark 
.carousel-indicators [data-bs-target]{background-color:#000}.carousel-dark .carousel-caption{color:#000}@-webkit-keyframes spinner-border{to{transform:rotate(360deg)}}@keyframes spinner-border{to{transform:rotate(360deg)}}.spinner-border{display:inline-block;width:2rem;height:2rem;vertical-align:-.125em;border:.25em solid currentColor;border-right-color:transparent;border-radius:50%;-webkit-animation:.75s linear infinite spinner-border;animation:.75s linear infinite spinner-border}.spinner-border-sm{width:1rem;height:1rem;border-width:.2em}@-webkit-keyframes spinner-grow{0%{transform:scale(0)}50%{opacity:1;transform:none}}@keyframes spinner-grow{0%{transform:scale(0)}50%{opacity:1;transform:none}}.spinner-grow{display:inline-block;width:2rem;height:2rem;vertical-align:-.125em;background-color:currentColor;border-radius:50%;opacity:0;-webkit-animation:.75s linear infinite spinner-grow;animation:.75s linear infinite spinner-grow}.spinner-grow-sm{width:1rem;height:1rem}@media (prefers-reduced-motion:reduce){.spinner-border,.spinner-grow{-webkit-animation-duration:1.5s;animation-duration:1.5s}}.offcanvas{position:fixed;bottom:0;z-index:1045;display:flex;flex-direction:column;max-width:100%;visibility:hidden;background-color:#fff;background-clip:padding-box;outline:0;transition:transform .3s ease-in-out}@media (prefers-reduced-motion:reduce){.offcanvas{transition:none}}.offcanvas-backdrop{position:fixed;top:0;left:0;z-index:1040;width:100vw;height:100vh;background-color:#000}.offcanvas-backdrop.fade{opacity:0}.offcanvas-backdrop.show{opacity:.5}.offcanvas-header{display:flex;align-items:center;justify-content:space-between;padding:1rem 1rem}.offcanvas-header .btn-close{padding:.5rem .5rem;margin-top:-.5rem;margin-right:-.5rem;margin-bottom:-.5rem}.offcanvas-title{margin-bottom:0;line-height:1.5}.offcanvas-body{flex-grow:1;padding:1rem 1rem;overflow-y:auto}.offcanvas-start{top:0;left:0;width:400px;border-right:1px solid rgba(0,0,0,.2);transform:translateX(-100%)}.offcanvas-end{top:0;right:0;width:400px;border-left:1px solid rgba(0,0,0,.2);transform:translateX(100%)}.offcanvas-top{top:0;right:0;left:0;height:30vh;max-height:100%;border-bottom:1px solid rgba(0,0,0,.2);transform:translateY(-100%)}.offcanvas-bottom{right:0;left:0;height:30vh;max-height:100%;border-top:1px solid rgba(0,0,0,.2);transform:translateY(100%)}.offcanvas.show{transform:none}.placeholder{display:inline-block;min-height:1em;vertical-align:middle;cursor:wait;background-color:currentColor;opacity:.5}.placeholder.btn::before{display:inline-block;content:""}.placeholder-xs{min-height:.6em}.placeholder-sm{min-height:.8em}.placeholder-lg{min-height:1.2em}.placeholder-glow .placeholder{-webkit-animation:placeholder-glow 2s ease-in-out infinite;animation:placeholder-glow 2s ease-in-out infinite}@-webkit-keyframes placeholder-glow{50%{opacity:.2}}@keyframes placeholder-glow{50%{opacity:.2}}.placeholder-wave{-webkit-mask-image:linear-gradient(130deg,#000 55%,rgba(0,0,0,0.8) 75%,#000 95%);mask-image:linear-gradient(130deg,#000 55%,rgba(0,0,0,0.8) 75%,#000 95%);-webkit-mask-size:200% 100%;mask-size:200% 100%;-webkit-animation:placeholder-wave 2s linear infinite;animation:placeholder-wave 2s linear infinite}@-webkit-keyframes placeholder-wave{100%{-webkit-mask-position:-200% 0%;mask-position:-200% 0%}}@keyframes placeholder-wave{100%{-webkit-mask-position:-200% 0%;mask-position:-200% 
0%}}.clearfix::after{display:block;clear:both;content:""}.link-primary{color:#0d6efd}.link-primary:focus,.link-primary:hover{color:#0a58ca}.link-secondary{color:#6c757d}.link-secondary:focus,.link-secondary:hover{color:#565e64}.link-success{color:#198754}.link-success:focus,.link-success:hover{color:#146c43}.link-info{color:#0dcaf0}.link-info:focus,.link-info:hover{color:#3dd5f3}.link-warning{color:#ffc107}.link-warning:focus,.link-warning:hover{color:#ffcd39}.link-danger{color:#dc3545}.link-danger:focus,.link-danger:hover{color:#b02a37}.link-light{color:#f8f9fa}.link-light:focus,.link-light:hover{color:#f9fafb}.link-dark{color:#212529}.link-dark:focus,.link-dark:hover{color:#1a1e21}.ratio{position:relative;width:100%}.ratio::before{display:block;padding-top:var(--bs-aspect-ratio);content:""}.ratio>*{position:absolute;top:0;left:0;width:100%;height:100%}.ratio-1x1{--bs-aspect-ratio:100%}.ratio-4x3{--bs-aspect-ratio:75%}.ratio-16x9{--bs-aspect-ratio:56.25%}.ratio-21x9{--bs-aspect-ratio:42.8571428571%}.fixed-top{position:fixed;top:0;right:0;left:0;z-index:1030}.fixed-bottom{position:fixed;right:0;bottom:0;left:0;z-index:1030}.sticky-top{position:-webkit-sticky;position:sticky;top:0;z-index:1020}@media (min-width:576px){.sticky-sm-top{position:-webkit-sticky;position:sticky;top:0;z-index:1020}}@media (min-width:768px){.sticky-md-top{position:-webkit-sticky;position:sticky;top:0;z-index:1020}}@media (min-width:992px){.sticky-lg-top{position:-webkit-sticky;position:sticky;top:0;z-index:1020}}@media (min-width:1200px){.sticky-xl-top{position:-webkit-sticky;position:sticky;top:0;z-index:1020}}@media (min-width:1400px){.sticky-xxl-top{position:-webkit-sticky;position:sticky;top:0;z-index:1020}}.hstack{display:flex;flex-direction:row;align-items:center;align-self:stretch}.vstack{display:flex;flex:1 1 auto;flex-direction:column;align-self:stretch}.visually-hidden,.visually-hidden-focusable:not(:focus):not(:focus-within){position:absolute!important;width:1px!important;height:1px!important;padding:0!important;margin:-1px!important;overflow:hidden!important;clip:rect(0,0,0,0)!important;white-space:nowrap!important;border:0!important}.stretched-link::after{position:absolute;top:0;right:0;bottom:0;left:0;z-index:1;content:""}.text-truncate{overflow:hidden;text-overflow:ellipsis;white-space:nowrap}.vr{display:inline-block;align-self:stretch;width:1px;min-height:1em;background-color:currentColor;opacity:.25}.align-baseline{vertical-align:baseline!important}.align-top{vertical-align:top!important}.align-middle{vertical-align:middle!important}.align-bottom{vertical-align:bottom!important}.align-text-bottom{vertical-align:text-bottom!important}.align-text-top{vertical-align:text-top!important}.float-start{float:left!important}.float-end{float:right!important}.float-none{float:none!important}.opacity-0{opacity:0!important}.opacity-25{opacity:.25!important}.opacity-50{opacity:.5!important}.opacity-75{opacity:.75!important}.opacity-100{opacity:1!important}.overflow-auto{overflow:auto!important}.overflow-hidden{overflow:hidden!important}.overflow-visible{overflow:visible!important}.overflow-scroll{overflow:scroll!important}.d-inline{display:inline!important}.d-inline-block{display:inline-block!important}.d-block{display:block!important}.d-grid{display:grid!important}.d-table{display:table!important}.d-table-row{display:table-row!important}.d-table-cell{display:table-cell!important}.d-flex{display:flex!important}.d-inline-flex{display:inline-flex!important}.d-none{display:none!important}.shadow{box-shadow:0 .5rem 
1rem rgba(0,0,0,.15)!important}.shadow-sm{box-shadow:0 .125rem .25rem rgba(0,0,0,.075)!important}.shadow-lg{box-shadow:0 1rem 3rem rgba(0,0,0,.175)!important}.shadow-none{box-shadow:none!important}.position-static{position:static!important}.position-relative{position:relative!important}.position-absolute{position:absolute!important}.position-fixed{position:fixed!important}.position-sticky{position:-webkit-sticky!important;position:sticky!important}.top-0{top:0!important}.top-50{top:50%!important}.top-100{top:100%!important}.bottom-0{bottom:0!important}.bottom-50{bottom:50%!important}.bottom-100{bottom:100%!important}.start-0{left:0!important}.start-50{left:50%!important}.start-100{left:100%!important}.end-0{right:0!important}.end-50{right:50%!important}.end-100{right:100%!important}.translate-middle{transform:translate(-50%,-50%)!important}.translate-middle-x{transform:translateX(-50%)!important}.translate-middle-y{transform:translateY(-50%)!important}.border{border:1px solid #dee2e6!important}.border-0{border:0!important}.border-top{border-top:1px solid #dee2e6!important}.border-top-0{border-top:0!important}.border-end{border-right:1px solid #dee2e6!important}.border-end-0{border-right:0!important}.border-bottom{border-bottom:1px solid #dee2e6!important}.border-bottom-0{border-bottom:0!important}.border-start{border-left:1px solid #dee2e6!important}.border-start-0{border-left:0!important}.border-primary{border-color:#0d6efd!important}.border-secondary{border-color:#6c757d!important}.border-success{border-color:#198754!important}.border-info{border-color:#0dcaf0!important}.border-warning{border-color:#ffc107!important}.border-danger{border-color:#dc3545!important}.border-light{border-color:#f8f9fa!important}.border-dark{border-color:#212529!important}.border-white{border-color:#fff!important}.border-1{border-width:1px!important}.border-2{border-width:2px!important}.border-3{border-width:3px!important}.border-4{border-width:4px!important}.border-5{border-width:5px!important}.w-25{width:25%!important}.w-50{width:50%!important}.w-75{width:75%!important}.w-100{width:100%!important}.w-auto{width:auto!important}.mw-100{max-width:100%!important}.vw-100{width:100vw!important}.min-vw-100{min-width:100vw!important}.h-25{height:25%!important}.h-50{height:50%!important}.h-75{height:75%!important}.h-100{height:100%!important}.h-auto{height:auto!important}.mh-100{max-height:100%!important}.vh-100{height:100vh!important}.min-vh-100{min-height:100vh!important}.flex-fill{flex:1 1 
auto!important}.flex-row{flex-direction:row!important}.flex-column{flex-direction:column!important}.flex-row-reverse{flex-direction:row-reverse!important}.flex-column-reverse{flex-direction:column-reverse!important}.flex-grow-0{flex-grow:0!important}.flex-grow-1{flex-grow:1!important}.flex-shrink-0{flex-shrink:0!important}.flex-shrink-1{flex-shrink:1!important}.flex-wrap{flex-wrap:wrap!important}.flex-nowrap{flex-wrap:nowrap!important}.flex-wrap-reverse{flex-wrap:wrap-reverse!important}.gap-0{gap:0!important}.gap-1{gap:.25rem!important}.gap-2{gap:.5rem!important}.gap-3{gap:1rem!important}.gap-4{gap:1.5rem!important}.gap-5{gap:3rem!important}.justify-content-start{justify-content:flex-start!important}.justify-content-end{justify-content:flex-end!important}.justify-content-center{justify-content:center!important}.justify-content-between{justify-content:space-between!important}.justify-content-around{justify-content:space-around!important}.justify-content-evenly{justify-content:space-evenly!important}.align-items-start{align-items:flex-start!important}.align-items-end{align-items:flex-end!important}.align-items-center{align-items:center!important}.align-items-baseline{align-items:baseline!important}.align-items-stretch{align-items:stretch!important}.align-content-start{align-content:flex-start!important}.align-content-end{align-content:flex-end!important}.align-content-center{align-content:center!important}.align-content-between{align-content:space-between!important}.align-content-around{align-content:space-around!important}.align-content-stretch{align-content:stretch!important}.align-self-auto{align-self:auto!important}.align-self-start{align-self:flex-start!important}.align-self-end{align-self:flex-end!important}.align-self-center{align-self:center!important}.align-self-baseline{align-self:baseline!important}.align-self-stretch{align-self:stretch!important}.order-first{order:-1!important}.order-0{order:0!important}.order-1{order:1!important}.order-2{order:2!important}.order-3{order:3!important}.order-4{order:4!important}.order-5{order:5!important}.order-last{order:6!important}.m-0{margin:0!important}.m-1{margin:.25rem!important}.m-2{margin:.5rem!important}.m-3{margin:1rem!important}.m-4{margin:1.5rem!important}.m-5{margin:3rem!important}.m-auto{margin:auto!important}.mx-0{margin-right:0!important;margin-left:0!important}.mx-1{margin-right:.25rem!important;margin-left:.25rem!important}.mx-2{margin-right:.5rem!important;margin-left:.5rem!important}.mx-3{margin-right:1rem!important;margin-left:1rem!important}.mx-4{margin-right:1.5rem!important;margin-left:1.5rem!important}.mx-5{margin-right:3rem!important;margin-left:3rem!important}.mx-auto{margin-right:auto!important;margin-left:auto!important}.my-0{margin-top:0!important;margin-bottom:0!important}.my-1{margin-top:.25rem!important;margin-bottom:.25rem!important}.my-2{margin-top:.5rem!important;margin-bottom:.5rem!important}.my-3{margin-top:1rem!important;margin-bottom:1rem!important}.my-4{margin-top:1.5rem!important;margin-bottom:1.5rem!important}.my-5{margin-top:3rem!important;margin-bottom:3rem!important}.my-auto{margin-top:auto!important;margin-bottom:auto!important}.mt-0{margin-top:0!important}.mt-1{margin-top:.25rem!important}.mt-2{margin-top:.5rem!important}.mt-3{margin-top:1rem!important}.mt-4{margin-top:1.5rem!important}.mt-5{margin-top:3rem!important}.mt-auto{margin-top:auto!important}.me-0{margin-right:0!important}.me-1{margin-right:.25rem!important}.me-2{margin-right:.5rem!important}.me-3{margin-right:1rem!important}.me-4{margin-rig
ht:1.5rem!important}.me-5{margin-right:3rem!important}.me-auto{margin-right:auto!important}.mb-0{margin-bottom:0!important}.mb-1{margin-bottom:.25rem!important}.mb-2{margin-bottom:.5rem!important}.mb-3{margin-bottom:1rem!important}.mb-4{margin-bottom:1.5rem!important}.mb-5{margin-bottom:3rem!important}.mb-auto{margin-bottom:auto!important}.ms-0{margin-left:0!important}.ms-1{margin-left:.25rem!important}.ms-2{margin-left:.5rem!important}.ms-3{margin-left:1rem!important}.ms-4{margin-left:1.5rem!important}.ms-5{margin-left:3rem!important}.ms-auto{margin-left:auto!important}.p-0{padding:0!important}.p-1{padding:.25rem!important}.p-2{padding:.5rem!important}.p-3{padding:1rem!important}.p-4{padding:1.5rem!important}.p-5{padding:3rem!important}.px-0{padding-right:0!important;padding-left:0!important}.px-1{padding-right:.25rem!important;padding-left:.25rem!important}.px-2{padding-right:.5rem!important;padding-left:.5rem!important}.px-3{padding-right:1rem!important;padding-left:1rem!important}.px-4{padding-right:1.5rem!important;padding-left:1.5rem!important}.px-5{padding-right:3rem!important;padding-left:3rem!important}.py-0{padding-top:0!important;padding-bottom:0!important}.py-1{padding-top:.25rem!important;padding-bottom:.25rem!important}.py-2{padding-top:.5rem!important;padding-bottom:.5rem!important}.py-3{padding-top:1rem!important;padding-bottom:1rem!important}.py-4{padding-top:1.5rem!important;padding-bottom:1.5rem!important}.py-5{padding-top:3rem!important;padding-bottom:3rem!important}.pt-0{padding-top:0!important}.pt-1{padding-top:.25rem!important}.pt-2{padding-top:.5rem!important}.pt-3{padding-top:1rem!important}.pt-4{padding-top:1.5rem!important}.pt-5{padding-top:3rem!important}.pe-0{padding-right:0!important}.pe-1{padding-right:.25rem!important}.pe-2{padding-right:.5rem!important}.pe-3{padding-right:1rem!important}.pe-4{padding-right:1.5rem!important}.pe-5{padding-right:3rem!important}.pb-0{padding-bottom:0!important}.pb-1{padding-bottom:.25rem!important}.pb-2{padding-bottom:.5rem!important}.pb-3{padding-bottom:1rem!important}.pb-4{padding-bottom:1.5rem!important}.pb-5{padding-bottom:3rem!important}.ps-0{padding-left:0!important}.ps-1{padding-left:.25rem!important}.ps-2{padding-left:.5rem!important}.ps-3{padding-left:1rem!important}.ps-4{padding-left:1.5rem!important}.ps-5{padding-left:3rem!important}.font-monospace{font-family:var(--bs-font-monospace)!important}.fs-1{font-size:calc(1.375rem + 1.5vw)!important}.fs-2{font-size:calc(1.325rem + .9vw)!important}.fs-3{font-size:calc(1.3rem + .6vw)!important}.fs-4{font-size:calc(1.275rem + 
.3vw)!important}.fs-5{font-size:1.25rem!important}.fs-6{font-size:1rem!important}.fst-italic{font-style:italic!important}.fst-normal{font-style:normal!important}.fw-light{font-weight:300!important}.fw-lighter{font-weight:lighter!important}.fw-normal{font-weight:400!important}.fw-bold{font-weight:700!important}.fw-bolder{font-weight:bolder!important}.lh-1{line-height:1!important}.lh-sm{line-height:1.25!important}.lh-base{line-height:1.5!important}.lh-lg{line-height:2!important}.text-start{text-align:left!important}.text-end{text-align:right!important}.text-center{text-align:center!important}.text-decoration-none{text-decoration:none!important}.text-decoration-underline{text-decoration:underline!important}.text-decoration-line-through{text-decoration:line-through!important}.text-lowercase{text-transform:lowercase!important}.text-uppercase{text-transform:uppercase!important}.text-capitalize{text-transform:capitalize!important}.text-wrap{white-space:normal!important}.text-nowrap{white-space:nowrap!important}.text-break{word-wrap:break-word!important;word-break:break-word!important}.text-primary{--bs-text-opacity:1;color:rgba(var(--bs-primary-rgb),var(--bs-text-opacity))!important}.text-secondary{--bs-text-opacity:1;color:rgba(var(--bs-secondary-rgb),var(--bs-text-opacity))!important}.text-success{--bs-text-opacity:1;color:rgba(var(--bs-success-rgb),var(--bs-text-opacity))!important}.text-info{--bs-text-opacity:1;color:rgba(var(--bs-info-rgb),var(--bs-text-opacity))!important}.text-warning{--bs-text-opacity:1;color:rgba(var(--bs-warning-rgb),var(--bs-text-opacity))!important}.text-danger{--bs-text-opacity:1;color:rgba(var(--bs-danger-rgb),var(--bs-text-opacity))!important}.text-light{--bs-text-opacity:1;color:rgba(var(--bs-light-rgb),var(--bs-text-opacity))!important}.text-dark{--bs-text-opacity:1;color:rgba(var(--bs-dark-rgb),var(--bs-text-opacity))!important}.text-black{--bs-text-opacity:1;color:rgba(var(--bs-black-rgb),var(--bs-text-opacity))!important}.text-white{--bs-text-opacity:1;color:rgba(var(--bs-white-rgb),var(--bs-text-opacity))!important}.text-body{--bs-text-opacity:1;color:rgba(var(--bs-body-color-rgb),var(--bs-text-opacity))!important}.text-muted{--bs-text-opacity:1;color:#6c757d!important}.text-black-50{--bs-text-opacity:1;color:rgba(0,0,0,.5)!important}.text-white-50{--bs-text-opacity:1;color:rgba(255,255,255,.5)!important}.text-reset{--bs-text-opacity:1;color:inherit!important}.text-opacity-25{--bs-text-opacity:0.25}.text-opacity-50{--bs-text-opacity:0.5}.text-opacity-75{--bs-text-opacity:0.75}.text-opacity-100{--bs-text-opacity:1}.bg-primary{--bs-bg-opacity:1;background-color:rgba(var(--bs-primary-rgb),var(--bs-bg-opacity))!important}.bg-secondary{--bs-bg-opacity:1;background-color:rgba(var(--bs-secondary-rgb),var(--bs-bg-opacity))!important}.bg-success{--bs-bg-opacity:1;background-color:rgba(var(--bs-success-rgb),var(--bs-bg-opacity))!important}.bg-info{--bs-bg-opacity:1;background-color:rgba(var(--bs-info-rgb),var(--bs-bg-opacity))!important}.bg-warning{--bs-bg-opacity:1;background-color:rgba(var(--bs-warning-rgb),var(--bs-bg-opacity))!important}.bg-danger{--bs-bg-opacity:1;background-color:rgba(var(--bs-danger-rgb),var(--bs-bg-opacity))!important}.bg-light{--bs-bg-opacity:1;background-color:rgba(var(--bs-light-rgb),var(--bs-bg-opacity))!important}.bg-dark{--bs-bg-opacity:1;background-color:rgba(var(--bs-dark-rgb),var(--bs-bg-opacity))!important}.bg-black{--bs-bg-opacity:1;background-color:rgba(var(--bs-black-rgb),var(--bs-bg-opacity))!important}.bg-white{--bs-bg-opacity:1;b
ackground-color:rgba(var(--bs-white-rgb),var(--bs-bg-opacity))!important}.bg-body{--bs-bg-opacity:1;background-color:rgba(var(--bs-body-bg-rgb),var(--bs-bg-opacity))!important}.bg-transparent{--bs-bg-opacity:1;background-color:transparent!important}.bg-opacity-10{--bs-bg-opacity:0.1}.bg-opacity-25{--bs-bg-opacity:0.25}.bg-opacity-50{--bs-bg-opacity:0.5}.bg-opacity-75{--bs-bg-opacity:0.75}.bg-opacity-100{--bs-bg-opacity:1}.bg-gradient{background-image:var(--bs-gradient)!important}.user-select-all{-webkit-user-select:all!important;-moz-user-select:all!important;user-select:all!important}.user-select-auto{-webkit-user-select:auto!important;-moz-user-select:auto!important;user-select:auto!important}.user-select-none{-webkit-user-select:none!important;-moz-user-select:none!important;user-select:none!important}.pe-none{pointer-events:none!important}.pe-auto{pointer-events:auto!important}.rounded{border-radius:.25rem!important}.rounded-0{border-radius:0!important}.rounded-1{border-radius:.2rem!important}.rounded-2{border-radius:.25rem!important}.rounded-3{border-radius:.3rem!important}.rounded-circle{border-radius:50%!important}.rounded-pill{border-radius:50rem!important}.rounded-top{border-top-left-radius:.25rem!important;border-top-right-radius:.25rem!important}.rounded-end{border-top-right-radius:.25rem!important;border-bottom-right-radius:.25rem!important}.rounded-bottom{border-bottom-right-radius:.25rem!important;border-bottom-left-radius:.25rem!important}.rounded-start{border-bottom-left-radius:.25rem!important;border-top-left-radius:.25rem!important}.visible{visibility:visible!important}.invisible{visibility:hidden!important}@media (min-width:576px){.float-sm-start{float:left!important}.float-sm-end{float:right!important}.float-sm-none{float:none!important}.d-sm-inline{display:inline!important}.d-sm-inline-block{display:inline-block!important}.d-sm-block{display:block!important}.d-sm-grid{display:grid!important}.d-sm-table{display:table!important}.d-sm-table-row{display:table-row!important}.d-sm-table-cell{display:table-cell!important}.d-sm-flex{display:flex!important}.d-sm-inline-flex{display:inline-flex!important}.d-sm-none{display:none!important}.flex-sm-fill{flex:1 1 
auto!important}.flex-sm-row{flex-direction:row!important}.flex-sm-column{flex-direction:column!important}.flex-sm-row-reverse{flex-direction:row-reverse!important}.flex-sm-column-reverse{flex-direction:column-reverse!important}.flex-sm-grow-0{flex-grow:0!important}.flex-sm-grow-1{flex-grow:1!important}.flex-sm-shrink-0{flex-shrink:0!important}.flex-sm-shrink-1{flex-shrink:1!important}.flex-sm-wrap{flex-wrap:wrap!important}.flex-sm-nowrap{flex-wrap:nowrap!important}.flex-sm-wrap-reverse{flex-wrap:wrap-reverse!important}.gap-sm-0{gap:0!important}.gap-sm-1{gap:.25rem!important}.gap-sm-2{gap:.5rem!important}.gap-sm-3{gap:1rem!important}.gap-sm-4{gap:1.5rem!important}.gap-sm-5{gap:3rem!important}.justify-content-sm-start{justify-content:flex-start!important}.justify-content-sm-end{justify-content:flex-end!important}.justify-content-sm-center{justify-content:center!important}.justify-content-sm-between{justify-content:space-between!important}.justify-content-sm-around{justify-content:space-around!important}.justify-content-sm-evenly{justify-content:space-evenly!important}.align-items-sm-start{align-items:flex-start!important}.align-items-sm-end{align-items:flex-end!important}.align-items-sm-center{align-items:center!important}.align-items-sm-baseline{align-items:baseline!important}.align-items-sm-stretch{align-items:stretch!important}.align-content-sm-start{align-content:flex-start!important}.align-content-sm-end{align-content:flex-end!important}.align-content-sm-center{align-content:center!important}.align-content-sm-between{align-content:space-between!important}.align-content-sm-around{align-content:space-around!important}.align-content-sm-stretch{align-content:stretch!important}.align-self-sm-auto{align-self:auto!important}.align-self-sm-start{align-self:flex-start!important}.align-self-sm-end{align-self:flex-end!important}.align-self-sm-center{align-self:center!important}.align-self-sm-baseline{align-self:baseline!important}.align-self-sm-stretch{align-self:stretch!important}.order-sm-first{order:-1!important}.order-sm-0{order:0!important}.order-sm-1{order:1!important}.order-sm-2{order:2!important}.order-sm-3{order:3!important}.order-sm-4{order:4!important}.order-sm-5{order:5!important}.order-sm-last{order:6!important}.m-sm-0{margin:0!important}.m-sm-1{margin:.25rem!important}.m-sm-2{margin:.5rem!important}.m-sm-3{margin:1rem!important}.m-sm-4{margin:1.5rem!important}.m-sm-5{margin:3rem!important}.m-sm-auto{margin:auto!important}.mx-sm-0{margin-right:0!important;margin-left:0!important}.mx-sm-1{margin-right:.25rem!important;margin-left:.25rem!important}.mx-sm-2{margin-right:.5rem!important;margin-left:.5rem!important}.mx-sm-3{margin-right:1rem!important;margin-left:1rem!important}.mx-sm-4{margin-right:1.5rem!important;margin-left:1.5rem!important}.mx-sm-5{margin-right:3rem!important;margin-left:3rem!important}.mx-sm-auto{margin-right:auto!important;margin-left:auto!important}.my-sm-0{margin-top:0!important;margin-bottom:0!important}.my-sm-1{margin-top:.25rem!important;margin-bottom:.25rem!important}.my-sm-2{margin-top:.5rem!important;margin-bottom:.5rem!important}.my-sm-3{margin-top:1rem!important;margin-bottom:1rem!important}.my-sm-4{margin-top:1.5rem!important;margin-bottom:1.5rem!important}.my-sm-5{margin-top:3rem!important;margin-bottom:3rem!important}.my-sm-auto{margin-top:auto!important;margin-bottom:auto!important}.mt-sm-0{margin-top:0!important}.mt-sm-1{margin-top:.25rem!important}.mt-sm-2{margin-top:.5rem!important}.mt-sm-3{margin-top:1rem!important}.mt-sm-4{margin-top:1.5rem!importa
nt}.mt-sm-5{margin-top:3rem!important}.mt-sm-auto{margin-top:auto!important}.me-sm-0{margin-right:0!important}.me-sm-1{margin-right:.25rem!important}.me-sm-2{margin-right:.5rem!important}.me-sm-3{margin-right:1rem!important}.me-sm-4{margin-right:1.5rem!important}.me-sm-5{margin-right:3rem!important}.me-sm-auto{margin-right:auto!important}.mb-sm-0{margin-bottom:0!important}.mb-sm-1{margin-bottom:.25rem!important}.mb-sm-2{margin-bottom:.5rem!important}.mb-sm-3{margin-bottom:1rem!important}.mb-sm-4{margin-bottom:1.5rem!important}.mb-sm-5{margin-bottom:3rem!important}.mb-sm-auto{margin-bottom:auto!important}.ms-sm-0{margin-left:0!important}.ms-sm-1{margin-left:.25rem!important}.ms-sm-2{margin-left:.5rem!important}.ms-sm-3{margin-left:1rem!important}.ms-sm-4{margin-left:1.5rem!important}.ms-sm-5{margin-left:3rem!important}.ms-sm-auto{margin-left:auto!important}.p-sm-0{padding:0!important}.p-sm-1{padding:.25rem!important}.p-sm-2{padding:.5rem!important}.p-sm-3{padding:1rem!important}.p-sm-4{padding:1.5rem!important}.p-sm-5{padding:3rem!important}.px-sm-0{padding-right:0!important;padding-left:0!important}.px-sm-1{padding-right:.25rem!important;padding-left:.25rem!important}.px-sm-2{padding-right:.5rem!important;padding-left:.5rem!important}.px-sm-3{padding-right:1rem!important;padding-left:1rem!important}.px-sm-4{padding-right:1.5rem!important;padding-left:1.5rem!important}.px-sm-5{padding-right:3rem!important;padding-left:3rem!important}.py-sm-0{padding-top:0!important;padding-bottom:0!important}.py-sm-1{padding-top:.25rem!important;padding-bottom:.25rem!important}.py-sm-2{padding-top:.5rem!important;padding-bottom:.5rem!important}.py-sm-3{padding-top:1rem!important;padding-bottom:1rem!important}.py-sm-4{padding-top:1.5rem!important;padding-bottom:1.5rem!important}.py-sm-5{padding-top:3rem!important;padding-bottom:3rem!important}.pt-sm-0{padding-top:0!important}.pt-sm-1{padding-top:.25rem!important}.pt-sm-2{padding-top:.5rem!important}.pt-sm-3{padding-top:1rem!important}.pt-sm-4{padding-top:1.5rem!important}.pt-sm-5{padding-top:3rem!important}.pe-sm-0{padding-right:0!important}.pe-sm-1{padding-right:.25rem!important}.pe-sm-2{padding-right:.5rem!important}.pe-sm-3{padding-right:1rem!important}.pe-sm-4{padding-right:1.5rem!important}.pe-sm-5{padding-right:3rem!important}.pb-sm-0{padding-bottom:0!important}.pb-sm-1{padding-bottom:.25rem!important}.pb-sm-2{padding-bottom:.5rem!important}.pb-sm-3{padding-bottom:1rem!important}.pb-sm-4{padding-bottom:1.5rem!important}.pb-sm-5{padding-bottom:3rem!important}.ps-sm-0{padding-left:0!important}.ps-sm-1{padding-left:.25rem!important}.ps-sm-2{padding-left:.5rem!important}.ps-sm-3{padding-left:1rem!important}.ps-sm-4{padding-left:1.5rem!important}.ps-sm-5{padding-left:3rem!important}.text-sm-start{text-align:left!important}.text-sm-end{text-align:right!important}.text-sm-center{text-align:center!important}}@media (min-width:768px){.float-md-start{float:left!important}.float-md-end{float:right!important}.float-md-none{float:none!important}.d-md-inline{display:inline!important}.d-md-inline-block{display:inline-block!important}.d-md-block{display:block!important}.d-md-grid{display:grid!important}.d-md-table{display:table!important}.d-md-table-row{display:table-row!important}.d-md-table-cell{display:table-cell!important}.d-md-flex{display:flex!important}.d-md-inline-flex{display:inline-flex!important}.d-md-none{display:none!important}.flex-md-fill{flex:1 1 
auto!important}.flex-md-row{flex-direction:row!important}.flex-md-column{flex-direction:column!important}.flex-md-row-reverse{flex-direction:row-reverse!important}.flex-md-column-reverse{flex-direction:column-reverse!important}.flex-md-grow-0{flex-grow:0!important}.flex-md-grow-1{flex-grow:1!important}.flex-md-shrink-0{flex-shrink:0!important}.flex-md-shrink-1{flex-shrink:1!important}.flex-md-wrap{flex-wrap:wrap!important}.flex-md-nowrap{flex-wrap:nowrap!important}.flex-md-wrap-reverse{flex-wrap:wrap-reverse!important}.gap-md-0{gap:0!important}.gap-md-1{gap:.25rem!important}.gap-md-2{gap:.5rem!important}.gap-md-3{gap:1rem!important}.gap-md-4{gap:1.5rem!important}.gap-md-5{gap:3rem!important}.justify-content-md-start{justify-content:flex-start!important}.justify-content-md-end{justify-content:flex-end!important}.justify-content-md-center{justify-content:center!important}.justify-content-md-between{justify-content:space-between!important}.justify-content-md-around{justify-content:space-around!important}.justify-content-md-evenly{justify-content:space-evenly!important}.align-items-md-start{align-items:flex-start!important}.align-items-md-end{align-items:flex-end!important}.align-items-md-center{align-items:center!important}.align-items-md-baseline{align-items:baseline!important}.align-items-md-stretch{align-items:stretch!important}.align-content-md-start{align-content:flex-start!important}.align-content-md-end{align-content:flex-end!important}.align-content-md-center{align-content:center!important}.align-content-md-between{align-content:space-between!important}.align-content-md-around{align-content:space-around!important}.align-content-md-stretch{align-content:stretch!important}.align-self-md-auto{align-self:auto!important}.align-self-md-start{align-self:flex-start!important}.align-self-md-end{align-self:flex-end!important}.align-self-md-center{align-self:center!important}.align-self-md-baseline{align-self:baseline!important}.align-self-md-stretch{align-self:stretch!important}.order-md-first{order:-1!important}.order-md-0{order:0!important}.order-md-1{order:1!important}.order-md-2{order:2!important}.order-md-3{order:3!important}.order-md-4{order:4!important}.order-md-5{order:5!important}.order-md-last{order:6!important}.m-md-0{margin:0!important}.m-md-1{margin:.25rem!important}.m-md-2{margin:.5rem!important}.m-md-3{margin:1rem!important}.m-md-4{margin:1.5rem!important}.m-md-5{margin:3rem!important}.m-md-auto{margin:auto!important}.mx-md-0{margin-right:0!important;margin-left:0!important}.mx-md-1{margin-right:.25rem!important;margin-left:.25rem!important}.mx-md-2{margin-right:.5rem!important;margin-left:.5rem!important}.mx-md-3{margin-right:1rem!important;margin-left:1rem!important}.mx-md-4{margin-right:1.5rem!important;margin-left:1.5rem!important}.mx-md-5{margin-right:3rem!important;margin-left:3rem!important}.mx-md-auto{margin-right:auto!important;margin-left:auto!important}.my-md-0{margin-top:0!important;margin-bottom:0!important}.my-md-1{margin-top:.25rem!important;margin-bottom:.25rem!important}.my-md-2{margin-top:.5rem!important;margin-bottom:.5rem!important}.my-md-3{margin-top:1rem!important;margin-bottom:1rem!important}.my-md-4{margin-top:1.5rem!important;margin-bottom:1.5rem!important}.my-md-5{margin-top:3rem!important;margin-bottom:3rem!important}.my-md-auto{margin-top:auto!important;margin-bottom:auto!important}.mt-md-0{margin-top:0!important}.mt-md-1{margin-top:.25rem!important}.mt-md-2{margin-top:.5rem!important}.mt-md-3{margin-top:1rem!important}.mt-md-4{margin-top:1.5rem!importa
nt}.mt-md-5{margin-top:3rem!important}.mt-md-auto{margin-top:auto!important}.me-md-0{margin-right:0!important}.me-md-1{margin-right:.25rem!important}.me-md-2{margin-right:.5rem!important}.me-md-3{margin-right:1rem!important}.me-md-4{margin-right:1.5rem!important}.me-md-5{margin-right:3rem!important}.me-md-auto{margin-right:auto!important}.mb-md-0{margin-bottom:0!important}.mb-md-1{margin-bottom:.25rem!important}.mb-md-2{margin-bottom:.5rem!important}.mb-md-3{margin-bottom:1rem!important}.mb-md-4{margin-bottom:1.5rem!important}.mb-md-5{margin-bottom:3rem!important}.mb-md-auto{margin-bottom:auto!important}.ms-md-0{margin-left:0!important}.ms-md-1{margin-left:.25rem!important}.ms-md-2{margin-left:.5rem!important}.ms-md-3{margin-left:1rem!important}.ms-md-4{margin-left:1.5rem!important}.ms-md-5{margin-left:3rem!important}.ms-md-auto{margin-left:auto!important}.p-md-0{padding:0!important}.p-md-1{padding:.25rem!important}.p-md-2{padding:.5rem!important}.p-md-3{padding:1rem!important}.p-md-4{padding:1.5rem!important}.p-md-5{padding:3rem!important}.px-md-0{padding-right:0!important;padding-left:0!important}.px-md-1{padding-right:.25rem!important;padding-left:.25rem!important}.px-md-2{padding-right:.5rem!important;padding-left:.5rem!important}.px-md-3{padding-right:1rem!important;padding-left:1rem!important}.px-md-4{padding-right:1.5rem!important;padding-left:1.5rem!important}.px-md-5{padding-right:3rem!important;padding-left:3rem!important}.py-md-0{padding-top:0!important;padding-bottom:0!important}.py-md-1{padding-top:.25rem!important;padding-bottom:.25rem!important}.py-md-2{padding-top:.5rem!important;padding-bottom:.5rem!important}.py-md-3{padding-top:1rem!important;padding-bottom:1rem!important}.py-md-4{padding-top:1.5rem!important;padding-bottom:1.5rem!important}.py-md-5{padding-top:3rem!important;padding-bottom:3rem!important}.pt-md-0{padding-top:0!important}.pt-md-1{padding-top:.25rem!important}.pt-md-2{padding-top:.5rem!important}.pt-md-3{padding-top:1rem!important}.pt-md-4{padding-top:1.5rem!important}.pt-md-5{padding-top:3rem!important}.pe-md-0{padding-right:0!important}.pe-md-1{padding-right:.25rem!important}.pe-md-2{padding-right:.5rem!important}.pe-md-3{padding-right:1rem!important}.pe-md-4{padding-right:1.5rem!important}.pe-md-5{padding-right:3rem!important}.pb-md-0{padding-bottom:0!important}.pb-md-1{padding-bottom:.25rem!important}.pb-md-2{padding-bottom:.5rem!important}.pb-md-3{padding-bottom:1rem!important}.pb-md-4{padding-bottom:1.5rem!important}.pb-md-5{padding-bottom:3rem!important}.ps-md-0{padding-left:0!important}.ps-md-1{padding-left:.25rem!important}.ps-md-2{padding-left:.5rem!important}.ps-md-3{padding-left:1rem!important}.ps-md-4{padding-left:1.5rem!important}.ps-md-5{padding-left:3rem!important}.text-md-start{text-align:left!important}.text-md-end{text-align:right!important}.text-md-center{text-align:center!important}}@media (min-width:992px){.float-lg-start{float:left!important}.float-lg-end{float:right!important}.float-lg-none{float:none!important}.d-lg-inline{display:inline!important}.d-lg-inline-block{display:inline-block!important}.d-lg-block{display:block!important}.d-lg-grid{display:grid!important}.d-lg-table{display:table!important}.d-lg-table-row{display:table-row!important}.d-lg-table-cell{display:table-cell!important}.d-lg-flex{display:flex!important}.d-lg-inline-flex{display:inline-flex!important}.d-lg-none{display:none!important}.flex-lg-fill{flex:1 1 
auto!important}.flex-lg-row{flex-direction:row!important}.flex-lg-column{flex-direction:column!important}.flex-lg-row-reverse{flex-direction:row-reverse!important}.flex-lg-column-reverse{flex-direction:column-reverse!important}.flex-lg-grow-0{flex-grow:0!important}.flex-lg-grow-1{flex-grow:1!important}.flex-lg-shrink-0{flex-shrink:0!important}.flex-lg-shrink-1{flex-shrink:1!important}.flex-lg-wrap{flex-wrap:wrap!important}.flex-lg-nowrap{flex-wrap:nowrap!important}.flex-lg-wrap-reverse{flex-wrap:wrap-reverse!important}.gap-lg-0{gap:0!important}.gap-lg-1{gap:.25rem!important}.gap-lg-2{gap:.5rem!important}.gap-lg-3{gap:1rem!important}.gap-lg-4{gap:1.5rem!important}.gap-lg-5{gap:3rem!important}.justify-content-lg-start{justify-content:flex-start!important}.justify-content-lg-end{justify-content:flex-end!important}.justify-content-lg-center{justify-content:center!important}.justify-content-lg-between{justify-content:space-between!important}.justify-content-lg-around{justify-content:space-around!important}.justify-content-lg-evenly{justify-content:space-evenly!important}.align-items-lg-start{align-items:flex-start!important}.align-items-lg-end{align-items:flex-end!important}.align-items-lg-center{align-items:center!important}.align-items-lg-baseline{align-items:baseline!important}.align-items-lg-stretch{align-items:stretch!important}.align-content-lg-start{align-content:flex-start!important}.align-content-lg-end{align-content:flex-end!important}.align-content-lg-center{align-content:center!important}.align-content-lg-between{align-content:space-between!important}.align-content-lg-around{align-content:space-around!important}.align-content-lg-stretch{align-content:stretch!important}.align-self-lg-auto{align-self:auto!important}.align-self-lg-start{align-self:flex-start!important}.align-self-lg-end{align-self:flex-end!important}.align-self-lg-center{align-self:center!important}.align-self-lg-baseline{align-self:baseline!important}.align-self-lg-stretch{align-self:stretch!important}.order-lg-first{order:-1!important}.order-lg-0{order:0!important}.order-lg-1{order:1!important}.order-lg-2{order:2!important}.order-lg-3{order:3!important}.order-lg-4{order:4!important}.order-lg-5{order:5!important}.order-lg-last{order:6!important}.m-lg-0{margin:0!important}.m-lg-1{margin:.25rem!important}.m-lg-2{margin:.5rem!important}.m-lg-3{margin:1rem!important}.m-lg-4{margin:1.5rem!important}.m-lg-5{margin:3rem!important}.m-lg-auto{margin:auto!important}.mx-lg-0{margin-right:0!important;margin-left:0!important}.mx-lg-1{margin-right:.25rem!important;margin-left:.25rem!important}.mx-lg-2{margin-right:.5rem!important;margin-left:.5rem!important}.mx-lg-3{margin-right:1rem!important;margin-left:1rem!important}.mx-lg-4{margin-right:1.5rem!important;margin-left:1.5rem!important}.mx-lg-5{margin-right:3rem!important;margin-left:3rem!important}.mx-lg-auto{margin-right:auto!important;margin-left:auto!important}.my-lg-0{margin-top:0!important;margin-bottom:0!important}.my-lg-1{margin-top:.25rem!important;margin-bottom:.25rem!important}.my-lg-2{margin-top:.5rem!important;margin-bottom:.5rem!important}.my-lg-3{margin-top:1rem!important;margin-bottom:1rem!important}.my-lg-4{margin-top:1.5rem!important;margin-bottom:1.5rem!important}.my-lg-5{margin-top:3rem!important;margin-bottom:3rem!important}.my-lg-auto{margin-top:auto!important;margin-bottom:auto!important}.mt-lg-0{margin-top:0!important}.mt-lg-1{margin-top:.25rem!important}.mt-lg-2{margin-top:.5rem!important}.mt-lg-3{margin-top:1rem!important}.mt-lg-4{margin-top:1.5rem!importa
nt}.mt-lg-5{margin-top:3rem!important}.mt-lg-auto{margin-top:auto!important}.me-lg-0{margin-right:0!important}.me-lg-1{margin-right:.25rem!important}.me-lg-2{margin-right:.5rem!important}.me-lg-3{margin-right:1rem!important}.me-lg-4{margin-right:1.5rem!important}.me-lg-5{margin-right:3rem!important}.me-lg-auto{margin-right:auto!important}.mb-lg-0{margin-bottom:0!important}.mb-lg-1{margin-bottom:.25rem!important}.mb-lg-2{margin-bottom:.5rem!important}.mb-lg-3{margin-bottom:1rem!important}.mb-lg-4{margin-bottom:1.5rem!important}.mb-lg-5{margin-bottom:3rem!important}.mb-lg-auto{margin-bottom:auto!important}.ms-lg-0{margin-left:0!important}.ms-lg-1{margin-left:.25rem!important}.ms-lg-2{margin-left:.5rem!important}.ms-lg-3{margin-left:1rem!important}.ms-lg-4{margin-left:1.5rem!important}.ms-lg-5{margin-left:3rem!important}.ms-lg-auto{margin-left:auto!important}.p-lg-0{padding:0!important}.p-lg-1{padding:.25rem!important}.p-lg-2{padding:.5rem!important}.p-lg-3{padding:1rem!important}.p-lg-4{padding:1.5rem!important}.p-lg-5{padding:3rem!important}.px-lg-0{padding-right:0!important;padding-left:0!important}.px-lg-1{padding-right:.25rem!important;padding-left:.25rem!important}.px-lg-2{padding-right:.5rem!important;padding-left:.5rem!important}.px-lg-3{padding-right:1rem!important;padding-left:1rem!important}.px-lg-4{padding-right:1.5rem!important;padding-left:1.5rem!important}.px-lg-5{padding-right:3rem!important;padding-left:3rem!important}.py-lg-0{padding-top:0!important;padding-bottom:0!important}.py-lg-1{padding-top:.25rem!important;padding-bottom:.25rem!important}.py-lg-2{padding-top:.5rem!important;padding-bottom:.5rem!important}.py-lg-3{padding-top:1rem!important;padding-bottom:1rem!important}.py-lg-4{padding-top:1.5rem!important;padding-bottom:1.5rem!important}.py-lg-5{padding-top:3rem!important;padding-bottom:3rem!important}.pt-lg-0{padding-top:0!important}.pt-lg-1{padding-top:.25rem!important}.pt-lg-2{padding-top:.5rem!important}.pt-lg-3{padding-top:1rem!important}.pt-lg-4{padding-top:1.5rem!important}.pt-lg-5{padding-top:3rem!important}.pe-lg-0{padding-right:0!important}.pe-lg-1{padding-right:.25rem!important}.pe-lg-2{padding-right:.5rem!important}.pe-lg-3{padding-right:1rem!important}.pe-lg-4{padding-right:1.5rem!important}.pe-lg-5{padding-right:3rem!important}.pb-lg-0{padding-bottom:0!important}.pb-lg-1{padding-bottom:.25rem!important}.pb-lg-2{padding-bottom:.5rem!important}.pb-lg-3{padding-bottom:1rem!important}.pb-lg-4{padding-bottom:1.5rem!important}.pb-lg-5{padding-bottom:3rem!important}.ps-lg-0{padding-left:0!important}.ps-lg-1{padding-left:.25rem!important}.ps-lg-2{padding-left:.5rem!important}.ps-lg-3{padding-left:1rem!important}.ps-lg-4{padding-left:1.5rem!important}.ps-lg-5{padding-left:3rem!important}.text-lg-start{text-align:left!important}.text-lg-end{text-align:right!important}.text-lg-center{text-align:center!important}}@media (min-width:1200px){.float-xl-start{float:left!important}.float-xl-end{float:right!important}.float-xl-none{float:none!important}.d-xl-inline{display:inline!important}.d-xl-inline-block{display:inline-block!important}.d-xl-block{display:block!important}.d-xl-grid{display:grid!important}.d-xl-table{display:table!important}.d-xl-table-row{display:table-row!important}.d-xl-table-cell{display:table-cell!important}.d-xl-flex{display:flex!important}.d-xl-inline-flex{display:inline-flex!important}.d-xl-none{display:none!important}.flex-xl-fill{flex:1 1 
auto!important}.flex-xl-row{flex-direction:row!important}.flex-xl-column{flex-direction:column!important}.flex-xl-row-reverse{flex-direction:row-reverse!important}.flex-xl-column-reverse{flex-direction:column-reverse!important}.flex-xl-grow-0{flex-grow:0!important}.flex-xl-grow-1{flex-grow:1!important}.flex-xl-shrink-0{flex-shrink:0!important}.flex-xl-shrink-1{flex-shrink:1!important}.flex-xl-wrap{flex-wrap:wrap!important}.flex-xl-nowrap{flex-wrap:nowrap!important}.flex-xl-wrap-reverse{flex-wrap:wrap-reverse!important}.gap-xl-0{gap:0!important}.gap-xl-1{gap:.25rem!important}.gap-xl-2{gap:.5rem!important}.gap-xl-3{gap:1rem!important}.gap-xl-4{gap:1.5rem!important}.gap-xl-5{gap:3rem!important}.justify-content-xl-start{justify-content:flex-start!important}.justify-content-xl-end{justify-content:flex-end!important}.justify-content-xl-center{justify-content:center!important}.justify-content-xl-between{justify-content:space-between!important}.justify-content-xl-around{justify-content:space-around!important}.justify-content-xl-evenly{justify-content:space-evenly!important}.align-items-xl-start{align-items:flex-start!important}.align-items-xl-end{align-items:flex-end!important}.align-items-xl-center{align-items:center!important}.align-items-xl-baseline{align-items:baseline!important}.align-items-xl-stretch{align-items:stretch!important}.align-content-xl-start{align-content:flex-start!important}.align-content-xl-end{align-content:flex-end!important}.align-content-xl-center{align-content:center!important}.align-content-xl-between{align-content:space-between!important}.align-content-xl-around{align-content:space-around!important}.align-content-xl-stretch{align-content:stretch!important}.align-self-xl-auto{align-self:auto!important}.align-self-xl-start{align-self:flex-start!important}.align-self-xl-end{align-self:flex-end!important}.align-self-xl-center{align-self:center!important}.align-self-xl-baseline{align-self:baseline!important}.align-self-xl-stretch{align-self:stretch!important}.order-xl-first{order:-1!important}.order-xl-0{order:0!important}.order-xl-1{order:1!important}.order-xl-2{order:2!important}.order-xl-3{order:3!important}.order-xl-4{order:4!important}.order-xl-5{order:5!important}.order-xl-last{order:6!important}.m-xl-0{margin:0!important}.m-xl-1{margin:.25rem!important}.m-xl-2{margin:.5rem!important}.m-xl-3{margin:1rem!important}.m-xl-4{margin:1.5rem!important}.m-xl-5{margin:3rem!important}.m-xl-auto{margin:auto!important}.mx-xl-0{margin-right:0!important;margin-left:0!important}.mx-xl-1{margin-right:.25rem!important;margin-left:.25rem!important}.mx-xl-2{margin-right:.5rem!important;margin-left:.5rem!important}.mx-xl-3{margin-right:1rem!important;margin-left:1rem!important}.mx-xl-4{margin-right:1.5rem!important;margin-left:1.5rem!important}.mx-xl-5{margin-right:3rem!important;margin-left:3rem!important}.mx-xl-auto{margin-right:auto!important;margin-left:auto!important}.my-xl-0{margin-top:0!important;margin-bottom:0!important}.my-xl-1{margin-top:.25rem!important;margin-bottom:.25rem!important}.my-xl-2{margin-top:.5rem!important;margin-bottom:.5rem!important}.my-xl-3{margin-top:1rem!important;margin-bottom:1rem!important}.my-xl-4{margin-top:1.5rem!important;margin-bottom:1.5rem!important}.my-xl-5{margin-top:3rem!important;margin-bottom:3rem!important}.my-xl-auto{margin-top:auto!important;margin-bottom:auto!important}.mt-xl-0{margin-top:0!important}.mt-xl-1{margin-top:.25rem!important}.mt-xl-2{margin-top:.5rem!important}.mt-xl-3{margin-top:1rem!important}.mt-xl-4{margin-top:1.5rem!importa
nt}.mt-xl-5{margin-top:3rem!important}.mt-xl-auto{margin-top:auto!important}.me-xl-0{margin-right:0!important}.me-xl-1{margin-right:.25rem!important}.me-xl-2{margin-right:.5rem!important}.me-xl-3{margin-right:1rem!important}.me-xl-4{margin-right:1.5rem!important}.me-xl-5{margin-right:3rem!important}.me-xl-auto{margin-right:auto!important}.mb-xl-0{margin-bottom:0!important}.mb-xl-1{margin-bottom:.25rem!important}.mb-xl-2{margin-bottom:.5rem!important}.mb-xl-3{margin-bottom:1rem!important}.mb-xl-4{margin-bottom:1.5rem!important}.mb-xl-5{margin-bottom:3rem!important}.mb-xl-auto{margin-bottom:auto!important}.ms-xl-0{margin-left:0!important}.ms-xl-1{margin-left:.25rem!important}.ms-xl-2{margin-left:.5rem!important}.ms-xl-3{margin-left:1rem!important}.ms-xl-4{margin-left:1.5rem!important}.ms-xl-5{margin-left:3rem!important}.ms-xl-auto{margin-left:auto!important}.p-xl-0{padding:0!important}.p-xl-1{padding:.25rem!important}.p-xl-2{padding:.5rem!important}.p-xl-3{padding:1rem!important}.p-xl-4{padding:1.5rem!important}.p-xl-5{padding:3rem!important}.px-xl-0{padding-right:0!important;padding-left:0!important}.px-xl-1{padding-right:.25rem!important;padding-left:.25rem!important}.px-xl-2{padding-right:.5rem!important;padding-left:.5rem!important}.px-xl-3{padding-right:1rem!important;padding-left:1rem!important}.px-xl-4{padding-right:1.5rem!important;padding-left:1.5rem!important}.px-xl-5{padding-right:3rem!important;padding-left:3rem!important}.py-xl-0{padding-top:0!important;padding-bottom:0!important}.py-xl-1{padding-top:.25rem!important;padding-bottom:.25rem!important}.py-xl-2{padding-top:.5rem!important;padding-bottom:.5rem!important}.py-xl-3{padding-top:1rem!important;padding-bottom:1rem!important}.py-xl-4{padding-top:1.5rem!important;padding-bottom:1.5rem!important}.py-xl-5{padding-top:3rem!important;padding-bottom:3rem!important}.pt-xl-0{padding-top:0!important}.pt-xl-1{padding-top:.25rem!important}.pt-xl-2{padding-top:.5rem!important}.pt-xl-3{padding-top:1rem!important}.pt-xl-4{padding-top:1.5rem!important}.pt-xl-5{padding-top:3rem!important}.pe-xl-0{padding-right:0!important}.pe-xl-1{padding-right:.25rem!important}.pe-xl-2{padding-right:.5rem!important}.pe-xl-3{padding-right:1rem!important}.pe-xl-4{padding-right:1.5rem!important}.pe-xl-5{padding-right:3rem!important}.pb-xl-0{padding-bottom:0!important}.pb-xl-1{padding-bottom:.25rem!important}.pb-xl-2{padding-bottom:.5rem!important}.pb-xl-3{padding-bottom:1rem!important}.pb-xl-4{padding-bottom:1.5rem!important}.pb-xl-5{padding-bottom:3rem!important}.ps-xl-0{padding-left:0!important}.ps-xl-1{padding-left:.25rem!important}.ps-xl-2{padding-left:.5rem!important}.ps-xl-3{padding-left:1rem!important}.ps-xl-4{padding-left:1.5rem!important}.ps-xl-5{padding-left:3rem!important}.text-xl-start{text-align:left!important}.text-xl-end{text-align:right!important}.text-xl-center{text-align:center!important}}@media (min-width:1400px){.float-xxl-start{float:left!important}.float-xxl-end{float:right!important}.float-xxl-none{float:none!important}.d-xxl-inline{display:inline!important}.d-xxl-inline-block{display:inline-block!important}.d-xxl-block{display:block!important}.d-xxl-grid{display:grid!important}.d-xxl-table{display:table!important}.d-xxl-table-row{display:table-row!important}.d-xxl-table-cell{display:table-cell!important}.d-xxl-flex{display:flex!important}.d-xxl-inline-flex{display:inline-flex!important}.d-xxl-none{display:none!important}.flex-xxl-fill{flex:1 1 
auto!important}.flex-xxl-row{flex-direction:row!important}.flex-xxl-column{flex-direction:column!important}.flex-xxl-row-reverse{flex-direction:row-reverse!important}.flex-xxl-column-reverse{flex-direction:column-reverse!important}.flex-xxl-grow-0{flex-grow:0!important}.flex-xxl-grow-1{flex-grow:1!important}.flex-xxl-shrink-0{flex-shrink:0!important}.flex-xxl-shrink-1{flex-shrink:1!important}.flex-xxl-wrap{flex-wrap:wrap!important}.flex-xxl-nowrap{flex-wrap:nowrap!important}.flex-xxl-wrap-reverse{flex-wrap:wrap-reverse!important}.gap-xxl-0{gap:0!important}.gap-xxl-1{gap:.25rem!important}.gap-xxl-2{gap:.5rem!important}.gap-xxl-3{gap:1rem!important}.gap-xxl-4{gap:1.5rem!important}.gap-xxl-5{gap:3rem!important}.justify-content-xxl-start{justify-content:flex-start!important}.justify-content-xxl-end{justify-content:flex-end!important}.justify-content-xxl-center{justify-content:center!important}.justify-content-xxl-between{justify-content:space-between!important}.justify-content-xxl-around{justify-content:space-around!important}.justify-content-xxl-evenly{justify-content:space-evenly!important}.align-items-xxl-start{align-items:flex-start!important}.align-items-xxl-end{align-items:flex-end!important}.align-items-xxl-center{align-items:center!important}.align-items-xxl-baseline{align-items:baseline!important}.align-items-xxl-stretch{align-items:stretch!important}.align-content-xxl-start{align-content:flex-start!important}.align-content-xxl-end{align-content:flex-end!important}.align-content-xxl-center{align-content:center!important}.align-content-xxl-between{align-content:space-between!important}.align-content-xxl-around{align-content:space-around!important}.align-content-xxl-stretch{align-content:stretch!important}.align-self-xxl-auto{align-self:auto!important}.align-self-xxl-start{align-self:flex-start!important}.align-self-xxl-end{align-self:flex-end!important}.align-self-xxl-center{align-self:center!important}.align-self-xxl-baseline{align-self:baseline!important}.align-self-xxl-stretch{align-self:stretch!important}.order-xxl-first{order:-1!important}.order-xxl-0{order:0!important}.order-xxl-1{order:1!important}.order-xxl-2{order:2!important}.order-xxl-3{order:3!important}.order-xxl-4{order:4!important}.order-xxl-5{order:5!important}.order-xxl-last{order:6!important}.m-xxl-0{margin:0!important}.m-xxl-1{margin:.25rem!important}.m-xxl-2{margin:.5rem!important}.m-xxl-3{margin:1rem!important}.m-xxl-4{margin:1.5rem!important}.m-xxl-5{margin:3rem!important}.m-xxl-auto{margin:auto!important}.mx-xxl-0{margin-right:0!important;margin-left:0!important}.mx-xxl-1{margin-right:.25rem!important;margin-left:.25rem!important}.mx-xxl-2{margin-right:.5rem!important;margin-left:.5rem!important}.mx-xxl-3{margin-right:1rem!important;margin-left:1rem!important}.mx-xxl-4{margin-right:1.5rem!important;margin-left:1.5rem!important}.mx-xxl-5{margin-right:3rem!important;margin-left:3rem!important}.mx-xxl-auto{margin-right:auto!important;margin-left:auto!important}.my-xxl-0{margin-top:0!important;margin-bottom:0!important}.my-xxl-1{margin-top:.25rem!important;margin-bottom:.25rem!important}.my-xxl-2{margin-top:.5rem!important;margin-bottom:.5rem!important}.my-xxl-3{margin-top:1rem!important;margin-bottom:1rem!important}.my-xxl-4{margin-top:1.5rem!important;margin-bottom:1.5rem!important}.my-xxl-5{margin-top:3rem!important;margin-bottom:3rem!important}.my-xxl-auto{margin-top:auto!important;margin-bottom:auto!important}.mt-xxl-0{margin-top:0!important}.mt-xxl-1{margin-top:.25rem!important}.mt-xxl-2{margin-top:.5rem!importa
nt}.mt-xxl-3{margin-top:1rem!important}.mt-xxl-4{margin-top:1.5rem!important}.mt-xxl-5{margin-top:3rem!important}.mt-xxl-auto{margin-top:auto!important}.me-xxl-0{margin-right:0!important}.me-xxl-1{margin-right:.25rem!important}.me-xxl-2{margin-right:.5rem!important}.me-xxl-3{margin-right:1rem!important}.me-xxl-4{margin-right:1.5rem!important}.me-xxl-5{margin-right:3rem!important}.me-xxl-auto{margin-right:auto!important}.mb-xxl-0{margin-bottom:0!important}.mb-xxl-1{margin-bottom:.25rem!important}.mb-xxl-2{margin-bottom:.5rem!important}.mb-xxl-3{margin-bottom:1rem!important}.mb-xxl-4{margin-bottom:1.5rem!important}.mb-xxl-5{margin-bottom:3rem!important}.mb-xxl-auto{margin-bottom:auto!important}.ms-xxl-0{margin-left:0!important}.ms-xxl-1{margin-left:.25rem!important}.ms-xxl-2{margin-left:.5rem!important}.ms-xxl-3{margin-left:1rem!important}.ms-xxl-4{margin-left:1.5rem!important}.ms-xxl-5{margin-left:3rem!important}.ms-xxl-auto{margin-left:auto!important}.p-xxl-0{padding:0!important}.p-xxl-1{padding:.25rem!important}.p-xxl-2{padding:.5rem!important}.p-xxl-3{padding:1rem!important}.p-xxl-4{padding:1.5rem!important}.p-xxl-5{padding:3rem!important}.px-xxl-0{padding-right:0!important;padding-left:0!important}.px-xxl-1{padding-right:.25rem!important;padding-left:.25rem!important}.px-xxl-2{padding-right:.5rem!important;padding-left:.5rem!important}.px-xxl-3{padding-right:1rem!important;padding-left:1rem!important}.px-xxl-4{padding-right:1.5rem!important;padding-left:1.5rem!important}.px-xxl-5{padding-right:3rem!important;padding-left:3rem!important}.py-xxl-0{padding-top:0!important;padding-bottom:0!important}.py-xxl-1{padding-top:.25rem!important;padding-bottom:.25rem!important}.py-xxl-2{padding-top:.5rem!important;padding-bottom:.5rem!important}.py-xxl-3{padding-top:1rem!important;padding-bottom:1rem!important}.py-xxl-4{padding-top:1.5rem!important;padding-bottom:1.5rem!important}.py-xxl-5{padding-top:3rem!important;padding-bottom:3rem!important}.pt-xxl-0{padding-top:0!important}.pt-xxl-1{padding-top:.25rem!important}.pt-xxl-2{padding-top:.5rem!important}.pt-xxl-3{padding-top:1rem!important}.pt-xxl-4{padding-top:1.5rem!important}.pt-xxl-5{padding-top:3rem!important}.pe-xxl-0{padding-right:0!important}.pe-xxl-1{padding-right:.25rem!important}.pe-xxl-2{padding-right:.5rem!important}.pe-xxl-3{padding-right:1rem!important}.pe-xxl-4{padding-right:1.5rem!important}.pe-xxl-5{padding-right:3rem!important}.pb-xxl-0{padding-bottom:0!important}.pb-xxl-1{padding-bottom:.25rem!important}.pb-xxl-2{padding-bottom:.5rem!important}.pb-xxl-3{padding-bottom:1rem!important}.pb-xxl-4{padding-bottom:1.5rem!important}.pb-xxl-5{padding-bottom:3rem!important}.ps-xxl-0{padding-left:0!important}.ps-xxl-1{padding-left:.25rem!important}.ps-xxl-2{padding-left:.5rem!important}.ps-xxl-3{padding-left:1rem!important}.ps-xxl-4{padding-left:1.5rem!important}.ps-xxl-5{padding-left:3rem!important}.text-xxl-start{text-align:left!important}.text-xxl-end{text-align:right!important}.text-xxl-center{text-align:center!important}}@media (min-width:1200px){.fs-1{font-size:2.5rem!important}.fs-2{font-size:2rem!important}.fs-3{font-size:1.75rem!important}.fs-4{font-size:1.5rem!important}}@media 
print{.d-print-inline{display:inline!important}.d-print-inline-block{display:inline-block!important}.d-print-block{display:block!important}.d-print-grid{display:grid!important}.d-print-table{display:table!important}.d-print-table-row{display:table-row!important}.d-print-table-cell{display:table-cell!important}.d-print-flex{display:flex!important}.d-print-inline-flex{display:inline-flex!important}.d-print-none{display:none!important}} -/*# sourceMappingURL=bootstrap.min.css.map */ \ No newline at end of file diff --git a/web/static/css/bootstrap.min.css.map b/web/static/css/bootstrap.min.css.map deleted file mode 100644 index c84afa43..00000000 --- a/web/static/css/bootstrap.min.css.map +++ /dev/null @@ -1 +0,0 @@ -{"version":3,"sources":["../../scss/bootstrap.scss","../../scss/_root.scss","../../scss/_reboot.scss","dist/css/bootstrap.css","../../scss/vendor/_rfs.scss","../../scss/mixins/_border-radius.scss","../../scss/_type.scss","../../scss/mixins/_lists.scss","../../scss/_images.scss","../../scss/mixins/_image.scss","../../scss/_containers.scss","../../scss/mixins/_container.scss","../../scss/mixins/_breakpoints.scss","../../scss/_grid.scss","../../scss/mixins/_grid.scss","../../scss/_tables.scss","../../scss/mixins/_table-variants.scss","../../scss/forms/_labels.scss","../../scss/forms/_form-text.scss","../../scss/forms/_form-control.scss","../../scss/mixins/_transition.scss","../../scss/mixins/_gradients.scss","../../scss/forms/_form-select.scss","../../scss/forms/_form-check.scss","../../scss/forms/_form-range.scss","../../scss/forms/_floating-labels.scss","../../scss/forms/_input-group.scss","../../scss/mixins/_forms.scss","../../scss/_buttons.scss","../../scss/mixins/_buttons.scss","../../scss/_transitions.scss","../../scss/_dropdown.scss","../../scss/mixins/_caret.scss","../../scss/_button-group.scss","../../scss/_nav.scss","../../scss/_navbar.scss","../../scss/_card.scss","../../scss/_accordion.scss","../../scss/_breadcrumb.scss","../../scss/_pagination.scss","../../scss/mixins/_pagination.scss","../../scss/_badge.scss","../../scss/_alert.scss","../../scss/mixins/_alert.scss","../../scss/_progress.scss","../../scss/_list-group.scss","../../scss/mixins/_list-group.scss","../../scss/_close.scss","../../scss/_toasts.scss","../../scss/_modal.scss","../../scss/mixins/_backdrop.scss","../../scss/_tooltip.scss","../../scss/mixins/_reset-text.scss","../../scss/_popover.scss","../../scss/_carousel.scss","../../scss/mixins/_clearfix.scss","../../scss/_spinners.scss","../../scss/_offcanvas.scss","../../scss/_placeholders.scss","../../scss/helpers/_colored-links.scss","../../scss/helpers/_ratio.scss","../../scss/helpers/_position.scss","../../scss/helpers/_stacks.scss","../../scss/helpers/_visually-hidden.scss","../../scss/mixins/_visually-hidden.scss","../../scss/helpers/_stretched-link.scss","../../scss/helpers/_text-truncation.scss","../../scss/mixins/_text-truncate.scss","../../scss/helpers/_vr.scss","../../scss/mixins/_utilities.scss","../../scss/utilities/_api.scss"],"names":[],"mappings":"iBAAA;;;;;ACAA,MAQI,UAAA,QAAA,YAAA,QAAA,YAAA,QAAA,UAAA,QAAA,SAAA,QAAA,YAAA,QAAA,YAAA,QAAA,WAAA,QAAA,UAAA,QAAA,UAAA,QAAA,WAAA,KAAA,UAAA,QAAA,eAAA,QAIA,cAAA,QAAA,cAAA,QAAA,cAAA,QAAA,cAAA,QAAA,cAAA,QAAA,cAAA,QAAA,cAAA,QAAA,cAAA,QAAA,cAAA,QAIA,aAAA,QAAA,eAAA,QAAA,aAAA,QAAA,UAAA,QAAA,aAAA,QAAA,YAAA,QAAA,WAAA,QAAA,UAAA,QAIA,iBAAA,EAAA,CAAA,GAAA,CAAA,IAAA,mBAAA,GAAA,CAAA,GAAA,CAAA,IAAA,iBAAA,EAAA,CAAA,GAAA,CAAA,GAAA,cAAA,EAAA,CAAA,GAAA,CAAA,IAAA,iBAAA,GAAA,CAAA,GAAA,CAAA,EAAA,gBAAA,GAAA,CAAA,EAAA,CAAA,GAAA,e
AAA,GAAA,CAAA,GAAA,CAAA,IAAA,cAAA,EAAA,CAAA,EAAA,CAAA,GAGF,eAAA,GAAA,CAAA,GAAA,CAAA,IACA,eAAA,CAAA,CAAA,CAAA,CAAA,EACA,oBAAA,EAAA,CAAA,EAAA,CAAA,GACA,iBAAA,GAAA,CAAA,GAAA,CAAA,IAMA,qBAAA,SAAA,CAAA,aAAA,CAAA,UAAA,CAAA,MAAA,CAAA,gBAAA,CAAA,KAAA,CAAA,WAAA,CAAA,iBAAA,CAAA,UAAA,CAAA,mBAAA,CAAA,gBAAA,CAAA,iBAAA,CAAA,mBACA,oBAAA,cAAA,CAAA,KAAA,CAAA,MAAA,CAAA,QAAA,CAAA,iBAAA,CAAA,aAAA,CAAA,UACA,cAAA,2EAQA,sBAAA,0BACA,oBAAA,KACA,sBAAA,IACA,sBAAA,IACA,gBAAA,QAIA,aAAA,KCnCF,ECgDA,QADA,SD5CE,WAAA,WAeE,8CANJ,MAOM,gBAAA,QAcN,KACE,OAAA,EACA,YAAA,2BEmPI,UAAA,yBFjPJ,YAAA,2BACA,YAAA,2BACA,MAAA,qBACA,WAAA,0BACA,iBAAA,kBACA,yBAAA,KACA,4BAAA,YAUF,GACE,OAAA,KAAA,EACA,MAAA,QACA,iBAAA,aACA,OAAA,EACA,QAAA,IAGF,eACE,OAAA,IAUF,IAAA,IAAA,IAAA,IAAA,IAAA,IAAA,GAAA,GAAA,GAAA,GAAA,GAAA,GACE,WAAA,EACA,cAAA,MAGA,YAAA,IACA,YAAA,IAIF,IAAA,GEwMQ,UAAA,uBAlKJ,0BFtCJ,IAAA,GE+MQ,UAAA,QF1MR,IAAA,GEmMQ,UAAA,sBAlKJ,0BFjCJ,IAAA,GE0MQ,UAAA,MFrMR,IAAA,GE8LQ,UAAA,oBAlKJ,0BF5BJ,IAAA,GEqMQ,UAAA,SFhMR,IAAA,GEyLQ,UAAA,sBAlKJ,0BFvBJ,IAAA,GEgMQ,UAAA,QF3LR,IAAA,GEgLM,UAAA,QF3KN,IAAA,GE2KM,UAAA,KFhKN,EACE,WAAA,EACA,cAAA,KCoBF,6BDTA,YAEE,wBAAA,UAAA,OAAA,gBAAA,UAAA,OACA,OAAA,KACA,iCAAA,KAAA,yBAAA,KAMF,QACE,cAAA,KACA,WAAA,OACA,YAAA,QAMF,GCKA,GDHE,aAAA,KCSF,GDNA,GCKA,GDFE,WAAA,EACA,cAAA,KAGF,MCMA,MACA,MAFA,MDDE,cAAA,EAGF,GACE,YAAA,IAKF,GACE,cAAA,MACA,YAAA,EAMF,WACE,OAAA,EAAA,EAAA,KAQF,ECLA,ODOE,YAAA,OAQF,OAAA,ME4EM,UAAA,OFrEN,MAAA,KACE,QAAA,KACA,iBAAA,QASF,ICnBA,IDqBE,SAAA,SEwDI,UAAA,MFtDJ,YAAA,EACA,eAAA,SAGF,IAAM,OAAA,OACN,IAAM,IAAA,MAKN,EACE,MAAA,QACA,gBAAA,UAEA,QACE,MAAA,QAWF,2BAAA,iCAEE,MAAA,QACA,gBAAA,KCvBJ,KACA,ID6BA,IC5BA,KDgCE,YAAA,yBEcI,UAAA,IFZJ,UAAA,IACA,aAAA,cAOF,IACE,QAAA,MACA,WAAA,EACA,cAAA,KACA,SAAA,KEAI,UAAA,OFKJ,SELI,UAAA,QFOF,MAAA,QACA,WAAA,OAIJ,KEZM,UAAA,OFcJ,MAAA,QACA,UAAA,WAGA,OACE,MAAA,QAIJ,IACE,QAAA,MAAA,MExBI,UAAA,OF0BJ,MAAA,KACA,iBAAA,QG7SE,cAAA,MHgTF,QACE,QAAA,EE/BE,UAAA,IFiCF,YAAA,IASJ,OACE,OAAA,EAAA,EAAA,KAMF,IChDA,IDkDE,eAAA,OAQF,MACE,aAAA,OACA,gBAAA,SAGF,QACE,YAAA,MACA,eAAA,MACA,MAAA,QACA,WAAA,KAOF,GAEE,WAAA,QACA,WAAA,qBCvDF,MAGA,GAFA,MAGA,GDsDA,MCxDA,GD8DE,aAAA,QACA,aAAA,MACA,aAAA,EAQF,MACE,QAAA,aAMF,OAEE,cAAA,EAQF,iCACE,QAAA,ECrEF,OD0EA,MCxEA,SADA,OAEA,SD4EE,OAAA,EACA,YAAA,QE9HI,UAAA,QFgIJ,YAAA,QAIF,OC3EA,OD6EE,eAAA,KAKF,cACE,OAAA,QAGF,OAGE,UAAA,OAGA,gBACE,QAAA,EAOJ,0CACE,QAAA,KCjFF,cACA,aACA,cDuFA,OAIE,mBAAA,OCvFF,6BACA,4BACA,6BDwFI,sBACE,OAAA,QAON,mBACE,QAAA,EACA,aAAA,KAKF,SACE,OAAA,SAUF,SACE,UAAA,EACA,QAAA,EACA,OAAA,EACA,OAAA,EAQF,OACE,MAAA,KACA,MAAA,KACA,QAAA,EACA,cAAA,MEnNM,UAAA,sBFsNN,YAAA,QExXE,0BFiXJ,OExMQ,UAAA,QFiNN,SACE,MAAA,KC/FJ,kCDsGA,uCCvGA,mCADA,+BAGA,oCAJA,6BAKA,mCD2GE,QAAA,EAGF,4BACE,OAAA,KASF,cACE,eAAA,KACA,mBAAA,UAmBF,4BACE,mBAAA,KAKF,+BACE,QAAA,EAMF,6BACE,KAAA,QADF,uBACE,KAAA,QAMF,6BACE,KAAA,QACA,mBAAA,OAKF,OACE,QAAA,aAKF,OACE,OAAA,EAOF,QACE,QAAA,UACA,OAAA,QAQF,SACE,eAAA,SAQF,SACE,QAAA,eInlBF,MFyQM,UAAA,QEvQJ,YAAA,IAKA,WFsQM,UAAA,uBEpQJ,YAAA,IACA,YAAA,IFiGA,0BEpGF,WF6QM,UAAA,ME7QN,WFsQM,UAAA,uBEpQJ,YAAA,IACA,YAAA,IFiGA,0BEpGF,WF6QM,UAAA,QE7QN,WFsQM,UAAA,uBEpQJ,YAAA,IACA,YAAA,IFiGA,0BEpGF,WF6QM,UAAA,ME7QN,WFsQM,UAAA,uBEpQJ,YAAA,IACA,YAAA,IFiGA,0BEpGF,WF6QM,UAAA,QE7QN,WFsQM,UAAA,uBEpQJ,YAAA,IACA,YAAA,IFiGA,0BEpGF,WF6QM,UAAA,ME7QN,WFsQM,UAAA,uBEpQJ,YAAA,IACA,YAAA,IFiGA,0BEpGF,WF6QM,UAAA,QEvPR,eCrDE,aAAA,EACA,WAAA,KDyDF,aC1DE,aAAA,EACA,WAAA,KD4DF,kBACE,QAAA,aAEA,mCACE,aAAA,MAUJ,YFsNM,UAAA,OEpNJ,eAAA,UAIF,YACE,cAAA,KF+MI,UAAA,QE5MJ,wBACE,cAAA,EAIJ,mBACE,WAAA,MACA,cAAA,KFqMI,UAAA,OEnMJ,MAAA,QAEA,2BACE,QAAA,KE9FJ,WCIE,UAAA,KAGA,OAAA,KDDF,eACE,QAAA,OACA,iBAAA,KACA,OAAA,IAAA,MAAA,QHGE,cAAA,OIRF,UAAA,KAGA,OA
OI,cAAA,iBAPJ,UAOI,cAAA,eAPJ,aAOI,cAAA,eAPJ,UAOI,YAAA,YAPJ,UAOI,YAAA,iBAPJ,UAOI,YAAA,gBAPJ,UAOI,YAAA,eAPJ,UAOI,YAAA,iBAPJ,UAOI,YAAA,eAPJ,aAOI,YAAA,eAPJ,SAOI,QAAA,YAPJ,SAOI,QAAA,iBAPJ,SAOI,QAAA,gBAPJ,SAOI,QAAA,eAPJ,SAOI,QAAA,iBAPJ,SAOI,QAAA,eAPJ,UAOI,cAAA,YAAA,aAAA,YAPJ,UAOI,cAAA,iBAAA,aAAA,iBAPJ,UAOI,cAAA,gBAAA,aAAA,gBAPJ,UAOI,cAAA,eAAA,aAAA,eAPJ,UAOI,cAAA,iBAAA,aAAA,iBAPJ,UAOI,cAAA,eAAA,aAAA,eAPJ,UAOI,YAAA,YAAA,eAAA,YAPJ,UAOI,YAAA,iBAAA,eAAA,iBAPJ,UAOI,YAAA,gBAAA,eAAA,gBAPJ,UAOI,YAAA,eAAA,eAAA,eAPJ,UAOI,YAAA,iBAAA,eAAA,iBAPJ,UAOI,YAAA,eAAA,eAAA,eAPJ,UAOI,YAAA,YAPJ,UAOI,YAAA,iBAPJ,UAOI,YAAA,gBAPJ,UAOI,YAAA,eAPJ,UAOI,YAAA,iBAPJ,UAOI,YAAA,eAPJ,UAOI,cAAA,YAPJ,UAOI,cAAA,iBAPJ,UAOI,cAAA,gBAPJ,UAOI,cAAA,eAPJ,UAOI,cAAA,iBAPJ,UAOI,cAAA,eAPJ,UAOI,eAAA,YAPJ,UAOI,eAAA,iBAPJ,UAOI,eAAA,gBAPJ,UAOI,eAAA,eAPJ,UAOI,eAAA,iBAPJ,UAOI,eAAA,eAPJ,UAOI,aAAA,YAPJ,UAOI,aAAA,iBAPJ,UAOI,aAAA,gBAPJ,UAOI,aAAA,eAPJ,UAOI,aAAA,iBAPJ,UAOI,aAAA,eAPJ,gBAOI,WAAA,eAPJ,cAOI,WAAA,gBAPJ,iBAOI,WAAA,kBCnDZ,0BD4CQ,MAOI,UAAA,iBAPJ,MAOI,UAAA,eAPJ,MAOI,UAAA,kBAPJ,MAOI,UAAA,kBChCZ,aDyBQ,gBAOI,QAAA,iBAPJ,sBAOI,QAAA,uBAPJ,eAOI,QAAA,gBAPJ,cAOI,QAAA,eAPJ,eAOI,QAAA,gBAPJ,mBAOI,QAAA,oBAPJ,oBAOI,QAAA,qBAPJ,cAOI,QAAA,eAPJ,qBAOI,QAAA,sBAPJ,cAOI,QAAA","sourcesContent":["/*!\n * Bootstrap v5.1.3 (https://getbootstrap.com/)\n * Copyright 2011-2021 The Bootstrap Authors\n * Copyright 2011-2021 Twitter, Inc.\n * Licensed under MIT (https://github.com/twbs/bootstrap/blob/main/LICENSE)\n */\n\n// scss-docs-start import-stack\n// Configuration\n@import \"functions\";\n@import \"variables\";\n@import \"mixins\";\n@import \"utilities\";\n\n// Layout & components\n@import \"root\";\n@import \"reboot\";\n@import \"type\";\n@import \"images\";\n@import \"containers\";\n@import \"grid\";\n@import \"tables\";\n@import \"forms\";\n@import \"buttons\";\n@import \"transitions\";\n@import \"dropdown\";\n@import \"button-group\";\n@import \"nav\";\n@import \"navbar\";\n@import \"card\";\n@import \"accordion\";\n@import \"breadcrumb\";\n@import \"pagination\";\n@import \"badge\";\n@import \"alert\";\n@import \"progress\";\n@import \"list-group\";\n@import \"close\";\n@import \"toasts\";\n@import \"modal\";\n@import \"tooltip\";\n@import \"popover\";\n@import \"carousel\";\n@import \"spinners\";\n@import \"offcanvas\";\n@import \"placeholders\";\n\n// Helpers\n@import \"helpers\";\n\n// Utilities\n@import \"utilities/api\";\n// scss-docs-end import-stack\n",":root {\n // Note: Custom variable values only support SassScript inside `#{}`.\n\n // Colors\n //\n // Generate palettes for full colors, grays, and theme colors.\n\n @each $color, $value in $colors {\n --#{$variable-prefix}#{$color}: #{$value};\n }\n\n @each $color, $value in $grays {\n --#{$variable-prefix}gray-#{$color}: #{$value};\n }\n\n @each $color, $value in $theme-colors {\n --#{$variable-prefix}#{$color}: #{$value};\n }\n\n @each $color, $value in $theme-colors-rgb {\n --#{$variable-prefix}#{$color}-rgb: #{$value};\n }\n\n --#{$variable-prefix}white-rgb: #{to-rgb($white)};\n --#{$variable-prefix}black-rgb: #{to-rgb($black)};\n --#{$variable-prefix}body-color-rgb: #{to-rgb($body-color)};\n --#{$variable-prefix}body-bg-rgb: #{to-rgb($body-bg)};\n\n // Fonts\n\n // Note: Use `inspect` for lists so that quoted items keep the quotes.\n // See https://github.com/sass/sass/issues/2383#issuecomment-336349172\n --#{$variable-prefix}font-sans-serif: #{inspect($font-family-sans-serif)};\n --#{$variable-prefix}font-monospace: #{inspect($font-family-monospace)};\n --#{$variable-prefix}gradient: #{$gradient};\n\n // 
Root and body\n // stylelint-disable custom-property-empty-line-before\n // scss-docs-start root-body-variables\n @if $font-size-root != null {\n --#{$variable-prefix}root-font-size: #{$font-size-root};\n }\n --#{$variable-prefix}body-font-family: #{$font-family-base};\n --#{$variable-prefix}body-font-size: #{$font-size-base};\n --#{$variable-prefix}body-font-weight: #{$font-weight-base};\n --#{$variable-prefix}body-line-height: #{$line-height-base};\n --#{$variable-prefix}body-color: #{$body-color};\n @if $body-text-align != null {\n --#{$variable-prefix}body-text-align: #{$body-text-align};\n }\n --#{$variable-prefix}body-bg: #{$body-bg};\n // scss-docs-end root-body-variables\n // stylelint-enable custom-property-empty-line-before\n}\n","// stylelint-disable declaration-no-important, selector-no-qualifying-type, property-no-vendor-prefix\n\n\n// Reboot\n//\n// Normalization of HTML elements, manually forked from Normalize.css to remove\n// styles targeting irrelevant browsers while applying new styles.\n//\n// Normalize is licensed MIT. https://github.com/necolas/normalize.css\n\n\n// Document\n//\n// Change from `box-sizing: content-box` so that `width` is not affected by `padding` or `border`.\n\n*,\n*::before,\n*::after {\n box-sizing: border-box;\n}\n\n\n// Root\n//\n// Ability to the value of the root font sizes, affecting the value of `rem`.\n// null by default, thus nothing is generated.\n\n:root {\n @if $font-size-root != null {\n font-size: var(--#{$variable-prefix}root-font-size);\n }\n\n @if $enable-smooth-scroll {\n @media (prefers-reduced-motion: no-preference) {\n scroll-behavior: smooth;\n }\n }\n}\n\n\n// Body\n//\n// 1. Remove the margin in all browsers.\n// 2. As a best practice, apply a default `background-color`.\n// 3. Prevent adjustments of font size after orientation changes in iOS.\n// 4. Change the default tap highlight to be completely transparent in iOS.\n\n// scss-docs-start reboot-body-rules\nbody {\n margin: 0; // 1\n font-family: var(--#{$variable-prefix}body-font-family);\n @include font-size(var(--#{$variable-prefix}body-font-size));\n font-weight: var(--#{$variable-prefix}body-font-weight);\n line-height: var(--#{$variable-prefix}body-line-height);\n color: var(--#{$variable-prefix}body-color);\n text-align: var(--#{$variable-prefix}body-text-align);\n background-color: var(--#{$variable-prefix}body-bg); // 2\n -webkit-text-size-adjust: 100%; // 3\n -webkit-tap-highlight-color: rgba($black, 0); // 4\n}\n// scss-docs-end reboot-body-rules\n\n\n// Content grouping\n//\n// 1. Reset Firefox's gray color\n// 2. Set correct height and prevent the `size` attribute to make the `hr` look like an input field\n\nhr {\n margin: $hr-margin-y 0;\n color: $hr-color; // 1\n background-color: currentColor;\n border: 0;\n opacity: $hr-opacity;\n}\n\nhr:not([size]) {\n height: $hr-height; // 2\n}\n\n\n// Typography\n//\n// 1. Remove top margins from headings\n// By default, `

<h1>`-`<h6>
` all receive top and bottom margins. We nuke the top\n// margin for easier control within type scales as it avoids margin collapsing.\n\n%heading {\n margin-top: 0; // 1\n margin-bottom: $headings-margin-bottom;\n font-family: $headings-font-family;\n font-style: $headings-font-style;\n font-weight: $headings-font-weight;\n line-height: $headings-line-height;\n color: $headings-color;\n}\n\nh1 {\n @extend %heading;\n @include font-size($h1-font-size);\n}\n\nh2 {\n @extend %heading;\n @include font-size($h2-font-size);\n}\n\nh3 {\n @extend %heading;\n @include font-size($h3-font-size);\n}\n\nh4 {\n @extend %heading;\n @include font-size($h4-font-size);\n}\n\nh5 {\n @extend %heading;\n @include font-size($h5-font-size);\n}\n\nh6 {\n @extend %heading;\n @include font-size($h6-font-size);\n}\n\n\n// Reset margins on paragraphs\n//\n// Similarly, the top margin on `
<p>
`s get reset. However, we also reset the\n// bottom margin to use `rem` units instead of `em`.\n\np {\n margin-top: 0;\n margin-bottom: $paragraph-margin-bottom;\n}\n\n\n// Abbreviations\n//\n// 1. Duplicate behavior to the data-bs-* attribute for our tooltip plugin\n// 2. Add the correct text decoration in Chrome, Edge, Opera, and Safari.\n// 3. Add explicit cursor to indicate changed behavior.\n// 4. Prevent the text-decoration to be skipped.\n\nabbr[title],\nabbr[data-bs-original-title] { // 1\n text-decoration: underline dotted; // 2\n cursor: help; // 3\n text-decoration-skip-ink: none; // 4\n}\n\n\n// Address\n\naddress {\n margin-bottom: 1rem;\n font-style: normal;\n line-height: inherit;\n}\n\n\n// Lists\n\nol,\nul {\n padding-left: 2rem;\n}\n\nol,\nul,\ndl {\n margin-top: 0;\n margin-bottom: 1rem;\n}\n\nol ol,\nul ul,\nol ul,\nul ol {\n margin-bottom: 0;\n}\n\ndt {\n font-weight: $dt-font-weight;\n}\n\n// 1. Undo browser default\n\ndd {\n margin-bottom: .5rem;\n margin-left: 0; // 1\n}\n\n\n// Blockquote\n\nblockquote {\n margin: 0 0 1rem;\n}\n\n\n// Strong\n//\n// Add the correct font weight in Chrome, Edge, and Safari\n\nb,\nstrong {\n font-weight: $font-weight-bolder;\n}\n\n\n// Small\n//\n// Add the correct font size in all browsers\n\nsmall {\n @include font-size($small-font-size);\n}\n\n\n// Mark\n\nmark {\n padding: $mark-padding;\n background-color: $mark-bg;\n}\n\n\n// Sub and Sup\n//\n// Prevent `sub` and `sup` elements from affecting the line height in\n// all browsers.\n\nsub,\nsup {\n position: relative;\n @include font-size($sub-sup-font-size);\n line-height: 0;\n vertical-align: baseline;\n}\n\nsub { bottom: -.25em; }\nsup { top: -.5em; }\n\n\n// Links\n\na {\n color: $link-color;\n text-decoration: $link-decoration;\n\n &:hover {\n color: $link-hover-color;\n text-decoration: $link-hover-decoration;\n }\n}\n\n// And undo these styles for placeholder links/named anchors (without href).\n// It would be more straightforward to just use a[href] in previous block, but that\n// causes specificity issues in many other styles that are too complex to fix.\n// See https://github.com/twbs/bootstrap/issues/19402\n\na:not([href]):not([class]) {\n &,\n &:hover {\n color: inherit;\n text-decoration: none;\n }\n}\n\n\n// Code\n\npre,\ncode,\nkbd,\nsamp {\n font-family: $font-family-code;\n @include font-size(1em); // Correct the odd `em` font sizing in all browsers.\n direction: ltr #{\"/* rtl:ignore */\"};\n unicode-bidi: bidi-override;\n}\n\n// 1. Remove browser default top margin\n// 2. Reset browser default of `1em` to use `rem`s\n// 3. 
Don't allow content to break outside\n\npre {\n display: block;\n margin-top: 0; // 1\n margin-bottom: 1rem; // 2\n overflow: auto; // 3\n @include font-size($code-font-size);\n color: $pre-color;\n\n // Account for some code outputs that place code tags in pre tags\n code {\n @include font-size(inherit);\n color: inherit;\n word-break: normal;\n }\n}\n\ncode {\n @include font-size($code-font-size);\n color: $code-color;\n word-wrap: break-word;\n\n // Streamline the style when inside anchors to avoid broken underline and more\n a > & {\n color: inherit;\n }\n}\n\nkbd {\n padding: $kbd-padding-y $kbd-padding-x;\n @include font-size($kbd-font-size);\n color: $kbd-color;\n background-color: $kbd-bg;\n @include border-radius($border-radius-sm);\n\n kbd {\n padding: 0;\n @include font-size(1em);\n font-weight: $nested-kbd-font-weight;\n }\n}\n\n\n// Figures\n//\n// Apply a consistent margin strategy (matches our type styles).\n\nfigure {\n margin: 0 0 1rem;\n}\n\n\n// Images and content\n\nimg,\nsvg {\n vertical-align: middle;\n}\n\n\n// Tables\n//\n// Prevent double borders\n\ntable {\n caption-side: bottom;\n border-collapse: collapse;\n}\n\ncaption {\n padding-top: $table-cell-padding-y;\n padding-bottom: $table-cell-padding-y;\n color: $table-caption-color;\n text-align: left;\n}\n\n// 1. Removes font-weight bold by inheriting\n// 2. Matches default `` alignment by inheriting `text-align`.\n// 3. Fix alignment for Safari\n\nth {\n font-weight: $table-th-font-weight; // 1\n text-align: inherit; // 2\n text-align: -webkit-match-parent; // 3\n}\n\nthead,\ntbody,\ntfoot,\ntr,\ntd,\nth {\n border-color: inherit;\n border-style: solid;\n border-width: 0;\n}\n\n\n// Forms\n//\n// 1. Allow labels to use `margin` for spacing.\n\nlabel {\n display: inline-block; // 1\n}\n\n// Remove the default `border-radius` that macOS Chrome adds.\n// See https://github.com/twbs/bootstrap/issues/24093\n\nbutton {\n // stylelint-disable-next-line property-disallowed-list\n border-radius: 0;\n}\n\n// Explicitly remove focus outline in Chromium when it shouldn't be\n// visible (e.g. as result of mouse click or touch tap). It already\n// should be doing this automatically, but seems to currently be\n// confused and applies its very visible two-tone outline anyway.\n\nbutton:focus:not(:focus-visible) {\n outline: 0;\n}\n\n// 1. Remove the margin in Firefox and Safari\n\ninput,\nbutton,\nselect,\noptgroup,\ntextarea {\n margin: 0; // 1\n font-family: inherit;\n @include font-size(inherit);\n line-height: inherit;\n}\n\n// Remove the inheritance of text transform in Firefox\nbutton,\nselect {\n text-transform: none;\n}\n// Set the cursor for non-` - - - -{% endfor %} {% endif %} - - -

[deleted reconFTW web-UI template — markup not recoverable; surviving section headings: RECON LIST, SETTINGS, API KEYS, RECONFTW CONFIG, AMASS TOOL, GITHUB TOKENS]
- - - - - - - - - - - - - - - - - -{% endblock %} diff --git a/web/templates/base.html b/web/templates/base.html deleted file mode 100644 index 4dd3e1db..00000000 --- a/web/templates/base.html +++ /dev/null @@ -1,234 +0,0 @@ -{% load static %} - - - - - - - - - {% block title %}{% endblock %} - - - - - - - - - - - - - - - - - - - - - - - -
- - - -
- - - - {% block content %}{% endblock %} - {% include 'scans_all_js.html' %} - diff --git a/web/templates/edit_profile.html b/web/templates/edit_profile.html deleted file mode 100644 index 9d0966c1..00000000 --- a/web/templates/edit_profile.html +++ /dev/null @@ -1,1151 +0,0 @@ -{% extends 'base.html' %} -{% block title %}reconFTW - Edits Profile{% endblock %} -{% block content %} -{% load static %} - - - - - -
[deleted web/templates/edit_profile.html — markup not recoverable; surviving section headings: EDIT PROFILE, ACCOUNT DETAILS, PROFILE IMAGE, CREDENTIAL INFORMATION, ACCOUNT INFORMATION, CHANGE PASSWORD]
- - - - - -{% endblock %} \ No newline at end of file diff --git a/web/templates/scans_web.html b/web/templates/scans_web.html deleted file mode 100644 index 1ffd489d..00000000 --- a/web/templates/scans_web.html +++ /dev/null @@ -1,1889 +0,0 @@ -{% extends 'base.html' %} -{% load static %} -{% block title %}reconFTW - Results{% endblock %} -{% block content %} - - - - - - -
[deleted web/templates/scans_web.html — markup not recoverable; surviving headings: FINALIZED / SCANNING, SCAN MODE: WEB, {{ title_domain_target }}, CLOUD ASSETS (S3 BUCKETS, APP FOUND, STORAGE ACCOUNT, AZURE DNS NAME, GOOGLE BUCKET), Nuclei Output (INFO, LOW, MEDIUM, HIGH, CRITICAL), Vulnerabilities (OPEN REDIRECT, 4XX BYPASS, SSRF, CRLF, XSS, LFI, SQLI, SSTI), GF Output, Fuzzing Paths, Data Extracted, Javascript, Dictionaries (PARAMS, VALUES, WORDS, PASSWORD)]
- - - - - -{% endblock %} \ No newline at end of file diff --git a/web/web/__init__.py b/web/web/__init__.py deleted file mode 100644 index 9e0d95fd..00000000 --- a/web/web/__init__.py +++ /dev/null @@ -1,3 +0,0 @@ -from .celery import app as celery_app - -__all__ = ('celery_app',) \ No newline at end of file diff --git a/web/web/__pycache__/__init__.cpython-310.pyc b/web/web/__pycache__/__init__.cpython-310.pyc deleted file mode 100644 index 333b930775afaf53f94fcb69addcbc52b598edb8..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 195 zcmd1j<>g`kf`>eRlO2HcV-N=!FabFZKwK;WBvKes7;_kM8KamO8B&;{m{XX888lg5 zG6JPE8E-Ks78C#}uH@95)S}9GAkR;ePO2Tq!eSO6!Nb78 F1OV%nEjR!G diff --git a/web/web/__pycache__/__init__.cpython-39.pyc b/web/web/__pycache__/__init__.cpython-39.pyc deleted file mode 100644 index 0eebd5cf750b5cc835604c3cf53c6938217f9e6c..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 232 zcmY*TI}UK{DVg+Ay!io#T3`9;Hk%%@={;A6=gdM~B4Xpk1GQ0<{!!kQf=X_5`QtD@JQ`wx2T4;8xC zp85-gLMIkV2j&fLhWFlxMx#!!-u>_B4d+Km{#OOb6}IXlaKdRyX63X}nprc85+I4a zuW*}J4kfd=^Gs(BIEl+$Ugh3Pxpnr{1EHv%kaq18oye$7joX71m}#nzuVX24Q%5O>1zV-s*sLf_t2B(4;X;Zm6!QckZCY%l1&>uE)*wU8 z)RTH8vWZ4V_&V!KA#_(lsnwe<{&(!(#7?m_-bE((DuqjmBIP4^#HUT$vzpYWKNf#$ A>i_@% diff --git a/web/web/__pycache__/celery.cpython-39.pyc b/web/web/__pycache__/celery.cpython-39.pyc deleted file mode 100644 index 9b21480b8c67f4aa09c01f4ae30e7e221491f937..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 417 zcmYjN!A`*8JIj}rX1ngZ9#g`)yR6vxObvlin-^E8YA zMf$Nuby_=>NTbFR_6#s0lbW?089P1~9)$|PY8efVDjeEE-2M`0_7w691&3HKR zXYPD{J08yb`-$@~au2N?EcybZ4AYhHbcGIQG$QGW^#hhJFaBEQz#X~MM@894Ndh7x z0radwW1=vFN`q@kv#3M54LM7TrW&QNBzY_gon)EUEeXSwzvL|O*&>2KdR>xBMne&> zEpT5Fu@P^Gm+z{=_t0Lm1nfBHA;h)=Vk22*PeB0T!P=fP7FSK5=38QOC?(|1R%cZH cyI4Ql7uIFMXdc5AR@|Z#uy9N7Xcq3^ADt$6^8f$< diff --git a/web/web/__pycache__/settings.cpython-310.pyc b/web/web/__pycache__/settings.cpython-310.pyc deleted file mode 100644 index 06402239e4fc4c73aae042c91c796db4a774be08..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 2651 zcma)8TXWM!6qXeEN*qEGLLgjBxEdx(DII7t9g1Raf(P01STYyAXcX_p*+iC%RwhK# zKJ^du2k?;a*1xo`ec})Fq0>T-WIMQqnPQFha?bhAp8a&rN&G?~FTn5mFaNZ)B|-Qj z-i-g!uz3tGD~f`E1uVLPHJC+9guUcS-6?BI6vz~je-)2X$HHEEFKuOrc$CFc$D)

i;V9k&joW_~Xly$lz;OslmnuR$B^BJ7C=I|*Z?9Cr7kPMza zmO3IX;2F@LekW29&%TvAqIDLZu@>>1bq>#4OLzgF1>41C0iSy-QPDb&m+*P8UBVZV zaXA?;Qn4dj7w{!(8DF+8;(~Pv-plxkbp_DHWdZya@m1?8Ua?kSzJ{+^*WtZ^uUj|C zMe7#6Vco_ztrEV4Z-d8Mcmc-JTgkeEMMEgvc^`9EFNs`kIN|R5BJAD+aYA%)*Jq*U zbV+fwTKu`#?uW%yTwH-?b>|+>xqjPmW0NR%@7ssUH}Kpm<@p@mbG(jkw~0%FLwlPz zVYD!iDQ({i1G=p^xJ$igep1>GccbMAVGJQ&7#{Wri_T3b8DWh2-sg%gD4Y%;E=(dq zC#3Bi>JrAYy};ii?U3;lYdap}8K+10$ssFcqSYa;OEGrIffEpgk#;|z;h|y-7qt^z z^xfn!!@!g=yfKiCp-P`0()PPu-&4xT^gpI7*ip(bJfq}b%=F9+UWe>=<(hpn~tqnbd zq}y|ykSN;#B_3wVP*F)T${ea0&y6gZ$>)*;c@x_3G20NFAFz@f-JW#$xs`&+cE1zd z`3lQ`yy_Fk^SeXalQfQ*{etJ1@#y}9@d=w>0E~1cnP)cC@>5i=aJf=#8a%5u8g{bE z=nb=~*Ntc)ChmZBsN(c`z;yB%sUZ9&(%Bh7;2BJIoW2{L4eX{k66FS=tBgRM+T^im7K490!#AV6`?LbZYo0Pbiryy1Pgy(|3XD3~) zBt;9!LJ1wV&yXq(t$%noOtuG+ZCo?_x3*pl3K!OzE zS=){qscrL2K$!2oCOqAPPU(d_yP+B=&ReKeA8Og&C2x4%K;1 zbW_(&yVcandVQ12aiCn@^Qp&kCaN_w)kFrLeQs=3W2{y!t8q2)<%(*mcB`T3YQ?Up zFYFqs>CKn6QMC|XDy!uu$o_PS4L8v16@%wuSbRr=FR3l_iQP~Q>b>vlJRPr03%D^MpmKZxK08=%RyAY{YCosx zP@jo%Zm?`3AQzRB8}oUP=`GX#kMK+xX$Wqz(bS)!W`aFC6g832(oB2QX-1Z)u5M_^ zZkc6%dT1GkoJ2f=ZXngP4Pd3x0`GjTf;QFI7m%Ran^mM$cwux3goKRWg+PU5Nrt~x z)&k!T*8&1Pa3?%igHD#bjD#WN9aDOTJo_W0H6;I|_kGL`lp_(ntAk`J=ojPsyT`mBh3te}MftkjrE5pYps6czDw36zFAHgl90uD=9sd I7V{te1tbf9`Tzg` diff --git a/web/web/__pycache__/settings.cpython-39.pyc b/web/web/__pycache__/settings.cpython-39.pyc deleted file mode 100644 index d055b303286556cbff52a9ab6a40200be84d8740..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 2276 zcma)7OLN;c5GHBrVfhiqcAid}cc+TSR1;^MHq)k|CCbr+7Cj;rH@I*p0hXcgp%7GL z^U_2AMh;1C{YyCX(o_CG51mfkB_%tmoSB*&ES9_bEf$N#(wUnp2=M%J`(5kGj3E4x ze2hO?_;?0`{SE{RSab!H%#tg55|TuLNM!0)@xA;(=w`YZl*Lmhhh>z8N-w4KG8H>ATEth-626L-@eH~E<04)`mjHcjMF4-7@pW_se}%5X zyn=6_YcQ_ko9HV*+`zZcO)Q#1@%AU!qfjr3T;6e_{dg019lsN-QJ;}^KO}3EZS0Zm z{^5^bug7=p-q~MWf7=?6*X=cL@7^2gu7=UW=-XSQ^YGrr!CidtsQ>tr2p+k3znG7g zhu)NS;71{CDGv6iA1_Tx`>^oJgfM{+KZ*`}gvFO8l#DP&1OK$b1BKH8#MMbe3h{b_yj>nUWf%tD)l&qn9kk#&G!oX(|2CWpQcbAtGMSbt0m)07`t! 
zl#!y6mY_9KGd_K6$t<*(*2O_&!x^?AxFBRjIsRtS$8NZ#}bi{jOh z?Md|}nf-$2B;)b=gz-r>zW_Ma6=j~=RLd{4dIiaqTEpadb!W#;xty_M)r`6sFDHo` zuntw6UJo*z9wX0Tvgh>O=%mnb>vJC28<$38JuZN7#1|!=)#_Wdy2j~ zoT}@_u2!+18>VIQ`C8qyKm-CPCX*MoYn6(w?Wzrp&o&IhvYQRPY}BhrB+urb^C2wan zJOM&N;}PqHY`KK9*f$zjBO@?jAuE$;Ov6)ktrrV_4kwglmsMv#3Ul|qYG~GUpEDJ z2HDJHbDNz|hfE*vg9Go)fajL4fUhmT0DftC0equ*8N8s2KErv@(ox=_k~HB#fEeNf@AtHpq8Rcsu8y3EArN)dft{Fs^niDi92o_F z@|VZ4d39*M(tEK!@3|N69Gpf+;SEUzw zQ}IV#_%HBrARx?IDgh75XUr7J1>aIq-ihys-@??U`SrTseJ=}6^^)>boami-ba_{Y hdlkKFdtJ=;7+w)?pqZfzo7)-tg}cCu(J^iG^AF%={A~aL diff --git a/web/web/__pycache__/urls.cpython-39.pyc b/web/web/__pycache__/urls.cpython-39.pyc deleted file mode 100644 index 0e22b011bae876575a2c843a424b1aa4adc5c27f..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 685 zcmZuuyKdVs6eabRY{hQUrr+R2LYGcOk)dPJr9cM_HE5BMWssCXQgy2}bH)Psg)IFb zuATY|?b1ueMlys1FV8u=I=Yu6WmzB?KVRN$z6J>WcI4^EfLy`SzaUUVF-HP7SOiUg z9S?XYq9*n@U}B5(3zP60X55zihl!!%~`ZoVsNLepDJGn(Iqpcc=nGf>YU zckXk`W9}IX=;8+71n}Jw@C%2#xLMHi&uDe=2Wm#Ez(#vwwl?i$%SXyq*v8*_cF+J4 zZHVbxK$&L7^s>_yO$8siURGNpIGmJ*i9IKVttAo6#{LlW$k8eBw4PGfhtbs20zk*; z8i;qWE(6Gv@71tltoRXbNvazqi<{#Z9e1VIyDbe25 z7N9{ZUYtScaIc0bsqc|`1tvcHt?TU|SS`N4ePc}hVQ5FeBy_Eq`ZMWhzmr zNu~*%9h26bX(wTjEW1DUR9EfYr<8T>*mPf|6E@htdCa~|h9h30o$HFv(eX80^2Xs3 z%8+N-C~)}Rpa;*x0{Hf^G)7(;;L>x+YgAVT-WXVc;VbF%retLeiqB@UX^bvq&}e5f z!uv!FH9Fwh5?Q?|aI^%jJ)dg>uek@v79lkLoe&C3pR1*`75>*$VlBXmxA~D+;5wjC z&=|a~!3MGRl@{4=8U-53{B=*3rv_l&+YE&?nS0==#J8TckwF}&Y qKd*#C48lP>?ywd_xOKwIo9lS+eml$$v}xwszAig>(odhIS^5u(B%m7r diff --git a/web/web/__pycache__/wsgi.cpython-39.pyc b/web/web/__pycache__/wsgi.cpython-39.pyc deleted file mode 100644 index 5a06c0e528e8240231552d74da5c3dc6c2384198..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 537 zcmYk3&2AGh5XZev+JvP=9IBpujRfl=Ar1&3kW!+ck{(iol*`6hPd1MHv06Jx+e;5T z0}@9#_IbGZ%86I#sk4cYGSZAS9((>jkHztEO3;3s{G8n-g#2xieQRU#0o|WrkYqw+ zBB@R$39c5Fh2qeq)uLIeZ2frz1t?y)VDCwB=4sZXY-^PO>E1aFHlFePsHZWsV z?Uqo4EKP@j!FA<4cor7Gc87)5Vy1x!&jd40t~9*Vum;W6qE_3I6$T`mPDNE|RfwRR zolddeCsZ)52Bs{EnQehNi_Mj1bEV-0^8o20gv!6=T)M(%aw%--{xcQMaIpM!cE%TO z6C9UNX?J752EOs7;^{ve1sU~{cv3>ho4tGwBcAp@e;f@i^YQuB)o?Tz=U*=SUoXyY zkMQN0hY*x4eXkQcd5-dV9;x&3`3@p0kUeW!))Ylz2kcta&c+lkN|=k44)N)p+C8b8 z58Q?aj>HDIxqkC11m1VWs-gD0hGza@!;xU;D3Db*vHNZ}-S_zQ#CI{YpLFRFeMTPv D33sKL diff --git a/web/web/asgi.py b/web/web/asgi.py deleted file mode 100644 index aff4a644..00000000 --- a/web/web/asgi.py +++ /dev/null @@ -1,16 +0,0 @@ -""" -ASGI config for web project. - -It exposes the ASGI callable as a module-level variable named ``application``. 
- -For more information on this file, see -https://docs.djangoproject.com/en/4.0/howto/deployment/asgi/ -""" - -import os - -from django.core.asgi import get_asgi_application - -os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'web.settings') - -application = get_asgi_application() diff --git a/web/web/celery.py b/web/web/celery.py deleted file mode 100644 index 31256dc4..00000000 --- a/web/web/celery.py +++ /dev/null @@ -1,9 +0,0 @@ -import os -from celery import Celery - - -os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'web.settings') - -app = Celery('web') -app.config_from_object('django.conf:settings', namespace='CELERY') -app.autodiscover_tasks() \ No newline at end of file diff --git a/web/web/settings.py b/web/web/settings.py deleted file mode 100644 index d11a8dab..00000000 --- a/web/web/settings.py +++ /dev/null @@ -1,227 +0,0 @@ -import os, secrets -from pathlib import Path - -BASE_DIR = Path(__file__).resolve().parent.parent - -SECRET_KEY = secrets.token_hex(32) - -DEBUG = 1 - -ipAddress=os.popen('hostname -i | cut -d " " -f1').read().strip() -ALLOWED_HOSTS = [ipAddress, 'localhost', '127.0.0.1', '*'] - -# SESSION_COOKIE_SECURE = True -# CSRF_COOKIE_SECURE = True -# Application definition - -INSTALLED_APPS = [ - 'django_celery_beat', - 'django.contrib.admin', - 'django.contrib.auth', - 'django.contrib.contenttypes', - 'django.contrib.sessions', - 'django.contrib.messages', - 'django.contrib.staticfiles', - 'projects', - 'scans', - 'apikeys', -] - -MIDDLEWARE = [ - 'django.middleware.security.SecurityMiddleware', - 'django.contrib.sessions.middleware.SessionMiddleware', - 'django.middleware.common.CommonMiddleware', - 'django.middleware.csrf.CsrfViewMiddleware', - 'django.contrib.auth.middleware.AuthenticationMiddleware', - 'django.contrib.messages.middleware.MessageMiddleware', - 'django.middleware.clickjacking.XFrameOptionsMiddleware', -] - -ROOT_URLCONF = 'web.urls' - -TEMPLATES = [ - { - 'BACKEND': 'django.template.backends.django.DjangoTemplates', - 'DIRS': [os.path.join(BASE_DIR, 'templates'),], - 'APP_DIRS': True, - 'OPTIONS': { - 'context_processors': [ - 'django.template.context_processors.debug', - 'django.template.context_processors.request', - 'django.contrib.auth.context_processors.auth', - 'django.contrib.messages.context_processors.messages', - ], - # 'loaders': [ - # ('django.template.loaders.cached.Loader', [ - # 'django.template.loaders.filesystem.Loader', - # 'django.template.loaders.app_directories.Loader', - # ]), - # ], - }, - }, -] - -WSGI_APPLICATION = 'web.wsgi.application' - -# DATA_UPLOAD_MAX_MEMORY_SIZE = 2621440 -DATA_UPLOAD_MAX_MEMORY_SIZE = 26214400 -CACHE_MIDDLEWARE_SECONDS = 3600 - - - -# Database -# https://docs.djangoproject.com/en/4.0/ref/settings/#databases - -DATABASES = { - 'default': { - 'ENGINE': 'django.db.backends.sqlite3', - 'NAME': BASE_DIR / 'db.sqlite3', - } -} - -#DATABASES = { -# 'default': { -# 'ENGINE': 'django.db.backends.postgresql_psycopg2', -# 'NAME': 'web', -# 'USER': 'reconftw', -# 'PASSWORD': 'TorvaldS*12', -# 'HOST': 'localhost', -# 'PORT': '5432', -# } -#} - - -# Password validation -# https://docs.djangoproject.com/en/4.0/ref/settings/#auth-password-validators -AUTH_PASSWORD_VALIDATORS = [ - { - 'NAME': 'django.contrib.auth.password_validation.UserAttributeSimilarityValidator', - }, - { - 'NAME': 'django.contrib.auth.password_validation.MinimumLengthValidator', - }, - { - 'NAME': 'django.contrib.auth.password_validation.CommonPasswordValidator', - }, - { - 'NAME': 
'django.contrib.auth.password_validation.NumericPasswordValidator', - }, -] - - -# Internationalization -# https://docs.djangoproject.com/en/4.0/topics/i18n/ - -LANGUAGE_CODE = 'en-us' - -TIME_ZONE = 'America/Sao_Paulo' - -USE_I18N = True - -USE_TZ = True - - -# Static files (CSS, JavaScript, Images) -# https://docs.djangoproject.com/en/4.0/howto/static-files/ - -STATIC_URL = 'static/' - -#STATIC_ROOT = BASE_DIR/"static" - -STATICFILES_DIRS = [ - BASE_DIR / "static", -] - -LOGIN_URL = 'login' -LOGIN_REDIRECT_URL = 'projects:index' -LOGOUT_REDIRECT_URL = 'login' - - -# Celery Settings -CELERY_BROKER_URL = 'redis://localhost:6379' -CELERY_RESULT_BACKEND = 'redis://localhost:6379' -CELERY_ENABLE_UTC = False -CELERY_TIMEZONE = TIME_ZONE -CELERY_BEAT_SCHEDULER = 'django_celery_beat.schedulers:DatabaseScheduler' - -DEFAULT_AUTO_FIELD = 'django.db.models.BigAutoField' - -CELERY_ROUTES = { - 'scans.tasks.run_scan': {'queue': 'run_scans'}, - 'scans.tasks.new_scan_single_domain': {'queue': 'default'} -} - -# LOGGING = { -# 'version': 1, -# 'disable_existing_loggers': True, -# 'formatters': { -# 'standard': { -# 'format': '[%(levelname)s] %(asctime)-15s - %(message)s', -# 'datefmt': '%d/%b/%Y %H:%M:%S', -# }, -# 'color': { -# '()': 'colorlog.ColoredFormatter', -# 'format': -# '%(log_color)s[%(levelname)s] %(asctime)-15s - %(message)s', -# 'datefmt': '%d/%b/%Y %H:%M:%S', -# 'log_colors': { -# 'DEBUG': 'cyan', -# 'INFO': 'green', -# 'WARNING': 'yellow', -# 'ERROR': 'red', -# 'CRITICAL': 'red,bg_white', -# }, -# }, -# }, -# 'handlers': { -# 'logfile': { -# 'level': 'DEBUG', -# 'class': 'logging.FileHandler', -# 'filename': 'debug.log', -# 'formatter': 'standard', -# }, -# 'console': { -# 'level': 'DEBUG', -# 'class': 'logging.StreamHandler', -# 'formatter': 'color', -# }, -# }, -# 'loggers': { -# 'django': { -# 'handlers': ['console', 'logfile'], -# 'level': 'DEBUG', -# 'propagate': True, -# }, -# 'django.db.backends': { -# 'handlers': ['console', 'logfile'], -# # DEBUG will log all queries, so change it to WARNING. 
-# 'level': 'INFO', -# 'propagate': False, # Don't propagate to other handlers -# }, -# 'web.apikeys': { -# 'handlers': ['console', 'logfile'], -# 'level': 'DEBUG', -# 'propagate': False, -# }, -# 'web.projects': { -# 'handlers': ['console', 'logfile'], -# 'level': 'DEBUG', -# 'propagate': False, -# }, -# 'web.scans': { -# 'handlers': ['console', 'logfile'], -# 'level': 'DEBUG', -# 'propagate': False, -# }, -# 'web.schedules': { -# 'handlers': ['console', 'logfile'], -# 'level': 'DEBUG', -# 'propagate': False, -# }, -# 'web.web': { -# 'handlers': ['console', 'logfile'], -# 'level': 'DEBUG', -# 'propagate': False, -# }, -# }, -# } diff --git a/web/web/urls.py b/web/web/urls.py deleted file mode 100644 index d75e52b1..00000000 --- a/web/web/urls.py +++ /dev/null @@ -1,21 +0,0 @@ -from django.urls import path,include -from django.contrib.auth import views as auth_views -from django.conf.urls.static import static -from django.conf import settings - -from projects import views -from scans import views -from apikeys import views -from editprofile import views - -urlpatterns = [ - path('', auth_views.LoginView.as_view(template_name='login.html'), name='login'), - path('login/', auth_views.LoginView.as_view(template_name='login.html'), name='login'), - path('logout/', auth_views.LogoutView.as_view(template_name='logged_out.html'), name='logout'), - path('projects/', include('projects.urls')), - path('scans/', include('scans.urls')), - path('schedules/', include('schedules.urls')), - path('apikeys_settings/', include('apikeys.urls')), - path('edit_profile/', include('editprofile.urls')), - -] diff --git a/web/web/wsgi.py b/web/web/wsgi.py deleted file mode 100644 index 18fbc92e..00000000 --- a/web/web/wsgi.py +++ /dev/null @@ -1,16 +0,0 @@ -""" -WSGI config for web project. - -It exposes the WSGI callable as a module-level variable named ``application``. - -For more information on this file, see -https://docs.djangoproject.com/en/4.0/howto/deployment/wsgi/ -""" - -import os - -from django.core.wsgi import get_wsgi_application - -os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'web.settings') - -application = get_wsgi_application() From ca575420058387ab975d4daa048259daaad2b349 Mon Sep 17 00:00:00 2001 From: six2dez Date: Wed, 17 Jan 2024 12:58:34 +0100 Subject: [PATCH 17/17] unused vars --- install.sh | 6 ------ 1 file changed, 6 deletions(-) diff --git a/install.sh b/install.sh index 97e49876..9a78a957 100755 --- a/install.sh +++ b/install.sh @@ -249,12 +249,6 @@ function install_tools() { banner printf "\n${bgreen} reconFTW installer/updater script ${reset}\n\n" -if [[ -d $dir && -d ~/.gf && -d ~/.config/notify/ && -d ~/.config/amass/ && -d ~/.config/nuclei/ && -f "${dir}"/.github_tokens ]]; then - rftw_installed=true -else - rftw_installed=false -fi - printf "${yellow} This may take time. So, go grab a coffee! ${reset}\n\n" if [[ $(id -u | grep -o '^0$') == "0" ]]; then