From d3e6c85482c4ea9f2835ab839fcec32254858f61 Mon Sep 17 00:00:00 2001 From: six2dez Date: Wed, 31 Jan 2024 15:37:16 +0100 Subject: [PATCH 1/7] iisshortname --- install.sh | 1 + reconftw.cfg | 1 + reconftw.sh | 30 ++++++++++++++++++++++++++++++ 3 files changed, 32 insertions(+) diff --git a/install.sh b/install.sh index 512389ef..5c8c4cfd 100755 --- a/install.sh +++ b/install.sh @@ -71,6 +71,7 @@ gotools["mantra"]="go install -v github.com/MrEmpy/mantra@latest" gotools["crt"]="go install -v github.com/cemulus/crt@latest" gotools["s3scanner"]="go install -v github.com/sa7mon/s3scanner@latest" gotools["nmapurls"]="go install -v github.com/sdcampbell/nmapurls@latest" +gotools["shortscan"]="go install -v github.com/bitquark/shortscan/cmd/shortscan@latest" # Declaring repositories and their paths declare -A repos diff --git a/reconftw.cfg b/reconftw.cfg index 018c003b..6479220d 100644 --- a/reconftw.cfg +++ b/reconftw.cfg @@ -110,6 +110,7 @@ URL_GF=true # Url patterns classification URL_EXT=true # Returns a list of files divided by extension JSCHECKS=true # JS analysis FUZZ=true # Web fuzzing +IIS_SHORTNAME=true CMS_SCANNER=true # CMS scanner WORDLIST=true # Wordlist generation ROBOTSWORDLIST=true # Check historic disallow entries on waybackMachine diff --git a/reconftw.sh b/reconftw.sh index 8a4abadd..be7773bc 100755 --- a/reconftw.sh +++ b/reconftw.sh @@ -383,6 +383,10 @@ function tools_installed() { printf "${bred} [*] porch-pirate [NO]${reset}\n" allinstalled=false } + command -v shortscan &>/dev/null || { + printf "${bred} [*] shortscan [NO]${reset}\n" + allinstalled=false + } if [[ ${allinstalled} == true ]]; then printf "${bgreen} Good! All installed! ${reset}\n\n" else @@ -1673,6 +1677,29 @@ function fuzz() { } +function iishortname() { + + if { [[ ! 
-f "$called_fn_dir/.${FUNCNAME[0]}" ]] || [[ $DIFF == true ]]; } && [[ $IIS_SHORTNAME == true ]]; then + start_func ${FUNCNAME[0]} "IIS Shortname Scanner" + [ -s "nuclei_output/info.txt" ] && cat nuclei_output/info.txt | grep "iis-version" | cut -d " " -f4 > .tmp/iis_sites.txt + if [[ -s ".tmp/iis_sites.txt" ]]; then + mkdir -p $dir/vulns/iis-shortname/ + interlace -tL .tmp/iis_sites.txt -threads ${INTERLACE_THREADS} -c "shortscan _target_ -F -s -p 1 > _output_/_cleantarget_.txt" -o $dir/vulns/iis-shortname/ 2>>"$LOGFILE" >/dev/null + find $dir/vulns/iis-shortname/ -type f -print0 | xargs --null grep -Z -L 'Vulnerable: Yes' | xargs --null rm + end_func "Results are saved in vulns/iis-shortname/" ${FUNCNAME[0]} + else + end_func "No IIS sites detected, iishortname check skipped " ${FUNCNAME[0]} + fi + else + if [[ $IIS_SHORTNAME == false ]]; then + printf "\n${yellow} ${FUNCNAME[0]} skipped in this mode or defined in reconftw.cfg ${reset}\n" + else + printf "${yellow} ${FUNCNAME[0]} is already processed, to force executing ${FUNCNAME[0]} delete\n $called_fn_dir/.${FUNCNAME[0]} ${reset}\n\n" + fi + fi + +} + function cms_scanner() { if { [[ ! 
-f "$called_fn_dir/.${FUNCNAME[0]}" ]] || [[ $DIFF == true ]]; } && [[ $CMS_SCANNER == true ]]; then @@ -3052,6 +3079,7 @@ function recon() { waf_checks nuclei_check fuzz + iishortname urlchecks jschecks @@ -3224,6 +3252,7 @@ function multi_recon() { } loopstart=$(date +%s) fuzz + iishortname urlchecks jschecks currently=$(date +"%H:%M:%S") @@ -3309,6 +3338,7 @@ function webs_menu() { nuclei_check cms_scanner fuzz + iishortname urlchecks jschecks url_gf From 912876758c5d82c3c47aeed74824db6b302e4311 Mon Sep 17 00:00:00 2001 From: six2dez Date: Fri, 2 Feb 2024 11:55:47 +0100 Subject: [PATCH 2/7] IIS with sns --- install.sh | 1 + reconftw.sh | 13 ++++++++++--- 2 files changed, 11 insertions(+), 3 deletions(-) diff --git a/install.sh b/install.sh index 5c8c4cfd..696e8c05 100755 --- a/install.sh +++ b/install.sh @@ -72,6 +72,7 @@ gotools["crt"]="go install -v github.com/cemulus/crt@latest" gotools["s3scanner"]="go install -v github.com/sa7mon/s3scanner@latest" gotools["nmapurls"]="go install -v github.com/sdcampbell/nmapurls@latest" gotools["shortscan"]="go install -v github.com/bitquark/shortscan/cmd/shortscan@latest" +gotools["sns"]="go install github.com/sw33tLie/sns@latest" # Declaring repositories and their paths declare -A repos diff --git a/reconftw.sh b/reconftw.sh index be7773bc..7ee69786 100755 --- a/reconftw.sh +++ b/reconftw.sh @@ -387,6 +387,10 @@ function tools_installed() { printf "${bred} [*] shortscan [NO]${reset}\n" allinstalled=false } + command -v sns &>/dev/null || { + printf "${bred} [*] sns [NO]${reset}\n" + allinstalled=false + } if [[ ${allinstalled} == true ]]; then printf "${bgreen} Good! All installed! 
${reset}\n\n" else @@ -1683,9 +1687,12 @@ function iishortname() { start_func ${FUNCNAME[0]} "IIS Shortname Scanner" [ -s "nuclei_output/info.txt" ] && cat nuclei_output/info.txt | grep "iis-version" | cut -d " " -f4 > .tmp/iis_sites.txt if [[ -s ".tmp/iis_sites.txt" ]]; then - mkdir -p $dir/vulns/iis-shortname/ - interlace -tL .tmp/iis_sites.txt -threads ${INTERLACE_THREADS} -c "shortscan _target_ -F -s -p 1 > _output_/_cleantarget_.txt" -o $dir/vulns/iis-shortname/ 2>>"$LOGFILE" >/dev/null - find $dir/vulns/iis-shortname/ -type f -print0 | xargs --null grep -Z -L 'Vulnerable: Yes' | xargs --null rm + mkdir -p $dir/vulns/iis-shortname-shortscan/ + mkdir -p $dir/vulns/iis-shortname-sns/ + interlace -tL .tmp/iis_sites.txt -threads ${INTERLACE_THREADS} -c "shortscan _target_ -F -s -p 1 > _output_/_cleantarget_.txt" -o $dir/vulns/iis-shortname-shortscan/ 2>>"$LOGFILE" >/dev/null + find $dir/vulns/iis-shortname-shortscan/ -type f -print0 | xargs --null grep -Z -L 'Vulnerable: Yes' | xargs --null rm + interlace -tL .tmp/iis_sites.txt -threads ${INTERLACE_THREADS} -c "sns -u _target_ > _output_/_cleantarget_.txt" -o $dir/vulns/iis-shortname-sns/ 2>>"$LOGFILE" >/dev/null + find $dir/vulns/iis-shortname-sns/ -type f -print0 | xargs --null grep -Z -l 'Target is not vulnerable' | xargs --null rm end_func "Results are saved in vulns/iis-shortname/" ${FUNCNAME[0]} else end_func "No IIS sites detected, iishortname check skipped " ${FUNCNAME[0]} From 943795ab8cc8ff40cf88b6da96509b091fba606e Mon Sep 17 00:00:00 2001 From: six2dez Date: Sat, 3 Feb 2024 09:55:46 +0100 Subject: [PATCH 3/7] Leaksearch added --- install.sh | 1 + reconftw.sh | 20 +++++++++++++++++++- 2 files changed, 20 insertions(+), 1 deletion(-) diff --git a/install.sh b/install.sh index 696e8c05..8cc5a869 100755 --- a/install.sh +++ b/install.sh @@ -106,6 +106,7 @@ repos["gitleaks"]="gitleaks/gitleaks" repos["trufflehog"]="trufflesecurity/trufflehog" repos["dontgo403"]="devploit/dontgo403" 
repos["SwaggerSpy"]="UndeadSec/SwaggerSpy" +repos["LeakSearch"]="JoelGMSec/LeakSearch" function banner() { tput clear diff --git a/reconftw.sh b/reconftw.sh index 7ee69786..81a230c4 100755 --- a/reconftw.sh +++ b/reconftw.sh @@ -175,6 +175,10 @@ function tools_installed() { printf "${bred} [*] swaggerspy [NO]${reset}\n" allinstalled=false } + [ -f "${tools}/LeakSearch/LeakSearch.py" ] || { + printf "${bred} [*] LeakSearch [NO]${reset}\n" + allinstalled=false + } command -v github-endpoints &>/dev/null || { printf "${bred} [*] github-endpoints [NO]${reset}\n" allinstalled=false @@ -563,7 +567,21 @@ function emails() { } [ -s ".tmp/emailfinder.txt" ] && cat .tmp/emailfinder.txt | grep "@" | grep -iv "|_" | anew -q osint/emails.txt - end_func "Results are saved in $domain/osint/emails.txt" ${FUNCNAME[0]} + pushd "${tools}/LeakSearch" >/dev/null || { + echo "Failed to cd directory in ${FUNCNAME[0]} @ line ${LINENO}" + } + + python3 LeakSearch.py -k $domain -o ${dir}/.tmp/passwords.txt 2>>"$LOGFILE" || { + echo "LeakSearch command failed" + } + + popd >/dev/null || { + echo "Failed to popd in ${FUNCNAME[0]} @ line ${LINENO}" + } + + [ -s ".tmp/passwords.txt" ] && cat .tmp/passwords.txt | anew -q osint/passwords.txt + + end_func "Results are saved in $domain/osint/emails|passwords.txt" ${FUNCNAME[0]} else if [[ $EMAILS == false ]] || [[ $OSINT == false ]]; then printf "\n${yellow} ${FUNCNAME[0]} skipped in this mode or defined in reconftw.cfg ${reset}\n" From 9aabdfd7608c579038d63a78e8852b1faade1153 Mon Sep 17 00:00:00 2001 From: six2dez Date: Sat, 3 Feb 2024 09:58:32 +0100 Subject: [PATCH 4/7] leaksearch added to readme --- README.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/README.md b/README.md index cc323209..bd779647 100644 --- a/README.md +++ b/README.md @@ -476,7 +476,7 @@ reset='\033[0m' ## Osint - Domain information ([whois](https://github.com/rfc1036/whois) and [amass](https://github.com/OWASP/Amass)) -- Emails addresses 
and users ([emailfinder](https://github.com/Josue87/EmailFinder)) +- Emails addresses and passwords leaks ([emailfinder](https://github.com/Josue87/EmailFinder) and [LeakSearch](https://github.com/JoelGMSec/LeakSearch)) - Metadata finder ([MetaFinder](https://github.com/Josue87/MetaFinder)) - API leaks search ([porch-pirate](https://github.com/MandConsultingGroup/porch-pirate) and [SwaggerSpy](https://github.com/UndeadSec/SwaggerSpy)) - Google Dorks ([dorks_hunter](https://github.com/six2dez/dorks_hunter)) From 7d91cb27c2d6ac38012c61e2d596d5976481cd63 Mon Sep 17 00:00:00 2001 From: six2dez Date: Sat, 3 Feb 2024 10:12:25 +0100 Subject: [PATCH 5/7] Added timeout for csp and tls with httpx --- reconftw.sh | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/reconftw.sh b/reconftw.sh index 81a230c4..bcdc6496 100755 --- a/reconftw.sh +++ b/reconftw.sh @@ -897,7 +897,7 @@ function sub_scraping() { resolvers_update_quick_local cat subdomains/subdomains.txt | httpx -follow-host-redirects -status-code -threads $HTTPX_THREADS -rl $HTTPX_RATELIMIT -timeout $HTTPX_TIMEOUT -silent -retries 2 -title -web-server -tech-detect -location -no-color -json -o .tmp/web_full_info1.txt 2>>"$LOGFILE" >/dev/null [ -s ".tmp/web_full_info1.txt" ] && cat .tmp/web_full_info1.txt | jq -r 'try .url' 2>/dev/null | grep "$domain" | grep -E '^((http|https):\/\/)?([a-zA-Z0-9]([a-zA-Z0-9\-]*[a-zA-Z0-9])?\.)+[a-zA-Z]{1,}(\/.*)?$' | sed "s/*.//" | anew .tmp/probed_tmp_scrap.txt | unfurl -u domains 2>>"$LOGFILE" | anew -q .tmp/scrap_subs.txt - [ -s ".tmp/probed_tmp_scrap.txt" ] && cat .tmp/probed_tmp_scrap.txt | httpx -tls-grab -tls-probe -csp-probe -status-code -threads $HTTPX_THREADS -rl $HTTPX_RATELIMIT -timeout $HTTPX_TIMEOUT -silent -retries 2 -title -web-server -tech-detect -location -no-color -json -o .tmp/web_full_info2.txt 2>>"$LOGFILE" >/dev/null + [ -s ".tmp/probed_tmp_scrap.txt" ] && timeout -k 1m 10m cat .tmp/probed_tmp_scrap.txt 
| httpx -tls-grab -tls-probe -csp-probe -status-code -threads $HTTPX_THREADS -rl $HTTPX_RATELIMIT -timeout $HTTPX_TIMEOUT -silent -retries 2 -title -web-server -tech-detect -location -no-color -json -o .tmp/web_full_info2.txt 2>>"$LOGFILE" >/dev/null [ -s ".tmp/web_full_info2.txt" ] && cat .tmp/web_full_info2.txt | jq -r 'try ."tls-grab"."dns_names"[],try .csp.domains[],try .url' 2>/dev/null | grep "$domain" | grep -E '^((http|https):\/\/)?([a-zA-Z0-9]([a-zA-Z0-9\-]*[a-zA-Z0-9])?\.)+[a-zA-Z]{1,}(\/.*)?$' | sed "s/*.//" | sort -u | httpx -silent | anew .tmp/probed_tmp_scrap.txt | unfurl -u domains 2>>"$LOGFILE" | anew -q .tmp/scrap_subs.txt if [[ $DEEP == true ]]; then From 356581924aded0047f1f4342f4a22d9dfbb34cb8 Mon Sep 17 00:00:00 2001 From: six2dez Date: Sat, 3 Feb 2024 10:16:24 +0100 Subject: [PATCH 6/7] kill timeout httpx cdsp --- reconftw.sh | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/reconftw.sh b/reconftw.sh index bcdc6496..df138ba3 100755 --- a/reconftw.sh +++ b/reconftw.sh @@ -897,7 +897,7 @@ function sub_scraping() { resolvers_update_quick_local cat subdomains/subdomains.txt | httpx -follow-host-redirects -status-code -threads $HTTPX_THREADS -rl $HTTPX_RATELIMIT -timeout $HTTPX_TIMEOUT -silent -retries 2 -title -web-server -tech-detect -location -no-color -json -o .tmp/web_full_info1.txt 2>>"$LOGFILE" >/dev/null [ -s ".tmp/web_full_info1.txt" ] && cat .tmp/web_full_info1.txt | jq -r 'try .url' 2>/dev/null | grep "$domain" | grep -E '^((http|https):\/\/)?([a-zA-Z0-9]([a-zA-Z0-9\-]*[a-zA-Z0-9])?\.)+[a-zA-Z]{1,}(\/.*)?$' | sed "s/*.//" | anew .tmp/probed_tmp_scrap.txt | unfurl -u domains 2>>"$LOGFILE" | anew -q .tmp/scrap_subs.txt - [ -s ".tmp/probed_tmp_scrap.txt" ] && timeout -k 1m 10m cat .tmp/probed_tmp_scrap.txt | httpx -tls-grab -tls-probe -csp-probe -status-code -threads $HTTPX_THREADS -rl $HTTPX_RATELIMIT -timeout $HTTPX_TIMEOUT -silent -retries 2 -title -web-server -tech-detect -location -no-color -json -o 
.tmp/web_full_info2.txt 2>>"$LOGFILE" >/dev/null + [ -s ".tmp/probed_tmp_scrap.txt" ] && timeout -k 1m 10m | httpx -l .tmp/probed_tmp_scrap.txt -tls-grab -tls-probe -csp-probe -status-code -threads $HTTPX_THREADS -rl $HTTPX_RATELIMIT -timeout $HTTPX_TIMEOUT -silent -retries 2 -no-color -json -o .tmp/web_full_info2.txt 2>>"$LOGFILE" >/dev/null [ -s ".tmp/web_full_info2.txt" ] && cat .tmp/web_full_info2.txt | jq -r 'try ."tls-grab"."dns_names"[],try .csp.domains[],try .url' 2>/dev/null | grep "$domain" | grep -E '^((http|https):\/\/)?([a-zA-Z0-9]([a-zA-Z0-9\-]*[a-zA-Z0-9])?\.)+[a-zA-Z]{1,}(\/.*)?$' | sed "s/*.//" | sort -u | httpx -silent | anew .tmp/probed_tmp_scrap.txt | unfurl -u domains 2>>"$LOGFILE" | anew -q .tmp/scrap_subs.txt if [[ $DEEP == true ]]; then From 9730271cfd6c3c55e816fa0b991a35ac8591d542 Mon Sep 17 00:00:00 2001 From: six2dez Date: Sat, 3 Feb 2024 10:17:00 +0100 Subject: [PATCH 7/7] timeout on axiom httpx --- reconftw.sh | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/reconftw.sh b/reconftw.sh index df138ba3..9c9d26dc 100755 --- a/reconftw.sh +++ b/reconftw.sh @@ -897,7 +897,7 @@ function sub_scraping() { resolvers_update_quick_local cat subdomains/subdomains.txt | httpx -follow-host-redirects -status-code -threads $HTTPX_THREADS -rl $HTTPX_RATELIMIT -timeout $HTTPX_TIMEOUT -silent -retries 2 -title -web-server -tech-detect -location -no-color -json -o .tmp/web_full_info1.txt 2>>"$LOGFILE" >/dev/null [ -s ".tmp/web_full_info1.txt" ] && cat .tmp/web_full_info1.txt | jq -r 'try .url' 2>/dev/null | grep "$domain" | grep -E '^((http|https):\/\/)?([a-zA-Z0-9]([a-zA-Z0-9\-]*[a-zA-Z0-9])?\.)+[a-zA-Z]{1,}(\/.*)?$' | sed "s/*.//" | anew .tmp/probed_tmp_scrap.txt | unfurl -u domains 2>>"$LOGFILE" | anew -q .tmp/scrap_subs.txt - [ -s ".tmp/probed_tmp_scrap.txt" ] && timeout -k 1m 10m | httpx -l .tmp/probed_tmp_scrap.txt -tls-grab -tls-probe -csp-probe -status-code -threads $HTTPX_THREADS -rl $HTTPX_RATELIMIT -timeout $HTTPX_TIMEOUT 
-silent -retries 2 -no-color -json -o .tmp/web_full_info2.txt 2>>"$LOGFILE" >/dev/null + [ -s ".tmp/probed_tmp_scrap.txt" ] && timeout -k 1m 10m httpx -l .tmp/probed_tmp_scrap.txt -tls-grab -tls-probe -csp-probe -status-code -threads $HTTPX_THREADS -rl $HTTPX_RATELIMIT -timeout $HTTPX_TIMEOUT -silent -retries 2 -no-color -json -o .tmp/web_full_info2.txt 2>>"$LOGFILE" >/dev/null [ -s ".tmp/web_full_info2.txt" ] && cat .tmp/web_full_info2.txt | jq -r 'try ."tls-grab"."dns_names"[],try .csp.domains[],try .url' 2>/dev/null | grep "$domain" | grep -E '^((http|https):\/\/)?([a-zA-Z0-9]([a-zA-Z0-9\-]*[a-zA-Z0-9])?\.)+[a-zA-Z]{1,}(\/.*)?$' | sed "s/*.//" | sort -u | httpx -silent | anew .tmp/probed_tmp_scrap.txt | unfurl -u domains 2>>"$LOGFILE" | anew -q .tmp/scrap_subs.txt if [[ $DEEP == true ]]; then @@ -909,7 +909,7 @@ function sub_scraping() { resolvers_update_quick_axiom axiom-scan subdomains/subdomains.txt -m httpx -follow-host-redirects -random-agent -status-code -threads $HTTPX_THREADS -rl $HTTPX_RATELIMIT -timeout $HTTPX_TIMEOUT -silent -retries 2 -title -web-server -tech-detect -location -no-color -json -o .tmp/web_full_info1.txt $AXIOM_EXTRA_ARGS 2>>"$LOGFILE" >/dev/null [ -s ".tmp/web_full_info1.txt" ] && cat .tmp/web_full_info1.txt | jq -r 'try .url' 2>/dev/null | grep "$domain" | grep -E '^((http|https):\/\/)?([a-zA-Z0-9]([a-zA-Z0-9\-]*[a-zA-Z0-9])?\.)+[a-zA-Z]{1,}(\/.*)?$' | sed "s/*.//" | anew .tmp/probed_tmp_scrap.txt | unfurl -u domains 2>>"$LOGFILE" | anew -q .tmp/scrap_subs.txt - [ -s ".tmp/probed_tmp_scrap.txt" ] && axiom-scan .tmp/probed_tmp_scrap.txt -m httpx -tls-grab -tls-probe -csp-probe -random-agent -status-code -threads $HTTPX_THREADS -rl $HTTPX_RATELIMIT -timeout $HTTPX_TIMEOUT -silent -retries 2 -title -web-server -tech-detect -location -no-color -json -o .tmp/web_full_info2.txt $AXIOM_EXTRA_ARGS 2>>"$LOGFILE" >/dev/null + [ -s ".tmp/probed_tmp_scrap.txt" ] && timeout -k 1m 10m axiom-scan .tmp/probed_tmp_scrap.txt -m httpx -tls-grab 
-tls-probe -csp-probe -random-agent -status-code -threads $HTTPX_THREADS -rl $HTTPX_RATELIMIT -timeout $HTTPX_TIMEOUT -silent -retries 2 -title -web-server -tech-detect -location -no-color -json -o .tmp/web_full_info2.txt $AXIOM_EXTRA_ARGS 2>>"$LOGFILE" >/dev/null [ -s ".tmp/web_full_info2.txt" ] && cat .tmp/web_full_info2.txt | jq -r 'try ."tls-grab"."dns_names"[],try .csp.domains[],try .url' 2>/dev/null | grep "$domain" | grep -E '^((http|https):\/\/)?([a-zA-Z0-9]([a-zA-Z0-9\-]*[a-zA-Z0-9])?\.)+[a-zA-Z]{1,}(\/.*)?$' | sed "s/*.//" | sort -u | httpx -silent | anew .tmp/probed_tmp_scrap.txt | unfurl -u domains 2>>"$LOGFILE" | anew -q .tmp/scrap_subs.txt if [[ $DEEP == true ]]; then [ -s ".tmp/probed_tmp_scrap.txt" ] && axiom-scan .tmp/probed_tmp_scrap.txt -m katana -jc -kf all -d 3 -fs rdn -o .tmp/katana.txt $AXIOM_EXTRA_ARGS 2>>"$LOGFILE" >/dev/null