From c7a691a24915bc25aef596444948bf3ec2e395a9 Mon Sep 17 00:00:00 2001
From: six2dez <alexis.fernandez@visma.com>
Date: Fri, 6 Sep 2024 09:50:38 +0200
Subject: [PATCH 01/34] fix misleading httpx timeout message

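The fallback message was copy-pasted from the git fetch check and did
not match the command it guards, an httpx scan run through axiom-scan.
The guard pattern in use is roughly (a simplified sketch, file names
are placeholders):

    timeout -k 1m 10m axiom-scan input.txt -m httpx -o out.txt || echo "httpx timeout reached"

timeout(1) sends SIGTERM after 10 minutes and, with -k 1m, SIGKILL one
minute later if the scan is still running; the trailing || echo keeps
the script going and records why the scan stopped.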
---
 reconftw.sh | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/reconftw.sh b/reconftw.sh
index 176bf591..054f7d74 100755
--- a/reconftw.sh
+++ b/reconftw.sh
@@ -1018,7 +1018,7 @@ function sub_scraping() {
 					resolvers_update_quick_axiom
 					axiom-scan subdomains/subdomains.txt -m httpx -follow-host-redirects -random-agent -status-code -threads $HTTPX_THREADS -rl $HTTPX_RATELIMIT -timeout $HTTPX_TIMEOUT -silent -retries 2 -title -web-server -tech-detect -location -no-color -json -o .tmp/web_full_info1.txt $AXIOM_EXTRA_ARGS 2>>"$LOGFILE" >/dev/null
 					[ -s ".tmp/web_full_info1.txt" ] && cat .tmp/web_full_info1.txt | jq -r 'try .url' 2>/dev/null | grep "$domain" | grep -E '^((http|https):\/\/)?([a-zA-Z0-9]([a-zA-Z0-9\-]*[a-zA-Z0-9])?\.)+[a-zA-Z]{1,}(\/.*)?$' | sed "s/*.//" | anew .tmp/probed_tmp_scrap.txt | unfurl -u domains 2>>"$LOGFILE" | anew -q .tmp/scrap_subs.txt
-					[ -s ".tmp/probed_tmp_scrap.txt" ] && timeout -k 1m 10m axiom-scan .tmp/probed_tmp_scrap.txt -m httpx -tls-grab -tls-probe -csp-probe -random-agent -status-code -threads $HTTPX_THREADS -rl $HTTPX_RATELIMIT -timeout $HTTPX_TIMEOUT -silent -retries 2 -title -web-server -tech-detect -location -no-color -json -o .tmp/web_full_info2.txt $AXIOM_EXTRA_ARGS 2>>"$LOGFILE" >/dev/null || (true && echo "git fetch timeout reached")
+					[ -s ".tmp/probed_tmp_scrap.txt" ] && timeout -k 1m 10m axiom-scan .tmp/probed_tmp_scrap.txt -m httpx -tls-grab -tls-probe -csp-probe -random-agent -status-code -threads $HTTPX_THREADS -rl $HTTPX_RATELIMIT -timeout $HTTPX_TIMEOUT -silent -retries 2 -title -web-server -tech-detect -location -no-color -json -o .tmp/web_full_info2.txt $AXIOM_EXTRA_ARGS 2>>"$LOGFILE" >/dev/null || (true && echo "Httpx  timeout reached")
 					[ -s ".tmp/web_full_info2.txt" ] && cat .tmp/web_full_info2.txt | jq -r 'try ."tls-grab"."dns_names"[],try .csp.domains[],try .url' 2>/dev/null | grep "$domain" | grep -E '^((http|https):\/\/)?([a-zA-Z0-9]([a-zA-Z0-9\-]*[a-zA-Z0-9])?\.)+[a-zA-Z]{1,}(\/.*)?$' | sed "s/*.//" | sort -u | httpx -silent | anew .tmp/probed_tmp_scrap.txt | unfurl -u domains 2>>"$LOGFILE" | anew -q .tmp/scrap_subs.txt
 					if [[ $DEEP == true ]]; then
 						[ -s ".tmp/probed_tmp_scrap.txt" ] && axiom-scan .tmp/probed_tmp_scrap.txt -m katana -jc -kf all -d 3 -fs rdn -o .tmp/katana.txt $AXIOM_EXTRA_ARGS 2>>"$LOGFILE" >/dev/null

From 48a663812c58e2e83ef522fbedca53fcb51b89f5 Mon Sep 17 00:00:00 2001
From: six2dez <alexis.fernandez@visma.com>
Date: Fri, 20 Sep 2024 10:57:09 +0200
Subject: [PATCH 02/34] ipinfo improved

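Replaces the field-by-field ipapi.co parsing (sixteen jq calls plus
seventeen echo lines per IP) with a single curl per IP against
ipinfo.io, appending the raw JSON to hosts/ipinfo.txt. Consumers that
need individual fields can extract them afterwards, e.g. (a sketch;
the exact field paths depend on the response shape of the widget
endpoint):

    jq -r '.ip? // empty' hosts/ipinfo.txt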
---
 reconftw.sh | 42 ++++--------------------------------------
 1 file changed, 4 insertions(+), 38 deletions(-)

diff --git a/reconftw.sh b/reconftw.sh
index 054f7d74..e443f607 100755
--- a/reconftw.sh
+++ b/reconftw.sh
@@ -1478,7 +1478,7 @@ function geo_info() {
 
 	mkdir -p hosts
 	if { [[ ! -f "$called_fn_dir/.${FUNCNAME[0]}" ]] || [[ $DIFF == true ]]; } && [[ $GEO_INFO == true ]]; then
-		start_func ${FUNCNAME[0]} "Running: ipinfo and geoinfo"
+		start_func ${FUNCNAME[0]} "Running: ipinfo"
 		ips_file="${dir}/hosts/ips.txt"
 		if [ ! -f $ips_file ]; then
 			if ! [[ $domain =~ ^[0-9]+\.[0-9]+\.[0-9]+\.[0-9] ]]; then
@@ -1489,46 +1489,12 @@ function geo_info() {
 				echo $domain | grep -aEiv "^(127|10|169\.254|172\.1[6789]|172\.2[0-9]|172\.3[01]|192\.168)\." | grep -oE "\b([0-9]{1,3}\.){3}[0-9]{1,3}\b" | anew -q hosts/ips.txt
 			fi
 		else
+			touch ${dir}/hosts/ipinfo.txt
 			for ip in $(cat "$ips_file"); do
-				json_output=$(curl -s https://ipapi.co/$ip/json)
-				echo $json_output >>${dir}/hosts/geoip.json
-				ip=$(echo $json_output | jq '.ip' | tr -d '''"''')
-				network=$(echo $json_output | jq '.network' | tr -d '''"''')
-				city=$(echo $json_output | jq '.city' | tr -d '''"''')
-				region=$(echo $json_output | jq '.region' | tr -d '''"''')
-				country=$(echo $json_output | jq '.country' | tr -d '''"''')
-				country_name=$(echo $json_output | jq '.country_name' | tr -d '''"''')
-				country_code=$(echo $json_output | jq '.country_code' | tr -d '''"''')
-				country_code_iso3=$(echo $json_output | jq '.country_code_iso3' | tr -d '''"''')
-				country_tld=$(echo $json_output | jq '.country_tld' | tr -d '''"''')
-				continent_code=$(echo $json_output | jq '.continent_code' | tr -d '''"''')
-				latitude=$(echo $json_output | jq '.latitude' | tr -d '''"''')
-				longitude=$(echo $json_output | jq '.longitude' | tr -d '''"''')
-				timezone=$(echo $json_output | jq '.timezone' | tr -d '''"''')
-				utc_offset=$(echo $json_output | jq '.utc_offset' | tr -d '''"''')
-				asn=$(echo $json_output | jq '.asn' | tr -d '''"''')
-				org=$(echo $json_output | jq '.org' | tr -d '''"''')
-
-				echo "IP: $ip" >>${dir}/hosts/geoip.txt
-				echo "Network: $network" >>${dir}/hosts/geoip.txt
-				echo "City: $city" >>${dir}/hosts/geoip.txt
-				echo "Region: $region" >>${dir}/hosts/geoip.txt
-				echo "Country: $country" >>${dir}/hosts/geoip.txt
-				echo "Country Name: $country_name" >>${dir}/hosts/geoip.txt
-				echo "Country Code: $country_code" >>${dir}/hosts/geoip.txt
-				echo "Country Code ISO3: $country_code_iso3" >>${dir}/hosts/geoip.txt
-				echo "Country tld: $country_tld" >>${dir}/hosts/geoip.txt
-				echo "Continent Code: $continent_code" >>${dir}/hosts/geoip.txt
-				echo "Latitude: $latitude" >>${dir}/hosts/geoip.txt
-				echo "Longitude: $longitude" >>${dir}/hosts/geoip.txt
-				echo "Timezone: $timezone" >>${dir}/hosts/geoip.txt
-				echo "UTC Offset: $utc_offset" >>${dir}/hosts/geoip.txt
-				echo "ASN: $asn" >>${dir}/hosts/geoip.txt
-				echo "ORG: $org" >>${dir}/hosts/geoip.txt
-				echo -e "------------------------------\n" >>${dir}/hosts/geoip.txt
+				curl -s https://ipinfo.io/widget/demo/$ip >> ${dir}/hosts/ipinfo.txt
 			done
 		fi
-		end_func "Results are saved in hosts/geoip.txt and hosts/geoip.json" ${FUNCNAME[0]}
+		end_func "Results are saved in hosts/ipinfo.txt" ${FUNCNAME[0]}
 	else
 		if [[ $GEO_INFO == false ]]; then
 			printf "\n${yellow}[$(date +'%Y-%m-%d %H:%M:%S')] ${FUNCNAME[0]} skipped in this mode or defined in reconftw.cfg ${reset}\n"

From 051ce08fd7a32e4dae03163542ef3d03695b5af8 Mon Sep 17 00:00:00 2001
From: six2dez <alexis.fernandez@visma.com>
Date: Fri, 20 Sep 2024 10:58:40 +0200
Subject: [PATCH 03/34] ipinfo update readme

---
 README.md | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/README.md b/README.md
index 1a2fa073..08a1d253 100644
--- a/README.md
+++ b/README.md
@@ -501,7 +501,7 @@ reset='\033[0m'
 
 ## Hosts
 
-- IP info ([whoisxmlapi API](https://www.whoisxmlapi.com/))
+- IP info ([ipinfo](https://www.ipinfo.io/))
 - CDN checker ([ipcdn](https://github.com/six2dez/ipcdn))
 - WAF checker ([wafw00f](https://github.com/EnableSecurity/wafw00f))
 - Port Scanner (Active with [nmap](https://github.com/nmap/nmap) and passive with [smap](https://github.com/s0md3v/Smap))

From 688e5d6de007d4ba30f081b92c69d7eeb40cbfd3 Mon Sep 17 00:00:00 2001
From: Bilel Eljaamii <bileleljaamii@gmail.com>
Date: Sat, 5 Oct 2024 14:51:26 +0200
Subject: [PATCH 04/34] add waymore to auto-installer

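Entries in the repos associative array map a local directory name to a
GitHub owner/repo path; the installer loop clones each entry into the
tools directory, roughly (a sketch of the existing loop):

    for repo in "${!repos[@]}"; do
        git clone "https://github.com/${repos[$repo]}" "${dir}/${repo}"
    done

so adding a key here is all that is needed for a tool to be picked up.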
---
 install.sh | 1 +
 1 file changed, 1 insertion(+)

diff --git a/install.sh b/install.sh
index ef40436d..15b8f615 100755
--- a/install.sh
+++ b/install.sh
@@ -110,6 +110,7 @@ repos["LeakSearch"]="JoelGMSec/LeakSearch"
 repos["ffufPostprocessing"]="Damian89/ffufPostprocessing"
 repos["misconfig-mapper"]="intigriti/misconfig-mapper"
 repos["Spoofy"]="MattKeeley/Spoofy"
+repos["Waymore"]="xnl-h4ck3r/waymore"
 
 function banner() {
 	tput clear

From ed6917ce619740982f148213b2986180bf9b93e4 Mon Sep 17 00:00:00 2001
From: Bilel Eljaamii <bileleljaamii@gmail.com>
Date: Sat, 5 Oct 2024 14:57:27 +0200
Subject: [PATCH 05/34] Add xnLinkFinder to auto installer

---
 install.sh | 1 +
 1 file changed, 1 insertion(+)

diff --git a/install.sh b/install.sh
index 15b8f615..cb4dd24e 100755
--- a/install.sh
+++ b/install.sh
@@ -111,6 +111,7 @@ repos["ffufPostprocessing"]="Damian89/ffufPostprocessing"
 repos["misconfig-mapper"]="intigriti/misconfig-mapper"
 repos["Spoofy"]="MattKeeley/Spoofy"
 repos["Waymore"]="xnl-h4ck3r/waymore"
+repos["xnLinkFinder"]="xnl-h4ck3r/xnLinkFinder"
 
 function banner() {
 	tput clear

From c564fdc966907feb611f0a42b2cd7c172e44f759 Mon Sep 17 00:00:00 2001
From: Bilel Eljaamii <bileleljaamii@gmail.com>
Date: Sat, 5 Oct 2024 15:03:56 +0200
Subject: [PATCH 06/34] add porch-pirate to auto-install

---
 install.sh | 1 +
 1 file changed, 1 insertion(+)

diff --git a/install.sh b/install.sh
index cb4dd24e..67dff8d2 100755
--- a/install.sh
+++ b/install.sh
@@ -112,6 +112,7 @@ repos["misconfig-mapper"]="intigriti/misconfig-mapper"
 repos["Spoofy"]="MattKeeley/Spoofy"
 repos["Waymore"]="xnl-h4ck3r/waymore"
 repos["xnLinkFinder"]="xnl-h4ck3r/xnLinkFinder"
+repos["porch-pirate"]="MandConsultingGroup/porch-pirate"
 
 function banner() {
 	tput clear

From fc608c487122ef4a3eb84af7af43c9fce0fd4f81 Mon Sep 17 00:00:00 2001
From: Bilel Eljaamii <bileleljaamii@gmail.com>
Date: Sat, 5 Oct 2024 15:18:10 +0200
Subject: [PATCH 07/34] add MetaFinder to auto installer

---
 install.sh | 1 +
 1 file changed, 1 insertion(+)

diff --git a/install.sh b/install.sh
index 67dff8d2..24714110 100755
--- a/install.sh
+++ b/install.sh
@@ -113,6 +113,7 @@ repos["Spoofy"]="MattKeeley/Spoofy"
 repos["Waymore"]="xnl-h4ck3r/waymore"
 repos["xnLinkFinder"]="xnl-h4ck3r/xnLinkFinder"
 repos["porch-pirate"]="MandConsultingGroup/porch-pirate"
+repos["MetaFinder"]="Josue87/MetaFinder"
 
 function banner() {
 	tput clear

From 87b28d02f259b31b39d53a46ee4abe96bf9656dd Mon Sep 17 00:00:00 2001
From: Bilel Eljaamii <bileleljaamii@gmail.com>
Date: Sat, 5 Oct 2024 15:20:43 +0200
Subject: [PATCH 08/34] add EmailFinder to auto installer

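Note that bash associative-array keys must be unique: reusing the
MetaFinder key would silently overwrite that entry instead of adding a
new one, so EmailFinder gets its own key. A quick demonstration:

    declare -A m=([a]=1); m[a]=2; echo "${m[a]}"   # prints 2, the first value is gone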
---
 install.sh | 1 +
 1 file changed, 1 insertion(+)

diff --git a/install.sh b/install.sh
index 24714110..7b88e58b 100755
--- a/install.sh
+++ b/install.sh
@@ -114,6 +114,7 @@ repos["Waymore"]="xnl-h4ck3r/waymore"
 repos["xnLinkFinder"]="xnl-h4ck3r/xnLinkFinder"
 repos["porch-pirate"]="MandConsultingGroup/porch-pirate"
 repos["MetaFinder"]="Josue87/MetaFinder"
+repos["MetaFinder"]="Josue87/EmailFinder"
 
 function banner() {
 	tput clear

From e591cb07462858c7a8e08eb1587dc419d506e630 Mon Sep 17 00:00:00 2001
From: six2dez <alexis.fernandez@visma.com>
Date: Mon, 14 Oct 2024 12:53:01 +0200
Subject: [PATCH 09/34] p1radup output filtered

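p1radup wrote its output straight to the terminal while the other
steps in urlchecks() are silenced. This applies the redirection idiom
used throughout the script (some_tool is a placeholder):

    some_tool args 2>>"$LOGFILE" >/dev/null

stderr is appended to the run log and stdout is discarded.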
---
 reconftw.sh | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/reconftw.sh b/reconftw.sh
index e443f607..eb991e90 100755
--- a/reconftw.sh
+++ b/reconftw.sh
@@ -2031,7 +2031,7 @@ function urlchecks() {
 			NUMOFLINES=$(cat .tmp/url_extract_uddup.txt 2>>"$LOGFILE" | anew webs/url_extract.txt | sed '/^$/d' | wc -l)
 			notification "${NUMOFLINES} new urls with params" info
 			end_func "Results are saved in $domain/webs/url_extract.txt" ${FUNCNAME[0]}
-			p1radup -i webs/url_extract.txt -o webs/url_extract_nodupes.txt -s
+			p1radup -i webs/url_extract.txt -o webs/url_extract_nodupes.txt -s  2>>"$LOGFILE" >/dev/null
 			if [[ $PROXY == true ]] && [[ -n $proxy_url ]] && [[ $(cat webs/url_extract.txt | wc -l) -le $DEEP_LIMIT2 ]]; then
 				notification "Sending urls to proxy" info
 				ffuf -mc all -w webs/url_extract.txt -u FUZZ -replay-proxy $proxy_url 2>>"$LOGFILE" >/dev/null

From c46277a486b12fd0dbeeab16844d11ae996f2228 Mon Sep 17 00:00:00 2001
From: six2dez <alexis.fernandez@visma.com>
Date: Fri, 25 Oct 2024 12:05:13 +0200
Subject: [PATCH 10/34] Rewrite most code for readability and error checks

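Highlights, as visible in the install.sh diff:

- fail fast when reconftw.cfg is missing instead of sourcing blindly
- gotools and repos declared as associative-array literals rather than
  one assignment per line
- failed Go tools and repositories are collected and reported at the
  end of install_tools()
- per-OS package installation moved behind install_system_packages()
- required downloads moved into a url/destination map; each entry is
  split on its single space, roughly:

      url="${downloads[$key]% *}"          # everything before the last space
      destination="${downloads[$key]#* }"  # everything after the first space

  which assumes neither the URL nor the destination contains spaces.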
---
 install.sh   | 1054 ++++++----
 reconftw.cfg |    3 +-
 reconftw.sh  | 5608 ++++++++++++++++++++++++++++++++++++--------------
 3 files changed, 4653 insertions(+), 2012 deletions(-)

diff --git a/install.sh b/install.sh
index 7b88e58b..c0473ab0 100755
--- a/install.sh
+++ b/install.sh
@@ -1,532 +1,696 @@
 #!/usr/bin/env bash
 
-. ./reconftw.cfg
+# Strict error handling (currently disabled)
+#IFS=$'\n\t'
 
-dir=${tools}
+# Load main configuration
+CONFIG_FILE="./reconftw.cfg"
+
+if [[ ! -f $CONFIG_FILE ]]; then
+	echo -e "${bred}[!] Config file reconftw.cfg not found.${reset}"
+	exit 1
+fi
+
+source "$CONFIG_FILE"
+
+# Initialize variables
+dir="${tools}"
 double_check=false
 
 # ARM Detection
 ARCH=$(uname -m)
-case $ARCH in
-amd64 | x86_64) IS_ARM="False" ;;
+case "$ARCH" in
+amd64 | x86_64)
+	IS_ARM="False"
+	;;
 arm64 | armv6l | aarch64)
 	IS_ARM="True"
-	RPI_4=$([[ $ARCH == "arm64" ]] && echo "True" || echo "False")
-	RPI_3=$([[ $ARCH == "arm64" ]] && echo "False" || echo "True")
+	if [[ $ARCH == "arm64" ]]; then
+		RPI_4="True"
+		RPI_3="False"
+	else
+		RPI_4="False"
+		RPI_3="True"
+	fi
+	;;
+*)
+	IS_ARM="False"
 	;;
 esac
 
-#Mac Osx Detecting
+# macOS Detection
 IS_MAC=$([[ $OSTYPE == "darwin"* ]] && echo "True" || echo "False")
 
-BASH_VERSION=$(bash --version | awk 'NR==1{print $4}' | cut -d'.' -f1)
-if [[ ${BASH_VERSION} -lt 4 ]]; then
-	printf "${bred} Your Bash version is lower than 4, please update${reset}\n"
-	printf "%s Your Bash version is lower than 4, please update%s\n" "${bred}" "${reset}" >&2
-	if [[ "True" == "$IS_MAC" ]]; then
-		printf "${yellow} For MacOS run 'brew install bash' and rerun installer in a new terminal${reset}\n\n"
-		exit 1
+# Check Bash version
+BASH_VERSION_NUM=$(bash --version | awk 'NR==1{print $4}' | cut -d'.' -f1)
+if [[ $BASH_VERSION_NUM -lt 4 ]]; then
+	echo -e "${bred}Your Bash version is lower than 4, please update.${reset}"
+	if [[ $IS_MAC == "True" ]]; then
+		echo -e "${yellow}For macOS, run 'brew install bash' and rerun the installer in a new terminal.${reset}"
 	fi
+	exit 1
 fi
 
-# Declaring Go tools and their installation commands
-declare -A gotools
-gotools["gf"]="go install -v github.com/tomnomnom/gf@latest"
-gotools["brutespray"]="go install -v github.com/x90skysn3k/brutespray@latest"
-gotools["qsreplace"]="go install -v github.com/tomnomnom/qsreplace@latest"
-gotools["ffuf"]="go install -v github.com/ffuf/ffuf/v2@latest"
-gotools["github-subdomains"]="go install -v github.com/gwen001/github-subdomains@latest"
-gotools["gitlab-subdomains"]="go install -v github.com/gwen001/gitlab-subdomains@latest"
-gotools["nuclei"]="go install -v github.com/projectdiscovery/nuclei/v3/cmd/nuclei@latest"
-gotools["anew"]="go install -v github.com/tomnomnom/anew@latest"
-gotools["notify"]="go install -v github.com/projectdiscovery/notify/cmd/notify@latest"
-gotools["unfurl"]="go install -v github.com/tomnomnom/unfurl@v0.3.0"
-gotools["httpx"]="go install -v github.com/projectdiscovery/httpx/cmd/httpx@latest"
-gotools["github-endpoints"]="go install -v github.com/gwen001/github-endpoints@latest"
-gotools["dnsx"]="go install -v github.com/projectdiscovery/dnsx/cmd/dnsx@latest"
-gotools["subjs"]="go install -v github.com/lc/subjs@latest"
-gotools["Gxss"]="go install -v github.com/KathanP19/Gxss@latest"
-gotools["katana"]="go install -v github.com/projectdiscovery/katana/cmd/katana@latest"
-gotools["crlfuzz"]="go install -v github.com/dwisiswant0/crlfuzz/cmd/crlfuzz@latest"
-gotools["dalfox"]="go install -v github.com/hahwul/dalfox/v2@latest"
-gotools["puredns"]="go install -v github.com/d3mondev/puredns/v2@latest"
-gotools["interactsh-client"]="go install -v github.com/projectdiscovery/interactsh/cmd/interactsh-client@latest"
-gotools["analyticsrelationships"]="go install -v github.com/Josue87/analyticsrelationships@latest"
-gotools["gotator"]="go install -v github.com/Josue87/gotator@latest"
-gotools["roboxtractor"]="go install -v github.com/Josue87/roboxtractor@latest"
-gotools["mapcidr"]="go install -v github.com/projectdiscovery/mapcidr/cmd/mapcidr@latest"
-gotools["cdncheck"]="go install -v github.com/projectdiscovery/cdncheck/cmd/cdncheck@latest"
-gotools["dnstake"]="go install -v github.com/pwnesia/dnstake/cmd/dnstake@latest"
-gotools["tlsx"]="go install -v github.com/projectdiscovery/tlsx/cmd/tlsx@latest"
-gotools["gitdorks_go"]="go install -v github.com/damit5/gitdorks_go@latest"
-gotools["smap"]="go install -v github.com/s0md3v/smap/cmd/smap@latest"
-gotools["dsieve"]="go install -v github.com/trickest/dsieve@master"
-gotools["inscope"]="go install -v github.com/tomnomnom/hacks/inscope@latest"
-gotools["enumerepo"]="go install -v github.com/trickest/enumerepo@latest"
-gotools["Web-Cache-Vulnerability-Scanner"]="go install -v github.com/Hackmanit/Web-Cache-Vulnerability-Scanner@latest"
-gotools["subfinder"]="go install -v github.com/projectdiscovery/subfinder/v2/cmd/subfinder@latest"
-gotools["hakip2host"]="go install -v github.com/hakluke/hakip2host@latest"
-gotools["gau"]="go install -v github.com/lc/gau/v2/cmd/gau@latest"
-gotools["mantra"]="go install -v github.com/MrEmpy/mantra@latest"
-gotools["crt"]="go install -v github.com/cemulus/crt@latest"
-gotools["s3scanner"]="go install -v github.com/sa7mon/s3scanner@latest"
-gotools["nmapurls"]="go install -v github.com/sdcampbell/nmapurls@latest"
-gotools["shortscan"]="go install -v github.com/bitquark/shortscan/cmd/shortscan@latest"
-gotools["sns"]="go install github.com/sw33tLie/sns@latest"
-gotools["ppmap"]="go install -v github.com/kleiton0x00/ppmap@latest"
-gotools["sourcemapper"]="go install -v github.com/denandz/sourcemapper@latest"
-gotools["jsluice"]="go install -v github.com/BishopFox/jsluice/cmd/jsluice@latest"
-
-# Declaring repositories and their paths
-declare -A repos
-repos["dorks_hunter"]="six2dez/dorks_hunter"
-repos["dnsvalidator"]="vortexau/dnsvalidator"
-repos["interlace"]="codingo/Interlace"
-repos["wafw00f"]="EnableSecurity/wafw00f"
-repos["gf"]="tomnomnom/gf"
-repos["Gf-Patterns"]="1ndianl33t/Gf-Patterns"
-repos["Corsy"]="s0md3v/Corsy"
-repos["CMSeeK"]="Tuhinshubhra/CMSeeK"
-repos["fav-up"]="pielco11/fav-up"
-repos["massdns"]="blechschmidt/massdns"
-repos["Oralyzer"]="r0075h3ll/Oralyzer"
-repos["testssl"]="drwetter/testssl.sh"
-repos["commix"]="commixproject/commix"
-repos["JSA"]="w9w/JSA"
-repos["CloudHunter"]="belane/CloudHunter"
-repos["ultimate-nmap-parser"]="shifty0g/ultimate-nmap-parser"
-repos["pydictor"]="LandGrey/pydictor"
-repos["gitdorks_go"]="damit5/gitdorks_go"
-repos["urless"]="xnl-h4ck3r/urless"
-repos["smuggler"]="defparam/smuggler"
-repos["Web-Cache-Vulnerability-Scanner"]="Hackmanit/Web-Cache-Vulnerability-Scanner"
-repos["regulator"]="cramppet/regulator"
-repos["ghauri"]="r0oth3x49/ghauri"
-repos["gitleaks"]="gitleaks/gitleaks"
-repos["trufflehog"]="trufflesecurity/trufflehog"
-repos["nomore403"]="devploit/nomore403"
-repos["SwaggerSpy"]="UndeadSec/SwaggerSpy"
-repos["LeakSearch"]="JoelGMSec/LeakSearch"
-repos["ffufPostprocessing"]="Damian89/ffufPostprocessing"
-repos["misconfig-mapper"]="intigriti/misconfig-mapper"
-repos["Spoofy"]="MattKeeley/Spoofy"
-repos["Waymore"]="xnl-h4ck3r/waymore"
-repos["xnLinkFinder"]="xnl-h4ck3r/xnLinkFinder"
-repos["porch-pirate"]="MandConsultingGroup/porch-pirate"
-repos["MetaFinder"]="Josue87/MetaFinder"
-repos["MetaFinder"]="Josue87/EmailFinder"
-
+# Declare Go tools and their installation commands
+declare -A gotools=(
+	["gf"]="go install -v github.com/tomnomnom/gf@latest"
+	["brutespray"]="go install -v github.com/x90skysn3k/brutespray@latest"
+	["qsreplace"]="go install -v github.com/tomnomnom/qsreplace@latest"
+	["ffuf"]="go install -v github.com/ffuf/ffuf/v2@latest"
+	["github-subdomains"]="go install -v github.com/gwen001/github-subdomains@latest"
+	["gitlab-subdomains"]="go install -v github.com/gwen001/gitlab-subdomains@latest"
+	["nuclei"]="go install -v github.com/projectdiscovery/nuclei/v3/cmd/nuclei@latest"
+	["anew"]="go install -v github.com/tomnomnom/anew@latest"
+	["notify"]="go install -v github.com/projectdiscovery/notify/cmd/notify@latest"
+	["unfurl"]="go install -v github.com/tomnomnom/unfurl@v0.3.0"
+	["httpx"]="go install -v github.com/projectdiscovery/httpx/cmd/httpx@latest"
+	["github-endpoints"]="go install -v github.com/gwen001/github-endpoints@latest"
+	["dnsx"]="go install -v github.com/projectdiscovery/dnsx/cmd/dnsx@latest"
+	["subjs"]="go install -v github.com/lc/subjs@latest"
+	["Gxss"]="go install -v github.com/KathanP19/Gxss@latest"
+	["katana"]="go install -v github.com/projectdiscovery/katana/cmd/katana@latest"
+	["crlfuzz"]="go install -v github.com/dwisiswant0/crlfuzz/cmd/crlfuzz@latest"
+	["dalfox"]="go install -v github.com/hahwul/dalfox/v2@latest"
+	["puredns"]="go install -v github.com/d3mondev/puredns/v2@latest"
+	["interactsh-client"]="go install -v github.com/projectdiscovery/interactsh/cmd/interactsh-client@latest"
+	["analyticsrelationships"]="go install -v github.com/Josue87/analyticsrelationships@latest"
+	["gotator"]="go install -v github.com/Josue87/gotator@latest"
+	["roboxtractor"]="go install -v github.com/Josue87/roboxtractor@latest"
+	["mapcidr"]="go install -v github.com/projectdiscovery/mapcidr/cmd/mapcidr@latest"
+	["cdncheck"]="go install -v github.com/projectdiscovery/cdncheck/cmd/cdncheck@latest"
+	["dnstake"]="go install -v github.com/pwnesia/dnstake/cmd/dnstake@latest"
+	["tlsx"]="go install -v github.com/projectdiscovery/tlsx/cmd/tlsx@latest"
+	["gitdorks_go"]="go install -v github.com/damit5/gitdorks_go@latest"
+	["smap"]="go install -v github.com/s0md3v/smap/cmd/smap@latest"
+	["dsieve"]="go install -v github.com/trickest/dsieve@master"
+	["inscope"]="go install -v github.com/tomnomnom/hacks/inscope@latest"
+	["enumerepo"]="go install -v github.com/trickest/enumerepo@latest"
+	["Web-Cache-Vulnerability-Scanner"]="go install -v github.com/Hackmanit/Web-Cache-Vulnerability-Scanner@latest"
+	["subfinder"]="go install -v github.com/projectdiscovery/subfinder/v2/cmd/subfinder@latest"
+	["hakip2host"]="go install -v github.com/hakluke/hakip2host@latest"
+	["gau"]="go install -v github.com/lc/gau/v2/cmd/gau@latest"
+	["mantra"]="go install -v github.com/MrEmpy/mantra@latest"
+	["crt"]="go install -v github.com/cemulus/crt@latest"
+	["s3scanner"]="go install -v github.com/sa7mon/s3scanner@latest"
+	["nmapurls"]="go install -v github.com/sdcampbell/nmapurls@latest"
+	["shortscan"]="go install -v github.com/bitquark/shortscan/cmd/shortscan@latest"
+	["sns"]="go install github.com/sw33tLie/sns@latest"
+	["ppmap"]="go install -v github.com/kleiton0x00/ppmap@latest"
+	["sourcemapper"]="go install -v github.com/denandz/sourcemapper@latest"
+	["jsluice"]="go install -v github.com/BishopFox/jsluice/cmd/jsluice@latest"
+)
+
+# Declare repositories and their paths
+declare -A repos=(
+	["dorks_hunter"]="six2dez/dorks_hunter"
+	["dnsvalidator"]="vortexau/dnsvalidator"
+	["interlace"]="codingo/Interlace"
+	["wafw00f"]="EnableSecurity/wafw00f"
+	["gf"]="tomnomnom/gf"
+	["Gf-Patterns"]="1ndianl33t/Gf-Patterns"
+	["Corsy"]="s0md3v/Corsy"
+	["CMSeeK"]="Tuhinshubhra/CMSeeK"
+	["fav-up"]="pielco11/fav-up"
+	["massdns"]="blechschmidt/massdns"
+	["Oralyzer"]="r0075h3ll/Oralyzer"
+	["testssl"]="drwetter/testssl.sh"
+	["commix"]="commixproject/commix"
+	["JSA"]="w9w/JSA"
+	["CloudHunter"]="belane/CloudHunter"
+	["ultimate-nmap-parser"]="shifty0g/ultimate-nmap-parser"
+	["pydictor"]="LandGrey/pydictor"
+	["gitdorks_go"]="damit5/gitdorks_go"
+	["urless"]="xnl-h4ck3r/urless"
+	["smuggler"]="defparam/smuggler"
+	["Web-Cache-Vulnerability-Scanner"]="Hackmanit/Web-Cache-Vulnerability-Scanner"
+	["regulator"]="cramppet/regulator"
+	["ghauri"]="r0oth3x49/ghauri"
+	["gitleaks"]="gitleaks/gitleaks"
+	["trufflehog"]="trufflesecurity/trufflehog"
+	["nomore403"]="devploit/nomore403"
+	["SwaggerSpy"]="UndeadSec/SwaggerSpy"
+	["LeakSearch"]="JoelGMSec/LeakSearch"
+	["ffufPostprocessing"]="Damian89/ffufPostprocessing"
+	["misconfig-mapper"]="intigriti/misconfig-mapper"
+	["Spoofy"]="MattKeeley/Spoofy"
+	["Waymore"]="xnl-h4ck3r/waymore"
+	["xnLinkFinder"]="xnl-h4ck3r/xnLinkFinder"
+	["porch-pirate"]="MandConsultingGroup/porch-pirate"
+	["MetaFinder"]="Josue87/MetaFinder"
+	["EmailFinder"]="Josue87/EmailFinder"
+)
+
+# Function to display the banner
 function banner() {
 	tput clear
-	printf "\n${bgreen}"
-	printf "  ██▀███  ▓█████  ▄████▄   ▒█████   ███▄    █   █████▒▄▄▄█████▓ █     █░\n"
-	printf " ▓██ ▒ ██▒▓█   ▀ ▒██▀ ▀█  ▒██▒  ██▒ ██ ▀█   █ ▓██   ▒ ▓  ██▒ ▓▒▓█░ █ ░█░\n"
-	printf " ▓██ ░▄█ ▒▒███   ▒▓█    ▄ ▒██░  ██▒▓██  ▀█ ██▒▒████ ░ ▒ ▓██░ ▒░▒█░ █ ░█ \n"
-	printf " ▒██▀▀█▄  ▒▓█  ▄ ▒▓▓▄ ▄██▒▒██   ██░▓██▒  ▐▌██▒░▓█▒  ░ ░ ▓██▓ ░ ░█░ █ ░█ \n"
-	printf " ░██▓ ▒██▒░▒████▒▒ ▓███▀ ░░ ████▓▒░▒██░   ▓██░░▒█░      ▒██▒ ░ ░░██▒██▓ \n"
-	printf " ░ ▒▓ ░▒▓░░░ ▒░ ░░ ░▒ ▒  ░░ ▒░▒░▒░ ░ ▒░   ▒ ▒  ▒ ░      ▒ ░░   ░ ▓░▒ ▒  \n"
-	printf "   ░▒ ░ ▒░ ░ ░  ░  ░  ▒     ░ ▒ ▒░ ░ ░░   ░ ▒░ ░          ░      ▒ ░ ░  \n"
-	printf "   ░░   ░    ░   ░        ░ ░ ░ ▒     ░   ░ ░  ░ ░      ░        ░   ░  \n"
-	printf "    ░        ░  ░░ ░          ░ ░           ░                      ░    \n"
-	printf "                 ░                                                      \n"
-	printf " ${reconftw_version}                                         by @six2dez\n"
+	cat <<"EOF"
+
+  ██▀███  ▓█████  ▄████▄   ▒█████   ███▄    █   █████▒▄▄▄█████▓ █     █░
+ ▓██ ▒ ██▒▓█   ▀ ▒██▀ ▀█  ▒██▒  ██▒ ██ ▀█   █ ▓██   ▒ ▓  ██▒ ▓▒▓█░ █ ░█░
+ ▓██ ░▄█ ▒▒███   ▒▓█    ▄ ▒██░  ██▒▓██  ▀█ ██▒▒████ ░ ▒ ▓██░ ▒░▒█░ █ ░█
+ ▒██▀▀█▄  ▒▓█  ▄ ▒▓▓▄ ▄██▒▒██   ██░▓██▒  ▐▌██▒░▓█▒  ░ ░ ▓██▓ ░ ░█░ █ ░█
+ ░██▓ ▒██▒░▒████▒▒ ▓███▀ ░░ ████▓▒░▒██░   ▓██░░▒█░      ▒██▒ ░ ░░██▒██▓
+ ░ ▒▓ ░▒▓░░░ ▒░ ░░ ░▒ ▒  ░░ ▒░▒░▒░ ░ ▒░   ▒ ▒  ▒ ░      ▒ ░░   ░ ▓░▒ ▒
+   ░▒ ░ ▒░ ░ ░  ░  ░  ▒     ░ ▒ ▒░ ░ ░░   ░ ▒░ ░          ░      ▒ ░ ░
+   ░░   ░    ░   ░        ░ ░ ░ ▒     ░   ░ ░  ░ ░      ░        ░   ░
+    ░        ░  ░░ ░          ░ ░           ░                      ░
+
+                 ${reconftw_version}                                         by @six2dez
+
+EOF
 }
 
-# This function installs various tools and repositories as per the configuration.
+# Function to install Go tools
 function install_tools() {
+	echo -e "${bblue}Running: Installing Golang tools (${#gotools[@]})${reset}\n"
 
-	eval pip3 install -I -r requirements.txt $DEBUG_STD
-
-	printf "${bblue} Running: Installing Golang tools (${#gotools[@]})${reset}\n\n"
-	go env -w GO111MODULE=auto
-	go_step=0
+	local go_step=0
+	local failed_tools=()
 	for gotool in "${!gotools[@]}"; do
-		go_step=$((go_step + 1))
+		((go_step++))
 		if [[ $upgrade_tools == "false" ]]; then
-			res=$(command -v "$gotool") && {
-				echo -e "[${yellow}SKIPPING${reset}] $gotool already installed in...${blue}${res}${reset}"
+			if command -v "$gotool" &>/dev/null; then
+				echo -e "[${yellow}SKIPPING${reset}] $gotool already installed at $(command -v "$gotool")"
 				continue
-			}
+			fi
 		fi
-		eval ${gotools[$gotool]} $DEBUG_STD
+
+		# Install the Go tool
+		eval "${gotools[$gotool]}" &>/dev/null
 		exit_status=$?
 		if [[ $exit_status -eq 0 ]]; then
-			printf "${yellow} $gotool installed (${go_step}/${#gotools[@]})${reset}\n"
+			echo -e "${yellow}$gotool installed (${go_step}/${#gotools[@]})${reset}"
 		else
-			printf "${red} Unable to install $gotool, try manually (${go_step}/${#gotools[@]})${reset}\n"
+			echo -e "${red}Unable to install $gotool, try manually (${go_step}/${#gotools[@]})${reset}"
+			failed_tools+=("$gotool")
 			double_check=true
 		fi
 	done
 
-	printf "${bblue}\n Running: Installing repositories (${#repos[@]})${reset}\n\n"
+	echo -e "\n${bblue}Running: Installing repositories (${#repos[@]})${reset}\n"
 
-	# Repos with special configs
-	eval git clone https://github.com/projectdiscovery/nuclei-templates ${NUCLEI_TEMPLATES_PATH} $DEBUG_STD
-	eval git clone https://github.com/geeknik/the-nuclei-templates.git ${NUCLEI_TEMPLATES_PATH}/extra_templates $DEBUG_STD
-	eval git clone https://github.com/projectdiscovery/fuzzing-templates ${tools}/fuzzing-templates $DEBUG_STD
-	eval nuclei -update-templates update-template-dir ${NUCLEI_TEMPLATES_PATH} $DEBUG_STD
-	cd "${dir}" || {
-		echo "Failed to cd to $dir in ${FUNCNAME[0]} @ line ${LINENO}"
-		exit 1
-	}
-	eval git clone --depth 1 https://github.com/sqlmapproject/sqlmap.git "${dir}"/sqlmap $DEBUG_STD
-	eval git clone --depth 1 https://github.com/drwetter/testssl.sh.git "${dir}"/testssl.sh $DEBUG_STD
-	eval $SUDO git clone https://gitlab.com/exploit-database/exploitdb /opt/exploitdb $DEBUG_STD
+	local repos_step=0
+	local failed_repos=()
 
-	# Standard repos installation
-	repos_step=0
 	for repo in "${!repos[@]}"; do
-		repos_step=$((repos_step + 1))
+		((repos_step++))
 		if [[ $upgrade_tools == "false" ]]; then
-			unset is_installed
-			unset is_need_dl
-			[[ $repo == "Gf-Patterns" ]] && is_need_dl=1
-			[[ $repo == "gf" ]] && is_need_dl=1
-			res=$(command -v "$repo") && is_installed=1
-			[[ -z $is_need_dl ]] && [[ -n $is_installed ]] && {
-				# HERE: not installed yet.
-				echo -e "[${yellow}SKIPPING${reset}] $repo already installed in...${blue}${res}${reset}"
+			if [[ -d "${dir}/${repo}" ]]; then
+				echo -e "[${yellow}SKIPPING${reset}] Repository $repo already cloned in ${dir}/${repo}"
 				continue
-			}
+			fi
 		fi
-		eval git clone --filter="blob:none" https://github.com/${repos[$repo]} "${dir}"/$repo $DEBUG_STD
-        eval cd "${dir}"/$repo $DEBUG_STD
-		eval git pull $DEBUG_STD
+		# Clone the repository
+		if [[ ! -d "${dir}/${repo}" || -z "$(ls -A "${dir}/${repo}")" ]]; then
+			git clone --filter="blob:none" "https://github.com/${repos[$repo]}" "${dir}/${repo}" #&>/dev/null
+			exit_status=$?
+			if [[ $exit_status -ne 0 ]]; then
+				echo -e "${red}Unable to clone repository $repo.${reset}"
+				failed_repos+=("$repo")
+				double_check=true
+				continue
+			fi
+		fi
+
+		# Navigate to the repository directory
+		cd "${dir}/${repo}" || {
+			echo -e "${red}Failed to navigate to directory '${dir}/${repo}'${reset}"
+			failed_repos+=("$repo")
+			double_check=true
+			continue
+		}
+
+		# Pull the latest changes
+		git pull &>/dev/null
 		exit_status=$?
-		if [[ $exit_status -eq 0 ]]; then
-			printf "${yellow} $repo installed (${repos_step}/${#repos[@]})${reset}\n"
-		else
-			printf "${red} Unable to install $repo, try manually (${repos_step}/${#repos[@]})${reset}\n"
+		if [[ $exit_status -ne 0 ]]; then
+			echo -e "${red}Failed to pull updates for repository $repo.${reset}"
+			failed_repos+=("$repo")
 			double_check=true
+			continue
 		fi
-		if ([[ -z $is_installed ]] && [[ $upgrade_tools == "false" ]]) || [[ $upgrade_tools == "true" ]]; then
-            if [[ -s "requirements.txt" ]]; then
-                eval $SUDO pip3 install -r requirements.txt $DEBUG_STD
-            fi
-            if [[ -s "setup.py" ]]; then
-                eval $SUDO pip3 install . $DEBUG_STD
-            fi
-            if [[ "massdns" == "$repo" ]]; then
-                eval make $DEBUG_STD && strip -s bin/massdns && eval $SUDO cp bin/massdns /usr/local/bin/ $DEBUG_ERROR
-            fi
-            if [[ "gitleaks" == "$repo" ]]; then
-                eval make build $DEBUG_STD && eval $SUDO cp ./gitleaks /usr/local/bin/ $DEBUG_ERROR
-            fi
-            if [[ "nomore403" == "$repo" ]]; then
-                eval go get $DEBUG_STD && eval go build $DEBUG_STD && eval chmod +x ./nomore403 $DEBUG_STD
-            fi
-			if [[ "ffufPostprocessing" == "$repo" ]]; then
-				eval git reset --hard origin/main $DEBUG_STD
-				eval git pull $DEBUG_STD
-				eval go build -o ffufPostprocessing main.go $DEBUG_STD && eval chmod +x ./ffufPostprocessing $DEBUG_STD
-			fi
-			if [[ "misconfig-mapper" == "$repo" ]]; then
-				eval git reset --hard origin/main $DEBUG_STD
-				eval git pull $DEBUG_STD
-				eval go build -o misconfig-mapper $DEBUG_STD && eval chmod +x ./misconfig-mapper $DEBUG_STD
-			fi
+
+		# Install dependencies if setup.py exists
+		if [[ -f "setup.py" ]]; then
+			eval "$SUDO pip3 install . $DEBUG_STD" &>/dev/null
 		fi
-		if [[ "gf" == "$repo" ]]; then
-            eval cp -r examples ~/.gf $DEBUG_ERROR
-        elif [[ "Gf-Patterns" == "$repo" ]]; then
-            eval mv ./*.json ~/.gf $DEBUG_ERROR
-        fi
-        cd "${dir}" || {
-			echo "Failed to cd to $dir in ${FUNCNAME[0]} @ line ${LINENO}"
+
+		# Special handling for certain repositories
+		case "$repo" in
+		"massdns")
+			make &>/dev/null && strip -s bin/massdns && "$SUDO" cp bin/massdns /usr/local/bin/ &>/dev/null
+			;;
+		"gitleaks")
+			make build &>/dev/null && "$SUDO" cp ./gitleaks /usr/local/bin/ &>/dev/null
+			;;
+		"nomore403")
+			go get &>/dev/null
+			go build &>/dev/null
+			chmod +x ./nomore403
+			;;
+		"ffufPostprocessing")
+			git reset --hard origin/main &>/dev/null
+			git pull &>/dev/null
+			go build -o ffufPostprocessing main.go &>/dev/null
+			chmod +x ./ffufPostprocessing
+			;;
+		"misconfig-mapper")
+			git reset --hard origin/main &>/dev/null
+			git pull &>/dev/null
+			go build -o misconfig-mapper &>/dev/null
+			chmod +x ./misconfig-mapper
+			;;
+		esac
+
+		# Copy gf patterns if applicable
+		if [[ $repo == "gf" ]]; then
+			cp -r examples ${HOME}/.gf &>/dev/null
+		elif [[ $repo == "Gf-Patterns" ]]; then
+			mv ./*.json ${HOME}/.gf &>/dev/null
+		fi
+
+		# Return to the main directory
+		cd "$dir" || {
+			echo -e "${red}Failed to navigate back to directory '$dir'.${reset}"
 			exit 1
 		}
+
+		echo -e "${yellow}$repo installed (${repos_step}/${#repos[@]})${reset}"
 	done
 
-	eval notify $DEBUG_STD
-	eval subfinder $DEBUG_STD
-	eval subfinder $DEBUG_STD
+	# Run notify once and subfinder twice so each generates its default config (as in the original script)
+	notify &>/dev/null
+	subfinder &>/dev/null
+	subfinder &>/dev/null
+
+	# Handle failed installations
+	if [[ ${#failed_tools[@]} -ne 0 ]]; then
+		echo -e "\n${red}Failed to install the following Go tools: ${failed_tools[*]}${reset}"
+	fi
+
+	if [[ ${#failed_repos[@]} -ne 0 ]]; then
+		echo -e "\n${red}Failed to clone or update the following repositories:\n${failed_repos[*]}${reset}"
+	fi
 }
 
-banner
-
-show_help() {
-    echo "Usage: $0 [OPTION]"
-    echo "Run the script with specified options."
-    echo ""
-    echo "  -h, --help       Display this help and exit."
-    echo "  --tools          Install the tools before running, useful for upgrading."
-	echo "                                                                          "
-    echo "  ****             Without any arguments, the script will update reconftw"
-    echo "                   and install all dependencies and requirements."
-    exit 0
+# Function to reset git proxy settings
+function reset_git_proxies() {
+	git config --global --unset http.proxy || true
+	git config --global --unset https.proxy || true
 }
 
-printf "\n${bgreen} reconFTW installer/updater script ${reset}\n\n"
-
-# Parse command-line arguments
-while [ $# -gt 0 ]; do
-    case "$1" in
-        -h|--help)
-            show_help
-            ;;
-        --tools)
-            install_tools
-            shift
-            ;;
-        *)
-            echo "Error: Invalid argument '$1'"
-            echo "Use -h or --help for usage information."
-            exit 1
-            ;;
-    esac
-done
-
-printf "${yellow} This may take time. So, go grab a coffee! ${reset}\n\n"
-
-if [[ $(id -u | grep -o '^0$') == "0" ]]; then
-	SUDO=""
-else
-	if sudo -n false 2>/dev/null; then
-		printf "${bred} Is strongly recommended to add your user to sudoers${reset}\n"
-		printf "${bred} This will avoid prompts for sudo password in the middle of the installation${reset}\n"
-		printf "${bred} And more important, in the middle of the scan (needed for nmap SYN scan)${reset}\n\n"
-		printf "${bred} echo \"${USERNAME}  ALL=(ALL:ALL) NOPASSWD: ALL\" > /etc/sudoers.d/reconFTW${reset}\n\n"
+# Function to check for updates
+function check_updates() {
+	echo -e "${bblue}Running: Looking for new reconFTW version${reset}\n"
+
+	if timeout 10 git fetch; then
+		BRANCH=$(git rev-parse --abbrev-ref HEAD)
+		HEADHASH=$(git rev-parse HEAD)
+		UPSTREAMHASH=$(git rev-parse "${BRANCH}@{upstream}")
+
+		if [[ $HEADHASH != "$UPSTREAMHASH" ]]; then
+			echo -e "${yellow}A new version is available. Updating...${reset}\n"
+			if git status --porcelain | grep -q 'reconftw.cfg$'; then
+				mv reconftw.cfg reconftw.cfg_bck
+				echo -e "${yellow}reconftw.cfg has been backed up to reconftw.cfg_bck${reset}\n"
+			fi
+			git reset --hard &>/dev/null
+			git pull &>/dev/null
+			echo -e "${bgreen}Updated! Running the new installer version...${reset}\n"
+		else
+			echo -e "${bgreen}reconFTW is already up to date!${reset}\n"
+		fi
+	else
+		echo -e "\n${bred}[!] Unable to check for updates.${reset}\n"
 	fi
-	SUDO="sudo"
-fi
+}
+
+# Function to install Golang
+function install_golang_version() {
+	local version="go1.20.7"
+	local latest_version
+	latest_version=$(curl -s https://go.dev/VERSION?m=text | head -1 || echo "go1.20.7")
+	if [[ $latest_version == g* ]]; then
+		version="$latest_version"
+	fi
+
+	echo -e "${bblue}Running: Installing/Updating Golang($version) ${reset}\n"
+
+	if [[ $install_golang == "true" ]]; then
+		if command -v go &>/dev/null && [[ $version == "$(go version | awk '{print $3}')" ]]; then
+			echo -e "${bgreen}Golang is already installed and up to date.${reset}\n"
+		else
+			"$SUDO" rm -rf /usr/local/go &>/dev/null || true
+
+			case "$ARCH" in
+			arm64 | aarch64)
+				if [[ $RPI_4 == "True" ]]; then
+					wget "https://dl.google.com/go/${version}.linux-arm64.tar.gz" -O "/tmp/${version}.linux-arm64.tar.gz" &>/dev/null
+					"$SUDO" tar -C /usr/local -xzf "/tmp/${version}.linux-arm64.tar.gz" &>/dev/null
+				elif [[ $RPI_3 == "True" ]]; then
+					wget "https://dl.google.com/go/${version}.linux-armv6l.tar.gz" -O "/tmp/${version}.linux-armv6l.tar.gz" &>/dev/null
+					"$SUDO" tar -C /usr/local -xzf "/tmp/${version}.linux-armv6l.tar.gz" &>/dev/null
+				fi
+				;;
+			*)
+				if [[ $IS_MAC == "True" ]]; then
+					if [[ $IS_ARM == "True" ]]; then
+						wget "https://dl.google.com/go/${version}.darwin-arm64.tar.gz" -O "/tmp/${version}.darwin-arm64.tar.gz" &>/dev/null
+						"$SUDO" tar -C /usr/local -xzf "/tmp/${version}.darwin-arm64.tar.gz" &>/dev/null
+					else
+						wget "https://dl.google.com/go/${version}.darwin-amd64.tar.gz" -O "/tmp/${version}.darwin-amd64.tar.gz" &>/dev/null
+						"$SUDO" tar -C /usr/local -xzf "/tmp/${version}.darwin-amd64.tar.gz" &>/dev/null
+					fi
+				else
+					wget "https://dl.google.com/go/${version}.linux-amd64.tar.gz" -O "/tmp/${version}.linux-amd64.tar.gz" &>/dev/null
+					"$SUDO" tar -C /usr/local -xzf "/tmp/${version}.linux-amd64.tar.gz" &>/dev/null
+				fi
+				;;
+			esac
+
+			"$SUDO" ln -sf /usr/local/go/bin/go /usr/local/bin/
+			export GOROOT=/usr/local/go
+			export GOPATH="${HOME}/go"
+			export PATH="$GOPATH/bin:$GOROOT/bin:$HOME/.local/bin:$PATH"
+
+			# Append Go environment variables to shell profile
+			cat <<EOF >>${HOME}/"${profile_shell}"
+
+# Golang environment variables
+export GOROOT=/usr/local/go
+export GOPATH=\$HOME/go
+export PATH=\$GOPATH/bin:\$GOROOT/bin:\$HOME/.local/bin:\$PATH
+EOF
+		fi
+	else
+		echo -e "${byellow}Golang will not be configured according to the user's preferences (install_golang=false in reconftw.cfg).${reset}\n"
+	fi
+
+	# Validate Go environment variables
+	if [[ -z ${GOPATH-} ]]; then
+		echo -e "${bred}GOPATH environment variable not detected. Add Golang environment variables to your \$HOME/.bashrc or \$HOME/.zshrc:${reset}"
+		echo -e "export GOROOT=/usr/local/go"
+		echo -e 'export GOPATH=$HOME/go'
+		echo -e "export PATH=\$GOPATH/bin:\$GOROOT/bin:\$PATH\n"
+		exit 1
+	fi
+
+	if [[ -z ${GOROOT-} ]]; then
+		echo -e "${bred}GOROOT environment variable not detected. Add Golang environment variables to your \$HOME/.bashrc or \$HOME/.zshrc:${reset}"
+		echo -e "export GOROOT=/usr/local/go"
+		echo -e 'export GOPATH=$HOME/go'
+		echo -e "export PATH=\$GOPATH/bin:\$GOROOT/bin:\$PATH\n"
+		exit 1
+	fi
+}
 
-install_apt() {
-	eval $SUDO apt update -y $DEBUG_STD
-	eval $SUDO DEBIAN_FRONTEND="noninteractive" apt install chromium-browser -y $DEBUG_STD
-	eval $SUDO DEBIAN_FRONTEND="noninteractive" apt install chromium -y $DEBUG_STD
-	eval $SUDO DEBIAN_FRONTEND="noninteractive" apt install python3 python3-pip python3-virtualenv build-essential gcc cmake ruby whois git curl libpcap-dev wget zip python3-dev pv dnsutils libssl-dev libffi-dev libxml2-dev libxslt1-dev zlib1g-dev nmap jq apt-transport-https lynx medusa xvfb libxml2-utils procps bsdmainutils libdata-hexdump-perl libnss3 libatk1.0-0 libatk-bridge2.0-0 libcups2 libxkbcommon-x11-0 libxcomposite-dev libxdamage1 libxrandr2 libgbm-dev libpangocairo-1.0-0 libasound2 -y $DEBUG_STD
+# Function to install system packages based on OS
+function install_system_packages() {
+
+	if [[ -f /etc/debian_version ]]; then
+		install_apt
+	elif [[ -f /etc/redhat-release ]]; then
+		install_yum
+	elif [[ -f /etc/arch-release ]]; then
+		install_pacman
+	elif [[ $IS_MAC == "True" ]]; then
+		install_brew
+	elif [[ -f /etc/os-release ]]; then
+		install_yum # Assuming RedHat-based
+	else
+		echo -e "${bred}[!] Unsupported OS. Please install dependencies manually.${reset}"
+		exit 1
+	fi
+}
+
+# Function to install required packages for Debian-based systems
+function install_apt() {
+	"$SUDO" apt update -y &>/dev/null
+	"$SUDO" DEBIAN_FRONTEND="noninteractive" apt install -y chromium-browser python3 python3-pip python3-virtualenv build-essential gcc cmake ruby whois git curl libpcap-dev wget zip python3-dev pv dnsutils libssl-dev libffi-dev libxml2-dev libxslt1-dev zlib1g-dev nmap jq apt-transport-https lynx medusa xvfb libxml2-utils procps bsdmainutils libdata-hexdump-perl libnss3 libatk1.0-0 libatk-bridge2.0-0 libcups2 libxkbcommon-x11-0 libxcomposite-dev libxdamage1 libxrandr2 libgbm-dev libpangocairo-1.0-0 libasound2 &>/dev/null
 	curl https://sh.rustup.rs -sSf | sh -s -- -y >/dev/null 2>&1
-	eval source "${HOME}/.cargo/env $DEBUG_STD"
-	eval cargo install ripgen $DEBUG_STD
+	source "${HOME}/.cargo/env"
+	cargo install ripgen &>/dev/null
 }
 
-install_brew() {
-	if brew --version &>/dev/null; then
-		printf "${bgreen} brew is already installed ${reset}\n\n"
+# Function to install required packages for macOS
+function install_brew() {
+	if command -v brew &>/dev/null; then
+		echo -e "${bgreen}brew is already installed.${reset}\n"
 	else
 		/bin/bash -c "$(curl -fsSL https://raw.githubusercontent.com/Homebrew/install/HEAD/install.sh)"
 	fi
-	eval brew update -$DEBUG_STD
-	eval brew install --cask chromium $DEBUG_STD
-	eval brew install bash coreutils python massdns jq gcc cmake ruby git curl libpcap-dev wget zip python3-dev pv dnsutils whois libssl-dev libffi-dev libxml2-dev libxslt1-dev zlib1g-dev nmap jq apt-transport-https lynx medusa xvfb libxml2-utils libdata-hexdump-perl gnu-getopt $DEBUG_STD
-	export PATH="/opt/homebrew/opt/gnu-getopt/bin:$PATH"
-	echo 'export PATH="/opt/homebrew/opt/gnu-getopt/bin:$PATH"' >>~/.zshrc
-	brew install rustup
-	rustup-init
-	eval cargo install ripgen $DEBUG_STD
+	brew update &>/dev/null
+	brew install --cask chromium &>/dev/null
+	brew install bash coreutils python massdns jq gcc cmake ruby git curl libpcap-dev wget zip python3-dev pv dnsutils whois libssl-dev libffi-dev libxml2-dev libxslt-dev zlib libnss3 atk bridge2.0 cups xkbcommon xcomposite xdamage xrandr gbm pangocairo alsa libxml2-utils &>/dev/null
+	brew install rustup &>/dev/null
+	rustup-init -y &>/dev/null
+	cargo install ripgen &>/dev/null
 }
 
-install_yum() {
-	eval $SUDO yum groupinstall "Development Tools" -y $DEBUG_STD
-	eval $SUDO yum install python3 python3-pip gcc cmake ruby git curl libpcap-dev wget whois zip python3-devel pv bind-utils libopenssl-devel libffi-devel libxml2-devel libxslt-devel zlib-devel nmap jq lynx medusa xorg-x11-server-xvfb -y $DEBUG_STD
+# Function to install required packages for RedHat-based systems
+function install_yum() {
+	"$SUDO" yum groupinstall "Development Tools" -y &>/dev/null
+	"$SUDO" yum install -y python3 python3-pip gcc cmake ruby git curl libpcap whois wget zip pv bind-utils openssl-devel libffi-devel libxml2-devel libxslt-devel zlib-devel nmap jq lynx medusa xorg-x11-server-xvfb &>/dev/null
 	curl https://sh.rustup.rs -sSf | sh -s -- -y >/dev/null 2>&1
-	eval source "${HOME}/.cargo/env $DEBUG_STD"
-	eval cargo install ripgen $DEBUG_STD
+	source "${HOME}/.cargo/env"
+	cargo install ripgen &>/dev/null
 }
 
-install_pacman() {
-	eval $SUDO pacman -Sy install python python-pip base-devel gcc cmake ruby git curl libpcap whois wget zip pv bind openssl libffi libxml2 libxslt zlib nmap jq lynx medusa xorg-server-xvfb -y $DEBUG_STD
+# Function to install required packages for Arch-based systems
+function install_pacman() {
+	"$SUDO" pacman -Sy --noconfirm python python-pip base-devel gcc cmake ruby git curl libpcap whois wget zip pv bind openssl libffi libxml2 libxslt zlib nmap jq lynx medusa xorg-server-xvfb &>/dev/null
 	curl https://sh.rustup.rs -sSf | sh -s -- -y >/dev/null 2>&1
-	eval source "${HOME}/.cargo/env $DEBUG_STD"
-	eval cargo install ripgen $DEBUG_STD
+	source "${HOME}/.cargo/env"
+	cargo install ripgen &>/dev/null
 }
 
-eval git config --global --unset http.proxy $DEBUG_STD
-eval git config --global --unset https.proxy $DEBUG_STD
+# Function to perform initial setup
+function initial_setup() {
+	banner
+	reset_git_proxies
 
-printf "${bblue} Running: Looking for new reconFTW version${reset}\n\n"
+	echo -e "${bblue}Running: Checking for updates${reset}\n"
+	check_updates
 
-timeout 10 git fetch
-exit_status=$?
-if [[ ${exit_status} -eq 0 ]]; then
+	echo -e "${bblue}Running: Installing system packages${reset}\n"
+	install_system_packages
 
-	BRANCH=$(git rev-parse --abbrev-ref HEAD)
-	HEADHASH=$(git rev-parse HEAD)
-	UPSTREAMHASH=$(git rev-parse "${BRANCH}@{upstream}")
+	install_golang_version
 
-	if [[ $HEADHASH != "$UPSTREAMHASH" ]]; then
-		printf "${yellow} There is a new version, updating...${reset}\n\n"
-		if git status --porcelain | grep -q 'reconftw.cfg$'; then
-			mv reconftw.cfg reconftw.cfg_bck
-			printf "${yellow} reconftw.cfg has been backed up in reconftw.cfg_bck${reset}\n\n"
-		fi
-		eval git reset --hard $DEBUG_STD
-		eval git pull $DEBUG_STD
-		printf "${bgreen} Updated! Running new installer version...${reset}\n\n"
+	echo -e "${bblue}Running: Installing Python requirements${reset}\n"
+	mkdir -p ${HOME}/.gf
+	mkdir -p "$tools"
+	mkdir -p ${HOME}/.config/notify/
+	mkdir -p ${HOME}/.config/nuclei/
+	touch "${dir}/.github_tokens"
+	touch "${dir}/.gitlab_tokens"
+
+	wget -N -c https://bootstrap.pypa.io/get-pip.py -O /tmp/get-pip.py &>/dev/null
+	python3 /tmp/get-pip.py &>/dev/null
+	rm -f /tmp/get-pip.py
+
+	install_tools
+
+	# Repositories with special configurations
+	printf "${bblue}\nRunning: Configuring special repositories${reset}\n"
+
+	# Nuclei Templates
+	if [[ ! -d ${NUCLEI_TEMPLATES_PATH} ]]; then
+		#printf "${yellow}Cloning Nuclei templates...${reset}\n"
+		eval git clone https://github.com/projectdiscovery/nuclei-templates.git "${NUCLEI_TEMPLATES_PATH}" $DEBUG_STD
+		eval git clone https://github.com/geeknik/the-nuclei-templates.git "${NUCLEI_TEMPLATES_PATH}/extra_templates" $DEBUG_STD
+		eval git clone https://github.com/projectdiscovery/fuzzing-templates ${tools}/fuzzing-templates $DEBUG_STD
+		eval nuclei -update-templates update-template-dir "${NUCLEI_TEMPLATES_PATH}" $DEBUG_STD
 	else
-		printf "${bgreen} reconFTW is already up to date!${reset}\n\n"
+		#printf "${yellow}Updating Nuclei templates...${reset}\n"
+		eval git -C "${NUCLEI_TEMPLATES_PATH}" pull $DEBUG_STD
+		eval git -C "${NUCLEI_TEMPLATES_PATH}/extra_templates" pull $DEBUG_STD
+		eval git -C "${tools}/fuzzing-templates" pull $DEBUG_STD
 	fi
-else
-	printf "\n${bred} Unable to check updates ${reset}\n\n"
-fi
 
-printf "${bblue} Running: Installing system packages ${reset}\n\n"
-if [[ -f /etc/debian_version ]]; then
-    install_apt
-elif [[ -f /etc/redhat-release ]]; then
-    install_yum
-elif [[ -f /etc/arch-release ]]; then
-    install_pacman
-elif [[ "True" == "$IS_MAC" ]]; then
-    install_brew
-elif [[ -f /etc/os-release ]]; then
-	install_yum #/etc/os-release fall in yum for some RedHat and Amazon Linux instances
-fi
+	# sqlmap
+	if [[ ! -d "${dir}/sqlmap" ]]; then
+		#printf "${yellow}Cloning sqlmap...${reset}\n"
+		eval git clone --depth 1 https://github.com/sqlmapproject/sqlmap.git "${dir}/sqlmap" $DEBUG_STD
+	else
+		#printf "${yellow}Updating sqlmap...${reset}\n"
+		eval git -C "${dir}/sqlmap" pull $DEBUG_STD
+	fi
 
-# Installing latest Golang version
-version=$(curl -L -s https://golang.org/VERSION?m=text | head -1)
-[[ $version == g* ]] || version="go1.20.7"
-
-printf "${bblue} Running: Installing/Updating Golang ${reset}\n\n"
-if [[ $install_golang == "true" ]]; then
-    if [[ $(eval type go $DEBUG_ERROR | grep -o 'go is') == "go is" ]] && [[ $version == $(go version | cut -d " " -f3) ]]; then
-        printf "${bgreen} Golang is already installed and updated ${reset}\n\n"
-    else
-        eval $SUDO rm -rf /usr/local/go $DEBUG_STD
-        if [[ "True" == "$IS_ARM" ]]; then
-            if [[ "True" == "$RPI_3" ]]; then
-                eval wget "https://dl.google.com/go/${version}.linux-armv6l.tar.gz" -O /tmp/${version}.linux-armv6l.tar.gz $DEBUG_STD
-                eval $SUDO tar -C /usr/local -xzf /tmp/"${version}.linux-armv6l.tar.gz" $DEBUG_STD
-            elif [[ "True" == "$RPI_4" ]]; then
-                eval wget "https://dl.google.com/go/${version}.linux-arm64.tar.gz" -O /tmp/${version}.linux-arm64.tar.gz $DEBUG_STD
-                eval $SUDO tar -C /usr/local -xzf /tmp/"${version}.linux-arm64.tar.gz" $DEBUG_STD
-            fi
-        elif [[ "True" == "$IS_MAC" ]]; then
-            if [[ "True" == "$IS_ARM" ]]; then
-                eval wget "https://dl.google.com/go/${version}.darwin-arm64.tar.gz" -O /tmp/${version}.darwin-arm64.tar.gz $DEBUG_STD
-                eval $SUDO tar -C /usr/local -xzf /tmp/"${version}.darwin-arm64.tar.gz" $DEBUG_STD
-            else
-                eval wget "https://dl.google.com/go/${version}.darwin-amd64.tar.gz" -O /tmp/${version}.darwin-amd64.tar.gz $DEBUG_STD
-                eval $SUDO tar -C /usr/local -xzf /tmp/"${version}.darwin-amd64.tar.gz" $DEBUG_STD
-            fi
-        else
-            eval wget "https://dl.google.com/go/${version}.linux-amd64.tar.gz" -O /tmp/${version}.linux-amd64.tar.gz $DEBUG_STD
-            eval $SUDO tar -C /usr/local -xzf /tmp/"${version}.linux-amd64.tar.gz" $DEBUG_STD
-        fi
-        eval $SUDO ln -sf /usr/local/go/bin/go /usr/local/bin/
-        #rm -rf $version*
-        export GOROOT=/usr/local/go
-        export GOPATH=${HOME}/go
-        export PATH=$GOPATH/bin:$GOROOT/bin:${HOME}/.local/bin:$PATH
-        cat <<EOF >>~/"${profile_shell}"
-
-# Golang vars
-export GOROOT=/usr/local/go
-export GOPATH=\$HOME/go
-export PATH=\$GOPATH/bin:\$GOROOT/bin:\$HOME/.local/bin:\$PATH
-EOF
+	# testssl.sh
+	if [[ ! -d "${dir}/testssl.sh" ]]; then
+		#printf "${yellow}Cloning testssl.sh...${reset}\n"
+		eval git clone --depth 1 https://github.com/drwetter/testssl.sh.git "${dir}/testssl.sh" $DEBUG_STD
+	else
+		#printf "${yellow}Updating testssl.sh...${reset}\n"
+		eval git -C "${dir}/testssl.sh" pull $DEBUG_STD
 	fi
-else
-	printf "${byellow} Golang will not be configured according to the user's prefereneces (reconftw.cfg install_golang var)${reset}\n"
-fi
 
-[ -n "$GOPATH" ] || {
-	printf "${bred} GOPATH env var not detected, add Golang env vars to your \$HOME/.bashrc or \$HOME/.zshrc:\n\n export GOROOT=/usr/local/go\n export GOPATH=\$HOME/go\n export PATH=\$GOPATH/bin:\$GOROOT/bin:\$PATH\n\n"
-	exit 1
-}
-[ -n "$GOROOT" ] || {
-	printf "${bred} GOROOT env var not detected, add Golang env vars to your \$HOME/.bashrc or \$HOME/.zshrc:\n\n export GOROOT=/usr/local/go\n export GOPATH=\$HOME/go\n export PATH=\$GOPATH/bin:\$GOROOT/bin:\$PATH\n\n"
-	exit 1
-}
+	# massdns
+	if [[ ! -d "${dir}/massdns" ]]; then
+		#printf "${yellow}Cloning and compiling massdns...${reset}\n"
+		eval git clone https://github.com/blechschmidt/massdns.git "${dir}/massdns" $DEBUG_STD
+		eval make -C "${dir}/massdns" $DEBUG_STD
+		eval strip -s "${dir}/massdns/bin/massdns" $DEBUG_ERROR
+		eval $SUDO cp "${dir}/massdns/bin/massdns" /usr/local/bin/ $DEBUG_ERROR
+	else
+		#printf "${yellow}Updating massdns...${reset}\n"
+		eval git -C "${dir}/massdns" pull $DEBUG_STD
+	fi
 
-printf "${bblue} Running: Installing requirements ${reset}\n\n"
-
-mkdir -p ~/.gf
-mkdir -p $tools
-mkdir -p ~/.config/notify/
-mkdir -p ~/.config/nuclei/
-touch "${dir}"/.github_tokens
-touch "${dir}"/.gitlab_tokens
-
-eval wget -N -c https://bootstrap.pypa.io/get-pip.py $DEBUG_STD && eval python3 get-pip.py $DEBUG_STD
-eval rm -f get-pip.py $DEBUG_STD
-
-install_tools
-
-printf "${bblue}\n Running: Downloading required files ${reset}\n\n"
-## Downloads
-[[ ! -f ~/.config/notify/provider-config.yaml ]] && wget -q -O ~/.config/notify/provider-config.yaml https://gist.githubusercontent.com/six2dez/23a996bca189a11e88251367e6583053/raw
-#wget -q -O - https://raw.githubusercontent.com/devanshbatham/ParamSpider/master/gf_profiles/potential.json > ~/.gf/potential.json - Removed
-wget -q -O - https://raw.githubusercontent.com/m4ll0k/Bug-Bounty-Toolz/master/getjswords.py >${tools}/getjswords.py
-wget -q -O - https://raw.githubusercontent.com/n0kovo/n0kovo_subdomains/main/n0kovo_subdomains_huge.txt >${subs_wordlist_big}
-wget -q -O - https://gist.githubusercontent.com/six2dez/ae9ed7e5c786461868abd3f2344401b6/raw/trusted_resolvers.txt >${resolvers_trusted}
-wget -q -O - https://raw.githubusercontent.com/trickest/resolvers/main/resolvers.txt >${resolvers}
-wget -q -O - https://gist.github.com/six2dez/a307a04a222fab5a57466c51e1569acf/raw >${subs_wordlist}
-wget -q -O - https://gist.github.com/six2dez/ffc2b14d283e8f8eff6ac83e20a3c4b4/raw >${tools}/permutations_list.txt
-wget -q -O - https://raw.githubusercontent.com/six2dez/OneListForAll/main/onelistforallmicro.txt >${fuzz_wordlist}
-wget -q -O - https://gist.githubusercontent.com/six2dez/a89a0c7861d49bb61a09822d272d5395/raw >${lfi_wordlist}
-wget -q -O - https://gist.githubusercontent.com/six2dez/ab5277b11da7369bf4e9db72b49ad3c1/raw >${ssti_wordlist}
-wget -q -O - https://gist.github.com/six2dez/d62ab8f8ffd28e1c206d401081d977ae/raw >${tools}/headers_inject.txt
-wget -q -O - https://gist.githubusercontent.com/six2dez/6e2d9f4932fd38d84610eb851014b26e/raw >${tools}/axiom_config.sh
-eval $SUDO chmod +x ${tools}/axiom_config.sh
-
-## Last check
-if [[ $double_check == "true" ]]; then
-	printf "${bblue} Running: Double check for installed tools ${reset}\n\n"
-	go_step=0
-	for gotool in "${!gotools[@]}"; do
-		go_step=$((go_step + 1))
-		eval type -P $gotool $DEBUG_STD || { eval ${gotools[$gotool]} $DEBUG_STD; }
-		exit_status=$?
-	done
-	repos_step=0
-	for repo in "${!repos[@]}"; do
-		repos_step=$((repos_step + 1))
-		eval cd "${dir}"/$repo $DEBUG_STD || { eval git clone https://github.com/${repos[$repo]} "${dir}"/$repo $DEBUG_STD && cd "${dir}"/$repo || {
-			echo "Failed to cd directory '$dir'"
-			exit 1
-		}; }
-		eval git pull $DEBUG_STD
-		exit_status=$?
-        if [[ -s "setup.py" ]]; then
-            eval $SUDO python3 setup.py install $DEBUG_STD
-        fi
-        if [[ "massdns" == "$repo" ]]; then
-            eval make $DEBUG_STD && strip -s bin/massdns && eval $SUDO cp bin/massdns /usr/local/bin/ $DEBUG_ERROR
-        elif [[ "gf" == "$repo" ]]; then
-            eval cp -r examples ~/.gf $DEBUG_ERROR
-        elif [[ "Gf-Patterns" == "$repo" ]]; then
-            eval mv ./*.json ~/.gf $DEBUG_ERROR
-        elif [[ "trufflehog" == "$repo" ]]; then
-            eval go install $DEBUG_STD
-        fi
-        cd "${dir}" || {
-			echo "Failed to cd to $dir in ${FUNCNAME[0]} @ line ${LINENO}"
-			exit 1
+	# Interlace
+	if [[ ! -d "${dir}/interlace" ]]; then
+		#printf "${yellow}Cloning Interlace...${reset}\n"
+		eval git clone https://github.com/codingo/Interlace.git "${dir}/interlace" $DEBUG_STD
+		eval cd "${dir}/interlace" && eval $SUDO python3 setup.py install $DEBUG_STD
+	else
+		#printf "${yellow}Updating Interlace...${reset}\n"
+		eval git -C "${dir}/interlace" pull $DEBUG_STD
+	fi
+
+	# wafw00f
+	if [[ ! -d "${dir}/wafw00f" ]]; then
+		#printf "${yellow}Cloning wafw00f...${reset}\n"
+		eval git clone https://github.com/EnableSecurity/wafw00f.git "${dir}/wafw00f" $DEBUG_STD
+		eval cd "${dir}/wafw00f" && eval $SUDO python3 setup.py install $DEBUG_STD
+	else
+		#printf "${yellow}Updating wafw00f...${reset}\n"
+		eval git -C "${dir}/wafw00f" pull $DEBUG_STD
+	fi
+
+	# gf patterns
+	if [[ ! -d "$HOME/.gf" ]]; then
+		#printf "${yellow}Installing gf patterns...${reset}\n"
+		eval git clone https://github.com/tomnomnom/gf.git "${dir}/gf" $DEBUG_STD
+		eval cp -r "${dir}/gf/examples" ~/.gf $DEBUG_ERROR
+		eval git clone https://github.com/1ndianl33t/Gf-Patterns "${dir}/Gf-Patterns" $DEBUG_STD
+		eval cp "${dir}/Gf-Patterns"/*.json ~/.gf/ $DEBUG_ERROR
+	else
+		#printf "${yellow}Updating gf patterns...${reset}\n"
+		eval git -C "${dir}/Gf-Patterns" pull $DEBUG_STD
+	fi
+
+	echo -e "\n${bblue}Running: Downloading required files${reset}\n"
+
+	# Download required files with error handling
+	declare -A downloads=(
+		["notify_provider_config"]="https://gist.githubusercontent.com/six2dez/23a996bca189a11e88251367e6583053/raw ${HOME}/.config/notify/provider-config.yaml"
+		["getjswords"]="https://raw.githubusercontent.com/m4ll0k/Bug-Bounty-Toolz/master/getjswords.py ${tools}/getjswords.py"
+		["subdomains_huge"]="https://raw.githubusercontent.com/n0kovo/n0kovo_subdomains/main/n0kovo_subdomains_huge.txt ${subs_wordlist_big}"
+		["trusted_resolvers"]="https://gist.githubusercontent.com/six2dez/ae9ed7e5c786461868abd3f2344401b6/raw ${resolvers_trusted}"
+		["resolvers"]="https://raw.githubusercontent.com/trickest/resolvers/main/resolvers.txt ${resolvers}"
+		["subs_wordlist"]="https://gist.github.com/six2dez/a307a04a222fab5a57466c51e1569acf/raw ${subs_wordlist}"
+		["permutations_list"]="https://gist.github.com/six2dez/ffc2b14d283e8f8eff6ac83e20a3c4b4/raw ${tools}/permutations_list.txt"
+		["fuzz_wordlist"]="https://raw.githubusercontent.com/six2dez/OneListForAll/main/onelistforallmicro.txt ${fuzz_wordlist}"
+		["lfi_wordlist"]="https://gist.githubusercontent.com/six2dez/a89a0c7861d49bb61a09822d272d5395/raw ${lfi_wordlist}"
+		["ssti_wordlist"]="https://gist.githubusercontent.com/six2dez/ab5277b11da7369bf4e9db72b49ad3c1/raw ${ssti_wordlist}"
+		["headers_inject"]="https://gist.github.com/six2dez/d62ab8f8ffd28e1c206d401081d977ae/raw ${tools}/headers_inject.txt"
+		["axiom_config"]="https://gist.githubusercontent.com/six2dez/6e2d9f4932fd38d84610eb851014b26e/raw ${tools}/axiom_config.sh"
+	)
+
+	for key in "${!downloads[@]}"; do
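+		# Each entry packs "URL DESTINATION" into one string: ${var% *} keeps
+		# everything before the last space (the URL) and ${var#* } keeps
+		# everything after the first space (the destination path)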
+		url="${downloads[$key]% *}"
+		destination="${downloads[$key]#* }"
+		wget -q -O "$destination" "$url" || {
+			echo -e "${red}[!] Failed to download $key from $url.${reset}"
+			continue
 		}
 	done
-fi
 
-printf "${bblue} Running: Performing last configurations ${reset}\n\n"
-## Last steps
-if [[ $generate_resolvers == true ]]; then
-    if [[ ! -s $resolvers ]] || [[ $(find "$resolvers" -mtime +1 -print) ]]; then
-        printf "${reset}\n\nChecking resolvers lists...\n Accurate resolvers are the key to great results\n This may take around 10 minutes if it's not updated\n\n"
-        eval rm -f $resolvers 2>>"${LOGFILE}"
-        dnsvalidator -tL https://public-dns.info/nameservers.txt -threads $DNSVALIDATOR_THREADS -o $resolvers &>/dev/null
-        dnsvalidator -tL https://raw.githubusercontent.com/blechschmidt/massdns/master/lists/resolvers.txt -threads $DNSVALIDATOR_THREADS -o tmp_resolvers &>/dev/null
-        [[ -s "tmp_resolvers" ]] && cat tmp_resolvers | anew -q $resolvers
-        [[ -s "tmp_resolvers" ]] && rm -f tmp_resolvers &>/dev/null
-        [[ ! -s $resolvers ]] && wget -q -O - https://raw.githubusercontent.com/trickest/resolvers/main/resolvers.txt >${resolvers}
-        [[ ! -s $resolvers_trusted ]] && wget -q -O - https://gist.githubusercontent.com/six2dez/ae9ed7e5c786461868abd3f2344401b6/raw/trusted_resolvers.txt >${resolvers_trusted}
-		printf "${yellow} Resolvers updated\n ${reset}\n\n"
-	fi
-	generate_resolvers=false
-else
-	[[ ! -s $resolvers ]] || if [[ $(find "$resolvers" -mtime +1 -print) ]]; then
-		${reset}"\n\nChecking resolvers lists...\n Accurate resolvers are the key to great results\n Downloading new resolvers ${reset}\n\n"
-		wget -q -O - https://raw.githubusercontent.com/trickest/resolvers/main/resolvers.txt >${resolvers}
-		wget -q -O - https://gist.githubusercontent.com/six2dez/ae9ed7e5c786461868abd3f2344401b6/raw/trusted_resolvers.txt >${resolvers_trusted}
-		printf "${yellow} Resolvers updated\n ${reset}\n\n"
+	# Make axiom_config.sh executable
+	chmod +x "${tools}/axiom_config.sh" || {
+		echo -e "${red}[!] Failed to make axiom_config.sh executable.${reset}"
+	}
+
+	echo -e "${bblue}Running: Performing last configurations${reset}\n"
+
+	# Update resolvers if generate_resolvers is true
+	if [[ $generate_resolvers == true ]]; then
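+		# Refresh when the list is missing/empty (! -s) or stale: find -mtime +1
+		# prints the path only for files not modified within the last day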
+		if [[ ! -s $resolvers || $(find "$resolvers" -mtime +1 -print) ]]; then
+			echo -e "${yellow}Checking resolvers lists...\nAccurate resolvers are the key to great results.\nThis may take around 10 minutes if it's not updated.${reset}\n"
+			rm -f "$resolvers" &>/dev/null
+			dnsvalidator -tL https://public-dns.info/nameservers.txt -threads "$DNSVALIDATOR_THREADS" -o "$resolvers" &>/dev/null
+			dnsvalidator -tL https://raw.githubusercontent.com/blechschmidt/massdns/master/lists/resolvers.txt -threads "$DNSVALIDATOR_THREADS" -o tmp_resolvers &>/dev/null
+
+			if [[ -s "tmp_resolvers" ]]; then
+				cat tmp_resolvers | anew -q "$resolvers"
+				rm -f tmp_resolvers &>/dev/null
+			fi
+
+			[[ ! -s $resolvers ]] && wget -q -O "$resolvers" https://raw.githubusercontent.com/trickest/resolvers/main/resolvers.txt
+			[[ ! -s $resolvers_trusted ]] && wget -q -O "$resolvers_trusted" https://gist.githubusercontent.com/six2dez/ae9ed7e5c786461868abd3f2344401b6/raw/trusted_resolvers.txt
+			echo -e "${yellow}Resolvers updated.${reset}\n"
+		fi
+		generate_resolvers=false
+	else
+		if [[ -s $resolvers && $(find "$resolvers" -mtime +1 -print) ]]; then
+			echo -e "${yellow}Checking resolvers lists...\nAccurate resolvers are the key to great results.\nDownloading new resolvers.${reset}\n"
+			wget -q -O "$resolvers" https://raw.githubusercontent.com/trickest/resolvers/main/resolvers.txt
+			wget -q -O "$resolvers_trusted" https://gist.githubusercontent.com/six2dez/ae9ed7e5c786461868abd3f2344401b6/raw/trusted_resolvers.txt
+			echo -e "${yellow}Resolvers updated.${reset}\n"
+		fi
 	fi
-fi
 
-## Stripping all Go binaries
-eval strip -s "$HOME"/go/bin/* $DEBUG_STD
+	# Strip all Go binaries and copy to /usr/local/bin
+	strip -s "${GOPATH}/bin/"* &>/dev/null || true
+	"$SUDO" cp "${GOPATH}/bin/"* /usr/local/bin/ &>/dev/null || true
+
+	# Final reminders
+	echo -e "${yellow}Remember to set your API keys:\n- subfinder (${HOME}/.config/subfinder/provider-config.yaml)\n- GitHub (${HOME}/Tools/.github_tokens)\n- GitLab (${HOME}/Tools/.gitlab_tokens)\n- SSRF Server (COLLAB_SERVER in reconftw.cfg or env var)\n- Waymore (${HOME}/.config/waymore/config.yml)\n- Blind XSS Server (XSS_SERVER in reconftw.cfg or env var)\n- notify (${HOME}/.config/notify/provider-config.yaml)\n- WHOISXML API (WHOISXML_API in reconftw.cfg or env var)\n${reset}"
+	echo -e "${bgreen}Finished!${reset}\n"
+	echo -e "${bgreen}#######################################################################${reset}"
+}
+
+# Function to display additional help
+function show_additional_help() {
+	echo "Usage: $0 [OPTION]"
+	echo "Run the script with specified options."
+	echo ""
+	echo "  -h, --help       Display this help and exit."
+	echo "  --tools          Install the tools before running, useful for upgrading."
+	echo ""
+	echo "  ****             Without any arguments, the script will update reconftw"
+	echo "                   and install all dependencies and requirements."
+	exit 0
+}
+
+# Function to handle installation arguments
+function handle_install_arguments() {
+	echo -e "\n${bgreen}reconFTW installer/updater script${reset}\n"
+
+	while [[ $# -gt 0 ]]; do
+		case "$1" in
+		-h | --help)
+			show_additional_help
+			;;
+		--tools)
+			install_tools
+			shift
+			;;
+		*)
+			echo -e "${bred}Error: Invalid argument '$1'${reset}"
+			echo "Use -h or --help for usage information."
+			exit 1
+			;;
+		esac
+	done
 
-eval $SUDO cp "$HOME"/go/bin/* /usr/local/bin/ $DEBUG_STD
+	echo -e "${yellow}This may take some time. Grab a coffee!${reset}\n"
 
+	# Determine if the script is run as root
+	if [[ "$(id -u)" -eq 0 ]]; then
+		SUDO=""
+	else
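+		# sudo -n is non-interactive: it fails instead of prompting, which makes
+		# it a safe probe for passwordless sudo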
+		if ! sudo -n true 2>/dev/null; then
+			echo -e "${bred}It is strongly recommended to add your user to sudoers.${reset}"
+			echo -e "${bred}This will avoid prompts for sudo password during installation and scans.${reset}"
+			echo -e "${bred}Run the following command to add your user to sudoers:${reset}"
+			echo -e "${bred}echo \"${USER}  ALL=(ALL:ALL) NOPASSWD: ALL\" | sudo tee /etc/sudoers.d/reconFTW${reset}\n"
+		fi
+		SUDO="sudo"
+	fi
+}
 
-printf "${yellow} Remember set your api keys:\n - subfinder (~/.config/subfinder/provider-config.yaml)\n - GitHub (~/Tools/.github_tokens)\n - GitLab (~/Tools/.gitlab_tokens)\n - SSRF Server (COLLAB_SERVER in reconftw.cfg or env var) \n - Waymore ( ~/.config/waymore/config.yml) \n - Blind XSS Server (XSS_SERVER in reconftw.cfg or env var) \n - notify (~/.config/notify/provider-config.yaml) \n - WHOISXML API (WHOISXML_API in reconftw.cfg or env var)\n\n${reset}"
-printf "${bgreen} Finished!${reset}\n\n"
-printf "\n\n${bgreen}#######################################################################${reset}\n"
+# Invoke main functions
+handle_install_arguments "$@"
+initial_setup
diff --git a/reconftw.cfg b/reconftw.cfg
index daeb3c54..34b6e606 100644
--- a/reconftw.cfg
+++ b/reconftw.cfg
@@ -3,7 +3,7 @@
 #############################################
 
 # General values
-tools=~/Tools   # Path installed tools
+tools=$HOME/Tools   # Path to installed tools
 SCRIPTPATH="$( cd "$(dirname "$0")" >/dev/null 2>&1 ; pwd -P )" # Get current script's path
 profile_shell=".$(basename $(echo $SHELL))rc" # Get current shell profile
 reconftw_version=$(git rev-parse --abbrev-ref HEAD)-$(git describe --tags) # Fetch current reconftw version
@@ -116,6 +116,7 @@ PASSWORD_DICT=true # Generate password dictionary
 PASSWORD_MIN_LENGTH=5 # Min password length
 PASSWORD_MAX_LENGTH=14 # Max password length
 CLOUDHUNTER_PERMUTATION=NORMAL # Options: DEEP (very slow), NORMAL (slow), NONE 
+NUCLEI_FUZZING_TEMPLATES_PATH="${tools}/fuzzing-templates" # Set nuclei fuzzing templates path
 
 # Vulns
 VULNS_GENERAL=false # Enable or disable the vulnerability module (very intrusive and slow)
diff --git a/reconftw.sh b/reconftw.sh
index eb991e90..7c1b6713 100755
--- a/reconftw.sh
+++ b/reconftw.sh
@@ -12,434 +12,250 @@
 #	   ░        ░  ░░ ░          ░ ░           ░                      ░
 #
 
-# Error Management
-#set -eEuo pipefail
-#function handle_error() {
-#    local lineno="$1"
-#    local msg="$2"
-#    local command="$3"
-#    echo "##### ERROR [Line $lineno] - $msg (Command: $command) #####"
-#}
-#trap 'handle_error ${LINENO} "$BASH_COMMAND" "${FUNCNAME[@]}"' ERR
-
-function banner_graber() {
-	source "${SCRIPTPATH}"/banners.txt
-	randx=$(shuf -i 1-23 -n 1)
-	tmp="banner${randx}"
-	banner_code=${!tmp}
-	echo -e "${banner_code}"
-}
-function banner() {
-	banner_code=$(banner_graber)
-	printf "\n${bgreen}${banner_code}"
-	printf "\n ${reconftw_version}                                 by @six2dez${reset}\n"
+function banner_grabber() {
+	local banner_file="${SCRIPTPATH}/banners.txt"
+
+	# Check if the banner file exists
+	if [[ ! -f $banner_file ]]; then
+		echo "Banner file not found: $banner_file" >&2
+		return 1
+	fi
+
+	# Source the banner file
+	source "$banner_file"
+
+	# Collect all banner variable names
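+	# (compgen -A variable lists every defined variable name; grep keeps only
+	# the bannerN ones and mapfile reads them into an array)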
+	mapfile -t banner_vars < <(compgen -A variable | grep '^banner[0-9]\+$')
+
+	# Check if any banners are available
+	if [[ ${#banner_vars[@]} -eq 0 ]]; then
+		echo "No banners found in $banner_file" >&2
+		return 1
+	fi
+
+	# Select a random banner
+	local rand_index=$((RANDOM % ${#banner_vars[@]}))
+	local banner_var="${banner_vars[$rand_index]}"
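+	# ${!banner_var} is indirect expansion: the value of the variable whose
+	# name is stored in banner_var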
+	local banner_code="${!banner_var}"
+
+	# Output the banner code
+	printf "%b\n" "$banner_code"
 }
 
-function test_connectivity() {
-	if nc -zw1 google.com 443 2>/dev/null; then
-		echo -e "Connection: ${bgreen}OK${reset}"
+function banner() {
+	local banner_code
+	if banner_code=$(banner_grabber); then
+		printf "\n%b%s" "$bgreen" "$banner_code"
+		printf "\n %s                                 by @six2dez%b\n" "$reconftw_version" "$reset"
 	else
-		echo -e "${bred}[!] Please check your internet connection and then try again...${reset}"
-		exit 1
+		printf "\n%bFailed to load banner.%b\n" "$bgreen" "$reset"
 	fi
 }
+
 ###############################################################################################################
 ################################################### TOOLS #####################################################
 ###############################################################################################################
 
 function check_version() {
-	timeout 10 git fetch || (true && echo "git fetch timeout reached")
-	exit_status=$?
-	if [[ ${exit_status} -eq 0 ]]; then
-		BRANCH=$(git rev-parse --abbrev-ref HEAD)
-		HEADHASH=$(git rev-parse HEAD)
-		UPSTREAMHASH=$(git rev-parse "${BRANCH}"@\{upstream\})
-		if [[ ${HEADHASH} != "${UPSTREAMHASH}" ]]; then
-			printf "\n${yellow} There is a new version, run ./install.sh to get latest version${reset}\n\n"
-		fi
-	else
-		printf "\n${bred} Unable to check updates ${reset}\n\n"
+
+	# Check if git is installed
+	if ! command -v git >/dev/null 2>&1; then
+		printf "\n%bGit is not installed. Cannot check for updates.%b\n\n" "$bred" "$reset"
+		return 1
+	fi
+
+	# Check if current directory is a git repository
+	if ! git rev-parse --is-inside-work-tree >/dev/null 2>&1; then
+		printf "\n%bCurrent directory is not a git repository. Cannot check for updates.%b\n\n" "$bred" "$reset"
+		return 1
+	fi
+
+	# Fetch updates with a timeout
+	if ! timeout 10 git fetch >/dev/null 2>&1; then
+		printf "\n%bUnable to check updates (git fetch timed out).%b\n\n" "$bred" "$reset"
+		return 1
+	fi
+
+	# Get current branch name
+	local BRANCH
+	BRANCH=$(git rev-parse --abbrev-ref HEAD)
+
+	# Get upstream branch
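+	# ("@{u}" resolves to the upstream tracking branch configured for HEAD)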
+	local UPSTREAM
+	UPSTREAM=$(git rev-parse --abbrev-ref --symbolic-full-name "@{u}" 2>/dev/null)
+	if [[ -z $UPSTREAM ]]; then
+		printf "\n%bNo upstream branch set for '%s'. Cannot check for updates.%b\n\n" "$bred" "$BRANCH" "$reset"
+		return 1
+	fi
+
+	# Get local and remote commit hashes
+	local LOCAL REMOTE
+	LOCAL=$(git rev-parse HEAD)
+	REMOTE=$(git rev-parse "$UPSTREAM")
+
+	# Compare local and remote hashes
+	if [[ $LOCAL != "$REMOTE" ]]; then
+		printf "\n%bThere is a new version available. Run ./install.sh to get the latest version.%b\n\n" "$yellow" "$reset"
 	fi
 }
 
 function tools_installed() {
+	# Check if all tools are installed
+	printf "\n\n%b#######################################################################%b\n" "$bgreen" "$reset"
+	printf "%b[%s] Checking installed tools %b\n\n" "$bblue" "$(date +'%Y-%m-%d %H:%M:%S')" "$reset"
 
-	printf "\n\n${bgreen}#######################################################################${reset}\n"
-	printf "${bblue}[$(date +'%Y-%m-%d %H:%M:%S')] Checking installed tools ${reset}\n\n"
+	local all_installed=true
+	local missing_tools=()
 
-	allinstalled=true
+	# Check environment variables
+	local env_vars=("GOPATH" "GOROOT" "PATH")
+	for var in "${env_vars[@]}"; do
+		if [[ -z ${!var} ]]; then
+			printf "%b [*] %s variable\t\t[NO]%b\n" "$bred" "$var" "$reset"
+			all_installed=false
+			missing_tools+=("$var environment variable")
+		fi
+	done
 
-	[ -n "$GOPATH" ] || {
-		printf "${bred} [*] GOPATH var			[NO]${reset}\n"
-		allinstalled=false
-	}
-	[ -n "$GOROOT" ] || {
-		printf "${bred} [*] GOROOT var			[NO]${reset}\n"
-		allinstalled=false
-	}
-	[ -n "$PATH" ] || {
-		printf "${bred} [*] PATH var			[NO]${reset}\n"
-		allinstalled=false
-	}
-	[ -f "${tools}/dorks_hunter/dorks_hunter.py" ] || {
-		printf "${bred} [*] dorks_hunter		[NO]${reset}\n"
-		allinstalled=false
-	}
-	[ -f "${tools}/fav-up/favUp.py" ] || {
-		printf "${bred} [*] fav-up			[NO]${reset}\n"
-		allinstalled=false
-	}
-	[ -f "${tools}/Corsy/corsy.py" ] || {
-		printf "${bred} [*] Corsy			[NO]${reset}\n"
-		allinstalled=false
-	}
-	[ -f "${tools}/testssl.sh/testssl.sh" ] || {
-		printf "${bred} [*] testssl			[NO]${reset}\n"
-		allinstalled=false
-	}
-	[ -f "${tools}/CMSeeK/cmseek.py" ] || {
-		printf "${bred} [*] CMSeeK			[NO]${reset}\n"
-		allinstalled=false
-	}
-	[ -f "${fuzz_wordlist}" ] || {
-		printf "${bred} [*] OneListForAll		[NO]${reset}\n"
-		allinstalled=false
-	}
-	[ -f "${lfi_wordlist}" ] || {
-		printf "${bred} [*] lfi_wordlist		[NO]${reset}\n"
-		allinstalled=false
-	}
-	[ -f "${ssti_wordlist}" ] || {
-		printf "${bred} [*] ssti_wordlist		[NO]${reset}\n"
-		allinstalled=false
-	}
-	[ -f "${subs_wordlist}" ] || {
-		printf "${bred} [*] subs_wordlist		[NO]${reset}\n"
-		allinstalled=false
-	}
-	[ -f "${subs_wordlist_big}" ] || {
-		printf "${bred} [*] subs_wordlist_big		[NO]${reset}\n"
-		allinstalled=false
-	}
-	[ -f "${resolvers}" ] || {
-		printf "${bred} [*] resolvers		[NO]${reset}\n"
-		allinstalled=false
-	}
-	[ -f "${resolvers_trusted}" ] || {
-		printf "${bred} [*] resolvers_trusted		[NO]${reset}\n"
-		allinstalled=false
-	}
-	command -v brutespray &>/dev/null || {
-		printf "${bred} [*] brutespray		[NO]${reset}\n"
-		allinstalled=false
-	}
-	command -v xnLinkFinder &>/dev/null || {
-		printf "${bred} [*] xnLinkFinder		[NO]${reset}\n"
-		allinstalled=false
-	}
-	command -v waymore &>/dev/null || {
-		printf "${bred} [*] waymore		[NO]${reset}\n"
-		allinstalled=false
-	}
-	[ -f "${tools}/commix/commix.py" ] || {
-		printf "${bred} [*] commix			[NO]${reset}\n"
-		allinstalled=false
-	}
-	[ -f "${tools}/getjswords.py" ] || {
-		printf "${bred} [*] getjswords   		[NO]${reset}\n"
-		allinstalled=false
-	}
-	[ -f "${tools}/JSA/jsa.py" ] || {
-		printf "${bred} [*] JSA			[NO]${reset}\n"
-		allinstalled=false
-	}
-	[ -f "${tools}/CloudHunter/cloudhunter.py" ] || {
-		printf "${bred} [*] CloudHunter			[NO]${reset}\n"
-		allinstalled=false
-	}
-	[ -f "${tools}/ultimate-nmap-parser/ultimate-nmap-parser.sh" ] || {
-		printf "${bred} [*] nmap-parse-output		[NO]${reset}\n"
-		allinstalled=false
-	}
-	[ -f "${tools}/pydictor/pydictor.py" ] || {
-		printf "${bred} [*] pydictor   		[NO]${reset}\n"
-		allinstalled=false
-	}
-	[ -f "${tools}/urless/urless/urless.py" ] || {
-		printf "${bred} [*] urless			[NO]${reset}\n"
-		allinstalled=false
-	}
-	[ -f "${tools}/smuggler/smuggler.py" ] || {
-		printf "${bred} [*] smuggler			[NO]${reset}\n"
-		allinstalled=false
-	}
-	[ -f "${tools}/regulator/main.py" ] || {
-		printf "${bred} [*] regulator			[NO]${reset}\n"
-		allinstalled=false
-	}
-	[ -f "${tools}/nomore403/nomore403" ] || {
-		printf "${bred} [*] nomore403			[NO]${reset}\n"
-		allinstalled=false
-	}
-	[ -f "${tools}/ffufPostprocessing/ffufPostprocessing" ] || {
-		printf "${bred} [*] ffufPostprocessing	[NO]${reset}\n"
-		allinstalled=false
-	}
-	[ -f "${tools}/misconfig-mapper/misconfig-mapper" ] || {
-		printf "${bred} [*] misconfig-mapper		[NO]${reset}\n"
-		allinstalled=false
-	}
-	[ -f "${tools}/Spoofy/spoofy.py" ] || {
-		printf "${bred} [*] spoofy			[NO]${reset}\n"
-		allinstalled=false
-	}
-	[ -f "${tools}/SwaggerSpy/swaggerspy.py" ] || {
-		printf "${bred} [*] swaggerspy			[NO]${reset}\n"
-		allinstalled=false
-	}
-	[ -f "${tools}/LeakSearch/LeakSearch.py" ] || {
-		printf "${bred} [*] LeakSearch			[NO]${reset}\n"
-		allinstalled=false
-	}
-	command -v github-endpoints &>/dev/null || {
-		printf "${bred} [*] github-endpoints		[NO]${reset}\n"
-		allinstalled=false
-	}
-	command -v github-subdomains &>/dev/null || {
-		printf "${bred} [*] github-subdomains		[NO]${reset}\n"
-		allinstalled=false
-	}
-	command -v gitlab-subdomains &>/dev/null || {
-		printf "${bred} [*] gitlab-subdomains		[NO]${reset}\n"
-		allinstalled=false
-	}
-	command -v katana &>/dev/null || {
-		printf "${bred} [*] katana			[NO]${reset}\n"
-		allinstalled=false
-	}
-	command -v wafw00f &>/dev/null || {
-		printf "${bred} [*] wafw00f			[NO]${reset}\n"
-		allinstalled=false
-	}
-	command -v dnsvalidator &>/dev/null || {
-		printf "${bred} [*] dnsvalidator		[NO]${reset}\n"
-		allinstalled=false
-	}
-	command -v metafinder &>/dev/null || {
-		printf "${bred} [*] metafinder			[NO]${reset}\n"
-		allinstalled=false
-	}
-	command -v whois &>/dev/null || {
-		printf "${bred} [*] whois			[NO]${reset}\n"
-		allinstalled=false
-	}
-	command -v dnsx &>/dev/null || {
-		printf "${bred} [*] dnsx			[NO]${reset}\n"
-		allinstalled=false
-	}
-	command -v gotator &>/dev/null || {
-		printf "${bred} [*] gotator			[NO]${reset}\n"
-		allinstalled=false
-	}
-	command -v nuclei &>/dev/null || {
-		printf "${bred} [*] Nuclei			[NO]${reset}\n"
-		allinstalled=false
-	}
-	[ -d ${NUCLEI_TEMPLATES_PATH} ] || {
-		printf "${bred} [*] Nuclei templates	[NO]${reset}\n"
-		allinstalled=false
-	}
-	[ -d ${tools}/fuzzing-templates ] || {
-		printf "${bred} [*] Fuzzing templates	[NO]${reset}\n"
-		allinstalled=false
-	}
-	command -v gf &>/dev/null || {
-		printf "${bred} [*] Gf				[NO]${reset}\n"
-		allinstalled=false
-	}
-	command -v Gxss &>/dev/null || {
-		printf "${bred} [*] Gxss			[NO]${reset}\n"
-		allinstalled=false
-	}
-	command -v subjs &>/dev/null || {
-		printf "${bred} [*] subjs			[NO]${reset}\n"
-		allinstalled=false
-	}
-	command -v ffuf &>/dev/null || {
-		printf "${bred} [*] ffuf			[NO]${reset}\n"
-		allinstalled=false
-	}
-	command -v massdns &>/dev/null || {
-		printf "${bred} [*] Massdns			[NO]${reset}\n"
-		allinstalled=false
-	}
-	command -v qsreplace &>/dev/null || {
-		printf "${bred} [*] qsreplace			[NO]${reset}\n"
-		allinstalled=false
-	}
-	command -v interlace &>/dev/null || {
-		printf "${bred} [*] interlace			[NO]${reset}\n"
-		allinstalled=false
-	}
-	command -v anew &>/dev/null || {
-		printf "${bred} [*] Anew			[NO]${reset}\n"
-		allinstalled=false
-	}
-	command -v unfurl &>/dev/null || {
-		printf "${bred} [*] unfurl			[NO]${reset}\n"
-		allinstalled=false
-	}
-	command -v crlfuzz &>/dev/null || {
-		printf "${bred} [*] crlfuzz			[NO]${reset}\n"
-		allinstalled=false
-	}
-	command -v httpx &>/dev/null || {
-		printf "${bred} [*] Httpx			[NO]${reset}\n${reset}"
-		allinstalled=false
-	}
-	command -v jq &>/dev/null || {
-		printf "${bred} [*] jq				[NO]${reset}\n${reset}"
-		allinstalled=false
-	}
-	command -v notify &>/dev/null || {
-		printf "${bred} [*] notify			[NO]${reset}\n${reset}"
-		allinstalled=false
-	}
-	command -v dalfox &>/dev/null || {
-		printf "${bred} [*] dalfox			[NO]${reset}\n${reset}"
-		allinstalled=false
-	}
-	command -v puredns &>/dev/null || {
-		printf "${bred} [*] puredns			[NO]${reset}\n${reset}"
-		allinstalled=false
-	}
-	command -v emailfinder &>/dev/null || {
-		printf "${bred} [*] emailfinder		[NO]${reset}\n"
-		allinstalled=false
-	}
-	command -v analyticsrelationships &>/dev/null || {
-		printf "${bred} [*] analyticsrelationships	[NO]${reset}\n"
-		allinstalled=false
-	}
-	command -v mapcidr &>/dev/null || {
-		printf "${bred} [*] mapcidr			[NO]${reset}\n"
-		allinstalled=false
-	}
-	command -v ppmap &>/dev/null || {
-		printf "${bred} [*] ppmap			[NO]${reset}\n"
-		allinstalled=false
-	}
-	command -v cdncheck &>/dev/null || {
-		printf "${bred} [*] cdncheck			[NO]${reset}\n"
-		allinstalled=false
-	}
-	command -v interactsh-client &>/dev/null || {
-		printf "${bred} [*] interactsh-client		[NO]${reset}\n"
-		allinstalled=false
-	}
-	command -v tlsx &>/dev/null || {
-		printf "${bred} [*] tlsx			[NO]${reset}\n"
-		allinstalled=false
-	}
-	command -v smap &>/dev/null || {
-		printf "${bred} [*] smap			[NO]${reset}\n"
-		allinstalled=false
-	}
-	command -v gitdorks_go &>/dev/null || {
-		printf "${bred} [*] gitdorks_go		[NO]${reset}\n"
-		allinstalled=false
-	}
-	command -v ripgen &>/dev/null || {
-		printf "${bred} [*] ripgen			[NO]${reset}\n${reset}"
-		allinstalled=false
-	}
-	command -v dsieve &>/dev/null || {
-		printf "${bred} [*] dsieve			[NO]${reset}\n${reset}"
-		allinstalled=false
-	}
-	command -v inscope &>/dev/null || {
-		printf "${bred} [*] inscope			[NO]${reset}\n${reset}"
-		allinstalled=false
-	}
-	command -v enumerepo &>/dev/null || {
-		printf "${bred} [*] enumerepo			[NO]${reset}\n${reset}"
-		allinstalled=false
-	}
-	command -v Web-Cache-Vulnerability-Scanner &>/dev/null || {
-		printf "${bred} [*] Web-Cache-Vulnerability-Scanner [NO]${reset}\n"
-		allinstalled=false
-	}
-	command -v subfinder &>/dev/null || {
-		printf "${bred} [*] subfinder			[NO]${reset}\n${reset}"
-		allinstalled=false
-	}
-	command -v ghauri &>/dev/null || {
-		printf "${bred} [*] ghauri			[NO]${reset}\n${reset}"
-		allinstalled=false
-	}
-	command -v hakip2host &>/dev/null || {
-		printf "${bred} [*] hakip2host			[NO]${reset}\n${reset}"
-		allinstalled=false
-	}
-	command -v gau &>/dev/null || {
-		printf "${bred} [*] gau			[NO]${reset}\n${reset}"
-		allinstalled=false
-	}
-	command -v crt &>/dev/null || {
-		printf "${bred}  [*] crt			[NO]${reset}\n${reset}"
-		allinstalled=false
-	}
-	command -v gitleaks &>/dev/null || {
-		printf "${bred} [*] gitleaks			[NO]${reset}\n${reset}"
-		allinstalled=false
-	}
-	command -v trufflehog &>/dev/null || {
-		printf "${bred} [*] trufflehog			[NO]${reset}\n${reset}"
-		allinstalled=false
-	}
-	command -v s3scanner &>/dev/null || {
-		printf "${bred} [*] s3scanner			[NO]${reset}\n${reset}"
-		allinstalled=false
-	}
-	command -v mantra &>/dev/null || {
-		printf "${bred} [*] mantra			[NO]${reset}\n${reset}"
-		allinstalled=false
-	}
-	command -v nmapurls &>/dev/null || {
-		printf "${bred} [*] nmapurls			[NO]${reset}\n"
-		allinstalled=false
-	}
-	command -v porch-pirate &>/dev/null || {
-		printf "${bred} [*] porch-pirate			[NO]${reset}\n"
-		allinstalled=false
-	}
-	command -v shortscan &>/dev/null || {
-		printf "${bred} [*] shortscan			[NO]${reset}\n"
-		allinstalled=false
-	}
-	command -v sns &>/dev/null || {
-		printf "${bred} [*] sns			[NO]${reset}\n"
-		allinstalled=false
-	}
-	command -v sourcemapper &>/dev/null || {
-		printf "${bred} [*] sourcemapper			[NO]${reset}\n"
-		allinstalled=false
-	}
-	command -v jsluice &>/dev/null || {
-		printf "${bred} [*] jsluice			[NO]${reset}\n"
-		allinstalled=false
-	}
-	if [[ ${allinstalled} == true ]]; then
-		printf "${bgreen} Good! All installed! ${reset}\n\n"
+	# Define tools and their paths/commands
+	declare -A tools_files=(
+		["dorks_hunter"]="${tools}/dorks_hunter/dorks_hunter.py"
+		["fav-up"]="${tools}/fav-up/favUp.py"
+		["Corsy"]="${tools}/Corsy/corsy.py"
+		["testssl"]="${tools}/testssl.sh/testssl.sh"
+		["CMSeeK"]="${tools}/CMSeeK/cmseek.py"
+		["OneListForAll"]="$fuzz_wordlist"
+		["lfi_wordlist"]="$lfi_wordlist"
+		["ssti_wordlist"]="$ssti_wordlist"
+		["subs_wordlist"]="$subs_wordlist"
+		["subs_wordlist_big"]="$subs_wordlist_big"
+		["resolvers"]="$resolvers"
+		["resolvers_trusted"]="$resolvers_trusted"
+		["commix"]="${tools}/commix/commix.py"
+		["getjswords"]="${tools}/getjswords.py"
+		["JSA"]="${tools}/JSA/jsa.py"
+		["CloudHunter"]="${tools}/CloudHunter/cloudhunter.py"
+		["nmap-parse-output"]="${tools}/ultimate-nmap-parser/ultimate-nmap-parser.sh"
+		["pydictor"]="${tools}/pydictor/pydictor.py"
+		["urless"]="${tools}/urless/urless/urless.py"
+		["smuggler"]="${tools}/smuggler/smuggler.py"
+		["regulator"]="${tools}/regulator/main.py"
+		["nomore403"]="${tools}/nomore403/nomore403"
+		["ffufPostprocessing"]="${tools}/ffufPostprocessing/ffufPostprocessing"
+		["misconfig-mapper"]="${tools}/misconfig-mapper/misconfig-mapper"
+		["spoofy"]="${tools}/Spoofy/spoofy.py"
+		["swaggerspy"]="${tools}/SwaggerSpy/swaggerspy.py"
+		["LeakSearch"]="${tools}/LeakSearch/LeakSearch.py"
+	)
+
+	declare -A tools_folders=(
+		["NUCLEI_TEMPLATES_PATH"]="${NUCLEI_TEMPLATES_PATH}"
+		["NUCLEI_FUZZING_TEMPLATES_PATH"]="${NUCLEI_FUZZING_TEMPLATES_PATH}"
+	)
+
+	declare -A tools_commands=(
+		["brutespray"]="brutespray"
+		["xnLinkFinder"]="xnLinkFinder"
+		["waymore"]="waymore"
+		["github-endpoints"]="github-endpoints"
+		["github-subdomains"]="github-subdomains"
+		["gitlab-subdomains"]="gitlab-subdomains"
+		["katana"]="katana"
+		["wafw00f"]="wafw00f"
+		["dnsvalidator"]="dnsvalidator"
+		["metafinder"]="metafinder"
+		["whois"]="whois"
+		["dnsx"]="dnsx"
+		["gotator"]="gotator"
+		["Nuclei"]="nuclei"
+		["gf"]="gf"
+		["Gxss"]="Gxss"
+		["subjs"]="subjs"
+		["ffuf"]="ffuf"
+		["Massdns"]="massdns"
+		["qsreplace"]="qsreplace"
+		["interlace"]="interlace"
+		["Anew"]="anew"
+		["unfurl"]="unfurl"
+		["crlfuzz"]="crlfuzz"
+		["Httpx"]="httpx"
+		["jq"]="jq"
+		["notify"]="notify"
+		["dalfox"]="dalfox"
+		["puredns"]="puredns"
+		["emailfinder"]="emailfinder"
+		["analyticsrelationships"]="analyticsrelationships"
+		["mapcidr"]="mapcidr"
+		["ppmap"]="ppmap"
+		["cdncheck"]="cdncheck"
+		["interactsh-client"]="interactsh-client"
+		["tlsx"]="tlsx"
+		["smap"]="smap"
+		["gitdorks_go"]="gitdorks_go"
+		["ripgen"]="ripgen"
+		["dsieve"]="dsieve"
+		["inscope"]="inscope"
+		["enumerepo"]="enumerepo"
+		["Web-Cache-Vulnerability-Scanner"]="Web-Cache-Vulnerability-Scanner"
+		["subfinder"]="subfinder"
+		["ghauri"]="ghauri"
+		["hakip2host"]="hakip2host"
+		["gau"]="gau"
+		["crt"]="crt"
+		["gitleaks"]="gitleaks"
+		["trufflehog"]="trufflehog"
+		["s3scanner"]="s3scanner"
+		["mantra"]="mantra"
+		["nmapurls"]="nmapurls"
+		["porch-pirate"]="porch-pirate"
+		["shortscan"]="shortscan"
+		["sns"]="sns"
+		["sourcemapper"]="sourcemapper"
+		["jsluice"]="jsluice"
+	)
+
+	# Check for tool files
+	for tool in "${!tools_files[@]}"; do
+		if [[ ! -f ${tools_files[$tool]} ]]; then
+			# printf "%b [*] %s\t\t[NO]%b\n" "$bred" "$tool" "$reset"
+			all_installed=false
+			missing_tools+=("$tool")
+		fi
+	done
+
+	# Check for tool folders
+	for folder in "${!tools_folders[@]}"; do
+		if [[ ! -d ${tools_folders[$folder]} ]]; then
+			# printf "%b [*] %s\t\t[NO]%b\n" "$bred" "$folder" "$reset"
+			all_installed=false
+			missing_tools+=("$folder") # Correctly pushing the folder name
+		fi
+	done
+
+	# Check for tool commands
+	for tool in "${!tools_commands[@]}"; do
+		if ! command -v "${tools_commands[$tool]}" >/dev/null 2>&1; then
+			# printf "%b [*] %s\t\t[NO]%b\n" "$bred" "$tool" "$reset"
+			all_installed=false
+			missing_tools+=("$tool")
+		fi
+	done
+
+	if [[ $all_installed == true ]]; then
+		printf "%b\n Good! All tools are installed! %b\n\n" "$bgreen" "$reset"
 	else
-		printf "\n${yellow} Try running the installer script again ./install.sh"
-		printf "\n${yellow} If it fails for any reason try to install manually the tools missed"
-		printf "\n${yellow} Finally remember to set the ${bred}\${tools}${yellow} variable at the start of this script"
-		printf "\n${yellow} If nothing works and the world is gonna end you can always ping me :D ${reset}\n\n"
+		printf "\n%bSome tools or directories are missing:%b\n\n" "$yellow" "$reset"
+		for tool in "${missing_tools[@]}"; do
+			printf "%b - %s %b\n" "$bred" "$tool" "$reset"
+		done
+		printf "\n%bTry running the installer script again: ./install.sh%b\n" "$yellow" "$reset"
+		printf "%bIf it fails, try installing the missing tools manually.%b\n" "$yellow" "$reset"
+		printf "%bEnsure that the %b\$tools%b variable is correctly set at the start of this script.%b\n" "$yellow" "$bred" "$yellow" "$reset"
+		printf "%bIf you need assistance, feel free to contact me! :D%b\n\n" "$yellow" "$reset"
 	fi
 
-	printf "${bblue}[$(date +'%Y-%m-%d %H:%M:%S')] Tools check finished\n"
-	printf "${bgreen}#######################################################################\n${reset}"
+	printf "%b[%s] Tools check finished%b\n" "$bblue" "$(date +'%Y-%m-%d %H:%M:%S')" "$reset"
+	printf "%b#######################################################################\n%b" "$bgreen" "$reset"
 }
 
 ###############################################################################################################
@@ -448,312 +264,450 @@ function tools_installed() {
 
 function google_dorks() {
 	mkdir -p osint
+
 	if { [[ ! -f "$called_fn_dir/.${FUNCNAME[0]}" ]] || [[ $DIFF == true ]]; } && [[ $GOOGLE_DORKS == true ]] && [[ $OSINT == true ]]; then
-		start_func "${FUNCNAME[0]}" "Google Dorks in process"
-		python3 ${tools}/dorks_hunter/dorks_hunter.py -d "$domain" -o osint/dorks.txt || {
-			echo "dorks_hunter command failed"
-			exit 1
-		}
+		start_func "${FUNCNAME[0]}" "Running: Google Dorks in process"
+
+		python3 "${tools}/dorks_hunter/dorks_hunter.py" -d "$domain" -o "osint/dorks.txt"
 		end_func "Results are saved in $domain/osint/dorks.txt" "${FUNCNAME[0]}"
 	else
 		if [[ $GOOGLE_DORKS == false ]] || [[ $OSINT == false ]]; then
-			printf "\n${yellow}[$(date +'%Y-%m-%d %H:%M:%S')] ${FUNCNAME[0]} skipped in this mode or defined in reconftw.cfg ${reset}\n"
+			printf "\n%b[%s] %s skipped due to mode or configuration settings.%b\n" "$yellow" "$(date +'%Y-%m-%d %H:%M:%S')" "${FUNCNAME[0]}" "$reset"
 		else
-			printf "${yellow}[$(date +'%Y-%m-%d %H:%M:%S')] ${FUNCNAME[0]} are already processed, to force executing ${FUNCNAME[0]} delete\n    $called_fn_dir/.${FUNCNAME[0]} ${reset}\n\n"
+			printf "%b[%s] %s has already been processed. To force execution, delete:\n    %s/.%s %b\n\n" "$yellow" "$(date +'%Y-%m-%d %H:%M:%S')" "${FUNCNAME[0]}" "$called_fn_dir" "${FUNCNAME[0]}" "$reset"
 		fi
 	fi
-
 }
 
 function github_dorks() {
 	mkdir -p osint
+
 	if { [[ ! -f "$called_fn_dir/.${FUNCNAME[0]}" ]] || [[ $DIFF == true ]]; } && [[ $GITHUB_DORKS == true ]] && [[ $OSINT == true ]]; then
-		start_func "${FUNCNAME[0]}" "Github Dorks in process"
-		if [[ -s ${GITHUB_TOKENS} ]]; then
+		start_func "${FUNCNAME[0]}" "Running: Github Dorks in process"
+
+		if [[ -s $GITHUB_TOKENS ]]; then
 			if [[ $DEEP == true ]]; then
-				gitdorks_go -gd ${tools}/gitdorks_go/Dorks/medium_dorks.txt -nws 20 -target "$domain" -tf "${GITHUB_TOKENS}" -ew 3 | anew -q osint/gitdorks.txt || {
-					echo "gitdorks_go/anew command failed"
-					exit 1
-				}
+				if ! gitdorks_go -gd "${tools}/gitdorks_go/Dorks/medium_dorks.txt" -nws 20 -target "$domain" -tf "$GITHUB_TOKENS" -ew 3 | anew -q osint/gitdorks.txt; then
+					printf "%b[!] gitdorks_go command failed.%b\n" "$bred" "$reset"
+					return 1
+				fi
 			else
-				gitdorks_go -gd ${tools}/gitdorks_go/Dorks/smalldorks.txt -nws 20 -target $domain -tf "${GITHUB_TOKENS}" -ew 3 | anew -q osint/gitdorks.txt || {
-					echo "gitdorks_go/anew command failed"
-					exit 1
-				}
+				if ! gitdorks_go -gd "${tools}/gitdorks_go/Dorks/smalldorks.txt" -nws 20 -target "$domain" -tf "$GITHUB_TOKENS" -ew 3 | anew -q osint/gitdorks.txt; then
+					printf "%b[!] gitdorks_go command failed.%b\n" "$bred" "$reset"
+					return 1
+				fi
 			fi
 		else
-			printf "\n${bred}[$(date +'%Y-%m-%d %H:%M:%S')] Required file ${GITHUB_TOKENS} not exists or empty${reset}\n"
+			printf "\n%b[%s] Required file %s does not exist or is empty.%b\n" "$bred" "$(date +'%Y-%m-%d %H:%M:%S')" "$GITHUB_TOKENS" "$reset"
+			return 1
 		fi
 		end_func "Results are saved in $domain/osint/gitdorks.txt" "${FUNCNAME[0]}"
 	else
 		if [[ $GITHUB_DORKS == false ]] || [[ $OSINT == false ]]; then
-			printf "\n${yellow}[$(date +'%Y-%m-%d %H:%M:%S')] ${FUNCNAME[0]} skipped in this mode or defined in reconftw.cfg ${reset}\n"
+			printf "\n%b[%s] %s skipped due to mode or configuration settings.%b\n" "$yellow" "$(date +'%Y-%m-%d %H:%M:%S')" "${FUNCNAME[0]}" "$reset"
 		else
-			printf "${yellow}[$(date +'%Y-%m-%d %H:%M:%S')] ${FUNCNAME[0]} is already processed, to force executing ${FUNCNAME[0]} delete\n    $called_fn_dir/.${FUNCNAME[0]} ${reset}\n\n"
+			printf "%b[%s] %s has already been processed. To force execution, delete:\n    %s/.%s %b\n\n" "$yellow" "$(date +'%Y-%m-%d %H:%M:%S')" "${FUNCNAME[0]}" "$called_fn_dir" "${FUNCNAME[0]}" "$reset"
 		fi
 	fi
-
 }
 
 function github_repos() {
-
 	mkdir -p .tmp
+
 	if { [[ ! -f "$called_fn_dir/.${FUNCNAME[0]}" ]] || [[ $DIFF == true ]]; } && [[ $GITHUB_REPOS == true ]] && [[ $OSINT == true ]]; then
 		start_func "${FUNCNAME[0]}" "Github Repos analysis in process"
 
-		if [[ -s ${GITHUB_TOKENS} ]]; then
-			GH_TOKEN=$(cat ${GITHUB_TOKENS} | head -1)
-			echo $domain | unfurl format %r >.tmp/company_name.txt
-			enumerepo -token-string "${GH_TOKEN}" -usernames .tmp/company_name.txt -o .tmp/company_repos.txt 2>>"$LOGFILE" >/dev/null
-			[ -s ".tmp/company_repos.txt" ] && jq -r '.[].repos[]|.url' <.tmp/company_repos.txt >.tmp/company_repos_url.txt 2>>"$LOGFILE"
-			mkdir -p .tmp/github_repos 2>>"$LOGFILE" >>"$LOGFILE"
-			mkdir -p .tmp/github 2>>"$LOGFILE" >>"$LOGFILE"
-			[ -s ".tmp/company_repos_url.txt" ] && interlace -tL .tmp/company_repos_url.txt -threads ${INTERLACE_THREADS} -c "git clone _target_  .tmp/github_repos/_cleantarget_" 2>>"$LOGFILE" >/dev/null 2>&1
-			[ -d ".tmp/github/" ] && ls .tmp/github_repos >.tmp/github_repos_folders.txt
-			[ -s ".tmp/github_repos_folders.txt" ] && interlace -tL .tmp/github_repos_folders.txt -threads ${INTERLACE_THREADS} -c "gitleaks detect --source .tmp/github_repos/_target_ --no-banner --no-color -r .tmp/github/gh_secret_cleantarget_.json" 2>>"$LOGFILE" >/dev/null
-			[ -s ".tmp/company_repos_url.txt" ] && interlace -tL .tmp/company_repos_url.txt -threads ${INTERLACE_THREADS} -c "trufflehog git _target_ -j 2>&1 | jq -c > _output_/_cleantarget_" -o .tmp/github/ >>"$LOGFILE" 2>&1
+		if [[ -s $GITHUB_TOKENS ]]; then
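+			# enumerepo takes a single token, so use the first line of the file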
+			GH_TOKEN=$(head -n 1 "$GITHUB_TOKENS")
+			echo "$domain" | unfurl format %r >.tmp/company_name.txt
+
+			if ! enumerepo -token-string "$GH_TOKEN" -usernames .tmp/company_name.txt -o .tmp/company_repos.txt 2>>"$LOGFILE" >/dev/null; then
+				printf "%b[!] enumerepo command failed.%b\n" "$bred" "$reset"
+				return 1
+			fi
+
+			if [[ -s ".tmp/company_repos.txt" ]]; then
+				if ! jq -r '.[].repos[]|.url' <.tmp/company_repos.txt >.tmp/company_repos_url.txt 2>>"$LOGFILE"; then
+					printf "%b[!] jq command failed.%b\n" "$bred" "$reset"
+					return 1
+				fi
+			else
+				printf "%b[!] No repositories found for the company.%b\n" "$yellow" "$reset"
+				return 1
+			fi
+
+			mkdir -p .tmp/github_repos 2>>"$LOGFILE"
+			mkdir -p .tmp/github 2>>"$LOGFILE"
+
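+			# interlace replaces _target_ with each input line, _cleantarget_ with
+			# a sanitized (filename-safe) copy of it, and _output_ with the -o dir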
+			if [[ -s ".tmp/company_repos_url.txt" ]]; then
+				if ! interlace -tL .tmp/company_repos_url.txt -threads "$INTERLACE_THREADS" -c "git clone _target_ .tmp/github_repos/_cleantarget_" 2>>"$LOGFILE" >/dev/null; then
+					printf "%b[!] interlace git clone command failed.%b\n" "$bred" "$reset"
+					return 1
+				fi
+			else
+				printf "%b[!] No repository URLs found to clone.%b\n" "$yellow" "$reset"
+				end_func "Results are saved in $domain/osint/github_company_secrets.json" "${FUNCNAME[0]}"
+				return 1
+			fi
+
+			if [[ -d ".tmp/github_repos/" ]]; then
+				ls .tmp/github_repos >.tmp/github_repos_folders.txt
+			else
+				printf "%b[!] No repositories cloned.%b\n" "$yellow" "$reset"
+				end_func "Results are saved in $domain/osint/github_company_secrets.json" "${FUNCNAME[0]}"
+				return 1
+			fi
+
+			if [[ -s ".tmp/github_repos_folders.txt" ]]; then
+				if ! interlace -tL .tmp/github_repos_folders.txt -threads "$INTERLACE_THREADS" -c "gitleaks detect --source .tmp/github_repos/_target_ --no-banner --no-color -r .tmp/github/gh_secret_cleantarget_.json" 2>>"$LOGFILE" >/dev/null; then
+					printf "%b[!] interlace gitleaks command failed.%b\n" "$bred" "$reset"
+					end_func "Results are saved in $domain/osint/github_company_secrets.json" "${FUNCNAME[0]}"
+					return 1
+				fi
+			else
+				printf "%b[!] No repository folders found for gitleaks.%b\n" "$yellow" "$reset"
+				end_func "Results are saved in $domain/osint/github_company_secrets.json" "${FUNCNAME[0]}"
+				return 1
+			fi
+
+			if [[ -s ".tmp/company_repos_url.txt" ]]; then
+				if ! interlace -tL .tmp/company_repos_url.txt -threads "$INTERLACE_THREADS" -c "trufflehog git _target_ -j 2>&1 | jq -c > _output_/_cleantarget_" -o .tmp/github/ 2>>"$LOGFILE" >/dev/null; then
+					printf "%b[!] interlace trufflehog command failed.%b\n" "$bred" "$reset"
+					return 1
+				fi
+			fi
+
 			if [[ -d ".tmp/github/" ]]; then
-				cat .tmp/github/* 2>/dev/null | jq -c | jq -r >osint/github_company_secrets.json 2>>"$LOGFILE"
+				if ! cat .tmp/github/* 2>/dev/null | jq -c | jq -r >"osint/github_company_secrets.json" 2>>"$LOGFILE"; then
+					printf "%b[!] Error combining results.%b\n" "$bred" "$reset"
+					return 1
+				fi
+			else
+				printf "%b[!] No secrets found to compile.%b\n" "$yellow" "$reset"
+				end_func "Results are saved in $domain/osint/github_company_secrets.json" "${FUNCNAME[0]}"
+				return 1
 			fi
+
+			end_func "Results are saved in $domain/osint/github_company_secrets.json" "${FUNCNAME[0]}"
 		else
-			printf "\n${bred}[$(date +'%Y-%m-%d %H:%M:%S')] Required file ${GITHUB_TOKENS} not exists or empty${reset}\n"
+			printf "\n%s[%s] Required file %s does not exist or is empty.%b\n" "$bred" "$(date +'%Y-%m-%d %H:%M:%S')" "$GITHUB_TOKENS" "$reset"
+			return 1
 		fi
-		end_func "Results are saved in $domain/osint/github_company_secrets.json" ${FUNCNAME[0]}
 	else
 		if [[ $GITHUB_REPOS == false ]] || [[ $OSINT == false ]]; then
-			printf "\n${yellow}[$(date +'%Y-%m-%d %H:%M:%S')] ${FUNCNAME[0]} skipped in this mode or defined in reconftw.cfg ${reset}\n"
+			printf "\n%b[%s] %s skipped due to mode or configuration settings.%b\n" "$yellow" "$(date +'%Y-%m-%d %H:%M:%S')" "${FUNCNAME[0]}" "$reset"
 		else
-			printf "${yellow}[$(date +'%Y-%m-%d %H:%M:%S')] ${FUNCNAME[0]} is already processed, to force executing ${FUNCNAME[0]} delete\n    $called_fn_dir/.${FUNCNAME[0]} ${reset}\n\n"
+			printf "%b[%s] %s has already been processed. To force execution, delete:\n    %s/.%s %b\n\n" "$yellow" "$(date +'%Y-%m-%d %H:%M:%S')" "${FUNCNAME[0]}" "$called_fn_dir" "${FUNCNAME[0]}" "$reset"
 		fi
 	fi
-
 }
 
 function metadata() {
-
 	mkdir -p osint
-	if { [[ ! -f "${called_fn_dir}/.${FUNCNAME[0]}" ]] || [[ ${DIFF} == true ]]; } && [[ ${METADATA} == true ]] && [[ ${OSINT} == true ]] && ! [[ ${domain} =~ ^[0-9]+\.[0-9]+\.[0-9]+\.[0-9] ]]; then
-		start_func ${FUNCNAME[0]} "Scanning metadata in public files"
-		metafinder -d "$domain" -l $METAFINDER_LIMIT -o osint -go -bi &>>"$LOGFILE" || {
-			echo "metafinder command failed"
-			exit 1
-		}
-		mv "osint/${domain}/"*".txt" "osint/" 2>>"$LOGFILE"
-		rm -rf "osint/${domain}" 2>>"$LOGFILE"
-		end_func "Results are saved in $domain/osint/[software/authors/metadata_results].txt" ${FUNCNAME[0]}
+
+	# Check if the function should run
+	if { [[ ! -f "${called_fn_dir}/.${FUNCNAME[0]}" ]] || [[ ${DIFF} == true ]]; } && [[ ${METADATA} == true ]] && [[ ${OSINT} == true ]] && ! [[ ${domain} =~ ^[0-9]+\.[0-9]+\.[0-9]+\.[0-9]+$ ]]; then
+		start_func "${FUNCNAME[0]}" "Scanning metadata in public files"
+
+		# Run metafinder and check for errors
+		if ! metafinder -d "${domain}" -l "${METAFINDER_LIMIT}" -o osint -go -bi &>>"${LOGFILE}"; then
+			printf "%b[!] metafinder command failed.%b\n" "${bred}" "${reset}"
+			return 1
+		fi
+
+		# Move result files and check for errors
+		if [ -d "osint/${domain}" ] && [ "$(ls -A "osint/${domain}")" ]; then
+			if ! mv "osint/${domain}/"*.txt "osint/" 2>>"${LOGFILE}"; then
+				printf "%b[!] Failed to move metadata files.%b\n" "${bred}" "${reset}"
+				return 1
+			fi
+		fi
+
+		# Remove temporary directory and check for errors
+		if ! rm -rf "osint/${domain}" 2>>"${LOGFILE}"; then
+			printf "%b[!] Failed to remove temporary directory.%b\n" "${bred}" "${reset}"
+			return 1
+		fi
+
+		end_func "Results are saved in ${domain}/osint/[software/authors/metadata_results].txt" "${FUNCNAME[0]}"
 	else
-		if [[ $METADATA == false ]] || [[ $OSINT == false ]]; then
-			printf "\n${yellow}[$(date +'%Y-%m-%d %H:%M:%S')] ${FUNCNAME[0]} skipped in this mode or defined in reconftw.cfg ${reset}\n"
-		elif [[ $domain =~ ^[0-9]+\.[0-9]+\.[0-9]+\.[0-9] ]]; then
+		if [[ ${METADATA} == false ]] || [[ ${OSINT} == false ]]; then
+			printf "\n%b[%s] %s skipped due to mode or configuration settings.%b\n" "${yellow}" "$(date +'%Y-%m-%d %H:%M:%S')" "${FUNCNAME[0]}" "${reset}"
+		elif [[ ${domain} =~ ^[0-9]+\.[0-9]+\.[0-9]+\.[0-9]+$ ]]; then
 			return
 		else
-			if [[ $METADATA == false ]] || [[ $OSINT == false ]]; then
-				printf "\n${yellow}[$(date +'%Y-%m-%d %H:%M:%S')] ${FUNCNAME[0]} skipped in this mode or defined in reconftw.cfg ${reset}\n"
-			else
-				printf "${yellow}[$(date +'%Y-%m-%d %H:%M:%S')] ${FUNCNAME[0]} is already processed, to force executing ${FUNCNAME[0]} delete\n    $called_fn_dir/.${FUNCNAME[0]} ${reset}\n\n"
-			fi
+			printf "%b[%s] %s has already been processed. To force execution, delete:\n    %s/.%s %b\n\n" "${yellow}" "$(date +'%Y-%m-%d %H:%M:%S')" "${FUNCNAME[0]}" "${called_fn_dir}" "${FUNCNAME[0]}" "${reset}"
 		fi
 	fi
-
 }
 
 function apileaks() {
-
 	mkdir -p osint
-	if { [[ ! -f "$called_fn_dir/.${FUNCNAME[0]}" ]] || [[ $DIFF == true ]]; } && [[ $API_LEAKS == true ]] && [[ $OSINT == true ]] && ! [[ $domain =~ ^[0-9]+\.[0-9]+\.[0-9]+\.[0-9] ]]; then
-		start_func ${FUNCNAME[0]} "Scanning for leaks in APIs public directories"
 
-		porch-pirate -s "$domain" --dump 2>>"$LOGFILE" >${dir}/osint/postman_leaks.txt || {
-			echo "porch-pirate command failed (probably by rate limit)"
-		}
-		pushd "${tools}/SwaggerSpy" >/dev/null || {
-			echo "Failed to pushd to ${tools}/SwaggerSpy in ${FUNCNAME[0]} @ line ${LINENO}"
-		}
-		python3 swaggerspy.py $domain 2>>"$LOGFILE" | grep -i "[*]\|URL" >${dir}/osint/swagger_leaks.txt
+	# Check if the function should run
+	if { [[ ! -f "$called_fn_dir/.${FUNCNAME[0]}" ]] || [[ $DIFF == true ]]; } &&
+		[[ $API_LEAKS == true ]] && [[ $OSINT == true ]] &&
+		! [[ $domain =~ ^[0-9]+\.[0-9]+\.[0-9]+\.[0-9]+$ ]]; then
 
-		popd >/dev/null || {
-			echo "Failed to popd in ${FUNCNAME[0]} @ line ${LINENO}"
-		}
+		start_func "${FUNCNAME[0]}" "Scanning for leaks in public API directories"
 
-		[ -s "osint/postman_leaks.txt" ] && trufflehog filesystem ${dir}/osint/postman_leaks.txt -j 2>/dev/null | jq -c | anew -q ${dir}/osint/postman_leaks_trufflehog.json
-		[ -s "osint/swagger_leaks.txt" ] && trufflehog filesystem ${dir}/osint/swagger_leaks.txt -j 2>/dev/null | jq -c | anew -q ${dir}/osint/swagger_leaks_trufflehog.json
+		# Run porch-pirate (failures, usually rate limiting, are only logged)
+		porch-pirate -s "$domain" --dump 2>>"$LOGFILE" >"${dir}/osint/postman_leaks.txt"
+
+		# Change directory to SwaggerSpy
+		if ! pushd "${tools}/SwaggerSpy" >/dev/null; then
+			printf "%b[!] Failed to change directory to %s in %s at line %s.%b\n" "$bred" "${tools}/SwaggerSpy" "${FUNCNAME[0]}" "$LINENO" "$reset"
+			return 1
+		fi
+
+		# Run swaggerspy.py; errors go to the log file
+		python3 swaggerspy.py "$domain" 2>>"$LOGFILE" | grep -i "[*]\|URL" >"${dir}/osint/swagger_leaks.txt"
+
+		# Return to the previous directory
+		if ! popd >/dev/null; then
+			printf "%b[!] Failed to return to the previous directory in %s at line %s.%b\n" "$bred" "${FUNCNAME[0]}" "$LINENO" "$reset"
+			return 1
+		fi
+
+		# Analyze leaks with trufflehog
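+		# (trufflehog -j emits one JSON finding per line; jq -c compacts them
+		# and anew appends only lines not already present in the output file)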
+		if [[ -s "${dir}/osint/postman_leaks.txt" ]]; then
+			trufflehog filesystem "${dir}/osint/postman_leaks.txt" -j 2>/dev/null | jq -c | anew -q "${dir}/osint/postman_leaks_trufflehog.json"
+		fi
 
-		end_func "Results are saved in $domain/osint/[software/authors/metadata_results].txt" ${FUNCNAME[0]}
+		if [[ -s "${dir}/osint/swagger_leaks.txt" ]]; then
+			trufflehog filesystem "${dir}/osint/swagger_leaks.txt" -j 2>/dev/null | jq -c | anew -q "${dir}/osint/swagger_leaks_trufflehog.json"
+		fi
+
+		end_func "Results are saved in $domain/osint/[postman_leaks_trufflehog.json, swagger_leaks_trufflehog.json]" "${FUNCNAME[0]}"
 	else
 		if [[ $API_LEAKS == false ]] || [[ $OSINT == false ]]; then
-			printf "\n${yellow}[$(date +'%Y-%m-%d %H:%M:%S')] ${FUNCNAME[0]} skipped in this mode or defined in reconftw.cfg ${reset}\n"
-		elif [[ $domain =~ ^[0-9]+\.[0-9]+\.[0-9]+\.[0-9] ]]; then
+			printf "\n%b[%s] %s skipped due to mode or configuration settings.%b\n" "$yellow" "$(date +'%Y-%m-%d %H:%M:%S')" "${FUNCNAME[0]}" "$reset"
+		elif [[ $domain =~ ^[0-9]+\.[0-9]+\.[0-9]+\.[0-9]+$ ]]; then
 			return
 		else
-			if [[ $API_LEAKS == false ]] || [[ $OSINT == false ]]; then
-				printf "\n${yellow}[$(date +'%Y-%m-%d %H:%M:%S')] ${FUNCNAME[0]} skipped in this mode or defined in reconftw.cfg ${reset}\n"
-			else
-				printf "${yellow}[$(date +'%Y-%m-%d %H:%M:%S')] ${FUNCNAME[0]} is already processed, to force executing ${FUNCNAME[0]} delete\n    $called_fn_dir/.${FUNCNAME[0]} ${reset}\n\n"
-			fi
+			printf "%b[%s] %s has already been processed. To force execution, delete:\n    %s/.%s %b\n\n" "$yellow" "$(date +'%Y-%m-%d %H:%M:%S')" "${FUNCNAME[0]}" "$called_fn_dir" "${FUNCNAME[0]}" "$reset"
 		fi
 	fi
-
 }
 
 function emails() {
+	mkdir -p .tmp osint
 
-	mkdir -p {.tmp,osint}
-	if { [[ ! -f "$called_fn_dir/.${FUNCNAME[0]}" ]] || [[ $DIFF == true ]]; } && [[ $EMAILS == true ]] && [[ $OSINT == true ]] && ! [[ $domain =~ ^[0-9]+\.[0-9]+\.[0-9]+\.[0-9] ]]; then
-		start_func ${FUNCNAME[0]} "Searching emails/users/passwords leaks"
-		emailfinder -d $domain 2>>"$LOGFILE" | anew -q .tmp/emailfinder.txt || {
-			echo "emailfinder command failed"
-			exit 1
-		}
-		[ -s ".tmp/emailfinder.txt" ] && cat .tmp/emailfinder.txt | grep "@" | grep -iv "|_" | anew -q osint/emails.txt
+	# Check if the function should run
+	if { [[ ! -f "$called_fn_dir/.${FUNCNAME[0]}" ]] || [[ $DIFF == true ]]; } &&
+		[[ $EMAILS == true ]] && [[ $OSINT == true ]] &&
+		! [[ $domain =~ ^[0-9]+\.[0-9]+\.[0-9]+\.[0-9]+$ ]]; then
 
-		pushd "${tools}/LeakSearch" >/dev/null || {
-			echo "Failed to cd directory in ${FUNCNAME[0]} @ line ${LINENO}"
-		}
+		start_func "${FUNCNAME[0]}" "Searching for emails/users/passwords leaks"
 
-		python3 LeakSearch.py -k $domain -o ${dir}/.tmp/passwords.txt 1>>"$LOGFILE" || {
-			echo "LeakSearch command failed"
-		}
+		# Run emailfinder; errors go to the log file
+		emailfinder -d "$domain" 2>>"$LOGFILE" | anew -q .tmp/emailfinder.txt
 
-		popd >/dev/null || {
-			echo "Failed to popd in ${FUNCNAME[0]} @ line ${LINENO}"
-		}
+		# Process emailfinder results
+		if [[ -s ".tmp/emailfinder.txt" ]]; then
+			grep "@" .tmp/emailfinder.txt | grep -iv "|_" | anew -q osint/emails.txt
+		fi
+
+		# Change directory to LeakSearch
+		if ! pushd "${tools}/LeakSearch" >/dev/null; then
+			printf "%b[!] Failed to change directory to %s in %s at line %s.%b\n" "$bred" "${tools}/LeakSearch" "${FUNCNAME[0]}" "$LINENO" "$reset"
+			return 1
+		fi
+
+		# Run LeakSearch.py; errors go to the log file
+		python3 LeakSearch.py -k "$domain" -o "${dir}/.tmp/passwords.txt" 1>>"$LOGFILE"
+
+		# Return to the previous directory
+		if ! popd >/dev/null; then
+			printf "%b[!] Failed to return to the previous directory in %s at line %s.%b\n" "$bred" "${FUNCNAME[0]}" "$LINENO" "$reset"
+			return 1
+		fi
 
-		[ -s ".tmp/passwords.txt" ] && cat .tmp/passwords.txt | anew -q osint/passwords.txt
+		# Process passwords.txt
+		if [[ -s "${dir}/.tmp/passwords.txt" ]]; then
+			anew -q osint/passwords.txt <"${dir}/.tmp/passwords.txt"
+		fi
 
-		end_func "Results are saved in $domain/osint/emails|passwords.txt" ${FUNCNAME[0]}
+		end_func "Results are saved in $domain/osint/emails.txt and passwords.txt" "${FUNCNAME[0]}"
 	else
 		if [[ $EMAILS == false ]] || [[ $OSINT == false ]]; then
-			printf "\n${yellow}[$(date +'%Y-%m-%d %H:%M:%S')] ${FUNCNAME[0]} skipped in this mode or defined in reconftw.cfg ${reset}\n"
-		elif [[ $domain =~ ^[0-9]+\.[0-9]+\.[0-9]+\.[0-9] ]]; then
+			printf "\n%b[%s] %s skipped due to mode or configuration settings.%b\n" "$yellow" "$(date +'%Y-%m-%d %H:%M:%S')" "${FUNCNAME[0]}" "$reset"
+		elif [[ $domain =~ ^[0-9]+\.[0-9]+\.[0-9]+\.[0-9]+$ ]]; then
 			return
 		else
-			if [[ $EMAILS == false ]] || [[ $OSINT == false ]]; then
-				printf "\n${yellow}[$(date +'%Y-%m-%d %H:%M:%S')] ${FUNCNAME[0]} skipped in this mode or defined in reconftw.cfg ${reset}\n"
-			else
-				printf "${yellow}[$(date +'%Y-%m-%d %H:%M:%S')] ${FUNCNAME[0]} is already processed, to force executing ${FUNCNAME[0]} delete\n    $called_fn_dir/.${FUNCNAME[0]} ${reset}\n\n"
-			fi
+			printf "%b[%s] %s has already been processed. To force execution, delete:\n    %s/.%s%b\n\n" "$yellow" "$(date +'%Y-%m-%d %H:%M:%S')" "${FUNCNAME[0]}" "$called_fn_dir" "/.${FUNCNAME[0]}" "$reset"
 		fi
 	fi
-
 }
 
 function domain_info() {
 
 	mkdir -p osint
-	if { [[ ! -f "$called_fn_dir/.${FUNCNAME[0]}" ]] || [[ $DIFF == true ]]; } && [[ $DOMAIN_INFO == true ]] && [[ $OSINT == true ]] && ! [[ $domain =~ ^[0-9]+\.[0-9]+\.[0-9]+\.[0-9] ]]; then
-		start_func ${FUNCNAME[0]} "Searching domain info (whois, registrant name/email domains)"
-		whois -H $domain >osint/domain_info_general.txt || { echo "whois command failed"; }
 
-		curl -s "https://aadinternals.azurewebsites.net/api/tenantinfo?domainName=${domain}" -H "Origin: https://aadinternals.com" | jq -r .domains[].name >osint/azure_tenant_domains.txt
+	# Check if the function should run
+	if { [[ ! -f "$called_fn_dir/.${FUNCNAME[0]}" ]] || [[ $DIFF == true ]]; } &&
+		[[ $DOMAIN_INFO == true ]] && [[ $OSINT == true ]] &&
+		! [[ $domain =~ ^[0-9]+\.[0-9]+\.[0-9]+\.[0-9]+$ ]]; then
+
+		start_func "${FUNCNAME[0]}" "Searching domain info (whois, registrant name/email domains)"
+
+		# Query whois for general registration info
+		whois -H "$domain" >"osint/domain_info_general.txt"
+
+		# Fetch the domains sharing the Azure AD tenant via the aadinternals API
+		curl -s "https://aadinternals.azurewebsites.net/api/tenantinfo?domainName=${domain}" \
+			-H "Origin: https://aadinternals.com" \
+			-H "Referer: https://aadinternals.com/" \
+			-H "User-Agent: Mozilla/5.0 (X11; Linux x86_64; rv:72.0) Gecko/20100101 Firefox/72.0" |
+			jq -r '.domains[].name' >"osint/azure_tenant_domains.txt"
+
+		end_func "Results are saved in ${domain}/osint/domain_info_[general/azure_tenant_domains].txt" "${FUNCNAME[0]}"
 
-		end_func "Results are saved in $domain/osint/domain_info_[general/name/email/ip].txt" ${FUNCNAME[0]}
 	else
 		if [[ $DOMAIN_INFO == false ]] || [[ $OSINT == false ]]; then
-			printf "\n${yellow}[$(date +'%Y-%m-%d %H:%M:%S')] ${FUNCNAME[0]} skipped in this mode or defined in reconftw.cfg ${reset}\n"
-		elif [[ $domain =~ ^[0-9]+\.[0-9]+\.[0-9]+\.[0-9] ]]; then
+			printf "\n%b[%s] %s skipped due to mode or configuration settings.%b\n" \
+				"$yellow" "$(date +'%Y-%m-%d %H:%M:%S')" "${FUNCNAME[0]}" "$reset"
+		elif [[ $domain =~ ^[0-9]+\.[0-9]+\.[0-9]+\.[0-9]+$ ]]; then
 			return
 		else
-			if [[ $DOMAIN_INFO == false ]] || [[ $OSINT == false ]]; then
-				printf "\n${yellow}[$(date +'%Y-%m-%d %H:%M:%S')] ${FUNCNAME[0]} skipped in this mode or defined in reconftw.cfg ${reset}\n"
-			else
-				printf "${yellow}[$(date +'%Y-%m-%d %H:%M:%S')] ${FUNCNAME[0]} is already processed, to force executing ${FUNCNAME[0]} delete\n    $called_fn_dir/.${FUNCNAME[0]} ${reset}\n\n"
-			fi
+			printf "%b[%s] %s has already been processed. To force execution, delete:\n    %s/.%s%b\n\n" \
+				"$yellow" "$(date +'%Y-%m-%d %H:%M:%S')" "${FUNCNAME[0]}" "$called_fn_dir" "/.${FUNCNAME[0]}" "$reset"
 		fi
 	fi
-
 }
 
 function third_party_misconfigs() {
-
 	mkdir -p osint
-	if { [[ ! -f "$called_fn_dir/.${FUNCNAME[0]}" ]] || [[ $DIFF == true ]]; } && [[ $THIRD_PARTIES == true ]] && [[ $OSINT == true ]] && ! [[ $domain =~ ^[0-9]+\.[0-9]+\.[0-9]+\.[0-9] ]]; then
-		start_func ${FUNCNAME[0]} "Searching for third parties misconfigurations"
-		company_name=$(echo $domain | unfurl format %r)
 
-		pushd "${tools}/misconfig-mapper" >/dev/null || {
-			echo "Failed to cd directory in ${FUNCNAME[0]} @ line ${LINENO}"
-		}
-		./misconfig-mapper -target $company_name -service "*" | grep -v "\[-\]" >${dir}/osint/3rdparts_misconfigurations.txt
+	# Check if the function should run
+	if { [[ ! -f "$called_fn_dir/.${FUNCNAME[0]}" ]] || [[ $DIFF == true ]]; } &&
+		[[ $THIRD_PARTIES == true ]] && [[ $OSINT == true ]] &&
+		! [[ $domain =~ ^[0-9]+\.[0-9]+\.[0-9]+\.[0-9]+$ ]]; then
 
-		popd >/dev/null || {
-			echo "Failed to popd in ${FUNCNAME[0]} @ line ${LINENO}"
-		}
+		start_func "${FUNCNAME[0]}" "Searching for third parties misconfigurations"
 
-		end_func "Results are saved in $domain/osint/3rdparts_misconfigurations.txt" ${FUNCNAME[0]}
+		# Extract company name from domain
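+		# (unfurl's %r format prints the root domain name, e.g. "example" for
+		# sub.example.com; <<< feeds the domain in as a here-string)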
+		company_name=$(unfurl format %r <<<"$domain")
+
+		# Change directory to misconfig-mapper tool
+		if ! pushd "${tools}/misconfig-mapper" >/dev/null; then
+			printf "%b[!] Failed to change directory to %s in %s at line %s.%b\n" \
+				"$bred" "${tools}/misconfig-mapper" "${FUNCNAME[0]}" "$LINENO" "$reset"
+			return 1
+		fi
+
+		# Run misconfig-mapper against all services, filtering out negative results
+		./misconfig-mapper -target "$company_name" -service "*" | grep -v "\-\]" >"${dir}/osint/3rdparts_misconfigurations.txt"
+
+		# Return to the previous directory
+		if ! popd >/dev/null; then
+			printf "%b[!] Failed to return to previous directory in %s at line %s.%b\n" \
+				"$bred" "${FUNCNAME[0]}" "$LINENO" "$reset"
+			return 1
+		fi
+
+		end_func "Results are saved in $domain/osint/3rdparts_misconfigurations.txt" "${FUNCNAME[0]}"
 
 	else
 		if [[ $THIRD_PARTIES == false ]] || [[ $OSINT == false ]]; then
-			printf "\n${yellow}[$(date +'%Y-%m-%d %H:%M:%S')] ${FUNCNAME[0]} skipped in this mode or defined in reconftw.cfg ${reset}\n"
-		elif [[ $domain =~ ^[0-9]+\.[0-9]+\.[0-9]+\.[0-9] ]]; then
+			printf "\n%b[%s] %s skipped due to mode or configuration settings.%b\n" \
+				"$yellow" "$(date +'%Y-%m-%d %H:%M:%S')" "${FUNCNAME[0]}" "$reset"
+		elif [[ $domain =~ ^[0-9]+\.[0-9]+\.[0-9]+\.[0-9]+$ ]]; then
 			return
 		else
-			if [[ $THIRD_PARTIES == false ]] || [[ $OSINT == false ]]; then
-				printf "\n${yellow}[$(date +'%Y-%m-%d %H:%M:%S')] ${FUNCNAME[0]} skipped in this mode or defined in reconftw.cfg ${reset}\n"
-			else
-				printf "${yellow}[$(date +'%Y-%m-%d %H:%M:%S')] ${FUNCNAME[0]} is already processed, to force executing ${FUNCNAME[0]} delete\n    $called_fn_dir/.${FUNCNAME[0]} ${reset}\n\n"
-			fi
+			printf "%b[%s] %s has already been processed. To force execution, delete:\n    %s/.%s%b\n\n" \
+				"$yellow" "$(date +'%Y-%m-%d %H:%M:%S')" "${FUNCNAME[0]}" "$called_fn_dir" "${FUNCNAME[0]}" "$reset"
 		fi
 	fi
-
 }
 
 function spoof() {
-
 	mkdir -p osint
-	if { [[ ! -f "$called_fn_dir/.${FUNCNAME[0]}" ]] || [[ $DIFF == true ]]; } && [[ $SPOOF == true ]] && [[ $OSINT == true ]] && ! [[ $domain =~ ^[0-9]+\.[0-9]+\.[0-9]+\.[0-9] ]]; then
-		start_func ${FUNCNAME[0]} "Searching for spoofable domains"
 
-		pushd "${tools}/Spoofy" >/dev/null || {
-			echo "Failed to cd directory in ${FUNCNAME[0]} @ line ${LINENO}"
-		}
-		./spoofy.py -d $domain >${dir}/osint/spoof.txt
+	# Check if the function should run
+	if { [[ ! -f "$called_fn_dir/.${FUNCNAME[0]}" ]] || [[ $DIFF == true ]]; } &&
+		[[ $SPOOF == true ]] && [[ $OSINT == true ]] &&
+		! [[ $domain =~ ^[0-9]+\.[0-9]+\.[0-9]+\.[0-9]+$ ]]; then
 
-		popd >/dev/null || {
-			echo "Failed to popd in ${FUNCNAME[0]} @ line ${LINENO}"
-		}
+		start_func "${FUNCNAME[0]}" "Searching for spoofable domains"
 
-		end_func "Results are saved in $domain/osint/spoof.txt" ${FUNCNAME[0]}
+		# Change directory to Spoofy tool
+		if ! pushd "${tools}/Spoofy" >/dev/null; then
+			printf "%b[!] Failed to change directory to %s in %s at line %s.%b\n" \
+				"$bred" "${tools}/Spoofy" "${FUNCNAME[0]}" "$LINENO" "$reset"
+			return 1
+		fi
+
+		# Run spoofy.py to check whether the domain can be spoofed
+		./spoofy.py -d "$domain" >"${dir}/osint/spoof.txt"
+
+		# Return to the previous directory
+		if ! popd >/dev/null; then
+			printf "%b[!] Failed to return to previous directory in %s at line %s.%b\n" \
+				"$bred" "${FUNCNAME[0]}" "$LINENO" "$reset"
+			return 1
+		fi
+
+		end_func "Results are saved in $domain/osint/spoof.txt" "${FUNCNAME[0]}"
 
 	else
 		if [[ $SPOOF == false ]] || [[ $OSINT == false ]]; then
-			printf "\n${yellow}[$(date +'%Y-%m-%d %H:%M:%S')] ${FUNCNAME[0]} skipped in this mode or defined in reconftw.cfg ${reset}\n"
-		elif [[ $domain =~ ^[0-9]+\.[0-9]+\.[0-9]+\.[0-9] ]]; then
+			printf "\n%b[%s] %s skipped due to mode or configuration settings.%b\n" \
+				"$yellow" "$(date +'%Y-%m-%d %H:%M:%S')" "${FUNCNAME[0]}" "$reset"
+		elif [[ $domain =~ ^[0-9]+\.[0-9]+\.[0-9]+\.[0-9]+$ ]]; then
 			return
 		else
-			if [[ $SPOOF == false ]] || [[ $OSINT == false ]]; then
-				printf "\n${yellow}[$(date +'%Y-%m-%d %H:%M:%S')] ${FUNCNAME[0]} skipped in this mode or defined in reconftw.cfg ${reset}\n"
-			else
-				printf "${yellow}[$(date +'%Y-%m-%d %H:%M:%S')] ${FUNCNAME[0]} is already processed, to force executing ${FUNCNAME[0]} delete\n    $called_fn_dir/.${FUNCNAME[0]} ${reset}\n\n"
-			fi
+			printf "%b[%s] %s has already been processed. To force execution, delete:\n    %s/.%s%b\n\n" \
+				"$yellow" "$(date +'%Y-%m-%d %H:%M:%S')" "${FUNCNAME[0]}" "$called_fn_dir" "${FUNCNAME[0]}" "$reset"
 		fi
 	fi
-
 }
 
 function ip_info() {
 
 	mkdir -p osint
-	if { [[ ! -f "$called_fn_dir/.${FUNCNAME[0]}" ]] || [[ $DIFF == true ]]; } && [[ $IP_INFO == true ]] && [[ $OSINT == true ]] && [[ $domain =~ ^[0-9]+\.[0-9]+\.[0-9]+\.[0-9] ]]; then
-		start_func ${FUNCNAME[0]} "Searching ip info"
+
+	# Check if the function should run
+	if { [[ ! -f "$called_fn_dir/.${FUNCNAME[0]}" ]] || [[ $DIFF == true ]]; } &&
+		[[ $IP_INFO == true ]] && [[ $OSINT == true ]] &&
+		[[ $domain =~ ^[0-9]+\.[0-9]+\.[0-9]+\.[0-9]+$ ]]; then
+
+		start_func "${FUNCNAME[0]}" "Searching IP info"
+
 		if [[ -n $WHOISXML_API ]]; then
-			curl "https://reverse-ip.whoisxmlapi.com/api/v1?apiKey=${WHOISXML_API}&ip=${domain}" 2>/dev/null | jq -r '.result[].name' 2>>"$LOGFILE" | sed -e "s/$/ ${domain}/" | anew -q osint/ip_${domain}_relations.txt
-			curl "https://www.whoisxmlapi.com/whoisserver/WhoisService?apiKey=${WHOISXML_API}&domainName=${domain}&outputFormat=json&da=2&registryRawText=1&registrarRawText=1&ignoreRawTexts=1" 2>/dev/null | jq 2>>"$LOGFILE" | anew -q osint/ip_${domain}_whois.txt
-			curl "https://ip-geolocation.whoisxmlapi.com/api/v1?apiKey=${WHOISXML_API}&ipAddress=${domain}" 2>/dev/null | jq -r '.ip,.location' 2>>"$LOGFILE" | anew -q osint/ip_${domain}_location.txt
-			end_func "Results are saved in $domain/osint/ip_[domain_relations|whois|location].txt" ${FUNCNAME[0]}
+
+			# Reverse IP lookup
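+			# The jq filter implies a response like {"result":[{"name":"host.example.com"},...]};
+			# sed appends the queried IP so each output line reads "host.example.com 1.2.3.4".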
+			curl -s "https://reverse-ip.whoisxmlapi.com/api/v1?apiKey=${WHOISXML_API}&ip=${domain}" |
+				jq -r '.result[].name' 2>>"$LOGFILE" |
+				sed -e "s/$/ ${domain}/" |
+				anew -q "osint/ip_${domain}_relations.txt"
+
+			# WHOIS lookup
+			curl -s "https://www.whoisxmlapi.com/whoisserver/WhoisService?apiKey=${WHOISXML_API}&domainName=${domain}&outputFormat=json&da=2&registryRawText=1&registrarRawText=1&ignoreRawTexts=1" |
+				jq . 2>>"$LOGFILE" |
+				anew -q "osint/ip_${domain}_whois.txt"
+
+			# IP Geolocation
+			curl -s "https://ip-geolocation.whoisxmlapi.com/api/v1?apiKey=${WHOISXML_API}&ipAddress=${domain}" |
+				jq -r '.ip,.location' 2>>"$LOGFILE" |
+				anew -q "osint/ip_${domain}_location.txt"
+
+			end_func "Results are saved in ${domain}/osint/ip_[domain_relations|whois|location].txt" "${FUNCNAME[0]}"
+
 		else
-			printf "\n${yellow}[$(date +'%Y-%m-%d %H:%M:%S')] No WHOISXML_API var defined, skipping function ${reset}\n"
+			printf "\n%b[%s] WHOISXML_API variable is not defined. Skipping function.%b\n" \
+				"$yellow" "$(date +'%Y-%m-%d %H:%M:%S')" "$reset"
 		fi
+
 	else
 		if [[ $IP_INFO == false ]] || [[ $OSINT == false ]]; then
-			printf "\n${yellow}[$(date +'%Y-%m-%d %H:%M:%S')] ${FUNCNAME[0]} skipped in this mode or defined in reconftw.cfg ${reset}\n"
-		elif [[ ! $domain =~ ^[0-9]+\.[0-9]+\.[0-9]+\.[0-9] ]]; then
+			printf "\n%b[%s] %s skipped due to mode or configuration settings.%b\n" \
+				"$yellow" "$(date +'%Y-%m-%d %H:%M:%S')" "${FUNCNAME[0]}" "$reset"
+		elif ! [[ $domain =~ ^[0-9]+\.[0-9]+\.[0-9]+\.[0-9]+$ ]]; then
 			return
 		else
-			if [[ $IP_INFO == false ]] || [[ $OSINT == false ]]; then
-				printf "\n${yellow}[$(date +'%Y-%m-%d %H:%M:%S')] ${FUNCNAME[0]} skipped in this mode or defined in reconftw.cfg ${reset}\n"
-			else
-				printf "${yellow}[$(date +'%Y-%m-%d %H:%M:%S')] ${FUNCNAME[0]} is already processed, to force executing ${FUNCNAME[0]} delete\n    $called_fn_dir/.${FUNCNAME[0]} ${reset}\n\n"
-			fi
+			printf "%b[%s] %s has already been processed. To force execution, delete:\n    %s/.%s%b\n\n" \
+				"$yellow" "$(date +'%Y-%m-%d %H:%M:%S')" "${FUNCNAME[0]}" "$called_fn_dir" "${FUNCNAME[0]}" "$reset"
 		fi
 	fi
 
@@ -765,88 +719,170 @@ function ip_info() {
 
 function subdomains_full() {
 
-	mkdir -p {.tmp,webs,subdomains}
+	# Create necessary directories
+	if ! mkdir -p .tmp webs subdomains; then
+		printf "%b[!] Failed to create directories.%b\n" "$bred" "$reset"
+		return 1
+	fi
+
 	NUMOFLINES_subs="0"
 	NUMOFLINES_probed="0"
-	printf "${bgreen}#######################################################################\n\n"
-	! [[ $domain =~ ^[0-9]+\.[0-9]+\.[0-9]+\.[0-9] ]] && printf "${bblue}[$(date +'%Y-%m-%d %H:%M:%S')] Subdomain Enumeration $domain\n\n"
-	[[ $domain =~ ^[0-9]+\.[0-9]+\.[0-9]+\.[0-9] ]] && printf "${bblue}[$(date +'%Y-%m-%d %H:%M:%S')] Scanning IP $domain\n\n"
-	[ -s "subdomains/subdomains.txt" ] && cp subdomains/subdomains.txt .tmp/subdomains_old.txt
-	[ -s "webs/webs.txt" ] && cp webs/webs.txt .tmp/probed_old.txt
 
-	if ([[ ! -f "$called_fn_dir/.sub_active" ]] || [[ ! -f "$called_fn_dir/.sub_brute" ]] || [[ ! -f "$called_fn_dir/.sub_permut" ]] || [[ ! -f "$called_fn_dir/.sub_recursive_brute" ]]) || [[ $DIFF == true ]]; then
-		resolvers_update
+	printf "%b#######################################################################%b\n\n" "$bgreen" "$reset"
+
+	# Check if domain is an IP address
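+	# Loose IPv4 test: four dot-separated digit groups (octet ranges are not
+	# validated); the same pattern gates subdomain enumeration further below.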
+	if [[ $domain =~ ^[0-9]+\.[0-9]+\.[0-9]+\.[0-9]+$ ]]; then
+		printf "%b[%s] Scanning IP %s%b\n\n" "$bblue" "$(date +'%Y-%m-%d %H:%M:%S')" "$domain" "$reset"
+	else
+		printf "%b[%s] Subdomain Enumeration %s%b\n\n" "$bblue" "$(date +'%Y-%m-%d %H:%M:%S')" "$domain" "$reset"
+	fi
+
+	# Backup existing subdomains and webs
+	if [[ -s "subdomains/subdomains.txt" ]]; then
+		if ! cp "subdomains/subdomains.txt" ".tmp/subdomains_old.txt"; then
+			printf "%b[!] Failed to backup subdomains.txt.%b\n" "$bred" "$reset"
+		fi
+	fi
+
+	if [[ -s "webs/webs.txt" ]]; then
+		if ! cp "webs/webs.txt" ".tmp/probed_old.txt"; then
+			printf "%b[!] Failed to backup webs.txt.%b\n" "$bred" "$reset"
+		fi
+	fi
+
+	# Update resolvers if necessary
+	if { [[ ! -f "$called_fn_dir/.sub_active" ]] || [[ ! -f "$called_fn_dir/.sub_brute" ]] || [[ ! -f "$called_fn_dir/.sub_permut" ]] || [[ ! -f "$called_fn_dir/.sub_recursive_brute" ]]; } || [[ $DIFF == true ]]; then
+		if ! resolvers_update; then
+			printf "%b[!] Failed to update resolvers.%b\n" "$bred" "$reset"
+			return 1
+		fi
 	fi
 
-	[ -s "${inScope_file}" ] && cat ${inScope_file} | anew -q subdomains/subdomains.txt
+	# Add in-scope subdomains
+	if [[ -s $inScope_file ]]; then
+		if ! cat "$inScope_file" | anew -q subdomains/subdomains.txt; then
+			printf "%b[!] Failed to update subdomains.txt with in-scope domains.%b\n" "$bred" "$reset"
+		fi
+	fi
 
-	if ! [[ $domain =~ ^[0-9]+\.[0-9]+\.[0-9]+\.[0-9] ]] && [[ $SUBDOMAINS_GENERAL == true ]]; then
+	# Subdomain enumeration
+	if [[ ! $domain =~ ^[0-9]+\.[0-9]+\.[0-9]+\.[0-9]+$ ]] && [[ $SUBDOMAINS_GENERAL == true ]]; then
 		sub_passive
 		sub_crt
 		sub_active
+		sub_tls
 		sub_noerror
 		sub_brute
 		sub_permut
 		sub_regex_permut
-		#sub_gpt
+		# sub_gpt
 		sub_recursive_passive
 		sub_recursive_brute
 		sub_dns
 		sub_scraping
 		sub_analytics
 	else
-		notification "IP/CIDR detected, subdomains search skipped" info
-		echo $domain | anew -q subdomains/subdomains.txt
+		notification "IP/CIDR detected, subdomains search skipped" "info"
+		if ! printf "%s\n" "$domain" | anew -q subdomains/subdomains.txt; then
+			printf "%b[!] Failed to add domain to subdomains.txt.%b\n" "$bred" "$reset"
+		fi
+	fi
+
+	# Web probing
+	if ! webprobe_simple; then
+		printf "%b[!] webprobe_simple function failed.%b\n" "$bred" "$reset"
+	fi
+
+	# Process subdomains
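+	# anew appends lines missing from the old snapshot and echoes only those,
+	# so piping through wc -l yields the count of newly discovered entries.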
+	if [[ -s "subdomains/subdomains.txt" ]]; then
+		if [[ -s $outOfScope_file ]]; then
+			if ! deleteOutScoped "$outOfScope_file" "subdomains/subdomains.txt"; then
+				printf "%b[!] Failed to remove out-of-scope subdomains.%b\n" "$bred" "$reset"
+			fi
+		fi
+		if ! NUMOFLINES_subs=$(cat "subdomains/subdomains.txt" 2>>"$LOGFILE" | anew ".tmp/subdomains_old.txt" | sed '/^$/d' | wc -l); then
+			printf "%b[!] Failed to count new subdomains.%b\n" "$bred" "$reset"
+			NUMOFLINES_subs="0"
+		fi
+	fi
+
+	# Process webs
+	if [[ -s "webs/webs.txt" ]]; then
+		if [[ -s $outOfScope_file ]]; then
+			if ! deleteOutScoped "$outOfScope_file" "webs/webs.txt"; then
+				printf "%b[!] Failed to remove out-of-scope webs.%b\n" "$bred" "$reset"
+			fi
+		fi
+		if ! NUMOFLINES_probed=$(cat "webs/webs.txt" 2>>"$LOGFILE" | anew ".tmp/probed_old.txt" | sed '/^$/d' | wc -l); then
+			printf "%b[!] Failed to count new probed webs.%b\n" "$bred" "$reset"
+			NUMOFLINES_probed="0"
+		fi
 	fi
 
-	webprobe_simple
+	# Display results
+	printf "%b\n[%s] Total subdomains:%b\n\n" "$bblue" "$(date +'%Y-%m-%d %H:%M:%S')" "$reset"
+	notification "- ${NUMOFLINES_subs} alive" "good"
+
 	if [[ -s "subdomains/subdomains.txt" ]]; then
-		[ -s "$outOfScope_file" ] && deleteOutScoped $outOfScope_file subdomains/subdomains.txt
-		NUMOFLINES_subs=$(cat subdomains/subdomains.txt 2>>"$LOGFILE" | anew .tmp/subdomains_old.txt | sed '/^$/d' | wc -l)
+		if ! sort "subdomains/subdomains.txt"; then
+			printf "%b[!] Failed to sort subdomains.txt.%b\n" "$bred" "$reset"
+		fi
 	fi
+
+	notification "- ${NUMOFLINES_probed} new web probed" "good"
+
 	if [[ -s "webs/webs.txt" ]]; then
-		[ -s "$outOfScope_file" ] && deleteOutScoped $outOfScope_file webs/webs.txt
-		NUMOFLINES_probed=$(cat webs/webs.txt 2>>"$LOGFILE" | anew .tmp/probed_old.txt | sed '/^$/d' | wc -l)
+		if ! sort "webs/webs.txt"; then
+			printf "%b[!] Failed to sort webs.txt.%b\n" "$bred" "$reset"
+		fi
 	fi
-	printf "${bblue}\n[$(date +'%Y-%m-%d %H:%M:%S')] Total subdomains: ${reset}\n\n"
-	notification "- ${NUMOFLINES_subs} alive" good
-	[ -s "subdomains/subdomains.txt" ] && cat subdomains/subdomains.txt | sort
-	notification "- ${NUMOFLINES_probed} new web probed" good
-	[ -s "webs/webs.txt" ] && cat webs/webs.txt | sort
-	notification "Subdomain Enumeration Finished" good
-	printf "${bblue}[$(date +'%Y-%m-%d %H:%M:%S')] Results are saved in $domain/subdomains/subdomains.txt and webs/webs.txt${reset}\n"
-	printf "${bgreen}#######################################################################\n\n"
+
+	notification "Subdomain Enumeration Finished" "good"
+	printf "%b[%s] Results are saved in %s/subdomains/subdomains.txt and webs/webs.txt%b\n" "$bblue" "$(date +'%Y-%m-%d %H:%M:%S')" "$domain" "$reset"
+	printf "%b#######################################################################%b\n\n" "$bgreen" "$reset"
+
 }
 
 function sub_passive() {
 
 	mkdir -p .tmp
+
 	if { [[ ! -f "$called_fn_dir/.${FUNCNAME[0]}" ]] || [[ $DIFF == true ]]; } && [[ $SUBPASSIVE == true ]]; then
-		start_subfunc ${FUNCNAME[0]} "Running : Passive Subdomain Enumeration"
-		subfinder -all -d "$domain" -max-time ${SUBFINDER_ENUM_TIMEOUT} -silent -o .tmp/subfinder_psub.txt 2>>"$LOGFILE" >/dev/null
+		start_subfunc "${FUNCNAME[0]}" "Running: Passive Subdomain Enumeration"
 
-		if [[ -s ${GITHUB_TOKENS} ]]; then
+		# Run subfinder for passive subdomain enumeration
+		subfinder -all -d "$domain" -max-time "$SUBFINDER_ENUM_TIMEOUT" -silent -o .tmp/subfinder_psub.txt 2>>"$LOGFILE" >/dev/null
+
+		# Run github-subdomains if GITHUB_TOKENS is set and file is not empty
+		if [[ -s $GITHUB_TOKENS ]]; then
 			if [[ $DEEP == true ]]; then
-				github-subdomains -d $domain -t $GITHUB_TOKENS -o .tmp/github_subdomains_psub.txt 2>>"$LOGFILE" >/dev/null
+				github-subdomains -d "$domain" -t "$GITHUB_TOKENS" -o .tmp/github_subdomains_psub.txt 2>>"$LOGFILE" >/dev/null
 			else
-				github-subdomains -d $domain -k -q -t $GITHUB_TOKENS -o .tmp/github_subdomains_psub.txt 2>>"$LOGFILE" >/dev/null
+				github-subdomains -d "$domain" -k -q -t "$GITHUB_TOKENS" -o .tmp/github_subdomains_psub.txt 2>>"$LOGFILE" >/dev/null
 			fi
 		fi
-		if [[ -s ${GITLAB_TOKENS} ]]; then
+
+		# Run gitlab-subdomains if GITLAB_TOKENS is set and file is not empty
+		if [[ -s $GITLAB_TOKENS ]]; then
 			gitlab-subdomains -d "$domain" -t "$GITLAB_TOKENS" 2>>"$LOGFILE" | tee .tmp/gitlab_subdomains_psub.txt >/dev/null
 		fi
+
+		# Check if INSCOPE is true and run check_inscope
 		if [[ $INSCOPE == true ]]; then
 			check_inscope .tmp/subfinder_psub.txt 2>>"$LOGFILE" >/dev/null
 			check_inscope .tmp/github_subdomains_psub.txt 2>>"$LOGFILE" >/dev/null
 			check_inscope .tmp/gitlab_subdomains_psub.txt 2>>"$LOGFILE" >/dev/null
 		fi
-		NUMOFLINES=$(find .tmp -type f -iname "*_psub.txt" -exec cat {} + | sed "s/*.//" | anew .tmp/passive_subs.txt | sed '/^$/d' | wc -l)
-		end_subfunc "${NUMOFLINES} new subs (passive)" ${FUNCNAME[0]}
+
+		# Combine results and count new lines
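+		# Passive sources often report wildcard entries such as "*.example.com";
+		# the sed strips the leading "*." so the names can be resolved later.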
+		NUMOFLINES=$(find .tmp -type f -iname "*_psub.txt" -exec cat {} + | sed "s/^\*\.//" | anew .tmp/passive_subs.txt | sed '/^$/d' | wc -l)
+		end_subfunc "${NUMOFLINES} new subs (passive)" "${FUNCNAME[0]}"
+
 	else
 		if [[ $SUBPASSIVE == false ]]; then
-			printf "\n${yellow}[$(date +'%Y-%m-%d %H:%M:%S')] ${FUNCNAME[0]} skipped in this mode or defined in reconftw.cfg ${reset}\n"
+			printf "\n%b[%s] %s skipped due to mode or configuration settings.%b\n" "$yellow" "$(date +'%Y-%m-%d %H:%M:%S')" "${FUNCNAME[0]}" "$reset"
 		else
-			printf "${yellow}[$(date +'%Y-%m-%d %H:%M:%S')] ${FUNCNAME[0]} is already processed, to force executing ${FUNCNAME[0]} delete\n    $called_fn_dir/.${FUNCNAME[0]} ${reset}\n\n"
+			printf "%b[%s] %s has already been processed. To force execution, delete:\n    %s/.%s%b\n\n" "$yellow" "$(date +'%Y-%m-%d %H:%M:%S')" "${FUNCNAME[0]}" "$called_fn_dir" "${FUNCNAME[0]}" "$reset"
 		fi
 	fi
 
@@ -854,140 +890,466 @@ function sub_passive() {
 
 function sub_crt() {
 
-	mkdir -p {.tmp,subdomains}
+	mkdir -p .tmp subdomains
+
 	if { [[ ! -f "$called_fn_dir/.${FUNCNAME[0]}" ]] || [[ $DIFF == true ]]; } && [[ $SUBCRT == true ]]; then
-		start_subfunc ${FUNCNAME[0]} "Running : Crtsh Subdomain Enumeration"
-		crt -s -json -l ${CTR_LIMIT} $domain 2>>"$LOGFILE" | jq -r '.[].subdomain' 2>>"$LOGFILE" | sed -e 's/^\*\.//' | anew -q .tmp/crtsh_subs_tmp.txt 2>>"$LOGFILE" >/dev/null
-		[[ $INSCOPE == true ]] && check_inscope .tmp/crtsh_subs_tmp.txt 2>>"$LOGFILE" >/dev/null
-		NUMOFLINES=$(cat .tmp/crtsh_subs_tmp.txt 2>>"$LOGFILE" | sed 's/\*.//g' | anew .tmp/crtsh_subs.txt | sed '/^$/d' | wc -l)
-		end_subfunc "${NUMOFLINES} new subs (cert transparency)" ${FUNCNAME[0]}
+		start_subfunc "${FUNCNAME[0]}" "Running: Crtsh Subdomain Enumeration"
+
+		# Query crt.sh for certificate-transparency subdomains
+		crt -s -json -l "${CTR_LIMIT}" "$domain" 2>>"$LOGFILE" |
+			jq -r '.[].subdomain' 2>>"$LOGFILE" |
+			sed -e 's/^\*\.//' >.tmp/crtsh_subdomains.txt
+
+		# Use anew to get new subdomains
+		cat .tmp/crtsh_subdomains.txt | anew -q .tmp/crtsh_subs_tmp.txt
+
+		# If INSCOPE is true, check inscope
+		if [[ $INSCOPE == true ]]; then
+			if ! check_inscope .tmp/crtsh_subs_tmp.txt 2>>"$LOGFILE" >/dev/null; then
+				printf "%b[!] check_inscope command failed.%b\n" "$bred" "$reset"
+				return 1
+			fi
+		fi
+
+		# Process subdomains and append new ones to .tmp/crtsh_subs.txt, count new lines
+		NUMOFLINES=$(sed 's/^\*\.//' .tmp/crtsh_subs_tmp.txt | sed '/^$/d' | anew .tmp/crtsh_subs.txt | wc -l)
+
+		end_subfunc "${NUMOFLINES} new subs (cert transparency)" "${FUNCNAME[0]}"
 	else
 		if [[ $SUBCRT == false ]]; then
-			printf "\n${yellow}[$(date +'%Y-%m-%d %H:%M:%S')] ${FUNCNAME[0]} skipped in this mode or defined in reconftw.cfg ${reset}\n"
+			printf "\n%b[%s] %s skipped due to mode or configuration settings.%b\n" \
+				"$yellow" "$(date +'%Y-%m-%d %H:%M:%S')" "${FUNCNAME[0]}" "$reset"
 		else
-			printf "${yellow}[$(date +'%Y-%m-%d %H:%M:%S')] ${FUNCNAME[0]} is already processed, to force executing ${FUNCNAME[0]} delete\n    $called_fn_dir/.${FUNCNAME[0]} ${reset}\n\n"
+			printf "%b[%s] %s has already been processed. To force execution, delete:\n    %s/.%s%b\n\n" \
+				"$yellow" "$(date +'%Y-%m-%d %H:%M:%S')" "${FUNCNAME[0]}" "$called_fn_dir" "${FUNCNAME[0]}" "$reset"
 		fi
 	fi
-
 }
 
 function sub_active() {
 
-	mkdir -p {.tmp,subdomains}
+	mkdir -p .tmp subdomains
+
 	if [[ ! -f "$called_fn_dir/.${FUNCNAME[0]}" ]] || [[ $DIFF == true ]]; then
-		start_subfunc ${FUNCNAME[0]} "Running : Active Subdomain Enumeration"
-		find .tmp -type f -iname "*_subs.txt" -exec cat {} + | anew -q .tmp/subs_no_resolved.txt
-		[ -s "$outOfScope_file" ] && deleteOutScoped $outOfScope_file .tmp/subs_no_resolved.txt
+		start_subfunc "${FUNCNAME[0]}" "Running: Active Subdomain Enumeration"
+
+		# Combine subdomain files into subs_no_resolved.txt
+		if ! find .tmp -type f -iname "*_subs.txt" -exec cat {} + | anew -q .tmp/subs_no_resolved.txt; then
+			printf "%b[!] Failed to collect subdomains into subs_no_resolved.txt.%b\n" "$bred" "$reset"
+			return 1
+		fi
+
+		# Delete out-of-scope domains if outOfScope_file exists
+		if [[ -s $outOfScope_file ]]; then
+			if ! deleteOutScoped "$outOfScope_file" .tmp/subs_no_resolved.txt; then
+				printf "%b[!] deleteOutScoped command failed.%b\n" "$bred" "$reset"
+				return 1
+			fi
+		fi
+
 		if [[ $AXIOM != true ]]; then
-			resolvers_update_quick_local
-			[ -s ".tmp/subs_no_resolved.txt" ] && puredns resolve .tmp/subs_no_resolved.txt -w .tmp/subdomains_tmp.txt -r $resolvers --resolvers-trusted $resolvers_trusted -l $PUREDNS_PUBLIC_LIMIT --rate-limit-trusted $PUREDNS_TRUSTED_LIMIT --wildcard-tests $PUREDNS_WILDCARDTEST_LIMIT --wildcard-batch $PUREDNS_WILDCARDBATCH_LIMIT 2>>"$LOGFILE" >/dev/null
+			# Update resolvers locally
+			if ! resolvers_update_quick_local; then
+				printf "%b[!] resolvers_update_quick_local command failed.%b\n" "$bred" "$reset"
+				return 1
+			fi
+
+			# Resolve subdomains using puredns
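+			# -l and --rate-limit-trusted cap queries per second against public
+			# and trusted resolvers; --wildcard-tests/--wildcard-batch tune
+			# puredns' wildcard filtering (all values come from reconftw.cfg).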
+			if [[ -s ".tmp/subs_no_resolved.txt" ]]; then
+				puredns resolve .tmp/subs_no_resolved.txt -w .tmp/subdomains_tmp.txt \
+					-r "$resolvers" --resolvers-trusted "$resolvers_trusted" \
+					-l "$PUREDNS_PUBLIC_LIMIT" --rate-limit-trusted "$PUREDNS_TRUSTED_LIMIT" \
+					--wildcard-tests "$PUREDNS_WILDCARDTEST_LIMIT" \
+					--wildcard-batch "$PUREDNS_WILDCARDBATCH_LIMIT" \
+					2>>"$LOGFILE" >/dev/null
+			fi
 		else
-			resolvers_update_quick_axiom
-			[ -s ".tmp/subs_no_resolved.txt" ] && axiom-scan .tmp/subs_no_resolved.txt -m puredns-resolve -r /home/op/lists/resolvers.txt --resolvers-trusted /home/op/lists/resolvers_trusted.txt --wildcard-tests $PUREDNS_WILDCARDTEST_LIMIT --wildcard-batch $PUREDNS_WILDCARDBATCH_LIMIT -o .tmp/subdomains_tmp.txt $AXIOM_EXTRA_ARGS 2>>"$LOGFILE" >/dev/null
+			# Update resolvers using axiom
+			if ! resolvers_update_quick_axiom; then
+				printf "%b[!] resolvers_update_quick_axiom command failed.%b\n" "$bred" "$reset"
+				return 1
+			fi
+
+			# Resolve subdomains using axiom-scan
+			if [[ -s ".tmp/subs_no_resolved.txt" ]]; then
+				axiom-scan .tmp/subs_no_resolved.txt -m puredns-resolve \
+					-r /home/op/lists/resolvers.txt \
+					--resolvers-trusted /home/op/lists/resolvers_trusted.txt \
+					--wildcard-tests "$PUREDNS_WILDCARDTEST_LIMIT" \
+					--wildcard-batch "$PUREDNS_WILDCARDBATCH_LIMIT" \
+					-o .tmp/subdomains_tmp.txt $AXIOM_EXTRA_ARGS \
+					2>>"$LOGFILE" >/dev/null
+			fi
+		fi
+
+		# Add the domain itself to the list if it resolves
+		echo "$domain" | dnsx -retry 3 -silent -r "$resolvers_trusted" \
+			2>>"$LOGFILE" | anew -q .tmp/subdomains_tmp.txt
+
+		# If INSCOPE is true, check inscope
+		if [[ $INSCOPE == true ]]; then
+			if ! check_inscope .tmp/subdomains_tmp.txt 2>>"$LOGFILE" >/dev/null; then
+				printf "%b[!] check_inscope command failed.%b\n" "$bred" "$reset"
+				return 1
+			fi
 		fi
-		echo $domain | dnsx -retry 3 -silent -r $resolvers_trusted 2>>"$LOGFILE" | anew -q .tmp/subdomains_tmp.txt
+
+		# Process subdomains and append new ones to subdomains.txt, count new lines
+		if ! NUMOFLINES=$(grep "\.$domain$\|^$domain$" .tmp/subdomains_tmp.txt 2>>"$LOGFILE" |
+			grep -E '^([a-zA-Z0-9\.\-]+\.)+[a-zA-Z]{1,}$' |
+			anew subdomains/subdomains.txt | sed '/^$/d' | wc -l); then
+			printf "%b[!] Failed to process subdomains.%b\n" "$bred" "$reset"
+			return 1
+		fi
+
+		end_subfunc "${NUMOFLINES} subs DNS resolved from passive" "${FUNCNAME[0]}"
+	else
+		printf "%b[%s] %s has already been processed. To force execution, delete:\n    %s/.%s%b\n\n" \
+			"$yellow" "$(date +'%Y-%m-%d %H:%M:%S')" "${FUNCNAME[0]}" \
+			"$called_fn_dir" "${FUNCNAME[0]}" "$reset"
+	fi
+}
+
+function sub_tls() {
+	mkdir -p .tmp subdomains
+
+	if [[ ! -f "$called_fn_dir/.${FUNCNAME[0]}" ]] || [[ $DIFF == true ]]; then
+		start_subfunc "${FUNCNAME[0]}" "Running: TLS Active Subdomain Enumeration"
+
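+		# tlsx connects to each known host, grabs its certificate and, with
+		# -san/-cn, prints the subject alternative names and common names
+		# (-ro emits only those values); DEEP mode also probes TLS_PORTS.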
 		if [[ $DEEP == true ]]; then
-			cat .tmp/subdomains_tmp.txt | tlsx -san -cn -silent -ro -c $TLSX_THREADS -p $TLS_PORTS | anew -q .tmp/subdomains_tmp.txt
+			if [[ $AXIOM != true ]]; then
+				tlsx -san -cn -silent -ro -c "$TLSX_THREADS" \
+					-p "$TLS_PORTS" -o .tmp/subdomains_tlsx.txt <subdomains/subdomains.txt \
+					2>>"$LOGFILE" >/dev/null
+			else
+				axiom-scan subdomains/subdomains.txt -m tlsx \
+					-san -cn -silent -ro -c "$TLSX_THREADS" -p "$TLS_PORTS" \
+					-o .tmp/subdomains_tlsx.txt $AXIOM_EXTRA_ARGS 2>>"$LOGFILE" >/dev/null
+			fi
+		else
+			if [[ $AXIOM != true ]]; then
+				tlsx -san -cn -silent -ro -c "$TLSX_THREADS" <subdomains/subdomains.txt >.tmp/subdomains_tlsx.txt 2>>"$LOGFILE"
+			else
+				axiom-scan subdomains/subdomains.txt -m tlsx \
+					-san -cn -silent -ro -c "$TLSX_THREADS" \
+					-o .tmp/subdomains_tlsx.txt $AXIOM_EXTRA_ARGS 2>>"$LOGFILE" >/dev/null
+			fi
+		fi
+
+		if [[ -s ".tmp/subdomains_tlsx.txt" ]]; then
+			grep "\.$domain$\|^$domain$" .tmp/subdomains_tlsx.txt |
+				grep -E '^((http|https):\/\/)?([a-zA-Z0-9]([a-zA-Z0-9\-]*[a-zA-Z0-9])?\.)+[a-zA-Z]{1,}(\/.*)?$' |
+				sed "s/|__ //" | anew -q .tmp/subdomains_tlsx_clean.txt
+		else
+			printf "%b[!] No subdomains found in tlsx output.%b\n" "$yellow" "$reset"
+			return 0
+		fi
+
+		if [[ $AXIOM != true ]]; then
+			if ! resolvers_update_quick_local; then
+				printf "%b[!] resolvers_update_quick_local command failed.%b\n" "$bred" "$reset"
+				return 1
+			fi
+			if [[ -s ".tmp/subdomains_tlsx_clean.txt" ]]; then
+				puredns resolve .tmp/subdomains_tlsx_clean.txt -w .tmp/subdomains_tlsx_resolved.txt \
+					-r "$resolvers" --resolvers-trusted "$resolvers_trusted" \
+					-l "$PUREDNS_PUBLIC_LIMIT" --rate-limit-trusted "$PUREDNS_TRUSTED_LIMIT" \
+					--wildcard-tests "$PUREDNS_WILDCARDTEST_LIMIT" --wildcard-batch "$PUREDNS_WILDCARDBATCH_LIMIT" \
+					2>>"$LOGFILE" >/dev/null
+			else
+				printf "%b[!] No subdomains to resolve.%b\n" "$yellow" "$reset"
+				return 0
+			fi
 		else
-			cat .tmp/subdomains_tmp.txt | tlsx -san -cn -silent -ro -c $TLSX_THREADS | anew -q .tmp/subdomains_tmp.txt
+			if ! resolvers_update_quick_axiom; then
+				printf "%b[!] resolvers_update_quick_axiom command failed.%b\n" "$bred" "$reset"
+				return 1
+			fi
+			if [[ -s ".tmp/subdomains_tlsx_clean.txt" ]]; then
+				axiom-scan .tmp/subdomains_tlsx_clean.txt -m puredns-resolve \
+					-r /home/op/lists/resolvers.txt --resolvers-trusted /home/op/lists/resolvers_trusted.txt \
+					--wildcard-tests "$PUREDNS_WILDCARDTEST_LIMIT" --wildcard-batch "$PUREDNS_WILDCARDBATCH_LIMIT" \
+					-o .tmp/subdomains_tlsx_resolved.txt $AXIOM_EXTRA_ARGS \
+					2>>"$LOGFILE" >/dev/null
+			else
+				printf "%b[!] No subdomains to resolve.%b\n" "$yellow" "$reset"
+				return 0
+			fi
+		fi
+
+		if [[ $INSCOPE == true ]]; then
+			if ! check_inscope .tmp/subdomains_tlsx_resolved.txt 2>>"$LOGFILE" >/dev/null; then
+				printf "%b[!] check_inscope command failed.%b\n" "$bred" "$reset"
+				return 1
+			fi
+		fi
+
+		if ! NUMOFLINES=$(anew subdomains/subdomains.txt <.tmp/subdomains_tlsx_resolved.txt | sed '/^$/d' | wc -l); then
+			printf "%b[!] Counting new subdomains failed.%b\n" "$bred" "$reset"
+			return 1
 		fi
-		[[ $INSCOPE == true ]] && check_inscope .tmp/subdomains_tmp.txt 2>>"$LOGFILE" >/dev/null
-		NUMOFLINES=$(cat .tmp/subdomains_tmp.txt 2>>"$LOGFILE" | grep "\.$domain$\|^$domain$" | grep -E '^((http|https):\/\/)?([a-zA-Z0-9]([a-zA-Z0-9\-]*[a-zA-Z0-9])?\.)+[a-zA-Z]{1,}(\/.*)?$' | anew subdomains/subdomains.txt | sed '/^$/d' | wc -l)
-		end_subfunc "${NUMOFLINES} subs DNS resolved from passive" ${FUNCNAME[0]}
+
+		end_subfunc "${NUMOFLINES} new subs (tls active enum)" "${FUNCNAME[0]}"
 	else
-		printf "${yellow}[$(date +'%Y-%m-%d %H:%M:%S')] ${FUNCNAME[0]} is already processed, to force executing ${FUNCNAME[0]} delete\n    $called_fn_dir/.${FUNCNAME[0]} ${reset}\n\n"
+		printf "%b[%s] %s has already been processed. To force execution, delete:\n    %s/.%s%b\n\n" \
+			"$yellow" "$(date +'%Y-%m-%d %H:%M:%S')" "${FUNCNAME[0]}" \
+			"$called_fn_dir" "${FUNCNAME[0]}" "$reset"
 	fi
-
 }
 
 function sub_noerror() {
 
-	mkdir -p {.tmp,subdomains}
+	mkdir -p .tmp subdomains
+
 	if { [[ ! -f "$called_fn_dir/.${FUNCNAME[0]}" ]] || [[ $DIFF == true ]]; } && [[ $SUBNOERROR == true ]]; then
-		start_subfunc ${FUNCNAME[0]} "Running : Checking NOERROR DNS response"
-		if [[ $(echo "${RANDOM}thistotallynotexist${RANDOM}.$domain" | dnsx -r $resolvers -rcode noerror,nxdomain -retry 3 -silent | cut -d' ' -f2) == "[NXDOMAIN]" ]]; then
-			resolvers_update_quick_local
+		start_subfunc "${FUNCNAME[0]}" "Running: Checking NOERROR DNS response"
+
+		# Check for DNSSEC black lies
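+		# A resolver answering NOERROR for a random, nonexistent label is
+		# lying (DNSSEC "black lies"), which would flood this technique with
+		# false positives, so proceed only when the probe returns [NXDOMAIN].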
+		random_subdomain="${RANDOM}thistotallynotexist${RANDOM}.$domain"
+		dns_response=$(echo "$random_subdomain" | dnsx -r "$resolvers" -rcode noerror,nxdomain -retry 3 -silent | cut -d' ' -f2)
+
+		if [[ $dns_response == "[NXDOMAIN]" ]]; then
+			if ! resolvers_update_quick_local; then
+				printf "%b[!] Failed to update resolvers.%b\n" "$bred" "$reset"
+				return 1
+			fi
+
+			# Determine wordlist based on DEEP setting
 			if [[ $DEEP == true ]]; then
-				dnsx -d $domain -r $resolvers -silent -rcode noerror -w $subs_wordlist_big | cut -d' ' -f1 | anew -q .tmp/subs_noerror.txt 2>>"$LOGFILE" >/dev/null
+				wordlist="$subs_wordlist_big"
 			else
-				dnsx -d $domain -r $resolvers -silent -rcode noerror -w $subs_wordlist | cut -d' ' -f1 | anew -q .tmp/subs_noerror.txt 2>>"$LOGFILE" >/dev/null
+				wordlist="$subs_wordlist"
+			fi
+
+			# Brute-force with dnsx, keeping names whose response code is NOERROR
+			dnsx -d "$domain" -r "$resolvers" -silent \
+				-rcode noerror -w "$wordlist" \
+				2>>"$LOGFILE" | cut -d' ' -f1 | anew -q .tmp/subs_noerror.txt >/dev/null
+
+			# Check inscope if INSCOPE is true
+			if [[ $INSCOPE == true ]]; then
+				if ! check_inscope .tmp/subs_noerror.txt 2>>"$LOGFILE" >/dev/null; then
+					printf "%b[!] check_inscope command failed.%b\n" "$bred" "$reset"
+					return 1
+				fi
+			fi
+
+			# Process subdomains and append new ones to subdomains.txt, count new lines
+			if ! NUMOFLINES=$(grep "\.$domain$\|^$domain$" .tmp/subs_noerror.txt 2>>"$LOGFILE" |
+				grep -E '^([a-zA-Z0-9\.\-]+\.)+[a-zA-Z]{1,}$' |
+				sed 's/^\*\.//' | anew subdomains/subdomains.txt | sed '/^$/d' | wc -l); then
+				printf "%b[!] Failed to process subdomains.%b\n" "$bred" "$reset"
+				return 1
 			fi
-			[[ $INSCOPE == true ]] && check_inscope .tmp/subs_noerror.txt 2>>"$LOGFILE" >/dev/null
-			NUMOFLINES=$(cat .tmp/subs_noerror.txt 2>>"$LOGFILE" | sed "s/*.//" | grep ".$domain$" | grep -E '^((http|https):\/\/)?([a-zA-Z0-9]([a-zA-Z0-9\-]*[a-zA-Z0-9])?\.)+[a-zA-Z]{1,}(\/.*)?$' | anew subdomains/subdomains.txt | sed '/^$/d' | wc -l)
-			end_subfunc "${NUMOFLINES} new subs (DNS noerror)" ${FUNCNAME[0]}
+
+			end_subfunc "${NUMOFLINES} new subs (DNS noerror)" "${FUNCNAME[0]}"
+
 		else
-			printf "\n${yellow}[$(date +'%Y-%m-%d %H:%M:%S')] Detected DNSSEC black lies, skipping this technique ${reset}\n"
+			printf "\n%b[%s] Detected DNSSEC black lies, skipping this technique.%b\n" \
+				"$yellow" "$(date +'%Y-%m-%d %H:%M:%S')" "$reset"
 		fi
+
 	else
 		if [[ $SUBNOERROR == false ]]; then
-			printf "\n${yellow}[$(date +'%Y-%m-%d %H:%M:%S')] ${FUNCNAME[0]} skipped in this mode or defined in reconftw.cfg ${reset}\n"
+			printf "\n%b[%s] %s skipped due to mode or configuration settings.%b\n" \
+				"$yellow" "$(date +'%Y-%m-%d %H:%M:%S')" "${FUNCNAME[0]}" "$reset"
 		else
-			printf "${yellow}[$(date +'%Y-%m-%d %H:%M:%S')] ${FUNCNAME[0]} is already processed, to force executing ${FUNCNAME[0]} delete\n    $called_fn_dir/.${FUNCNAME[0]} ${reset}\n\n"
+			printf "%b[%s] %s has already been processed. To force execution, delete:\n    %s/.%s%b\n\n" \
+				"$yellow" "$(date +'%Y-%m-%d %H:%M:%S')" "${FUNCNAME[0]}" \
+				"$called_fn_dir" "${FUNCNAME[0]}" "$reset"
 		fi
 	fi
 
 }
 
 function sub_dns() {
+	mkdir -p .tmp subdomains
 
-	mkdir -p {.tmp,subdomains}
 	if [[ ! -f "$called_fn_dir/.${FUNCNAME[0]}" ]] || [[ $DIFF == true ]]; then
-		start_subfunc ${FUNCNAME[0]} "Running : DNS Subdomain Enumeration and PTR search"
+		start_subfunc "${FUNCNAME[0]}" "Running: DNS Subdomain Enumeration and PTR search"
+
 		if [[ $AXIOM != true ]]; then
-			[ -s "subdomains/subdomains.txt" ] && cat subdomains/subdomains.txt | dnsx -r $resolvers_trusted -a -aaaa -cname -ns -ptr -mx -soa -silent -retry 3 -json -o subdomains/subdomains_dnsregs.json 2>>"$LOGFILE" >/dev/null
-			[ -s "subdomains/subdomains_dnsregs.json" ] && cat subdomains/subdomains_dnsregs.json | jq -r 'try .a[], try .aaaa[], try .cname[], try .ns[], try .ptr[], try .mx[], try .soa[]' 2>/dev/null | grep ".$domain$" | grep -E '^((http|https):\/\/)?([a-zA-Z0-9]([a-zA-Z0-9\-]*[a-zA-Z0-9])?\.)+[a-zA-Z]{1,}(\/.*)?$' | anew -q .tmp/subdomains_dns.txt
-			[ -s "subdomains/subdomains_dnsregs.json" ] && cat subdomains/subdomains_dnsregs.json | jq -r 'try .a[]' | sort -u | hakip2host | cut -d' ' -f 3 | unfurl -u domains | sed -e 's/*\.//' -e 's/\.$//' -e '/\./!d' | grep ".$domain$" | grep -E '^((http|https):\/\/)?([a-zA-Z0-9]([a-zA-Z0-9\-]*[a-zA-Z0-9])?\.)+[a-zA-Z]{1,}(\/.*)?$' | anew -q .tmp/subdomains_dns.txt
-			[ -s "subdomains/subdomains_dnsregs.json" ] && cat subdomains/subdomains_dnsregs.json | jq -r 'try "\(.host) - \(.a[])"' 2>/dev/null | sort -u -k2 | anew -q subdomains/subdomains_ips.txt
-			resolvers_update_quick_local
-			[ -s ".tmp/subdomains_dns.txt" ] && puredns resolve .tmp/subdomains_dns.txt -w .tmp/subdomains_dns_resolved.txt -r $resolvers --resolvers-trusted $resolvers_trusted -l $PUREDNS_PUBLIC_LIMIT --rate-limit-trusted $PUREDNS_TRUSTED_LIMIT --wildcard-tests $PUREDNS_WILDCARDTEST_LIMIT --wildcard-batch $PUREDNS_WILDCARDBATCH_LIMIT 2>>"$LOGFILE" >/dev/null
+			if [[ -s "subdomains/subdomains.txt" ]]; then
+				dnsx -r "$resolvers_trusted" -a -aaaa -cname -ns -ptr -mx -soa -silent -retry 3 -json \
+					-o "subdomains/subdomains_dnsregs.json" <"subdomains/subdomains.txt" 2>>"$LOGFILE" >/dev/null
+			fi
+
+			if [[ -s "subdomains/subdomains_dnsregs.json" ]]; then
+				# Extract various DNS records and process them
+				jq -r 'try .a[], try .aaaa[], try .cname[], try .ns[], try .ptr[], try .mx[], try .soa[]' \
+					<"subdomains/subdomains_dnsregs.json" 2>/dev/null |
+					grep "\.$domain$" |
+					grep -E '^([a-zA-Z0-9\-\.]+\.)+[a-zA-Z]{1,}$' |
+					anew -q .tmp/subdomains_dns.txt
+
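+				# hakip2host probes each A-record IP and prints one line per
+				# PTR/SSL finding; the hostname sits in the third field
+				# (format assumed from the awk '{print $3}' extraction).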
+				jq -r 'try .a[]' <"subdomains/subdomains_dnsregs.json" | sort -u |
+					hakip2host | awk '{print $3}' | unfurl -u domains |
+					sed -e 's/^\*\.//' -e 's/\.$//' -e '/\./!d' |
+					grep "\.$domain$" |
+					grep -E '^([a-zA-Z0-9\-\.]+\.)+[a-zA-Z]{1,}$' |
+					anew -q .tmp/subdomains_dns.txt
+
+				jq -r 'try "\(.host) - \(.a[])"' <"subdomains/subdomains_dnsregs.json" 2>/dev/null |
+					sort -u -k2 | anew -q "subdomains/subdomains_ips.txt"
+			fi
+
+			if ! resolvers_update_quick_local; then
+				printf "%b[!] Failed to update resolvers.%b\n" "$bred" "$reset"
+			fi
+
+			if [[ -s ".tmp/subdomains_dns.txt" ]]; then
+				puredns resolve .tmp/subdomains_dns.txt -w .tmp/subdomains_dns_resolved.txt \
+					-r "$resolvers" --resolvers-trusted "$resolvers_trusted" \
+					-l "$PUREDNS_PUBLIC_LIMIT" --rate-limit-trusted "$PUREDNS_TRUSTED_LIMIT" \
+					--wildcard-tests "$PUREDNS_WILDCARDTEST_LIMIT" --wildcard-batch "$PUREDNS_WILDCARDBATCH_LIMIT" \
+					2>>"$LOGFILE" >/dev/null
+			fi
 		else
-			[ -s "subdomains/subdomains.txt" ] && axiom-scan subdomains/subdomains.txt -m dnsx -retry 3 -a -aaaa -cname -ns -ptr -mx -soa -json -o subdomains/subdomains_dnsregs.json $AXIOM_EXTRA_ARGS 2>>"$LOGFILE" >/dev/null
-			[ -s "subdomains/subdomains_dnsregs.json" ] && cat subdomains/subdomains_dnsregs.json | jq -r 'try .a[]' | sort -u | anew -q .tmp/subdomains_dns_a_records.txt
-			[ -s "subdomains/subdomains_dnsregs.json" ] && cat subdomains/subdomains_dnsregs.json | jq -r 'try .a[]' | sort -u | hakip2host | cut -d' ' -f 3 | unfurl -u domains | sed -e 's/*\.//' -e 's/\.$//' -e '/\./!d' | grep ".$domain$" | grep -E '^((http|https):\/\/)?([a-zA-Z0-9]([a-zA-Z0-9\-]*[a-zA-Z0-9])?\.)+[a-zA-Z]{1,}(\/.*)?$' | anew -q .tmp/subdomains_dns.txt
-			[ -s "subdomains/subdomains_dnsregs.json" ] && cat subdomains/subdomains_dnsregs.json | jq -r 'try .a[], try .aaaa[], try .cname[], try .ns[], try .ptr[], try .mx[], try .soa[]' 2>/dev/null | grep ".$domain$" | grep -E '^((http|https):\/\/)?([a-zA-Z0-9]([a-zA-Z0-9\-]*[a-zA-Z0-9])?\.)+[a-zA-Z]{1,}(\/.*)?$' | anew -q .tmp/subdomains_dns.txt
-			[ -s "subdomains/subdomains_dnsregs.json" ] && cat subdomains/subdomains_dnsregs.json | jq -r 'try "\(.host) - \(.a[])"' 2>/dev/null | sort -u -k2 | anew -q subdomains/subdomains_ips.txt
-			resolvers_update_quick_axiom
-			[ -s ".tmp/subdomains_dns.txt" ] && axiom-scan .tmp/subdomains_dns.txt -m puredns-resolve -r /home/op/lists/resolvers.txt --resolvers-trusted /home/op/lists/resolvers_trusted.txt --wildcard-tests $PUREDNS_WILDCARDTEST_LIMIT --wildcard-batch $PUREDNS_WILDCARDBATCH_LIMIT -o .tmp/subdomains_dns_resolved.txt $AXIOM_EXTRA_ARGS 2>>"$LOGFILE" >/dev/null
+			if [[ -s "subdomains/subdomains.txt" ]]; then
+				axiom-scan "subdomains/subdomains.txt" -m dnsx -retry 3 -a -aaaa -cname -ns -ptr -mx -soa -json \
+					-o "subdomains/subdomains_dnsregs.json" $AXIOM_EXTRA_ARGS 2>>"$LOGFILE" >/dev/null
+			fi
+
+			if [[ -s "subdomains/subdomains_dnsregs.json" ]]; then
+				jq -r 'try .a[]' <"subdomains/subdomains_dnsregs.json" | sort -u |
+					anew -q .tmp/subdomains_dns_a_records.txt
+
+				jq -r 'try .a[]' <"subdomains/subdomains_dnsregs.json" | sort -u |
+					hakip2host | awk '{print $3}' | unfurl -u domains |
+					sed -e 's/^\*\.//' -e 's/\.$//' -e '/\./!d' |
+					grep "\.$domain$" |
+					grep -E '^([a-zA-Z0-9\-\.]+\.)+[a-zA-Z]{1,}$' |
+					anew -q .tmp/subdomains_dns.txt
+
+				jq -r 'try .a[], try .aaaa[], try .cname[], try .ns[], try .ptr[], try .mx[], try .soa[]' \
+					<"subdomains/subdomains_dnsregs.json" 2>/dev/null |
+					grep "\.$domain$" |
+					grep -E '^([a-zA-Z0-9\-\.]+\.)+[a-zA-Z]{1,}$' |
+					anew -q .tmp/subdomains_dns.txt
+
+				jq -r 'try "\(.host) - \(.a[])"' <"subdomains/subdomains_dnsregs.json" 2>/dev/null |
+					sort -u -k2 | anew -q "subdomains/subdomains_ips.txt"
+			fi
+
+			if ! resolvers_update_quick_axiom; then
+				printf "%b[!] Failed to update resolvers.%b\n" "$bred" "$reset"
+			fi
+
+			if [[ -s ".tmp/subdomains_dns.txt" ]]; then
+				axiom-scan .tmp/subdomains_dns.txt -m puredns-resolve \
+					-r "/home/op/lists/resolvers.txt" --resolvers-trusted "/home/op/lists/resolvers_trusted.txt" \
+					--wildcard-tests "$PUREDNS_WILDCARDTEST_LIMIT" --wildcard-batch "$PUREDNS_WILDCARDBATCH_LIMIT" \
+					-o .tmp/subdomains_dns_resolved.txt $AXIOM_EXTRA_ARGS \
+					2>>"$LOGFILE" >/dev/null
+			fi
+		fi
+
+		if [[ $INSCOPE == true ]]; then
+			if ! check_inscope .tmp/subdomains_dns_resolved.txt 2>>"$LOGFILE" >/dev/null; then
+				printf "%b[!] check_inscope command failed.%b\n" "$bred" "$reset"
+			fi
+		fi
+
+		if ! NUMOFLINES=$(grep "\.$domain$\|^$domain$" .tmp/subdomains_dns_resolved.txt 2>>"$LOGFILE" |
+			grep -E '^([a-zA-Z0-9\-\.]+\.)+[a-zA-Z]{1,}$' |
+			anew subdomains/subdomains.txt | sed '/^$/d' | wc -l); then
+			printf "%b[!] Failed to count new subdomains.%b\n" "$bred" "$reset"
+			return 1
 		fi
-		[[ $INSCOPE == true ]] && check_inscope .tmp/subdomains_dns_resolved.txt 2>>"$LOGFILE" >/dev/null
-		NUMOFLINES=$(cat .tmp/subdomains_dns_resolved.txt 2>>"$LOGFILE" | grep "\.$domain$\|^$domain$" | grep -E '^((http|https):\/\/)?([a-zA-Z0-9]([a-zA-Z0-9\-]*[a-zA-Z0-9])?\.)+[a-zA-Z]{1,}(\/.*)?$' | anew subdomains/subdomains.txt | sed '/^$/d' | wc -l)
-		end_subfunc "${NUMOFLINES} new subs (dns resolution)" ${FUNCNAME[0]}
+
+		end_subfunc "${NUMOFLINES} new subs (dns resolution)" "${FUNCNAME[0]}"
 	else
-		printf "${yellow}[$(date +'%Y-%m-%d %H:%M:%S')] ${FUNCNAME[0]} is already processed, to force executing ${FUNCNAME[0]} delete\n    $called_fn_dir/.${FUNCNAME[0]} ${reset}\n\n"
+		printf "\n%b[%s] %s has already been processed. To force execution, delete:\n    %s/.%s%b\n\n" \
+			"$yellow" "$(date +'%Y-%m-%d %H:%M:%S')" "${FUNCNAME[0]}" "$called_fn_dir" "${FUNCNAME[0]}" "$reset"
 	fi
-
 }
 
 function sub_brute() {
 
-	mkdir -p {.tmp,subdomains}
+	mkdir -p .tmp subdomains
+
 	if { [[ ! -f "$called_fn_dir/.${FUNCNAME[0]}" ]] || [[ $DIFF == true ]]; } && [[ $SUBBRUTE == true ]]; then
-		start_subfunc ${FUNCNAME[0]} "Running : Bruteforce Subdomain Enumeration"
+		start_subfunc "${FUNCNAME[0]}" "Running: Bruteforce Subdomain Enumeration"
+
 		if [[ $AXIOM != true ]]; then
-			resolvers_update_quick_local
-			if [[ $DEEP == true ]]; then
-				puredns bruteforce $subs_wordlist_big $domain -w .tmp/subs_brute.txt -r $resolvers --resolvers-trusted $resolvers_trusted -l $PUREDNS_PUBLIC_LIMIT --rate-limit-trusted $PUREDNS_TRUSTED_LIMIT --wildcard-tests $PUREDNS_WILDCARDTEST_LIMIT --wildcard-batch $PUREDNS_WILDCARDBATCH_LIMIT 2>>"$LOGFILE" >/dev/null
+			if ! resolvers_update_quick_local; then
+				printf "%b[!] Failed to update resolvers.%b\n" "$bred" "$reset"
+				return 1
+			fi
+
+			wordlist="$subs_wordlist"
+			[[ $DEEP == true ]] && wordlist="$subs_wordlist_big"
+
+			# Run puredns bruteforce
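+			# Two-phase approach: bruteforce generates "word.domain" candidates
+			# from the wordlist, and the resolve pass below re-checks the hits
+			# against trusted resolvers to weed out wildcard answers.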
+			puredns bruteforce "$wordlist" "$domain" -w .tmp/subs_brute.txt -r "$resolvers" --resolvers-trusted "$resolvers_trusted" \
+				-l "$PUREDNS_PUBLIC_LIMIT" --rate-limit-trusted "$PUREDNS_TRUSTED_LIMIT" \
+				--wildcard-tests "$PUREDNS_WILDCARDTEST_LIMIT" --wildcard-batch "$PUREDNS_WILDCARDBATCH_LIMIT" \
+				2>>"$LOGFILE" >/dev/null
+
+			# Resolve the subdomains
+			if [[ -s ".tmp/subs_brute.txt" ]]; then
+				puredns resolve .tmp/subs_brute.txt -w .tmp/subs_brute_valid.txt -r "$resolvers" --resolvers-trusted "$resolvers_trusted" \
+					-l "$PUREDNS_PUBLIC_LIMIT" --rate-limit-trusted "$PUREDNS_TRUSTED_LIMIT" \
+					--wildcard-tests "$PUREDNS_WILDCARDTEST_LIMIT" --wildcard-batch "$PUREDNS_WILDCARDBATCH_LIMIT" \
+					2>>"$LOGFILE" >/dev/null
 			else
-				puredns bruteforce $subs_wordlist $domain -w .tmp/subs_brute.txt -r $resolvers --resolvers-trusted $resolvers_trusted -l $PUREDNS_PUBLIC_LIMIT --rate-limit-trusted $PUREDNS_TRUSTED_LIMIT --wildcard-tests $PUREDNS_WILDCARDTEST_LIMIT --wildcard-batch $PUREDNS_WILDCARDBATCH_LIMIT 2>>"$LOGFILE" >/dev/null
+				printf "%b[!] No subdomains found during bruteforce.%b\n" "$yellow" "$reset"
+				return 0
 			fi
-			[ -s ".tmp/subs_brute.txt" ] && puredns resolve .tmp/subs_brute.txt -w .tmp/subs_brute_valid.txt -r $resolvers --resolvers-trusted $resolvers_trusted -l $PUREDNS_PUBLIC_LIMIT --rate-limit-trusted $PUREDNS_TRUSTED_LIMIT --wildcard-tests $PUREDNS_WILDCARDTEST_LIMIT --wildcard-batch $PUREDNS_WILDCARDBATCH_LIMIT 2>>"$LOGFILE" >/dev/null
+
 		else
-			resolvers_update_quick_axiom
-			if [[ $DEEP == true ]]; then
-				axiom-scan $subs_wordlist_big -m puredns-single $domain -r /home/op/lists/resolvers.txt --resolvers-trusted /home/op/lists/resolvers_trusted.txt --wildcard-tests $PUREDNS_WILDCARDTEST_LIMIT --wildcard-batch $PUREDNS_WILDCARDBATCH_LIMIT -o .tmp/subs_brute.txt $AXIOM_EXTRA_ARGS 2>>"$LOGFILE" >/dev/null
+			if ! resolvers_update_quick_axiom; then
+				printf "%b[!] Failed to update resolvers on axiom.%b\n" "$bred" "$reset"
+				return 1
+			fi
+
+			wordlist="$subs_wordlist"
+			[[ $DEEP == true ]] && wordlist="$subs_wordlist_big"
+
+			# Run axiom-scan with puredns-single
+			axiom-scan "$wordlist" -m puredns-single "$domain" -r /home/op/lists/resolvers.txt \
+				--resolvers-trusted /home/op/lists/resolvers_trusted.txt \
+				--wildcard-tests "$PUREDNS_WILDCARDTEST_LIMIT" --wildcard-batch "$PUREDNS_WILDCARDBATCH_LIMIT" \
+				-o .tmp/subs_brute.txt $AXIOM_EXTRA_ARGS 2>>"$LOGFILE" >/dev/null
+
+			# Resolve the subdomains using axiom-scan
+			if [[ -s ".tmp/subs_brute.txt" ]]; then
+				axiom-scan .tmp/subs_brute.txt -m puredns-resolve -r /home/op/lists/resolvers.txt \
+					--resolvers-trusted /home/op/lists/resolvers_trusted.txt \
+					--wildcard-tests "$PUREDNS_WILDCARDTEST_LIMIT" --wildcard-batch "$PUREDNS_WILDCARDBATCH_LIMIT" \
+					-o .tmp/subs_brute_valid.txt $AXIOM_EXTRA_ARGS 2>>"$LOGFILE" >/dev/null
 			else
-				axiom-scan $subs_wordlist -m puredns-single $domain -r /home/op/lists/resolvers.txt --resolvers-trusted /home/op/lists/resolvers_trusted.txt --wildcard-tests $PUREDNS_WILDCARDTEST_LIMIT --wildcard-batch $PUREDNS_WILDCARDBATCH_LIMIT -o .tmp/subs_brute.txt $AXIOM_EXTRA_ARGS 2>>"$LOGFILE" >/dev/null
+				printf "%b[!] No subdomains found during bruteforce.%b\n" "$yellow" "$reset"
+				return 0
+			fi
+		fi
+
+		# Check inscope if INSCOPE is true
+		if [[ $INSCOPE == true ]]; then
+			if ! check_inscope .tmp/subs_brute_valid.txt 2>>"$LOGFILE" >/dev/null; then
+				printf "%b[!] check_inscope command failed.%b\n" "$bred" "$reset"
+				return 1
 			fi
-			[ -s ".tmp/subs_brute.txt" ] && axiom-scan .tmp/subs_brute.txt -m puredns-resolve -r /home/op/lists/resolvers.txt --resolvers-trusted /home/op/lists/resolvers_trusted.txt --wildcard-tests $PUREDNS_WILDCARDTEST_LIMIT --wildcard-batch $PUREDNS_WILDCARDBATCH_LIMIT -o .tmp/subs_brute_valid.txt $AXIOM_EXTRA_ARGS 2>>"$LOGFILE" >/dev/null
 		fi
-		[[ $INSCOPE == true ]] && check_inscope .tmp/subs_brute_valid.txt 2>>"$LOGFILE" >/dev/null
-		NUMOFLINES=$(cat .tmp/subs_brute_valid.txt 2>>"$LOGFILE" | sed "s/*.//" | grep ".$domain$" | grep -E '^((http|https):\/\/)?([a-zA-Z0-9]([a-zA-Z0-9\-]*[a-zA-Z0-9])?\.)+[a-zA-Z]{1,}(\/.*)?$' | anew subdomains/subdomains.txt | sed '/^$/d' | wc -l)
-		end_subfunc "${NUMOFLINES} new subs (bruteforce)" ${FUNCNAME[0]}
+
+		# Process subdomains and append new ones to subdomains.txt, count new lines
+		if ! NUMOFLINES=$(grep "\.$domain$\|^$domain$" .tmp/subs_brute_valid.txt 2>>"$LOGFILE" |
+			grep -E '^([a-zA-Z0-9\.\-]+\.)+[a-zA-Z]{1,}$' |
+			sed 's/^\*\.//' | anew subdomains/subdomains.txt | sed '/^$/d' | wc -l); then
+			printf "%b[!] Failed to process subdomains.%b\n" "$bred" "$reset"
+			return 1
+		fi
+
+		end_subfunc "${NUMOFLINES} new subs (bruteforce)" "${FUNCNAME[0]}"
+
 	else
 		if [[ $SUBBRUTE == false ]]; then
-			printf "\n${yellow}[$(date +'%Y-%m-%d %H:%M:%S')] ${FUNCNAME[0]} skipped in this mode or defined in reconftw.cfg ${reset}\n"
+			printf "\n%b[%s] %s skipped due to mode or configuration settings.%b\n" \
+				"$yellow" "$(date +'%Y-%m-%d %H:%M:%S')" "${FUNCNAME[0]}" "$reset"
 		else
-			printf "${yellow}[$(date +'%Y-%m-%d %H:%M:%S')] ${FUNCNAME[0]} is already processed, to force executing ${FUNCNAME[0]} delete\n    $called_fn_dir/.${FUNCNAME[0]} ${reset}\n\n"
+			printf "%b[%s] %s has already been processed. To force execution, delete:\n    %s/.%s%b\n\n" \
+				"$yellow" "$(date +'%Y-%m-%d %H:%M:%S')" "${FUNCNAME[0]}" \
+				"$called_fn_dir" "${FUNCNAME[0]}" "$reset"
 		fi
 	fi
 
@@ -995,59 +1357,214 @@ function sub_brute() {
 
 function sub_scraping() {
 
-	mkdir -p {.tmp,subdomains}
+	# Create necessary directories
+	if ! mkdir -p .tmp subdomains; then
+		printf "%b[!] Failed to create directories.%b\n" "$bred" "$reset"
+		return 1
+	fi
+
+	# Check if the function should run
 	if { [[ ! -f "$called_fn_dir/.${FUNCNAME[0]}" ]] || [[ $DIFF == true ]]; } && [[ $SUBSCRAPING == true ]]; then
-		start_subfunc ${FUNCNAME[0]} "Running : Source code scraping subdomain search"
-		touch .tmp/scrap_subs.txt
-		[[ -n $multi ]] && [ ! -f "$dir/subdomains/subdomains.txt" ] && echo "$domain" >"$dir/subdomains/subdomains.txt"
+		start_subfunc "${FUNCNAME[0]}" "Running: Source code scraping subdomain search"
+
+		# Initialize scrap_subs.txt
+		if ! touch .tmp/scrap_subs.txt; then
+			printf "%b[!] Failed to create .tmp/scrap_subs.txt.%b\n" "$bred" "$reset"
+			return 1
+		fi
+
+		# If in multi mode and subdomains.txt doesn't exist, create it
+		if [[ -n $multi ]] && [[ ! -f "$dir/subdomains/subdomains.txt" ]]; then
+			if ! printf "%s\n" "$domain" >"$dir/subdomains/subdomains.txt"; then
+				printf "%b[!] Failed to create subdomains.txt.%b\n" "$bred" "$reset"
+				return 1
+			fi
+		fi
+
+		# Check if subdomains.txt exists and is not empty
 		if [[ -s "$dir/subdomains/subdomains.txt" ]]; then
-			if [[ $(cat subdomains/subdomains.txt | wc -l) -le $DEEP_LIMIT ]] || [[ $DEEP == true ]]; then
+
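+			# Scraping every live host is expensive, so bail out when the list
+			# exceeds DEEP_LIMIT unless DEEP mode explicitly opts in.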
+			subdomains_count=$(wc -l <"$dir/subdomains/subdomains.txt")
+			if [[ $subdomains_count -le $DEEP_LIMIT ]] || [[ $DEEP == true ]]; then
+
 				if [[ $AXIOM != true ]]; then
-					resolvers_update_quick_local
-					cat subdomains/subdomains.txt | httpx -follow-host-redirects -status-code -threads $HTTPX_THREADS -rl $HTTPX_RATELIMIT -timeout $HTTPX_TIMEOUT -silent -retries 2 -title -web-server -tech-detect -location -no-color -json -o .tmp/web_full_info1.txt 2>>"$LOGFILE" >/dev/null
-					[ -s ".tmp/web_full_info1.txt" ] && cat .tmp/web_full_info1.txt | jq -r 'try .url' 2>/dev/null | grep "$domain" | grep -E '^((http|https):\/\/)?([a-zA-Z0-9]([a-zA-Z0-9\-]*[a-zA-Z0-9])?\.)+[a-zA-Z]{1,}(\/.*)?$' | sed "s/*.//" | anew .tmp/probed_tmp_scrap.txt | unfurl -u domains 2>>"$LOGFILE" | anew -q .tmp/scrap_subs.txt
-					[ -s ".tmp/probed_tmp_scrap.txt" ] && timeout -k 1m 10m httpx -l .tmp/probed_tmp_scrap.txt -tls-grab -tls-probe -csp-probe -status-code -threads $HTTPX_THREADS -rl $HTTPX_RATELIMIT -timeout $HTTPX_TIMEOUT -silent -retries 2 -no-color -json -o .tmp/web_full_info2.txt 2>>"$LOGFILE" >/dev/null || (true && echo "Httpx TLS & CSP grab timeout reached")
-					[ -s ".tmp/web_full_info2.txt" ] && cat .tmp/web_full_info2.txt | jq -r 'try ."tls-grab"."dns_names"[],try .csp.domains[],try .url' 2>/dev/null | grep "$domain" | grep -E '^((http|https):\/\/)?([a-zA-Z0-9]([a-zA-Z0-9\-]*[a-zA-Z0-9])?\.)+[a-zA-Z]{1,}(\/.*)?$' | sed "s/*.//" | sort -u | httpx -silent | anew .tmp/probed_tmp_scrap.txt | unfurl -u domains 2>>"$LOGFILE" | anew -q .tmp/scrap_subs.txt
+					if ! resolvers_update_quick_local; then
+						printf "%b[!] Failed to update resolvers locally.%b\n" "$bred" "$reset"
+						return 1
+					fi
 
-					if [[ $DEEP == true ]]; then
-						[ -s ".tmp/probed_tmp_scrap.txt" ] && katana -silent -list .tmp/probed_tmp_scrap.txt -jc -kf all -c $KATANA_THREADS -d 3 -fs rdn -o .tmp/katana.txt 2>>"$LOGFILE" >/dev/null
-					else
-						[ -s ".tmp/probed_tmp_scrap.txt" ] && katana -silent -list .tmp/probed_tmp_scrap.txt -jc -kf all -c $KATANA_THREADS -d 2 -fs rdn -o .tmp/katana.txt 2>>"$LOGFILE" >/dev/null
+					# Run httpx to gather web info
+					httpx -follow-host-redirects -status-code -threads "$HTTPX_THREADS" -rl "$HTTPX_RATELIMIT" \
+						-timeout "$HTTPX_TIMEOUT" -silent -retries 2 -title -web-server -tech-detect -location \
+						-no-color -json -o .tmp/web_full_info1.txt \
+						<subdomains/subdomains.txt 2>>"$LOGFILE" >/dev/null
+
+					if [[ -s ".tmp/web_full_info1.txt" ]]; then
+						cat .tmp/web_full_info1.txt | jq -r 'try .url' 2>/dev/null |
+							grep "$domain" |
+							grep -E '^((http|https):\/\/)?([a-zA-Z0-9\-\.]+\.)+[a-zA-Z]{1,}(\/.*)?$' |
+							sed "s/^\*\.//" |
+							anew .tmp/probed_tmp_scrap.txt |
+							unfurl -u domains 2>>"$LOGFILE" |
+							anew -q .tmp/scrap_subs.txt
+					fi
+
+					if [[ -s ".tmp/probed_tmp_scrap.txt" ]]; then
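+						# GNU timeout sends SIGTERM after 10m and, via -k,
+						# SIGKILL 1m later so a hung TLS/CSP probe cannot
+						# stall the whole scraping phase.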
+						timeout -k 1m 10m httpx -l .tmp/probed_tmp_scrap.txt -tls-grab -tls-probe -csp-probe \
+							-status-code -threads "$HTTPX_THREADS" -rl "$HTTPX_RATELIMIT" -timeout "$HTTPX_TIMEOUT" \
+							-silent -retries 2 -no-color -json -o .tmp/web_full_info2.txt \
+							2>>"$LOGFILE" >/dev/null
+					fi
+
+					if [[ -s ".tmp/web_full_info2.txt" ]]; then
+						cat .tmp/web_full_info2.txt | jq -r 'try ."tls-grab"."dns_names"[], try .csp.domains[], try .url' 2>/dev/null |
+							grep "$domain" |
+							grep -E '^((http|https):\/\/)?([a-zA-Z0-9\-\.]+\.)+[a-zA-Z]{1,}(\/.*)?$' |
+							sed "s/^\*\.//" |
+							sort -u |
+							httpx -silent |
+							anew .tmp/probed_tmp_scrap.txt |
+							unfurl -u domains 2>>"$LOGFILE" |
+							anew -q .tmp/scrap_subs.txt
+					fi
+
+					if [[ -s ".tmp/probed_tmp_scrap.txt" ]]; then
+						if [[ $DEEP == true ]]; then
+							katana_depth=3
+						else
+							katana_depth=2
+						fi
+
+						katana -silent -list .tmp/probed_tmp_scrap.txt -jc -kf all -c "$KATANA_THREADS" -d "$katana_depth" \
+							-fs rdn -o .tmp/katana.txt 2>>"$LOGFILE" >/dev/null
 					fi
+
 				else
-					resolvers_update_quick_axiom
-					axiom-scan subdomains/subdomains.txt -m httpx -follow-host-redirects -random-agent -status-code -threads $HTTPX_THREADS -rl $HTTPX_RATELIMIT -timeout $HTTPX_TIMEOUT -silent -retries 2 -title -web-server -tech-detect -location -no-color -json -o .tmp/web_full_info1.txt $AXIOM_EXTRA_ARGS 2>>"$LOGFILE" >/dev/null
-					[ -s ".tmp/web_full_info1.txt" ] && cat .tmp/web_full_info1.txt | jq -r 'try .url' 2>/dev/null | grep "$domain" | grep -E '^((http|https):\/\/)?([a-zA-Z0-9]([a-zA-Z0-9\-]*[a-zA-Z0-9])?\.)+[a-zA-Z]{1,}(\/.*)?$' | sed "s/*.//" | anew .tmp/probed_tmp_scrap.txt | unfurl -u domains 2>>"$LOGFILE" | anew -q .tmp/scrap_subs.txt
-					[ -s ".tmp/probed_tmp_scrap.txt" ] && timeout -k 1m 10m axiom-scan .tmp/probed_tmp_scrap.txt -m httpx -tls-grab -tls-probe -csp-probe -random-agent -status-code -threads $HTTPX_THREADS -rl $HTTPX_RATELIMIT -timeout $HTTPX_TIMEOUT -silent -retries 2 -title -web-server -tech-detect -location -no-color -json -o .tmp/web_full_info2.txt $AXIOM_EXTRA_ARGS 2>>"$LOGFILE" >/dev/null || (true && echo "Httpx  timeout reached")
-					[ -s ".tmp/web_full_info2.txt" ] && cat .tmp/web_full_info2.txt | jq -r 'try ."tls-grab"."dns_names"[],try .csp.domains[],try .url' 2>/dev/null | grep "$domain" | grep -E '^((http|https):\/\/)?([a-zA-Z0-9]([a-zA-Z0-9\-]*[a-zA-Z0-9])?\.)+[a-zA-Z]{1,}(\/.*)?$' | sed "s/*.//" | sort -u | httpx -silent | anew .tmp/probed_tmp_scrap.txt | unfurl -u domains 2>>"$LOGFILE" | anew -q .tmp/scrap_subs.txt
-					if [[ $DEEP == true ]]; then
-						[ -s ".tmp/probed_tmp_scrap.txt" ] && axiom-scan .tmp/probed_tmp_scrap.txt -m katana -jc -kf all -d 3 -fs rdn -o .tmp/katana.txt $AXIOM_EXTRA_ARGS 2>>"$LOGFILE" >/dev/null
-					else
-						[ -s ".tmp/probed_tmp_scrap.txt" ] && axiom-scan .tmp/probed_tmp_scrap.txt -m katana -jc -kf all -d 2 -fs rdn -o .tmp/katana.txt $AXIOM_EXTRA_ARGS 2>>"$LOGFILE" >/dev/null
+					# AXIOM mode
+					if ! resolvers_update_quick_axiom; then
+						printf "%b[!] Failed to update resolvers on axiom.%b\n" "$bred" "$reset"
+						return 1
+					fi
+
+					axiom-scan subdomains/subdomains.txt -m httpx -follow-host-redirects -random-agent -status-code \
+						-threads "$HTTPX_THREADS" -rl "$HTTPX_RATELIMIT" -timeout "$HTTPX_TIMEOUT" -silent -retries 2 \
+						-title -web-server -tech-detect -location -no-color -json -o .tmp/web_full_info1.txt \
+						$AXIOM_EXTRA_ARGS 2>>"$LOGFILE" >/dev/null
+
+					if [[ -s ".tmp/web_full_info1.txt" ]]; then
+						cat .tmp/web_full_info1.txt | jq -r 'try .url' 2>/dev/null |
+							grep "$domain" |
+							grep -E '^((http|https):\/\/)?([a-zA-Z0-9\-\.]+\.)+[a-zA-Z]{1,}(\/.*)?$' |
+							sed "s/^\*\.//" |
+							anew .tmp/probed_tmp_scrap.txt |
+							unfurl -u domains 2>>"$LOGFILE" |
+							anew -q .tmp/scrap_subs.txt
+					fi
+
+					if [[ -s ".tmp/probed_tmp_scrap.txt" ]]; then
+						timeout -k 1m 10m axiom-scan .tmp/probed_tmp_scrap.txt -m httpx -tls-grab -tls-probe -csp-probe \
+							-random-agent -status-code -threads "$HTTPX_THREADS" -rl "$HTTPX_RATELIMIT" -timeout "$HTTPX_TIMEOUT" \
+							-silent -retries 2 -title -web-server -tech-detect -location -no-color -json -o .tmp/web_full_info2.txt \
+							$AXIOM_EXTRA_ARGS 2>>"$LOGFILE" >/dev/null
+					fi
+
+					if [[ -s ".tmp/web_full_info2.txt" ]]; then
+						cat .tmp/web_full_info2.txt | jq -r 'try ."tls-grab"."dns_names"[], try .csp.domains[], try .url' 2>/dev/null |
+							grep "$domain" |
+							grep -E '^((http|https):\/\/)?([a-zA-Z0-9\-\.]+\.)+[a-zA-Z]{1,}(\/.*)?$' |
+							sed "s/^\*\.//" |
+							sort -u |
+							httpx -silent |
+							anew .tmp/probed_tmp_scrap.txt |
+							unfurl -u domains 2>>"$LOGFILE" |
+							anew -q .tmp/scrap_subs.txt
+					fi
+
+					if [[ -s ".tmp/probed_tmp_scrap.txt" ]]; then
+						if [[ $DEEP == true ]]; then
+							katana_depth=3
+						else
+							katana_depth=2
+						fi
+
+						axiom-scan .tmp/probed_tmp_scrap.txt -m katana -jc -kf all -d "$katana_depth" -fs rdn \
+							-o .tmp/katana.txt $AXIOM_EXTRA_ARGS 2>>"$LOGFILE" >/dev/null
 					fi
 				fi
-				[ -s ".tmp/katana.txt" ] && sed -i '/^.\{2048\}./d' .tmp/katana.txt
-				[ -s ".tmp/katana.txt" ] && cat .tmp/katana.txt | unfurl -u domains 2>>"$LOGFILE" | grep ".$domain$" | grep -E '^((http|https):\/\/)?([a-zA-Z0-9]([a-zA-Z0-9\-]*[a-zA-Z0-9])?\.)+[a-zA-Z]{1,}(\/.*)?$' | anew -q .tmp/scrap_subs.txt
-				[ -s ".tmp/scrap_subs.txt" ] && puredns resolve .tmp/scrap_subs.txt -w .tmp/scrap_subs_resolved.txt -r $resolvers --resolvers-trusted $resolvers_trusted -l $PUREDNS_PUBLIC_LIMIT --rate-limit-trusted $PUREDNS_TRUSTED_LIMIT --wildcard-tests $PUREDNS_WILDCARDTEST_LIMIT --wildcard-batch $PUREDNS_WILDCARDBATCH_LIMIT 2>>"$LOGFILE" >/dev/null
+
+				if [[ -s ".tmp/katana.txt" ]]; then
+					sed -i '/^.\{2048\}./d' .tmp/katana.txt
+
+					cat .tmp/katana.txt | unfurl -u domains 2>>"$LOGFILE" |
+						grep "\.$domain$" |
+						grep -E '^([a-zA-Z0-9\.\-]+\.)+[a-zA-Z]{1,}$' |
+						anew -q .tmp/scrap_subs.txt
+				fi
+
+				if [[ -s ".tmp/scrap_subs.txt" ]]; then
+					puredns resolve .tmp/scrap_subs.txt -w .tmp/scrap_subs_resolved.txt -r "$resolvers" \
+						--resolvers-trusted "$resolvers_trusted" -l "$PUREDNS_PUBLIC_LIMIT" \
+						--rate-limit-trusted "$PUREDNS_TRUSTED_LIMIT" --wildcard-tests "$PUREDNS_WILDCARDTEST_LIMIT" \
+						--wildcard-batch "$PUREDNS_WILDCARDBATCH_LIMIT" 2>>"$LOGFILE" >/dev/null
+				fi
+
 				if [[ $INSCOPE == true ]]; then
-					check_inscope .tmp/scrap_subs_resolved.txt 2>>"$LOGFILE" >/dev/null
+					if ! check_inscope .tmp/scrap_subs_resolved.txt 2>>"$LOGFILE" >/dev/null; then
+						printf "%b[!] check_inscope command failed.%b\n" "$bred" "$reset"
+					fi
+				fi
+
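+				# anew prints only lines not already in subdomains.txt, so the
+				# count below reflects genuinely new subdomains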
+				if [[ -s ".tmp/scrap_subs_resolved.txt" ]]; then
+					if ! NUMOFLINES=$(cat .tmp/scrap_subs_resolved.txt 2>>"$LOGFILE" |
+						grep "\.$domain$\|^$domain$" |
+						grep -E '^([a-zA-Z0-9\.\-]+\.)+[a-zA-Z]{1,}$' |
+						anew subdomains/subdomains.txt |
+						tee .tmp/diff_scrap.txt |
+						sed '/^$/d' | wc -l); then
+						printf "%b[!] Failed to count new subdomains.%b\n" "$bred" "$reset"
+						NUMOFLINES=0
+					fi
+				else
+					NUMOFLINES=0
+				fi
+
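+				# Probe only the newly discovered subdomains captured in diff_scrap.txt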
+				if [[ -s ".tmp/diff_scrap.txt" ]]; then
+					httpx -follow-host-redirects -random-agent -status-code -threads "$HTTPX_THREADS" \
+						-rl "$HTTPX_RATELIMIT" -timeout "$HTTPX_TIMEOUT" -silent -retries 2 -title -web-server \
+						-tech-detect -location -no-color -json -o .tmp/web_full_info3.txt \
+						<.tmp/diff_scrap.txt 2>>"$LOGFILE" >/dev/null
+
+					if [[ -s ".tmp/web_full_info3.txt" ]]; then
+						cat .tmp/web_full_info3.txt | jq -r 'try .url' 2>/dev/null |
+							grep "$domain" |
+							grep -E '^((http|https):\/\/)?([a-zA-Z0-9\-\.]+\.)+[a-zA-Z]{1,}(\/.*)?$' |
+							sed "s/^\*\.//" |
+							anew .tmp/probed_tmp_scrap.txt |
+							unfurl -u domains 2>>"$LOGFILE" |
+							anew -q .tmp/scrap_subs.txt
+					fi
 				fi
-				NUMOFLINES=$(cat .tmp/scrap_subs_resolved.txt 2>>"$LOGFILE" | grep "\.$domain$\|^$domain$" | grep -E '^((http|https):\/\/)?([a-zA-Z0-9]([a-zA-Z0-9\-]*[a-zA-Z0-9])?\.)+[a-zA-Z]{1,}(\/.*)?$' | anew subdomains/subdomains.txt | tee .tmp/diff_scrap.txt | sed '/^$/d' | wc -l)
-				[ -s ".tmp/diff_scrap.txt" ] && cat .tmp/diff_scrap.txt | httpx -follow-host-redirects -random-agent -status-code -threads $HTTPX_THREADS -rl $HTTPX_RATELIMIT -timeout $HTTPX_TIMEOUT -silent -retries 2 -title -web-server -tech-detect -location -no-color -json -o .tmp/web_full_info3.txt 2>>"$LOGFILE" >/dev/null
-				[ -s ".tmp/web_full_info3.txt" ] && cat .tmp/web_full_info3.txt | jq -r 'try .url' 2>/dev/null | grep "$domain" | grep -E '^((http|https):\/\/)?([a-zA-Z0-9]([a-zA-Z0-9\-]*[a-zA-Z0-9])?\.)+[a-zA-Z]{1,}(\/.*)?$' | sed "s/*.//" | anew .tmp/probed_tmp_scrap.txt | unfurl -u domains 2>>"$LOGFILE" | anew -q .tmp/scrap_subs.txt
-				cat .tmp/web_full_info1.txt .tmp/web_full_info2.txt .tmp/web_full_info3.txt 2>>"$LOGFILE" | jq -s 'try .' | jq 'try unique_by(.input)' | jq 'try .[]' 2>>"$LOGFILE" >.tmp/web_full_info.txt
-				end_subfunc "${NUMOFLINES} new subs (code scraping)" ${FUNCNAME[0]}
+
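+				# Merge the three probe outputs, de-duplicated by target host (.input)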
+				cat .tmp/web_full_info1.txt .tmp/web_full_info2.txt .tmp/web_full_info3.txt 2>>"$LOGFILE" |
+					jq -s 'try .' | jq 'try unique_by(.input)' | jq 'try .[]' 2>>"$LOGFILE" >.tmp/web_full_info.txt
+
+				end_subfunc "${NUMOFLINES} new subs (code scraping)" "${FUNCNAME[0]}"
+
 			else
-				end_subfunc "Skipping Subdomains Web Scraping: Too Many Subdomains" ${FUNCNAME[0]}
+				end_subfunc "Skipping Subdomains Web Scraping: Too Many Subdomains" "${FUNCNAME[0]}"
 			fi
 		else
-			end_subfunc "No subdomains to search (code scraping)" ${FUNCNAME[0]}
+			end_subfunc "No subdomains to search (code scraping)" "${FUNCNAME[0]}"
 		fi
+
 	else
 		if [[ $SUBSCRAPING == false ]]; then
-			printf "\n${yellow}[$(date +'%Y-%m-%d %H:%M:%S')] ${FUNCNAME[0]} skipped in this mode or defined in reconftw.cfg ${reset}\n"
+			printf "\n%b[%s] %s skipped in this mode or disabled in reconftw.cfg.%b\n" \
+				"$yellow" "$(date +'%Y-%m-%d %H:%M:%S')" "${FUNCNAME[0]}" "$reset"
 		else
-			printf "${yellow}[$(date +'%Y-%m-%d %H:%M:%S')] ${FUNCNAME[0]} is already processed, to force executing ${FUNCNAME[0]} delete\n    $called_fn_dir/.${FUNCNAME[0]} ${reset}\n\n"
+			printf "%b[%s] %s has already been processed. To force execution, delete:\n    %s/.%s%b\n\n" \
+				"$yellow" "$(date +'%Y-%m-%d %H:%M:%S')" "${FUNCNAME[0]}" \
+				"$called_fn_dir" "${FUNCNAME[0]}" "$reset"
 		fi
 	fi
 
@@ -1055,90 +1572,227 @@ function sub_scraping() {
 
 function sub_analytics() {
 
-	mkdir -p {.tmp,subdomains}
+	# Create necessary directories
+	if ! mkdir -p .tmp subdomains; then
+		printf "%b[!] Failed to create directories.%b\n" "$bred" "$reset"
+		return 1
+	fi
+
+	# Check if the function should run
 	if { [[ ! -f "$called_fn_dir/.${FUNCNAME[0]}" ]] || [[ $DIFF == true ]]; } && [[ $SUBANALYTICS == true ]]; then
-		start_subfunc ${FUNCNAME[0]} "Running : Analytics Subdomain Enumeration"
+		start_subfunc "${FUNCNAME[0]}" "Running: Analytics Subdomain Enumeration"
+
 		if [[ -s ".tmp/probed_tmp_scrap.txt" ]]; then
+			# Run analyticsrelationships to find domains sharing analytics IDs with the probed hosts
 			analyticsrelationships -ch <.tmp/probed_tmp_scrap.txt >>.tmp/analytics_subs_tmp.txt 2>>"$LOGFILE"
 
-			[ -s ".tmp/analytics_subs_tmp.txt" ] && cat .tmp/analytics_subs_tmp.txt | grep "\.$domain$\|^$domain$" | grep -E '^((http|https):\/\/)?([a-zA-Z0-9]([a-zA-Z0-9\-]*[a-zA-Z0-9])?\.)+[a-zA-Z]{1,}(\/.*)?$' | sed "s/|__ //" | anew -q .tmp/analytics_subs_clean.txt
-			if [[ $AXIOM != true ]]; then
-				resolvers_update_quick_local
-				[ -s ".tmp/analytics_subs_clean.txt" ] && puredns resolve .tmp/analytics_subs_clean.txt -w .tmp/analytics_subs_resolved.txt -r $resolvers --resolvers-trusted $resolvers_trusted -l $PUREDNS_PUBLIC_LIMIT --rate-limit-trusted $PUREDNS_TRUSTED_LIMIT --wildcard-tests $PUREDNS_WILDCARDTEST_LIMIT --wildcard-batch $PUREDNS_WILDCARDBATCH_LIMIT 2>>"$LOGFILE" >/dev/null
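+			# Keep only in-scope hosts and strip the tool's "|__ " tree prefix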
+			if [[ -s ".tmp/analytics_subs_tmp.txt" ]]; then
+				grep "\.$domain$\|^$domain$" .tmp/analytics_subs_tmp.txt |
+					grep -E '^([a-zA-Z0-9\.\-]+\.)+[a-zA-Z]{1,}$' |
+					sed "s/|__ //" | anew -q .tmp/analytics_subs_clean.txt
+
+				if [[ $AXIOM != true ]]; then
+					if ! resolvers_update_quick_local; then
+						printf "%b[!] Failed to update resolvers locally.%b\n" "$bred" "$reset"
+						return 1
+					fi
+
+					if [[ -s ".tmp/analytics_subs_clean.txt" ]]; then
+						puredns resolve .tmp/analytics_subs_clean.txt -w .tmp/analytics_subs_resolved.txt \
+							-r "$resolvers" --resolvers-trusted "$resolvers_trusted" \
+							-l "$PUREDNS_PUBLIC_LIMIT" --rate-limit-trusted "$PUREDNS_TRUSTED_LIMIT" \
+							--wildcard-tests "$PUREDNS_WILDCARDTEST_LIMIT" --wildcard-batch "$PUREDNS_WILDCARDBATCH_LIMIT" \
+							2>>"$LOGFILE" >/dev/null
+					fi
+				else
+					if ! resolvers_update_quick_axiom; then
+						printf "%b[!] Failed to update resolvers on Axiom.%b\n" "$bred" "$reset"
+						return 1
+					fi
+
+					if [[ -s ".tmp/analytics_subs_clean.txt" ]]; then
+						axiom-scan .tmp/analytics_subs_clean.txt -m puredns-resolve \
+							-r /home/op/lists/resolvers.txt --resolvers-trusted /home/op/lists/resolvers_trusted.txt \
+							--wildcard-tests "$PUREDNS_WILDCARDTEST_LIMIT" --wildcard-batch "$PUREDNS_WILDCARDBATCH_LIMIT" \
+							-o .tmp/analytics_subs_resolved.txt $AXIOM_EXTRA_ARGS \
+							2>>"$LOGFILE" >/dev/null
+					fi
+				fi
 			else
-				resolvers_update_quick_axiom
-				[ -s ".tmp/analytics_subs_clean.txt" ] && axiom-scan .tmp/analytics_subs_clean.txt -m puredns-resolve -r /home/op/lists/resolvers.txt --resolvers-trusted /home/op/lists/resolvers_trusted.txt --wildcard-tests $PUREDNS_WILDCARDTEST_LIMIT --wildcard-batch $PUREDNS_WILDCARDBATCH_LIMIT -o .tmp/analytics_subs_resolved.txt $AXIOM_EXTRA_ARGS 2>>"$LOGFILE" >/dev/null
+				printf "%b[!] No analytics subdomains found.%b\n" "$yellow" "$reset"
+			fi
+		else
+			printf "%b[!] File .tmp/probed_tmp_scrap.txt does not exist or is empty.%b\n" "$yellow" "$reset"
+		fi
+
+		if [[ $INSCOPE == true ]]; then
+			if ! check_inscope .tmp/analytics_subs_resolved.txt 2>>"$LOGFILE" >/dev/null; then
+				printf "%b[!] check_inscope command failed.%b\n" "$bred" "$reset"
 			fi
 		fi
-		[[ $INSCOPE == true ]] && check_inscope .tmp/analytics_subs_resolved.txt 2>>"$LOGFILE" >/dev/null
-		NUMOFLINES=$(cat .tmp/analytics_subs_resolved.txt 2>>"$LOGFILE" | anew subdomains/subdomains.txt | sed '/^$/d' | wc -l)
-		end_subfunc "${NUMOFLINES} new subs (analytics relationship)" ${FUNCNAME[0]}
+
+		if ! NUMOFLINES=$(anew subdomains/subdomains.txt <.tmp/analytics_subs_resolved.txt | sed '/^$/d' | wc -l); then
+			printf "%b[!] Failed to count new subdomains.%b\n" "$bred" "$reset"
+			NUMOFLINES=0
+		fi
+
+		end_subfunc "${NUMOFLINES} new subs (analytics relationship)" "${FUNCNAME[0]}"
+
 	else
 		if [[ $SUBANALYTICS == false ]]; then
-			printf "\n${yellow}[$(date +'%Y-%m-%d %H:%M:%S')] ${FUNCNAME[0]} skipped in this mode or defined in reconftw.cfg ${reset}\n"
+			printf "\n%b[%s] %s skipped in this mode or disabled in reconftw.cfg.%b\n" \
+				"$yellow" "$(date +'%Y-%m-%d %H:%M:%S')" "${FUNCNAME[0]}" "$reset"
 		else
-			printf "${yellow}[$(date +'%Y-%m-%d %H:%M:%S')] ${FUNCNAME[0]} is already processed, to force executing ${FUNCNAME[0]} delete\n    $called_fn_dir/.${FUNCNAME[0]} ${reset}\n\n"
+			printf "%b[%s] %s has already been processed. To force execution, delete:\n    %s/.%s%b\n\n" \
+				"$yellow" "$(date +'%Y-%m-%d %H:%M:%S')" "${FUNCNAME[0]}" "$called_fn_dir" "${FUNCNAME[0]}" "$reset"
 		fi
 	fi
-
 }
 
 function sub_permut() {
 
-	mkdir -p {.tmp,subdomains}
+	mkdir -p .tmp subdomains
+
 	if { [[ ! -f "$called_fn_dir/.${FUNCNAME[0]}" ]] || [[ $DIFF == true ]]; } && [[ $SUBPERMUTE == true ]]; then
-		start_subfunc ${FUNCNAME[0]} "Running : Permutations Subdomain Enumeration"
-		[[ -n $multi ]] && [ ! -f "$dir/subdomains/subdomains.txt" ] && echo "$domain" >"$dir/subdomains/subdomains.txt"
-		if [[ $DEEP == true ]] || [[ "$(cat subdomains/subdomains.txt | wc -l)" -le $DEEP_LIMIT ]]; then
+		start_subfunc "${FUNCNAME[0]}" "Running: Permutations Subdomain Enumeration"
+
+		# If in multi mode and subdomains.txt doesn't exist, create it with the domain
+		if [[ -n $multi ]] && [[ ! -f "$dir/subdomains/subdomains.txt" ]]; then
+			echo "$domain" >"$dir/subdomains/subdomains.txt"
+		fi
+
+		# Determine the number of subdomains
+		subdomain_count=$(wc -l <subdomains/subdomains.txt)
+
+		# Check if DEEP mode is enabled or subdomains are within DEEP_LIMIT
+		if [[ $DEEP == true ]] || [[ $subdomain_count -le $DEEP_LIMIT ]]; then
+
+			# Select the permutations tool
 			if [[ $PERMUTATIONS_OPTION == "gotator" ]]; then
-				[ -s "subdomains/subdomains.txt" ] && gotator -sub subdomains/subdomains.txt -perm ${tools}/permutations_list.txt $GOTATOR_FLAGS -silent 2>>"$LOGFILE" | head -c $PERMUTATIONS_LIMIT >.tmp/gotator1.txt
+				if [[ -s "subdomains/subdomains.txt" ]]; then
+					gotator -sub subdomains/subdomains.txt -perm "${tools}/permutations_list.txt" $GOTATOR_FLAGS \
+						-silent 2>>"$LOGFILE" | head -c "$PERMUTATIONS_LIMIT" >.tmp/gotator1.txt
+				fi
 			else
-				[ -s "subdomains/subdomains.txt" ] && ripgen -d subdomains/subdomains.txt -w ${tools}/permutations_list.txt 2>>"$LOGFILE" | head -c $PERMUTATIONS_LIMIT >.tmp/gotator1.txt
+				if [[ -s "subdomains/subdomains.txt" ]]; then
+					ripgen -d subdomains/subdomains.txt -w "${tools}/permutations_list.txt" \
+						2>>"$LOGFILE" | head -c "$PERMUTATIONS_LIMIT" >.tmp/gotator1.txt
+				fi
 			fi
-		elif [[ "$(cat .tmp/subs_no_resolved.txt | wc -l)" -le $DEEP_LIMIT2 ]]; then
+
+		elif [[ "$(wc -l <.tmp/subs_no_resolved.txt)" -le $DEEP_LIMIT2 ]]; then
+
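+			# Resolved set exceeds DEEP_LIMIT; permute the smaller unresolved candidate set instead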
 			if [[ $PERMUTATIONS_OPTION == "gotator" ]]; then
-				[ -s ".tmp/subs_no_resolved.txt" ] && gotator -sub .tmp/subs_no_resolved.txt -perm ${tools}/permutations_list.txt $GOTATOR_FLAGS -silent 2>>"$LOGFILE" | head -c $PERMUTATIONS_LIMIT >.tmp/gotator1.txt
+				if [[ -s ".tmp/subs_no_resolved.txt" ]]; then
+					gotator -sub .tmp/subs_no_resolved.txt -perm "${tools}/permutations_list.txt" $GOTATOR_FLAGS \
+						-silent 2>>"$LOGFILE" | head -c "$PERMUTATIONS_LIMIT" >.tmp/gotator1.txt
+				fi
 			else
-				[ -s ".tmp/subs_no_resolved.txt" ] && ripgen -d .tmp/subs_no_resolved.txt -w ${tools}/permutations_list.txt 2>>"$LOGFILE" | head -c $PERMUTATIONS_LIMIT >.tmp/gotator1.txt
+				if [[ -s ".tmp/subs_no_resolved.txt" ]]; then
+					ripgen -d .tmp/subs_no_resolved.txt -w "${tools}/permutations_list.txt" \
+						2>>"$LOGFILE" | head -c "$PERMUTATIONS_LIMIT" >.tmp/gotator1.txt
+				fi
 			fi
+
 		else
-			end_subfunc "Skipping Permutations: Too Many Subdomains" ${FUNCNAME[0]}
-			return 1
+			end_subfunc "Skipping Permutations: Too Many Subdomains" "${FUNCNAME[0]}"
+			return 0
 		fi
+
+		# Resolve the permutations
 		if [[ $AXIOM != true ]]; then
-			resolvers_update_quick_local
-			[ -s ".tmp/gotator1.txt" ] && puredns resolve .tmp/gotator1.txt -w .tmp/permute1.txt -r $resolvers --resolvers-trusted $resolvers_trusted -l $PUREDNS_PUBLIC_LIMIT --rate-limit-trusted $PUREDNS_TRUSTED_LIMIT --wildcard-tests $PUREDNS_WILDCARDTEST_LIMIT --wildcard-batch $PUREDNS_WILDCARDBATCH_LIMIT 2>>"$LOGFILE" >/dev/null
+			if ! resolvers_update_quick_local; then
+				printf "%b[!] Failed to update resolvers.%b\n" "$bred" "$reset"
+				return 1
+			fi
+			if [[ -s ".tmp/gotator1.txt" ]]; then
+				puredns resolve .tmp/gotator1.txt -w .tmp/permute1.txt -r "$resolvers" --resolvers-trusted "$resolvers_trusted" \
+					-l "$PUREDNS_PUBLIC_LIMIT" --rate-limit-trusted "$PUREDNS_TRUSTED_LIMIT" \
+					--wildcard-tests "$PUREDNS_WILDCARDTEST_LIMIT" --wildcard-batch "$PUREDNS_WILDCARDBATCH_LIMIT" \
+					2>>"$LOGFILE" >/dev/null
+			fi
 		else
-			resolvers_update_quick_axiom
-			[ -s ".tmp/gotator1.txt" ] && axiom-scan .tmp/gotator1.txt -m puredns-resolve -r /home/op/lists/resolvers.txt --resolvers-trusted /home/op/lists/resolvers_trusted.txt --wildcard-tests $PUREDNS_WILDCARDTEST_LIMIT --wildcard-batch $PUREDNS_WILDCARDBATCH_LIMIT -o .tmp/permute1.txt $AXIOM_EXTRA_ARGS 2>>"$LOGFILE" >/dev/null
+			if ! resolvers_update_quick_axiom; then
+				printf "%b[!] Failed to update resolvers on Axiom.%b\n" "$bred" "$reset"
+				return 1
+			fi
+			if [[ -s ".tmp/gotator1.txt" ]]; then
+				axiom-scan .tmp/gotator1.txt -m puredns-resolve -r /home/op/lists/resolvers.txt \
+					--resolvers-trusted /home/op/lists/resolvers_trusted.txt \
+					--wildcard-tests "$PUREDNS_WILDCARDTEST_LIMIT" --wildcard-batch "$PUREDNS_WILDCARDBATCH_LIMIT" \
+					-o .tmp/permute1.txt $AXIOM_EXTRA_ARGS 2>>"$LOGFILE" >/dev/null
+			fi
 		fi
 
+		# Generate second round of permutations
 		if [[ $PERMUTATIONS_OPTION == "gotator" ]]; then
-			[ -s ".tmp/permute1.txt" ] && gotator -sub .tmp/permute1.txt -perm ${tools}/permutations_list.txt $GOTATOR_FLAGS -silent 2>>"$LOGFILE" | head -c $PERMUTATIONS_LIMIT >.tmp/gotator2.txt
+			if [[ -s ".tmp/permute1.txt" ]]; then
+				gotator -sub .tmp/permute1.txt -perm "${tools}/permutations_list.txt" \
+					$GOTATOR_FLAGS -silent 2>>"$LOGFILE" | head -c "$PERMUTATIONS_LIMIT" >.tmp/gotator2.txt
+			fi
 		else
-			[ -s ".tmp/permute1.txt" ] && ripgen -d .tmp/permute1.txt -w ${tools}/permutations_list.txt 2>>"$LOGFILE" | head -c $PERMUTATIONS_LIMIT >.tmp/gotator2.txt
+			if [[ -s ".tmp/permute1.txt" ]]; then
+				ripgen -d .tmp/permute1.txt -w "${tools}/permutations_list.txt" \
+					2>>"$LOGFILE" | head -c "$PERMUTATIONS_LIMIT" >.tmp/gotator2.txt
+			fi
 		fi
 
+		# Resolve the second round of permutations
 		if [[ $AXIOM != true ]]; then
-			[ -s ".tmp/gotator2.txt" ] && puredns resolve .tmp/gotator2.txt -w .tmp/permute2.txt -r $resolvers --resolvers-trusted $resolvers_trusted -l $PUREDNS_PUBLIC_LIMIT --rate-limit-trusted $PUREDNS_TRUSTED_LIMIT --wildcard-tests $PUREDNS_WILDCARDTEST_LIMIT --wildcard-batch $PUREDNS_WILDCARDBATCH_LIMIT 2>>"$LOGFILE" >/dev/null
+			if [[ -s ".tmp/gotator2.txt" ]]; then
+				puredns resolve .tmp/gotator2.txt -w .tmp/permute2.txt -r "$resolvers" --resolvers-trusted "$resolvers_trusted" \
+					-l "$PUREDNS_PUBLIC_LIMIT" --rate-limit-trusted "$PUREDNS_TRUSTED_LIMIT" \
+					--wildcard-tests "$PUREDNS_WILDCARDTEST_LIMIT" --wildcard-batch "$PUREDNS_WILDCARDBATCH_LIMIT" \
+					2>>"$LOGFILE" >/dev/null
+			fi
 		else
-			[ -s ".tmp/gotator2.txt" ] && axiom-scan .tmp/gotator2.txt -m puredns-resolve -r /home/op/lists/resolvers.txt --resolvers-trusted /home/op/lists/resolvers_trusted.txt --wildcard-tests $PUREDNS_WILDCARDTEST_LIMIT --wildcard-batch $PUREDNS_WILDCARDBATCH_LIMIT -o .tmp/permute2.txt $AXIOM_EXTRA_ARGS 2>>"$LOGFILE" >/dev/null
+			if [[ -s ".tmp/gotator2.txt" ]]; then
+				axiom-scan .tmp/gotator2.txt -m puredns-resolve -r /home/op/lists/resolvers.txt \
+					--resolvers-trusted /home/op/lists/resolvers_trusted.txt \
+					--wildcard-tests "$PUREDNS_WILDCARDTEST_LIMIT" --wildcard-batch "$PUREDNS_WILDCARDBATCH_LIMIT" \
+					-o .tmp/permute2.txt $AXIOM_EXTRA_ARGS 2>>"$LOGFILE" >/dev/null
+			fi
 		fi
-		cat .tmp/permute1.txt .tmp/permute2.txt 2>>"$LOGFILE" | anew -q .tmp/permute_subs.txt
 
-		if [[ -s ".tmp/permute_subs.txt" ]]; then
-			[ -s "$outOfScope_file" ] && deleteOutScoped $outOfScope_file .tmp/permute_subs.txt
-			[[ $INSCOPE == true ]] && check_inscope .tmp/permute_subs.txt 2>>"$LOGFILE" >/dev/null
-			NUMOFLINES=$(cat .tmp/permute_subs.txt 2>>"$LOGFILE" | grep ".$domain$" | grep -E '^((http|https):\/\/)?([a-zA-Z0-9]([a-zA-Z0-9\-]*[a-zA-Z0-9])?\.)+[a-zA-Z]{1,}(\/.*)?$' | anew subdomains/subdomains.txt | sed '/^$/d' | wc -l)
+		# Combine results
+		if [[ -s ".tmp/permute1.txt" ]] || [[ -s ".tmp/permute2.txt" ]]; then
+			cat .tmp/permute1.txt .tmp/permute2.txt 2>>"$LOGFILE" | anew -q .tmp/permute_subs.txt
+
+			# Remove out-of-scope domains if applicable
+			if [[ -s $outOfScope_file ]]; then
+				if ! deleteOutScoped "$outOfScope_file" .tmp/permute_subs.txt; then
+					printf "%b[!] deleteOutScoped command failed.%b\n" "$bred" "$reset"
+				fi
+			fi
+
+			# Check inscope if INSCOPE is true
+			if [[ $INSCOPE == true ]]; then
+				if ! check_inscope .tmp/permute_subs.txt 2>>"$LOGFILE" >/dev/null; then
+					printf "%b[!] check_inscope command failed.%b\n" "$bred" "$reset"
+				fi
+			fi
+
+			# Process subdomains and append new ones to subdomains.txt, count new lines
+			if ! NUMOFLINES=$(grep "\.$domain$\|^$domain$" .tmp/permute_subs.txt 2>>"$LOGFILE" |
+				grep -E '^([a-zA-Z0-9\.\-]+\.)+[a-zA-Z]{1,}$' |
+				anew subdomains/subdomains.txt | sed '/^$/d' | wc -l); then
+				printf "%b[!] Failed to process subdomains.%b\n" "$bred" "$reset"
+				return 1
+			fi
 		else
 			NUMOFLINES=0
 		fi
-		end_subfunc "${NUMOFLINES} new subs (permutations)" ${FUNCNAME[0]}
+
+		end_subfunc "${NUMOFLINES} new subs (permutations)" "${FUNCNAME[0]}"
+
 	else
 		if [[ $SUBPERMUTE == false ]]; then
-			printf "\n${yellow}[$(date +'%Y-%m-%d %H:%M:%S')] ${FUNCNAME[0]} skipped in this mode or defined in reconftw.cfg ${reset}\n"
+			printf "\n%b[%s] %s skipped in this mode or disabled in reconftw.cfg.%b\n" \
+				"$yellow" "$(date +'%Y-%m-%d %H:%M:%S')" "${FUNCNAME[0]}" "$reset"
 		else
-			printf "${yellow}[$(date +'%Y-%m-%d %H:%M:%S')] ${FUNCNAME[0]} is already processed, to force executing ${FUNCNAME[0]} delete\n    $called_fn_dir/.${FUNCNAME[0]} ${reset}\n\n"
+			printf "%b[%s] %s has already been processed. To force execution, delete:\n    %s/.%s%b\n\n" \
+				"$yellow" "$(date +'%Y-%m-%d %H:%M:%S')" "${FUNCNAME[0]}" \
+				"$called_fn_dir" "${FUNCNAME[0]}" "$reset"
 		fi
 	fi
 
@@ -1146,41 +1800,100 @@ function sub_permut() {
 
 function sub_regex_permut() {
 
-	mkdir -p {.tmp,subdomains}
+	# Create necessary directories
+	if ! mkdir -p .tmp subdomains; then
+		printf "%b[!] Failed to create directories.%b\n" "$bred" "$reset"
+		return 1
+	fi
+
+	# Check if the function should run
 	if { [[ ! -f "$called_fn_dir/.${FUNCNAME[0]}" ]] || [[ $DIFF == true ]]; } && [[ $SUBREGEXPERMUTE == true ]]; then
-		start_subfunc ${FUNCNAME[0]} "Running : Permutations by regex analysis"
+		start_subfunc "${FUNCNAME[0]}" "Running: Permutations by regex analysis"
 
-		pushd "${tools}/regulator" >/dev/null || {
-			echo "Failed to cd directory in ${FUNCNAME[0]} @ line ${LINENO}"
-		}
-		[[ -n $multi ]] && [ ! -f "$dir/subdomains/subdomains.txt" ] && echo "$domain" >"$dir/subdomains/subdomains.txt"
-		python3 main.py -t $domain -f ${dir}/subdomains/subdomains.txt -o ${dir}/.tmp/${domain}.brute
+		# Change to the regulator directory
+		if ! pushd "${tools}/regulator" >/dev/null; then
+			printf "%b[!] Failed to change directory to %s.%b\n" "$bred" "${tools}/regulator" "$reset"
+			return 1
+		fi
 
-		popd >/dev/null || {
-			echo "Failed to popd in ${FUNCNAME[0]} @ line ${LINENO}"
-		}
+		# If in multi mode and subdomains.txt doesn't exist, create it
+		if [[ -n $multi ]] && [[ ! -f "$dir/subdomains/subdomains.txt" ]]; then
+			printf "%b\n" "$domain" >"$dir/subdomains/subdomains.txt"
+		fi
+
+		# Run the main.py script
+		python3 main.py -t "$domain" -f "${dir}/subdomains/subdomains.txt" -o "${dir}/.tmp/${domain}.brute" \
+			2>>"$LOGFILE" >/dev/null
+
+		# Return to the previous directory
+		if ! popd >/dev/null; then
+			printf "%b[!] Failed to return to previous directory.%b\n" "$bred" "$reset"
+			return 1
+		fi
 
+		# Resolve the generated domains
 		if [[ $AXIOM != true ]]; then
-			resolvers_update_quick_local
-			[ -s ".tmp/${domain}.brute" ] && puredns resolve .tmp/${domain}.brute -w .tmp/regulator.txt -r $resolvers --resolvers-trusted $resolvers_trusted -l $PUREDNS_PUBLIC_LIMIT --rate-limit-trusted $PUREDNS_TRUSTED_LIMIT --wildcard-tests $PUREDNS_WILDCARDTEST_LIMIT --wildcard-batch $PUREDNS_WILDCARDBATCH_LIMIT 2>>"$LOGFILE" >/dev/null
+			if ! resolvers_update_quick_local; then
+				printf "%b[!] Failed to update resolvers locally.%b\n" "$bred" "$reset"
+				return 1
+			fi
+
+			if [[ -s ".tmp/${domain}.brute" ]]; then
+				puredns resolve ".tmp/${domain}.brute" -w .tmp/regulator.txt -r "$resolvers" --resolvers-trusted "$resolvers_trusted" \
+					-l "$PUREDNS_PUBLIC_LIMIT" --rate-limit-trusted "$PUREDNS_TRUSTED_LIMIT" \
+					--wildcard-tests "$PUREDNS_WILDCARDTEST_LIMIT" --wildcard-batch "$PUREDNS_WILDCARDBATCH_LIMIT" \
+					2>>"$LOGFILE" >/dev/null
+			fi
 		else
-			resolvers_update_quick_axiom
-			[ -s ".tmp/${domain}.brute" ] && axiom-scan .tmp/${domain}.brute -m puredns-resolve -r /home/op/lists/resolvers.txt --resolvers-trusted /home/op/lists/resolvers_trusted.txt --wildcard-tests $PUREDNS_WILDCARDTEST_LIMIT --wildcard-batch $PUREDNS_WILDCARDBATCH_LIMIT -o .tmp/regulator.txt $AXIOM_EXTRA_ARGS 2>>"$LOGFILE" >/dev/null
+			if ! resolvers_update_quick_axiom; then
+				printf "%b[!] Failed to update resolvers on Axiom.%b\n" "$bred" "$reset"
+				return 1
+			fi
+
+			if [[ -s ".tmp/${domain}.brute" ]]; then
+				axiom-scan ".tmp/${domain}.brute" -m puredns-resolve \
+					-r /home/op/lists/resolvers.txt --resolvers-trusted /home/op/lists/resolvers_trusted.txt \
+					--wildcard-tests "$PUREDNS_WILDCARDTEST_LIMIT" --wildcard-batch "$PUREDNS_WILDCARDBATCH_LIMIT" \
+					-o .tmp/regulator.txt $AXIOM_EXTRA_ARGS \
+					2>>"$LOGFILE" >/dev/null
+			fi
 		fi
 
+		# Process the resolved domains
 		if [[ -s ".tmp/regulator.txt" ]]; then
-			[ -s "$outOfScope_file" ] && deleteOutScoped $outOfScope_file .tmp/regulator.txt
-			[[ $INSCOPE == true ]] && check_inscope .tmp/regulator.txt 2>>"$LOGFILE" >/dev/null
-			NUMOFLINES=$(cat .tmp/regulator.txt 2>>"$LOGFILE" | grep ".$domain$" | grep -E '^((http|https):\/\/)?([a-zA-Z0-9]([a-zA-Z0-9\-]*[a-zA-Z0-9])?\.)+[a-zA-Z]{1,}(\/.*)?$' | anew subdomains/subdomains.txt | sed '/^$/d' | wc -l)
+			if [[ -s $outOfScope_file ]]; then
+				if ! deleteOutScoped "$outOfScope_file" .tmp/regulator.txt; then
+					printf "%b[!] deleteOutScoped command failed.%b\n" "$bred" "$reset"
+				fi
+			fi
+
+			if [[ $INSCOPE == true ]]; then
+				if ! check_inscope .tmp/regulator.txt 2>>"$LOGFILE" >/dev/null; then
+					printf "%b[!] check_inscope command failed.%b\n" "$bred" "$reset"
+				fi
+			fi
+
+			if ! NUMOFLINES=$(grep "\.$domain$\|^$domain$" .tmp/regulator.txt 2>>"$LOGFILE" |
+				grep -E '^([a-zA-Z0-9\.\-]+\.)+[a-zA-Z]{1,}$' |
+				anew subdomains/subdomains.txt |
+				sed '/^$/d' |
+				wc -l); then
+				printf "%b[!] Failed to count new subdomains.%b\n" "$bred" "$reset"
+				NUMOFLINES=0
+			fi
 		else
 			NUMOFLINES=0
 		fi
-		end_subfunc "${NUMOFLINES} new subs (permutations by regex)" ${FUNCNAME[0]}
+
+		end_subfunc "${NUMOFLINES} new subs (permutations by regex)" "${FUNCNAME[0]}"
+
 	else
 		if [[ $SUBREGEXPERMUTE == false ]]; then
-			printf "\n${yellow}[$(date +'%Y-%m-%d %H:%M:%S')] ${FUNCNAME[0]} skipped in this mode or defined in reconftw.cfg ${reset}\n"
+			printf "\n%b[%s] %s skipped in this mode or disabled in reconftw.cfg.%b\n" \
+				"$yellow" "$(date +'%Y-%m-%d %H:%M:%S')" "${FUNCNAME[0]}" "$reset"
 		else
-			printf "${yellow}[$(date +'%Y-%m-%d %H:%M:%S')] ${FUNCNAME[0]} is already processed, to force executing ${FUNCNAME[0]} delete\n    $called_fn_dir/.${FUNCNAME[0]} ${reset}\n\n"
+			printf "%b[%s] %s has already been processed. To force execution, delete:\n    %s/.%s%b\n\n" \
+				"$yellow" "$(date +'%Y-%m-%d %H:%M:%S')" "${FUNCNAME[0]}" "$called_fn_dir" "${FUNCNAME[0]}" "$reset"
 		fi
 	fi
 
@@ -1188,136 +1901,400 @@ function sub_regex_permut() {
 
 function sub_recursive_passive() {
 
+	# Create necessary directories
+	if ! mkdir -p .tmp subdomains; then
+		printf "%b[!] Failed to create directories.%b\n" "$bred" "$reset"
+		return 1
+	fi
+
+	# Check if the function should run
 	if { [[ ! -f "$called_fn_dir/.${FUNCNAME[0]}" ]] || [[ $DIFF == true ]]; } && [[ $SUB_RECURSIVE_PASSIVE == true ]] && [[ -s "subdomains/subdomains.txt" ]]; then
-		start_subfunc ${FUNCNAME[0]} "Running : Subdomains recursive search passive"
-		[[ -n $multi ]] && [ ! -f "$dir/subdomains/subdomains.txt" ] && echo "$domain" >"$dir/subdomains/subdomains.txt"
+		start_subfunc "${FUNCNAME[0]}" "Running: Subdomains recursive search passive"
+
+		# If in multi mode and subdomains.txt doesn't exist, create it with the domain
+		if [[ -n $multi ]] && [[ ! -f "$dir/subdomains/subdomains.txt" ]]; then
+			printf "%b\n" "$domain" >"$dir/subdomains/subdomains.txt"
+		fi
+
 		# Passive recursive
-		[ -s "subdomains/subdomains.txt" ] && dsieve -if subdomains/subdomains.txt -f 3 -top $DEEP_RECURSIVE_PASSIVE >.tmp/subdomains_recurs_top.txt
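+		# dsieve ranks parent domains by subdomain count; only the top
+		# $DEEP_RECURSIVE_PASSIVE are fed back into passive enumeration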
+		if [[ -s "subdomains/subdomains.txt" ]]; then
+			dsieve -if subdomains/subdomains.txt -f 3 -top "$DEEP_RECURSIVE_PASSIVE" >.tmp/subdomains_recurs_top.txt
+		else
+			printf "%b[!] No subdomains to process.%b\n" "$yellow" "$reset"
+			return 1
+		fi
+
 		if [[ $AXIOM != true ]]; then
-			resolvers_update_quick_local
-			[ -s ".tmp/subdomains_recurs_top.txt" ] && subfinder -all -dL .tmp/subdomains_recurs_top.txt -max-time ${SUBFINDER_ENUM_TIMEOUT} -silent -o .tmp/passive_recursive_tmp.txt 2>>"$LOGFILE" || (true && echo "Subfinder recursive timeout reached")
-			[ -s ".tmp/passive_recursive_tmp.txt" ] && cat .tmp/passive_recursive_tmp.txt | anew -q .tmp/passive_recursive.txt
-			[ -s ".tmp/passive_recursive.txt" ] && puredns resolve .tmp/passive_recursive.txt -w .tmp/passive_recurs_tmp.txt -r $resolvers --resolvers-trusted $resolvers_trusted -l $PUREDNS_PUBLIC_LIMIT --rate-limit-trusted $PUREDNS_TRUSTED_LIMIT --wildcard-tests $PUREDNS_WILDCARDTEST_LIMIT --wildcard-batch $PUREDNS_WILDCARDBATCH_LIMIT 2>>"$LOGFILE" >/dev/null
+			if ! resolvers_update_quick_local; then
+				printf "%b[!] Failed to update resolvers locally.%b\n" "$bred" "$reset"
+				return 1
+			fi
+
+			if [[ -s ".tmp/subdomains_recurs_top.txt" ]]; then
+				subfinder -all -dL .tmp/subdomains_recurs_top.txt -max-time "${SUBFINDER_ENUM_TIMEOUT}" \
+					-silent -o .tmp/passive_recursive_tmp.txt 2>>"$LOGFILE"
+			else
+				printf "%b[!] No top subdomains to process.%b\n" "$yellow" "$reset"
+				return 1
+			fi
+
+			if [[ -s ".tmp/passive_recursive_tmp.txt" ]]; then
+				cat .tmp/passive_recursive_tmp.txt | anew -q .tmp/passive_recursive.txt
+			else
+				printf "%b[!] No passive recursive subdomains found.%b\n" "$yellow" "$reset"
+			fi
+
+			if [[ -s ".tmp/passive_recursive.txt" ]]; then
+				puredns resolve .tmp/passive_recursive.txt -w .tmp/passive_recurs_tmp.txt -r "$resolvers" --resolvers-trusted "$resolvers_trusted" \
+					-l "$PUREDNS_PUBLIC_LIMIT" --rate-limit-trusted "$PUREDNS_TRUSTED_LIMIT" \
+					--wildcard-tests "$PUREDNS_WILDCARDTEST_LIMIT" --wildcard-batch "$PUREDNS_WILDCARDBATCH_LIMIT" \
+					2>>"$LOGFILE" >/dev/null
+			else
+				printf "%b[!] No subdomains to resolve.%b\n" "$yellow" "$reset"
+			fi
+
 		else
-			resolvers_update_quick_axiom
-			[ -s ".tmp/subdomains_recurs_top.txt" ] && axiom-scan .tmp/subdomains_recurs_top.txt -m subfinder -all -o .tmp/subfinder_prec.txt $AXIOM_EXTRA_ARGS 2>>"$LOGFILE" >/dev/null
-			[ -s ".tmp/subfinder_prec.txt" ] && cat .tmp/subfinder_prec.txt | anew -q .tmp/passive_recursive.txt
-			[ -s ".tmp/passive_recursive.txt" ] && axiom-scan .tmp/passive_recursive.txt -m puredns-resolve -r /home/op/lists/resolvers.txt --resolvers-trusted /home/op/lists/resolvers_trusted.txt --wildcard-tests $PUREDNS_WILDCARDTEST_LIMIT --wildcard-batch $PUREDNS_WILDCARDBATCH_LIMIT -o .tmp/passive_recurs_tmp.txt $AXIOM_EXTRA_ARGS 2>>"$LOGFILE" >/dev/null
+			if ! resolvers_update_quick_axiom; then
+				printf "%b[!] Failed to update resolvers on Axiom.%b\n" "$bred" "$reset"
+				return 1
+			fi
+
+			if [[ -s ".tmp/subdomains_recurs_top.txt" ]]; then
+				axiom-scan .tmp/subdomains_recurs_top.txt -m subfinder -all -o .tmp/subfinder_prec.txt $AXIOM_EXTRA_ARGS 2>>"$LOGFILE" >/dev/null
+			else
+				printf "%b[!] No top subdomains to process.%b\n" "$yellow" "$reset"
+				return 1
+			fi
+
+			if [[ -s ".tmp/subfinder_prec.txt" ]]; then
+				cat .tmp/subfinder_prec.txt | anew -q .tmp/passive_recursive.txt
+			else
+				printf "%b[!] No passive recursive subdomains found.%b\n" "$yellow" "$reset"
+			fi
+
+			if [[ -s ".tmp/passive_recursive.txt" ]]; then
+				axiom-scan .tmp/passive_recursive.txt -m puredns-resolve \
+					-r /home/op/lists/resolvers.txt --resolvers-trusted /home/op/lists/resolvers_trusted.txt \
+					--wildcard-tests "$PUREDNS_WILDCARDTEST_LIMIT" --wildcard-batch "$PUREDNS_WILDCARDBATCH_LIMIT" \
+					-o .tmp/passive_recurs_tmp.txt $AXIOM_EXTRA_ARGS 2>>"$LOGFILE" >/dev/null
+			else
+				printf "%b[!] No subdomains to resolve.%b\n" "$yellow" "$reset"
+			fi
+		fi
+
+		if [[ $INSCOPE == true ]]; then
+			if ! check_inscope .tmp/passive_recurs_tmp.txt 2>>"$LOGFILE" >/dev/null; then
+				printf "%b[!] check_inscope command failed.%b\n" "$bred" "$reset"
+			fi
+		fi
+
+		if [[ -s ".tmp/passive_recurs_tmp.txt" ]]; then
+			if ! NUMOFLINES=$(grep "\.$domain$\|^$domain$" .tmp/passive_recurs_tmp.txt 2>>"$LOGFILE" |
+				grep -E '^([a-zA-Z0-9\.\-]+\.)+[a-zA-Z]{1,}$' |
+				sed '/^$/d' |
+				anew subdomains/subdomains.txt |
+				wc -l); then
+				printf "%b[!] Failed to count new subdomains.%b\n" "$bred" "$reset"
+				NUMOFLINES=0
+			fi
+		else
+			NUMOFLINES=0
 		fi
-		[[ $INSCOPE == true ]] && check_inscope .tmp/passive_recurs_tmp.txt 2>>"$LOGFILE" >/dev/null
-		NUMOFLINES=$(cat .tmp/passive_recurs_tmp.txt 2>>"$LOGFILE" | grep "\.$domain$\|^$domain$" | grep -E '^((http|https):\/\/)?([a-zA-Z0-9]([a-zA-Z0-9\-]*[a-zA-Z0-9])?\.)+[a-zA-Z]{1,}(\/.*)?$' | sed '/^$/d' | anew subdomains/subdomains.txt | wc -l)
-		end_subfunc "${NUMOFLINES} new subs (recursive)" ${FUNCNAME[0]}
+
+		end_subfunc "${NUMOFLINES} new subs (recursive)" "${FUNCNAME[0]}"
+
 	else
 		if [[ $SUB_RECURSIVE_PASSIVE == false ]]; then
-			printf "\n${yellow}[$(date +'%Y-%m-%d %H:%M:%S')] ${FUNCNAME[0]} skipped in this mode or defined in reconftw.cfg ${reset}\n"
+			printf "\n%b[%s] %s skipped in this mode or disabled in reconftw.cfg.%b\n" \
+				"$yellow" "$(date +'%Y-%m-%d %H:%M:%S')" "${FUNCNAME[0]}" "$reset"
+		elif [[ ! -s "subdomains/subdomains.txt" ]]; then
+			printf "\n%b[%s] No subdomains to process.%b\n" \
+				"$yellow" "$(date +'%Y-%m-%d %H:%M:%S')" "$reset"
 		else
-			printf "${yellow}[$(date +'%Y-%m-%d %H:%M:%S')] ${FUNCNAME[0]} is already processed, to force executing ${FUNCNAME[0]} delete\n    $called_fn_dir/.${FUNCNAME[0]} ${reset}\n\n"
+			printf "%b[%s] %s has already been processed. To force execution, delete:\n    %s/.%s%b\n\n" \
+				"$yellow" "$(date +'%Y-%m-%d %H:%M:%S')" "${FUNCNAME[0]}" "$called_fn_dir" "${FUNCNAME[0]}" "$reset"
 		fi
 	fi
 
 }
 
 function sub_recursive_brute() {
+	# Create necessary directories
+	if ! mkdir -p .tmp subdomains; then
+		printf "%b[!] Failed to create directories.%b\n" "$bred" "$reset"
+		return 1
+	fi
 
-	mkdir -p {.tmp,subdomains}
+	# Check if the function should run
 	if { [[ ! -f "$called_fn_dir/.${FUNCNAME[0]}" ]] || [[ $DIFF == true ]]; } && [[ $SUB_RECURSIVE_BRUTE == true ]] && [[ -s "subdomains/subdomains.txt" ]]; then
-		start_subfunc ${FUNCNAME[0]} "Running : Subdomains recursive search active"
-		[[ -n $multi ]] && [ ! -f "$dir/subdomains/subdomains.txt" ] && echo "$domain" >"$dir/subdomains/subdomains.txt"
-		if [[ $(cat subdomains/subdomains.txt | wc -l) -le $DEEP_LIMIT ]]; then
-			[ ! -s ".tmp/subdomains_recurs_top.txt" ] && dsieve -if subdomains/subdomains.txt -f 3 -top $DEEP_RECURSIVE_PASSIVE >.tmp/subdomains_recurs_top.txt
-			ripgen -d .tmp/subdomains_recurs_top.txt -w $subs_wordlist >.tmp/brute_recursive_wordlist.txt
+		start_subfunc "${FUNCNAME[0]}" "Running: Subdomains recursive search active"
+
+		# If in multi mode and subdomains.txt doesn't exist, create it with the domain
+		if [[ -n $multi ]] && [[ ! -f "$dir/subdomains/subdomains.txt" ]]; then
+			echo "$domain" >"$dir/subdomains/subdomains.txt"
+		fi
+
+		# Check the number of subdomains
+		subdomain_count=$(wc -l <subdomains/subdomains.txt)
+		if [[ $subdomain_count -le $DEEP_LIMIT ]]; then
+			# Generate top subdomains if not already done
+			if [[ ! -s ".tmp/subdomains_recurs_top.txt" ]]; then
+				dsieve -if subdomains/subdomains.txt -f 3 -top "$DEEP_RECURSIVE_PASSIVE" >.tmp/subdomains_recurs_top.txt
+			fi
+
+			# Generate brute recursive wordlist
+			ripgen -d .tmp/subdomains_recurs_top.txt -w "$subs_wordlist" >.tmp/brute_recursive_wordlist.txt
+
 			if [[ $AXIOM != true ]]; then
-				resolvers_update_quick_local
-				[ -s ".tmp/brute_recursive_wordlist.txt" ] && puredns resolve .tmp/brute_recursive_wordlist.txt -r $resolvers --resolvers-trusted $resolvers_trusted -l $PUREDNS_PUBLIC_LIMIT --rate-limit-trusted $PUREDNS_TRUSTED_LIMIT --wildcard-tests $PUREDNS_WILDCARDTEST_LIMIT --wildcard-batch $PUREDNS_WILDCARDBATCH_LIMIT -w .tmp/brute_recursive_result.txt 2>>"$LOGFILE" >/dev/null
+				if ! resolvers_update_quick_local; then
+					printf "%b[!] Failed to update resolvers locally.%b\n" "$bred" "$reset"
+					return 1
+				fi
+
+				if [[ -s ".tmp/brute_recursive_wordlist.txt" ]]; then
+					puredns resolve .tmp/brute_recursive_wordlist.txt -r "$resolvers" --resolvers-trusted "$resolvers_trusted" \
+						-l "$PUREDNS_PUBLIC_LIMIT" --rate-limit-trusted "$PUREDNS_TRUSTED_LIMIT" \
+						--wildcard-tests "$PUREDNS_WILDCARDTEST_LIMIT" --wildcard-batch "$PUREDNS_WILDCARDBATCH_LIMIT" \
+						-w .tmp/brute_recursive_result.txt 2>>"$LOGFILE" >/dev/null
+				fi
 			else
-				resolvers_update_quick_axiom
-				[ -s ".tmp/brute_recursive_wordlist.txt" ] && axiom-scan .tmp/brute_recursive_wordlist.txt -m puredns-resolve -r /home/op/lists/resolvers.txt --resolvers-trusted /home/op/lists/resolvers_trusted.txt --wildcard-tests $PUREDNS_WILDCARDTEST_LIMIT --wildcard-batch $PUREDNS_WILDCARDBATCH_LIMIT -o .tmp/brute_recursive_result.txt $AXIOM_EXTRA_ARGS 2>>"$LOGFILE" >/dev/null
+				if ! resolvers_update_quick_axiom; then
+					printf "%b[!] Failed to update resolvers on Axiom.%b\n" "$bred" "$reset"
+					return 1
+				fi
+
+				if [[ -s ".tmp/brute_recursive_wordlist.txt" ]]; then
+					axiom-scan .tmp/brute_recursive_wordlist.txt -m puredns-resolve \
+						-r /home/op/lists/resolvers.txt --resolvers-trusted /home/op/lists/resolvers_trusted.txt \
+						--wildcard-tests "$PUREDNS_WILDCARDTEST_LIMIT" --wildcard-batch "$PUREDNS_WILDCARDBATCH_LIMIT" \
+						-o .tmp/brute_recursive_result.txt $AXIOM_EXTRA_ARGS \
+						2>>"$LOGFILE" >/dev/null
+				fi
+			fi
+
+			if [[ -s ".tmp/brute_recursive_result.txt" ]]; then
+				cat .tmp/brute_recursive_result.txt | anew -q .tmp/brute_recursive.txt
 			fi
-			[ -s ".tmp/brute_recursive_result.txt" ] && cat .tmp/brute_recursive_result.txt | anew -q .tmp/brute_recursive.txt
 
+			# Generate permutations
 			if [[ $PERMUTATIONS_OPTION == "gotator" ]]; then
-				[ -s ".tmp/brute_recursive.txt" ] && gotator -sub .tmp/brute_recursive.txt -perm ${tools}/permutations_list.txt $GOTATOR_FLAGS -silent 2>>"$LOGFILE" | head -c $PERMUTATIONS_LIMIT >.tmp/gotator1_recursive.txt
+				if [[ -s ".tmp/brute_recursive.txt" ]]; then
+					gotator -sub .tmp/brute_recursive.txt -perm "${tools}/permutations_list.txt" $GOTATOR_FLAGS -silent \
+						2>>"$LOGFILE" | head -c "$PERMUTATIONS_LIMIT" >.tmp/gotator1_recursive.txt
+				fi
 			else
-				[ -s ".tmp/brute_recursive.txt" ] && ripgen -d .tmp/brute_recursive.txt -w ${tools}/permutations_list.txt 2>>"$LOGFILE" | head -c $PERMUTATIONS_LIMIT >.tmp/gotator1_recursive.txt
+				if [[ -s ".tmp/brute_recursive.txt" ]]; then
+					ripgen -d .tmp/brute_recursive.txt -w "${tools}/permutations_list.txt" \
+						2>>"$LOGFILE" | head -c "$PERMUTATIONS_LIMIT" >.tmp/gotator1_recursive.txt
+				fi
 			fi
 
+			# Resolve permutations
 			if [[ $AXIOM != true ]]; then
-				[ -s ".tmp/gotator1_recursive.txt" ] && puredns resolve .tmp/gotator1_recursive.txt -w .tmp/permute1_recursive.txt -r $resolvers --resolvers-trusted $resolvers_trusted -l $PUREDNS_PUBLIC_LIMIT --rate-limit-trusted $PUREDNS_TRUSTED_LIMIT --wildcard-tests $PUREDNS_WILDCARDTEST_LIMIT --wildcard-batch $PUREDNS_WILDCARDBATCH_LIMIT 2>>"$LOGFILE" >/dev/null
+				if [[ -s ".tmp/gotator1_recursive.txt" ]]; then
+					puredns resolve .tmp/gotator1_recursive.txt -w .tmp/permute1_recursive.txt \
+						-r "$resolvers" --resolvers-trusted "$resolvers_trusted" \
+						-l "$PUREDNS_PUBLIC_LIMIT" --rate-limit-trusted "$PUREDNS_TRUSTED_LIMIT" \
+						--wildcard-tests "$PUREDNS_WILDCARDTEST_LIMIT" --wildcard-batch "$PUREDNS_WILDCARDBATCH_LIMIT" \
+						2>>"$LOGFILE" >/dev/null
+				fi
 			else
-				[ -s ".tmp/gotator1_recursive.txt" ] && axiom-scan .tmp/gotator1_recursive.txt -m puredns-resolve -r /home/op/lists/resolvers.txt --resolvers-trusted /home/op/lists/resolvers_trusted.txt --wildcard-tests $PUREDNS_WILDCARDTEST_LIMIT --wildcard-batch $PUREDNS_WILDCARDBATCH_LIMIT -o .tmp/permute1_recursive.txt $AXIOM_EXTRA_ARGS 2>>"$LOGFILE" >/dev/null
+				if [[ -s ".tmp/gotator1_recursive.txt" ]]; then
+					axiom-scan .tmp/gotator1_recursive.txt -m puredns-resolve \
+						-r /home/op/lists/resolvers.txt --resolvers-trusted /home/op/lists/resolvers_trusted.txt \
+						--wildcard-tests "$PUREDNS_WILDCARDTEST_LIMIT" --wildcard-batch "$PUREDNS_WILDCARDBATCH_LIMIT" \
+						-o .tmp/permute1_recursive.txt $AXIOM_EXTRA_ARGS \
+						2>>"$LOGFILE" >/dev/null
+				fi
 			fi
 
+			# Second round of permutations
 			if [[ $PERMUTATIONS_OPTION == "gotator" ]]; then
-				[ -s ".tmp/permute1_recursive.txt" ] && gotator -sub .tmp/permute1_recursive.txt -perm ${tools}/permutations_list.txt $GOTATOR_FLAGS -silent 2>>"$LOGFILE" | head -c $PERMUTATIONS_LIMIT >.tmp/gotator2_recursive.txt
+				if [[ -s ".tmp/permute1_recursive.txt" ]]; then
+					gotator -sub .tmp/permute1_recursive.txt -perm "${tools}/permutations_list.txt" $GOTATOR_FLAGS -silent \
+						2>>"$LOGFILE" | head -c "$PERMUTATIONS_LIMIT" >.tmp/gotator2_recursive.txt
+				fi
 			else
-				[ -s ".tmp/permute1_recursive.txt" ] && ripgen -d .tmp/permute1_recursive.txt -w ${tools}/permutations_list.txt 2>>"$LOGFILE" | head -c $PERMUTATIONS_LIMIT >.tmp/gotator2_recursive.txt
+				if [[ -s ".tmp/permute1_recursive.txt" ]]; then
+					ripgen -d .tmp/permute1_recursive.txt -w "${tools}/permutations_list.txt" \
+						2>>"$LOGFILE" | head -c "$PERMUTATIONS_LIMIT" >.tmp/gotator2_recursive.txt
+				fi
 			fi
 
+			# Resolve second round of permutations
 			if [[ $AXIOM != true ]]; then
-				[ -s ".tmp/gotator2_recursive.txt" ] && puredns resolve .tmp/gotator2_recursive.txt -w .tmp/permute2_recursive.txt -r $resolvers --resolvers-trusted $resolvers_trusted -l $PUREDNS_PUBLIC_LIMIT --rate-limit-trusted $PUREDNS_TRUSTED_LIMIT --wildcard-tests $PUREDNS_WILDCARDTEST_LIMIT --wildcard-batch $PUREDNS_WILDCARDBATCH_LIMIT 2>>"$LOGFILE" >/dev/null
+				if [[ -s ".tmp/gotator2_recursive.txt" ]]; then
+					puredns resolve .tmp/gotator2_recursive.txt -w .tmp/permute2_recursive.txt \
+						-r "$resolvers" --resolvers-trusted "$resolvers_trusted" \
+						-l "$PUREDNS_PUBLIC_LIMIT" --rate-limit-trusted "$PUREDNS_TRUSTED_LIMIT" \
+						--wildcard-tests "$PUREDNS_WILDCARDTEST_LIMIT" --wildcard-batch "$PUREDNS_WILDCARDBATCH_LIMIT" \
+						2>>"$LOGFILE" >/dev/null
+				fi
 			else
-				[ -s ".tmp/gotator2_recursive.txt" ] && axiom-scan .tmp/gotator2_recursive.txt -m puredns-resolve -r /home/op/lists/resolvers.txt --resolvers-trusted /home/op/lists/resolvers_trusted.txt --wildcard-tests $PUREDNS_WILDCARDTEST_LIMIT --wildcard-batch $PUREDNS_WILDCARDBATCH_LIMIT -o .tmp/permute2_recursive.txt $AXIOM_EXTRA_ARGS 2>>"$LOGFILE" >/dev/null
+				if [[ -s ".tmp/gotator2_recursive.txt" ]]; then
+					axiom-scan .tmp/gotator2_recursive.txt -m puredns-resolve \
+						-r /home/op/lists/resolvers.txt --resolvers-trusted /home/op/lists/resolvers_trusted.txt \
+						--wildcard-tests "$PUREDNS_WILDCARDTEST_LIMIT" --wildcard-batch "$PUREDNS_WILDCARDBATCH_LIMIT" \
+						-o .tmp/permute2_recursive.txt $AXIOM_EXTRA_ARGS \
+						2>>"$LOGFILE" >/dev/null
+				fi
+			fi
+
+			# Combine permutations
+			if [[ -s ".tmp/permute1_recursive.txt" ]] || [[ -s ".tmp/permute2_recursive.txt" ]]; then
+				cat .tmp/permute1_recursive.txt .tmp/permute2_recursive.txt 2>>"$LOGFILE" | anew -q .tmp/permute_recursive.txt
 			fi
-			cat .tmp/permute1_recursive.txt .tmp/permute2_recursive.txt 2>>"$LOGFILE" | anew -q .tmp/permute_recursive.txt
 		else
-			end_subfunc "skipped in this mode or defined in reconftw.cfg" ${FUNCNAME[0]}
+			end_subfunc "Skipping recursive search: Too many subdomains" "${FUNCNAME[0]}"
+			return 0
 		fi
+
+		# Check inscope if applicable
 		if [[ $INSCOPE == true ]]; then
-			check_inscope .tmp/permute_recursive.txt 2>>"$LOGFILE" >/dev/null
-			check_inscope .tmp/brute_recursive.txt 2>>"$LOGFILE" >/dev/null
+			if [[ -s ".tmp/permute_recursive.txt" ]]; then
+				if ! check_inscope .tmp/permute_recursive.txt 2>>"$LOGFILE" >/dev/null; then
+					printf "%b[!] check_inscope command failed on permute_recursive.txt.%b\n" "$bred" "$reset"
+				fi
+			fi
+			if [[ -s ".tmp/brute_recursive.txt" ]]; then
+				if ! check_inscope .tmp/brute_recursive.txt 2>>"$LOGFILE" >/dev/null; then
+					printf "%b[!] check_inscope command failed on brute_recursive.txt.%b\n" "$bred" "$reset"
+				fi
+			fi
 		fi
 
-		# Last validation
-		cat .tmp/permute_recursive.txt .tmp/brute_recursive.txt 2>>"$LOGFILE" | anew -q .tmp/brute_perm_recursive.txt
+		# Combine results for final validation
+		if [[ -s ".tmp/permute_recursive.txt" ]] || [[ -s ".tmp/brute_recursive.txt" ]]; then
+			if ! cat .tmp/permute_recursive.txt .tmp/brute_recursive.txt 2>>"$LOGFILE" | anew -q .tmp/brute_perm_recursive.txt; then
+				printf "%b[!] Failed to combine final results.%b\n" "$bred" "$reset"
+				return 1
+			fi
+		fi
+
+		# Final resolve
 		if [[ $AXIOM != true ]]; then
-			[ -s ".tmp/brute_recursive.txt" ] && puredns resolve .tmp/brute_perm_recursive.txt -w .tmp/brute_perm_recursive_final.txt -r $resolvers --resolvers-trusted $resolvers_trusted -l $PUREDNS_PUBLIC_LIMIT --rate-limit-trusted $PUREDNS_TRUSTED_LIMIT --wildcard-tests $PUREDNS_WILDCARDTEST_LIMIT --wildcard-batch $PUREDNS_WILDCARDBATCH_LIMIT 2>>"$LOGFILE" >/dev/null
+			if [[ -s ".tmp/brute_perm_recursive.txt" ]]; then
+				puredns resolve .tmp/brute_perm_recursive.txt -w .tmp/brute_perm_recursive_final.txt \
+					-r "$resolvers" --resolvers-trusted "$resolvers_trusted" \
+					-l "$PUREDNS_PUBLIC_LIMIT" --rate-limit-trusted "$PUREDNS_TRUSTED_LIMIT" \
+					--wildcard-tests "$PUREDNS_WILDCARDTEST_LIMIT" --wildcard-batch "$PUREDNS_WILDCARDBATCH_LIMIT" \
+					2>>"$LOGFILE" >/dev/null
+			fi
+		else
+			if [[ -s ".tmp/brute_perm_recursive.txt" ]]; then
+				axiom-scan .tmp/brute_perm_recursive.txt -m puredns-resolve \
+					-r /home/op/lists/resolvers.txt --resolvers-trusted /home/op/lists/resolvers_trusted.txt \
+					--wildcard-tests "$PUREDNS_WILDCARDTEST_LIMIT" --wildcard-batch "$PUREDNS_WILDCARDBATCH_LIMIT" \
+					-o .tmp/brute_perm_recursive_final.txt $AXIOM_EXTRA_ARGS \
+					2>>"$LOGFILE" >/dev/null
+			fi
+		fi
+
+		# Process final results
+		if [[ -s ".tmp/brute_perm_recursive_final.txt" ]]; then
+			if ! NUMOFLINES=$(grep "\.$domain$\|^$domain$" .tmp/brute_perm_recursive_final.txt 2>>"$LOGFILE" |
+				grep -E '^([a-zA-Z0-9\.\-]+\.)+[a-zA-Z]{1,}$' |
+				sed '/^$/d' |
+				anew subdomains/subdomains.txt |
+				wc -l); then
+				printf "%b[!] Failed to count new subdomains.%b\n" "$bred" "$reset"
+				NUMOFLINES=0
+			fi
 		else
-			[ -s ".tmp/brute_recursive.txt" ] && axiom-scan .tmp/brute_perm_recursive.txt -m puredns-resolve -r /home/op/lists/resolvers.txt --resolvers-trusted /home/op/lists/resolvers_trusted.txt --wildcard-tests $PUREDNS_WILDCARDTEST_LIMIT --wildcard-batch $PUREDNS_WILDCARDBATCH_LIMIT -o .tmp/brute_perm_recursive_final.txt $AXIOM_EXTRA_ARGS 2>>"$LOGFILE" >/dev/null
+			NUMOFLINES=0
 		fi
 
-		NUMOFLINES=$(cat .tmp/brute_perm_recursive_final.txt 2>>"$LOGFILE" | grep "\.$domain$\|^$domain$" | grep -E '^((http|https):\/\/)?([a-zA-Z0-9]([a-zA-Z0-9\-]*[a-zA-Z0-9])?\.)+[a-zA-Z]{1,}(\/.*)?$' | sed '/^$/d' | anew subdomains/subdomains.txt | wc -l)
-		end_subfunc "${NUMOFLINES} new subs (recursive active)" ${FUNCNAME[0]}
+		end_subfunc "${NUMOFLINES} new subs (recursive active)" "${FUNCNAME[0]}"
+
 	else
 		if [[ $SUB_RECURSIVE_BRUTE == false ]]; then
-			printf "\n${yellow}[$(date +'%Y-%m-%d %H:%M:%S')] ${FUNCNAME[0]} skipped in this mode or defined in reconftw.cfg ${reset}\n"
+			printf "\n%b[%s] %s skipped in this mode or disabled in reconftw.cfg.%b\n" \
+				"$yellow" "$(date +'%Y-%m-%d %H:%M:%S')" "${FUNCNAME[0]}" "$reset"
+		elif [[ ! -s "subdomains/subdomains.txt" ]]; then
+			printf "\n%b[%s] No subdomains to process.%b\n" \
+				"$yellow" "$(date +'%Y-%m-%d %H:%M:%S')" "$reset"
 		else
-			printf "${yellow}[$(date +'%Y-%m-%d %H:%M:%S')] ${FUNCNAME[0]} is already processed, to force executing ${FUNCNAME[0]} delete\n    $called_fn_dir/.${FUNCNAME[0]} ${reset}\n\n"
+			printf "%b[%s] %s has already been processed. To force execution, delete:\n    %s/.%s%b\n\n" \
+				"$yellow" "$(date +'%Y-%m-%d %H:%M:%S')" "${FUNCNAME[0]}" \
+				"$called_fn_dir" "${FUNCNAME[0]}" "$reset"
 		fi
 	fi
-
 }
 
 function subtakeover() {
 
-	mkdir -p {.tmp,webs,subdomains}
+	# Create necessary directories
+	if ! mkdir -p .tmp webs subdomains; then
+		printf "%b[!] Failed to create directories.%b\n" "$bred" "$reset"
+		return 1
+	fi
+
+	# Check if the function should run
 	if { [[ ! -f "$called_fn_dir/.${FUNCNAME[0]}" ]] || [[ $DIFF == true ]]; } && [[ $SUBTAKEOVER == true ]]; then
-		start_func ${FUNCNAME[0]} "Looking for possible subdomain and DNS takeover"
-		touch .tmp/tko.txt
-		[ ! -s "webs/webs_all.txt" ] && cat webs/webs.txt webs/webs_uncommon_ports.txt 2>/dev/null | anew -q webs/webs_all.txt
+		start_func "${FUNCNAME[0]}" "Looking for possible subdomain and DNS takeover"
+
+		# Initialize takeover file
+		if ! touch .tmp/tko.txt; then
+			printf "%b[!] Failed to create .tmp/tko.txt.%b\n" "$bred" "$reset"
+			return 1
+		fi
+
+		# Combine webs.txt and webs_uncommon_ports.txt if webs_all.txt doesn't exist
+		if [[ ! -s "webs/webs_all.txt" ]]; then
+			cat webs/webs.txt webs/webs_uncommon_ports.txt 2>/dev/null | anew -q webs/webs_all.txt
+		fi
+
 		if [[ $AXIOM != true ]]; then
-			nuclei -update 2>>"$LOGFILE" >/dev/null
-			cat subdomains/subdomains.txt webs/webs_all.txt 2>/dev/null | nuclei -silent -nh -tags takeover -severity info,low,medium,high,critical -retries 3 -rl $NUCLEI_RATELIMIT -t ${NUCLEI_TEMPLATES_PATH} -o .tmp/tko.txt
+			if ! nuclei -update 2>>"$LOGFILE" >/dev/null; then
+				printf "%b[!] Failed to update nuclei.%b\n" "$bred" "$reset"
+			fi
+			cat subdomains/subdomains.txt webs/webs_all.txt 2>/dev/null | nuclei -silent -nh -tags takeover \
+				-severity info,low,medium,high,critical -retries 3 -rl "$NUCLEI_RATELIMIT" \
+				-t "${NUCLEI_TEMPLATES_PATH}" -o .tmp/tko.txt
 		else
 			cat subdomains/subdomains.txt webs/webs_all.txt 2>>"$LOGFILE" | sed '/^$/d' | anew -q .tmp/webs_subs.txt
-			[ -s ".tmp/webs_subs.txt" ] && axiom-scan .tmp/webs_subs.txt -m nuclei --nuclei-templates ${NUCLEI_TEMPLATES_PATH} -tags takeover -nh -severity info,low,medium,high,critical -retries 3 -rl $NUCLEI_RATELIMIT -t ${NUCLEI_TEMPLATES_PATH} -o .tmp/tko.txt $AXIOM_EXTRA_ARGS 2>>"$LOGFILE" >/dev/null
+			if [[ -s ".tmp/webs_subs.txt" ]]; then
+				axiom-scan .tmp/webs_subs.txt -m nuclei --nuclei-templates "${NUCLEI_TEMPLATES_PATH}" \
+					-tags takeover -nh -severity info,low,medium,high,critical -retries 3 -rl "$NUCLEI_RATELIMIT" \
+					-t "${NUCLEI_TEMPLATES_PATH}" -o .tmp/tko.txt $AXIOM_EXTRA_ARGS 2>>"$LOGFILE" >/dev/null
+			else
+				printf "%b[!] No web subdomains to scan.%b\n" "$yellow" "$reset"
+			fi
 		fi
 
-		# DNS_TAKEOVER
-		cat .tmp/subs_no_resolved.txt .tmp/subdomains_dns.txt .tmp/scrap_subs.txt .tmp/analytics_subs_clean.txt .tmp/passive_recursive.txt 2>/dev/null | anew -q .tmp/subs_dns_tko.txt
-		cat .tmp/subs_dns_tko.txt 2>/dev/null | dnstake -c $DNSTAKE_THREADS -s 2>>"$LOGFILE" | sed '/^$/d' | anew -q .tmp/tko.txt
+		# DNS Takeover
+		cat .tmp/subs_no_resolved.txt .tmp/subdomains_dns.txt .tmp/scrap_subs.txt \
+			.tmp/analytics_subs_clean.txt .tmp/passive_recursive.txt 2>/dev/null | anew -q .tmp/subs_dns_tko.txt
+
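+		# dnstake inspects each host's NS records for dangling, takeover-prone zones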
+		if [[ -s ".tmp/subs_dns_tko.txt" ]]; then
+			cat .tmp/subs_dns_tko.txt 2>/dev/null | dnstake -c "$DNSTAKE_THREADS" -s 2>>"$LOGFILE" |
+				sed '/^$/d' | anew -q .tmp/tko.txt
+		else
+			printf "%b[!] No subdomains for DNS takeover scan.%b\n" "$yellow" "$reset"
+		fi
 
+		# Remove empty lines from tko.txt
 		sed -i '/^$/d' .tmp/tko.txt
 
-		NUMOFLINES=$(cat .tmp/tko.txt 2>>"$LOGFILE" | anew webs/takeover.txt | sed '/^$/d' | wc -l)
+		# Count new takeover entries
+		if ! NUMOFLINES=$(cat .tmp/tko.txt 2>>"$LOGFILE" | anew webs/takeover.txt | sed '/^$/d' | wc -l); then
+			printf "%b[!] Failed to count takeover entries.%b\n" "$bred" "$reset"
+			NUMOFLINES=0
+		fi
+
 		if [[ $NUMOFLINES -gt 0 ]]; then
 			notification "${NUMOFLINES} new possible takeovers found" info
 		fi
-		end_func "Results are saved in $domain/webs/takeover.txt" ${FUNCNAME[0]}
+
+		end_func "Results are saved in $domain/webs/takeover.txt" "${FUNCNAME[0]}"
+
 	else
 		if [[ $SUBTAKEOVER == false ]]; then
-			printf "\n${yellow}[$(date +'%Y-%m-%d %H:%M:%S')] ${FUNCNAME[0]} skipped in this mode or defined in reconftw.cfg ${reset}\n"
+			printf "\n%b[%s] %s skipped in this mode or disabled in reconftw.cfg.%b\n" \
+				"$yellow" "$(date +'%Y-%m-%d %H:%M:%S')" "${FUNCNAME[0]}" "$reset"
 		else
-			printf "${yellow}[$(date +'%Y-%m-%d %H:%M:%S')] ${FUNCNAME[0]} is already processed, to force executing ${FUNCNAME[0]} delete\n    $called_fn_dir/.${FUNCNAME[0]} ${reset}\n\n"
+			printf "%b[%s] %s has already been processed. To force execution, delete:\n    %s/.%s%b\n\n" \
+				"$yellow" "$(date +'%Y-%m-%d %H:%M:%S')" "${FUNCNAME[0]}" "$called_fn_dir" "${FUNCNAME[0]}" "$reset"
 		fi
 	fi
 
@@ -1325,54 +2302,113 @@ function subtakeover() {
 
 function zonetransfer() {
 
-	mkdir -p subdomains
-	if { [[ ! -f "$called_fn_dir/.${FUNCNAME[0]}" ]] || [[ $DIFF == true ]]; } && [[ $ZONETRANSFER == true ]] && ! [[ $domain =~ ^[0-9]+\.[0-9]+\.[0-9]+\.[0-9] ]]; then
-		start_func ${FUNCNAME[0]} "Zone transfer check"
-		for ns in $(dig +short ns "$domain"); do dig axfr "$domain" @"$ns" >>subdomains/zonetransfer.txt; done
+	# Create necessary directories
+	if ! mkdir -p subdomains; then
+		printf "%b[!] Failed to create subdomains directory.%b\n" "$bred" "$reset"
+		return 1
+	fi
+
+	# Check if the function should run
+	if { [[ ! -f "$called_fn_dir/.${FUNCNAME[0]}" ]] || [[ $DIFF == true ]]; } && [[ $ZONETRANSFER == true ]] && ! [[ $domain =~ ^[0-9]+\.[0-9]+\.[0-9]+\.[0-9]+$ ]]; then
+		start_func "${FUNCNAME[0]}" "Zone transfer check"
+
+		# Initialize output file
+		if ! : >"subdomains/zonetransfer.txt"; then
+			printf "%b[!] Failed to create zonetransfer.txt.%b\n" "$bred" "$reset"
+			return 1
+		fi
+
+		# Perform zone transfer check
+		for ns in $(dig +short ns "$domain"); do
+			dig axfr "$domain" @"$ns" >>"subdomains/zonetransfer.txt" 2>>"$LOGFILE"
+		done
+
+		# Check if zone transfer was successful
 		if [[ -s "subdomains/zonetransfer.txt" ]]; then
-			if ! grep -q "Transfer failed" subdomains/zonetransfer.txt; then notification "Zone transfer found on ${domain}!" info; fi
+			if ! grep -q "Transfer failed" "subdomains/zonetransfer.txt"; then
+				notification "Zone transfer found on ${domain}!" "info"
+			fi
+		else
+			printf "%b[!] No zone transfer data collected.%b\n" "$yellow" "$reset"
 		fi
-		end_func "Results are saved in $domain/subdomains/zonetransfer.txt" ${FUNCNAME[0]}
+
+		end_func "Results are saved in $domain/subdomains/zonetransfer.txt" "${FUNCNAME[0]}"
+
 	else
 		if [[ $ZONETRANSFER == false ]]; then
-			printf "\n${yellow}[$(date +'%Y-%m-%d %H:%M:%S')] ${FUNCNAME[0]} skipped in this mode or defined in reconftw.cfg ${reset}\n"
-		elif [[ $domain =~ ^[0-9]+\.[0-9]+\.[0-9]+\.[0-9] ]]; then
-			return
+			printf "\n%b[%s] %s skipped in this mode or disabled in reconftw.cfg.%b\n" \
+				"$yellow" "$(date +'%Y-%m-%d %H:%M:%S')" "${FUNCNAME[0]}" "$reset"
+		elif [[ $domain =~ ^[0-9]+\.[0-9]+\.[0-9]+\.[0-9]+$ ]]; then
+			printf "\n%b[%s] Domain is an IP address; skipping zone transfer.%b\n" \
+				"$yellow" "$(date +'%Y-%m-%d %H:%M:%S')" "$reset"
 		else
-			if [[ $ZONETRANSFER == false ]]; then
-				printf "\n${yellow}[$(date +'%Y-%m-%d %H:%M:%S')] ${FUNCNAME[0]} skipped in this mode or defined in reconftw.cfg ${reset}\n"
-			else
-				printf "${yellow}[$(date +'%Y-%m-%d %H:%M:%S')] ${FUNCNAME[0]} is already processed, to force executing ${FUNCNAME[0]} delete\n    $called_fn_dir/.${FUNCNAME[0]} ${reset}\n\n"
-			fi
+			printf "%b[%s] %s has already been processed. To force execution, delete:\n    %s/.%s%b\n\n" \
+				"$yellow" "$(date +'%Y-%m-%d %H:%M:%S')" "${FUNCNAME[0]}" "$called_fn_dir" "${FUNCNAME[0]}" "$reset"
 		fi
 	fi
 
 }
 
 function s3buckets() {
-	mkdir -p {.tmp,subdomains}
-	if { [[ ! -f "$called_fn_dir/.${FUNCNAME[0]}" ]] || [[ $DIFF == true ]]; } && [[ $S3BUCKETS == true ]] && ! [[ $domain =~ ^[0-9]+\.[0-9]+\.[0-9]+\.[0-9] ]]; then
-		start_func ${FUNCNAME[0]} "AWS S3 buckets search"
-		[[ -n $multi ]] && [ ! -f "$dir/subdomains/subdomains.txt" ] && echo "$domain" >"$dir/subdomains/subdomains.txt"
+	# Create necessary directories
+	if ! mkdir -p .tmp webs subdomains; then
+		printf "%b[!] Failed to create directories.%b\n" "$bred" "$reset"
+		return 1
+	fi
+
+	# Check if the function should run
+	if { [[ ! -f "$called_fn_dir/.${FUNCNAME[0]}" ]] || [[ $DIFF == true ]]; } && [[ $S3BUCKETS == true ]] && ! [[ $domain =~ ^[0-9]+\.[0-9]+\.[0-9]+\.[0-9]+$ ]]; then
+		start_func "${FUNCNAME[0]}" "AWS S3 buckets search"
+
+		# If in multi mode and subdomains.txt doesn't exist, create it
+		if [[ -n $multi ]] && [[ ! -f "$dir/subdomains/subdomains.txt" ]]; then
+			if ! printf "%b\n" "$domain" >"$dir/subdomains/subdomains.txt"; then
+				printf "%b[!] Failed to create subdomains.txt.%b\n" "$bred" "$reset"
+				return 1
+			fi
+		fi
 
 		# Debug: Print current directory and tools variable
-		echo "Current directory: $(pwd)" >>"$LOGFILE"
-		echo "Tools directory: $tools" >>"$LOGFILE"
+		printf "Current directory: %s\n" "$(pwd)" >>"$LOGFILE"
+		printf "Tools directory: %s\n" "$tools" >>"$LOGFILE"
 
 		# S3Scanner
 		if [[ $AXIOM != true ]]; then
-			[ -s "subdomains/subdomains.txt" ] && s3scanner scan -f subdomains/subdomains.txt 2>>"$LOGFILE" | anew -q .tmp/s3buckets.txt
+			if [[ -s "subdomains/subdomains.txt" ]]; then
+				s3scanner scan -f subdomains/subdomains.txt 2>>"$LOGFILE" | anew -q .tmp/s3buckets.txt
+			else
+				printf "%b[!] No subdomains to scan with s3scanner.%b\n" "$yellow" "$reset"
+			fi
 		else
-			axiom-scan subdomains/subdomains.txt -m s3scanner -o .tmp/s3buckets_tmp.txt $AXIOM_EXTRA_ARGS 2>>"$LOGFILE" >/dev/null
-			[ -s ".tmp/s3buckets_tmp.txt" ] && cat .tmp/s3buckets_tmp.txt .tmp/s3buckets_tmp2.txt 2>>"$LOGFILE" | anew -q .tmp/s3buckets.txt && sed -i '/^$/d' .tmp/s3buckets.txt
+			axiom-scan subdomains/subdomains.txt -m s3scanner -o .tmp/s3buckets_tmp.txt $AXIOM_EXTRA_ARGS 2>>"$LOGFILE" >/dev/null
+
+			if [[ -s ".tmp/s3buckets_tmp.txt" ]]; then
+				if ! cat .tmp/s3buckets_tmp.txt .tmp/s3buckets_tmp2.txt 2>>"$LOGFILE" | anew -q .tmp/s3buckets.txt; then
+					printf "%b[!] Failed to process s3buckets_tmp.txt.%b\n" "$bred" "$reset"
+				fi
+				if ! sed -i '/^$/d' .tmp/s3buckets.txt; then
+					printf "%b[!] Failed to clean s3buckets.txt.%b\n" "$bred" "$reset"
+				fi
+			else
+				printf "%b[!] No s3buckets_tmp.txt found.%b\n" "$yellow" "$reset"
+			fi
 		fi
 
 		# Include root domain in the process
-		echo "$domain" >webs/full_webs.txt
-		cat webs/webs_all.txt >>webs/full_webs.txt
+		if ! printf "%b\n" "$domain" >webs/full_webs.txt; then
+			printf "%b[!] Failed to create webs/full_webs.txt.%b\n" "$bred" "$reset"
+		fi
+
+		if [[ -s "webs/webs_all.txt" ]]; then
+			if ! cat webs/webs_all.txt >>webs/full_webs.txt; then
+				printf "%b[!] Failed to append webs_all.txt to full_webs.txt.%b\n" "$bred" "$reset"
+			fi
+		fi
 
 		# Initialize the output file in the subdomains folder
-		>subdomains/cloudhunter_open_buckets.txt # Create or clear the output file
+		if ! : >subdomains/cloudhunter_open_buckets.txt; then
+			printf "%b[!] Failed to initialize cloudhunter_open_buckets.txt.%b\n" "$bred" "$reset"
+		fi
 
 		# Determine the CloudHunter permutations flag based on the config
 		PERMUTATION_FLAG=""
@@ -1387,85 +2423,118 @@ function s3buckets() {
 			PERMUTATION_FLAG=""
 			;;
 		*)
-			echo "Invalid value for CloudHunter_Permutations: $CLOUDHUNTER_PERMUTATION" >>"$LOGFILE"
-			exit 1
+			printf "%b[!] Invalid value for CLOUDHUNTER_PERMUTATION: %s.%b\n" "$bred" "$CLOUDHUNTER_PERMUTATION" "$reset"
+			return 1
 			;;
 		esac
 
 		# Debug: Print the full CloudHunter command
-		echo "CloudHunter command: python3 $tools/CloudHunter/cloudhunter.py $PERMUTATION_FLAG -r $tools/CloudHunter/resolvers.txt -t 50 [URL]" >>"$LOGFILE"
+		printf "CloudHunter command: python3 %s/cloudhunter.py %s -r %s/resolvers.txt -t 50 [URL]\n" "$tools/CloudHunter" "$PERMUTATION_FLAG" "$tools/CloudHunter" >>"$LOGFILE"
 
 		# Debug: Check if files exist
 		if [[ -f "$tools/CloudHunter/cloudhunter.py" ]]; then
-			echo "cloudhunter.py exists" >>"$LOGFILE"
+			printf "cloudhunter.py exists\n" >>"$LOGFILE"
 		else
-			echo "cloudhunter.py not found" >>"$LOGFILE"
+			printf "cloudhunter.py not found\n" >>"$LOGFILE"
 		fi
 
 		if [[ -n $PERMUTATION_FLAG ]]; then
-			if [[ -f ${PERMUTATION_FLAG#-p } ]]; then
-				echo "Permutations file exists" >>"$LOGFILE"
+			permutation_file="${PERMUTATION_FLAG#-p }"
+			if [[ -f $permutation_file ]]; then
+				printf "Permutations file exists\n" >>"$LOGFILE"
 			else
-				echo "Permutations file not found: ${PERMUTATION_FLAG#-p }" >>"$LOGFILE"
+				printf "Permutations file not found: %s\n" "$permutation_file" >>"$LOGFILE"
 			fi
 		fi
 
 		if [[ -f "$tools/CloudHunter/resolvers.txt" ]]; then
-			echo "resolvers.txt exists" >>"$LOGFILE"
+			printf "resolvers.txt exists\n" >>"$LOGFILE"
 		else
-			echo "resolvers.txt not found" >>"$LOGFILE"
+			printf "resolvers.txt not found\n" >>"$LOGFILE"
 		fi
 
 		# Run CloudHunter on each URL in webs/full_webs.txt and append the output to the file in the subdomains folder
 		while IFS= read -r url; do
-			echo "Processing URL: $url" >>"$LOGFILE"
+			printf "Processing URL: %s\n" "$url" >>"$LOGFILE"
 			(
-				cd "$tools/CloudHunter" || {
-					echo "Failed to cd to $tools/CloudHunter" >>"$LOGFILE"
+				if ! cd "$tools/CloudHunter"; then
+					printf "%b[!] Failed to cd to %s.%b\n" "$bred" "$tools/CloudHunter" "$reset"
 					return 1
-				}
-				python3 ./cloudhunter.py ${PERMUTATION_FLAG#-p } -r ./resolvers.txt -t 50 "$url"
+				fi
+				if ! python3 ./cloudhunter.py $PERMUTATION_FLAG -r ./resolvers.txt -t 50 "$url"; then
+					printf "%b[!] CloudHunter command failed for URL %s.%b\n" "$bred" "$url" "$reset"
+				fi
 			) >>"$dir/subdomains/cloudhunter_open_buckets.txt" 2>>"$LOGFILE"
 		done <webs/full_webs.txt
 
 		# Remove the full_webs.txt file after CloudHunter processing
-		rm webs/full_webs.txt
+		if ! rm webs/full_webs.txt; then
+			printf "%b[!] Failed to remove webs/full_webs.txt.%b\n" "$bred" "$reset"
+		fi
 
-		NUMOFLINES1=$(cat subdomains/cloudhunter_open_buckets.txt 2>>"$LOGFILE" | anew subdomains/cloud_assets.txt | wc -l)
-		if [[ $NUMOFLINES1 -gt 0 ]]; then
-			notification "${NUMOFLINES1} new cloud assets found" info
+		# Process CloudHunter results
+		if [[ -s "subdomains/cloudhunter_open_buckets.txt" ]]; then
+			if ! NUMOFLINES1=$(cat subdomains/cloudhunter_open_buckets.txt 2>>"$LOGFILE" | anew subdomains/cloud_assets.txt | wc -l); then
+				printf "%b[!] Failed to process cloudhunter_open_buckets.txt.%b\n" "$bred" "$reset"
+				NUMOFLINES1=0
+			fi
+			if [[ $NUMOFLINES1 -gt 0 ]]; then
+				notification "${NUMOFLINES1} new cloud assets found" "info"
+			fi
+		else
+			NUMOFLINES1=0
+			printf "%b[!] No cloudhunter_open_buckets.txt found or it is empty.%b\n" "$yellow" "$reset"
 		fi
 
-		NUMOFLINES2=$(cat .tmp/s3buckets.txt 2>>"$LOGFILE" | grep -aiv "not_exist" | grep -aiv "Warning:" | grep -aiv "invalid_name" | grep -aiv "^http" | awk 'NF' | anew subdomains/s3buckets.txt | sed '/^$/d' | wc -l)
-		if [[ $NUMOFLINES2 -gt 0 ]]; then
-			notification "${NUMOFLINES2} new S3 buckets found" info
+		# Process s3buckets results
+		if [[ -s ".tmp/s3buckets.txt" ]]; then
+			if ! NUMOFLINES2=$(cat .tmp/s3buckets.txt 2>>"$LOGFILE" | grep -aiv "not_exist" | grep -aiv "Warning:" | grep -aiv "invalid_name" | grep -aiv "^http" | awk 'NF' | anew subdomains/s3buckets.txt | sed '/^$/d' | wc -l); then
+				printf "%b[!] Failed to process s3buckets.txt.%b\n" "$bred" "$reset"
+				NUMOFLINES2=0
+			fi
+			if [[ $NUMOFLINES2 -gt 0 ]]; then
+				notification "${NUMOFLINES2} new S3 buckets found" "info"
+			fi
+		else
+			NUMOFLINES2=0
+			printf "%b[!] No s3buckets.txt found or it is empty.%b\n" "$yellow" "$reset"
 		fi
 
-		[ -s "subdomains/s3buckets.txt" ] && for i in $(cat subdomains/s3buckets.txt); do
-			trufflehog s3 --bucket="$i" -j 2>/dev/null | jq -c | anew -q subdomains/s3buckets_trufflehog.txt
-		done
+		# Run trufflehog for S3 buckets
+		if [[ -s "subdomains/s3buckets.txt" ]]; then
+			while IFS= read -r bucket; do
+				trufflehog s3 --bucket="$bucket" -j 2>/dev/null | jq -c | anew -q subdomains/s3buckets_trufflehog.txt
+			done <subdomains/s3buckets.txt
+		else
+			printf "%b[!] No S3 buckets to scan with trufflehog.%b\n" "$yellow" "$reset"
+		fi
 
 		# Run trufflehog for open buckets found by CloudHunter
-		[ -s "subdomains/cloudhunter_open_buckets.txt" ] && while IFS= read -r line; do
-			if echo "$line" | grep -q "Aws Cloud"; then
-				# AWS S3 Bucket
-				bucket_name=$(echo "$line" | awk '{print $3}')
-				trufflehog s3 --bucket="$bucket_name" -j 2>/dev/null | jq -c | anew -q subdomains/cloudhunter_buckets_trufflehog.txt
-			elif echo "$line" | grep -q "Google Cloud"; then
-				# Google Cloud Storage
-				bucket_name=$(echo "$line" | awk '{print $3}')
-				trufflehog gcs --bucket="$bucket_name" -j 2>/dev/null | jq -c | anew -q subdomains/cloudhunter_buckets_trufflehog.txt
-			fi
-		done <subdomains/cloudhunter_open_buckets.txt
+		if [[ -s "subdomains/cloudhunter_open_buckets.txt" ]]; then
+			while IFS= read -r line; do
+				if echo "$line" | grep -q "Aws Cloud"; then
+					# AWS S3 Bucket
+					bucket_name=$(echo "$line" | awk '{print $3}')
+					trufflehog s3 --bucket="$bucket_name" -j 2>/dev/null | jq -c | anew -q subdomains/cloudhunter_buckets_trufflehog.txt
+				elif echo "$line" | grep -q "Google Cloud"; then
+					# Google Cloud Storage
+					bucket_name=$(echo "$line" | awk '{print $3}')
+					trufflehog gcs --bucket="$bucket_name" -j 2>/dev/null | jq -c | anew -q subdomains/cloudhunter_buckets_trufflehog.txt
+				fi
+			done <subdomains/cloudhunter_open_buckets.txt
+		else
+			printf "%b[!] No cloudhunter_open_buckets.txt found or it is empty.%b\n" "$yellow" "$reset"
+		fi
 
-		end_func "Results are saved in subdomains/s3buckets.txt, subdomains/cloud_assets.txt, subdomains/s3buckets_trufflehog.txt, and subdomains/cloudhunter_buckets_trufflehog.txt" ${FUNCNAME[0]}
+		end_func "Results are saved in subdomains/s3buckets.txt, subdomains/cloud_assets.txt, subdomains/s3buckets_trufflehog.txt, and subdomains/cloudhunter_buckets_trufflehog.txt" "${FUNCNAME[0]}"
 	else
 		if [[ $S3BUCKETS == false ]]; then
-			printf "\n${yellow}[$(date +'%Y-%m-%d %H:%M:%S')] ${FUNCNAME[0]} skipped in this mode or defined in reconftw.cfg ${reset}\n"
-		elif [[ $domain =~ ^[0-9]+\.[0-9]+\.[0-9]+\.[0-9] ]]; then
-			return
+			printf "\n%b[%s] %s skipped in this mode or disabled in reconftw.cfg.%b\n" "$yellow" "$(date +'%Y-%m-%d %H:%M:%S')" "${FUNCNAME[0]}" "$reset"
+		elif [[ $domain =~ ^[0-9]+\.[0-9]+\.[0-9]+\.[0-9]+$ ]]; then
+			printf "\n%b[%s] Domain is an IP address; skipping S3 buckets search.%b\n" "$yellow" "$(date +'%Y-%m-%d %H:%M:%S')" "$reset"
+			return 0
 		else
-			printf "${yellow}[$(date +'%Y-%m-%d %H:%M:%S')] ${FUNCNAME[0]} is already processed, to force executing ${FUNCNAME[0]} delete\n    $called_fn_dir/.${FUNCNAME[0]} ${reset}\n\n"
+			printf "%b[%s] %s has already been processed. To force execution, delete:\n    %s/.%s%b\n\n" "$yellow" "$(date +'%Y-%m-%d %H:%M:%S')" "${FUNCNAME[0]}" "$called_fn_dir" "${FUNCNAME[0]}" "$reset"
 		fi
 	fi
 }
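
The trufflehog dispatch in s3buckets keys on the provider label in each CloudHunter line and takes the bucket name from the third whitespace-separated field. A self-contained dry-run of that dispatch, with sample lines shaped the way the loop assumes (bucket names hypothetical) and the trufflehog calls stubbed out:

    #!/usr/bin/env bash
    samples=(
        "Aws Cloud example-assets open"
        "Google Cloud example-backups open"
    )
    for line in "${samples[@]}"; do
        bucket_name=$(awk '{print $3}' <<<"$line")
        case "$line" in
        *"Aws Cloud"*) echo "would run: trufflehog s3 --bucket=$bucket_name" ;;
        *"Google Cloud"*) echo "would run: trufflehog gcs --bucket=$bucket_name" ;;
        esac
    done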
@@ -1476,30 +2545,64 @@ function s3buckets() {
 
 function geo_info() {
 
-	mkdir -p hosts
+	# Create necessary directories
+	if ! mkdir -p hosts; then
+		printf "%b[!] Failed to create hosts directory.%b\n" "$bred" "$reset"
+		return 1
+	fi
+
+	# Check if the function should run
 	if { [[ ! -f "$called_fn_dir/.${FUNCNAME[0]}" ]] || [[ $DIFF == true ]]; } && [[ $GEO_INFO == true ]]; then
-		start_func ${FUNCNAME[0]} "Running: ipinfo"
+		start_func "${FUNCNAME[0]}" "Running: ipinfo"
+
 		ips_file="${dir}/hosts/ips.txt"
-		if [ ! -f $ips_file ]; then
-			if ! [[ $domain =~ ^[0-9]+\.[0-9]+\.[0-9]+\.[0-9] ]]; then
-				[ -s "subdomains/subdomains_dnsregs.json" ] && cat subdomains/subdomains_dnsregs.json | jq -r 'try . | "\(.host) \(.a[0])"' | anew -q .tmp/subs_ips.txt
-				[ -s ".tmp/subs_ips.txt" ] && awk '{ print $2 " " $1}' .tmp/subs_ips.txt | sort -k2 -n | anew -q hosts/subs_ips_vhosts.txt
-				[ -s "hosts/subs_ips_vhosts.txt" ] && cat hosts/subs_ips_vhosts.txt | cut -d ' ' -f1 | grep -aEiv "^(127|10|169\.154|172\.1[6789]|172\.2[0-9]|172\.3[01]|192\.168)\." | grep -oE "\b([0-9]{1,3}\.){3}[0-9]{1,3}\b" | anew -q hosts/ips.txt
+
+		# Check if ips.txt exists or is empty; if so, attempt to generate it
+		if [[ ! -s $ips_file ]]; then
+			# Attempt to generate hosts/ips.txt
+			if ! [[ $domain =~ ^[0-9]+\.[0-9]+\.[0-9]+\.[0-9]+$ ]]; then
+				if [[ -s "subdomains/subdomains_dnsregs.json" ]]; then
+					jq -r 'try . | "\(.host) \(.a[0])"' "subdomains/subdomains_dnsregs.json" | anew -q .tmp/subs_ips.txt
+				fi
+				if [[ -s ".tmp/subs_ips.txt" ]]; then
+					awk '{ print $2 " " $1}' .tmp/subs_ips.txt | sort -k2 -n | anew -q hosts/subs_ips_vhosts.txt
+				fi
+				if [[ -s "hosts/subs_ips_vhosts.txt" ]]; then
+					cut -d ' ' -f1 hosts/subs_ips_vhosts.txt |
+						grep -aEiv "^(127|10|169\.254|172\.1[6-9]|172\.2[0-9]|172\.3[0-1]|192\.168)\." |
+						grep -oE "\b([0-9]{1,3}\.){3}[0-9]{1,3}\b" |
+						anew -q hosts/ips.txt
+				else
+					printf "%b[!] No valid IPs found in subs_ips_vhosts.txt.%b\n" "$yellow" "$reset"
+				fi
 			else
-				echo $domain | grep -aEiv "^(127|10|169\.154|172\.1[6789]|172\.2[0-9]|172\.3[01]|192\.168)\." | grep -oE "\b([0-9]{1,3}\.){3}[0-9]{1,3}\b" | anew -q hosts/ips.txt
+				printf "%b\n" "$domain" |
+					grep -aEiv "^(127|10|169\.254|172\.1[6-9]|172\.2[0-9]|172\.3[0-1]|192\.168)\." |
+					grep -oE "\b([0-9]{1,3}\.){3}[0-9]{1,3}\b" |
+					anew -q hosts/ips.txt
 			fi
+		fi
+
+		if [[ -s $ips_file ]]; then
+			if ! touch "${dir}/hosts/ipinfo.txt"; then
+				printf "%b[!] Failed to create ipinfo.txt.%b\n" "$bred" "$reset"
+			fi
+
+			while IFS= read -r ip; do
+				curl -s "https://ipinfo.io/widget/demo/$ip" >>"${dir}/hosts/ipinfo.txt"
+			done <"$ips_file"
 		else
-			touch ${dir}/hosts/ipinfo.txt
-			for ip in $(cat "$ips_file"); do
-				curl -s https://ipinfo.io/widget/demo/$ip >> ${dir}/hosts/ipinfo.txt
-			done
+			printf "%b[!] No IPs to process in %s.%b\n" "$yellow" "$ips_file" "$reset"
 		fi
-		end_func "Results are saved in hosts/ipinfo.txt" ${FUNCNAME[0]}
+
+		end_func "Results are saved in hosts/ipinfo.txt" "${FUNCNAME[0]}"
 	else
 		if [[ $GEO_INFO == false ]]; then
-			printf "\n${yellow}[$(date +'%Y-%m-%d %H:%M:%S')] ${FUNCNAME[0]} skipped in this mode or defined in reconftw.cfg ${reset}\n"
+			printf "\n%b[%s] %s skipped in this mode or disabled in reconftw.cfg.%b\n" \
+				"$yellow" "$(date +'%Y-%m-%d %H:%M:%S')" "${FUNCNAME[0]}" "$reset"
 		else
-			printf "${yellow}[$(date +'%Y-%m-%d %H:%M:%S')] ${FUNCNAME[0]} is already processed, to force executing ${FUNCNAME[0]} delete\n    $called_fn_dir/.${FUNCNAME[0]} ${reset}\n\n"
+			printf "%b[%s] %s has already been processed. To force execution, delete:\n    %s/.%s%b\n\n" \
+				"$yellow" "$(date +'%Y-%m-%d %H:%M:%S')" "${FUNCNAME[0]}" "$called_fn_dir" "${FUNCNAME[0]}" "$reset"
 		fi
 	fi
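
Because the loop appends one JSON document per IP with no separator, hosts/ipinfo.txt ends up as a stream of concatenated JSON values, which jq consumes natively. A sketch of summarizing it (the .data field names are an assumption about the widget response shape, not something this patch guarantees):

    jq -r 'try "\(.data.ip) \(.data.org) \(.data.country)"' hosts/ipinfo.txt 2>/dev/null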
 
@@ -1511,31 +2614,84 @@ function geo_info() {
 
 function webprobe_simple() {
 
-	mkdir -p {.tmp,webs,subdomains}
+	# Create necessary directories
+	if ! mkdir -p .tmp webs subdomains; then
+		printf "%b[!] Failed to create directories.%b\n" "$bred" "$reset"
+		return 1
+	fi
+
+	# Check if the function should run
 	if { [[ ! -f "$called_fn_dir/.${FUNCNAME[0]}" ]] || [[ $DIFF == true ]]; } && [[ $WEBPROBESIMPLE == true ]]; then
-		start_subfunc ${FUNCNAME[0]} "Running : Http probing $domain"
-		[[ -n $multi ]] && [ ! -f "$dir/subdomains/subdomains.txt" ] && echo "$domain" >"$dir/subdomains/subdomains.txt" && touch .tmp/web_full_info.txt webs/web_full_info.txt
+		start_subfunc "${FUNCNAME[0]}" "Running: HTTP probing $domain"
+
+		# If in multi mode and subdomains.txt doesn't exist, create it
+		if [[ -n $multi ]] && [[ ! -f "$dir/subdomains/subdomains.txt" ]]; then
+			printf "%b\n" "$domain" >"$dir/subdomains/subdomains.txt"
+			touch .tmp/web_full_info.txt webs/web_full_info.txt
+		fi
+
+		# Run httpx or axiom-scan
 		if [[ $AXIOM != true ]]; then
-			cat subdomains/subdomains.txt | httpx ${HTTPX_FLAGS} -no-color -json -random-agent -threads $HTTPX_THREADS -rl $HTTPX_RATELIMIT -retries 2 -timeout $HTTPX_TIMEOUT -o .tmp/web_full_info_probe.txt 2>>"$LOGFILE" >/dev/null
+			httpx ${HTTPX_FLAGS} -no-color -json -random-agent -threads "$HTTPX_THREADS" -rl "$HTTPX_RATELIMIT" \
+				-retries 2 -timeout "$HTTPX_TIMEOUT" -o .tmp/web_full_info_probe.txt \
+				<subdomains/subdomains.txt 2>>"$LOGFILE" >/dev/null
+		else
+			axiom-scan subdomains/subdomains.txt -m httpx ${HTTPX_FLAGS} -no-color -json -random-agent \
+				-threads "$HTTPX_THREADS" -rl "$HTTPX_RATELIMIT" -retries 2 -timeout "$HTTPX_TIMEOUT" \
+				-o .tmp/web_full_info_probe.txt $AXIOM_EXTRA_ARGS 2>>"$LOGFILE" >/dev/null
+		fi
+
+		# Merge web_full_info files
+		cat .tmp/web_full_info.txt .tmp/web_full_info_probe.txt webs/web_full_info.txt 2>>"$LOGFILE" |
+			jq -s 'try .' | jq 'try unique_by(.input)' | jq 'try .[]' 2>>"$LOGFILE" >.tmp/web_full_info_merged.txt
+		mv .tmp/web_full_info_merged.txt webs/web_full_info.txt 2>>"$LOGFILE"
+
+		# Extract URLs
+		if [[ -s "webs/web_full_info.txt" ]]; then
+			jq -r 'try .url' webs/web_full_info.txt 2>/dev/null |
+				grep "$domain" |
+				grep -E '^((http|https):\/\/)?([a-zA-Z0-9]([a-zA-Z0-9\-]*[a-zA-Z0-9])?\.)+[a-zA-Z]{1,}(\/.*)?$' |
+				sed 's/*.//' | anew -q .tmp/probed_tmp.txt
 		else
-			axiom-scan subdomains/subdomains.txt -m httpx ${HTTPX_FLAGS} -no-color -json -random-agent -threads $HTTPX_THREADS -rl $HTTPX_RATELIMIT -retries 2 -timeout $HTTPX_TIMEOUT -o .tmp/web_full_info_probe.txt $AXIOM_EXTRA_ARGS 2>>"$LOGFILE" >/dev/null
+			printf "%b[!] webs/web_full_info.txt does not exist or is empty.%b\n" "$yellow" "$reset"
+		fi
+
+		# Extract web info to plain text
+		if [[ -s "webs/web_full_info.txt" ]]; then
+			jq -r 'try . |"\(.url) [\(.status_code)] [\(.title)] [\(.webserver)] \(.tech)"' webs/web_full_info.txt |
+				grep "$domain" | anew -q webs/web_full_info_plain.txt
+		fi
+
+		# Remove out-of-scope entries
+		if [[ -s $outOfScope_file ]]; then
+			if ! deleteOutScoped "$outOfScope_file" .tmp/probed_tmp.txt; then
+				printf "%b[!] Failed to delete out-of-scope entries.%b\n" "$bred" "$reset"
+			fi
+		fi
+
+		# Count new websites
+		if ! NUMOFLINES=$(anew webs/webs.txt <.tmp/probed_tmp.txt 2>>"$LOGFILE" | sed '/^$/d' | wc -l); then
+			printf "%b[!] Failed to count new websites.%b\n" "$bred" "$reset"
+			NUMOFLINES=0
 		fi
-		cat .tmp/web_full_info.txt .tmp/web_full_info_probe.txt webs/web_full_info.txt 2>>"$LOGFILE" | jq -s 'try .' | jq 'try unique_by(.input)' | jq 'try .[]' 2>>"$LOGFILE" >webs/web_full_info.txt
-		[ -s "webs/web_full_info.txt" ] && cat webs/web_full_info.txt | jq -r 'try .url' 2>/dev/null | grep "$domain" | grep -E '^((http|https):\/\/)?([a-zA-Z0-9]([a-zA-Z0-9\-]*[a-zA-Z0-9])?\.)+[a-zA-Z]{1,}(\/.*)?$' | sed "s/*.//" | anew -q .tmp/probed_tmp.txt
-		[ -s "webs/web_full_info.txt" ] && cat webs/web_full_info.txt | jq -r 'try . |"\(.url) [\(.status_code)] [\(.title)] [\(.webserver)] \(.tech)"' | grep "$domain" | anew -q webs/web_full_info_plain.txt
-		[ -s "$outOfScope_file" ] && deleteOutScoped $outOfScope_file .tmp/probed_tmp.txt
-		NUMOFLINES=$(cat .tmp/probed_tmp.txt 2>>"$LOGFILE" | anew webs/webs.txt | sed '/^$/d' | wc -l)
+
+		# Update webs_all.txt
 		cat webs/webs.txt webs/webs_uncommon_ports.txt 2>/dev/null | anew -q webs/webs_all.txt
-		end_subfunc "${NUMOFLINES} new websites resolved" ${FUNCNAME[0]}
-		if [[ $PROXY == true ]] && [[ -n $proxy_url ]] && [[ $(cat webs/webs.txt | wc -l) -le $DEEP_LIMIT2 ]]; then
-			notification "Sending websites to proxy" info
-			ffuf -mc all -w webs/webs.txt -u FUZZ -replay-proxy $proxy_url 2>>"$LOGFILE" >/dev/null
+
+		end_subfunc "${NUMOFLINES} new websites resolved" "${FUNCNAME[0]}"
+
+		# Send websites to proxy if conditions met
+		if [[ $PROXY == true ]] && [[ -n $proxy_url ]] && [[ $(wc -l <webs/webs.txt) -le $DEEP_LIMIT2 ]]; then
+			notification "Sending websites to proxy" "info"
+			ffuf -mc all -w webs/webs.txt -u FUZZ -replay-proxy "$proxy_url" 2>>"$LOGFILE" >/dev/null
 		fi
+
 	else
 		if [[ $WEBPROBESIMPLE == false ]]; then
-			printf "\n${yellow}[$(date +'%Y-%m-%d %H:%M:%S')] ${FUNCNAME[0]} skipped in this mode or defined in reconftw.cfg ${reset}\n"
+			printf "\n%b[%s] %s skipped in this mode or disabled in reconftw.cfg.%b\n" \
+				"$yellow" "$(date +'%Y-%m-%d %H:%M:%S')" "${FUNCNAME[0]}" "$reset"
 		else
-			printf "${yellow}[$(date +'%Y-%m-%d %H:%M:%S')] ${FUNCNAME[0]} is already processed, to force executing ${FUNCNAME[0]} delete\n    $called_fn_dir/.${FUNCNAME[0]} ${reset}\n\n"
+			printf "%b[%s] %s has already been processed. To force execution, delete:\n    %s/.%s%b\n\n" \
+				"$yellow" "$(date +'%Y-%m-%d %H:%M:%S')" "${FUNCNAME[0]}" "$called_fn_dir" "${FUNCNAME[0]}" "$reset"
 		fi
 	fi
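
The merge step in webprobe_simple dedupes httpx JSON lines by their .input key: jq -s slurps the concatenated lines into a single array, unique_by(.input) keeps one record per probed host, and the trailing .[] unwraps the array back into JSON lines. A minimal reproduction with two overlapping files (records hypothetical):

    printf '%s\n' '{"input":"a.example.com","status_code":200}' >old.json
    printf '%s\n' '{"input":"a.example.com","status_code":301}' \
        '{"input":"b.example.com","status_code":200}' >new.json
    cat old.json new.json | jq -s 'unique_by(.input)' | jq '.[]'
    # one record survives per .input value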
 
@@ -1543,44 +2699,100 @@ function webprobe_simple() {
 
 function webprobe_full() {
 
-	mkdir -p {.tmp,webs,subdomains}
+	# Create necessary directories
+	if ! mkdir -p .tmp webs subdomains; then
+		printf "%b[!] Failed to create directories.%b\n" "$bred" "$reset"
+		return 1
+	fi
+
+	# Check if the function should run
 	if { [[ ! -f "$called_fn_dir/.${FUNCNAME[0]}" ]] || [[ $DIFF == true ]]; } && [[ $WEBPROBEFULL == true ]]; then
-		start_func ${FUNCNAME[0]} "Http probing non standard ports"
-		[[ -n $multi ]] && [ ! -f "$dir/subdomains/subdomains.txt" ] && echo "$domain" >"$dir/subdomains/subdomains.txt" && touch webs/webs.txt
+		start_func "${FUNCNAME[0]}" "HTTP Probing Non-Standard Ports"
+
+		# If in multi mode and subdomains.txt doesn't exist, create it
+		if [[ -n $multi ]] && [[ ! -f "$dir/subdomains/subdomains.txt" ]]; then
+			printf "%b\n" "$domain" >"$dir/subdomains/subdomains.txt"
+			touch webs/webs.txt
+		fi
+
+		# Check if subdomains.txt is non-empty
 		if [[ -s "subdomains/subdomains.txt" ]]; then
 			if [[ $AXIOM != true ]]; then
-				if [[ -s "subdomains/subdomains.txt" ]]; then
-					cat subdomains/subdomains.txt | httpx -follow-host-redirects -random-agent -status-code -p $UNCOMMON_PORTS_WEB -threads $HTTPX_UNCOMMONPORTS_THREADS -timeout $HTTPX_UNCOMMONPORTS_TIMEOUT -silent -retries 2 -title -web-server -tech-detect -location -no-color -json -o .tmp/web_full_info_uncommon.txt 2>>"$LOGFILE" >/dev/null
-				fi
+				# Run httpx on subdomains.txt
+				httpx -follow-host-redirects -random-agent -status-code \
+					-p "$UNCOMMON_PORTS_WEB" -threads "$HTTPX_UNCOMMONPORTS_THREADS" \
+					-timeout "$HTTPX_UNCOMMONPORTS_TIMEOUT" -silent -retries 2 \
+					-title -web-server -tech-detect -location -no-color -json \
+					-o .tmp/web_full_info_uncommon.txt <subdomains/subdomains.txt 2>>"$LOGFILE" >/dev/null
 			else
-				if [[ -s "subdomains/subdomains.txt" ]]; then
-					axiom-scan subdomains/subdomains.txt -m httpx -follow-host-redirects -H \"${HEADER}\" -status-code -p $UNCOMMON_PORTS_WEB -threads $HTTPX_UNCOMMONPORTS_THREADS -timeout $HTTPX_UNCOMMONPORTS_TIMEOUT -silent -retries 2 -title -web-server -tech-detect -location -no-color -json -o .tmp/web_full_info_uncommon.txt $AXIOM_EXTRA_ARGS 2>>"$LOGFILE" >/dev/null
-				fi
+				# Run axiom-scan with httpx module on subdomains.txt
+				axiom-scan subdomains/subdomains.txt -m httpx -follow-host-redirects \
+					-H "${HEADER}" -status-code -p "$UNCOMMON_PORTS_WEB" \
+					-threads "$HTTPX_UNCOMMONPORTS_THREADS" -timeout "$HTTPX_UNCOMMONPORTS_TIMEOUT" \
+					-silent -retries 2 -title -web-server -tech-detect -location -no-color -json \
+					-o .tmp/web_full_info_uncommon.txt $AXIOM_EXTRA_ARGS 2>>"$LOGFILE" >/dev/null
 			fi
+		else
+			printf "%b[!] subdomains/subdomains.txt does not exist or is empty.%b\n" "$yellow" "$reset"
 		fi
-		[ -s ".tmp/web_full_info_uncommon.txt" ] && cat .tmp/web_full_info_uncommon.txt | jq -r 'try .url' 2>/dev/null | grep "$domain" | grep -E '^((http|https):\/\/)?([a-zA-Z0-9]([a-zA-Z0-9\-]*[a-zA-Z0-9])?\.)+[a-zA-Z]{1,}(\/.*)?' | sed "s/*.//" | anew -q .tmp/probed_uncommon_ports_tmp.txt
-		[ -s ".tmp/web_full_info_uncommon.txt" ] && cat .tmp/web_full_info_uncommon.txt | jq -r 'try . |"\(.url) [\(.status_code)] [\(.title)] [\(.webserver)] \(.tech)"' | grep "$domain" | anew -q webs/web_full_info_uncommon_plain.txt
+
+		# Process web_full_info_uncommon.txt
 		if [[ -s ".tmp/web_full_info_uncommon.txt" ]]; then
-			if [[ $domain =~ ^[0-9]+\.[0-9]+\.[0-9]+\.[0-9] ]]; then
+			# Extract URLs
+			jq -r 'try .url' .tmp/web_full_info_uncommon.txt 2>/dev/null |
+				grep "$domain" |
+				grep -E '^((http|https):\/\/)?([a-zA-Z0-9]([a-zA-Z0-9\-]*[a-zA-Z0-9])?\.)+[a-zA-Z]{1,}(\/.*)?' |
+				sed 's/*.//' |
+				anew -q .tmp/probed_uncommon_ports_tmp.txt
+
+			# Extract plain web info
+			jq -r 'try . | "\(.url) [\(.status_code)] [\(.title)] [\(.webserver)] \(.tech)"' .tmp/web_full_info_uncommon.txt |
+				grep "$domain" |
+				anew -q webs/web_full_info_uncommon_plain.txt
+
+			# Update webs_full_info_uncommon.txt based on whether domain is IP
+			if [[ $domain =~ ^[0-9]+\.[0-9]+\.[0-9]+\.[0-9]+$ ]]; then
 				cat .tmp/web_full_info_uncommon.txt 2>>"$LOGFILE" | anew -q webs/web_full_info_uncommon.txt
 			else
-				cat .tmp/web_full_info_uncommon.txt 2>>"$LOGFILE" | grep "$domain" | anew -q webs/web_full_info_uncommon.txt
+				grep "$domain" .tmp/web_full_info_uncommon.txt | anew -q webs/web_full_info_uncommon.txt
 			fi
-		fi
-		NUMOFLINES=$(cat .tmp/probed_uncommon_ports_tmp.txt 2>>"$LOGFILE" | anew webs/webs_uncommon_ports.txt | sed '/^$/d' | wc -l)
-		notification "Uncommon web ports: ${NUMOFLINES} new websites" good
-		[ -s "webs/webs_uncommon_ports.txt" ] && cat webs/webs_uncommon_ports.txt
-		cat webs/webs.txt webs/webs_uncommon_ports.txt 2>/dev/null | anew -q webs/webs_all.txt
-		end_func "Results are saved in $domain/webs/webs_uncommon_ports.txt" ${FUNCNAME[0]}
-		if [[ $PROXY == true ]] && [[ -n $proxy_url ]] && [[ $(cat webs/webs_uncommon_ports.txt | wc -l) -le $DEEP_LIMIT2 ]]; then
-			notification "Sending websites with uncommon ports to proxy" info
-			ffuf -mc all -w webs/webs_uncommon_ports.txt -u FUZZ -replay-proxy $proxy_url 2>>"$LOGFILE" >/dev/null
+
+			# Count new websites
+			if ! NUMOFLINES=$(anew webs/webs_uncommon_ports.txt <.tmp/probed_uncommon_ports_tmp.txt | sed '/^$/d' | wc -l); then
+				printf "%b[!] Failed to count new websites.%b\n" "$bred" "$reset"
+				NUMOFLINES=0
+			fi
+
+			# Notify user
+			notification "Uncommon web ports: ${NUMOFLINES} new websites" "good"
+
+			# Display new uncommon ports websites
+			if [[ -s "webs/webs_uncommon_ports.txt" ]]; then
+				cat "webs/webs_uncommon_ports.txt"
+			else
+				printf "%b[!] No new websites with uncommon ports found.%b\n" "$yellow" "$reset"
+			fi
+
+			# Update webs_all.txt
+			cat webs/webs.txt webs/webs_uncommon_ports.txt 2>/dev/null | anew -q webs/webs_all.txt
+
+			end_func "Results are saved in $domain/webs/webs_uncommon_ports.txt" "${FUNCNAME[0]}"
+
+			# Send to proxy if conditions met
+			if [[ $PROXY == true ]] && [[ -n $proxy_url ]] && [[ $(wc -l <webs/webs_uncommon_ports.txt) -le $DEEP_LIMIT2 ]]; then
+				notification "Sending websites with uncommon ports to proxy" "info"
+				ffuf -mc all -w webs/webs_uncommon_ports.txt -u FUZZ -replay-proxy "$proxy_url" 2>>"$LOGFILE" >/dev/null
+			fi
+		else
+			printf "%b[!] .tmp/web_full_info_uncommon.txt does not exist or is empty.%b\n" "$yellow" "$reset"
 		fi
 	else
 		if [[ $WEBPROBEFULL == false ]]; then
-			printf "\n${yellow}[$(date +'%Y-%m-%d %H:%M:%S')] ${FUNCNAME[0]} skipped in this mode or defined in reconftw.cfg ${reset}\n"
+			printf "\n%b[%s] %s skipped in this mode or disabled in reconftw.cfg.%b\n" \
+				"$yellow" "$(date +'%Y-%m-%d %H:%M:%S')" "${FUNCNAME[0]}" "$reset"
 		else
-			printf "${yellow}[$(date +'%Y-%m-%d %H:%M:%S')] ${FUNCNAME[0]} is already processed, to force executing ${FUNCNAME[0]} delete\n    $called_fn_dir/.${FUNCNAME[0]} ${reset}\n\n"
+			printf "%b[%s] %s has already been processed. To force execution, delete:\n    %s/.%s%b\n\n" \
+				"$yellow" "$(date +'%Y-%m-%d %H:%M:%S')" "${FUNCNAME[0]}" "$called_fn_dir" "${FUNCNAME[0]}" "$reset"
 		fi
 	fi
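
webprobe_full re-probes the same host list against the alternate port set passed with -p. Reduced to its essentials, the probe looks like this (port list and file names illustrative only):

    UNCOMMON_PORTS_WEB="81,300,591,3000,8080-8090"
    httpx -p "$UNCOMMON_PORTS_WEB" -status-code -title -web-server -json -silent \
        -o web_info_uncommon.json <subdomains.txt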
 
@@ -1588,22 +2800,91 @@ function webprobe_full() {
 
 function screenshot() {
 
-	mkdir -p {webs,screenshots}
+	# Create necessary directories
+	if ! mkdir -p webs screenshots; then
+		printf "%b[!] Failed to create directories.%b\n" "$bred" "$reset"
+		return 1
+	fi
+
+	# Check if the function should run
 	if { [[ ! -f "$called_fn_dir/.${FUNCNAME[0]}" ]] || [[ $DIFF == true ]]; } && [[ $WEBSCREENSHOT == true ]]; then
-		start_func ${FUNCNAME[0]} "Web Screenshots"
-		[ ! -s "webs/webs_all.txt" ] && cat webs/webs.txt webs/webs_uncommon_ports.txt 2>/dev/null | anew -q webs/webs_all.txt
+		start_func "${FUNCNAME[0]}" "Web Screenshots"
+
+		# Combine webs.txt and webs_uncommon_ports.txt into webs_all.txt if it doesn't exist
+		if [[ ! -s "webs/webs_all.txt" ]]; then
+			cat webs/webs.txt webs/webs_uncommon_ports.txt 2>/dev/null | anew -q webs/webs_all.txt
+		fi
 
+		# Run nuclei or axiom-scan based on AXIOM flag
 		if [[ $AXIOM != true ]]; then
-			[ -s "webs/webs_all.txt" ] && cat webs/webs_all.txt | nuclei -headless -id screenshot -V dir='screenshots' 2>>"$LOGFILE"
+			if [[ -s "webs/webs_all.txt" ]]; then
+				nuclei -headless -id screenshot -V dir='screenshots' <webs/webs_all.txt 2>>"$LOGFILE"
+			else
+				printf "%b[!] webs/webs_all.txt does not exist or is empty.%b\n" "$yellow" "$reset"
+			fi
 		else
-			[ -s "webs/webs_all.txt" ] && axiom-scan webs/webs_all.txt -m nuclei-screenshots -o screenshots $AXIOM_EXTRA_ARGS 2>>"$LOGFILE" >/dev/null
+			if [[ -s "webs/webs_all.txt" ]]; then
+				axiom-scan webs/webs_all.txt -m nuclei-screenshots -o screenshots $AXIOM_EXTRA_ARGS 2>>"$LOGFILE" >/dev/null
+			else
+				printf "%b[!] webs/webs_all.txt does not exist or is empty.%b\n" "$yellow" "$reset"
+			fi
 		fi
-		end_func "Results are saved in $domain/screenshots folder" ${FUNCNAME[0]}
+
+		end_func "Results are saved in $domain/screenshots folder" "${FUNCNAME[0]}"
 	else
 		if [[ $WEBSCREENSHOT == false ]]; then
-			printf "\n${yellow}[$(date +'%Y-%m-%d %H:%M:%S')] ${FUNCNAME[0]} skipped in this mode or defined in reconftw.cfg ${reset}\n"
+			printf "\n%b[%s] %s skipped in this mode or disabled in reconftw.cfg.%b\n" \
+				"$yellow" "$(date +'%Y-%m-%d %H:%M:%S')" "${FUNCNAME[0]}" "$reset"
 		else
-			printf "${yellow}[$(date +'%Y-%m-%d %H:%M:%S')] ${FUNCNAME[0]} is already processed, to force executing ${FUNCNAME[0]} delete\n    $called_fn_dir/.${FUNCNAME[0]} ${reset}\n\n"
+			printf "%b[%s] %s has already been processed. To force execution, delete:\n    %s/.%s%b\n\n" \
+				"$yellow" "$(date +'%Y-%m-%d %H:%M:%S')" "${FUNCNAME[0]}" "$called_fn_dir" "${FUNCNAME[0]}" "$reset"
 		fi
 	fi
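
The screenshot step drives nuclei's headless "screenshot" template and passes the output folder through the template's dir variable. Against a single URL the invocation reduces to (URL illustrative):

    printf '%s\n' "https://example.com" | nuclei -headless -id screenshot -V dir='screenshots'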
 
@@ -1611,26 +2892,70 @@ function screenshot() {
 
 function virtualhosts() {
 
-	mkdir -p {.tmp/virtualhosts,virtualhosts,webs}
+	# Create necessary directories
+	if ! mkdir -p .tmp/virtualhosts virtualhosts webs; then
+		printf "%b[!] Failed to create directories.%b\n" "$bred" "$reset"
+		return 1
+	fi
+
+	# Check if the function should run
 	if { [[ ! -f "$called_fn_dir/.${FUNCNAME[0]}" ]] || [[ $DIFF == true ]]; } && [[ $VIRTUALHOSTS == true ]]; then
-		start_func ${FUNCNAME[0]} "Virtual Hosts dicovery"
-		[ ! -s "webs/webs_all.txt" ] && cat webs/webs.txt webs/webs_uncommon_ports.txt 2>/dev/null | anew -q webs/webs_all.txt
+		start_func "${FUNCNAME[0]}" "Virtual Hosts Discovery"
+
+		# Combine webs.txt and webs_uncommon_ports.txt into webs_all.txt if it doesn't exist
+		if [[ ! -s "webs/webs_all.txt" ]]; then
+			cat webs/webs.txt webs/webs_uncommon_ports.txt 2>/dev/null | anew -q webs/webs_all.txt
+		fi
+
+		# Proceed only if webs_all.txt exists and is non-empty
 		if [[ -s "webs/webs_all.txt" ]]; then
-			interlace -tL webs/webs_all.txt -threads ${INTERLACE_THREADS} -c "ffuf -ac -t ${FFUF_THREADS} -rate ${FFUF_RATELIMIT} -H \"${HEADER}\" -H \"Host: FUZZ._cleantarget_\" -w ${fuzz_wordlist} -maxtime ${FFUF_MAXTIME} -u  _target_ -of json -o _output_/_cleantarget_.json" -o $dir/.tmp/virtualhosts 2>>"$LOGFILE" >/dev/null
-			for sub in $(cat webs/webs_all.txt); do
-				sub_out=$(echo $sub | sed -e 's|^[^/]*//||' -e 's|/.*$||')
-				[ -s "$dir/.tmp/virtualhosts/${sub_out}.json" ] && cat $dir/.tmp/virtualhosts/${sub_out}.json | jq -r 'try .results[] | "\(.status) \(.length) \(.url)"' | sort | anew -q $dir/virtualhosts/${sub_out}.txt
+			# Run ffuf against every web server using interlace
+			interlace -tL webs/webs_all.txt -threads "$INTERLACE_THREADS" \
+				-c "ffuf -ac -t ${FFUF_THREADS} -rate ${FFUF_RATELIMIT} -H \"${HEADER}\" -H \"Host: FUZZ._cleantarget_\" -w ${fuzz_wordlist} -maxtime ${FFUF_MAXTIME} -u _target_ -of json -o _output_/_cleantarget_.json" \
+				-o "$dir/.tmp/virtualhosts" 2>>"$LOGFILE" >/dev/null
+
+			# Process ffuf output
+			while IFS= read -r sub; do
+				sub_out=$(echo "$sub" | sed -e 's|^[^/]*//||' -e 's|/.*$||')
+				json_file="$dir/.tmp/virtualhosts/${sub_out}.json"
+				txt_file="$dir/virtualhosts/${sub_out}.txt"
+
+				if [[ -s $json_file ]]; then
+					jq -r 'try .results[] | "\(.status) \(.length) \(.url)"' "$json_file" | sort | anew -q "$txt_file"
+				else
+					printf "%b[!] JSON file %s does not exist or is empty.%b\n" "$yellow" "$json_file" "$reset"
+				fi
-			done
+			done <webs/webs_all.txt
-			find $dir/virtualhosts/ -type f -iname "*.txt" -exec cat {} + 2>>"$LOGFILE" | anew -q $dir/virtualhosts/virtualhosts_full.txt
-			end_func "Results are saved in $domain/virtualhosts/*subdomain*.txt" ${FUNCNAME[0]}
+
+			# Merge all virtual host txt files into virtualhosts_full.txt
+			find "$dir/virtualhosts/" -type f -iname "*.txt" -exec cat {} + 2>>"$LOGFILE" | anew -q "$dir/virtualhosts/virtualhosts_full.txt"
+
+			end_func "Results are saved in $domain/virtualhosts/*subdomain*.txt" "${FUNCNAME[0]}"
+
 		else
-			end_func "No $domain/web/webs.txts file found, virtualhosts skipped " ${FUNCNAME[0]}
+			end_func "No webs/webs_all.txt file found, virtualhosts skipped." "${FUNCNAME[0]}"
+		fi
+
 	else
 		if [[ $VIRTUALHOSTS == false ]]; then
 			printf "\n${yellow}[$(date +'%Y-%m-%d %H:%M:%S')] ${FUNCNAME[0]} skipped in this mode or defined in reconftw.cfg ${reset}\n"
 		else
-			printf "${yellow}[$(date +'%Y-%m-%d %H:%M:%S')] ${FUNCNAME[0]} is already processed, to force executing ${FUNCNAME[0]} delete\n    $called_fn_dir/.${FUNCNAME[0]} ${reset}\n\n"
+			printf "%b[%s] %s has already been processed. To force execution, delete:\n    %s/.%s%b\n\n" \
+				"$yellow" "$(date +'%Y-%m-%d %H:%M:%S')" "${FUNCNAME[0]}" "$called_fn_dir" "${FUNCNAME[0]}" "$reset"
 		fi
 	fi
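
Interlace expands its placeholder tokens per target: _target_ becomes the URL read from the input list, _cleantarget_ a filesystem-safe form of it, and _output_ the directory given with -o, which is how each host ends up with its own ffuf JSON file. A reduced illustration of the templating with the ffuf command swapped for echo (targets hypothetical):

    printf '%s\n' "https://a.example.com" "https://b.example.com" >targets.txt
    interlace -tL targets.txt -threads 2 \
        -c "echo scanning _target_ into _output_/_cleantarget_.json" -o out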
 
@@ -1642,37 +2967,62 @@ function virtualhosts() {
 
 function favicon() {
 
-	mkdir -p hosts
-	if { [[ ! -f "$called_fn_dir/.${FUNCNAME[0]}" ]] || [[ $DIFF == true ]]; } && [[ $FAVICON == true ]] && ! [[ $domain =~ ^[0-9]+\.[0-9]+\.[0-9]+\.[0-9] ]]; then
-		start_func ${FUNCNAME[0]} "Favicon Ip Lookup"
-		pushd "${tools}/fav-up" >/dev/null || {
-			echo "Failed to cd to $dir in ${FUNCNAME[0]} @ line ${LINENO}"
-		}
+	# Create necessary directories
+	if ! mkdir -p hosts; then
+		printf "%b[!] Failed to create directories.%b\n" "$bred" "$reset"
+		return 1
+	fi
+
+	# Check if the function should run
+	if { [[ ! -f "$called_fn_dir/.${FUNCNAME[0]}" ]] || [[ $DIFF == true ]]; } &&
+		[[ $FAVICON == true ]] &&
+		! [[ $domain =~ ^[0-9]+\.[0-9]+\.[0-9]+\.[0-9]+$ ]]; then
+
+		start_func "${FUNCNAME[0]}" "Favicon IP Lookup"
+
+		# Navigate to the fav-up tool directory
+		if ! pushd "${tools}/fav-up" >/dev/null; then
+			printf "%b[!] Failed to change directory to %s in %s @ line %s.%b\n" \
+				"$bred" "${tools}/fav-up" "${FUNCNAME[0]}" "${LINENO}" "$reset"
+			return 1
+		fi
 
+		# Run the favicon IP lookup tool
 		python3 favUp.py -w "$domain" -sc -o favicontest.json 2>>"$LOGFILE" >/dev/null
+
+		# Process the results if favicontest.json exists and is not empty
 		if [[ -s "favicontest.json" ]]; then
-			cat favicontest.json | jq -r 'try .found_ips' 2>>"$LOGFILE" | grep -v "not-found" >favicontest.txt
+			jq -r 'try .found_ips' favicontest.json 2>>"$LOGFILE" |
+				grep -v "not-found" >favicontest.txt
+
+			# Replace '|' with newlines
 			sed -i "s/|/\n/g" favicontest.txt
-			cat favicontest.txt 2>>"$LOGFILE"
-			mv favicontest.txt $dir/hosts/favicontest.txt 2>>"$LOGFILE"
+
+			# Move the processed IPs to the hosts directory
+			mv favicontest.txt "$dir/hosts/favicontest.txt" 2>>"$LOGFILE"
+
+			# Remove the JSON file
 			rm -f favicontest.json 2>>"$LOGFILE"
+		else
+			printf "%b[!] favicontest.json does not exist or is empty.%b\n" "$yellow" "$reset"
 		fi
 
-		popd >/dev/null || {
-			echo "Failed to popd in ${FUNCNAME[0]} @ line ${LINENO}"
-		}
-		end_func "Results are saved in hosts/favicontest.txt" ${FUNCNAME[0]}
+		# Return to the original directory
+		if ! popd >/dev/null; then
+			printf "%b[!] Failed to return to the previous directory in %s @ line %s.%b\n" \
+				"$bred" "${FUNCNAME[0]}" "${LINENO}" "$reset"
+		fi
+
+		end_func "Results are saved in hosts/favicontest.txt" "${FUNCNAME[0]}"
+
 	else
 		if [[ $FAVICON == false ]]; then
-			printf "\n${yellow}[$(date +'%Y-%m-%d %H:%M:%S')] ${FUNCNAME[0]} skipped in this mode or defined in reconftw.cfg ${reset}\n"
-		elif [[ $domain =~ ^[0-9]+\.[0-9]+\.[0-9]+\.[0-9] ]]; then
+			printf "\n%b[%s] %s skipped in this mode or disabled in reconftw.cfg.%b\n" \
+				"$yellow" "$(date +'%Y-%m-%d %H:%M:%S')" "${FUNCNAME[0]}" "$reset"
+		elif [[ $domain =~ ^[0-9]+\.[0-9]+\.[0-9]+\.[0-9]+$ ]]; then
+			# Domain is an IP, do nothing
 			return
 		else
-			if [[ $FAVICON == false ]]; then
-				printf "\n${yellow}[$(date +'%Y-%m-%d %H:%M:%S')] ${FUNCNAME[0]} skipped in this mode or defined in reconftw.cfg ${reset}\n"
-			else
-				printf "${yellow}[$(date +'%Y-%m-%d %H:%M:%S')] ${FUNCNAME[0]} is already processed, to force executing ${FUNCNAME[0]} delete\n    $called_fn_dir/.${FUNCNAME[0]} ${reset}\n\n"
-			fi
+			printf "%b[%s] %s has already been processed. To force execution, delete:\n    %s/.%s%b\n\n" \
+				"$yellow" "$(date +'%Y-%m-%d %H:%M:%S')" "${FUNCNAME[0]}" "$called_fn_dir" "${FUNCNAME[0]}" "$reset"
 		fi
 	fi
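
fav-up reports candidate origin IPs as one pipe-separated found_ips string, which the code above extracts with jq and then splits with sed. The split can also be done entirely in jq; a sketch using the same field name the patch relies on:

    jq -r 'try .found_ips | select(. != "not-found") | split("|")[]' favicontest.json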
 
@@ -1680,67 +3030,148 @@ function favicon() {
 
 function portscan() {
 
-	mkdir -p {.tmp,subdomains,hosts}
+	# Create necessary directories
+	if ! mkdir -p .tmp subdomains hosts; then
+		printf "%b[!] Failed to create directories.%b\n" "$bred" "$reset"
+		return 1
+	fi
+
+	# Check if the function should run
 	if { [[ ! -f "$called_fn_dir/.${FUNCNAME[0]}" ]] || [[ $DIFF == true ]]; } && [[ $PORTSCANNER == true ]]; then
-		start_func ${FUNCNAME[0]} "Port scan"
-		if ! [[ $domain =~ ^[0-9]+\.[0-9]+\.[0-9]+\.[0-9] ]]; then
-			[ -s "subdomains/subdomains_dnsregs.json" ] && cat subdomains/subdomains_dnsregs.json | jq -r 'try . | "\(.host) \(.a[0])"' | anew -q .tmp/subs_ips.txt
-			[ -s ".tmp/subs_ips.txt" ] && awk '{ print $2 " " $1}' .tmp/subs_ips.txt | sort -k2 -n | anew -q hosts/subs_ips_vhosts.txt
-			[ -s "hosts/subs_ips_vhosts.txt" ] && cat hosts/subs_ips_vhosts.txt | cut -d ' ' -f1 | grep -aEiv "^(127|10|169\.154|172\.1[6789]|172\.2[0-9]|172\.3[01]|192\.168)\." | grep -oE "\b([0-9]{1,3}\.){3}[0-9]{1,3}\b" | anew -q hosts/ips.txt
-		else
-			echo $domain | grep -aEiv "^(127|10|169\.154|172\.1[6789]|172\.2[0-9]|172\.3[01]|192\.168)\." | grep -oE "\b([0-9]{1,3}\.){3}[0-9]{1,3}\b" | anew -q hosts/ips.txt
-		fi
-		[ ! -s "hosts/cdn_providers.txt" ] && cat hosts/ips.txt 2>/dev/null | cdncheck -silent -resp -cdn -waf -nc 2>/dev/null >hosts/cdn_providers.txt
-		[ -s "hosts/ips.txt" ] && comm -23 <(cat hosts/ips.txt | sort -u) <(cat hosts/cdn_providers.txt | cut -d'[' -f1 | sed 's/[[:space:]]*$//' | sort -u) | grep -aEiv "^(127|10|169\.154|172\.1[6789]|172\.2[0-9]|172\.3[01]|192\.168)\." | grep -oE "\b([0-9]{1,3}\.){3}[0-9]{1,3}\b" | sort -u | anew -q .tmp/ips_nocdn.txt
-		printf "${bblue}\n[$(date +'%Y-%m-%d %H:%M:%S')] Resolved IP addresses (No CDN) ${reset}\n\n"
-		[ -s ".tmp/ips_nocdn.txt" ] && cat .tmp/ips_nocdn.txt | sort
-		printf "${bblue}\n[$(date +'%Y-%m-%d %H:%M:%S')] Scanning ports... ${reset}\n\n"
+		start_func "${FUNCNAME[0]}" "Port scan"
+
+		# Determine if domain is IP address or domain name
+		if ! [[ $domain =~ ^[0-9]+\.[0-9]+\.[0-9]+\.[0-9]+$ ]]; then
+			# Not an IP address
+			if [[ -s "subdomains/subdomains_dnsregs.json" ]]; then
+				# Extract host and IP from JSON
+				jq -r 'try . | "\(.host) \(.a[0])"' "subdomains/subdomains_dnsregs.json" | anew -q .tmp/subs_ips.txt
+			else
+				printf "%b[!] subdomains_dnsregs.json does not exist or is empty.%b\n" "$yellow" "$reset"
+			fi
+
+			if [[ -s ".tmp/subs_ips.txt" ]]; then
+				# Reorder fields and sort
+				awk '{ print $2 " " $1}' ".tmp/subs_ips.txt" | sort -k2 -n | anew -q hosts/subs_ips_vhosts.txt
+			else
+				printf "%b[!] No IPs found in subs_ips.txt.%b\n" "$yellow" "$reset"
+			fi
+
+			if [[ -s "hosts/subs_ips_vhosts.txt" ]]; then
+				# Extract IPs, filter out private ranges
+				awk '{print $1}' "hosts/subs_ips_vhosts.txt" | grep -aEiv "^(127|10|169\.254|172\.1[6-9]|172\.2[0-9]|172\.3[0-1]|192\.168)\." | grep -oE '\b([0-9]{1,3}\.){3}[0-9]{1,3}\b' | anew -q hosts/ips.txt
+			else
+				printf "%b[!] No data in subs_ips_vhosts.txt.%b\n" "$yellow" "$reset"
+			fi
+
+		else
+			# Domain is an IP address
+			printf "%b\n" "$domain" | grep -aEiv "^(127|10|169\.254|172\.1[6-9]|172\.2[0-9]|172\.3[0-1]|192\.168)\." | grep -oE '\b([0-9]{1,3}\.){3}[0-9]{1,3}\b' | anew -q hosts/ips.txt
+		fi
+
+		# Check for CDN providers
+		if [[ ! -s "hosts/cdn_providers.txt" ]]; then
+			if [[ -s "hosts/ips.txt" ]]; then
+				cdncheck -silent -resp -cdn -waf -nc <hosts/ips.txt 2>/dev/null >hosts/cdn_providers.txt
+			else
+				printf "%b[!] No IPs found in hosts/ips.txt.%b\n" "$yellow" "$reset"
+			fi
+		fi
+
+		if [[ -s "hosts/ips.txt" ]]; then
+			# Remove CDN IPs
+			comm -23 <(sort -u hosts/ips.txt) <(cut -d'[' -f1 hosts/cdn_providers.txt | sed 's/[[:space:]]*$//' | sort -u) \
+				| grep -aEiv "^(127|10|169\.254|172\.1[6-9]|172\.2[0-9]|172\.3[0-1]|192\.168)\." | grep -oE '\b([0-9]{1,3}\.){3}[0-9]{1,3}\b' \
+				| sort -u | anew -q .tmp/ips_nocdn.txt
+		else
+			printf "%b[!] No IPs to process in hosts/ips.txt.%b\n" "$yellow" "$reset"
+		fi
+
+		# Display resolved IPs without CDN
+		printf "%b\n[%s] Resolved IP addresses (No CDN):%b\n\n" "$bblue" "$(date +'%Y-%m-%d %H:%M:%S')" "$reset"
+		if [[ -s ".tmp/ips_nocdn.txt" ]]; then
+			sort ".tmp/ips_nocdn.txt"
+		else
+			printf "%b[!] No IPs found after CDN filtering.%b\n" "$yellow" "$reset"
+		fi
+
+		printf "%b\n[%s] Scanning ports...%b\n\n" "$bblue" "$(date +'%Y-%m-%d %H:%M:%S')" "$reset"
+
 		ips_file="${dir}/hosts/ips.txt"
-		if [ "$PORTSCAN_PASSIVE" = true ]; then
-			if [ ! -f $ips_file ]; then
-				echo "File $ips_file does not exist."
+
+		if [[ $PORTSCAN_PASSIVE == true ]]; then
+			if [[ ! -f $ips_file ]]; then
+				printf "%b[!] File %s does not exist.%b\n" "$bred" "$ips_file" "$reset"
 			else
-				for cip in $(cat "$ips_file"); do
-					json_result=$(curl -s https://internetdb.shodan.io/${cip})
-					json_array+=("$json_result")
-				done
+				json_array=()
+				while IFS= read -r cip; do
+					if ! json_result=$(curl -s "https://internetdb.shodan.io/${cip}"); then
+						printf "%b[!] Failed to retrieve data for IP %s.%b\n" "$bred" "$cip" "$reset"
+					else
+						json_array+=("$json_result")
+					fi
+				done <"$ips_file"
 				formatted_json="["
 				for ((i = 0; i < ${#json_array[@]}; i++)); do
-					formatted_json+="$(echo ${json_array[i]} | tr -d '\n')"
+					formatted_json+="$(echo "${json_array[i]}" | tr -d '\n')"
 					if [ $i -lt $((${#json_array[@]} - 1)) ]; then
 						formatted_json+=", "
 					fi
 				done
 				formatted_json+="]"
-				echo "$formatted_json" >"${dir}/hosts/portscan_shodan.txt"
+				if ! echo "$formatted_json" >"${dir}/hosts/portscan_shodan.txt"; then
+					printf "%b[!] Failed to write portscan_shodan.txt.%b\n" "$bred" "$reset"
+				fi
 			fi
-		else
-			printf "${yellow}[$(date +'%Y-%m-%d %H:%M:%S')] ${FUNCNAME[0]} is already processed, to force executing ${FUNCNAME[0]} delete\n    $called_fn_dir/.${FUNCNAME[0]} ${reset}\n\n"
 		fi
+
 		if [[ $PORTSCAN_PASSIVE == true ]] && [[ ! -f "hosts/portscan_passive.txt" ]] && [[ -s ".tmp/ips_nocdn.txt" ]]; then
 			smap -iL .tmp/ips_nocdn.txt >hosts/portscan_passive.txt
 		fi
+
 		if [[ $PORTSCAN_ACTIVE == true ]]; then
 			if [[ $AXIOM != true ]]; then
-				[ -s ".tmp/ips_nocdn.txt" ] && $SUDO nmap ${PORTSCAN_ACTIVE_OPTIONS} -iL .tmp/ips_nocdn.txt -oA hosts/portscan_active 2>>"$LOGFILE" >/dev/null
+				if [[ -s ".tmp/ips_nocdn.txt" ]]; then
+					$SUDO nmap $PORTSCAN_ACTIVE_OPTIONS -iL .tmp/ips_nocdn.txt -oA hosts/portscan_active 2>>"$LOGFILE" >/dev/null
+				else
+					printf "%b[!] No IPs to scan for active port scan.%b\n" "$yellow" "$reset"
+				fi
 			else
-				[ -s ".tmp/ips_nocdn.txt" ] && axiom-scan .tmp/ips_nocdn.txt -m nmapx ${PORTSCAN_ACTIVE_OPTIONS} -oA hosts/portscan_active $AXIOM_EXTRA_ARGS 2>>"$LOGFILE" >/dev/null
+				if [[ -s ".tmp/ips_nocdn.txt" ]]; then
+					axiom-scan .tmp/ips_nocdn.txt -m nmapx $PORTSCAN_ACTIVE_OPTIONS \
+						-oA hosts/portscan_active $AXIOM_EXTRA_ARGS 2>>"$LOGFILE" >/dev/null
+				else
+					printf "%b[!] No IPs to scan for active port scan.%b\n" "$yellow" "$reset"
+				fi
 			fi
 		fi
 
-		[ -s "hosts/portscan_active.xml" ] && cat hosts/portscan_active.xml | nmapurls 2>>"$LOGFILE" | anew -q hosts/webs.txt
+		if [[ -s "hosts/portscan_active.xml" ]]; then
+			nmapurls <hosts/portscan_active.xml 2>>"$LOGFILE" | anew -q hosts/webs.txt
+		else
+			printf "%b[!] No portscan_active.xml found.%b\n" "$yellow" "$reset"
+		fi
 
-		if [ -s "hosts/webs.txt" ]; then
-			NUMOFLINES=$(cat hosts/webs.txt | wc -l)
-			notification "Webs detected from port scan: ${NUMOFLINES} new websites" good
+		if [[ -s "hosts/webs.txt" ]]; then
+			if ! NUMOFLINES=$(wc -l <hosts/webs.txt); then
+				printf "%b[!] Failed to count lines in hosts/webs.txt.%b\n" "$bred" "$reset"
+				NUMOFLINES=0
+			fi
+			notification "Webs detected from port scan: ${NUMOFLINES} new websites" "good"
 			cat hosts/webs.txt
+		else
+			printf "%b[!] No webs detected from port scan.%b\n" "$yellow" "$reset"
 		fi
-		end_func "Results are saved in hosts/portscan_[passive|active|shodan].[txt|xml]" ${FUNCNAME[0]}
+
+		end_func "Results are saved in hosts/portscan_[passive|active|shodan].[txt|xml]" "${FUNCNAME[0]}"
+
 	else
 		if [[ $PORTSCANNER == false ]]; then
-			printf "\n${yellow}[$(date +'%Y-%m-%d %H:%M:%S')] ${FUNCNAME[0]} skipped in this mode or defined in reconftw.cfg ${reset}\n"
+			printf "\n%b[%s] %s skipped in this mode or disabled in reconftw.cfg.%b\n" \
+				"$yellow" "$(date +'%Y-%m-%d %H:%M:%S')" "${FUNCNAME[0]}" "$reset"
 		else
-			printf "${yellow}[$(date +'%Y-%m-%d %H:%M:%S')] ${FUNCNAME[0]} is already processed, to force executing ${FUNCNAME[0]} delete\n    $called_fn_dir/.${FUNCNAME[0]} ${reset}\n\n"
+			printf "%b[%s] %s has already been processed. To force execution, delete:\n    %s/.%s%b\n\n" \
+				"$yellow" "$(date +'%Y-%m-%d %H:%M:%S')" "${FUNCNAME[0]}" "$called_fn_dir" "${FUNCNAME[0]}" "$reset"
 		fi
 	fi
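
The passive branch builds the Shodan InternetDB array by string concatenation. An equivalent sketch that lets jq slurp the per-IP documents instead, against the same internetdb.shodan.io endpoint the patch queries:

    while IFS= read -r cip; do
        curl -s "https://internetdb.shodan.io/${cip}"
        printf '\n'
    done <hosts/ips.txt | jq -s '.' >hosts/portscan_shodan.txt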
 
@@ -1748,17 +3179,48 @@ function portscan() {
 
 function cdnprovider() {
 
-	mkdir -p {.tmp,subdomains,hosts}
-	if { [[ ! -f "$called_fn_dir/.${FUNCNAME[0]}" ]] || [[ $DIFF == true ]]; } && [[ $CDN_IP == true ]]; then
-		start_func ${FUNCNAME[0]} "CDN provider check"
-		[ -s "subdomains/subdomains_dnsregs.json" ] && cat subdomains/subdomains_dnsregs.json | jq -r 'try . | .a[]' | grep -aEiv "^(127|10|169\.154|172\.1[6789]|172\.2[0-9]|172\.3[01]|192\.168)\." | grep -oE "\b([0-9]{1,3}\.){3}[0-9]{1,3}\b" | sort -u >.tmp/ips_cdn.txt
-		[ -s ".tmp/ips_cdn.txt" ] && cat .tmp/ips_cdn.txt | cdncheck -silent -resp -nc | anew -q $dir/hosts/cdn_providers.txt
-		end_func "Results are saved in hosts/cdn_providers.txt" ${FUNCNAME[0]}
+	# Create necessary directories
+	if ! mkdir -p .tmp subdomains hosts; then
+		printf "%b[!] Failed to create directories.%b\n" "$bred" "$reset"
+		return 1
+	fi
+
+	# Check if the function should run
+	if { [[ ! -f "$called_fn_dir/.${FUNCNAME[0]}" ]] || [[ $DIFF == true ]]; } &&
+		[[ $CDN_IP == true ]] &&
+		! [[ $domain =~ ^[0-9]+\.[0-9]+\.[0-9]+\.[0-9]+$ ]]; then
+
+		start_func "${FUNCNAME[0]}" "CDN Provider Check"
+
+		# Check if subdomains_dnsregs.json exists and is not empty
+		if [[ -s "subdomains/subdomains_dnsregs.json" ]]; then
+			# Extract IPs from .a[] fields, exclude private IPs, extract IPs, sort uniquely
+			jq -r 'try . | .a[]' "subdomains/subdomains_dnsregs.json" |
+				grep -aEiv "^(127|10|169\.254|172\.(1[6-9]|2[0-9]|3[01])|192\.168)\." |
+				grep -oE "\b([0-9]{1,3}\.){3}[0-9]{1,3}\b" |
+				sort -u >.tmp/ips_cdn.txt
+		else
+			printf "%b[!] subdomains/subdomains_dnsregs.json does not exist or is empty.%b\n" "$yellow" "$reset"
+		fi
+
+		# Check if ips_cdn.txt exists and is not empty
+		if [[ -s ".tmp/ips_cdn.txt" ]]; then
+			# Run cdncheck on the IPs and save to cdn_providers.txt
+			cdncheck -silent -resp -nc <.tmp/ips_cdn.txt | anew -q "$dir/hosts/cdn_providers.txt"
+		else
+			printf "%b[!] No IPs found for CDN provider check.%b\n" "$yellow" "$reset"
+		fi
+
+		end_func "Results are saved in hosts/cdn_providers.txt" "${FUNCNAME[0]}"
+
 	else
 		if [[ $CDN_IP == false ]]; then
-			printf "\n${yellow}[$(date +'%Y-%m-%d %H:%M:%S')] ${FUNCNAME[0]} skipped in this mode or defined in reconftw.cfg ${reset}\n"
+			printf "\n%b[%s] %s skipped in this mode or disabled in reconftw.cfg.%b\n" \
+				"$yellow" "$(date +'%Y-%m-%d %H:%M:%S')" "${FUNCNAME[0]}" "$reset"
+		elif [[ $domain =~ ^[0-9]+\.[0-9]+\.[0-9]+\.[0-9]+$ ]]; then
+			# Domain is an IP, do nothing
+			return
 		else
-			printf "${yellow}[$(date +'%Y-%m-%d %H:%M:%S')] ${FUNCNAME[0]} is already processed, to force executing ${FUNCNAME[0]} delete\n    $called_fn_dir/.${FUNCNAME[0]} ${reset}\n\n"
+			printf "%b[%s] %s has already been processed. To force execution, delete:\n    %s/.%s%b\n\n" \
+				"$yellow" "$(date +'%Y-%m-%d %H:%M:%S')" "${FUNCNAME[0]}" "$called_fn_dir" "${FUNCNAME[0]}" "$reset"
 		fi
 	fi
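
portscan drops CDN-fronted addresses with comm -23 against the cdn_providers.txt list this function maintains: given two sorted, deduplicated lists, comm -23 prints only the lines unique to the first. A minimal reproduction (addresses hypothetical):

    printf '%s\n' 1.2.3.4 5.6.7.8 9.9.9.9 | sort -u >all_ips.txt
    printf '%s\n' 5.6.7.8 | sort -u >cdn_ips.txt
    comm -23 all_ips.txt cdn_ips.txt
    # prints 1.2.3.4 and 9.9.9.9, the directly scannable hosts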
 
@@ -1770,124 +3232,246 @@ function cdnprovider() {
 
 function waf_checks() {
 
-	mkdir -p {.tmp,webs}
+	# Create necessary directories
+	if ! mkdir -p .tmp webs; then
+		printf "%b[!] Failed to create directories.%b\n" "$bred" "$reset"
+		return 1
+	fi
+
+	# Check if the function should run
 	if { [[ ! -f "$called_fn_dir/.${FUNCNAME[0]}" ]] || [[ $DIFF == true ]]; } && [[ $WAF_DETECTION == true ]]; then
-		start_func ${FUNCNAME[0]} "Website's WAF detection"
-		[ ! -s "webs/webs_all.txt" ] && cat webs/webs.txt webs/webs_uncommon_ports.txt 2>/dev/null | anew -q webs/webs_all.txt
+		start_func "${FUNCNAME[0]}" "Website WAF Detection"
+
+		# Combine webs.txt and webs_uncommon_ports.txt into webs_all.txt if it doesn't exist
+		if [[ ! -s "webs/webs_all.txt" ]]; then
+			cat webs/webs.txt webs/webs_uncommon_ports.txt 2>/dev/null | anew -q webs/webs_all.txt
+		fi
+
+		# Proceed only if webs_all.txt exists and is non-empty
 		if [[ -s "webs/webs_all.txt" ]]; then
 			if [[ $AXIOM != true ]]; then
-				wafw00f -i webs/webs_all.txt -o .tmp/wafs.txt 2>>"$LOGFILE" >/dev/null
+				# Run wafw00f on webs_all.txt
+				wafw00f -i "webs/webs_all.txt" -o ".tmp/wafs.txt" 2>>"$LOGFILE" >/dev/null
+			else
+				# Run axiom-scan with wafw00f module on webs_all.txt
+				axiom-scan "webs/webs_all.txt" -m wafw00f -o ".tmp/wafs.txt" $AXIOM_EXTRA_ARGS 2>>"$LOGFILE" >/dev/null
+			fi
+
+			# Process wafs.txt if it exists and is not empty
+			if [[ -s ".tmp/wafs.txt" ]]; then
+				# Format the wafs.txt file
+				sed -e 's/^[ \t]*//' -e 's/ \+ /\t/g' -e '/(None)/d' ".tmp/wafs.txt" | tr -s "\t" ";" >"webs/webs_wafs.txt"
+
+				# Count the number of websites protected by WAF
+				if ! NUMOFLINES=$(sed '/^$/d' "webs/webs_wafs.txt" 2>>"$LOGFILE" | wc -l); then
+					printf "%b[!] Failed to count lines in webs_wafs.txt.%b\n" "$bred" "$reset"
+					NUMOFLINES=0
+				fi
+
+				# Send a notification about the number of WAF-protected websites
+				notification "${NUMOFLINES} websites protected by WAF" "info"
+
+				# End the function with a success message
+				end_func "Results are saved in webs/webs_wafs.txt" "${FUNCNAME[0]}"
 			else
-				axiom-scan webs/webs_all.txt -m wafw00f -o .tmp/wafs.txt $AXIOM_EXTRA_ARGS 2>>"$LOGFILE" >/dev/null
+				# End the function indicating no results were found
+				end_func "No results found" "${FUNCNAME[0]}"
+			fi
+		else
+			# End the function indicating there are no websites to scan
+			end_func "No websites to scan" "${FUNCNAME[0]}"
+		fi
+	else
+		# Handle cases where WAF_DETECTION is false or the function has already been processed
+		if [[ $WAF_DETECTION == false ]]; then
+			printf "\n%b[%s] %s skipped in this mode or disabled in reconftw.cfg.%b\n" \
+				"$yellow" "$(date +'%Y-%m-%d %H:%M:%S')" "${FUNCNAME[0]}" "$reset"
+		elif [[ $domain =~ ^[0-9]+\.[0-9]+\.[0-9]+\.[0-9]+$ ]]; then
+			# Domain is an IP address; skip the function
+			return
+		else
+			printf "%b[%s] %s has already been processed. To force execution, delete:\n    %s/.%s%b\n\n" \
+				"$yellow" "$(date +'%Y-%m-%d %H:%M:%S')" "${FUNCNAME[0]}" "$called_fn_dir" "${FUNCNAME[0]}" "$reset"
+		fi
+	fi
+
+}
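
The sed/tr pipeline in waf_checks turns wafw00f's space-aligned console listing into semicolon-delimited rows and drops unprotected "(None)" entries. A reproduction against fabricated wafw00f-style lines:

    printf '%s\n' \
        "https://a.example.com    Cloudflare (Cloudflare Inc.)" \
        "https://b.example.com    (None)" |
        sed -e 's/^[ \t]*//' -e 's/ \+ /\t/g' -e '/(None)/d' | tr -s "\t" ";"
    # -> https://a.example.com;Cloudflare (Cloudflare Inc.)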
+
+function nuclei_check() {
+
+	# Create necessary directories
+	if ! mkdir -p .tmp webs subdomains nuclei_output; then
+		printf "%b[!] Failed to create directories.%b\n" "$bred" "$reset"
+		return 1
+	fi
+
+	# Check if the function should run
+	if { [[ ! -f "$called_fn_dir/.${FUNCNAME[0]}" ]] || [[ $DIFF == true ]]; } && [[ $NUCLEICHECK == true ]]; then
+		start_func "${FUNCNAME[0]}" "Template-based Web Scanner"
+
+		# Update nuclei templates
+		nuclei -update 2>>"$LOGFILE" >/dev/null
+
+		# Handle multi mode and initialize subdomains.txt if necessary
+		if [[ -n $multi ]] && [[ ! -f "$dir/subdomains/subdomains.txt" ]]; then
+			printf "%b\n" "$domain" >"$dir/subdomains/subdomains.txt"
+			touch webs/webs.txt webs/webs_uncommon_ports.txt
+		fi
+
+		# Combine webs.txt and webs_uncommon_ports.txt into webs_all.txt if it doesn't exist
+		if [[ ! -s "webs/webs_all.txt" ]]; then
+			cat webs/webs.txt webs/webs_uncommon_ports.txt 2>/dev/null | anew -q webs/webs_all.txt
+		fi
+
+		# Combine url_extract_nodupes.txt, subdomains.txt, and webs_all.txt into webs_subs.txt if it doesn't exist
+		if [[ ! -s ".tmp/webs_subs.txt" ]]; then
+			cat webs/url_extract_nodupes.txt subdomains/subdomains.txt webs/webs_all.txt 2>>"$LOGFILE" | anew -q .tmp/webs_subs.txt
+		fi
+
+		# If fuzzing_full.txt exists, process it and create webs_fuzz.txt
+		if [[ -s "$dir/fuzzing/fuzzing_full.txt" ]]; then
+			grep "^200" "$dir/fuzzing/fuzzing_full.txt" | cut -d " " -f3 | anew -q .tmp/webs_fuzz.txt
+		fi
+
+		# Combine webs_subs.txt and webs_fuzz.txt into webs_nuclei.txt, mirroring new entries into webs/
+		cat .tmp/webs_subs.txt .tmp/webs_fuzz.txt 2>>"$LOGFILE" | anew .tmp/webs_nuclei.txt | anew -q webs/webs_nuclei.txt
+
+		# Check if AXIOM is enabled
+		if [[ $AXIOM != true ]]; then
+			# Split severity levels into an array
+			IFS=',' read -ra severity_array <<<"$NUCLEI_SEVERITY"
+
+			for crit in "${severity_array[@]}"; do
+				printf "%b\n[%s] Running: Nuclei Severity: %s%b\n\n" "$yellow" "$(date +'%Y-%m-%d %H:%M:%S')" "$crit" "$reset"
+
+				# Run nuclei for each severity level
+				nuclei $NUCLEI_FLAGS -severity "$crit" -nh -rl "$NUCLEI_RATELIMIT" -o "nuclei_output/${crit}.txt" <.tmp/webs_nuclei.txt
+			done
+			printf "\n\n"
+		else
+			# Check if webs_nuclei.txt exists and is not empty
+			if [[ -s ".tmp/webs_nuclei.txt" ]]; then
+				# Split severity levels into an array
+				IFS=',' read -ra severity_array <<<"$NUCLEI_SEVERITY"
+
+				for crit in "${severity_array[@]}"; do
+					printf "%b\n[%s] Running: Axiom Nuclei Severity: %s. Check results in nuclei_output folder.%b\n\n" "$yellow" "$(date +'%Y-%m-%d %H:%M:%S')" "$crit" "$reset"
+					# Run axiom-scan with nuclei module for each severity level
+					axiom-scan .tmp/webs_nuclei.txt -m nuclei \
+						--nuclei-templates "$NUCLEI_TEMPLATES_PATH" \
+						-severity "$crit" -nh -rl "$NUCLEI_RATELIMIT" \
+						-o "nuclei_output/${crit}.txt" $AXIOM_EXTRA_ARGS 2>>"$LOGFILE" >/dev/null
+
+					# Display the results if the output file exists and is not empty
+					if [[ -s "nuclei_output/${crit}.txt" ]]; then
+						cat "nuclei_output/${crit}.txt"
+					fi
+				done
+				printf "\n\n"
+			fi
+		fi
+
+		end_func "Results are saved in $domain/nuclei_output folder" "${FUNCNAME[0]}"
+	else
+		# Handle cases where NUCLEICHECK is false or the function has already been processed
+		if [[ $NUCLEICHECK == false ]]; then
+			printf "\n%b[%s] %s skipped in this mode or disabled in reconftw.cfg.%b\n" \
+				"$yellow" "$(date +'%Y-%m-%d %H:%M:%S')" "${FUNCNAME[0]}" "$reset"
+		elif [[ $domain =~ ^[0-9]+\.[0-9]+\.[0-9]+\.[0-9]+$ ]]; then
+			# Domain is an IP address; skip the function
+			return
+		else
+			printf "${yellow}[$(date +'%Y-%m-%d %H:%M:%S')] ${FUNCNAME[0]} has already been processed. To force execution, delete:\n    $called_fn_dir/.${FUNCNAME[0]}${reset}\n\n"
+		fi
+	fi
+
+}
+
+function fuzz() {
+
+	# Create necessary directories
+	if ! mkdir -p .tmp/fuzzing webs fuzzing nuclei_output; then
+		printf "%b[!] Failed to create directories.%b\n" "$bred" "$reset"
+		return 1
+	fi
+
+	# Check if the function should run
+	if { [[ ! -f "$called_fn_dir/.${FUNCNAME[0]}" ]] || [[ $DIFF == true ]]; } && [[ $FUZZ == true ]] &&
+		! [[ $domain =~ ^[0-9]+\.[0-9]+\.[0-9]+\.[0-9]+$ ]]; then
+
+		start_func "${FUNCNAME[0]}" "Web Directory Fuzzing"
+
+		# Handle multi mode and initialize subdomains.txt if necessary
+		if [[ -n $multi ]] && [[ ! -f "$dir/webs/webs.txt" ]]; then
+			if ! printf "%b\n" "$domain" >"$dir/webs/webs.txt"; then
+				printf "%b[!] Failed to create webs.txt.%b\n" "$bred" "$reset"
 			fi
-			if [[ -s ".tmp/wafs.txt" ]]; then
-				cat .tmp/wafs.txt | sed -e 's/^[ \t]*//' -e 's/ \+ /\t/g' -e '/(None)/d' | tr -s "\t" ";" >webs/webs_wafs.txt
-				NUMOFLINES=$(cat webs/webs_wafs.txt 2>>"$LOGFILE" | sed '/^$/d' | wc -l)
-				notification "${NUMOFLINES} websites protected by waf" info
-				end_func "Results are saved in $domain/webs/webs_wafs.txt" ${FUNCNAME[0]}
-			else
-				end_func "No results found" ${FUNCNAME[0]}
+			if ! touch webs/webs_uncommon_ports.txt; then
+				printf "%b[!] Failed to initialize webs_uncommon_ports.txt.%b\n" "$bred" "$reset"
 			fi
-		else
-			end_func "No websites to scan" ${FUNCNAME[0]}
 		fi
-	else
-		if [[ $WAF_DETECTION == false ]]; then
-			printf "\n${yellow}[$(date +'%Y-%m-%d %H:%M:%S')] ${FUNCNAME[0]} skipped in this mode or defined in reconftw.cfg ${reset}\n"
-		else
-			printf "${yellow}[$(date +'%Y-%m-%d %H:%M:%S')] ${FUNCNAME[0]} is already processed, to force executing ${FUNCNAME[0]} delete\n    $called_fn_dir/.${FUNCNAME[0]} ${reset}\n\n"
+
+		# Combine webs.txt and webs_uncommon_ports.txt into webs_all.txt if it doesn't exist
+		if [[ ! -s "webs/webs_all.txt" ]]; then
+			cat webs/webs.txt webs/webs_uncommon_ports.txt 2>/dev/null | anew -q webs/webs_all.txt
 		fi
-	fi
 
-}
+		# Combine url_extract_nodupes.txt, subdomains.txt, and webs_all.txt into webs_subs.txt if it doesn't exist
+		if [[ ! -s ".tmp/webs_subs.txt" ]]; then
+			cat webs/url_extract_nodupes.txt subdomains/subdomains.txt webs/webs_all.txt 2>>"$LOGFILE" | anew -q .tmp/webs_subs.txt
+		fi
 
-function nuclei_check() {
+		# If fuzzing_full.txt exists, process it and create webs_fuzz.txt
+		if [[ -s "$dir/fuzzing/fuzzing_full.txt" ]]; then
+			grep "^200" "$dir/fuzzing/fuzzing_full.txt" | cut -d " " -f3 | anew -q .tmp/webs_fuzz.txt
+		fi
 
-	mkdir -p {.tmp,webs,subdomains,nuclei_output}
-	if { [[ ! -f "$called_fn_dir/.${FUNCNAME[0]}" ]] || [[ $DIFF == true ]]; } && [[ $NUCLEICHECK == true ]]; then
-		start_func ${FUNCNAME[0]} "Templates based web scanner"
-		nuclei -update 2>>"$LOGFILE" >/dev/null
-		mkdir -p nuclei_output
-		[[ -n $multi ]] && [ ! -f "$dir/subdomains/subdomains.txt" ] && echo "$domain" >"$dir/subdomains/subdomains.txt" && touch webs/webs.txt webs/webs_uncommon_ports.txt
-		[ ! -s "webs/webs_all.txt" ] && cat webs/webs.txt webs/webs_uncommon_ports.txt 2>/dev/null | anew -q webs/webs_all.txt
-		[ ! -s ".tmp/webs_subs.txt" ] && cat webs/url_extract_nodupes.txt subdomains/subdomains.txt webs/webs_all.txt 2>>"$LOGFILE" | anew -q .tmp/webs_subs.txt
-		[ -s "$dir/fuzzing/fuzzing_full.txt" ] && cat $dir/fuzzing/fuzzing_full.txt | grep -e "^200" | cut -d " " -f3 | anew -q .tmp/webs_fuzz.txt
+		# Combine webs_subs.txt and webs_fuzz.txt into webs_nuclei.txt and duplicate it
 		cat .tmp/webs_subs.txt .tmp/webs_fuzz.txt 2>>"$LOGFILE" | anew -q .tmp/webs_nuclei.txt | tee -a webs/webs_nuclei.txt
-		cp .tmp/webs_nuclei.txt webs/webs_nuclei.txt
 
-		if [[ $AXIOM != true ]]; then # avoid globbing (expansion of *).
+		# Check if AXIOM is enabled
+		if [[ $AXIOM != true ]]; then
+			# Split severity levels into an array
 			IFS=',' read -ra severity_array <<<"$NUCLEI_SEVERITY"
+
 			for crit in "${severity_array[@]}"; do
-				printf "${yellow}\n[$(date +'%Y-%m-%d %H:%M:%S')] Running : Nuclei $crit ${reset}\n\n"
-				cat .tmp/webs_nuclei.txt 2>/dev/null | nuclei $NUCLEI_FLAGS -severity $crit -nh -rl $NUCLEI_RATELIMIT -o nuclei_output/${crit}.txt
+				printf "${yellow}\n[$(date +'%Y-%m-%d %H:%M:%S')] Running: Nuclei Severity: $crit ${reset}\n\n"
+
+				# Run nuclei for each severity level
+				nuclei $NUCLEI_FLAGS -severity "$crit" -nh -rl "$NUCLEI_RATELIMIT" -o "nuclei_output/${crit}.txt" <.tmp/webs_nuclei.txt
 			done
 			printf "\n\n"
 		else
+			# Check if webs_nuclei.txt exists and is not empty
 			if [[ -s ".tmp/webs_nuclei.txt" ]]; then
+				# Split severity levels into an array
 				IFS=',' read -ra severity_array <<<"$NUCLEI_SEVERITY"
-				for crit in "${severity_array[@]}"; do
-					printf "${yellow}\n[$(date +'%Y-%m-%d %H:%M:%S')] Running : Nuclei $crit, check results on nuclei_output folder${reset}\n\n"
-					axiom-scan .tmp/webs_nuclei.txt -m nuclei --nuclei-templates ${NUCLEI_TEMPLATES_PATH} -severity ${crit} -nh -rl $NUCLEI_RATELIMIT -o nuclei_output/${crit}.txt $AXIOM_EXTRA_ARGS 2>>"$LOGFILE" >/dev/null
-					[ -s "nuclei_output/${crit}.txt" ] && cat nuclei_output/${crit}.txt
-				done
-				printf "\n\n"
-			fi
-		fi
-		end_func "Results are saved in $domain/nuclei_output folder" ${FUNCNAME[0]}
-	else
-		if [[ $NUCLEICHECK == false ]]; then
-			printf "\n${yellow}[$(date +'%Y-%m-%d %H:%M:%S')] ${FUNCNAME[0]} skipped in this mode or defined in reconftw.cfg ${reset}\n"
-		else
-			printf "${yellow}[$(date +'%Y-%m-%d %H:%M:%S')] ${FUNCNAME[0]} is already processed, to force executing ${FUNCNAME[0]} delete\n    $called_fn_dir/.${FUNCNAME[0]} ${reset}\n\n"
-		fi
-	fi
 
-}
+				for crit in "${severity_array[@]}"; do
+					printf "${yellow}\n[$(date +'%Y-%m-%d %H:%M:%S')] Running: Axiom Nuclei Severity: $crit. Check results in nuclei_output folder.${reset}\n\n"
 
-function fuzz() {
+					# Run axiom-scan with nuclei module for each severity level
+					axiom-scan .tmp/webs_nuclei.txt -m nuclei \
+						--nuclei-templates "$NUCLEI_TEMPLATES_PATH" \
+						-severity "$crit" -nh -rl "$NUCLEI_RATELIMIT" \
+						-o "nuclei_output/${crit}.txt" "$AXIOM_EXTRA_ARGS" 2>>"$LOGFILE" >/dev/null
 
-	mkdir -p {.tmp/fuzzing,webs,fuzzing}
-	if { [[ ! -f "$called_fn_dir/.${FUNCNAME[0]}" ]] || [[ $DIFF == true ]]; } && [[ $FUZZ == true ]]; then
-		start_func ${FUNCNAME[0]} "Web directory fuzzing"
-		[[ -n $multi ]] && [ ! -f "$dir/webs/webs.txt" ] && echo "$domain" >"$dir/webs/webs.txt" && touch webs/webs_uncommon_ports.txt
-		[ ! -s "webs/webs_all.txt" ] && cat webs/webs.txt webs/webs_uncommon_ports.txt 2>/dev/null | anew -q webs/webs_all.txt
-		if [[ -s "webs/webs_all.txt" ]]; then
-			if [[ $AXIOM != true ]]; then
-				interlace -tL webs/webs_all.txt -threads ${INTERLACE_THREADS} -c "ffuf ${FFUF_FLAGS} -t ${FFUF_THREADS} -rate ${FFUF_RATELIMIT} -H \"${HEADER}\" -w ${fuzz_wordlist} -maxtime ${FFUF_MAXTIME} -u _target_/FUZZ -o _output_/_cleantarget_.json" -o $dir/.tmp/fuzzing 2>>"$LOGFILE" >/dev/null
-				for sub in $(cat webs/webs_all.txt); do
-					sub_out=$(echo $sub | sed -e 's|^[^/]*//||' -e 's|/.*$||')
-
-					pushd "${tools}/ffufPostprocessing" >/dev/null || {
-						echo "Failed to cd directory in ${FUNCNAME[0]} @ line ${LINENO}"
-					}
-					./ffufPostprocessing -result-file $dir/.tmp/fuzzing/${sub_out}.json -overwrite-result-file
-					popd >/dev/null || {
-						echo "Failed to popd in ${FUNCNAME[0]} @ line ${LINENO}"
-					}
-
-					[ -s "$dir/.tmp/fuzzing/${sub_out}.json" ] && cat $dir/.tmp/fuzzing/${sub_out}.json | jq -r 'try .results[] | "\(.status) \(.length) \(.url)"' | sort -k1 | anew -q $dir/fuzzing/${sub_out}.txt
-				done
-				find $dir/fuzzing/ -type f -iname "*.txt" -exec cat {} + 2>>"$LOGFILE" | sort -k1 | anew -q $dir/fuzzing/fuzzing_full.txt
-			else
-				axiom-exec "mkdir -p /home/op/lists/seclists/Discovery/Web-Content/" &>/dev/null
-				axiom-exec "wget -q -O - ${fuzzing_remote_list} > /home/op/lists/fuzz_wordlist.txt" &>/dev/null
-				axiom-exec "wget -q -O - ${fuzzing_remote_list} > /home/op/lists/seclists/Discovery/Web-Content/big.txt" &>/dev/null
-				axiom-scan webs/webs_all.txt -m ffuf_base -H "${HEADER}" $FFUF_FLAGS -s -maxtime $FFUF_MAXTIME -o $dir/.tmp/ffuf-content.json $AXIOM_EXTRA_ARGS 2>>"$LOGFILE" >/dev/null
-				for sub in $(cat webs/webs_all.txt); do
-					sub_out=$(echo $sub | sed -e 's|^[^/]*//||' -e 's|/.*$||')
-					[ -s "$dir/.tmp/ffuf-content.json" ] && cat .tmp/ffuf-content.json | jq -r 'try .results[] | "\(.status) \(.length) \(.url)"' | grep $sub | sort -k1 | anew -q fuzzing/${sub_out}.txt
+					# Display the results if the output file exists and is not empty
+					if [[ -s "nuclei_output/${crit}.txt" ]]; then
+						cat "nuclei_output/${crit}.txt"
+					fi
 				done
-				find $dir/fuzzing/ -type f -iname "*.txt" -exec cat {} + 2>>"$LOGFILE" | sort -k1 | anew -q $dir/fuzzing/fuzzing_full.txt
+				printf "\n\n"
 			fi
-			end_func "Results are saved in $domain/fuzzing/*subdomain*.txt" ${FUNCNAME[0]}
-		else
-			end_func "No $domain/web/webs.txts file found, fuzzing skipped " ${FUNCNAME[0]}
 		fi
+
+		end_func "Results are saved in $domain/nuclei_output folder" "${FUNCNAME[0]}"
 	else
+		# Handle cases where FUZZ is false or the function has already been processed
 		if [[ $FUZZ == false ]]; then
-			printf "\n${yellow}[$(date +'%Y-%m-%d %H:%M:%S')] ${FUNCNAME[0]} skipped in this mode or defined in reconftw.cfg ${reset}\n"
+			printf "\n${yellow}[$(date +'%Y-%m-%d %H:%M:%S')] ${FUNCNAME[0]} skipped due to configuration settings.${reset}\n"
+		elif [[ $domain =~ ^[0-9]+\.[0-9]+\.[0-9]+\.[0-9]+$ ]]; then
+			# Domain is an IP address; skip the function
+			return
 		else
-			printf "${yellow}[$(date +'%Y-%m-%d %H:%M:%S')] ${FUNCNAME[0]} is already processed, to force executing ${FUNCNAME[0]} delete\n    $called_fn_dir/.${FUNCNAME[0]} ${reset}\n\n"
+			printf "${yellow}[$(date +'%Y-%m-%d %H:%M:%S')] ${FUNCNAME[0]} has already been processed. To force execution, delete:\n    $called_fn_dir/.${FUNCNAME[0]}${reset}\n\n"
 		fi
 	fi
 
@@ -1895,25 +3479,56 @@ function fuzz() {
 
 function iishortname() {
 
+	# Check if the function should run
 	if { [[ ! -f "$called_fn_dir/.${FUNCNAME[0]}" ]] || [[ $DIFF == true ]]; } && [[ $IIS_SHORTNAME == true ]]; then
-		start_func ${FUNCNAME[0]} "IIS Shortname Scanner"
-		[ -s "nuclei_output/info.txt" ] && cat nuclei_output/info.txt | grep "iis-version" | cut -d " " -f4 >.tmp/iis_sites.txt
+		start_func "${FUNCNAME[0]}" "IIS Shortname Scanner"
+
+		# Ensure nuclei_output/info.txt exists and is not empty
+		if [[ -s "nuclei_output/info.txt" ]]; then
+			# Extract IIS version information and save to .tmp/iis_sites.txt
+			grep "iis-version" "nuclei_output/info.txt" | cut -d " " -f4 >.tmp/iis_sites.txt
+		else
+			printf "%b[!] nuclei_output/info.txt does not exist or is empty.%b\n" "$yellow" "$reset"
+		fi
+
+		# Proceed only if iis_sites.txt exists and is non-empty
 		if [[ -s ".tmp/iis_sites.txt" ]]; then
-			mkdir -p $dir/vulns/iis-shortname-shortscan/
-			mkdir -p $dir/vulns/iis-shortname-sns/
-			interlace -tL .tmp/iis_sites.txt -threads ${INTERLACE_THREADS} -c "shortscan _target_ -F -s -p 1 > _output_/_cleantarget_.txt" -o $dir/vulns/iis-shortname-shortscan/ 2>>"$LOGFILE" >/dev/null
-			find $dir/vulns/iis-shortname-shortscan/ -type f -print0 | xargs --null grep -Z -L 'Vulnerable: Yes' | xargs --null rm 2>>"$LOGFILE" >/dev/null
-			interlace -tL .tmp/iis_sites.txt -threads ${INTERLACE_THREADS} -c "sns -u _target_ > _output_/_cleantarget_.txt" -o $dir/vulns/iis-shortname-sns/ 2>>"$LOGFILE" >/dev/null
-			find $dir/vulns/iis-shortname-sns/ -type f -print0 | xargs --null grep -Z 'Target is not vulnerable' | xargs --null rm 2>>"$LOGFILE" >/dev/null
-			end_func "Results are saved in vulns/iis-shortname/" ${FUNCNAME[0]}
+			# Create necessary directories
+			mkdir -p "$dir/vulns/iis-shortname-shortscan/" "$dir/vulns/iis-shortname-sns/"
+
+			# Run shortscan using interlace
+			interlace -tL .tmp/iis_sites.txt -threads "$INTERLACE_THREADS" \
+				-c "shortscan _target_ -F -s -p 1 > _output_/_cleantarget_.txt" \
+				-o "$dir/vulns/iis-shortname-shortscan/" 2>>"$LOGFILE" >/dev/null
+
+			# Remove non-vulnerable shortscan results
+			find "$dir/vulns/iis-shortname-shortscan/" -type f -iname "*.txt" -print0 |
+				xargs --null grep -Z -L 'Vulnerable: Yes' |
+				xargs --null rm 2>>"$LOGFILE" >/dev/null
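+			# (grep -Z -L prints NUL-terminated names of files lacking the match, which xargs --null hands to rm)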
+
+			# Run sns using interlace
+			interlace -tL .tmp/iis_sites.txt -threads "$INTERLACE_THREADS" \
+				-c "sns -u _target_ > _output_/_cleantarget_.txt" \
+				-o "$dir/vulns/iis-shortname-sns/" 2>>"$LOGFILE" >/dev/null
+
+			# Remove non-vulnerable sns results
+			find "$dir/vulns/iis-shortname-sns/" -type f -iname "*.txt" -print0 |
+				xargs --null grep -Z 'Target is not vulnerable' |
+				xargs --null rm 2>>"$LOGFILE" >/dev/null
+
+			end_func "Results are saved in vulns/iis-shortname/" "${FUNCNAME[0]}"
 		else
-			end_func "No IIS sites detected, iishortname check skipped " ${FUNCNAME[0]}
+			end_func "No IIS sites detected, iishortname check skipped." "${FUNCNAME[0]}"
 		fi
 	else
+		# Handle cases where IIS_SHORTNAME is false or the function has already been processed
 		if [[ $IIS_SHORTNAME == false ]]; then
-			printf "\n${yellow}[$(date +'%Y-%m-%d %H:%M:%S')] ${FUNCNAME[0]} skipped in this mode or defined in reconftw.cfg ${reset}\n"
+			printf "\n${yellow}[$(date +'%Y-%m-%d %H:%M:%S')] ${FUNCNAME[0]} skipped due to configuration settings.${reset}\n"
+		elif [[ $domain =~ ^[0-9]+\.[0-9]+\.[0-9]+\.[0-9]+$ ]]; then
+			# Domain is an IP address; skip the function
+			return
 		else
-			printf "${yellow}[$(date +'%Y-%m-%d %H:%M:%S')] ${FUNCNAME[0]} is already processed, to force executing ${FUNCNAME[0]} delete\n    $called_fn_dir/.${FUNCNAME[0]} ${reset}\n\n"
+			printf "${yellow}[$(date +'%Y-%m-%d %H:%M:%S')] ${FUNCNAME[0]} has already been processed. To force execution, delete:\n    $called_fn_dir/.${FUNCNAME[0]}${reset}\n\n"
 		fi
 	fi
 
@@ -1921,127 +3536,208 @@ function iishortname() {
 
 function cms_scanner() {
 
-	mkdir -p {.tmp,webs,cms}
-	if { [[ ! -f "$called_fn_dir/.${FUNCNAME[0]}" ]] || [[ $DIFF == true ]]; } && [[ $CMS_SCANNER == true ]]; then
-		start_func ${FUNCNAME[0]} "CMS Scanner"
-		mkdir -p $dir/cms 2>/dev/null
-		rm -rf $dir/cms/*
-		[[ -n $multi ]] && [ ! -f "$dir/webs/webs.txt" ] && echo "$domain" >"$dir/webs/webs.txt" && touch webs/webs_uncommon_ports.txt
-		[ ! -s "webs/webs_all.txt" ] && cat webs/webs.txt webs/webs_uncommon_ports.txt 2>/dev/null | anew -q webs/webs_all.txt
+	# Create necessary directories
+	if ! mkdir -p .tmp webs cms; then
+		printf "%b[!] Failed to create directories.%b\n" "$bred" "$reset"
+		return 1
+	fi
+
+	# Check if the function should run
+	if { [[ ! -f "$called_fn_dir/.${FUNCNAME[0]}" ]] || [[ $DIFF == true ]]; } && [[ $CMS_SCANNER == true ]] &&
+		! [[ $domain =~ ^[0-9]+\.[0-9]+\.[0-9]+\.[0-9]+$ ]]; then
+
+		start_func "${FUNCNAME[0]}" "CMS Scanner"
+
+		rm -rf "$dir/cms/"*
+
+		# Handle multi mode and initialize webs.txt if necessary
+		if [[ -n $multi ]] && [[ ! -f "$dir/webs/webs.txt" ]]; then
+			printf "%b\n" "$domain" >"$dir/webs/webs.txt"
+			touch webs/webs_uncommon_ports.txt
+		fi
+
+		# Combine webs.txt and webs_uncommon_ports.txt into webs_all.txt if it doesn't exist
+		if [[ ! -s "webs/webs_all.txt" ]]; then
+			cat webs/webs.txt webs/webs_uncommon_ports.txt 2>/dev/null | anew -q webs/webs_all.txt
+		fi
+
+		# Combine webs_all.txt into .tmp/cms.txt as a comma-separated list
 		if [[ -s "webs/webs_all.txt" ]]; then
 			tr '\n' ',' <webs/webs_all.txt >.tmp/cms.txt 2>>"$LOGFILE"
-			timeout -k 1m ${CMSSCAN_TIMEOUT}s python3 ${tools}/CMSeeK/cmseek.py -l .tmp/cms.txt --batch -r &>>"$LOGFILE" || (true && echo "CMSeek timeout reached")
+		else
+			end_func "No webs/webs_all.txt file found, cms scanner skipped." "${FUNCNAME[0]}"
+			return
+		fi
+
+		# Run CMSeeK with timeout; capture the exit status directly, since "$?" inside
+		# an "if !" condition reflects the negated test rather than the command itself
+		timeout -k 1m "${CMSSCAN_TIMEOUT}s" python3 "${tools}/CMSeeK/cmseek.py" -l .tmp/cms.txt --batch -r &>>"$LOGFILE"
 			exit_status=$?
-			if [[ ${exit_status} -eq 125 ]]; then
+			if [[ ${exit_status} -eq 124 || ${exit_status} -eq 137 ]]; then
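+				# timeout(1) exits 124 when the time limit is hit and 137 (128+SIGKILL) when the -k kill fires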
 				echo "TIMEOUT cmseek.py - investigate manually for $dir" >>"$LOGFILE"
-				end_func "TIMEOUT cmseek.py - investigate manually for $dir" ${FUNCNAME[0]}
+				end_func "TIMEOUT cmseek.py - investigate manually for $dir" "${FUNCNAME[0]}"
 				return
 			elif [[ ${exit_status} -ne 0 ]]; then
 				echo "ERROR cmseek.py - investigate manually for $dir" >>"$LOGFILE"
-				end_func "ERROR cmseek.py - investigate manually for $dir" ${FUNCNAME[0]}
+				end_func "ERROR cmseek.py - investigate manually for $dir" "${FUNCNAME[0]}"
 				return
-			fi # otherwise Assume we have a successfully exited cmseek
-			for sub in $(cat webs/webs_all.txt); do
-				sub_out=$(echo $sub | sed -e 's|^[^/]*//||' -e 's|/.*$||')
-				cms_id=$(cat ${tools}/CMSeeK/Result/${sub_out}/cms.json 2>/dev/null | jq -r 'try .cms_id')
-				if [[ -z $cms_id ]]; then
-					rm -rf ${tools}/CMSeeK/Result/${sub_out}
+			fi
+
+		# Process CMSeeK results
+		while IFS= read -r sub; do
+			sub_out=$(echo "$sub" | sed -e 's|^[^/]*//||' -e 's|/.*$||')
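+			# Strip scheme and path: https://app.example.com/login -> app.example.com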
+			cms_json_path="${tools}/CMSeeK/Result/${sub_out}/cms.json"
+
+			if [[ -s $cms_json_path ]]; then
+				cms_id=$(jq -r 'try .cms_id' "$cms_json_path")
+				if [[ -n $cms_id ]]; then
+					mv -f "${tools}/CMSeeK/Result/${sub_out}" "$dir/cms/" 2>>"$LOGFILE"
 				else
-					mv -f ${tools}/CMSeeK/Result/${sub_out} $dir/cms/ 2>>"$LOGFILE"
+					rm -rf "${tools}/CMSeeK/Result/${sub_out}" 2>>"$LOGFILE"
 				fi
-			done
-			end_func "Results are saved in $domain/cms/*subdomain* folder" ${FUNCNAME[0]}
-		else
-			end_func "No $domain/web/webs.txts file found, cms scanner skipped" ${FUNCNAME[0]}
-		fi
+			else
+				printf "%b[!] cms.json does not exist or is empty for $sub_out.%b\n" "$yellow" "$reset"
+			fi
+		done <"webs/webs_all.txt"
+
+		end_func "Results are saved in $domain/cms/*subdomain* folder" "${FUNCNAME[0]}"
 	else
+		# Handle cases where CMS_SCANNER is false or the function has already been processed
 		if [[ $CMS_SCANNER == false ]]; then
-			printf "\n${yellow}[$(date +'%Y-%m-%d %H:%M:%S')] ${FUNCNAME[0]} skipped in this mode or defined in reconftw.cfg ${reset}\n"
+			printf "\n${yellow}[$(date +'%Y-%m-%d %H:%M:%S')] ${FUNCNAME[0]} skipped due to configuration settings.${reset}\n"
+		elif [[ $domain =~ ^[0-9]+\.[0-9]+\.[0-9]+\.[0-9]+$ ]]; then
+			# Domain is an IP address; skip the function
+			return
 		else
-			printf "${yellow}[$(date +'%Y-%m-%d %H:%M:%S')] ${FUNCNAME[0]} is already processed, to force executing ${FUNCNAME[0]} delete\n    $called_fn_dir/.${FUNCNAME[0]} ${reset}\n\n"
+			printf "${yellow}[$(date +'%Y-%m-%d %H:%M:%S')] ${FUNCNAME[0]} has already been processed. To force execution, delete:\n    $called_fn_dir/.${FUNCNAME[0]}${reset}\n\n"
 		fi
 	fi
-
 }
 
 function urlchecks() {
 
-	mkdir -p {.tmp,webs}
+	# Create necessary directories
+	if ! mkdir -p .tmp webs; then
+		printf "%b[!] Failed to create directories.%b\n" "$bred" "$reset"
+		return 1
+	fi
+
+	# Check if the function should run
 	if { [[ ! -f "$called_fn_dir/.${FUNCNAME[0]}" ]] || [[ $DIFF == true ]]; } && [[ $URL_CHECK == true ]]; then
-		start_func ${FUNCNAME[0]} "URL Extraction"
-		[ ! -s "webs/webs_all.txt" ] && cat webs/webs.txt webs/webs_uncommon_ports.txt 2>/dev/null | anew -q webs/webs_all.txt
+		start_func "${FUNCNAME[0]}" "URL Extraction"
+
+		# Combine webs.txt and webs_uncommon_ports.txt if webs_all.txt doesn't exist
+		if [[ ! -s "webs/webs_all.txt" ]]; then
+			cat webs/webs.txt webs/webs_uncommon_ports.txt 2>/dev/null | anew -q webs/webs_all.txt
+		fi
+
 		if [[ -s "webs/webs_all.txt" ]]; then
 			if [[ $AXIOM != true ]]; then
 				if [[ $URL_CHECK_PASSIVE == true ]]; then
 					if [[ $DEEP == true ]]; then
-						cat webs/webs_all.txt | unfurl -u domains >.tmp/waymore_input.txt
+						unfurl -u domains <webs/webs_all.txt >.tmp/waymore_input.txt
 						waymore -i .tmp/waymore_input.txt -mode U -f -oU .tmp/url_extract_tmp.txt 2>>"$LOGFILE" >/dev/null
 					else
-						cat webs/webs_all.txt | unfurl -u domains >.tmp/waymore_input.txt
-						waymore -i .tmp/waymore_input.txt -mode U -f -oU .tmp/url_extract_tmp.txt 2>>"$LOGFILE" >/dev/null # could add -xcc to remove commoncrawl wich takes a bit longer
+						unfurl -u domains <webs/webs_all.txt >.tmp/waymore_input.txt
+						waymore -i .tmp/waymore_input.txt -mode U -f -oU .tmp/url_extract_tmp.txt 2>>"$LOGFILE" >/dev/null
 					fi
-					if [[ -s ${GITHUB_TOKENS} ]]; then
-						github-endpoints -q -k -d $domain -t ${GITHUB_TOKENS} -o .tmp/github-endpoints.txt 2>>"$LOGFILE" >/dev/null
-						[ -s ".tmp/github-endpoints.txt" ] && cat .tmp/github-endpoints.txt | anew -q .tmp/url_extract_tmp.txt
+
+					if [[ -s $GITHUB_TOKENS ]]; then
+						github-endpoints -q -k -d "$domain" -t "$GITHUB_TOKENS" -o .tmp/github-endpoints.txt 2>>"$LOGFILE" >/dev/null
+						if [[ -s ".tmp/github-endpoints.txt" ]]; then
+							cat .tmp/github-endpoints.txt | anew -q .tmp/url_extract_tmp.txt
+						fi
 					fi
 				fi
+
 				diff_webs=$(diff <(sort -u .tmp/probed_tmp.txt 2>>"$LOGFILE") <(sort -u webs/webs_all.txt 2>>"$LOGFILE") | wc -l)
 				if [[ $diff_webs != "0" ]] || [[ ! -s ".tmp/katana.txt" ]]; then
 					if [[ $URL_CHECK_ACTIVE == true ]]; then
 						if [[ $DEEP == true ]]; then
-							katana -silent -list webs/webs_all.txt -jc -kf all -c $KATANA_THREADS -d 3 -fs rdn -o .tmp/katana.txt 2>>"$LOGFILE" >/dev/null
+							katana -silent -list webs/webs_all.txt -jc -kf all -c "$KATANA_THREADS" -d 3 -fs rdn -o .tmp/katana.txt 2>>"$LOGFILE" >/dev/null
 						else
-							katana -silent -list webs/webs_all.txt -jc -kf all -c $KATANA_THREADS -d 2 -fs rdn -o .tmp/katana.txt 2>>"$LOGFILE" >/dev/null
+							katana -silent -list webs/webs_all.txt -jc -kf all -c "$KATANA_THREADS" -d 2 -fs rdn -o .tmp/katana.txt 2>>"$LOGFILE" >/dev/null
 						fi
 					fi
 				fi
 			else
 				if [[ $URL_CHECK_PASSIVE == true ]]; then
 					if [[ $DEEP == true ]]; then
-						cat webs/webs_all.txt | unfurl -u domains >.tmp/waymore_input.txt
-						axiom-scan .tmp/waymore_input.txt -m waymore -o .tmp/url_extract_tmp.txt $AXIOM_EXTRA_ARGS 2>>"$LOGFILE" >/dev/null
+						unfurl -u domains <webs/webs_all.txt >.tmp/waymore_input.txt
+						axiom-scan .tmp/waymore_input.txt -m waymore -o .tmp/url_extract_tmp.txt "$AXIOM_EXTRA_ARGS" 2>>"$LOGFILE" >/dev/null
 					else
-						axiom-scan webs/webs_all.txt -m gau -o .tmp/url_extract_tmp.txt $AXIOM_EXTRA_ARGS 2>>"$LOGFILE" >/dev/null
+						axiom-scan webs/webs_all.txt -m gau -o .tmp/url_extract_tmp.txt "$AXIOM_EXTRA_ARGS" 2>>"$LOGFILE" >/dev/null
 					fi
-					if [[ -s ${GITHUB_TOKENS} ]]; then
-						github-endpoints -q -k -d $domain -t ${GITHUB_TOKENS} -o .tmp/github-endpoints.txt 2>>"$LOGFILE" >/dev/null
-						[ -s ".tmp/github-endpoints.txt" ] && cat .tmp/github-endpoints.txt | anew -q .tmp/url_extract_tmp.txt
+
+					if [[ -s $GITHUB_TOKENS ]]; then
+						github-endpoints -q -k -d "$domain" -t "$GITHUB_TOKENS" -o .tmp/github-endpoints.txt 2>>"$LOGFILE" >/dev/null
+						if [[ -s ".tmp/github-endpoints.txt" ]]; then
+							cat .tmp/github-endpoints.txt | anew -q .tmp/url_extract_tmp.txt
+						fi
 					fi
 				fi
+
 				diff_webs=$(diff <(sort -u .tmp/probed_tmp.txt) <(sort -u webs/webs_all.txt) | wc -l)
 				if [[ $diff_webs != "0" ]] || [[ ! -s ".tmp/katana.txt" ]]; then
 					if [[ $URL_CHECK_ACTIVE == true ]]; then
 						if [[ $DEEP == true ]]; then
-							axiom-scan webs/webs_all.txt -m katana -jc -kf all -d 3 -fs rdn -fs rdn -o .tmp/katana.txt $AXIOM_EXTRA_ARGS 2>>"$LOGFILE" >/dev/null
+							axiom-scan webs/webs_all.txt -m katana -jc -kf all -d 3 -fs rdn -o .tmp/katana.txt "$AXIOM_EXTRA_ARGS" 2>>"$LOGFILE" >/dev/null
 						else
-							axiom-scan webs/webs_all.txt -m katana -jc -kf all -d 2 -fs rdn -fs rdn -o .tmp/katana.txt $AXIOM_EXTRA_ARGS 2>>"$LOGFILE" >/dev/null
+							axiom-scan webs/webs_all.txt -m katana -jc -kf all -d 2 -fs rdn -o .tmp/katana.txt "$AXIOM_EXTRA_ARGS" 2>>"$LOGFILE" >/dev/null
 						fi
 					fi
 				fi
 			fi
-			[ -s ".tmp/katana.txt" ] && sed -i '/^.\{2048\}./d' .tmp/katana.txt
-			[ -s ".tmp/katana.txt" ] && cat .tmp/katana.txt | anew -q .tmp/url_extract_tmp.txt
-			[ -s ".tmp/url_extract_tmp.txt" ] && cat .tmp/url_extract_tmp.txt | grep "${domain}" | grep -E '^((http|https):\/\/)?([a-zA-Z0-9]([a-zA-Z0-9\-]*[a-zA-Z0-9])?\.)+[a-zA-Z]{1,}(\/.*)?$' | grep -aEi "\.(js)" | anew -q .tmp/url_extract_js.txt
-			[ -s ".tmp/url_extract_tmp.txt" ] && cat .tmp/url_extract_tmp.txt | grep "${domain}" | grep -E '^((http|https):\/\/)?([a-zA-Z0-9]([a-zA-Z0-9\-]*[a-zA-Z0-9])?\.)+[a-zA-Z]{1,}(\/.*)?$' | grep -aEi "\.(js\.map)" | anew -q .tmp/url_extract_jsmap.txt
-			if [[ $DEEP == true ]]; then
-				[ -s ".tmp/url_extract_js.txt" ] && interlace -tL .tmp/url_extract_js.txt -threads 10 -c "python3 ${tools}/JSA/jsa.py -f target | anew -q .tmp/url_extract_tmp.txt" &>/dev/null
+
+			if [[ -s ".tmp/katana.txt" ]]; then
+				sed -i '/^.\{2048\}./d' .tmp/katana.txt
+				cat .tmp/katana.txt | anew -q .tmp/url_extract_tmp.txt
 			fi
-			[ -s ".tmp/url_extract_tmp.txt" ] && cat .tmp/url_extract_tmp.txt | grep "${domain}" | grep -E '^((http|https):\/\/)?([a-zA-Z0-9]([a-zA-Z0-9\-]*[a-zA-Z0-9])?\.)+[a-zA-Z]{1,}(\/.*)?$' | grep "=" | qsreplace -a 2>>"$LOGFILE" | grep -aEiv "\.(eot|jpg|jpeg|gif|css|tif|tiff|png|ttf|otf|woff|woff2|ico|pdf|svg|txt|js)$" | anew -q .tmp/url_extract_tmp2.txt
-			[ -s ".tmp/url_extract_tmp2.txt" ] && cat .tmp/url_extract_tmp2.txt | python3 ${tools}/urless/urless/urless.py | anew -q .tmp/url_extract_uddup.txt 2>>"$LOGFILE" >/dev/null
-			NUMOFLINES=$(cat .tmp/url_extract_uddup.txt 2>>"$LOGFILE" | anew webs/url_extract.txt | sed '/^$/d' | wc -l)
-			notification "${NUMOFLINES} new urls with params" info
-			end_func "Results are saved in $domain/webs/url_extract.txt" ${FUNCNAME[0]}
-			p1radup -i webs/url_extract.txt -o webs/url_extract_nodupes.txt -s  2>>"$LOGFILE" >/dev/null
-			if [[ $PROXY == true ]] && [[ -n $proxy_url ]] && [[ $(cat webs/url_extract.txt | wc -l) -le $DEEP_LIMIT2 ]]; then
-				notification "Sending urls to proxy" info
-				ffuf -mc all -w webs/url_extract.txt -u FUZZ -replay-proxy $proxy_url 2>>"$LOGFILE" >/dev/null
+
+			if [[ -s ".tmp/url_extract_tmp.txt" ]]; then
+				grep "$domain" .tmp/url_extract_tmp.txt | grep -E '^((http|https):\/\/)?([a-zA-Z0-9\-\.]+\.)+[a-zA-Z]{1,}(\/.*)?$' | grep -aEi "\.js$" | anew -q .tmp/url_extract_js.txt
+				grep "$domain" .tmp/url_extract_tmp.txt | grep -E '^((http|https):\/\/)?([a-zA-Z0-9\-\.]+\.)+[a-zA-Z]{1,}(\/.*)?$' | grep -aEi "\.js\.map$" | anew -q .tmp/url_extract_jsmap.txt
+				if [[ $DEEP == true ]] && [[ -s ".tmp/url_extract_js.txt" ]]; then
+					interlace -tL .tmp/url_extract_js.txt -threads 10 -c "python3 ${tools}/JSA/jsa.py -f _target_ | anew -q .tmp/url_extract_tmp.txt" &>/dev/null
+				fi
+
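+				# qsreplace normalizes query strings so URLs that differ only in parameter values collapse to one entry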
+				grep "$domain" .tmp/url_extract_tmp.txt | grep -E '^((http|https):\/\/)?([a-zA-Z0-9\-\.]+\.)+[a-zA-Z]{1,}(\/.*)?$' | grep "=" | qsreplace -a 2>>"$LOGFILE" | grep -aEiv "\.(eot|jpg|jpeg|gif|css|tif|tiff|png|ttf|otf|woff|woff2|ico|pdf|svg|txt|js)$" | anew -q .tmp/url_extract_tmp2.txt
+
+				if [[ -s ".tmp/url_extract_tmp2.txt" ]]; then
+					python3 "${tools}/urless/urless/urless.py" <.tmp/url_extract_tmp2.txt | anew -q .tmp/url_extract_uddup.txt 2>>"$LOGFILE" >/dev/null
+				fi
+
+				if [[ -s ".tmp/url_extract_uddup.txt" ]]; then
+					if ! NUMOFLINES=$(anew webs/url_extract.txt <.tmp/url_extract_uddup.txt | sed '/^$/d' | wc -l); then
+						printf "%b[!] Failed to update url_extract.txt.%b\n" "$bred" "$reset"
+						NUMOFLINES=0
+					fi
+					notification "${NUMOFLINES} new URLs with parameters" "info"
+				else
+					NUMOFLINES=0
+				fi
+
+				end_func "Results are saved in $domain/webs/url_extract.txt" "${FUNCNAME[0]}"
+
+				p1radup -i webs/url_extract.txt -o webs/url_extract_nodupes.txt -s 2>>"$LOGFILE" >/dev/null
+
+				if [[ $PROXY == true ]] && [[ -n $proxy_url ]] && [[ $(wc -l <webs/url_extract.txt) -le $DEEP_LIMIT2 ]]; then
+					notification "Sending URLs to proxy" "info"
+					ffuf -mc all -w webs/url_extract.txt -u FUZZ -replay-proxy "$proxy_url" 2>>"$LOGFILE" >/dev/null
+				fi
+			else
+				printf "%b[!] No URLs extracted.%b\n" "$yellow" "$reset"
 			fi
+		else
+			printf "%b[!] webs/webs_all.txt does not exist or is empty.%b\n" "$yellow" "$reset"
 		fi
 	else
 		if [[ $URL_CHECK == false ]]; then
-			printf "\n${yellow}[$(date +'%Y-%m-%d %H:%M:%S')] ${FUNCNAME[0]} skipped in this mode or defined in reconftw.cfg ${reset}\n"
+			printf "\n%b[%s] %s skipped due to mode or defined in reconftw.cfg.%b\n" \
+				"$yellow" "$(date +'%Y-%m-%d %H:%M:%S')" "${FUNCNAME[0]}" "$reset"
 		else
-			printf "${yellow}[$(date +'%Y-%m-%d %H:%M:%S')] ${FUNCNAME[0]} is already processed, to force executing ${FUNCNAME[0]} delete\n    $called_fn_dir/.${FUNCNAME[0]} ${reset}\n\n"
+			printf "%b[%s] %s has already been processed. To force execution, delete:\n    %s/.%s%b\n\n" \
+				"$yellow" "$(date +'%Y-%m-%d %H:%M:%S')" "${FUNCNAME[0]}" "$called_fn_dir" ".${FUNCNAME[0]}" "$reset"
 		fi
 	fi
 
@@ -2049,27 +3745,70 @@ function urlchecks() {
 
 function url_gf() {
 
-	mkdir -p {.tmp,webs,gf}
-	if { [[ ! -f "$called_fn_dir/.${FUNCNAME[0]}" ]] || [[ $DIFF == true ]]; } && [[ $URL_GF == true ]]; then
-		start_func ${FUNCNAME[0]} "Vulnerable Pattern Search"
+	# Create necessary directories
+	if ! mkdir -p .tmp webs gf; then
+		printf "%b[!] Failed to create directories.%b\n" "$bred" "$reset"
+		return 1
+	fi
+
+	# Check if the function should run
+	if { [[ ! -f "$called_fn_dir/.${FUNCNAME[0]}" ]] || [[ $DIFF == true ]]; } && [[ $URL_GF == true ]] &&
+		! [[ $domain =~ ^[0-9]+\.[0-9]+\.[0-9]+\.[0-9]+$ ]]; then
+
+		start_func "${FUNCNAME[0]}" "Vulnerable Pattern Search"
+
+		# Ensure webs_nuclei.txt exists and is not empty
 		if [[ -s "webs/webs_nuclei.txt" ]]; then
-			gf xss webs/webs_nuclei.txt | anew -q gf/xss.txt
-			gf ssti webs/webs_nuclei.txt | anew -q gf/ssti.txt
-			gf ssrf webs/webs_nuclei.txt | anew -q gf/ssrf.txt
-			gf sqli webs/webs_nuclei.txt | anew -q gf/sqli.txt
-			gf redirect webs/webs_nuclei.txt | anew -q gf/redirect.txt
-			[ -s "gf/ssrf.txt" ] && cat gf/ssrf.txt | anew -q gf/redirect.txt
-			gf rce webs/webs_nuclei.txt | anew -q gf/rce.txt
-			gf potential webs/webs_nuclei.txt | cut -d ':' -f3-5 | anew -q gf/potential.txt
-			[ -s ".tmp/url_extract_tmp.txt" ] && cat .tmp/url_extract_tmp.txt | grep -aEiv "\.(eot|jpg|jpeg|gif|css|tif|tiff|png|ttf|otf|woff|woff2|ico|pdf|svg|txt|js)$" | unfurl -u format %s://%d%p 2>>"$LOGFILE" | anew -q gf/endpoints.txt
-			gf lfi webs/webs_nuclei.txt | anew -q gf/lfi.txt
-		fi
-		end_func "Results are saved in $domain/gf folder" ${FUNCNAME[0]}
+			# Define an array of GF patterns
+			declare -A gf_patterns=(
+				["xss"]="gf/xss.txt"
+				["ssti"]="gf/ssti.txt"
+				["ssrf"]="gf/ssrf.txt"
+				["sqli"]="gf/sqli.txt"
+				["redirect"]="gf/redirect.txt"
+				["rce"]="gf/rce.txt"
+				["potential"]="gf/potential.txt"
+				["lfi"]="gf/lfi.txt"
+			)
+
+			# Iterate over GF patterns and process each
+			for pattern in "${!gf_patterns[@]}"; do
+				output_file="${gf_patterns[$pattern]}"
+				printf "${yellow}\n[$(date +'%Y-%m-%d %H:%M:%S')] Running: GF Pattern '$pattern'${reset}\n\n"
+				if [[ $pattern == "potential" ]]; then
+					# Special handling for 'potential' pattern
+					gf "$pattern" "webs/webs_nuclei.txt" | cut -d ':' -f3-5 | anew -q "$output_file"
+				elif [[ $pattern == "redirect" && -s "gf/ssrf.txt" ]]; then
+					# Append SSFR results to redirect if ssrf.txt exists
+					gf "$pattern" "webs/webs_nuclei.txt" | anew -q "$output_file"
+				else
+					# General handling for other patterns
+					gf "$pattern" "webs/webs_nuclei.txt" | anew -q "$output_file"
+				fi
+			done
+
+			# Process endpoints extraction
+			if [[ -s ".tmp/url_extract_tmp.txt" ]]; then
+				printf "${yellow}\n[$(date +'%Y-%m-%d %H:%M:%S')] Extracting endpoints...${reset}\n\n"
+				grep -aEiv "\.(eot|jpg|jpeg|gif|css|tif|tiff|png|ttf|otf|woff|woff2|ico|pdf|svg|txt|js)$" ".tmp/url_extract_tmp.txt" |
+					unfurl -u format '%s://%d%p' 2>>"$LOGFILE" | anew -q "gf/endpoints.txt"
+			fi
+
+		else
+			end_func "No webs/webs_nuclei.txt file found, URL_GF check skipped." "${FUNCNAME[0]}"
+			return
+		fi
+
+		end_func "Results are saved in $domain/gf folder" "${FUNCNAME[0]}"
 	else
+		# Handle cases where URL_GF is false or the function has already been processed
 		if [[ $URL_GF == false ]]; then
-			printf "\n${yellow}[$(date +'%Y-%m-%d %H:%M:%S')] ${FUNCNAME[0]} skipped in this mode or defined in reconftw.cfg ${reset}\n"
+			printf "\n${yellow}[$(date +'%Y-%m-%d %H:%M:%S')] ${FUNCNAME[0]} skipped due to configuration settings.${reset}\n"
+		elif [[ $domain =~ ^[0-9]+\.[0-9]+\.[0-9]+\.[0-9]+$ ]]; then
+			# Domain is an IP address; skip the function
+			return
 		else
-			printf "${yellow}[$(date +'%Y-%m-%d %H:%M:%S')] ${FUNCNAME[0]} is already processed, to force executing ${FUNCNAME[0]} delete\n    $called_fn_dir/.${FUNCNAME[0]} ${reset}\n\n"
+			printf "${yellow}[$(date +'%Y-%m-%d %H:%M:%S')] ${FUNCNAME[0]} has already been processed. To force execution, delete:\n    $called_fn_dir/.${FUNCNAME[0]}${reset}\n\n"
 		fi
 	fi
 
@@ -2077,26 +3816,62 @@ function url_gf() {
 
 function url_ext() {
 
-	mkdir -p {.tmp,webs}
-	if { [[ ! -f "$called_fn_dir/.${FUNCNAME[0]}" ]] || [[ $DIFF == true ]]; } && [[ $URL_EXT == true ]]; then
+	# Create necessary directories
+	if ! mkdir -p .tmp webs gf; then
+		printf "%b[!] Failed to create directories.%b\n" "$bred" "$reset"
+		return 1
+	fi
+
+	# Check if the function should run
+	if { [[ ! -f "$called_fn_dir/.${FUNCNAME[0]}" ]] || [[ $DIFF == true ]]; } && [[ $URL_EXT == true ]] &&
+		! [[ $domain =~ ^[0-9]+\.[0-9]+\.[0-9]+\.[0-9]+$ ]]; then
+
 		if [[ -s ".tmp/url_extract_tmp.txt" ]]; then
-			start_func ${FUNCNAME[0]} "Urls by extension"
-			ext=("7z" "achee" "action" "adr" "apk" "arj" "ascx" "asmx" "asp" "aspx" "axd" "backup" "bak" "bat" "bin" "bkf" "bkp" "bok" "cab" "cer" "cfg" "cfm" "cfml" "cgi" "cnf" "conf" "config" "cpl" "crt" "csr" "csv" "dat" "db" "dbf" "deb" "dmg" "dmp" "doc" "docx" "drv" "email" "eml" "emlx" "env" "exe" "gadget" "gz" "html" "ica" "inf" "ini" "iso" "jar" "java" "jhtml" "json" "jsp" "key" "log" "lst" "mai" "mbox" "mbx" "md" "mdb" "msg" "msi" "nsf" "ods" "oft" "old" "ora" "ost" "pac" "passwd" "pcf" "pdf" "pem" "pgp" "php" "php3" "php4" "php5" "phtm" "phtml" "pkg" "pl" "plist" "pst" "pwd" "py" "rar" "rb" "rdp" "reg" "rpm" "rtf" "sav" "sh" "shtm" "shtml" "skr" "sql" "swf" "sys" "tar" "tar.gz" "tmp" "toast" "tpl" "txt" "url" "vcd" "vcf" "wml" "wpd" "wsdl" "wsf" "xls" "xlsm" "xlsx" "xml" "xsd" "yaml" "yml" "z" "zip")
-			#echo "" > webs/url_extract.txt
+			start_func "${FUNCNAME[0]}" "Vulnerable Pattern Search"
+
+			# Define an array of file extensions
+			ext=("7z" "achee" "action" "adr" "apk" "arj" "ascx" "asmx" "asp" "aspx" "axd" "backup" "bak" "bat" "bin" "bkf" "bkp" "bok" "cab" "cer" "cfg" "cfm" "cnf" "conf" "config" "cpl" "crt" "csr" "csv" "dat" "db" "dbf" "deb" "dmg" "dmp" "doc" "docx" "drv" "email" "eml" "emlx" "env" "exe" "gadget" "gz" "html" "ica" "inf" "ini" "iso" "jar" "java" "jhtml" "json" "jsp" "key" "log" "lst" "mai" "mbox" "mbx" "md" "mdb" "msg" "msi" "nsf" "ods" "oft" "old" "ora" "ost" "pac" "passwd" "pcf" "pdf" "pem" "pgp" "php" "php3" "php4" "php5" "phtm" "phtml" "pkg" "pl" "plist" "pst" "pwd" "py" "rar" "rb" "rdp" "reg" "rpm" "rtf" "sav" "sh" "shtm" "shtml" "skr" "sql" "swf" "sys" "tar" "tar.gz" "tmp" "toast" "tpl" "txt" "url" "vcd" "vcf" "wml" "wpd" "wsdl" "wsf" "xls" "xlsm" "xlsx" "xml" "xsd" "yaml" "yml" "z" "zip")
+
+			# Initialize the output file
+			if ! : >webs/urls_by_ext.txt; then
+				printf "%b[!] Failed to initialize webs/urls_by_ext.txt.%b\n" "$bred" "$reset"
+			fi
+
+			# Iterate over extensions and extract matching URLs
 			for t in "${ext[@]}"; do
-				NUMOFLINES=$(cat .tmp/url_extract_tmp.txt | grep -aEi "\.(${t})($|\/|\?)" | sort -u | sed '/^$/d' | wc -l)
-				if [[ ${NUMOFLINES} -gt 0 ]]; then
-					echo -e "\n############################\n + ${t} + \n############################\n" >>webs/urls_by_ext.txt
-					cat .tmp/url_extract_tmp.txt | grep -aEi "\.(${t})($|\/|\?)" >>webs/urls_by_ext.txt
+				printf "${yellow}\n[$(date +'%Y-%m-%d %H:%M:%S')] Processing extension: $t${reset}\n"
+
+				# Extract unique matching URLs
+				matches=$(grep -aEi "\.(${t})($|/|\?)" ".tmp/url_extract_tmp.txt" | sort -u | sed '/^$/d')
+
+				# printf (unlike echo) adds no trailing newline, so zero matches count as 0
+				NUMOFLINES=$(printf "%s" "$matches" | grep -c .)
+
+				if [[ $NUMOFLINES -gt 0 ]]; then
+					printf "\n############################\n + %s + \n############################\n" "$t" >>webs/urls_by_ext.txt
+					echo "$matches" >>webs/urls_by_ext.txt
 				fi
 			done
-			end_func "Results are saved in $domain/webs/urls_by_ext.txt" ${FUNCNAME[0]}
+
+			# Append ssrf.txt to redirect.txt if ssrf.txt exists and is not empty
+			if [[ -s "gf/ssrf.txt" ]]; then
+				cat "gf/ssrf.txt" | anew -q "gf/redirect.txt"
+			fi
+
+			end_func "Results are saved in $domain/webs/urls_by_ext.txt" "${FUNCNAME[0]}"
+
+		else
+			end_func "No .tmp/url_extract_tmp.txt file found, URL_EXT check skipped." "${FUNCNAME[0]}"
 		fi
+
 	else
+		# Handle cases where URL_EXT is false or function already processed
 		if [[ $URL_EXT == false ]]; then
 			printf "\n${yellow}[$(date +'%Y-%m-%d %H:%M:%S')] ${FUNCNAME[0]} skipped in this mode or defined in reconftw.cfg ${reset}\n"
+		elif [[ $domain =~ ^[0-9]+\.[0-9]+\.[0-9]+\.[0-9]+$ ]]; then
+			# Domain is an IP address; skip the function
+			return
 		else
-			printf "${yellow}[$(date +'%Y-%m-%d %H:%M:%S')] ${FUNCNAME[0]} is already processed, to force executing ${FUNCNAME[0]} delete\n    $called_fn_dir/.${FUNCNAME[0]} ${reset}\n\n"
+			printf "${yellow}[$(date +'%Y-%m-%d %H:%M:%S')] ${FUNCNAME[0]} has already been processed. To force execution, delete:\n    $called_fn_dir/.${FUNCNAME[0]}${reset}\n\n"
 		fi
 	fi
 
@@ -2104,60 +3879,135 @@ function url_ext() {
 
 function jschecks() {
 
-	mkdir -p {.tmp,webs,subdomains,js}
+	# Create necessary directories
+	if ! mkdir -p .tmp webs subdomains js; then
+		printf "%b[!] Failed to create directories.%b\n" "$bred" "$reset"
+		return 1
+	fi
+
+	# Check if the function should run
 	if { [[ ! -f "$called_fn_dir/.${FUNCNAME[0]}" ]] || [[ $DIFF == true ]]; } && [[ $JSCHECKS == true ]]; then
-		start_func ${FUNCNAME[0]} "Javascript Scan"
+		start_func "${FUNCNAME[0]}" "JavaScript Scan"
+
 		if [[ -s ".tmp/url_extract_js.txt" ]]; then
 
-			printf "${yellow} Running : Fetching Urls 1/6${reset}\n"
+			printf "%bRunning: Fetching URLs 1/6%s\n" "$yellow" "$reset"
 			if [[ $AXIOM != true ]]; then
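+				# subjs fetches each URL and extracts the JavaScript sources it references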
-				cat .tmp/url_extract_js.txt | subjs -ua "Mozilla/5.0 (X11; Linux x86_64; rv:72.0) Gecko/20100101 Firefox/72.0" -c 40 | grep "$domain" | grep -E '^((http|https):\/\/)?([a-zA-Z0-9]([a-zA-Z0-9\-]*[a-zA-Z0-9])?\.)+[a-zA-Z]{1,}(\/.*)?$' | anew -q .tmp/subjslinks.txt
+				subjs -ua "Mozilla/5.0 (X11; Linux x86_64; rv:72.0) Gecko/20100101 Firefox/72.0" -c 40 <.tmp/url_extract_js.txt |
+					grep "$domain" |
+					grep -E '^((http|https):\/\/)?([a-zA-Z0-9]([a-zA-Z0-9\-]*[a-zA-Z0-9])?\.)+[a-zA-Z]{1,}(\/.*)?$' |
+					anew -q .tmp/subjslinks.txt
 			else
-				axiom-scan .tmp/url_extract_js.txt -m subjs -o .tmp/subjslinks.txt $AXIOM_EXTRA_ARGS 2>>"$LOGFILE" >/dev/null
+				axiom-scan .tmp/url_extract_js.txt -m subjs -o .tmp/subjslinks.txt "$AXIOM_EXTRA_ARGS" 2>>"$LOGFILE" >/dev/null
 			fi
-			[ -s ".tmp/subjslinks.txt" ] && cat .tmp/subjslinks.txt | egrep -iv "\.(eot|jpg|jpeg|gif|css|tif|tiff|png|ttf|otf|woff|woff2|ico|pdf|svg|txt|js)" | anew -q js/nojs_links.txt
-			[ -s ".tmp/subjslinks.txt" ] && cat .tmp/subjslinks.txt | grep -iE "\.js($|\?)" | anew -q .tmp/url_extract_js.txt
-			cat .tmp/url_extract_js.txt | python3 ${tools}/urless/urless/urless.py | anew -q js/url_extract_js.txt 2>>"$LOGFILE" >/dev/null
 
-			printf "${yellow}[$(date +'%Y-%m-%d %H:%M:%S')] Running : Resolving JS Urls 2/6${reset}\n"
+			if [[ -s ".tmp/subjslinks.txt" ]]; then
+				grep -Eiv "\.(eot|jpg|jpeg|gif|css|tif|tiff|png|ttf|otf|woff|woff2|ico|pdf|svg|txt|js)$" .tmp/subjslinks.txt |
+					anew -q js/nojs_links.txt
+				grep -iE "\.js($|\?)" .tmp/subjslinks.txt | anew -q .tmp/url_extract_js.txt
+			else
+				printf "%b[!] No subjslinks found.%b\n" "$yellow" "$reset"
+			fi
+
+			python3 "${tools}/urless/urless/urless.py" <.tmp/url_extract_js.txt |
+				anew -q js/url_extract_js.txt 2>>"$LOGFILE" >/dev/null
+
+			printf "%b[%s] Running: Resolving JS URLs 2/6%s\n" "$yellow" "$(date +'%Y-%m-%d %H:%M:%S')" "$reset"
 			if [[ $AXIOM != true ]]; then
-				[ -s "js/url_extract_js.txt" ] && cat js/url_extract_js.txt | httpx -follow-redirects -random-agent -silent -timeout $HTTPX_TIMEOUT -threads $HTTPX_THREADS -rl $HTTPX_RATELIMIT -status-code -content-type -retries 2 -no-color | grep "[200]" | grep "javascript" | cut -d ' ' -f1 | anew -q js/js_livelinks.txt
+				if [[ -s "js/url_extract_js.txt" ]]; then
+					httpx -follow-redirects -random-agent -silent -timeout "$HTTPX_TIMEOUT" -threads "$HTTPX_THREADS" \
+						-rl "$HTTPX_RATELIMIT" -status-code -content-type -retries 2 -no-color <js/url_extract_js.txt |
+						grep "[200]" | grep "javascript" | cut -d ' ' -f1 | anew -q js/js_livelinks.txt
+				else
+					printf "%b[!] No JavaScript URLs to resolve.%b\n" "$yellow" "$reset"
+				fi
+			else
+				if [[ -s "js/url_extract_js.txt" ]]; then
+					axiom-scan js/url_extract_js.txt -m httpx -follow-host-redirects -H "$HEADER" -status-code \
+						-threads "$HTTPX_THREADS" -rl "$HTTPX_RATELIMIT" -timeout "$HTTPX_TIMEOUT" -silent \
+						-content-type -retries 2 -no-color -o .tmp/js_livelinks.txt "$AXIOM_EXTRA_ARGS" 2>>"$LOGFILE" >/dev/null
+					if [[ -s ".tmp/js_livelinks.txt" ]]; then
+						cat .tmp/js_livelinks.txt | anew .tmp/web_full_info.txt |
+							grep "[200]" | grep "javascript" | cut -d ' ' -f1 | anew -q js/js_livelinks.txt
+					else
+						printf "%b[!] No live JavaScript links found.%b\n" "$yellow" "$reset"
+					fi
+				else
+					printf "%b[!] No JavaScript URLs to resolve.%b\n" "$yellow" "$reset"
+				fi
+			fi
+
+			printf "%bRunning: Extracting JS from sourcemaps 3/6%s\n" "$yellow" "$reset"
+			if ! mkdir -p .tmp/sourcemapper; then
+				printf "%b[!] Failed to create sourcemapper directory.%b\n" "$bred" "$reset"
+			fi
+			if [[ -s "js/js_livelinks.txt" ]]; then
+				interlace -tL js/js_livelinks.txt -threads "$INTERLACE_THREADS" \
+					-c "sourcemapper -jsurl '_target_' -output _output_/_cleantarget_" \
+					-o .tmp/sourcemapper 2>>"$LOGFILE" >/dev/null
+			else
+				printf "%b[!] No live JavaScript links for sourcemapping.%b\n" "$yellow" "$reset"
+			fi
+
+			if [[ -s ".tmp/url_extract_jsmap.txt" ]]; then
+				interlace -tL js/js_livelinks.txt -threads "$INTERLACE_THREADS" \
+					-c "sourcemapper -url '_target_' -output _output_/_cleantarget_" \
+					-o .tmp/sourcemapper 2>>"$LOGFILE" >/dev/null
+			fi
+
+			printf "%b[%s] Running: Gathering endpoints 4/6%s\n" "$yellow" "$(date +'%Y-%m-%d %H:%M:%S')" "$reset"
+			if [[ -s "js/js_livelinks.txt" ]]; then
+				xnLinkFinder -i js/js_livelinks.txt -sf subdomains/subdomains.txt -d "$XNLINKFINDER_DEPTH" \
+					-o .tmp/js_endpoints.txt 2>>"$LOGFILE" >/dev/null
 			else
-				[ -s "js/url_extract_js.txt" ] && axiom-scan js/url_extract_js.txt -m httpx -follow-host-redirects -H \"${HEADER}\" -status-code -threads $HTTPX_THREADS -rl $HTTPX_RATELIMIT -timeout $HTTPX_TIMEOUT -silent -content-type -retries 2 -no-color -o .tmp/js_livelinks.txt $AXIOM_EXTRA_ARGS 2>>"$LOGFILE" >/dev/null
-				[ -s ".tmp/js_livelinks.txt" ] && cat .tmp/js_livelinks.txt | anew .tmp/web_full_info.txt | grep "[200]" | grep "javascript" | cut -d ' ' -f1 | anew -q js/js_livelinks.txt
+				printf "%b[!] No live JavaScript links for endpoint extraction.%b\n" "$yellow" "$reset"
 			fi
 
-			printf "${yellow} Running : Extracting JS from sourcemaps 3/6${reset}\n"
-			mkdir -p .tmp/sourcemapper
-			[ -s "js/js_livelinks.txt" ] && interlace -tL js/js_livelinks.txt -threads ${INTERLACE_THREADS} -c "sourcemapper -jsurl '_target_' -output _output_/_cleantarget_" -o .tmp/sourcemapper 2>>"$LOGFILE" >/dev/null
-			[ -s ".tmp/url_extract_jsmap.txt" ] && interlace -tL js/js_livelinks.txt -threads ${INTERLACE_THREADS} -c "sourcemapper -url '_target_' -output _output_/_cleantarget_" -o .tmp/sourcemapper 2>>"$LOGFILE" >/dev/null
+			find .tmp/sourcemapper/ \( -name "*.js" -o -name "*.ts" \) -type f |
+				jsluice urls | jq -r .url | anew -q .tmp/js_endpoints.txt
 
-			printf "${yellow}[$(date +'%Y-%m-%d %H:%M:%S')] Running : Gathering endpoints 4/6${reset}\n"
-			[ -s "js/js_livelinks.txt" ] && xnLinkFinder -i js/js_livelinks.txt -sf subdomains/subdomains.txt -d $XNLINKFINDER_DEPTH -o .tmp/js_endpoints.txt 2>>"$LOGFILE" >/dev/null
-			find .tmp/sourcemapper/ \( -name "*.js" -o -name "*.ts" \) -type f | jsluice urls | jq -r .url | anew -q .tmp/js_endpoints.txt
-			[ -s "parameters.txt" ] && rm -f parameters.txt 2>>"$LOGFILE" >/dev/null
 			if [[ -s ".tmp/js_endpoints.txt" ]]; then
 				sed -i '/^\//!d' .tmp/js_endpoints.txt
 				cat .tmp/js_endpoints.txt | anew -q js/js_endpoints.txt
+			else
+				printf "%b[!] No JavaScript endpoints found.%b\n" "$yellow" "$reset"
+			fi
+
+			printf "%b[%s] Running: Gathering secrets 5/6%s\n" "$yellow" "$(date +'%Y-%m-%d %H:%M:%S')" "$reset"
+			if [[ -s "js/js_livelinks.txt" ]]; then
+				axiom-scan js/js_livelinks.txt -m mantra -ua "$HEADER" -s -o js/js_secrets.txt "$AXIOM_EXTRA_ARGS" &>/dev/null
+				if [[ -s "js/js_secrets.txt" ]]; then
+					trufflehog filesystem js/js_secrets.txt -j 2>/dev/null |
+						jq -c | anew -q js/js_secrets_trufflehog.txt
+					trufflehog filesystem .tmp/sourcemapper/ -j 2>/dev/null |
+						jq -c | anew -q js/js_secrets_trufflehog.txt
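+					# Strip ANSI color escape sequences from mantra's output so the results grep cleanly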
+					sed -r "s/\x1B\[([0-9]{1,3}(;[0-9]{1,2};?)?)?[mGK]//g" -i js/js_secrets.txt
+				else
+					printf "%b[!] No secrets found in JavaScript files.%b\n" "$yellow" "$reset"
+				fi
+			else
+				printf "%b[!] No live JavaScript links for secret gathering.%b\n" "$yellow" "$reset"
 			fi
 
-			printf "${yellow}[$(date +'%Y-%m-%d %H:%M:%S')] Running : Gathering secrets 5/6${reset}\n"
-			[ -s "js/js_livelinks.txt" ] && axiom-scan js/js_livelinks.txt -m mantra -ua \"${HEADER}\" -s -o js/js_secrets.txt $AXIOM_EXTRA_ARGS &>/dev/null
-			[ -s "js/js_secrets.txt" ] && trufflehog filesystem js/js_secrets.txt -j 2>/dev/null | jq -c | anew -q js/js_secrets_trufflehog.txt
-			[ -s "js/js_secrets.txt" ] && trufflehog filesystem .tmp/sourcemapper/ -j 2>/dev/null | jq -c | anew -q js/js_secrets_trufflehog.txt
-			[ -s "js/js_secrets.txt" ] && sed -r "s/\x1B\[([0-9]{1,3}(;[0-9]{1,2};?)?)?[mGK]//g" -i js/js_secrets.txt
+			printf "%b[%s] Running: Building wordlist 6/6%s\n" "$yellow" "$(date +'%Y-%m-%d %H:%M:%S')" "$reset"
+			if [[ -s "js/js_livelinks.txt" ]]; then
+				interlace -tL js/js_livelinks.txt -threads "$INTERLACE_THREADS" \
+					-c "python3 ${tools}/getjswords.py '_target_' | anew -q webs/dict_words.txt" 2>>"$LOGFILE" >/dev/null
+			else
+				printf "%b[!] No live JavaScript links for wordlist building.%b\n" "$yellow" "$reset"
+			fi
 
-			printf "${yellow}[$(date +'%Y-%m-%d %H:%M:%S')] Running : Building wordlist 6/6${reset}\n"
-			[ -s "js/js_livelinks.txt" ] && interlace -tL js/js_livelinks.txt -threads ${INTERLACE_THREADS} -c "python3 ${tools}/getjswords.py '_target_' | anew -q webs/dict_words.txt" 2>>"$LOGFILE" >/dev/null
-			end_func "Results are saved in $domain/js folder" ${FUNCNAME[0]}
+			end_func "Results are saved in $domain/js folder" "${FUNCNAME[0]}"
 		else
-			end_func "No JS urls found for $domain, function skipped" ${FUNCNAME[0]}
+			end_func "No JS URLs found for $domain, function skipped" "${FUNCNAME[0]}"
 		fi
 	else
 		if [[ $JSCHECKS == false ]]; then
-			printf "\n${yellow}[$(date +'%Y-%m-%d %H:%M:%S')] ${FUNCNAME[0]} skipped in this mode or defined in reconftw.cfg ${reset}\n"
+			printf "\n%b[%s] %s skipped due to mode or defined in reconftw.cfg.%b\n" \
+				"$yellow" "$(date +'%Y-%m-%d %H:%M:%S')" "${FUNCNAME[0]}" "$reset"
 		else
-			printf "${yellow}[$(date +'%Y-%m-%d %H:%M:%S')] ${FUNCNAME[0]} is already processed, to force executing ${FUNCNAME[0]} delete\n    $called_fn_dir/.${FUNCNAME[0]} ${reset}\n\n"
+			printf "%b[%s] %s has already been processed. To force execution, delete:\n    %s/.%s%b\n\n" \
+				"$yellow" "$(date +'%Y-%m-%d %H:%M:%S')" "${FUNCNAME[0]}" "$called_fn_dir" ".${FUNCNAME[0]}" "$reset"
 		fi
 	fi
 
@@ -2165,26 +4015,72 @@ function jschecks() {
 
 function wordlist_gen() {
 
-	mkdir -p {.tmp,webs}
-	if { [[ ! -f "$called_fn_dir/.${FUNCNAME[0]}" ]] || [[ $DIFF == true ]]; } && [[ $WORDLIST == true ]]; then
-		start_func ${FUNCNAME[0]} "Wordlist generation"
+	# Create necessary directories
+	if ! mkdir -p .tmp webs; then
+		printf "%b[!] Failed to create directories.%b\n" "$bred" "$reset"
+		return 1
+	fi
+
+	# Check if the function should run
+	if { [[ ! -f "$called_fn_dir/.${FUNCNAME[0]}" ]] || [[ $DIFF == true ]]; } && [[ $WORDLIST == true ]] &&
+		! [[ $domain =~ ^[0-9]+\.[0-9]+\.[0-9]+\.[0-9]+$ ]]; then
+
+		start_func "${FUNCNAME[0]}" "Wordlist Generation"
+
+		# Ensure url_extract_tmp.txt exists and is not empty
+		if [[ -s ".tmp/url_extract_tmp.txt" ]]; then
+			# Define patterns for keys and values
+			patterns=("keys" "values")
+
+			for pattern in "${patterns[@]}"; do
+				output_file="webs/dict_${pattern}.txt"
+				printf "${yellow}\n[$(date +'%Y-%m-%d %H:%M:%S')] Extracting ${pattern}...${reset}\n"
+
+				if [[ $pattern == "keys" || $pattern == "values" ]]; then
+					unfurl -u "$pattern" ".tmp/url_extract_tmp.txt" 2>>"$LOGFILE" |
+						sed 's/[][]//g' | sed 's/[#]//g' | sed 's/[}{]//g' |
+						anew -q "$output_file"
+				fi
+			done
+
+			# Extract words by removing punctuation
+			printf "${yellow}\n[$(date +'%Y-%m-%d %H:%M:%S')] Extracting words...${reset}\n"
+			tr "[:punct:]" "\n" <".tmp/url_extract_tmp.txt" | anew -q "webs/dict_words.txt"
+		else
+			printf "%b[!] .tmp/url_extract_tmp.txt does not exist or is empty.%b\n" "$yellow" "$reset"
+		fi
+
+		# Process js_endpoints.txt if it exists and is not empty
+		if [[ -s ".tmp/js_endpoints.txt" ]]; then
+			printf "${yellow}\n[$(date +'%Y-%m-%d %H:%M:%S')] Processing js_endpoints.txt...${reset}\n"
+			unfurl -u format '%s://%d%p' ".tmp/js_endpoints.txt" 2>>"$LOGFILE" |
+				anew -q "webs/all_paths.txt"
+		fi
+
+		# Process url_extract_tmp.txt if it exists and is not empty
 		if [[ -s ".tmp/url_extract_tmp.txt" ]]; then
-			cat .tmp/url_extract_tmp.txt | unfurl -u keys 2>>"$LOGFILE" | sed 's/[][]//g' | sed 's/[#]//g' | sed 's/[}{]//g' | anew -q webs/dict_params.txt
-			cat .tmp/url_extract_tmp.txt | unfurl -u values 2>>"$LOGFILE" | sed 's/[][]//g' | sed 's/[#]//g' | sed 's/[}{]//g' | anew -q webs/dict_values.txt
-			cat .tmp/url_extract_tmp.txt | tr "[:punct:]" "\n" | anew -q webs/dict_words.txt
+			printf "${yellow}\n[$(date +'%Y-%m-%d %H:%M:%S')] Processing url_extract_tmp.txt...${reset}\n"
+			unfurl -u format '%s://%d%p' ".tmp/url_extract_tmp.txt" 2>>"$LOGFILE" |
+				anew -q "webs/all_paths.txt"
 		fi
-		[ -s ".tmp/js_endpoints.txt" ] && cat .tmp/js_endpoints.txt | unfurl -u format %s://%d%p 2>>"$LOGFILE" | anew -q webs/all_paths.txt
-		[ -s ".tmp/url_extract_tmp.txt" ] && cat .tmp/url_extract_tmp.txt | unfurl -u format %s://%d%p 2>>"$LOGFILE" | anew -q webs/all_paths.txt
-		end_func "Results are saved in $domain/webs/dict_[words|paths].txt" ${FUNCNAME[0]}
-		if [[ $PROXY == true ]] && [[ -n $proxy_url ]] && [[ $(cat webs/all_paths.txt | wc -l) -le $DEEP_LIMIT2 ]]; then
-			notification "Sending urls to proxy" info
-			ffuf -mc all -w webs/all_paths.txt -u FUZZ -replay-proxy $proxy_url 2>>"$LOGFILE" >/dev/null
+
+		end_func "Results are saved in $domain/webs/dict_[words|paths].txt" "${FUNCNAME[0]}"
+
+		# Handle proxy if conditions are met
+		if [[ $PROXY == true ]] && [[ -n $proxy_url ]] && [[ "$(wc -l <webs/all_paths.txt)" -le $DEEP_LIMIT2 ]]; then
+			notification "Sending URLs to proxy" info
+			ffuf -mc all -w "webs/all_paths.txt" -u "FUZZ" -replay-proxy "$proxy_url" 2>>"$LOGFILE" >/dev/null
 		fi
+
 	else
+		# Handle cases where WORDLIST is false or function already processed
 		if [[ $WORDLIST == false ]]; then
 			printf "\n${yellow}[$(date +'%Y-%m-%d %H:%M:%S')] ${FUNCNAME[0]} skipped in this mode or defined in reconftw.cfg ${reset}\n"
+		elif [[ $domain =~ ^[0-9]+\.[0-9]+\.[0-9]+\.[0-9]+$ ]]; then
+			# Domain is an IP address; skip the function
+			return
 		else
-			printf "${yellow}[$(date +'%Y-%m-%d %H:%M:%S')] ${FUNCNAME[0]} is already processed, to force executing ${FUNCNAME[0]} delete\n    $called_fn_dir/.${FUNCNAME[0]} ${reset}\n\n"
+			printf "${yellow}[$(date +'%Y-%m-%d %H:%M:%S')] ${FUNCNAME[0]} has already been processed. To force execution, delete:\n    $called_fn_dir/.${FUNCNAME[0]}${reset}\n\n"
 		fi
 	fi
 
@@ -2192,19 +4088,50 @@ function wordlist_gen() {
 
 function wordlist_gen_roboxtractor() {
 
-	mkdir -p webs
-	if { [[ ! -f "$called_fn_dir/.${FUNCNAME[0]}" ]] || [[ $DIFF == true ]]; } && [[ $ROBOTSWORDLIST == true ]]; then
-		start_func ${FUNCNAME[0]} "Robots wordlist generation"
-		[ ! -s "webs/webs_all.txt" ] && cat webs/webs.txt webs/webs_uncommon_ports.txt 2>/dev/null | anew -q webs/webs_all.txt
+	# Create necessary directories
+	if ! mkdir -p .tmp webs; then
+		printf "%b[!] Failed to create directories.%b\n" "$bred" "$reset"
+		return 1
+	fi
+
+	# Check if the function should run
+	if { [[ ! -f "$called_fn_dir/.${FUNCNAME[0]}" ]] || [[ $DIFF == true ]]; } && [[ $ROBOTSWORDLIST == true ]] &&
+		! [[ $domain =~ ^[0-9]+\.[0-9]+\.[0-9]+\.[0-9]+$ ]]; then
+
+		start_func "${FUNCNAME[0]}" "Robots Wordlist Generation"
+
+		# Combine webs.txt and webs_uncommon_ports.txt into webs_all.txt if it doesn't exist
+		if [[ ! -s "webs/webs_all.txt" ]]; then
+			cat webs/webs.txt webs/webs_uncommon_ports.txt 2>/dev/null | anew -q "webs/webs_all.txt"
+		fi
+
+		# Proceed only if webs_all.txt exists and is non-empty
 		if [[ -s "webs/webs_all.txt" ]]; then
-			cat webs/webs_all.txt | roboxtractor -m 1 -wb 2>/dev/null | anew -q webs/robots_wordlist.txt
+			# Extract URLs using roboxtractor and append unique entries to robots_wordlist.txt
+			printf "${yellow}\n[$(date +'%Y-%m-%d %H:%M:%S')] Running: Roboxtractor for Robots Wordlist${reset}\n\n"
+			roboxtractor -m 1 -wb <"webs/webs_all.txt" 2>>"$LOGFILE" | anew -q "webs/robots_wordlist.txt"
+		else
+			end_func "No webs/webs_all.txt file found, Robots Wordlist generation skipped." "${FUNCNAME[0]}"
+			return
+		fi
+
+		end_func "Results are saved in $domain/webs/robots_wordlist.txt" "${FUNCNAME[0]}"
+
+		# Handle Proxy if conditions are met
+		if [[ $PROXY == true ]] && [[ -n $proxy_url ]] && [[ "$(wc -l <"webs/robots_wordlist.txt")" -le $DEEP_LIMIT2 ]]; then
+			notification "Sending URLs to proxy" info
+			ffuf -mc all -w "webs/robots_wordlist.txt" -u "FUZZ" -replay-proxy "$proxy_url" 2>>"$LOGFILE" >/dev/null
 		fi
-		end_func "Results are saved in $domain/webs/robots_wordlist.txt" ${FUNCNAME[0]}
+
 	else
+		# Handle cases where ROBOTSWORDLIST is false or function already processed
 		if [[ $ROBOTSWORDLIST == false ]]; then
-			printf "\n${yellow}[$(date +'%Y-%m-%d %H:%M:%S')] ${FUNCNAME[0]} skipped in this mode or defined in reconftw.cfg ${reset}\n"
+			printf "\n${yellow}[$(date +'%Y-%m-%d %H:%M:%S')] ${FUNCNAME[0]} skipped due to configuration settings.${reset}\n"
+		elif [[ $domain =~ ^[0-9]+\.[0-9]+\.[0-9]+\.[0-9]+$ ]]; then
+			# Domain is an IP address; skip the function
+			return
 		else
-			printf "${yellow}[$(date +'%Y-%m-%d %H:%M:%S')] ${FUNCNAME[0]} is already processed, to force executing ${FUNCNAME[0]} delete\n    $called_fn_dir/.${FUNCNAME[0]} ${reset}\n\n"
+			printf "${yellow}[$(date +'%Y-%m-%d %H:%M:%S')] ${FUNCNAME[0]} has already been processed. To force execution, delete:\n    $called_fn_dir/.${FUNCNAME[0]}${reset}\n\n"
 		fi
 	fi
 
@@ -2212,17 +4139,38 @@ function wordlist_gen_roboxtractor() {
 
 function password_dict() {
 
-	mkdir -p webs
-	if { [[ ! -f "$called_fn_dir/.${FUNCNAME[0]}" ]] || [[ $DIFF == true ]]; } && [[ $PASSWORD_DICT == true ]]; then
-		start_func ${FUNCNAME[0]} "Password dictionary generation"
-		word=${domain%%.*}
-		python3 ${tools}/pydictor/pydictor.py -extend $word --leet 0 1 2 11 21 --len ${PASSWORD_MIN_LENGTH} ${PASSWORD_MAX_LENGTH} -o webs/password_dict.txt 2>>"$LOGFILE" >/dev/null
-		end_func "Results are saved in $domain/webs/password_dict.txt" ${FUNCNAME[0]}
+	# Create necessary directories
+	if ! mkdir -p "$dir/webs"; then
+		printf "%b[!] Failed to create directories.%b\n" "$bred" "$reset"
+		return 1
+	fi
+
+	# Check if the function should run
+	if { [[ ! -f "$called_fn_dir/.${FUNCNAME[0]}" ]] || [[ $DIFF == true ]]; } && [[ $PASSWORD_DICT == true ]] &&
+		! [[ $domain =~ ^[0-9]+\.[0-9]+\.[0-9]+\.[0-9]+$ ]]; then
+
+		start_func "${FUNCNAME[0]}" "Password Dictionary Generation"
+
+		# Extract the first part of the domain
+		word="${domain%%.*}"
+
+		# Run pydictor.py with specified parameters
+		printf "${yellow}\n[$(date +'%Y-%m-%d %H:%M:%S')] Running: pydictor.py for Password Dictionary Generation${reset}\n\n"
+		python3 "${tools}/pydictor/pydictor.py" -extend "$word" --leet 0 1 2 11 21 --len "$PASSWORD_MIN_LENGTH" "$PASSWORD_MAX_LENGTH" -o "$dir/webs/password_dict.txt" 2>>"$LOGFILE" >/dev/null
+		end_func "Results are saved in $domain/webs/password_dict.txt" "${FUNCNAME[0]}"
+
+		# Optionally, create a marker file to indicate the function has been processed
+		touch "$called_fn_dir/.${FUNCNAME[0]}"
+
 	else
+		# Handle cases where PASSWORD_DICT is false or function already processed
 		if [[ $PASSWORD_DICT == false ]]; then
-			printf "\n${yellow}[$(date +'%Y-%m-%d %H:%M:%S')] ${FUNCNAME[0]} skipped in this mode or defined in reconftw.cfg ${reset}\n"
+			printf "\n${yellow}[$(date +'%Y-%m-%d %H:%M:%S')] ${FUNCNAME[0]} skipped due to configuration settings.${reset}\n"
+		elif [[ $domain =~ ^[0-9]+\.[0-9]+\.[0-9]+\.[0-9]+$ ]]; then
+			# Domain is an IP address; skip the function
+			return
 		else
-			printf "${yellow}[$(date +'%Y-%m-%d %H:%M:%S')] ${FUNCNAME[0]} is already processed, to force executing ${FUNCNAME[0]} delete\n    $called_fn_dir/.${FUNCNAME[0]} ${reset}\n\n"
+			printf "${yellow}[$(date +'%Y-%m-%d %H:%M:%S')] ${FUNCNAME[0]} has already been processed. To force execution, delete:\n    $called_fn_dir/.${FUNCNAME[0]}${reset}\n\n"
 		fi
 	fi
 
@@ -2234,98 +4182,162 @@ function password_dict() {
 
 function brokenLinks() {
 
-	mkdir -p {.tmp,webs,vulns}
-	if { [[ ! -f "$called_fn_dir/.${FUNCNAME[0]}" ]] || [[ $DIFF == true ]]; } && [[ $BROKENLINKS == true ]]; then
-		start_func ${FUNCNAME[0]} "Broken links checks"
-		[ ! -s "webs/webs_all.txt" ] && cat webs/webs.txt webs/webs_uncommon_ports.txt 2>/dev/null | anew -q webs/webs_all.txt
-		if [[ $AXIOM != true ]]; then
-			if [[ ! -s ".tmp/katana.txt" ]]; then
-				if [[ $DEEP == true ]]; then
-					[ -s "webs/webs_all.txt" ] && katana -silent -list webs/webs_all.txt -jc -kf all -c $KATANA_THREADS -d 3 -o .tmp/katana.txt 2>>"$LOGFILE" >/dev/null
-				else
-					[ -s "webs/webs_all.txt" ] && katana -silent -list webs/webs_all.txt -jc -kf all -c $KATANA_THREADS -d 2 -o .tmp/katana.txt 2>>"$LOGFILE" >/dev/null
+	# Create necessary directories
+	if ! mkdir -p .tmp webs vulns; then
+		printf "%b[!] Failed to create directories.%b\n" "$bred" "$reset"
+		return 1
+	fi
+
+	# Check if the function should run
+	if { [[ ! -f "$called_fn_dir/.${FUNCNAME[0]}" ]] || [[ $DIFF == true ]]; } && [[ $BROKENLINKS == true ]] &&
+		! [[ $domain =~ ^[0-9]+\.[0-9]+\.[0-9]+\.[0-9]+$ ]]; then
+
+		start_func "${FUNCNAME[0]}" "Broken Links Checks"
+
+		# Combine webs.txt and webs_uncommon_ports.txt into webs_all.txt if it doesn't exist
+		if [[ ! -s "webs/webs_all.txt" ]]; then
+			cat webs/webs.txt webs/webs_uncommon_ports.txt 2>/dev/null | anew -q "webs/webs_all.txt"
+		fi
+
+		# Check if webs_all.txt exists and is not empty
+		if [[ -s "webs/webs_all.txt" ]]; then
+			if [[ $AXIOM != true ]]; then
+				# Use katana for scanning
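+				# katana crawls each target (-jc also parses JavaScript, -kf all fetches
+				# known files such as robots.txt/sitemap.xml) to collect candidate links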
+				if [[ ! -s ".tmp/katana.txt" ]]; then
+					if [[ $DEEP == true ]]; then
+						katana -silent -list "webs/webs_all.txt" -jc -kf all -c "$KATANA_THREADS" -d 3 -o ".tmp/katana.txt" 2>>"$LOGFILE" >/dev/null
+					else
+						katana -silent -list "webs/webs_all.txt" -jc -kf all -c "$KATANA_THREADS" -d 2 -o ".tmp/katana.txt" 2>>"$LOGFILE" >/dev/null
+					fi
 				fi
-			fi
-			[ -s ".tmp/katana.txt" ] && sed -i '/^.\{2048\}./d' .tmp/katana.txt
-		else
-			if [[ ! -s ".tmp/katana.txt" ]]; then
-				if [[ $DEEP == true ]]; then
-					[ -s "webs/webs_all.txt" ] && axiom-scan webs/webs_all.txt -m katana -jc -kf all -d 3 -o .tmp/katana.txt $AXIOM_EXTRA_ARGS 2>>"$LOGFILE" >/dev/null
-				else
-					[ -s "webs/webs_all.txt" ] && axiom-scan webs/webs_all.txt -m katana -jc -kf all -d 2 -o .tmp/katana.txt $AXIOM_EXTRA_ARGS 2>>"$LOGFILE" >/dev/null
+				# Remove lines longer than 2048 characters
+				if [[ -s ".tmp/katana.txt" ]]; then
+					sed -i '/^.\{2048\}./d' ".tmp/katana.txt"
+				fi
+			else
+				# Use axiom-scan for scanning
+				if [[ ! -s ".tmp/katana.txt" ]]; then
+					if [[ $DEEP == true ]]; then
+						axiom-scan "webs/webs_all.txt" -m katana -jc -kf all -d 3 -o ".tmp/katana.txt" $AXIOM_EXTRA_ARGS 2>>"$LOGFILE" >/dev/null
+					else
+						axiom-scan "webs/webs_all.txt" -m katana -jc -kf all -d 2 -o ".tmp/katana.txt" $AXIOM_EXTRA_ARGS 2>>"$LOGFILE" >/dev/null
+					fi
+					# Remove lines longer than 2048 characters
+					if [[ -s ".tmp/katana.txt" ]]; then
+						sed -i '/^.\{2048\}./d' ".tmp/katana.txt"
+					fi
 				fi
-				[ -s ".tmp/katana.txt" ] && sed -i '/^.\{2048\}./d' .tmp/katana.txt
 			fi
+
+			# Process katana.txt to find broken links
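+			# with -status-code httpx prints "URL [code]"; grep "\[4" keeps 4xx
+			# responses and cut isolates the URL before deduplication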
+			if [[ -s ".tmp/katana.txt" ]]; then
+				sort -u ".tmp/katana.txt" | httpx -follow-redirects -random-agent -status-code -threads "$HTTPX_THREADS" -rl "$HTTPX_RATELIMIT" -timeout "$HTTPX_TIMEOUT" -silent -retries 2 -no-color 2>>"$LOGFILE" |
+					grep "\[4" | cut -d ' ' -f1 | anew -q ".tmp/brokenLinks_total.txt"
+			fi
+
+			# Update brokenLinks.txt with unique entries
+			if [[ -s ".tmp/brokenLinks_total.txt" ]]; then
+				NUMOFLINES=$(anew "vulns/brokenLinks.txt" <".tmp/brokenLinks_total.txt" | sed '/^$/d' | wc -l)
+				notification "${NUMOFLINES} new broken links found" info
+			fi
+
+			end_func "Results are saved in vulns/brokenLinks.txt" "${FUNCNAME[0]}"
+		else
+			end_func "No webs/webs_all.txt file found, Broken Links check skipped." "${FUNCNAME[0]}"
+			return
 		fi
-		[ -s ".tmp/katana.txt" ] && cat .tmp/katana.txt | sort -u | httpx -follow-redirects -random-agent -status-code -threads $HTTPX_THREADS -rl $HTTPX_RATELIMIT -timeout $HTTPX_TIMEOUT -silent -retries 2 -no-color | grep "\[4" | cut -d ' ' -f1 | anew -q .tmp/brokenLinks_total.txt
-		NUMOFLINES=$(cat .tmp/brokenLinks_total.txt 2>>"$LOGFILE" | anew vulns/brokenLinks.txt | sed '/^$/d' | wc -l)
-		notification "${NUMOFLINES} new broken links found" info
-		end_func "Results are saved in vulns/brokenLinks.txt" ${FUNCNAME[0]}
 	else
+		# Handle cases where BROKENLINKS is false or function already processed
 		if [[ $BROKENLINKS == false ]]; then
-			printf "\n${yellow}[$(date +'%Y-%m-%d %H:%M:%S')] ${FUNCNAME[0]} skipped in this mode or defined in reconftw.cfg ${reset}\n"
+			printf "\n${yellow}[$(date +'%Y-%m-%d %H:%M:%S')] ${FUNCNAME[0]} skipped due to configuration settings.${reset}\n"
+		elif [[ $domain =~ ^[0-9]+\.[0-9]+\.[0-9]+\.[0-9]+$ ]]; then
+			# Domain is an IP address; skip the function
+			return
 		else
-			printf "${yellow}[$(date +'%Y-%m-%d %H:%M:%S')] ${FUNCNAME[0]} is already processed, to force executing ${FUNCNAME[0]} delete\n    $called_fn_dir/.${FUNCNAME[0]} ${reset}\n\n"
+			printf "${yellow}[$(date +'%Y-%m-%d %H:%M:%S')] ${FUNCNAME[0]} has already been processed. To force execution, delete:\n    $called_fn_dir/.${FUNCNAME[0]}${reset}\n\n"
 		fi
 	fi
 
-}
+}
+
+function xss() {
+
+	# Create necessary directories
+	if ! mkdir -p .tmp webs vulns; then
+		printf "%b[!] Failed to create directories.%b\n" "$bred" "$reset"
+		return 1
+	fi
+
+	# Check if the function should run
+	if { [[ ! -f "$called_fn_dir/.${FUNCNAME[0]}" ]] || [[ $DIFF == true ]]; } && [[ $XSS == true ]] && [[ -s "gf/xss.txt" ]] &&
+		! [[ $domain =~ ^[0-9]+\.[0-9]+\.[0-9]+\.[0-9]+$ ]]; then
 
-function xss() {
+		start_func "${FUNCNAME[0]}" "XSS Analysis"
+
+		# Process gf/xss.txt with qsreplace and Gxss
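+		# qsreplace FUZZ marks every query-string value, Gxss keeps only parameters
+		# whose probe is reflected in the response, and the second qsreplace re-marks
+		# them as FUZZ so Dalfox knows which injection points to attack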
+		if [[ -s "gf/xss.txt" ]]; then
+			printf "${yellow}\n[$(date +'%Y-%m-%d %H:%M:%S')] Running: XSS Payload Generation${reset}\n\n"
+			qsreplace FUZZ <"gf/xss.txt" | sed '/FUZZ/!d' | Gxss -c 100 -p Xss | qsreplace FUZZ | sed '/FUZZ/!d' |
+				anew -q ".tmp/xss_reflected.txt"
+		fi
 
-	mkdir -p {.tmp,webs,vulns}
-	if { [[ ! -f "$called_fn_dir/.${FUNCNAME[0]}" ]] || [[ $DIFF == true ]]; } && [[ $XSS == true ]] && [[ -s "gf/xss.txt" ]]; then
-		start_func ${FUNCNAME[0]} "XSS Analysis"
-		[ -s "gf/xss.txt" ] && cat gf/xss.txt | qsreplace FUZZ | sed '/FUZZ/!d' | Gxss -c 100 -p Xss | qsreplace FUZZ | sed '/FUZZ/!d' | anew -q .tmp/xss_reflected.txt
+		# Determine whether to use Axiom or Katana for scanning
 		if [[ $AXIOM != true ]]; then
+			# Build Dalfox options based on scan depth
 			if [[ $DEEP == true ]]; then
-				if [[ -n $XSS_SERVER ]]; then
-					[ -s ".tmp/xss_reflected.txt" ] && cat .tmp/xss_reflected.txt | dalfox pipe --silence --no-color --no-spinner --only-poc r --ignore-return 302,404,403 --skip-bav -b ${XSS_SERVER} -w $DALFOX_THREADS 2>>"$LOGFILE" | anew -q vulns/xss.txt
-				else
-					printf "${yellow}\n[$(date +'%Y-%m-%d %H:%M:%S')] No XSS_SERVER defined, blind xss skipped\n\n"
-					[ -s ".tmp/xss_reflected.txt" ] && cat .tmp/xss_reflected.txt | dalfox pipe --silence --no-color --no-spinner --only-poc r --ignore-return 302,404,403 --skip-bav -w $DALFOX_THREADS 2>>"$LOGFILE" | anew -q vulns/xss.txt
-				fi
+				DALFOX_EXTRA="" # deep scan: keep Dalfox mining enabled
 			else
-				if [[ $(cat .tmp/xss_reflected.txt | wc -l) -le $DEEP_LIMIT ]]; then
-					if [[ -n $XSS_SERVER ]]; then
-						cat .tmp/xss_reflected.txt | dalfox pipe --silence --no-color --no-spinner --skip-bav --skip-mining-dom --skip-mining-dict --only-poc r --ignore-return 302,404,403 -b ${XSS_SERVER} -w $DALFOX_THREADS 2>>"$LOGFILE" | anew -q vulns/xss.txt
-					else
-						printf "${yellow}\n[$(date +'%Y-%m-%d %H:%M:%S')] No XSS_SERVER defined, blind xss skipped\n\n"
-						cat .tmp/xss_reflected.txt | dalfox pipe --silence --no-color --no-spinner --skip-bav --skip-mining-dom --skip-mining-dict --only-poc r --ignore-return 302,404,403 -w $DALFOX_THREADS 2>>"$LOGFILE" | anew -q vulns/xss.txt
-					fi
-				else
-					printf "${bred}[$(date +'%Y-%m-%d %H:%M:%S')] Skipping XSS: Too many URLs to test, try with --deep flag${reset}\n"
-				fi
+				DALFOX_EXTRA="--skip-mining-dom --skip-mining-dict" # lighter scan outside --deep
+			fi
+
+			if [[ -n $XSS_SERVER ]]; then
+				OPTIONS="-b ${XSS_SERVER} -w $DALFOX_THREADS"
+			else
+				printf "${yellow}\n[$(date +'%Y-%m-%d %H:%M:%S')] No XSS_SERVER defined, blind XSS skipped\n\n"
+				OPTIONS="-w $DALFOX_THREADS"
+			fi
+
+			# Run Dalfox on the reflected XSS candidates
+			if [[ -s ".tmp/xss_reflected.txt" ]]; then
+				printf "${yellow}\n[$(date +'%Y-%m-%d %H:%M:%S')] Running: Dalfox${reset}\n\n"
+				dalfox pipe --silence --no-color --no-spinner --only-poc r --ignore-return 302,404,403 --skip-bav $DALFOX_EXTRA $OPTIONS <".tmp/xss_reflected.txt" 2>>"$LOGFILE" |
+					anew -q "vulns/xss.txt"
 			fi
 		else
+			# Using Axiom
 			if [[ $DEEP == true ]]; then
-				if [[ -n $XSS_SERVER ]]; then
-					[ -s ".tmp/xss_reflected.txt" ] && axiom-scan .tmp/xss_reflected.txt -m dalfox --skip-bav -b ${XSS_SERVER} -w $DALFOX_THREADS -o vulns/xss.txt $AXIOM_EXTRA_ARGS 2>>"$LOGFILE" >/dev/null
-				else
-					printf "${yellow}\n[$(date +'%Y-%m-%d %H:%M:%S')] No XSS_SERVER defined, blind xss skipped\n\n"
-					[ -s ".tmp/xss_reflected.txt" ] && axiom-scan .tmp/xss_reflected.txt -m dalfox --skip-bav -w $DALFOX_THREADS -o vulns/xss.txt $AXIOM_EXTRA_ARGS 2>>"$LOGFILE" >/dev/null
-				fi
+				DALFOX_EXTRA="" # deep scan: keep Dalfox mining enabled
 			else
-				if [[ $(cat .tmp/xss_reflected.txt | wc -l) -le $DEEP_LIMIT ]]; then
-					if [[ -n $XSS_SERVER ]]; then
-						axiom-scan .tmp/xss_reflected.txt -m dalfox --skip-bav --skip-grepping --skip-mining-all --skip-mining-dict -b ${XSS_SERVER} -w $DALFOX_THREADS -o vulns/xss.txt $AXIOM_EXTRA_ARGS 2>>"$LOGFILE" >/dev/null
-					else
-						printf "${yellow}\n[$(date +'%Y-%m-%d %H:%M:%S')] No XSS_SERVER defined, blind xss skipped\n\n"
-						axiom-scan .tmp/xss_reflected.txt -m dalfox --skip-bav --skip-grepping --skip-mining-all --skip-mining-dict -w $DALFOX_THREADS -o vulns/xss.txt $AXIOM_EXTRA_ARGS 2>>"$LOGFILE" >/dev/null
-					fi
-				else
-					printf "${bred}[$(date +'%Y-%m-%d %H:%M:%S')] Skipping XSS: Too many URLs to test, try with --deep flag${reset}\n"
-				fi
+				DALFOX_EXTRA="--skip-grepping --skip-mining-all --skip-mining-dict" # lighter scan outside --deep
+			fi
+
+			if [[ -n $XSS_SERVER ]]; then
+				OPTIONS="-b ${XSS_SERVER} -w $DALFOX_THREADS"
+			else
+				printf "${yellow}\n[$(date +'%Y-%m-%d %H:%M:%S')] No XSS_SERVER defined, blind XSS skipped\n\n"
+				OPTIONS="-w $DALFOX_THREADS"
+			fi
+
+			# Run Dalfox with Axiom-scan output
+			if [[ -s ".tmp/xss_reflected.txt" ]]; then
+				printf "${yellow}\n[$(date +'%Y-%m-%d %H:%M:%S')] Running: Dalfox with Axiom${reset}\n\n"
+				axiom-scan ".tmp/xss_reflected.txt" -m dalfox --skip-bav $DALFOX_EXTRA $OPTIONS -o "vulns/xss.txt" $AXIOM_EXTRA_ARGS 2>>"$LOGFILE" >/dev/null
 			fi
 		fi
-		end_func "Results are saved in vulns/xss.txt" ${FUNCNAME[0]}
+
+		end_func "Results are saved in vulns/xss.txt" "${FUNCNAME[0]}"
 	else
+		# Handle cases where XSS is false, no vulnerable URLs, or already processed
 		if [[ $XSS == false ]]; then
 			printf "\n${yellow}[$(date +'%Y-%m-%d %H:%M:%S')] ${FUNCNAME[0]} skipped in this mode or defined in reconftw.cfg ${reset}\n"
 		elif [[ ! -s "gf/xss.txt" ]]; then
-			printf "\n${yellow}[$(date +'%Y-%m-%d %H:%M:%S')] ${FUNCNAME[0]} No URLs potentially vulnerables to XSS ${reset}\n\n"
+			printf "\n${yellow}[$(date +'%Y-%m-%d %H:%M:%S')] ${FUNCNAME[0]} skipped: No URLs potentially vulnerable to XSS ${reset}\n\n"
 		else
-			printf "${yellow}[$(date +'%Y-%m-%d %H:%M:%S')] ${FUNCNAME[0]} is already processed, to force executing ${FUNCNAME[0]} delete\n    $called_fn_dir/.${FUNCNAME[0]} ${reset}\n\n"
+			printf "${yellow}[$(date +'%Y-%m-%d %H:%M:%S')] ${FUNCNAME[0]} has already been processed. To force execution, delete:\n    $called_fn_dir/.${FUNCNAME[0]} ${reset}\n\n"
 		fi
 	fi
 
@@ -2333,17 +4345,42 @@ function xss() {
 
 function cors() {
 
-	mkdir -p {.tmp,webs,vulns}
-	if { [[ ! -f "$called_fn_dir/.${FUNCNAME[0]}" ]] || [[ $DIFF == true ]]; } && [[ $CORS == true ]]; then
-		start_func ${FUNCNAME[0]} "CORS Scan"
-		[ ! -s "webs/webs_all.txt" ] && cat webs/webs.txt webs/webs_uncommon_ports.txt 2>/dev/null | anew -q webs/webs_all.txt
-		[ -s "webs/webs_all.txt" ] && python3 ${tools}/Corsy/corsy.py -i webs/webs_all.txt -o vulns/cors.txt 2>>"$LOGFILE" >/dev/null
-		end_func "Results are saved in vulns/cors.txt" ${FUNCNAME[0]}
+	# Create necessary directories
+	if ! mkdir -p .tmp webs vulns; then
+		printf "%b[!] Failed to create directories.%b\n" "$bred" "$reset"
+		return 1
+	fi
+
+	# Check if the function should run
+	if { [[ ! -f "$called_fn_dir/.${FUNCNAME[0]}" ]] || [[ $DIFF == true ]]; } && [[ $CORS == true ]] &&
+		! [[ $domain =~ ^[0-9]+\.[0-9]+\.[0-9]+\.[0-9]+$ ]]; then
+
+		start_func "${FUNCNAME[0]}" "CORS Scan"
+
+		# Combine webs.txt and webs_uncommon_ports.txt into webs_all.txt if it doesn't exist
+		if [[ ! -s "webs/webs_all.txt" ]]; then
+			cat webs/webs.txt webs/webs_uncommon_ports.txt 2>/dev/null | anew -q "webs/webs_all.txt"
+		fi
+
+		# Proceed only if webs_all.txt exists and is non-empty
+		if [[ -s "webs/webs_all.txt" ]]; then
+			printf "${yellow}\n[$(date +'%Y-%m-%d %H:%M:%S')] Running: Corsy for CORS Scan${reset}\n\n"
+			python3 "${tools}/Corsy/corsy.py" -i "webs/webs_all.txt" -o "vulns/cors.txt" 2>>"$LOGFILE" >/dev/null
+		else
+			end_func "No webs/webs_all.txt file found, CORS Scan skipped." "${FUNCNAME[0]}"
+			return
+		fi
+
+		end_func "Results are saved in vulns/cors.txt" "${FUNCNAME[0]}"
+
 	else
+		# Handle cases where CORS is false, no web URLs are available, or already processed
 		if [[ $CORS == false ]]; then
-			printf "\n${yellow}[$(date +'%Y-%m-%d %H:%M:%S')] ${FUNCNAME[0]} skipped in this mode or defined in reconftw.cfg ${reset}\n"
+			printf "\n${yellow}[$(date +'%Y-%m-%d %H:%M:%S')] ${FUNCNAME[0]} skipped due to configuration settings.${reset}\n"
+		elif [[ ! -s "webs/webs_all.txt" ]]; then
+			printf "\n${yellow}[$(date +'%Y-%m-%d %H:%M:%S')] ${FUNCNAME[0]} skipped: No URLs available for CORS Scan.${reset}\n\n"
 		else
-			printf "${yellow}[$(date +'%Y-%m-%d %H:%M:%S')] ${FUNCNAME[0]} is already processed, to force executing ${FUNCNAME[0]} delete\n    $called_fn_dir/.${FUNCNAME[0]} ${reset}\n\n"
+			printf "${yellow}[$(date +'%Y-%m-%d %H:%M:%S')] ${FUNCNAME[0]} has already been processed. To force execution, delete:\n    $called_fn_dir/.${FUNCNAME[0]} ${reset}\n\n"
 		fi
 	fi
 
@@ -2351,25 +4388,46 @@ function cors() {
 
 function open_redirect() {
 
-	mkdir -p {.tmp,gf,vulns}
-	if { [[ ! -f "$called_fn_dir/.${FUNCNAME[0]}" ]] || [[ $DIFF == true ]]; } && [[ $OPEN_REDIRECT == true ]] && [[ -s "gf/redirect.txt" ]]; then
-		start_func ${FUNCNAME[0]} "Open redirects checks"
-		if [[ $DEEP == true ]] || [[ $(cat gf/redirect.txt | wc -l) -le $DEEP_LIMIT ]]; then
-			cat gf/redirect.txt | qsreplace FUZZ | sed '/FUZZ/!d' | anew -q .tmp/tmp_redirect.txt
-			python3 ${tools}/Oralyzer/oralyzer.py -l .tmp/tmp_redirect.txt -p ${tools}/Oralyzer/payloads.txt >vulns/redirect.txt
-			sed -r -i "s/\x1B\[([0-9]{1,3}(;[0-9]{1,2})?)?[mGK]//g" vulns/redirect.txt
-			end_func "Results are saved in vulns/redirect.txt" ${FUNCNAME[0]}
+	# Create necessary directories
+	if ! mkdir -p .tmp gf vulns; then
+		printf "%b[!] Failed to create directories.%b\n" "$bred" "$reset"
+		return 1
+	fi
+
+	# Check if the function should run
+	if { [[ ! -f "$called_fn_dir/.${FUNCNAME[0]}" ]] || [[ $DIFF == true ]]; } && [[ $OPEN_REDIRECT == true ]] &&
+		[[ -s "gf/redirect.txt" ]] && ! [[ $domain =~ ^[0-9]+\.[0-9]+\.[0-9]+\.[0-9]+$ ]]; then
+
+		start_func "${FUNCNAME[0]}" "Open Redirects Checks"
+
+		# Determine whether to proceed based on DEEP flag or number of URLs
+		URL_COUNT=$(wc -l <"gf/redirect.txt")
+		if [[ $DEEP == true ]] || [[ $URL_COUNT -le $DEEP_LIMIT ]]; then
+
+			printf "${yellow}\n[$(date +'%Y-%m-%d %H:%M:%S')] Running: Open Redirects Payload Generation${reset}\n\n"
+
+			# Process redirect.txt with qsreplace and filter lines containing 'FUZZ'
+			qsreplace FUZZ <"gf/redirect.txt" | sed '/FUZZ/!d' | anew -q ".tmp/tmp_redirect.txt"
+
+			# Run Oralyzer with the generated payloads
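+			# Oralyzer requests each FUZZ-marked URL with every payload in payloads.txt
+			# and reports the parameters that redirect off-origin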
+			python3 "${tools}/Oralyzer/oralyzer.py" -l ".tmp/tmp_redirect.txt" -p "${tools}/Oralyzer/payloads.txt" >"vulns/redirect.txt" 2>>"$LOGFILE"
+
+			# Remove ANSI color codes from the output
+			sed -r -i "s/\x1B\[([0-9]{1,3}(;[0-9]{1,2})?)?[mGK]//g" "vulns/redirect.txt"
+
+			end_func "Results are saved in vulns/redirect.txt" "${FUNCNAME[0]}"
 		else
-			end_func "Skipping Open redirects: Too many URLs to test, try with --deep flag" ${FUNCNAME[0]}
+			end_func "Skipping Open Redirects: Too many URLs to test, try with --deep flag." "${FUNCNAME[0]}"
 			printf "${bgreen}#######################################################################${reset}\n"
 		fi
 	else
+		# Handle cases where OPEN_REDIRECT is false, no vulnerable URLs, or already processed
 		if [[ $OPEN_REDIRECT == false ]]; then
-			printf "\n${yellow}[$(date +'%Y-%m-%d %H:%M:%S')] ${FUNCNAME[0]} skipped in this mode or defined in reconftw.cfg ${reset}\n"
+			printf "\n${yellow}[$(date +'%Y-%m-%d %H:%M:%S')] ${FUNCNAME[0]} skipped due to configuration settings.${reset}\n"
 		elif [[ ! -s "gf/redirect.txt" ]]; then
-			printf "\n${yellow}[$(date +'%Y-%m-%d %H:%M:%S')] ${FUNCNAME[0]} No URLs potentially vulnerables to Open Redirect ${reset}\n\n"
+			printf "\n${yellow}[$(date +'%Y-%m-%d %H:%M:%S')] ${FUNCNAME[0]} skipped: No URLs potentially vulnerable to Open Redirect.${reset}\n\n"
 		else
-			printf "${yellow}[$(date +'%Y-%m-%d %H:%M:%S')] ${FUNCNAME[0]} is already processed, to force executing ${FUNCNAME[0]} delete\n    $called_fn_dir/.${FUNCNAME[0]} ${reset}\n\n"
+			printf "${yellow}[$(date +'%Y-%m-%d %H:%M:%S')] ${FUNCNAME[0]} has already been processed. To force execution, delete:\n    $called_fn_dir/.${FUNCNAME[0]} ${reset}\n\n"
 		fi
 	fi
 
@@ -2377,40 +4435,87 @@ function open_redirect() {
 
 function ssrf_checks() {
 
-	mkdir -p {.tmp,gf,vulns}
-	if { [[ ! -f "$called_fn_dir/.${FUNCNAME[0]}" ]] || [[ $DIFF == true ]]; } && [[ $SSRF_CHECKS == true ]] && [[ -s "gf/ssrf.txt" ]]; then
-		start_func ${FUNCNAME[0]} "SSRF checks"
+	# Create necessary directories
+	if ! mkdir -p .tmp gf vulns; then
+		printf "%b[!] Failed to create directories.%b\n" "$bred" "$reset"
+		return 1
+	fi
+
+	# Check if the function should run
+	if { [[ ! -f "$called_fn_dir/.${FUNCNAME[0]}" ]] || [[ $DIFF == true ]]; } && [[ $SSRF_CHECKS == true ]] &&
+		[[ -s "gf/ssrf.txt" ]] && ! [[ $domain =~ ^[0-9]+\.[0-9]+\.[0-9]+\.[0-9]+$ ]]; then
+
+		start_func "${FUNCNAME[0]}" "SSRF Checks"
+
+		# Handle COLLAB_SERVER configuration
 		if [[ -z $COLLAB_SERVER ]]; then
 			interactsh-client &>.tmp/ssrf_callback.txt &
+			INTERACTSH_PID=$!
 			sleep 2
-			COLLAB_SERVER_FIX="FFUFHASH.$(cat .tmp/ssrf_callback.txt | tail -n1 | cut -c 16-)"
+
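+			# FFUFHASH is expanded by ffuf into a unique per-request hash, letting each
+			# interactsh callback be traced back to the request that triggered it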
+			# Build the collaborator hostname from the interactsh payload domain
+			COLLAB_SERVER_FIX="FFUFHASH.$(tail -n1 .tmp/ssrf_callback.txt | cut -c 16-)"
 			COLLAB_SERVER_URL="http://$COLLAB_SERVER_FIX"
 			INTERACT=true
 		else
-			COLLAB_SERVER_FIX="FFUFHASH.$(echo ${COLLAB_SERVER} | sed -r "s/https?:\/\///")"
+			COLLAB_SERVER_FIX="FFUFHASH.$(echo "$COLLAB_SERVER" | sed -r "s|https?://||")"
 			INTERACT=false
 		fi
-		if [[ $DEEP == true ]] || [[ $(cat gf/ssrf.txt | wc -l) -le $DEEP_LIMIT ]]; then
-			cat gf/ssrf.txt | qsreplace ${COLLAB_SERVER_FIX} | anew -q .tmp/tmp_ssrf.txt
-			cat gf/ssrf.txt | qsreplace ${COLLAB_SERVER_URL} | anew -q .tmp/tmp_ssrf.txt
-			ffuf -v -H "${HEADER}" -t $FFUF_THREADS -rate $FFUF_RATELIMIT -w .tmp/tmp_ssrf.txt -u FUZZ 2>/dev/null | grep "URL" | sed 's/| URL | //' | anew -q vulns/ssrf_requested_url.txt
-			ffuf -v -w .tmp/tmp_ssrf.txt:W1,${tools}/headers_inject.txt:W2 -H "${HEADER}" -H "W2: ${COLLAB_SERVER_FIX}" -t $FFUF_THREADS -rate $FFUF_RATELIMIT -u W1 2>/dev/null | anew -q vulns/ssrf_requested_headers.txt
-			ffuf -v -w .tmp/tmp_ssrf.txt:W1,${tools}/headers_inject.txt:W2 -H "${HEADER}" -H "W2: ${COLLAB_SERVER_URL}" -t $FFUF_THREADS -rate $FFUF_RATELIMIT -u W1 2>/dev/null | anew -q vulns/ssrf_requested_headers.txt
+
+		# Determine whether to proceed based on DEEP flag or URL count
+		URL_COUNT=$(wc -l <"gf/ssrf.txt")
+		if [[ $DEEP == true ]] || [[ $URL_COUNT -le $DEEP_LIMIT ]]; then
+
+			printf "${yellow}\n[$(date +'%Y-%m-%d %H:%M:%S')] Running: SSRF Payload Generation${reset}\n\n"
+
+			# Generate temporary SSRF payloads
+			# qsreplace has already substituted the collaborator host, so no FUZZ filter is needed here
+			qsreplace "$COLLAB_SERVER_FIX" <"gf/ssrf.txt" | anew -q ".tmp/tmp_ssrf.txt"
+			qsreplace "$COLLAB_SERVER_URL" <"gf/ssrf.txt" | anew -q ".tmp/tmp_ssrf.txt"
+
+			# Run FFUF to find requested URLs
+			printf "${yellow}\n[$(date +'%Y-%m-%d %H:%M:%S')] Running: FFUF for SSRF Requested URLs${reset}\n\n"
+			ffuf -v -H "${HEADER}" -t "$FFUF_THREADS" -rate "$FFUF_RATELIMIT" -w ".tmp/tmp_ssrf.txt" -u "FUZZ" 2>/dev/null |
+				grep "URL" | sed 's/| URL | //' | anew -q "vulns/ssrf_requested_url.txt"
+
+			# Run FFUF with header injection for SSRF
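+			# two wordlists are combined (ffuf defaults to clusterbomb mode): W1 supplies
+			# the target URL and W2 the header name to inject the collaborator into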
+			printf "${yellow}\n[$(date +'%Y-%m-%d %H:%M:%S')] Running: FFUF for SSRF Requested Headers with COLLAB_SERVER_FIX${reset}\n\n"
+			ffuf -v -w ".tmp/tmp_ssrf.txt:W1,${tools}/headers_inject.txt:W2" -H "${HEADER}" -H "W2: ${COLLAB_SERVER_FIX}" -t "$FFUF_THREADS" \
+				-rate "$FFUF_RATELIMIT" -u "W1" 2>/dev/null | anew -q "vulns/ssrf_requested_headers.txt"
+
+			printf "${yellow}\n[$(date +'%Y-%m-%d %H:%M:%S')] Running: FFUF for SSRF Requested Headers with COLLAB_SERVER_URL${reset}\n\n"
+			ffuf -v -w ".tmp/tmp_ssrf.txt:W1,${tools}/headers_inject.txt:W2" -H "${HEADER}" -H "W2: ${COLLAB_SERVER_URL}" -t "$FFUF_THREADS" \
+				-rate "$FFUF_RATELIMIT" -u "W1" 2>/dev/null | anew -q "vulns/ssrf_requested_headers.txt"
+
+			# Allow time for callbacks to be received
 			sleep 5
-			[ -s ".tmp/ssrf_callback.txt" ] && cat .tmp/ssrf_callback.txt | tail -n+11 | anew -q vulns/ssrf_callback.txt && NUMOFLINES=$(cat .tmp/ssrf_callback.txt | tail -n+12 | sed '/^$/d' | wc -l)
-			[ "$INTERACT" = true ] && notification "SSRF: ${NUMOFLINES} callbacks received" info
-			end_func "Results are saved in vulns/ssrf_*" ${FUNCNAME[0]}
+
+			# Process SSRF callback results if INTERACT is enabled
+			if [[ $INTERACT == true ]] && [[ -s ".tmp/ssrf_callback.txt" ]]; then
+				tail -n +11 .tmp/ssrf_callback.txt | anew -q "vulns/ssrf_callback.txt"
+				NUMOFLINES=$(tail -n +12 .tmp/ssrf_callback.txt | sed '/^$/d' | wc -l)
+				notification "SSRF: ${NUMOFLINES} callbacks received" info
+			fi
+
+			end_func "Results are saved in vulns/ssrf_*" "${FUNCNAME[0]}"
 		else
-			end_func "Skipping SSRF: Too many URLs to test, try with --deep flag" ${FUNCNAME[0]}
+			end_func "Skipping SSRF: Too many URLs to test, try with --deep flag." "${FUNCNAME[0]}"
+			printf "${bgreen}#######################################################################${reset}\n"
+		fi
+
+		# Terminate interactsh-client if it was started
+		if [[ $INTERACT == true ]]; then
+			kill "$INTERACTSH_PID" 2>/dev/null || pkill -f interactsh-client
 		fi
-		pkill -f interactsh-client &
+
 	else
+		# Handle cases where SSRF_CHECKS is false, no vulnerable URLs, or already processed
 		if [[ $SSRF_CHECKS == false ]]; then
 			printf "\n${yellow}[$(date +'%Y-%m-%d %H:%M:%S')] ${FUNCNAME[0]} skipped in this mode or defined in reconftw.cfg ${reset}\n"
 		elif [[ ! -s "gf/ssrf.txt" ]]; then
-			printf "\n${yellow}[$(date +'%Y-%m-%d %H:%M:%S')] ${FUNCNAME[0]} No URLs potentially vulnerables to SSRF ${reset}\n\n"
+			printf "\n${yellow}[$(date +'%Y-%m-%d %H:%M:%S')] ${FUNCNAME[0]} skipped: No URLs potentially vulnerable to SSRF.${reset}\n\n"
 		else
-			printf "${yellow}[$(date +'%Y-%m-%d %H:%M:%S')] ${FUNCNAME[0]} is already processed, to force executing ${FUNCNAME[0]} delete\n    $called_fn_dir/.${FUNCNAME[0]} ${reset}\n\n"
+			printf "${yellow}[$(date +'%Y-%m-%d %H:%M:%S')] ${FUNCNAME[0]} has already been processed. To force execution, delete:\n    $called_fn_dir/.${FUNCNAME[0]} ${reset}\n\n"
 		fi
 	fi
 
@@ -2418,21 +4523,44 @@ function ssrf_checks() {
 
 function crlf_checks() {
 
-	mkdir -p {webs,vulns}
-	if { [[ ! -f "$called_fn_dir/.${FUNCNAME[0]}" ]] || [[ $DIFF == true ]]; } && [[ $CRLF_CHECKS == true ]]; then
-		start_func ${FUNCNAME[0]} "CRLF checks"
-		[ ! -s "webs/webs_all.txt" ] && cat webs/webs.txt webs/webs_uncommon_ports.txt 2>/dev/null | anew -q webs/webs_all.txt
-		if [[ $DEEP == true ]] || [[ $(cat webs/webs_all.txt | wc -l) -le $DEEP_LIMIT ]]; then
-			crlfuzz -l webs/webs_all.txt -o vulns/crlf.txt 2>>"$LOGFILE" >/dev/null
-			end_func "Results are saved in vulns/crlf.txt" ${FUNCNAME[0]}
+	# Create necessary directories
+	if ! mkdir -p webs vulns; then
+		printf "%b[!] Failed to create directories.%b\n" "$bred" "$reset"
+		return 1
+	fi
+
+	# Check if the function should run
+	if { [[ ! -f "$called_fn_dir/.${FUNCNAME[0]}" ]] || [[ $DIFF == true ]]; } && [[ $CRLF_CHECKS == true ]] &&
+		! [[ $domain =~ ^[0-9]+\.[0-9]+\.[0-9]+\.[0-9]+$ ]]; then
+
+		start_func "${FUNCNAME[0]}" "CRLF Checks"
+
+		# Combine webs.txt and webs_uncommon_ports.txt into webs_all.txt if it doesn't exist
+		if [[ ! -s "webs/webs_all.txt" ]]; then
+			cat webs/webs.txt webs/webs_uncommon_ports.txt 2>/dev/null | anew -q "webs/webs_all.txt"
+		fi
+
+		# Determine whether to proceed based on DEEP flag or number of URLs
+		URL_COUNT=$(wc -l <"webs/webs_all.txt")
+		if [[ $DEEP == true ]] || [[ $URL_COUNT -le $DEEP_LIMIT ]]; then
+
+			printf "${yellow}\n[$(date +'%Y-%m-%d %H:%M:%S')] Running: CRLF Fuzzing${reset}\n\n"
+
+			# Run CRLFuzz
+			crlfuzz -l "webs/webs_all.txt" -o "vulns/crlf.txt" 2>>"$LOGFILE" >/dev/null
+
+			end_func "Results are saved in vulns/crlf.txt" "${FUNCNAME[0]}"
 		else
-			end_func "Skipping CRLF: Too many URLs to test, try with --deep flag" ${FUNCNAME[0]}
+			end_func "Skipping CRLF: Too many URLs to test, try with --deep flag." "${FUNCNAME[0]}"
 		fi
 	else
+		# Handle cases where CRLF_CHECKS is false, no web URLs are available, or already processed
 		if [[ $CRLF_CHECKS == false ]]; then
-			printf "\n${yellow}[$(date +'%Y-%m-%d %H:%M:%S')] ${FUNCNAME[0]} skipped in this mode or defined in reconftw.cfg ${reset}\n"
+			printf "\n${yellow}[$(date +'%Y-%m-%d %H:%M:%S')] ${FUNCNAME[0]} skipped due to configuration settings.${reset}\n"
+		elif [[ ! -s "webs/webs_all.txt" ]]; then
+			printf "\n${yellow}[$(date +'%Y-%m-%d %H:%M:%S')] ${FUNCNAME[0]} skipped: No web URLs available for CRLF checks.${reset}\n\n"
 		else
-			printf "${yellow}[$(date +'%Y-%m-%d %H:%M:%S')] ${FUNCNAME[0]} is already processed, to force executing ${FUNCNAME[0]} delete\n    $called_fn_dir/.${FUNCNAME[0]} ${reset}\n\n"
+			printf "${yellow}[$(date +'%Y-%m-%d %H:%M:%S')] ${FUNCNAME[0]} has already been processed. To force execution, delete:\n    $called_fn_dir/.${FUNCNAME[0]} ${reset}\n\n"
 		fi
 	fi
 
@@ -2440,25 +4568,51 @@ function crlf_checks() {
 
 function lfi() {
 
-	mkdir -p {.tmp,gf,vulns}
-	if { [[ ! -f "$called_fn_dir/.${FUNCNAME[0]}" ]] || [[ $DIFF == true ]]; } && [[ $LFI == true ]] && [[ -s "gf/lfi.txt" ]]; then
-		start_func ${FUNCNAME[0]} "LFI checks"
+	# Create necessary directories
+	if ! mkdir -p .tmp gf vulns; then
+		printf "%b[!] Failed to create directories.%b\n" "$bred" "$reset"
+		return 1
+	fi
+
+	# Check if the function should run
+	if { [[ ! -f "$called_fn_dir/.${FUNCNAME[0]}" ]] || [[ $DIFF == true ]]; } && [[ $LFI == true ]] &&
+		[[ -s "gf/lfi.txt" ]] && ! [[ $domain =~ ^[0-9]+\.[0-9]+\.[0-9]+\.[0-9]+$ ]]; then
+
+		start_func "${FUNCNAME[0]}" "LFI Checks"
+
+		# Ensure gf/lfi.txt is not empty
 		if [[ -s "gf/lfi.txt" ]]; then
-			cat gf/lfi.txt | qsreplace FUZZ | sed '/FUZZ/!d' | anew -q .tmp/tmp_lfi.txt
-			if [[ $DEEP == true ]] || [[ $(cat .tmp/tmp_lfi.txt | wc -l) -le $DEEP_LIMIT ]]; then
-				interlace -tL .tmp/tmp_lfi.txt -threads ${INTERLACE_THREADS} -c "ffuf -v -r -t ${FFUF_THREADS} -rate ${FFUF_RATELIMIT} -H \"${HEADER}\" -w ${lfi_wordlist} -u \"_target_\" -mr \"root:\" " 2>/dev/null | grep "URL" | sed 's/| URL | //' | anew -q vulns/lfi.txt
-				end_func "Results are saved in vulns/lfi.txt" ${FUNCNAME[0]}
+			printf "${yellow}\n[$(date +'%Y-%m-%d %H:%M:%S')] Running: LFI Payload Generation${reset}\n\n"
+
+			# Process lfi.txt with qsreplace and filter lines containing 'FUZZ'
+			qsreplace "FUZZ" <"gf/lfi.txt" | sed '/FUZZ/!d' | anew -q ".tmp/tmp_lfi.txt"
+
+			# Determine whether to proceed based on DEEP flag or number of URLs
+			URL_COUNT=$(wc -l <".tmp/tmp_lfi.txt")
+			if [[ $DEEP == true ]] || [[ $URL_COUNT -le $DEEP_LIMIT ]]; then
+
+				printf "${yellow}\n[$(date +'%Y-%m-%d %H:%M:%S')] Running: LFI Fuzzing with FFUF${reset}\n\n"
+
+				# Use Interlace to parallelize FFUF scanning
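+				# Interlace substitutes _target_ with each line of tmp_lfi.txt and spawns
+				# one ffuf per URL; -mr "root:" flags responses leaking /etc/passwd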
+				interlace -tL ".tmp/tmp_lfi.txt" -threads "$INTERLACE_THREADS" -c "ffuf -v -r -t ${FFUF_THREADS} -rate ${FFUF_RATELIMIT} -H \"${HEADER}\" -w \"${lfi_wordlist}\" -u \"_target_\" -mr \"root:\" " 2>>"$LOGFILE" |
+					grep "URL" | sed 's/| URL | //' | anew -q "vulns/lfi.txt"
+
+				end_func "Results are saved in vulns/lfi.txt" "${FUNCNAME[0]}"
 			else
-				end_func "Skipping LFI: Too many URLs to test, try with --deep flag" ${FUNCNAME[0]}
+				end_func "Skipping LFI: Too many URLs to test, try with --deep flag." "${FUNCNAME[0]}"
 			fi
+		else
+			end_func "No gf/lfi.txt file found, LFI Checks skipped." "${FUNCNAME[0]}"
+			return
 		fi
 	else
+		# Handle cases where LFI is false, no vulnerable URLs, or already processed
 		if [[ $LFI == false ]]; then
-			printf "\n${yellow}[$(date +'%Y-%m-%d %H:%M:%S')] ${FUNCNAME[0]} skipped in this mode or defined in reconftw.cfg ${reset}\n"
+			printf "\n${yellow}[$(date +'%Y-%m-%d %H:%M:%S')] ${FUNCNAME[0]} skipped due to configuration settings.${reset}\n"
 		elif [[ ! -s "gf/lfi.txt" ]]; then
-			printf "\n${yellow}[$(date +'%Y-%m-%d %H:%M:%S')] ${FUNCNAME[0]} No URLs potentially vulnerables to LFI ${reset}\n\n"
+			printf "\n${yellow}[$(date +'%Y-%m-%d %H:%M:%S')] ${FUNCNAME[0]} skipped: No URLs potentially vulnerable to LFI.${reset}\n\n"
 		else
-			printf "${yellow}[$(date +'%Y-%m-%d %H:%M:%S')] ${FUNCNAME[0]} is already processed, to force executing ${FUNCNAME[0]} delete\n    $called_fn_dir/.${FUNCNAME[0]} ${reset}\n\n"
+			printf "${yellow}[$(date +'%Y-%m-%d %H:%M:%S')] ${FUNCNAME[0]} has already been processed. To force execution, delete:\n    $called_fn_dir/.${FUNCNAME[0]} ${reset}\n\n"
 		fi
 	fi
 
@@ -2466,26 +4620,51 @@ function lfi() {
 
 function ssti() {
 
-	mkdir -p {.tmp,gf,vulns}
-	if { [[ ! -f "$called_fn_dir/.${FUNCNAME[0]}" ]] || [[ $DIFF == true ]]; } && [[ $SSTI == true ]] && [[ -s "gf/ssti.txt" ]]; then
-		start_func ${FUNCNAME[0]} "SSTI checks"
+	# Create necessary directories
+	if ! mkdir -p .tmp gf vulns; then
+		printf "%b[!] Failed to create directories.%b\n" "$bred" "$reset"
+		return 1
+	fi
+
+	# Check if the function should run
+	if { [[ ! -f "$called_fn_dir/.${FUNCNAME[0]}" ]] || [[ $DIFF == true ]]; } && [[ $SSTI == true ]] &&
+		[[ -s "gf/ssti.txt" ]] && ! [[ $domain =~ ^[0-9]+\.[0-9]+\.[0-9]+\.[0-9]+$ ]]; then
+
+		start_func "${FUNCNAME[0]}" "SSTI Checks"
+
+		# Ensure gf/ssti.txt is not empty
 		if [[ -s "gf/ssti.txt" ]]; then
-			cat gf/ssti.txt | qsreplace FUZZ | sed '/FUZZ/!d' | anew -q .tmp/tmp_ssti.txt
-			if [[ $DEEP == true ]] || [[ $(cat .tmp/tmp_ssti.txt | wc -l) -le $DEEP_LIMIT ]]; then
-				#TInjA url -u "file:./Recon/DOMAIN/gf/ssti.txt" --csti --reportpath "vulns/"
-				interlace -tL .tmp/tmp_ssti.txt -threads ${INTERLACE_THREADS} -c "ffuf -v -r -t ${FFUF_THREADS} -rate ${FFUF_RATELIMIT} -H \"${HEADER}\" -w ${ssti_wordlist} -u \"_target_\" -mr \"ssti49\" " 2>/dev/null | grep "URL" | sed 's/| URL | //' | anew -q vulns/ssti.txt
-				end_func "Results are saved in vulns/ssti.txt" ${FUNCNAME[0]}
+			printf "${yellow}\n[$(date +'%Y-%m-%d %H:%M:%S')] Running: SSTI Payload Generation${reset}\n\n"
+
+			# Process ssti.txt with qsreplace and filter lines containing 'FUZZ'
+			qsreplace "FUZZ" <"gf/ssti.txt" | sed '/FUZZ/!d' | anew -q ".tmp/tmp_ssti.txt"
+
+			# Determine whether to proceed based on DEEP flag or number of URLs
+			URL_COUNT=$(wc -l <".tmp/tmp_ssti.txt")
+			if [[ $DEEP == true ]] || [[ $URL_COUNT -le $DEEP_LIMIT ]]; then
+
+				printf "${yellow}\n[$(date +'%Y-%m-%d %H:%M:%S')] Running: SSTI Fuzzing with FFUF${reset}\n\n"
+
+				# Use Interlace to parallelize FFUF scanning
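+				# -mr "ssti49" matches the marker produced when a template engine
+				# evaluates the wordlist's payloads (expressions rendering 49, e.g. {{7*7}})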
+				interlace -tL ".tmp/tmp_ssti.txt" -threads "$INTERLACE_THREADS" -c "ffuf -v -r -t ${FFUF_THREADS} -rate ${FFUF_RATELIMIT} -H \"${HEADER}\" -w \"${ssti_wordlist}\" -u \"_target_\" -mr \"ssti49\"" 2>>"$LOGFILE" |
+					grep "URL" | sed 's/| URL | //' | anew -q "vulns/ssti.txt"
+
+				end_func "Results are saved in vulns/ssti.txt" "${FUNCNAME[0]}"
 			else
-				end_func "Skipping SSTI: Too many URLs to test, try with --deep flag" ${FUNCNAME[0]}
+				end_func "Skipping SSTI: Too many URLs to test, try with --deep flag." "${FUNCNAME[0]}"
 			fi
+		else
+			end_func "No gf/ssti.txt file found, SSTI Checks skipped." "${FUNCNAME[0]}"
+			return
 		fi
 	else
+		# Handle cases where SSTI is false, no vulnerable URLs, or already processed
 		if [[ $SSTI == false ]]; then
-			printf "\n${yellow}[$(date +'%Y-%m-%d %H:%M:%S')] ${FUNCNAME[0]} skipped in this mode or defined in reconftw.cfg ${reset}\n"
+			printf "\n${yellow}[$(date +'%Y-%m-%d %H:%M:%S')] ${FUNCNAME[0]} skipped due to configuration settings.${reset}\n"
 		elif [[ ! -s "gf/ssti.txt" ]]; then
-			printf "\n${yellow}[$(date +'%Y-%m-%d %H:%M:%S')] ${FUNCNAME[0]} No URLs potentially vulnerables to SSTI ${reset}\n\n"
+			printf "\n${yellow}[$(date +'%Y-%m-%d %H:%M:%S')] ${FUNCNAME[0]} skipped: No URLs potentially vulnerable to SSTI.${reset}\n\n"
 		else
-			printf "${yellow}[$(date +'%Y-%m-%d %H:%M:%S')] ${FUNCNAME[0]} is already processed, to force executing ${FUNCNAME[0]} delete\n    $called_fn_dir/.${FUNCNAME[0]} ${reset}\n\n"
+			printf "${yellow}[$(date +'%Y-%m-%d %H:%M:%S')] ${FUNCNAME[0]} has already been processed. To force execution, delete:\n    $called_fn_dir/.${FUNCNAME[0]} ${reset}\n\n"
 		fi
 	fi
 
@@ -2493,29 +4672,58 @@ function ssti() {
 
 function sqli() {
 
-	mkdir -p {.tmp,gf,vulns}
-	if { [[ ! -f "$called_fn_dir/.${FUNCNAME[0]}" ]] || [[ $DIFF == true ]]; } && [[ $SQLI == true ]] && [[ -s "gf/sqli.txt" ]]; then
-		start_func ${FUNCNAME[0]} "SQLi checks"
+	# Create necessary directories
+	if ! mkdir -p .tmp gf vulns; then
+		printf "%b[!] Failed to create directories.%b\n" "$bred" "$reset"
+		return 1
+	fi
+
+	# Check if the function should run
+	if { [[ ! -f "$called_fn_dir/.${FUNCNAME[0]}" ]] || [[ $DIFF == true ]]; } && [[ $SQLI == true ]] &&
+		[[ -s "gf/sqli.txt" ]] && ! [[ $domain =~ ^[0-9]+\.[0-9]+\.[0-9]+\.[0-9]+$ ]]; then
 
-		cat gf/sqli.txt | qsreplace FUZZ | sed '/FUZZ/!d' | anew -q .tmp/tmp_sqli.txt
-		if [[ $DEEP == true ]] || [[ $(cat .tmp/tmp_sqli.txt | wc -l) -le $DEEP_LIMIT ]]; then
-			if [[ $SQLMAP == true ]]; then
-				python3 ${tools}/sqlmap/sqlmap.py -m .tmp/tmp_sqli.txt -b -o --smart --batch --disable-coloring --random-agent --output-dir=vulns/sqlmap 2>>"$LOGFILE" >/dev/null
-			fi
-			if [[ $GHAURI == true ]]; then
-				interlace -tL .tmp/tmp_sqli.txt -threads ${INTERLACE_THREADS} -c "ghauri -u _target_ --batch -H \"${HEADER}\" --force-ssl >> vulns/ghauri_log.txt" 2>>"$LOGFILE" >/dev/null
+		start_func "${FUNCNAME[0]}" "SQLi Checks"
+
+		# Ensure gf/sqli.txt is not empty
+		if [[ -s "gf/sqli.txt" ]]; then
+			printf "${yellow}\n[$(date +'%Y-%m-%d %H:%M:%S')] Running: SQLi Payload Generation${reset}\n\n"
+
+			# Process sqli.txt with qsreplace and filter lines containing 'FUZZ'
+			qsreplace "FUZZ" <"gf/sqli.txt" | sed '/FUZZ/!d' | anew -q ".tmp/tmp_sqli.txt"
+
+			# Determine whether to proceed based on DEEP flag or number of URLs
+			URL_COUNT=$(wc -l <".tmp/tmp_sqli.txt")
+			if [[ $DEEP == true ]] || [[ $URL_COUNT -le $DEEP_LIMIT ]]; then
+
+				# Check if SQLMAP is enabled and run SQLMap
+				if [[ $SQLMAP == true ]]; then
+					printf "${yellow}\n[$(date +'%Y-%m-%d %H:%M:%S')] Running: SQLMap for SQLi Checks${reset}\n\n"
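+					# -m reads targets in bulk, --smart only tests parameters with positive
+					# heuristics, and --batch auto-answers prompts so the run is unattended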
+					python3 "${tools}/sqlmap/sqlmap.py" -m ".tmp/tmp_sqli.txt" -b -o --smart \
+					--batch --disable-coloring --random-agent --output-dir="vulns/sqlmap" 2>>"$LOGFILE" >/dev/null
+				fi
+
+				# Check if GHAURI is enabled and run Ghauri
+				if [[ $GHAURI == true ]]; then
+					printf "${yellow}\n[$(date +'%Y-%m-%d %H:%M:%S')] Running: Ghauri for SQLi Checks${reset}\n\n"
+					interlace -tL ".tmp/tmp_sqli.txt" -threads "$INTERLACE_THREADS" -c "ghauri -u _target_ --batch -H \"${HEADER}\" --force-ssl >> vulns/ghauri_log.txt" 2>>"$LOGFILE" >/dev/null
+				fi
+
+				end_func "Results are saved in vulns/sqlmap folder" "${FUNCNAME[0]}"
+			else
+				end_func "Skipping SQLi: Too many URLs to test, try with --deep flag." "${FUNCNAME[0]}"
 			fi
-			end_func "Results are saved in vulns/sqlmap folder" ${FUNCNAME[0]}
 		else
-			end_func "Skipping SQLi: Too many URLs to test, try with --deep flag" ${FUNCNAME[0]}
+			end_func "No gf/sqli.txt file found, SQLi Checks skipped." "${FUNCNAME[0]}"
+			return
 		fi
 	else
+		# Handle cases where SQLI is false, no vulnerable URLs, or already processed
 		if [[ $SQLI == false ]]; then
-			printf "\n${yellow}[$(date +'%Y-%m-%d %H:%M:%S')] ${FUNCNAME[0]} skipped in this mode or defined in reconftw.cfg ${reset}\n"
+			printf "\n${yellow}[$(date +'%Y-%m-%d %H:%M:%S')] ${FUNCNAME[0]} skipped due to configuration settings.${reset}\n"
 		elif [[ ! -s "gf/sqli.txt" ]]; then
-			printf "\n${yellow}[$(date +'%Y-%m-%d %H:%M:%S')] ${FUNCNAME[0]} No URLs potentially vulnerables to SQLi ${reset}\n\n"
+			printf "\n${yellow}[$(date +'%Y-%m-%d %H:%M:%S')] ${FUNCNAME[0]} skipped: No URLs potentially vulnerable to SQLi.${reset}\n\n"
 		else
-			printf "${yellow}[$(date +'%Y-%m-%d %H:%M:%S')] ${FUNCNAME[0]} is already processed, to force executing ${FUNCNAME[0]} delete\n    $called_fn_dir/.${FUNCNAME[0]} ${reset}\n\n"
+			printf "${yellow}[$(date +'%Y-%m-%d %H:%M:%S')] ${FUNCNAME[0]} has already been processed. To force execution, delete:\n    $called_fn_dir/.${FUNCNAME[0]} ${reset}\n\n"
 		fi
 	fi
 
@@ -2523,17 +4731,37 @@ function sqli() {
 
 function test_ssl() {
 
-	mkdir -p {hosts,vulns}
-	if { [[ ! -f "$called_fn_dir/.${FUNCNAME[0]}" ]] || [[ $DIFF == true ]]; } && [[ $TEST_SSL == true ]]; then
-		start_func ${FUNCNAME[0]} "SSL Test"
-		[[ -n $multi ]] && [ ! -f "$dir/hosts/ips.txt" ] && echo "$domain" >"$dir/hosts/ips.txt"
-		${tools}/testssl.sh/testssl.sh --quiet --color 0 -U -iL hosts/ips.txt 2>>"$LOGFILE" >vulns/testssl.txt
-		end_func "Results are saved in vulns/testssl.txt" ${FUNCNAME[0]}
+	# Create necessary directories
+	if ! mkdir -p hosts vulns; then
+		printf "%b[!] Failed to create directories.%b\n" "$bred" "$reset"
+		return 1
+	fi
+
+	# Check if the function should run
+	if { [[ ! -f "$called_fn_dir/.${FUNCNAME[0]}" ]] || [[ $DIFF == true ]]; } && [[ $TEST_SSL == true ]] &&
+		! [[ $domain =~ ^[0-9]+\.[0-9]+\.[0-9]+\.[0-9]+$ ]]; then
+
+		start_func "${FUNCNAME[0]}" "SSL Test"
+
+		# Handle multi-domain scenarios
+		if [[ -n $multi ]] && [[ ! -f "$dir/hosts/ips.txt" ]]; then
+			echo "$domain" >"$dir/hosts/ips.txt"
+		fi
+
+		# Run testssl.sh
+		printf "${yellow}\n[$(date +'%Y-%m-%d %H:%M:%S')] Running: SSL Test with testssl.sh${reset}\n\n"
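+		# -U runs testssl.sh's full set of vulnerability checks; the hosts in
+		# hosts/ips.txt are processed in mass-testing mode, one after another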
+		"${tools}/testssl.sh/testssl.sh" --quiet --color 0 -U -iL "hosts/ips.txt" 2>>"$LOGFILE" >"vulns/testssl.txt"
+
+		end_func "Results are saved in vulns/testssl.txt" "${FUNCNAME[0]}"
+
 	else
+		# Handle cases where TEST_SSL is false, no hosts are available, or already processed
 		if [[ $TEST_SSL == false ]]; then
-			printf "\n${yellow}[$(date +'%Y-%m-%d %H:%M:%S')] ${FUNCNAME[0]} skipped in this mode or defined in reconftw.cfg ${reset}\n"
+			printf "\n${yellow}[$(date +'%Y-%m-%d %H:%M:%S')] ${FUNCNAME[0]} skipped due to configuration settings.${reset}\n"
+		elif [[ ! -s "hosts/ips.txt" ]]; then
+			printf "\n${yellow}[$(date +'%Y-%m-%d %H:%M:%S')] ${FUNCNAME[0]} skipped: No hosts found to run testssl against.${reset}\n\n"
 		else
-			printf "${yellow}[$(date +'%Y-%m-%d %H:%M:%S')] ${FUNCNAME[0]} is already processed, to force executing ${FUNCNAME[0]} delete\n    $called_fn_dir/.${FUNCNAME[0]} ${reset}\n\n"
+			printf "${yellow}[$(date +'%Y-%m-%d %H:%M:%S')] ${FUNCNAME[0]} has already been processed. To force execution, delete:\n    $called_fn_dir/.${FUNCNAME[0]} ${reset}\n\n"
 		fi
 	fi
 
@@ -2541,18 +4769,40 @@ function test_ssl() {
 
 function spraying() {
 
-	mkdir -p vulns
-	if { [[ ! -f "$called_fn_dir/.${FUNCNAME[0]}" ]] || [[ $DIFF == true ]]; } && [[ $SPRAY == true ]]; then
-		start_func ${FUNCNAME[0]} "Password spraying"
+	# Create necessary directories
+	if ! mkdir -p "vulns"; then
+		printf "%b[!] Failed to create directories.%b\n" "$bred" "$reset"
+		return 1
+	fi
+
+	# Check if the function should run
+	if { [[ ! -f "$called_fn_dir/.${FUNCNAME[0]}" ]] || [[ $DIFF == true ]]; } && [[ $SPRAY == true ]] &&
+		[[ -s "$dir/hosts/portscan_active.gnmap" ]] && ! [[ $domain =~ ^[0-9]+\.[0-9]+\.[0-9]+\.[0-9]+$ ]]; then
+
+		start_func "${FUNCNAME[0]}" "Password Spraying"
+
+		# Ensure portscan_active.gnmap exists and is not empty
+		if [[ ! -s "$dir/hosts/portscan_active.gnmap" ]]; then
+			printf "%b[!] File $dir/hosts/portscan_active.gnmap does not exist or is empty.%b\n" "$bred" "$reset"
+			end_func "Port scan results missing. Password Spraying aborted." "${FUNCNAME[0]}"
+			return 1
+		fi
+
+		printf "${yellow}\n[$(date +'%Y-%m-%d %H:%M:%S')] Running: Password Spraying with BruteSpray${reset}\n\n"
 
-		brutespray -f $dir/hosts/portscan_active.gnmap -T $BRUTESPRAY_CONCURRENCE -o $dir/vulns/brutespray 2>>"$LOGFILE" >/dev/null
+		# Run BruteSpray for password spraying
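+		# BruteSpray parses the gnmap output for login services (ssh, ftp, vnc, ...)
+		# and sprays default credentials against them; -T sets the concurrency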
+		brutespray -f "$dir/hosts/portscan_active.gnmap" -T "$BRUTESPRAY_CONCURRENCE" -o "$dir/vulns/brutespray" 2>>"$LOGFILE" >/dev/null
+
+		end_func "Results are saved in vulns/brutespray folder" "${FUNCNAME[0]}"
 
-		end_func "Results are saved in vulns/brutespray folder" ${FUNCNAME[0]}
 	else
+		# Handle cases where SPRAY is false, required files are missing, or already processed
 		if [[ $SPRAY == false ]]; then
-			printf "\n${yellow}[$(date +'%Y-%m-%d %H:%M:%S')] ${FUNCNAME[0]} skipped in this mode or defined in reconftw.cfg ${reset}\n"
+			printf "\n${yellow}[$(date +'%Y-%m-%d %H:%M:%S')] ${FUNCNAME[0]} skipped due to configuration settings.${reset}\n"
+		elif [[ ! -s "$dir/hosts/portscan_active.gnmap" ]]; then
+			printf "\n${yellow}[$(date +'%Y-%m-%d %H:%M:%S')] ${FUNCNAME[0]} skipped: No active port scan results found.${reset}\n\n"
 		else
-			printf "${yellow}[$(date +'%Y-%m-%d %H:%M:%S')] ${FUNCNAME[0]} is already processed, to force executing ${FUNCNAME[0]} delete\n    $called_fn_dir/.${FUNCNAME[0]} ${reset}\n\n"
+			printf "${yellow}[$(date +'%Y-%m-%d %H:%M:%S')] ${FUNCNAME[0]} has already been processed. To force execution, delete:\n    $called_fn_dir/.${FUNCNAME[0]} ${reset}\n\n"
 		fi
 	fi
 
@@ -2560,23 +4810,53 @@ function spraying() {
 
 function command_injection() {
 
-	mkdir -p {.tmp,gf,vulns}
-	if { [[ ! -f "$called_fn_dir/.${FUNCNAME[0]}" ]] || [[ $DIFF == true ]]; } && [[ $COMM_INJ == true ]] && [[ -s "gf/rce.txt" ]]; then
-		start_func ${FUNCNAME[0]} "Command Injection checks"
-		[ -s "gf/rce.txt" ] && cat gf/rce.txt | qsreplace FUZZ | sed '/FUZZ/!d' | anew -q .tmp/tmp_rce.txt
-		if [[ $DEEP == true ]] || [[ $(cat .tmp/tmp_rce.txt | wc -l) -le $DEEP_LIMIT ]]; then
-			[ -s ".tmp/tmp_rce.txt" ] && python3 ${tools}/commix/commix.py --batch -m .tmp/tmp_rce.txt --output-dir vulns/command_injection.txt 2>>"$LOGFILE" >/dev/null
-			end_func "Results are saved in vulns/command_injection folder" ${FUNCNAME[0]}
+	# Create necessary directories
+	if ! mkdir -p .tmp gf vulns; then
+		printf "%b[!] Failed to create directories.%b\n" "$bred" "$reset"
+		return 1
+	fi
+
+	# Check if the function should run
+	if { [[ ! -f "$called_fn_dir/.${FUNCNAME[0]}" ]] || [[ $DIFF == true ]]; } && [[ $COMM_INJ == true ]] &&
+		[[ -s "gf/rce.txt" ]] && ! [[ $domain =~ ^[0-9]+\.[0-9]+\.[0-9]+\.[0-9]+$ ]]; then
+
+		start_func "${FUNCNAME[0]}" "Command Injection Checks"
+
+		# Ensure gf/rce.txt is not empty and process it
+		if [[ -s "gf/rce.txt" ]]; then
+			printf "${yellow}\n[$(date +'%Y-%m-%d %H:%M:%S')] Running: Command Injection Payload Generation${reset}\n\n"
+
+			# Process rce.txt with qsreplace and filter lines containing 'FUZZ'
+			qsreplace "FUZZ" <"gf/rce.txt" | sed '/FUZZ/!d' | anew -q ".tmp/tmp_rce.txt"
+
+			# Determine whether to proceed based on DEEP flag or number of URLs
+			URL_COUNT=$(wc -l <".tmp/tmp_rce.txt")
+			if [[ $DEEP == true ]] || [[ $URL_COUNT -le $DEEP_LIMIT ]]; then
+
+				# Run Commix against the candidate URLs
+				if [[ -s ".tmp/tmp_rce.txt" ]]; then
+					printf "${yellow}\n[$(date +'%Y-%m-%d %H:%M:%S')] Running: Commix for Command Injection Checks${reset}\n\n"
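+					# commix reads the bulk target list via -m and --batch answers every
+					# prompt with the default, keeping the scan non-interactive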
+					python3 "${tools}/commix/commix.py" --batch -m ".tmp/tmp_rce.txt" --output-dir "vulns/command_injection" 2>>"$LOGFILE" >/dev/null
+				fi
+
+				# Additional tools can be integrated here (e.g., Ghauri, sqlmap)
+
+				end_func "Results are saved in vulns/command_injection folder" "${FUNCNAME[0]}"
+			else
+				end_func "Skipping Command Injection: Too many URLs to test, try with --deep flag." "${FUNCNAME[0]}"
+			fi
 		else
-			end_func "Skipping Command injection: Too many URLs to test, try with --deep flag" ${FUNCNAME[0]}
+			end_func "No gf/rce.txt file found, Command Injection Checks skipped." "${FUNCNAME[0]}"
+			return
 		fi
 	else
+		# Handle cases where COMM_INJ is false, no vulnerable URLs, or already processed
 		if [[ $COMM_INJ == false ]]; then
-			printf "\n${yellow}[$(date +'%Y-%m-%d %H:%M:%S')] ${FUNCNAME[0]} skipped in this mode or defined in reconftw.cfg ${reset}\n"
+			printf "\n${yellow}[$(date +'%Y-%m-%d %H:%M:%S')] ${FUNCNAME[0]} skipped due to configuration settings.${reset}\n"
 		elif [[ ! -s "gf/rce.txt" ]]; then
-			printf "\n${yellow}[$(date +'%Y-%m-%d %H:%M:%S')] ${FUNCNAME[0]} No URLs potentially vulnerables to Command Injection ${reset}\n\n"
+			printf "\n${yellow}[$(date +'%Y-%m-%d %H:%M:%S')] ${FUNCNAME[0]} skipped: No URLs potentially vulnerable to Command Injection.${reset}\n\n"
 		else
-			printf "${yellow}[$(date +'%Y-%m-%d %H:%M:%S')] ${FUNCNAME[0]} is already processed, to force executing ${FUNCNAME[0]} delete\n    $called_fn_dir/.${FUNCNAME[0]} ${reset}\n\n"
+			printf "${yellow}[$(date +'%Y-%m-%d %H:%M:%S')] ${FUNCNAME[0]} has already been processed. To force execution, delete:\n    $called_fn_dir/.${FUNCNAME[0]} ${reset}\n\n"
 		fi
 	fi
 
@@ -2584,30 +4864,65 @@ function command_injection() {
 
 function 4xxbypass() {
 
-	mkdir -p {.tmp,fuzzing,vulns}
-	if { [[ ! -f "$called_fn_dir/.${FUNCNAME[0]}" ]] || [[ $DIFF == true ]]; } && [[ $BYPASSER4XX == true ]]; then
-		if [[ $(cat fuzzing/fuzzing_full.txt 2>/dev/null | grep -E '^4' | grep -Ev '^404' | cut -d ' ' -f3 | wc -l) -le 1000 ]] || [[ $DEEP == true ]]; then
-			start_func "403 bypass"
-			cat $dir/fuzzing/fuzzing_full.txt 2>/dev/null | grep -E '^4' | grep -Ev '^404' | cut -d ' ' -f3 >$dir/.tmp/403test.txt
+	# Create necessary directories
+	if ! mkdir -p .tmp fuzzing vulns; then
+		printf "%b[!] Failed to create directories.%b\n" "$bred" "$reset"
+		return 1
+	fi
+
+	# Check if the function should run
+	if { [[ ! -f "$called_fn_dir/.${FUNCNAME[0]}" ]] || [[ $DIFF == true ]]; } && [[ $BYPASSER4XX == true ]] &&
+		! [[ $domain =~ ^[0-9]+\.[0-9]+\.[0-9]+\.[0-9]+$ ]]; then
+
+		# Extract candidate URLs that returned 4xx (but not 404) during fuzzing
+		grep -E '^4' "fuzzing/fuzzing_full.txt" 2>/dev/null | grep -Ev '^404' | awk '{print $3}' | anew -q ".tmp/403test.txt"
+
+		# Count the number of URLs to process
+		URL_COUNT=$(wc -l <".tmp/403test.txt")
+		if [[ $DEEP == true ]] || [[ $URL_COUNT -le $DEEP_LIMIT ]]; then
+
+			start_func "${FUNCNAME[0]}" "403 Bypass"
+			printf "${yellow}\n[$(date +'%Y-%m-%d %H:%M:%S')] Running: 403 Bypass with nomore403${reset}\n\n"
+
+			# Navigate to nomore403 tool directory
+			if ! pushd "${tools}/nomore403" >/dev/null; then
+				printf "%b[!] Failed to navigate to nomore403 directory.%b\n" "$bred" "$reset"
+				end_func "Failed to navigate to nomore403 directory during 403 Bypass." "${FUNCNAME[0]}"
+				return 1
+			fi
+
+			# Run nomore403 on the processed URLs
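+			# nomore403 replays each URL with header, path and HTTP-method mutations,
+			# hunting for request variants that slip past the 401/403 response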
+			./nomore403 <"$dir/.tmp/403test.txt" >"$dir/.tmp/4xxbypass.txt" 2>>"$LOGFILE"
+
+			# Return to the original directory
+			if ! popd >/dev/null; then
+				printf "%b[!] Failed to return to the original directory.%b\n" "$bred" "$reset"
+				end_func "Failed to return to the original directory during 403 Bypass." "${FUNCNAME[0]}"
+				return 1
+			fi
 
-			pushd "${tools}/nomore403" >/dev/null || {
-				echo "Failed to cd directory in ${FUNCNAME[0]} @ line ${LINENO}"
-			}
+			# Append unique bypassed URLs to the vulns directory
+			if [[ -s "$dir/.tmp/4xxbypass.txt" ]]; then
+				anew -q "vulns/4xxbypass.txt" <"$dir/.tmp/4xxbypass.txt"
+			else
+				printf "%b[!] No bypassed URLs found in 4xxbypass.txt.%b\n" "$bred" "$reset"
+			fi
+
+			end_func "Results are saved in vulns/4xxbypass.txt" "${FUNCNAME[0]}"
 
-			cat $dir/.tmp/403test.txt | ./nomore403 >$dir/.tmp/4xxbypass.txt
-			popd >/dev/null || {
-				echo "Failed to popd in ${FUNCNAME[0]} @ line ${LINENO}"
-			}
-			[ -s ".tmp/4xxbypass.txt" ] && cat .tmp/4xxbypass.txt | anew -q vulns/4xxbypass.txt
-			end_func "Results are saved in vulns/4xxbypass.txt" ${FUNCNAME[0]}
 		else
-			notification "Too many urls to bypass, skipping" warn
+			notification "Too many URLs to bypass, skipping" warn
+			end_func "Skipping 403 bypass: Too many URLs to test, try with --deep flag." "${FUNCNAME[0]}"
 		fi
+
 	else
+		# Handle cases where BYPASSER4XX is false, no fuzzing results exist, or already processed
 		if [[ $BYPASSER4XX == false ]]; then
-			printf "\n${yellow}[$(date +'%Y-%m-%d %H:%M:%S')] ${FUNCNAME[0]} skipped in this mode or defined in reconftw.cfg ${reset}\n"
+			printf "\n${yellow}[$(date +'%Y-%m-%d %H:%M:%S')] ${FUNCNAME[0]} skipped due to configuration settings.${reset}\n"
+		elif [[ ! -s "fuzzing/fuzzing_full.txt" ]]; then
+			printf "\n${yellow}[$(date +'%Y-%m-%d %H:%M:%S')] ${FUNCNAME[0]} skipped: No URLs potentially vulnerable to 4xx bypass.${reset}\n\n"
 		else
-			printf "${yellow}[$(date +'%Y-%m-%d %H:%M:%S')] ${FUNCNAME[0]} is already processed, to force executing ${FUNCNAME[0]} delete\n    $called_fn_dir/.${FUNCNAME[0]} ${reset}\n\n"
+			printf "${yellow}[$(date +'%Y-%m-%d %H:%M:%S')] ${FUNCNAME[0]} has already been processed. To force execution, delete:\n    $called_fn_dir/.${FUNCNAME[0]} ${reset}\n\n"
 		fi
 	fi
 
@@ -2615,21 +4930,55 @@ function 4xxbypass() {
 
 function prototype_pollution() {
 
-	mkdir -p {.tmp,webs,vulns}
-	if { [[ ! -f "$called_fn_dir/.${FUNCNAME[0]}" ]] || [[ $DIFF == true ]]; } && [[ $PROTO_POLLUTION == true ]]; then
-		start_func ${FUNCNAME[0]} "Prototype Pollution checks"
-		if [[ $DEEP == true ]] || [[ $(cat webs/url_extract_nodupes.txt | wc -l) -le $DEEP_LIMIT ]]; then
-			[ -s "webs/url_extract_nodupes.txt" ] && cat webs/url_extract_nodupes.txt | ppmap &>.tmp/prototype_pollution.txt
-			[ -s ".tmp/prototype_pollution.txt" ] && cat .tmp/prototype_pollution.txt | grep "EXPL" | anew -q vulns/prototype_pollution.txt
-			end_func "Results are saved in vulns/prototype_pollution.txt" ${FUNCNAME[0]}
+	# Create necessary directories
+	if ! mkdir -p .tmp webs vulns; then
+		printf "%b[!] Failed to create directories.%b\n" "$bred" "$reset"
+		return 1
+	fi
+
+	# Check if the function should run
+	if { [[ ! -f "$called_fn_dir/.${FUNCNAME[0]}" ]] || [[ $DIFF == true ]]; } && [[ $PROTO_POLLUTION == true ]] &&
+		! [[ $domain =~ ^[0-9]+\.[0-9]+\.[0-9]+\.[0-9]+$ ]]; then
+
+		start_func "${FUNCNAME[0]}" "Prototype Pollution Checks"
+
+		# Determine whether to proceed based on DEEP flag or number of URLs
+		URL_COUNT=$(wc -l <"webs/url_extract_nodupes.txt" 2>/dev/null || echo 0)
+		if [[ $DEEP == true ]] || [[ $URL_COUNT -le $DEEP_LIMIT ]]; then
+
+			# Ensure url_extract_nodupes.txt exists and has content
+			if [[ -s "webs/url_extract_nodupes.txt" ]]; then
+				printf "${yellow}\n[$(date +'%Y-%m-%d %H:%M:%S')] Running: Prototype Pollution Mapping${reset}\n\n"
+
+				# Process URL list with ppmap and save results
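+				# ppmap tests each URL for client-side prototype pollution using known
+				# gadgets; findings are tagged EXPL, which is what the grep below keeps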
+				ppmap <"webs/url_extract_nodupes.txt" >".tmp/prototype_pollution.txt" 2>>"$LOGFILE"
+
+				# Filter and save relevant results
+				if [[ -s ".tmp/prototype_pollution.txt" ]]; then
+					grep "EXPL" ".tmp/prototype_pollution.txt" | anew -q "vulns/prototype_pollution.txt"
+				else
+					printf "%b[!] No Prototype Pollution findings detected.%b\n" "$bred" "$reset"
+				fi
+		# Create a marker file so future runs detect this function as already processed
+				end_func "Results are saved in vulns/prototype_pollution.txt" "${FUNCNAME[0]}"
+			else
+				printf "%b[!] File webs/url_extract_nodupes.txt is missing or empty.%b\n" "$bred" "$reset"
+				end_func "File webs/url_extract_nodupes.txt is missing or empty." "${FUNCNAME[0]}"
+				return 1
+			fi
+
 		else
-			end_func "Skipping Prototype Pollution: Too many URLs to test, try with --deep flag" ${FUNCNAME[0]}
+			end_func "Skipping Prototype Pollution: Too many URLs to test, try with --deep flag." "${FUNCNAME[0]}"
 		fi
+
 	else
+		# Handle cases where PROTO_POLLUTION is false, no vulnerable URLs, or already processed
 		if [[ $PROTO_POLLUTION == false ]]; then
-			printf "\n${yellow}[$(date +'%Y-%m-%d %H:%M:%S')] ${FUNCNAME[0]} skipped in this mode or defined in reconftw.cfg ${reset}\n"
+			printf "\n${yellow}[$(date +'%Y-%m-%d %H:%M:%S')] ${FUNCNAME[0]} skipped due to configuration settings.${reset}\n"
+		elif [[ ! -s "webs/url_extract_nodupes.txt" ]]; then
+			printf "\n${yellow}[$(date +'%Y-%m-%d %H:%M:%S')] ${FUNCNAME[0]} skipped: No URLs potentially vulnerable to Prototype Pollution.${reset}\n\n"
 		else
-			printf "${yellow}[$(date +'%Y-%m-%d %H:%M:%S')] ${FUNCNAME[0]} is already processed, to force executing ${FUNCNAME[0]} delete\n    $called_fn_dir/.${FUNCNAME[0]} ${reset}\n\n"
+			printf "${yellow}[$(date +'%Y-%m-%d %H:%M:%S')] ${FUNCNAME[0]} has already been processed. To force execution, delete:\n    $called_fn_dir/.${FUNCNAME[0]} ${reset}\n\n"
 		fi
 	fi
 
@@ -2637,29 +4986,71 @@ function prototype_pollution() {
 
 function smuggling() {
 
-	mkdir -p {.tmp,webs,vulns/smuggling}
-	if { [[ ! -f "$called_fn_dir/.${FUNCNAME[0]}" ]] || [[ $DIFF == true ]]; } && [[ $SMUGGLING == true ]]; then
-		start_func ${FUNCNAME[0]} "HTTP Request Smuggling checks"
-		[ ! -s "webs/webs_all.txt" ] && cat webs/webs.txt webs/webs_uncommon_ports.txt 2>/dev/null | anew -q webs/webs_all.txt
-		if [[ $DEEP == true ]] || [[ $(cat webs/webs_all.txt | wc -l) -le $DEEP_LIMIT ]]; then
-			pushd "${tools}/smuggler" >/dev/null || {
-				echo "Failed to cd directory in ${FUNCNAME[0]} @ line ${LINENO}"
-			}
-			cat $dir/webs/webs_all.txt | python3 smuggler.py -q --no-color 2>/dev/null | anew -q $dir/.tmp/smuggling.txt
-			find payloads -type f ! -name "README*" -exec mv {} $dir/vulns/smuggling/ \;
-			popd >/dev/null || {
-				echo "Failed to popd in ${FUNCNAME[0]} @ line ${LINENO}"
-			}
-			[ -s ".tmp/smuggling.txt" ] && cat .tmp/smuggling.txt | anew -q vulns/smuggling_log.txt
-			end_func "Results are saved in vulns/smuggling_log.txt and findings in vulns/smuggling/" ${FUNCNAME[0]}
-		else
-			end_func "Skipping Request Smuggling: Too many webs to test, try with --deep flag" ${FUNCNAME[0]}
+	# Create necessary directories
+	if ! mkdir -p .tmp webs vulns/smuggling; then
+		printf "%b[!] Failed to create directories.%b\n" "$bred" "$reset"
+		return 1
+	fi
+
+	# Check if the function should run
+	if { [[ ! -f "$called_fn_dir/.${FUNCNAME[0]}" ]] || [[ $DIFF == true ]]; } && [[ $SMUGGLING == true ]] &&
+		! [[ $domain =~ ^[0-9]+\.[0-9]+\.[0-9]+\.[0-9]+$ ]]; then
+
+		start_func "${FUNCNAME[0]}" "HTTP Request Smuggling Checks"
+
+		# Combine webs.txt and webs_uncommon_ports.txt into webs_all.txt if it doesn't exist
+		if [[ ! -s "webs/webs_all.txt" ]]; then
+			cat "webs/webs.txt" "webs/webs_uncommon_ports.txt" 2>/dev/null | anew -q "webs/webs_all.txt"
+		fi
+
+		# Determine whether to proceed based on DEEP flag or number of URLs
+		URL_COUNT=$(wc -l <"webs/webs_all.txt")
+		if [[ $DEEP == true ]] || [[ $URL_COUNT -le $DEEP_LIMIT ]]; then
+
+			printf "${yellow}\n[$(date +'%Y-%m-%d %H:%M:%S')] Running: HTTP Request Smuggling Checks${reset}\n\n"
+
+			# Navigate to smuggler tool directory
+			if ! pushd "${tools}/smuggler" >/dev/null; then
+				printf "%b[!] Failed to navigate to smuggler directory.%b\n" "$bred" "$reset"
+				end_func "Failed to navigate to smuggler directory during HTTP Request Smuggling Checks." "${FUNCNAME[0]}"
+				return 1
+			fi
+
+			# Run smuggler.py on the list of URLs
+			cat "$dir/webs/webs_all.txt" | python3 smuggler.py -q --no-color 2>>"$LOGFILE" | anew -q "$dir/.tmp/smuggling.txt"
+
+			# Move payload files to vulns/smuggling/
+			find "payloads" -type f ! -name "README*" -exec mv {} "$dir/vulns/smuggling/" \;
+
+			# Return to the original directory
+			if ! popd >/dev/null; then
+				printf "%b[!] Failed to return to the original directory.%b\n" "$bred" "$reset"
+				end_func "Failed to return to the original directory during HTTP Request Smuggling Checks." "${FUNCNAME[0]}"
+				return 1
+			fi
+
+			# Append unique smuggling results to vulns directory
+			if [[ -s "$dir/.tmp/smuggling.txt" ]]; then
+				cat "$dir/.tmp/smuggling.txt" | anew -q "vulns/smuggling_log.txt"
+			else
+				printf "%b[!] No smuggling results found in smuggling.txt.%b\n" "$bred" "$reset"
+			fi
+
+			end_func "Results are saved in vulns/smuggling_log.txt and findings in vulns/smuggling/" "${FUNCNAME[0]}"
+
+		else
+			notification "Too many URLs to test, skipping" warn
+			end_func "Skipping HTTP Request Smuggling: Too many URLs to test, try with --deep flag." "${FUNCNAME[0]}"
 		fi
+
 	else
+		# Handle cases where SMUGGLING is false, no vulnerable URLs, or already processed
 		if [[ $SMUGGLING == false ]]; then
 			printf "\n${yellow}[$(date +'%Y-%m-%d %H:%M:%S')] ${FUNCNAME[0]} skipped in this mode or defined in reconftw.cfg ${reset}\n"
+		elif [[ ! -s "webs/webs_all.txt" ]]; then
+			printf "\n${yellow}[$(date +'%Y-%m-%d %H:%M:%S')] ${FUNCNAME[0]} skipped: No URLs potentially vulnerable to HTTP Request Smuggling.${reset}\n\n"
 		else
-			printf "${yellow}[$(date +'%Y-%m-%d %H:%M:%S')] ${FUNCNAME[0]} is already processed, to force executing ${FUNCNAME[0]} delete\n    $called_fn_dir/.${FUNCNAME[0]} ${reset}\n\n"
+			printf "${yellow}[$(date +'%Y-%m-%d %H:%M:%S')] ${FUNCNAME[0]} has already been processed. To force execution, delete:\n    $called_fn_dir/.${FUNCNAME[0]} ${reset}\n\n"
 		fi
 	fi
 
@@ -2667,29 +5058,68 @@ function smuggling() {
 
 function webcache() {
 
-	mkdir -p {.tmp,webs,vulns}
-	if { [[ ! -f "$called_fn_dir/.${FUNCNAME[0]}" ]] || [[ $DIFF == true ]]; } && [[ $WEBCACHE == true ]]; then
-		start_func ${FUNCNAME[0]} "Web Cache Poisoning checks"
-		[ ! -s "webs/webs_all.txt" ] && cat webs/webs.txt webs/webs_uncommon_ports.txt 2>/dev/null | anew -q webs/webs_all.txt
-		if [[ $DEEP == true ]] || [[ $(cat webs/webs_all.txt | wc -l) -le $DEEP_LIMIT ]]; then
-			pushd "${tools}/Web-Cache-Vulnerability-Scanner" >/dev/null || {
-				echo "Failed to cd directory in ${FUNCNAME[0]} @ line ${LINENO}"
-			}
+	# Create necessary directories
+	if ! mkdir -p .tmp webs vulns; then
+		printf "%b[!] Failed to create directories.%b\n" "$bred" "$reset"
+		return 1
+	fi
+
+	# Check if the function should run
+	if { [[ ! -f "$called_fn_dir/.${FUNCNAME[0]}" ]] || [[ $DIFF == true ]]; } && [[ $WEBCACHE == true ]] &&
+		! [[ $domain =~ ^[0-9]+\.[0-9]+\.[0-9]+\.[0-9]+$ ]]; then
+
+		start_func "${FUNCNAME[0]}" "Web Cache Poisoning Checks"
+
+		# Combine webs.txt and webs_uncommon_ports.txt into webs_all.txt if it doesn't exist
+		if [[ ! -s "webs/webs_all.txt" ]]; then
+			cat webs/webs.txt webs/webs_uncommon_ports.txt 2>/dev/null | anew -q "webs/webs_all.txt"
+		fi
+
+		# Determine whether to proceed based on DEEP flag or number of URLs
+		URL_COUNT=$(wc -l <"webs/webs_all.txt")
+		if [[ $DEEP == true ]] || [[ $URL_COUNT -le $DEEP_LIMIT ]]; then
+
+			printf "${yellow}\n[$(date +'%Y-%m-%d %H:%M:%S')] Running: Web Cache Poisoning Checks${reset}\n\n"
+
+			# Navigate to Web-Cache-Vulnerability-Scanner tool directory
+			if ! pushd "${tools}/Web-Cache-Vulnerability-Scanner" >/dev/null; then
+				printf "%b[!] Failed to navigate to Web-Cache-Vulnerability-Scanner directory.%b\n" "$bred" "$reset"
+				end_func "Failed to navigate to Web-Cache-Vulnerability-Scanner directory during Web Cache Poisoning Checks." "${FUNCNAME[0]}"
+				return 1
+			fi
+
+			# Run the Web-Cache-Vulnerability-Scanner
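+			# The "file:" prefix makes WCVS read its target list from a file; -v 0 keeps output minimal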
+			./Web-Cache-Vulnerability-Scanner -u "file:$dir/webs/webs_all.txt" -v 0 2>>"$LOGFILE" |
+				anew -q "$dir/.tmp/webcache.txt"
+
+			# Return to the original directory
+			if ! popd >/dev/null; then
+				printf "%b[!] Failed to return to the original directory.%b\n" "$bred" "$reset"
+				end_func "Failed to return to the original directory during Web Cache Poisoning Checks." "${FUNCNAME[0]}"
+				return 1
+			fi
+
+			# Append unique findings to vulns/webcache.txt
+			if [[ -s "$dir/.tmp/webcache.txt" ]]; then
+				cat "$dir/.tmp/webcache.txt" | anew -q "vulns/webcache.txt"
+			else
+				printf "%b[!] No findings found in webcache.txt.%b\n" "$bred" "$reset"
+			fi
+
+			end_func "Results are saved in vulns/webcache.txt" "${FUNCNAME[0]}"
 
-			Web-Cache-Vulnerability-Scanner -u file:$dir/webs/webs_all.txt -v 0 2>/dev/null | anew -q $dir/.tmp/webcache.txt
-			popd >/dev/null || {
-				echo "Failed to popd in ${FUNCNAME[0]} @ line ${LINENO}"
-			}
-			[ -s ".tmp/webcache.txt" ] && cat .tmp/webcache.txt | anew -q vulns/webcache.txt
-			end_func "Results are saved in vulns/webcache.txt" ${FUNCNAME[0]}
 		else
-			end_func "Web Cache Poisoning: Too many webs to test, try with --deep flag" ${FUNCNAME[0]}
+			end_func "Skipping Web Cache Poisoning: Too many URLs to test, try with --deep flag." "${FUNCNAME[0]}"
 		fi
+
 	else
+		# Handle cases where WEBCACHE is false, no vulnerable URLs, or already processed
 		if [[ $WEBCACHE == false ]]; then
-			printf "\n${yellow}[$(date +'%Y-%m-%d %H:%M:%S')] ${FUNCNAME[0]} skipped in this mode or defined in reconftw.cfg ${reset}\n"
+			printf "\n${yellow}[$(date +'%Y-%m-%d %H:%M:%S')] ${FUNCNAME[0]} skipped due to configuration settings.${reset}\n"
+		elif [[ ! -s "webs/webs_all.txt" ]]; then
+			printf "\n${yellow}[$(date +'%Y-%m-%d %H:%M:%S')] ${FUNCNAME[0]} skipped: No URLs potentially vulnerable to Web Cache Poisoning.${reset}\n\n"
 		else
-			printf "${yellow}[$(date +'%Y-%m-%d %H:%M:%S')] ${FUNCNAME[0]} is already processed, to force executing ${FUNCNAME[0]} delete\n    $called_fn_dir/.${FUNCNAME[0]} ${reset}\n\n"
+			printf "${yellow}[$(date +'%Y-%m-%d %H:%M:%S')] ${FUNCNAME[0]} has already been processed. To force execution, delete:\n    $called_fn_dir/.${FUNCNAME[0]} ${reset}\n\n"
 		fi
 	fi
 
@@ -2697,29 +5127,75 @@ function webcache() {
 
 function fuzzparams() {
 
-	mkdir -p {.tmp,webs,vulns}
-	if { [[ ! -f "$called_fn_dir/.${FUNCNAME[0]}" ]] || [[ $DIFF == true ]]; } && [[ $FUZZPARAMS == true ]]; then
-		start_func ${FUNCNAME[0]} "Fuzzing params values checks"
-		if [[ $DEEP == true ]] || [[ $(cat webs/url_extract_nodupes.txt | wc -l) -le $DEEP_LIMIT2 ]]; then
+	# Create necessary directories
+	if ! mkdir -p .tmp webs vulns; then
+		printf "%b[!] Failed to create directories.%b\n" "$bred" "$reset"
+		return 1
+	fi
+
+	# Check if the function should run
+	if { [[ ! -f "$called_fn_dir/.${FUNCNAME[0]}" ]] || [[ $DIFF == true ]]; } && [[ $FUZZPARAMS == true ]] &&
+		! [[ $domain =~ ^[0-9]+\.[0-9]+\.[0-9]+\.[0-9]+$ ]]; then
+
+		start_func "${FUNCNAME[0]}" "Fuzzing Parameters Values Checks"
+
+		# Determine if we should proceed based on DEEP flag or number of URLs
+		URL_COUNT=$(wc -l <"webs/url_extract_nodupes.txt")
+		if [[ $DEEP == true ]] || [[ $URL_COUNT -le $DEEP_LIMIT2 ]]; then
+
 			if [[ $AXIOM != true ]]; then
-				nuclei -update 2>>"$LOGFILE" >/dev/null
-				git -C ${tools}/fuzzing-templates pull 2>>"$LOGFILE"
-				cat webs/url_extract_nodupes.txt 2>/dev/null | nuclei -silent -retries 3 -rl $NUCLEI_RATELIMIT -t ${tools}/fuzzing-templates -dast -o .tmp/fuzzparams.txt
+				printf "${yellow}\n[$(date +'%Y-%m-%d %H:%M:%S')] Running: Nuclei Setup and Execution${reset}\n\n"
+
+				# Update Nuclei
+				if ! nuclei -update 2>>"$LOGFILE" >/dev/null; then
+					printf "%b[!] Nuclei update failed.%b\n" "$bred" "$reset"
+					end_func "Nuclei update failed." "${FUNCNAME[0]}"
+					return 1
+				fi
+
+				# Pull latest fuzzing templates
+				if ! git -C ${NUCLEI_FUZZING_TEMPLATES_PATH} pull 2>>"$LOGFILE"; then
+					printf "%b[!] Failed to pull latest fuzzing templates.%b\n" "$bred" "$reset"
+					end_func "Failed to pull latest fuzzing templates." "${FUNCNAME[0]}"
+					return 1
+				fi
+
+				# Execute Nuclei with the fuzzing templates
+				nuclei -silent -retries 3 -rl "$NUCLEI_RATELIMIT" -t ${NUCLEI_FUZZING_TEMPLATES_PATH} -dast -o ".tmp/fuzzparams.txt" <"webs/url_extract_nodupes.txt" 2>>"$LOGFILE"
+
 			else
-				axiom-exec "git clone https://github.com/projectdiscovery/fuzzing-templates /home/op/fuzzing-templates" &>/dev/null
-				axiom-scan webs/url_extract_nodupes.txt -m nuclei -nh -retries 3 -w /home/op/fuzzing-templates -rl $NUCLEI_RATELIMIT -dast -o .tmp/fuzzparams.txt $AXIOM_EXTRA_ARGS 2>>"$LOGFILE" >/dev/null
+				printf "${yellow}\n[$(date +'%Y-%m-%d %H:%M:%S')] Running: Axiom with Nuclei${reset}\n\n"
+
+				# Clone fuzzing-templates if not already present
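+				# NOTE: this -d check runs on the controller, while the clone happens on the axiom fleet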
+				if [[ ! -d "/home/op/fuzzing-templates" ]]; then
+					axiom-exec "git clone https://github.com/projectdiscovery/fuzzing-templates /home/op/fuzzing-templates" &>/dev/null
+				fi
+
+				# Execute Axiom scan with Nuclei
+				axiom-scan "webs/url_extract_nodupes.txt" -m nuclei -nh -retries 3 -w "/home/op/fuzzing-templates" -rl "$NUCLEI_RATELIMIT" -dast -o ".tmp/fuzzparams.txt" $AXIOM_EXTRA_ARGS 2>>"$LOGFILE" >/dev/null
+			fi
 
+			# Append unique results to vulns/fuzzparams.txt
+			if [[ -s ".tmp/fuzzparams.txt" ]]; then
+				cat ".tmp/fuzzparams.txt" | anew -q "vulns/fuzzparams.txt"
+			else
+				printf "%b[!] No results found in fuzzparams.txt.%b\n" "$bred" "$reset"
 			fi
-			[ -s ".tmp/fuzzparams.txt" ] && cat .tmp/fuzzparams.txt | anew -q vulns/fuzzparams.txt
-			end_func "Results are saved in vulns/fuzzparams.txt" ${FUNCNAME[0]}
+
+			end_func "Results are saved in vulns/fuzzparams.txt" "${FUNCNAME[0]}"
+
 		else
-			end_func "Fuzzing params values: Too many entries to test, try with --deep flag" ${FUNCNAME[0]}
+			end_func "Fuzzing Parameters Values: Too many entries to test, try with --deep flag" "${FUNCNAME[0]}"
 		fi
+
 	else
+		# Handle cases where FUZZPARAMS is false, no vulnerable URLs, or already processed
 		if [[ $FUZZPARAMS == false ]]; then
-			printf "\n${yellow}[$(date +'%Y-%m-%d %H:%M:%S')] ${FUNCNAME[0]} skipped in this mode or defined in reconftw.cfg ${reset}\n"
+			printf "\n${yellow}[$(date +'%Y-%m-%d %H:%M:%S')] ${FUNCNAME[0]} skipped due to configuration settings.${reset}\n"
+		elif [[ ! -s "webs/url_extract_nodupes.txt" ]]; then
+			printf "\n${yellow}[$(date +'%Y-%m-%d %H:%M:%S')] ${FUNCNAME[0]} skipped: No URLs potentially vulnerable to Fuzzing Parameters.${reset}\n\n"
 		else
-			printf "${yellow}[$(date +'%Y-%m-%d %H:%M:%S')] ${FUNCNAME[0]} is already processed, to force executing ${FUNCNAME[0]} delete\n    $called_fn_dir/.${FUNCNAME[0]} ${reset}\n\n"
+			printf "${yellow}[$(date +'%Y-%m-%d %H:%M:%S')] ${FUNCNAME[0]} has already been processed. To force execution, delete:\n    $called_fn_dir/.${FUNCNAME[0]} ${reset}\n\n"
 		fi
 	fi
 
@@ -3156,7 +5632,7 @@ function passive() {
 	ip_info
 	emails
 	google_dorks
-	github_dorks
+	#github_dorks
 	github_repos
 	metadata
 	apileaks
@@ -3201,7 +5677,7 @@ function osint() {
 	ip_info
 	emails
 	google_dorks
-	github_dorks
+	#github_dorks
 	github_repos
 	metadata
 	apileaks
@@ -3281,7 +5757,7 @@ function multi_osint() {
 		ip_info
 		emails
 		google_dorks
-		github_dorks
+		#github_dorks
 		github_repos
 		metadata
 		apileaks
@@ -3303,7 +5779,7 @@ function recon() {
 	ip_info
 	emails
 	google_dorks
-	github_dorks
+	#github_dorks
 	github_repos
 	metadata
 	apileaks
@@ -3419,7 +5895,7 @@ function multi_recon() {
 		ip_info
 		emails
 		google_dorks
-		github_dorks
+		#github_dorks
 		github_repos
 		metadata
 		apileaks

From fda333ebce4c3a717815f80026c844b66643f44b Mon Sep 17 00:00:00 2001
From: six2dez <alexis.fernandez@visma.com>
Date: Fri, 25 Oct 2024 12:07:13 +0200
Subject: [PATCH 11/34] push error fix

---
 install.sh | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/install.sh b/install.sh
index c0473ab0..b449bb96 100755
--- a/install.sh
+++ b/install.sh
@@ -368,7 +368,7 @@ function install_golang_version() {
 				;;
 			esac
 
-			"$SUDO" ln -sf /usr/local/go/bin/go /usr/local/bin/
+			"$SUDO" ln -sf /usr/local/go/bin/go /usr/local/bin/ 2>/dev/null
 			export GOROOT=/usr/local/go
 			export GOPATH="${HOME}/go"
 			export PATH="$GOPATH/bin:$GOROOT/bin:$HOME/.local/bin:$PATH"

From 108daa0f7befc6bda374da24322e7f30b8eccb88 Mon Sep 17 00:00:00 2001
From: six2dez <alexis.fernandez@visma.com>
Date: Fri, 25 Oct 2024 12:13:39 +0200
Subject: [PATCH 12/34] skip git clone output

---
 install.sh | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/install.sh b/install.sh
index b449bb96..f5c945fb 100755
--- a/install.sh
+++ b/install.sh
@@ -202,7 +202,7 @@ function install_tools() {
 		fi
 		# Clone the repository
 		if [[ ! -d "${dir}/${repo}" || -z "$(ls -A "${dir}/${repo}")" ]]; then
-			git clone --filter="blob:none" "https://github.com/${repos[$repo]}" "${dir}/${repo}" #&>/dev/null
+			git clone --filter="blob:none" "https://github.com/${repos[$repo]}" "${dir}/${repo}" &>/dev/null
 			exit_status=$?
 			if [[ $exit_status -ne 0 ]]; then
 				echo -e "${red}Unable to clone repository $repo.${reset}"

From ad112fe8bca9daf39803e42ab0c8cb9550513243 Mon Sep 17 00:00:00 2001
From: six2dez <alexis.fernandez@visma.com>
Date: Fri, 25 Oct 2024 12:58:22 +0200
Subject: [PATCH 13/34] Fix misconfig-mapper error handling and filtering

---
 reconftw.sh | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/reconftw.sh b/reconftw.sh
index 7c1b6713..1e3543d4 100755
--- a/reconftw.sh
+++ b/reconftw.sh
@@ -597,7 +597,7 @@ function third_party_misconfigs() {
 		fi
 
 		# Run misconfig-mapper and handle errors
-		./misconfig-mapper -target "$company_name" -service "*" | grep -v "\-\]" >"${dir}/osint/3rdparts_misconfigurations.txt"
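+		# Capture stderr (2>&1) and filter out "[-]" status lines and "Failed" probe noise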
+		./misconfig-mapper -target "$company_name" -service "*" 2>&1 | grep -v "\-\]" | grep -v "Failed" >"${dir}/osint/3rdparts_misconfigurations.txt"
 
 		# Return to the previous directory
 		if ! popd >/dev/null; then

From 4788212e8171248a8e535237e845dad5ae2ba5d3 Mon Sep 17 00:00:00 2001
From: six2dez <alexis.fernandez@visma.com>
Date: Tue, 29 Oct 2024 10:14:04 +0100
Subject: [PATCH 14/34] fix recursive perms

---
 reconftw.sh | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/reconftw.sh b/reconftw.sh
index 1e3543d4..8e3f0d71 100755
--- a/reconftw.sh
+++ b/reconftw.sh
@@ -2073,7 +2073,7 @@ function sub_recursive_brute() {
 			fi
 
 			if [[ -s ".tmp/brute_recursive_result.txt" ]]; then
-				cat .tmp/brute_recursive.txt | anew -q .tmp/brute_recursive_result.txt
+				cat .tmp/brute_recursive_result.txt | anew -q .tmp/brute_recursive.txt
 			fi
 
 			# Generate permutations

From 721fa88cbfedf60f9b5da907b7d527286da4298c Mon Sep 17 00:00:00 2001
From: wanetty <eduard.gm@gmail.com>
Date: Wed, 30 Oct 2024 11:12:10 +0100
Subject: [PATCH 15/34] Add proxmox deploy

---
 Proxmox/README.md               |  61 +++++++++++++
 Proxmox/reconftw_prox_deploy.sh | 150 ++++++++++++++++++++++++++++++++
 2 files changed, 211 insertions(+)
 create mode 100644 Proxmox/README.md
 create mode 100644 Proxmox/reconftw_prox_deploy.sh

diff --git a/Proxmox/README.md b/Proxmox/README.md
new file mode 100644
index 00000000..33fbcabd
--- /dev/null
+++ b/Proxmox/README.md
@@ -0,0 +1,61 @@
+# ReconFTW Proxmox LXC Deployment Script
+
+This script automates the deployment of ReconFTW in a Linux Container (LXC) on a Proxmox server. It simplifies the process of setting up a dedicated environment for reconnaissance activities.
+
+## Prerequisites
+
+- A Proxmox VE server (version 6.x or later)
+- Root access to the Proxmox server
+- Sufficient storage space on the Proxmox server
+
+## Usage
+
+1. Run the deployment script on your Proxmox server: `bash -c $(curl -fsSL https://raw.githubusercontent.com/six2dez/reconftw/master/Proxmox/reconftw_prox_deploy.sh)`
+
+2. Follow the prompts to configure your LXC container. You'll be asked for:
+- Container ID
+- Storage location
+- Root filesystem size
+- RAM allocation
+- Number of CPU cores
+- Hostname
+- Password
+
+3. The script will then:
+- Download the Debian template if not already present
+- Create and configure the LXC container
+- Install ReconFTW and its dependencies
+
+4. Once completed, the script will display the container information, including ID, hostname, and password.
+
+## Logging
+
+The script generates a log file in `/var/log/` with the format `reconftw_deploy_YYYYMMDD_HHMMSS.log`. Refer to this log for detailed information about the deployment process.
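+
+To follow the most recent deployment log in real time (a minimal example, assuming at least one deployment has started):
+
+```bash
+tail -f "$(ls -t /var/log/reconftw_deploy_*.log | head -n 1)"
+```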
+
+## Post-Installation
+
+After the script completes:
+
+1. You can access the container using:
+
+```bash
+pct enter <CONTAINER_ID>
+```
+
+2. ReconFTW will be installed in `/opt/reconftw/`. Navigate to this directory to use ReconFTW.
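+
+For example, a basic full-recon run (without attacks), using the flags documented in the main ReconFTW help (`example.com` is a placeholder target):
+
+```bash
+cd /opt/reconftw
+./reconftw.sh -d example.com -r
+```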
+
+3. Refer to the [ReconFTW documentation](https://github.com/six2dez/reconftw) for usage instructions.
+
+## Troubleshooting
+
+- If the script fails, check the log file for error messages.
+- Ensure you have sufficient storage space and resources on your Proxmox server.
+- Verify that your Proxmox server has internet access to download necessary packages.
+
+## Security Note
+
+Remember to change the default password after accessing the container for the first time.
+
+## Support
+
+For issues related to this deployment script, please open an issue in the GitHub repository. For ReconFTW-specific questions, refer to the [ReconFTW GitHub page](https://github.com/six2dez/reconftw).
\ No newline at end of file
diff --git a/Proxmox/reconftw_prox_deploy.sh b/Proxmox/reconftw_prox_deploy.sh
new file mode 100644
index 00000000..1b743c09
--- /dev/null
+++ b/Proxmox/reconftw_prox_deploy.sh
@@ -0,0 +1,150 @@
+#!/bin/bash
+# Enhanced script to deploy ReconFTW in an LXC container on Proxmox using Debian 11
+
+# Colors for better visualization
+RED='\033[0;31m'
+GREEN='\033[0;32m'
+YELLOW='\033[1;33m'
+NC='\033[0m'
+
+# Logging configuration
+LOGFILE="/var/log/reconftw_deploy_$(date +%Y%m%d_%H%M%S).log"
+exec 1> >(tee -a "$LOGFILE") 2>&1
+
+# Logging function
+log() {
+   echo -e "[$(date '+%Y-%m-%d %H:%M:%S')] $1"
+}
+
+# Function to show errors and exit
+error_exit() {
+   log "${RED}ERROR: $1${NC}"
+   exit 1
+}
+
+# Function to validate numbers
+validate_number() {
+   if ! [[ "$1" =~ ^[0-9]+$ ]]; then
+       error_exit "Please enter a valid number"
+   fi
+}
+
+# Enhanced input function with validation
+get_input() {
+   local prompt=$1
+   local default=$2
+   local validate_func=$3
+   local result
+
+   while true; do
+       read -p "$prompt [Default: $default]: " result
+       result="${result:-$default}"
+       
+       if [[ -n "$validate_func" ]]; then
+           if $validate_func "$result"; then
+               echo "$result"
+               return 0
+           fi
+       else
+           echo "$result"
+           return 0
+       fi
+   done
+}
+
+# Function to validate disk space
+check_storage_space() {
+   local storage=$1
+   local required_space=$2
+   
+   # Get available space in GB
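+   # (assumes pvesm status reports the available size in column 5 with a trailing G)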
+   available_space=$(pvesm status | grep "$storage" | awk '{print $5}' | sed 's/G//')
+   
+   if (( available_space < required_space )); then
+       error_exit "Not enough space in $storage. Available: ${available_space}GB, Required: ${required_space}GB"
+   fi
+}
+
+# Verify root execution
+[[ $EUID -ne 0 ]] && error_exit "This script must be run as root"
+
+# Verify Proxmox environment
+[[ ! -f /etc/pve/local/pve-ssl.key ]] && error_exit "This script must be run on a Proxmox server"
+
+# Template configuration
+TEMPLATE_NAME="debian-11-standard_11.7-1_amd64.tar.zst"
+TEMPLATE_PATH="local:vztmpl/${TEMPLATE_NAME}"
+
+# Verify and download template
+log "${YELLOW}Checking template...${NC}"
+if ! pveam list local | grep -q "$TEMPLATE_NAME"; then
+   log "Downloading template ${TEMPLATE_NAME}..."
+   pveam download local $TEMPLATE_NAME || error_exit "Error downloading template"
+fi
+
+# Get next available ID
+NEXTID=$(pvesh get /cluster/nextid)
+CONTAINER_ID=$(get_input "Container ID" $NEXTID validate_number)
+
+# Container configuration with validations
+STORAGE=$(get_input "Storage" "local-lvm")
+ROOTFS_SIZE=$(get_input "Root filesystem size (GB)" "20" validate_number)
+MEMORY=$(get_input "RAM Memory (MB)" "2048" validate_number)
+CPU_CORES=$(get_input "Number of CPUs" "2" validate_number)
+HOSTNAME=$(get_input "Hostname" "reconftw-container")
+PASSWORD=$(get_input "Password" "$(openssl rand -base64 12)")
+
+# Verify storage space
+check_storage_space "$STORAGE" "$ROOTFS_SIZE"
+
+# Configuration summary
+log "${GREEN}Container configuration:${NC}"
+echo "ID: $CONTAINER_ID"
+echo "Storage: $STORAGE"
+echo "Size: ${ROOTFS_SIZE}GB"
+echo "RAM: ${MEMORY}MB"
+echo "CPUs: $CPU_CORES"
+echo "Hostname: $HOSTNAME"
+
+# Create container with error handling
+log "${YELLOW}Creating LXC container...${NC}"
+pct create $CONTAINER_ID $TEMPLATE_PATH \
+   --storage $STORAGE \
+   --rootfs $STORAGE:${ROOTFS_SIZE} \
+   --memory $MEMORY \
+   --cores $CPU_CORES \
+   --hostname $HOSTNAME \
+   --password "$PASSWORD" \
+   --unprivileged 1 \
+   --net0 name=eth0,bridge=vmbr0,ip=dhcp || error_exit "Error creating container"
+
+# Start container
+log "${YELLOW}Starting container...${NC}"
+pct start $CONTAINER_ID || error_exit "Error starting container"
+
+# Wait for container to be ready
+log "Waiting for container to be ready..."
+for i in {1..15}; do
+   if pct exec $CONTAINER_ID -- systemctl is-system-running &>/dev/null; then
+       break
+   fi
+   sleep 2
+done
+
+# Install ReconFTW
+log "${YELLOW}Installing ReconFTW and dependencies...${NC}"
+pct exec $CONTAINER_ID -- bash -c "apt update && \
+   DEBIAN_FRONTEND=noninteractive apt -y upgrade && \
+   apt install -y git sudo python3 python3-pip && \
+   cd /opt && \
+   git clone --recursive https://github.com/six2dez/reconftw.git && \
+   cd reconftw && \
+   ./install.sh" || error_exit "Error installing ReconFTW"
+
+# Show final information
+log "${GREEN}Installation completed${NC}"
+echo "Container information:"
+echo "ID: $CONTAINER_ID"
+echo "Hostname: $HOSTNAME"
+echo "Password: $PASSWORD"
+echo "Log file: $LOGFILE"
\ No newline at end of file

From 1b8bfb11a7d42cca2eb37316abe082e0231df148 Mon Sep 17 00:00:00 2001
From: wanetty <eduard.gm@gmail.com>
Date: Wed, 30 Oct 2024 14:27:32 +0100
Subject: [PATCH 16/34] fix url

---
 Proxmox/README.md | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/Proxmox/README.md b/Proxmox/README.md
index 33fbcabd..1f93f70d 100644
--- a/Proxmox/README.md
+++ b/Proxmox/README.md
@@ -10,7 +10,7 @@ This script automates the deployment of ReconFTW in a Linux Container (LXC) on a
 
 ## Usage
 
-1. Run the deployment script on your Proxmox server: `bash -c $(curl -fsSL https://raw.githubusercontent.com/six2dez/reconftw/master/Proxmox/reconftw_prox_deploy.sh)`
+1. Run the deployment script on your Proxmox server: `bash -c "$(curl -fsSL https://raw.githubusercontent.com/six2dez/reconftw/master/Proxmox/reconftw_prox_deploy.sh)"`
 
 2. Follow the prompts to configure your LXC container. You'll be asked for:
 - Container ID

From e8207aa5b1d6955b0e9d35fffe70a5c4fbca5162 Mon Sep 17 00:00:00 2001
From: six2dez <alexis.fernandez@visma.com>
Date: Thu, 31 Oct 2024 17:26:00 +0100
Subject: [PATCH 17/34] added merklemap-cli

---
 reconftw.sh | 1 +
 1 file changed, 1 insertion(+)

diff --git a/reconftw.sh b/reconftw.sh
index 8e3f0d71..c1cf7fe5 100755
--- a/reconftw.sh
+++ b/reconftw.sh
@@ -852,6 +852,7 @@ function sub_passive() {
 
 		# Run subfinder and check for errors
 		subfinder -all -d "$domain" -max-time "$SUBFINDER_ENUM_TIMEOUT" -silent -o .tmp/subfinder_psub.txt 2>>"$LOGFILE" >/dev/null
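+		# merklemap-cli prints key=value tokens per result; keep only the domain= values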
+		merklemap-cli search $domain 2>/dev/null | awk -F' ' '{for(i=1;i<=NF;i++) if($i ~ /^domain=/) {split($i,a,"="); print a[2]}}' | anew -q .tmp/subfinder_psub.txt 2>>"$LOGFILE" >/dev/null
 
 		# Run github-subdomains if GITHUB_TOKENS is set and file is not empty
 		if [[ -s $GITHUB_TOKENS ]]; then

From 3dad0c8f4a84755fdf09a5717c3249ffcf358d07 Mon Sep 17 00:00:00 2001
From: six2dez <alexis.fernandez@visma.com>
Date: Mon, 11 Nov 2024 09:00:59 +0100
Subject: [PATCH 18/34] Fix prints

---
 reconftw.sh | 31 ++++++++++---------------------
 1 file changed, 10 insertions(+), 21 deletions(-)

diff --git a/reconftw.sh b/reconftw.sh
index c1cf7fe5..486230e5 100755
--- a/reconftw.sh
+++ b/reconftw.sh
@@ -732,7 +732,7 @@ function subdomains_full() {
 
 	# Check if domain is an IP address
 	if [[ $domain =~ ^[0-9]+\.[0-9]+\.[0-9]+\.[0-9]+$ ]]; then
-		printf "%b[%s] Scanning IP %s%s\n\n" "$bblue" "$(date +'%Y-%m-%d %H:%M:%S')" "$domain" "$reset"
+		printf "%b[%s] Scanning IP %s%b\n\n" "$bblue" "$(date +'%Y-%m-%d %H:%M:%S')" "$domain" "$reset"
 	else
 		printf "%b[%s] Subdomain Enumeration %s%b\n\n" "$bblue" "$(date +'%Y-%m-%d %H:%M:%S')" "$domain" "$reset"
 	fi
@@ -820,7 +820,7 @@ function subdomains_full() {
 	fi
 
 	# Display results
-	printf "%b\n[%s] Total subdomains:%s\n\n" "$bblue" "$(date +'%Y-%m-%d %H:%M:%S')" "$reset"
+	printf "%b\n[%s] Total subdomains:%b\n\n" "$bblue" "$(date +'%Y-%m-%d %H:%M:%S')" "$reset"
 	notification "- ${NUMOFLINES_subs} alive" "good"
 
 	if [[ -s "subdomains/subdomains.txt" ]]; then
@@ -838,7 +838,7 @@ function subdomains_full() {
 	fi
 
 	notification "Subdomain Enumeration Finished" "good"
-	printf "%b[%s] Results are saved in %s/subdomains/subdomains.txt and webs/webs.txt%s\n" "$bblue" "$(date +'%Y-%m-%d %H:%M:%S')" "$domain" "$reset"
+	printf "%b[%s] Results are saved in %s/subdomains/subdomains.txt and webs/webs.txt%b\n" "$bblue" "$(date +'%Y-%m-%d %H:%M:%S')" "$domain" "$reset"
 	printf "%b#######################################################################%b\n\n" "$bgreen" "$reset"
 
 }
@@ -3089,7 +3089,7 @@ function portscan() {
 		fi
 
 		# Display resolved IPs without CDN
-		printf "%b\n[%s] Resolved IP addresses (No CDN):%s\n\n" "$bblue" "$(date +'%Y-%m-%d %H:%M:%S')" "$reset"
+		printf "%b\n[%s] Resolved IP addresses (No CDN):%b\n\n" "$bblue" "$(date +'%Y-%m-%d %H:%M:%S')" "$reset"
 		if [[ -s ".tmp/ips_nocdn.txt" ]]; then
 			sort ".tmp/ips_nocdn.txt"
 		else
@@ -3149,8 +3149,6 @@ function portscan() {
 
 		if [[ -s "hosts/portscan_active.xml" ]]; then
 			nmapurls <hosts/portscan_active.xml 2>>"$LOGFILE" | anew -q hosts/webs.txt
-		else
-			printf "%b[!] No portscan_active.xml found.%b\n" "$yellow" "$reset"
 		fi
 
 		if [[ -s "hosts/webs.txt" ]]; then
@@ -3160,8 +3158,6 @@ function portscan() {
 			fi
 			notification "Webs detected from port scan: ${NUMOFLINES} new websites" "good"
 			cat hosts/webs.txt
-		else
-			printf "%b[!] No webs detected from port scan.%b\n" "$yellow" "$reset"
 		fi
 
 		end_func "Results are saved in hosts/portscan_[passive|active|shodan].[txt|xml]" "${FUNCNAME[0]}"
@@ -3518,8 +3514,6 @@ function iishortname() {
 				xargs --null rm 2>>"$LOGFILE" >/dev/null
 
 			end_func "Results are saved in vulns/iis-shortname/" "${FUNCNAME[0]}"
-		else
-			end_func "No IIS sites detected, iishortname check skipped." "${FUNCNAME[0]}"
 		fi
 	else
 		# Handle cases where IIS_SHORTNAME is false or the function has already been processed
@@ -3596,8 +3590,6 @@ function cms_scanner() {
 				else
 					rm -rf "${tools}/CMSeeK/Result/${sub_out}" 2>>"$LOGFILE"
 				fi
-			else
-				printf "%b[!] cms.json does not exist or is empty for $sub_out.%b\n" "$yellow" "$reset"
 			fi
 		done <"webs/webs_all.txt"
 
@@ -3892,7 +3884,7 @@ function jschecks() {
 
 		if [[ -s ".tmp/url_extract_js.txt" ]]; then
 
-			printf "%bRunning: Fetching URLs 1/6%s\n" "$yellow" "$reset"
+			printf "%bRunning: Fetching URLs 1/6%b\n" "$yellow" "$reset"
 			if [[ $AXIOM != true ]]; then
 				subjs -ua "Mozilla/5.0 (X11; Linux x86_64; rv:72.0) Gecko/20100101 Firefox/72.0" -c 40 <.tmp/url_extract_js.txt |
 					grep "$domain" |
@@ -3913,7 +3905,7 @@ function jschecks() {
 			python3 "${tools}/urless/urless/urless.py" <.tmp/url_extract_js.txt |
 				anew -q js/url_extract_js.txt 2>>"$LOGFILE" >/dev/null
 
-			printf "%b[%s] Running: Resolving JS URLs 2/6%s\n" "$yellow" "$(date +'%Y-%m-%d %H:%M:%S')" "$reset"
+			printf "%b[%s] Running: Resolving JS URLs 2/6%b\n" "$yellow" "$(date +'%Y-%m-%d %H:%M:%S')" "$reset"
 			if [[ $AXIOM != true ]]; then
 				if [[ -s "js/url_extract_js.txt" ]]; then
 					httpx -follow-redirects -random-agent -silent -timeout "$HTTPX_TIMEOUT" -threads "$HTTPX_THREADS" \
@@ -3938,7 +3930,7 @@ function jschecks() {
 				fi
 			fi
 
-			printf "%bRunning: Extracting JS from sourcemaps 3/6%s\n" "$yellow" "$reset"
+			printf "%bRunning: Extracting JS from sourcemaps 3/6%b\n" "$yellow" "$reset"
 			if ! mkdir -p .tmp/sourcemapper; then
 				printf "%b[!] Failed to create sourcemapper directory.%b\n" "$bred" "$reset"
 			fi
@@ -3956,7 +3948,7 @@ function jschecks() {
 					-o .tmp/sourcemapper 2>>"$LOGFILE" >/dev/null
 			fi
 
-			printf "%b[%s] Running: Gathering endpoints 4/6%s\n" "$yellow" "$(date +'%Y-%m-%d %H:%M:%S')" "$reset"
+			printf "%b[%s] Running: Gathering endpoints 4/6%b\n" "$yellow" "$(date +'%Y-%m-%d %H:%M:%S')" "$reset"
 			if [[ -s "js/js_livelinks.txt" ]]; then
 				xnLinkFinder -i js/js_livelinks.txt -sf subdomains/subdomains.txt -d "$XNLINKFINDER_DEPTH" \
 					-o .tmp/js_endpoints.txt 2>>"$LOGFILE" >/dev/null
@@ -3974,7 +3966,7 @@ function jschecks() {
 				printf "%b[!] No JavaScript endpoints found.%b\n" "$yellow" "$reset"
 			fi
 
-			printf "%b[%s] Running: Gathering secrets 5/6%s\n" "$yellow" "$(date +'%Y-%m-%d %H:%M:%S')" "$reset"
+			printf "%b[%s] Running: Gathering secrets 5/6%b\n" "$yellow" "$(date +'%Y-%m-%d %H:%M:%S')" "$reset"
 			if [[ -s "js/js_livelinks.txt" ]]; then
 				axiom-scan js/js_livelinks.txt -m mantra -ua "$HEADER" -s -o js/js_secrets.txt "$AXIOM_EXTRA_ARGS" &>/dev/null
 				if [[ -s "js/js_secrets.txt" ]]; then
@@ -3990,7 +3982,7 @@ function jschecks() {
 				printf "%b[!] No live JavaScript links for secret gathering.%b\n" "$yellow" "$reset"
 			fi
 
-			printf "%b[%s] Running: Building wordlist 6/6%s\n" "$yellow" "$(date +'%Y-%m-%d %H:%M:%S')" "$reset"
+			printf "%b[%s] Running: Building wordlist 6/6%b\n" "$yellow" "$(date +'%Y-%m-%d %H:%M:%S')" "$reset"
 			if [[ -s "js/js_livelinks.txt" ]]; then
 				interlace -tL js/js_livelinks.txt -threads "$INTERLACE_THREADS" \
 					-c "python3 ${tools}/getjswords.py '_target_' | anew -q webs/dict_words.txt" 2>>"$LOGFILE" >/dev/null
@@ -4957,8 +4949,6 @@ function prototype_pollution() {
 				# Filter and save relevant results
 				if [[ -s ".tmp/prototype_pollution.txt" ]]; then
 					grep "EXPL" ".tmp/prototype_pollution.txt" | anew -q "vulns/prototype_pollution.txt"
-				else
-					printf "%b[!] No Prototype Pollution findings detected.%b\n" "$bred" "$reset"
 				fi
 
 				end_func "Results are saved in vulns/prototype_pollution.txt" "${FUNCNAME[0]}"
@@ -6304,7 +6294,6 @@ while true; do
 	'-v' | '--vps')
 		command -v axiom-ls &>/dev/null || {
 			printf "\n Axiom is needed for this mode and is not installed \n You have to install it manually \n" && exit
-			allinstalled=false
 		}
 		AXIOM=true
 		shift

From 14d8998c1a6b292d5360161998c3929f061e5ebf Mon Sep 17 00:00:00 2001
From: six2dez <alexis.fernandez@visma.com>
Date: Mon, 11 Nov 2024 11:21:22 +0100
Subject: [PATCH 19/34] fix fuzzing

---
 reconftw.sh | 99 ++++++++++++++++++-----------------------------------
 1 file changed, 33 insertions(+), 66 deletions(-)

diff --git a/reconftw.sh b/reconftw.sh
index 486230e5..e9e575d9 100755
--- a/reconftw.sh
+++ b/reconftw.sh
@@ -323,17 +323,12 @@ function github_repos() {
 
 			if ! enumerepo -token-string "$GH_TOKEN" -usernames .tmp/company_name.txt -o .tmp/company_repos.txt 2>>"$LOGFILE" >/dev/null; then
 				printf "%b[!] enumerepo command failed.%b\n" "$bred" "$reset"
-				return 1
 			fi
 
 			if [[ -s ".tmp/company_repos.txt" ]]; then
 				if ! jq -r '.[].repos[]|.url' <.tmp/company_repos.txt >.tmp/company_repos_url.txt 2>>"$LOGFILE"; then
 					printf "%b[!] jq command failed.%b\n" "$bred" "$reset"
-					return 1
 				fi
-			else
-				printf "%b[!] No repositories found for the company.%b\n" "$yellow" "$reset"
-				return 1
 			fi
 
 			mkdir -p .tmp/github_repos 2>>"$LOGFILE"
@@ -1632,7 +1627,7 @@ function sub_analytics() {
 			fi
 		fi
 
-		if ! NUMOFLINES=$(anew subdomains/subdomains.txt <.tmp/analytics_subs_resolved.txt | sed '/^$/d' | wc -l); then
+		if ! NUMOFLINES=$(anew subdomains/subdomains.txt <.tmp/analytics_subs_resolved.txt 2>/dev/null| sed '/^$/d' | wc -l); then
 			printf "%b[!] Failed to count new subdomains.%b\n" "$bred" "$reset"
 			NUMOFLINES=0
 		fi
@@ -2390,8 +2385,6 @@ function s3buckets() {
 				if ! sed -i '/^$/d' .tmp/s3buckets.txt; then
 					printf "%b[!] Failed to clean s3buckets.txt.%b\n" "$bred" "$reset"
 				fi
-			else
-				printf "%b[!] No s3buckets_tmp.txt found.%b\n" "$yellow" "$reset"
 			fi
 		fi
 
@@ -2498,7 +2491,6 @@ function s3buckets() {
 			fi
 		else
 			NUMOFLINES2=0
-			printf "%b[!] No s3buckets.txt found or it is empty.%b\n" "$yellow" "$reset"
 		fi
 
 		# Run trufflehog for S3 buckets
@@ -2506,8 +2498,6 @@ function s3buckets() {
 			while IFS= read -r bucket; do
 				trufflehog s3 --bucket="$bucket" -j 2>/dev/null | jq -c | anew -q subdomains/s3buckets_trufflehog.txt
 			done <subdomains/s3buckets.txt
-		else
-			printf "%b[!] No S3 buckets to scan with trufflehog.%b\n" "$yellow" "$reset"
 		fi
 
 		# Run trufflehog for open buckets found by CloudHunter
@@ -3384,10 +3374,7 @@ function nuclei_check() {
 function fuzz() {
 
 	# Create necessary directories
-	if ! mkdir -p .tmp/fuzzing webs fuzzing nuclei_output; then
-		printf "%b[!] Failed to create directories.%b\n" "$bred" "$reset"
-		return 1
-	fi
+	mkdir -p .tmp/fuzzing webs fuzzing nuclei_output; then
 
 	# Check if the function should run
 	if { [[ ! -f "$called_fn_dir/.${FUNCNAME[0]}" ]] || [[ $DIFF == true ]]; } && [[ $FUZZ == true ]] &&
@@ -3410,65 +3397,45 @@ function fuzz() {
 			cat webs/webs.txt webs/webs_uncommon_ports.txt 2>/dev/null | anew -q webs/webs_all.txt
 		fi
 
-		# Combine url_extract_nodupes.txt, subdomains.txt, and webs_all.txt into webs_subs.txt if it doesn't exist
-		if [[ ! -s ".tmp/webs_subs.txt" ]]; then
-			cat webs/url_extract_nodupes.txt subdomains/subdomains.txt webs/webs_all.txt 2>>"$LOGFILE" | anew -q .tmp/webs_subs.txt
-		fi
-
-		# If fuzzing_full.txt exists, process it and create webs_fuzz.txt
-		if [[ -s "$dir/fuzzing/fuzzing_full.txt" ]]; then
-			grep "^200" "$dir/fuzzing/fuzzing_full.txt" | cut -d " " -f3 | anew -q .tmp/webs_fuzz.txt
-		fi
-
-		# Combine webs_subs.txt and webs_fuzz.txt into webs_nuclei.txt and duplicate it
-		cat .tmp/webs_subs.txt .tmp/webs_fuzz.txt 2>>"$LOGFILE" | anew -q .tmp/webs_nuclei.txt | tee -a webs/webs_nuclei.txt
-
-		# Check if AXIOM is enabled
-		if [[ $AXIOM != true ]]; then
-			# Split severity levels into an array
-			IFS=',' read -ra severity_array <<<"$NUCLEI_SEVERITY"
-
-			for crit in "${severity_array[@]}"; do
-				printf "${yellow}\n[$(date +'%Y-%m-%d %H:%M:%S')] Running: Nuclei Severity: $crit ${reset}\n\n"
-
-				# Run nuclei for each severity level
-				nuclei $NUCLEI_FLAGS -severity "$crit" -nh -rl "$NUCLEI_RATELIMIT" -o "nuclei_output/${crit}.txt" <.tmp/webs_nuclei.txt
-			done
-			printf "\n\n"
-		else
-			# Check if webs_nuclei.txt exists and is not empty
-			if [[ -s ".tmp/webs_nuclei.txt" ]]; then
-				# Split severity levels into an array
-				IFS=',' read -ra severity_array <<<"$NUCLEI_SEVERITY"
-
-				for crit in "${severity_array[@]}"; do
-					printf "${yellow}\n[$(date +'%Y-%m-%d %H:%M:%S')] Running: Axiom Nuclei Severity: $crit. Check results in nuclei_output folder.${reset}\n\n"
-
-					# Run axiom-scan with nuclei module for each severity level
-					axiom-scan .tmp/webs_nuclei.txt -m nuclei \
-						--nuclei-templates "$NUCLEI_TEMPLATES_PATH" \
-						-severity "$crit" -nh -rl "$NUCLEI_RATELIMIT" \
-						-o "nuclei_output/${crit}.txt" "$AXIOM_EXTRA_ARGS" 2>>"$LOGFILE" >/dev/null
-
-					# Display the results if the output file exists and is not empty
-					if [[ -s "nuclei_output/${crit}.txt" ]]; then
-						cat "nuclei_output/${crit}.txt"
-					fi
+		if [[ -s "webs/webs_all.txt" ]]; then
+			if [[ $AXIOM != true ]]; then
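+				# Interlace substitutes _target_/_output_/_cleantarget_ per host, running one ffuf job per web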
+				interlace -tL webs/webs_all.txt -threads ${INTERLACE_THREADS} -c "ffuf ${FFUF_FLAGS} -t ${FFUF_THREADS} -rate ${FFUF_RATELIMIT} -H \"${HEADER}\" -w ${fuzz_wordlist} -maxtime ${FFUF_MAXTIME} -u _target_/FUZZ -o _output_/_cleantarget_.json" -o $dir/.tmp/fuzzing 2>>"$LOGFILE" >/dev/null
+				for sub in $(cat webs/webs_all.txt); do
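+					# Strip scheme and path: https://sub.example.com/x -> sub.example.com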
+					sub_out=$(echo $sub | sed -e 's|^[^/]*//||' -e 's|/.*$||')
+
+					pushd "${tools}/ffufPostprocessing" >/dev/null || {
+						echo "Failed to cd directory in ${FUNCNAME[0]} @ line ${LINENO}"
+					}
+					./ffufPostprocessing -result-file $dir/.tmp/fuzzing/${sub_out}.json -overwrite-result-file
+					popd >/dev/null || {
+						echo "Failed to popd in ${FUNCNAME[0]} @ line ${LINENO}"
+					}
+
+					[ -s "$dir/.tmp/fuzzing/${sub_out}.json" ] && cat $dir/.tmp/fuzzing/${sub_out}.json | jq -r 'try .results[] | "\(.status) \(.length) \(.url)"' | sort -k1 | anew -q $dir/fuzzing/${sub_out}.txt
 				done
-				printf "\n\n"
+				find $dir/fuzzing/ -type f -iname "*.txt" -exec cat {} + 2>>"$LOGFILE" | sort -k1 | anew -q $dir/fuzzing/fuzzing_full.txt
+			else
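+				# Seed the axiom fleet with the remote fuzzing wordlist before the distributed ffuf run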
+				axiom-exec "mkdir -p /home/op/lists/seclists/Discovery/Web-Content/" &>/dev/null
+				axiom-exec "wget -q -O - ${fuzzing_remote_list} > /home/op/lists/fuzz_wordlist.txt" &>/dev/null
+				axiom-exec "wget -q -O - ${fuzzing_remote_list} > /home/op/lists/seclists/Discovery/Web-Content/big.txt" &>/dev/null
+				axiom-scan webs/webs_all.txt -m ffuf_base -H "${HEADER}" $FFUF_FLAGS -s -maxtime $FFUF_MAXTIME -o $dir/.tmp/ffuf-content.json $AXIOM_EXTRA_ARGS 2>>"$LOGFILE" >/dev/null
+				for sub in $(cat webs/webs_all.txt); do
+					sub_out=$(echo $sub | sed -e 's|^[^/]*//||' -e 's|/.*$||')
+					[ -s "$dir/.tmp/ffuf-content.json" ] && cat .tmp/ffuf-content.json | jq -r 'try .results[] | "\(.status) \(.length) \(.url)"' | grep $sub | sort -k1 | anew -q fuzzing/${sub_out}.txt
+				done
+				find $dir/fuzzing/ -type f -iname "*.txt" -exec cat {} + 2>>"$LOGFILE" | sort -k1 | anew -q $dir/fuzzing/fuzzing_full.txt
 			fi
+			end_func "Results are saved in $domain/fuzzing/*subdomain*.txt" ${FUNCNAME[0]}
+		else
+			end_func "No $domain/webs/webs_all.txt file found, fuzzing skipped" ${FUNCNAME[0]}
 		fi
 
 		end_func "Results are saved in $domain/nuclei_output folder" "${FUNCNAME[0]}"
 	else
-		# Handle cases where NUCLEICHECK is false or the function has already been processed
 		if [[ $FUZZ == false ]]; then
-			printf "\n${yellow}[$(date +'%Y-%m-%d %H:%M:%S')] ${FUNCNAME[0]} skipped due to configuration settings.${reset}\n"
-		elif [[ $domain =~ ^[0-9]+\.[0-9]+\.[0-9]+\.[0-9]+$ ]]; then
-			# Domain is an IP address; skip the function
-			return
+			printf "\n${yellow}[$(date +'%Y-%m-%d %H:%M:%S')] ${FUNCNAME[0]} skipped in this mode or defined in reconftw.cfg ${reset}\n"
 		else
-			printf "${yellow}[$(date +'%Y-%m-%d %H:%M:%S')] ${FUNCNAME[0]} has already been processed. To force execution, delete:\n    $called_fn_dir/.${FUNCNAME[0]}${reset}\n\n"
+			printf "${yellow}[$(date +'%Y-%m-%d %H:%M:%S')] ${FUNCNAME[0]} is already processed, to force executing ${FUNCNAME[0]} delete\n    $called_fn_dir/.${FUNCNAME[0]} ${reset}\n\n"
 		fi
 	fi
 

From da10cdb2fcab06677a3c49acb986885e875f9275 Mon Sep 17 00:00:00 2001
From: six2dez <alexis.fernandez@visma.com>
Date: Mon, 11 Nov 2024 11:22:15 +0100
Subject: [PATCH 20/34] fix typo

---
 reconftw.sh | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/reconftw.sh b/reconftw.sh
index e9e575d9..96d46e37 100755
--- a/reconftw.sh
+++ b/reconftw.sh
@@ -3374,7 +3374,7 @@ function nuclei_check() {
 function fuzz() {
 
 	# Create necessary directories
-	mkdir -p .tmp/fuzzing webs fuzzing nuclei_output; then
+	mkdir -p .tmp/fuzzing webs fuzzing nuclei_output
 
 	# Check if the function should run
 	if { [[ ! -f "$called_fn_dir/.${FUNCNAME[0]}" ]] || [[ $DIFF == true ]]; } && [[ $FUZZ == true ]] &&

From 63a210e07606fc22fbec95b12eaacffdf3c15524 Mon Sep 17 00:00:00 2001
From: six2dez <alexis.fernandez@visma.com>
Date: Mon, 11 Nov 2024 11:26:01 +0100
Subject: [PATCH 21/34] ffuf-postprocessing for axiom

---
 reconftw.sh | 1 +
 1 file changed, 1 insertion(+)

diff --git a/reconftw.sh b/reconftw.sh
index 96d46e37..4cc8b323 100755
--- a/reconftw.sh
+++ b/reconftw.sh
@@ -3419,6 +3419,7 @@ function fuzz() {
 				axiom-exec "wget -q -O - ${fuzzing_remote_list} > /home/op/lists/fuzz_wordlist.txt" &>/dev/null
 				axiom-exec "wget -q -O - ${fuzzing_remote_list} > /home/op/lists/seclists/Discovery/Web-Content/big.txt" &>/dev/null
 				axiom-scan webs/webs_all.txt -m ffuf_base -H "${HEADER}" $FFUF_FLAGS -s -maxtime $FFUF_MAXTIME -o $dir/.tmp/ffuf-content.json $AXIOM_EXTRA_ARGS 2>>"$LOGFILE" >/dev/null
+				[ -s "$dir/.tmp/ffuf-content.json" ] && ./ffufPostprocessing -result-file $dir/.tmp/ffuf-content.json -overwrite-result-file
 				for sub in $(cat webs/webs_all.txt); do
 					sub_out=$(echo $sub | sed -e 's|^[^/]*//||' -e 's|/.*$||')
 					[ -s "$dir/.tmp/ffuf-content.json" ] && cat .tmp/ffuf-content.json | jq -r 'try .results[] | "\(.status) \(.length) \(.url)"' | grep $sub | sort -k1 | anew -q fuzzing/${sub_out}.txt

From 6bc131cecead87ec07cd7bf46d821b90f7d95f67 Mon Sep 17 00:00:00 2001
From: six2dez <alexis.fernandez@visma.com>
Date: Tue, 12 Nov 2024 08:08:54 +0100
Subject: [PATCH 22/34] fix ffuf axiom postpro

---
 reconftw.sh | 8 +++++++-
 1 file changed, 7 insertions(+), 1 deletion(-)

diff --git a/reconftw.sh b/reconftw.sh
index 4cc8b323..0cdeb4d2 100755
--- a/reconftw.sh
+++ b/reconftw.sh
@@ -3419,7 +3419,13 @@ function fuzz() {
 				axiom-exec "wget -q -O - ${fuzzing_remote_list} > /home/op/lists/fuzz_wordlist.txt" &>/dev/null
 				axiom-exec "wget -q -O - ${fuzzing_remote_list} > /home/op/lists/seclists/Discovery/Web-Content/big.txt" &>/dev/null
 				axiom-scan webs/webs_all.txt -m ffuf_base -H "${HEADER}" $FFUF_FLAGS -s -maxtime $FFUF_MAXTIME -o $dir/.tmp/ffuf-content.json $AXIOM_EXTRA_ARGS 2>>"$LOGFILE" >/dev/null
-				[ -s "$dir/.tmp/ffuf-content.json" ] && ./ffufPostprocessing -result-file $dir/.tmp/ffuf-content.json -overwrite-result-file
+				pushd "${tools}/ffufPostprocessing" >/dev/null || {
+					echo "Failed to cd directory in ${FUNCNAME[0]} @ line ${LINENO}"
+				}
+				[ -s "$dir/.tmp/ffuf-content.json" ] && ./ffufPostprocessing -result-file $dir/.tmp/ffuf-content.json -overwrite-result-file
+				popd >/dev/null || {
+					echo "Failed to popd in ${FUNCNAME[0]} @ line ${LINENO}"
+				}
 				for sub in $(cat webs/webs_all.txt); do
 					sub_out=$(echo $sub | sed -e 's|^[^/]*//||' -e 's|/.*$||')
 					[ -s "$dir/.tmp/ffuf-content.json" ] && cat .tmp/ffuf-content.json | jq -r 'try .results[] | "\(.status) \(.length) \(.url)"' | grep $sub | sort -k1 | anew -q fuzzing/${sub_out}.txt

From bcae2f5f336fba566968c9b7b81571d3db73adfd Mon Sep 17 00:00:00 2001
From: six2dez <alexis.fernandez@visma.com>
Date: Thu, 14 Nov 2024 09:48:00 +0100
Subject: [PATCH 23/34] fix iis

---
 reconftw.sh | 4 +---
 1 file changed, 1 insertion(+), 3 deletions(-)

diff --git a/reconftw.sh b/reconftw.sh
index 0cdeb4d2..05eb35e9 100755
--- a/reconftw.sh
+++ b/reconftw.sh
@@ -3458,8 +3458,6 @@ function iishortname() {
 		if [[ -s "nuclei_output/info.txt" ]]; then
 			# Extract IIS version information and save to .tmp/iis_sites.txt
 			grep "iis-version" "nuclei_output/info.txt" | cut -d " " -f4 >.tmp/iis_sites.txt
-		else
-			printf "%b[!] nuclei_output/info.txt does not exist or is empty.%b\n" "$yellow" "$reset"
 		fi
 
 		# Proceed only if iis_sites.txt exists and is non-empty
@@ -3487,8 +3485,8 @@ function iishortname() {
 				xargs --null grep -Z 'Target is not vulnerable' |
 				xargs --null rm 2>>"$LOGFILE" >/dev/null
 
-			end_func "Results are saved in vulns/iis-shortname/" "${FUNCNAME[0]}"
 		fi
+		end_func "Results are saved in vulns/iis-shortname/" "${FUNCNAME[0]}"
 	else
 		# Handle cases where IIS_SHORTNAME is false or the function has already been processed
 		if [[ $IIS_SHORTNAME == false ]]; then

From 6f48dbcce5308e1b95c1e8af5615dcf808c089bc Mon Sep 17 00:00:00 2001
From: six2dez <alexis.fernandez@visma.com>
Date: Fri, 15 Nov 2024 11:55:55 +0100
Subject: [PATCH 24/34] Fix comments, notifications and zip send

---
 reconftw.sh | 111 +++++++++++++++++++++-------------------------------
 1 file changed, 45 insertions(+), 66 deletions(-)

diff --git a/reconftw.sh b/reconftw.sh
index 05eb35e9..8341cf98 100755
--- a/reconftw.sh
+++ b/reconftw.sh
@@ -1614,11 +1614,7 @@ function sub_analytics() {
 							2>>"$LOGFILE" >/dev/null
 					fi
 				fi
-			else
-				printf "%b[!] No analytics subdomains found.%b\n" "$yellow" "$reset"
 			fi
-		else
-			printf "%b[!] File .tmp/probed_tmp_scrap.txt does not exist or is empty.%b\n" "$yellow" "$reset"
 		fi
 
 		if [[ $INSCOPE == true ]]; then
@@ -1627,7 +1623,7 @@ function sub_analytics() {
 			fi
 		fi
 
-		if ! NUMOFLINES=$(anew subdomains/subdomains.txt <.tmp/analytics_subs_resolved.txt 2>/dev/null| sed '/^$/d' | wc -l); then
+		if ! NUMOFLINES=$(anew subdomains/subdomains.txt <.tmp/analytics_subs_resolved.txt 2>/dev/null | sed '/^$/d' | wc -l); then
 			printf "%b[!] Failed to count new subdomains.%b\n" "$bred" "$reset"
 			NUMOFLINES=0
 		fi
@@ -2642,8 +2638,6 @@ function webprobe_simple() {
 				grep "$domain" |
 				grep -E '^((http|https):\/\/)?([a-zA-Z0-9]([a-zA-Z0-9\-]*[a-zA-Z0-9])?\.)+[a-zA-Z]{1,}(\/.*)?$' |
 				sed 's/*.//' | anew -q .tmp/probed_tmp.txt
-		else
-			printf "%b[!] webs/web_full_info.txt does not exist or is empty.%b\n" "$yellow" "$reset"
 		fi
 
 		# Extract web info to plain text
@@ -2660,7 +2654,7 @@ function webprobe_simple() {
 		fi
 
 		# Count new websites
-		if ! NUMOFLINES=$(anew webs/webs.txt <.tmp/probed_tmp.txt 2>>"$LOGFILE" | sed '/^$/d' | wc -l); then
+		if ! NUMOFLINES=$(anew webs/webs.txt <.tmp/probed_tmp.txt 2>/dev/null | sed '/^$/d' | wc -l); then
 			printf "%b[!] Failed to count new websites.%b\n" "$bred" "$reset"
 			NUMOFLINES=0
 		fi
@@ -2723,8 +2717,6 @@ function webprobe_full() {
 					-silent -retries 2 -title -web-server -tech-detect -location -no-color -json \
 					-o .tmp/web_full_info_uncommon.txt "$AXIOM_EXTRA_ARGS" 2>>"$LOGFILE" >/dev/null
 			fi
-		else
-			printf "%b[!] subdomains/subdomains.txt does not exist or is empty.%b\n" "$yellow" "$reset"
 		fi
 
 		# Process web_full_info_uncommon.txt
@@ -2774,8 +2766,6 @@ function webprobe_full() {
 				notification "Sending websites with uncommon ports to proxy" "info"
 				ffuf -mc all -w webs/webs_uncommon_ports.txt -u FUZZ -replay-proxy "$proxy_url" 2>>"$LOGFILE" >/dev/null
 			fi
-		else
-			printf "%b[!] .tmp/web_full_info_uncommon.txt does not exist or is empty.%b\n" "$yellow" "$reset"
 		fi
 	else
 		if [[ $WEBPROBEFULL == false ]]; then
@@ -2810,14 +2800,10 @@ function screenshot() {
 		if [[ $AXIOM != true ]]; then
 			if [[ -s "webs/webs_all.txt" ]]; then
 				nuclei -headless -id screenshot -V dir='screenshots' <webs/webs_all.txt 2>>"$LOGFILE"
-			else
-				printf "%b[!] webs/webs_all.txt does not exist or is empty.%b\n" "$yellow" "$reset"
 			fi
 		else
 			if [[ -s "webs/webs_all.txt" ]]; then
 				axiom-scan webs/webs_all.txt -m nuclei-screenshots -o screenshots "$AXIOM_EXTRA_ARGS" 2>>"$LOGFILE" >/dev/null
-			else
-				printf "%b[!] webs/webs_all.txt does not exist or is empty.%b\n" "$yellow" "$reset"
 			fi
 		fi
 
@@ -2866,8 +2852,6 @@ function screenshot() {
 				notification "Sending websites with uncommon ports to proxy" "info"
 				ffuf -mc all -w webs/webs_uncommon_ports.txt -u FUZZ -replay-proxy "$proxy_url" 2>>"$LOGFILE" >/dev/null
 			fi
-		else
-			printf "%b[!] .tmp/web_full_info_uncommon.txt does not exist or is empty.%b\n" "$yellow" "$reset"
 		fi
 	else
 		if [[ $WEBSCREENSHOT == false ]]; then
@@ -2922,8 +2906,6 @@ function virtualhosts() {
 
 				if [[ -s $json_file ]]; then
 					jq -r 'try .results[] | "\(.status) \(.length) \(.url)"' "$json_file" | sort | anew -q "$txt_file"
-				else
-					printf "%b[!] JSON file %s does not exist or is empty.%b\n" "$yellow" "$json_file" "$reset"
 				fi
 			done
 
@@ -2994,8 +2976,6 @@ function favicon() {
 
 			# Remove the JSON file
 			rm -f favicontest.json 2>>"$LOGFILE"
-		else
-			printf "%b[!] favicontest.json does not exist or is empty.%b\n" "$yellow" "$reset"
 		fi
 
 		# Return to the original directory
@@ -3037,8 +3017,6 @@ function portscan() {
 			if [[ -s "subdomains/subdomains_dnsregs.json" ]]; then
 				# Extract host and IP from JSON
 				jq -r 'try . | "\(.host) \(.a[0])"' "subdomains/subdomains_dnsregs.json" | anew -q .tmp/subs_ips.txt
-			else
-				printf "%b[!] subdomains_dnsregs.json does not exist or is empty.%b\n" "$yellow" "$reset"
 			fi
 
 			if [[ -s ".tmp/subs_ips.txt" ]]; then
@@ -3186,8 +3164,6 @@ function cdnprovider() {
 				grep -aEiv "^(127|10|169\.254|172\.(1[6-9]|2[0-9]|3[01])|192\.168)\." |
 				grep -oE "\b([0-9]{1,3}\.){3}[0-9]{1,3}\b" |
 				sort -u >.tmp/ips_cdn.txt
-		else
-			printf "%b[!] subdomains/subdomains_dnsregs.json does not exist or is empty.%b\n" "$yellow" "$reset"
 		fi
 
 		# Check if ips_cdn.txt exists and is not empty
@@ -3693,8 +3669,6 @@ function urlchecks() {
 			else
 				printf "%b[!] No URLs extracted.%b\n" "$yellow" "$reset"
 			fi
-		else
-			printf "%b[!] webs/webs_all.txt does not exist or is empty.%b\n" "$yellow" "$reset"
 		fi
 	else
 		if [[ $URL_CHECK == false ]]; then
@@ -4011,8 +3985,6 @@ function wordlist_gen() {
 			# Extract words by removing punctuation
 			printf "${yellow}\n[$(date +'%Y-%m-%d %H:%M:%S')] Extracting words...${reset}\n"
 			tr "[:punct:]" "\n" <".tmp/url_extract_tmp.txt" | anew -q "webs/dict_words.txt"
-		else
-			printf "%b[!] .tmp/url_extract_tmp.txt does not exist or is empty.%b\n" "$yellow" "$reset"
 		fi
 
 		# Process js_endpoints.txt if it exists and is not empty
@@ -5198,7 +5170,6 @@ function zipSnedOutputFolder {
 	zip_name1=$(date +"%Y_%m_%d-%H.%M.%S")
 	zip_name="${zip_name1}_${domain}.zip" 2>>"$LOGFILE" >/dev/null
 	(cd "$dir" && zip -r "$zip_name" .) 2>>"$LOGFILE" >/dev/null
-
 	echo "Sending zip file "${dir}/${zip_name}""
 	if [[ -s "${dir}/$zip_name" ]]; then
 		sendToNotify "$dir/$zip_name"
@@ -5232,38 +5203,46 @@ function remove_big_files() {
 }
 
 function notification() {
-	if [[ -n $1 ]] && [[ -n $2 ]]; then
-		if [[ $NOTIFICATION == true ]]; then
-			NOTIFY="notify -silent"
-		else
-			NOTIFY="true"
-		fi
-		if [[ -z $3 ]]; then
-			current_date=$(date +'%Y-%m-%d %H:%M:%S')
-		else
-			current_date="$3"
-		fi
-		case $2 in
-		info)
-			text="\n${bblue}[$current_date] ${1} ${reset}"
-			printf "${text}\n" && printf "${text} - ${domain}\n" | $NOTIFY
-			;;
-		warn)
-			text="\n${yellow}[$current_date] ${1} ${reset}"
-			printf "${text}\n" && printf "${text} - ${domain}\n" | $NOTIFY
-			;;
-		error)
-			text="\n${bred}[$current_date] ${1} ${reset}"
-			printf "${text}\n" && printf "${text} - ${domain}\n" | $NOTIFY
-			;;
-		good)
-			text="\n${bgreen}[$current_date] ${1} ${reset}"
-			printf "${text}\n" && printf "${text} - ${domain}\n" | $NOTIFY
-			;;
-		esac
-	fi
+	if [[ -n $1 ]] && [[ -n $2 ]]; then
+		if [[ $NOTIFICATION == true ]]; then
+			NOTIFY="notify -silent"
+		else
+			NOTIFY=""
+		fi
+		if [[ -z $3 ]]; then
+			current_date=$(date +'%Y-%m-%d %H:%M:%S')
+		else
+			current_date="$3"
+		fi
+
+		case $2 in
+		info)
+			text="\n${bblue}[$current_date] ${1} ${reset}"
+			;;
+		warn)
+			text="\n${yellow}[$current_date] ${1} ${reset}"
+			;;
+		error)
+			text="\n${bred}[$current_date] ${1} ${reset}"
+			;;
+		good)
+			text="\n${bgreen}[$current_date] ${1} ${reset}"
+			;;
+		esac
+
+		# Print to terminal
+		printf "${text}\n"
+
+		# Send to notify if notifications are enabled
+		if [[ -n $NOTIFY ]]; then
+			# Remove color codes for the notification
+			clean_text=$(echo -e "${text} - ${domain}" | sed 's/\x1B\[[0-9;]*[JKmsu]//g')
+			echo -e "${clean_text}" | $NOTIFY >/dev/null 2>&1
+		fi
+	fi
 }
 
 function transfer {
 	if [[ $# -eq 0 ]]; then
 		echo "No arguments specified.\nUsage:\n transfer <file|directory>\n ... | transfer <file_name>" >&2
@@ -5302,14 +5281,14 @@ function sendToNotify {
 		fi
 		if grep -q '^ telegram\|^telegram\|^    telegram' $NOTIFY_CONFIG; then
 			notification "[$(date +'%Y-%m-%d %H:%M:%S')] Sending ${domain} data over Telegram" info
-			telegram_chat_id=$(cat ${NOTIFY_CONFIG} | grep '^    telegram_chat_id\|^telegram_chat_id\|^    telegram_chat_id' | xargs | cut -d' ' -f2)
-			telegram_key=$(cat ${NOTIFY_CONFIG} | grep '^    telegram_api_key\|^telegram_api_key\|^    telegram_apikey' | xargs | cut -d' ' -f2)
-			curl -F document=@${1} "https://api.telegram.org/bot${telegram_key}/sendDocument?chat_id=${telegram_chat_id}" 2>>"$LOGFILE" >/dev/null
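+			# Assumes notify's provider-config.yaml layout, e.g.:
+			#   telegram:
+			#     - telegram_api_key: "110201543:AAHdq..."
+			#       telegram_chat_id: "123456789"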
+			telegram_chat_id=$(sed -n '/^telegram:/,/^[^ ]/p' ${NOTIFY_CONFIG} | sed -n 's/^[ ]*telegram_chat_id:[ ]*"\([^"]*\)".*/\1/p')
+			telegram_key=$(sed -n '/^telegram:/,/^[^ ]/p' ${NOTIFY_CONFIG} | sed -n 's/^[ ]*telegram_api_key:[ ]*"\([^"]*\)".*/\1/p')
+			curl -F "chat_id=${telegram_chat_id}" -F "document=@${1}" https://api.telegram.org/bot${telegram_key}/sendDocument 2>>"$LOGFILE" >/dev/null
 		fi
 		if grep -q '^ discord\|^discord\|^    discord' $NOTIFY_CONFIG; then
 			notification "[$(date +'%Y-%m-%d %H:%M:%S')] Sending ${domain} data over Discord" info
-			discord_url=$(cat ${NOTIFY_CONFIG} | grep '^ discord_webhook_url\|^discord_webhook_url\|^    discord_webhook_url' | xargs | cut -d' ' -f2)
-			curl -v -i -H "Accept: application/json" -H "Content-Type: multipart/form-data" -X POST -F file1=@${1} $discord_url 2>>"$LOGFILE" >/dev/null
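+			# Assumes a provider-config.yaml block like:
+			#   discord:
+			#     - discord_webhook_url: "https://discord.com/api/webhooks/..."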
+			discord_url=$(sed -n '/^discord:/,/^[^ ]/p' ${NOTIFY_CONFIG} | sed -n 's/^[ ]*discord_webhook_url:[ ]*"\([^"]*\)".*/\1/p')
+			curl -v -i -H "Accept: application/json" -H "Content-Type: multipart/form-data" -X POST -F 'payload_json={"username": "test", "content": "hello"}' -F file1=@${1} $discord_url 2>>"$LOGFILE" >/dev/null
 		fi
 		if [[ -n $slack_channel ]] && [[ -n $slack_auth ]]; then
 			notification "[$(date +'%Y-%m-%d %H:%M:%S')] Sending ${domain} data over Slack" info

From 607520862dbb1b9d0378d6169fdc28583a9f9e2c Mon Sep 17 00:00:00 2001
From: six2dez <alexis.fernandez@visma.com>
Date: Fri, 15 Nov 2024 12:11:30 +0100
Subject: [PATCH 25/34] nuclei extra args added

---
 reconftw.cfg | 1 +
 reconftw.sh  | 4 ++--
 2 files changed, 3 insertions(+), 2 deletions(-)

diff --git a/reconftw.cfg b/reconftw.cfg
index 34b6e606..0e4402fd 100644
--- a/reconftw.cfg
+++ b/reconftw.cfg
@@ -99,6 +99,7 @@ WAF_DETECTION=true # Detect WAFs
 NUCLEICHECK=true # Enable or disable nuclei
 NUCLEI_TEMPLATES_PATH="$HOME/nuclei-templates" # Set nuclei templates path
 NUCLEI_SEVERITY="info,low,medium,high,critical" # Set templates criticity
+NUCLEI_EXTRA_ARGS="" # Additional nuclei extra flags, don't set the severity here but the exclusions like " -etags openssh"
 NUCLEI_FLAGS=" -silent -t ${NUCLEI_TEMPLATES_PATH}/ -retries 2" # Additional nuclei extra flags, don't set the severity here but the exclusions like " -etags openssh"
 NUCLEI_FLAGS_JS=" -silent -tags exposure,token -severity info,low,medium,high,critical" # Additional nuclei extra flags for js secrets
 URL_CHECK=true # Enable or disable URL collection
diff --git a/reconftw.sh b/reconftw.sh
index 8341cf98..13eb028e 100755
--- a/reconftw.sh
+++ b/reconftw.sh
@@ -3306,7 +3306,7 @@ function nuclei_check() {
 				printf "${yellow}\n[$(date +'%Y-%m-%d %H:%M:%S')] Running: Nuclei Severity: $crit ${reset}\n\n"
 
 				# Run nuclei for each severity level
-				nuclei $NUCLEI_FLAGS -severity "$crit" -nh -rl "$NUCLEI_RATELIMIT" -o "nuclei_output/${crit}.txt" <.tmp/webs_nuclei.txt
+				nuclei $NUCLEI_FLAGS -severity "$crit" -nh -rl "$NUCLEI_RATELIMIT" $NUCLEI_EXTRA_ARGS -o "nuclei_output/${crit}.txt" <.tmp/webs_nuclei.txt
 			done
 			printf "\n\n"
 		else
@@ -3321,7 +3321,7 @@ function nuclei_check() {
 					axiom-scan .tmp/webs_nuclei.txt -m nuclei \
 						--nuclei-templates "$NUCLEI_TEMPLATES_PATH" \
 						-severity "$crit" -nh -rl "$NUCLEI_RATELIMIT" \
-						-o "nuclei_output/${crit}.txt" "$AXIOM_EXTRA_ARGS" 2>>"$LOGFILE" >/dev/null
+						"$NUCLEI_EXTRA_ARGS" -o "nuclei_output/${crit}.txt" "$AXIOM_EXTRA_ARGS" 2>>"$LOGFILE" >/dev/null
 
 					# Display the results if the output file exists and is not empty
 					if [[ -s "nuclei_output/${crit}.txt" ]]; then

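Quoting note for both hunks: NUCLEI_EXTRA_ARGS may carry several flags (e.g. -etags openssh), so it must expand unquoted for the shell to word-split it into separate arguments; quoted, nuclei would receive the whole string (or an empty argument when the variable is unset) as a single token. A quick illustration:

    NUCLEI_EXTRA_ARGS=" -etags openssh"
    printf '[%s]\n' $NUCLEI_EXTRA_ARGS     # two arguments: [-etags] [openssh]
    printf '[%s]\n' "$NUCLEI_EXTRA_ARGS"   # one argument:  [ -etags openssh]
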
From 96fbd2d91225c830e1764426ad531ca610876a7a Mon Sep 17 00:00:00 2001
From: six2dez <alexis.fernandez@visma.com>
Date: Fri, 15 Nov 2024 12:27:20 +0100
Subject: [PATCH 26/34] zen mode added

---
 reconftw.sh | 62 +++++++++++++++++++++++++++++++++++++++++++++--------
 1 file changed, 53 insertions(+), 9 deletions(-)

diff --git a/reconftw.sh b/reconftw.sh
index 13eb028e..469a0430 100755
--- a/reconftw.sh
+++ b/reconftw.sh
@@ -6098,6 +6098,31 @@ function webs_menu() {
 	end
 }
 
+function zen_menu() {
+	if [[ $AXIOM == true ]]; then
+		axiom_launch
+		axiom_selected
+	fi
+	subdomains_full
+	webprobe_full
+	subtakeover
+	remove_big_files
+	s3buckets
+	screenshot
+	#	virtualhosts
+	cdnprovider
+	waf_checks
+	fuzz
+	iishortname
+	nuclei_check
+
+	if [[ $AXIOM == true ]]; then
+		axiom_shutdown
+	fi
+	cms_scanner
+	end
+}
+
 function help() {
 	printf "\n Usage: $0 [-d domain.tld] [-m name] [-l list.txt] [-x oos.txt] [-i in.txt] "
 	printf "\n           	      [-r] [-s] [-p] [-a] [-w] [-n] [-i] [-h] [-f] [--deep] [-o OUTPUT]\n\n"
@@ -6105,16 +6130,17 @@ function help() {
 	printf "   -d domain.tld     Target domain\n"
 	printf "   -m company        Target company name\n"
 	printf "   -l list.txt       Targets list (One on each line)\n"
-	printf "   -x oos.txt        Exclude subdomains list (Out Of Scope)\n"
-	printf "   -i in.txt         Include subdomains list\n"
+	printf "   -x oos.txt        Excludes subdomains list (Out Of Scope)\n"
+	printf "   -i in.txt         Includes subdomains list\n"
 	printf " \n"
 	printf " ${bblue}MODE OPTIONS${reset}\n"
-	printf "   -r, --recon       Recon - Perform full recon process (without attacks)\n"
-	printf "   -s, --subdomains  Subdomains - Perform Subdomain Enumeration, Web probing and check for sub-tko\n"
-	printf "   -p, --passive     Passive - Perform only passive steps\n"
-	printf "   -a, --all         All - Perform all checks and active exploitations\n"
-	printf "   -w, --web         Web - Perform web checks from list of subdomains\n"
-	printf "   -n, --osint       OSINT - Check for public intel data\n"
+	printf "   -r, --recon       Recon - Performs full recon process (without attacks)\n"
+	printf "   -s, --subdomains  Subdomains - Performs Subdomain Enumeration, Web probing and check for sub-tko\n"
+	printf "   -p, --passive     Passive - Performs only passive steps\n"
+	printf "   -a, --all         All - Performs all checks and active exploitations\n"
+	printf "   -w, --web         Web - Performs web checks from list of subdomains\n"
+	printf "   -n, --osint       OSINT - Checks for public intel data\n"
+	printf "   -z, --zen         Zen - Performs a recon process covering the basics and some vulns \n"
 	printf "   -c, --custom      Custom - Launches specific function against target, u need to know the function name first\n"
 	printf "   -h                Help - Show help section\n"
 	printf " \n"
@@ -6158,7 +6184,7 @@ if [[ $OSTYPE == "darwin"* ]]; then
 	PATH="/usr/local/opt/coreutils/libexec/gnubin:$PATH"
 fi
 
-PROGARGS=$(getopt -o 'd:m:l:x:i:o:f:q:c:rspanwvh::' --long 'domain:,list:,recon,subdomains,passive,all,web,osint,deep,help,vps' -n 'reconFTW' -- "$@")
+PROGARGS=$(getopt -o 'd:m:l:x:i:o:f:q:c:zrspanwvh::' --long 'domain:,list:,recon,subdomains,passive,all,web,osint,zen,deep,help,vps' -n 'reconFTW' -- "$@")
 
 # Note the quotes around "$PROGARGS": they are essential!
 eval set -- "$PROGARGS"
@@ -6232,6 +6258,11 @@ while true; do
 		shift 2
 		continue
 		;;
+	'-z' | '--zen')
+		opt_mode='z'
+		shift
+		continue
+		;;
 	# extra stuff
 	'-o')
 		if [[ $2 != /* ]]; then
@@ -6468,6 +6499,19 @@ case $opt_mode in
 		end
 	fi
 	;;
+'z')
+	if [[ -n $list ]]; then
+		if [[ $AXIOM == true ]]; then
+			mode="zen_menu"
+		fi
+		sed -i 's/\r$//' $list
+		for domain in $(cat $list); do
+			zen_menu
+		done
+	else
+		zen_menu
+	fi
+	;;
 'c')
 	if [[ -n $multi ]]; then
 		if [[ $AXIOM == true ]]; then

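A detail worth keeping in mind in the getopt string: a trailing colon declares an option as argument-taking. Since -z/--zen takes no value and its handler shifts only once, the short option is declared as a plain z (and the long form as zen, not zen:). Behavior sketch with util-linux getopt:

    getopt -o 'z' -- -z     # ->  -z --
    getopt -o 'z:' -- -z    # -> getopt: option requires an argument -- 'z'
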
From 94bf16c3bc0269bd024f660d86b123450d518f73 Mon Sep 17 00:00:00 2001
From: six2dez <alexis.fernandez@visma.com>
Date: Fri, 15 Nov 2024 12:31:25 +0100
Subject: [PATCH 27/34] init zen

---
 reconftw.sh | 1 +
 1 file changed, 1 insertion(+)

diff --git a/reconftw.sh b/reconftw.sh
index 469a0430..9aed0010 100755
--- a/reconftw.sh
+++ b/reconftw.sh
@@ -6099,6 +6099,7 @@ function webs_menu() {
 }
 
 function zen_menu() {
+	start
 	if [[ $AXIOM == true ]]; then
 		axiom_launch
 		axiom_selected

From cd0eac5e2cbf320e3901a37228f8071ed8fe517c Mon Sep 17 00:00:00 2001
From: six2dez <alexis.fernandez@visma.com>
Date: Mon, 18 Nov 2024 11:32:25 +0100
Subject: [PATCH 28/34] urlfinder instead of waymore

---
 install.sh  |  4 ++--
 reconftw.sh | 11 ++---------
 2 files changed, 4 insertions(+), 11 deletions(-)

diff --git a/install.sh b/install.sh
index f5c945fb..bb708f35 100755
--- a/install.sh
+++ b/install.sh
@@ -98,6 +98,7 @@ declare -A gotools=(
 	["ppmap"]="go install -v github.com/kleiton0x00/ppmap@latest"
 	["sourcemapper"]="go install -v github.com/denandz/sourcemapper@latest"
 	["jsluice"]="go install -v github.com/BishopFox/jsluice/cmd/jsluice@latest"
+	["urlfinder"]="go install -v github.com/projectdiscovery/urlfinder/cmd/urlfinder@latest"
 )
 
 # Declare repositories and their paths
@@ -133,7 +134,6 @@ declare -A repos=(
 	["ffufPostprocessing"]="Damian89/ffufPostprocessing"
 	["misconfig-mapper"]="intigriti/misconfig-mapper"
 	["Spoofy"]="MattKeeley/Spoofy"
-	["Waymore"]="xnl-h4ck3r/waymore"
 	["xnLinkFinder"]="xnl-h4ck3r/xnLinkFinder"
 	["porch-pirate"]="MandConsultingGroup/porch-pirate"
 	["MetaFinder"]="Josue87/MetaFinder"
@@ -636,7 +636,7 @@ function initial_setup() {
 	"$SUDO" cp "${GOPATH}/bin/"* /usr/local/bin/ &>/dev/null || true
 
 	# Final reminders
-	echo -e "${yellow}Remember to set your API keys:\n- subfinder (${HOME}/.config/subfinder/provider-config.yaml)\n- GitHub (${HOME}/Tools/.github_tokens)\n- GitLab (${HOME}/Tools/.gitlab_tokens)\n- SSRF Server (COLLAB_SERVER in reconftw.cfg or env var)\n- Waymore (${HOME}/.config/waymore/config.yml)\n- Blind XSS Server (XSS_SERVER in reconftw.cfg or env var)\n- notify (${HOME}/.config/notify/provider-config.yaml)\n- WHOISXML API (WHOISXML_API in reconftw.cfg or env var)\n${reset}"
+	echo -e "${yellow}Remember to set your API keys:\n- subfinder (${HOME}/.config/subfinder/provider-config.yaml)\n- GitHub (${HOME}/Tools/.github_tokens)\n- GitLab (${HOME}/Tools/.gitlab_tokens)\n- SSRF Server (COLLAB_SERVER in reconftw.cfg or env var)\n- Blind XSS Server (XSS_SERVER in reconftw.cfg or env var)\n- notify (${HOME}/.config/notify/provider-config.yaml)\n- WHOISXML API (WHOISXML_API in reconftw.cfg or env var)\n${reset}"
 	echo -e "${bgreen}Finished!${reset}\n"
 	echo -e "${bgreen}#######################################################################${reset}"
 }
diff --git a/reconftw.sh b/reconftw.sh
index 9aed0010..0a1795c6 100755
--- a/reconftw.sh
+++ b/reconftw.sh
@@ -156,7 +156,7 @@ function tools_installed() {
 	declare -A tools_commands=(
 		["brutespray"]="brutespray"
 		["xnLinkFinder"]="xnLinkFinder"
-		["waymore"]="waymore"
+		["urlfinder"]="urlfinder"
 		["github-endpoints"]="github-endpoints"
 		["github-subdomains"]="github-subdomains"
 		["gitlab-subdomains"]="gitlab-subdomains"
@@ -3575,14 +3575,7 @@ function urlchecks() {
 		if [[ -s "webs/webs_all.txt" ]]; then
 			if [[ $AXIOM != true ]]; then
 				if [[ $URL_CHECK_PASSIVE == true ]]; then
-					if [[ $DEEP == true ]]; then
-						unfurl -u domains <webs/webs_all.txt >.tmp/waymore_input.txt
-						waymore -i .tmp/waymore_input.txt -mode U -f -oU .tmp/url_extract_tmp.txt 2>>"$LOGFILE" >/dev/null
-					else
-						unfurl -u domains <webs/webs_all.txt >.tmp/waymore_input.txt
-						waymore -i .tmp/waymore_input.txt -mode U -f -oU .tmp/url_extract_tmp.txt 2>>"$LOGFILE" >/dev/null
-					fi
-
+					urlfinder -d $domain -o .tmp/url_extract_tmp.txt 2>>"$LOGFILE" >/dev/null
 					if [[ -s $GITHUB_TOKENS ]]; then
 						github-endpoints -q -k -d "$domain" -t "$GITHUB_TOKENS" -o .tmp/github-endpoints.txt 2>>"$LOGFILE" >/dev/null
 						if [[ -s ".tmp/github-endpoints.txt" ]]; then

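The replacement also collapses the former deep/non-deep branches, which ran an identical waymore command anyway, into a single passive pass per apex domain. A sketch of the resulting flow (only the -d and -o flags used in the hunk are assumed):

    # Passive URL collection, then merge github-endpoints findings if present
    urlfinder -d "$domain" -o .tmp/url_extract_tmp.txt 2>>"$LOGFILE" >/dev/null
    [ -s ".tmp/github-endpoints.txt" ] && anew -q .tmp/url_extract_tmp.txt <.tmp/github-endpoints.txt
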
From b627019adbb38e6c8501677077f49f4cd7db95db Mon Sep 17 00:00:00 2001
From: six2dez <alexis.fernandez@visma.com>
Date: Fri, 22 Nov 2024 10:10:53 +0100
Subject: [PATCH 29/34] removed verbose msgs

---
 reconftw.sh | 139 +++-------------------------------------------------
 1 file changed, 7 insertions(+), 132 deletions(-)

diff --git a/reconftw.sh b/reconftw.sh
index 0a1795c6..10fcce75 100755
--- a/reconftw.sh
+++ b/reconftw.sh
@@ -340,7 +340,6 @@ function github_repos() {
 					return 1
 				fi
 			else
-				printf "%b[!] No repository URLs found to clone.%b\n" "$yellow" "$reset"
 				end_func "Results are saved in $domain/osint/github_company_secrets.json" "${FUNCNAME[0]}"
 				return 1
 			fi
@@ -348,7 +347,6 @@ function github_repos() {
 			if [[ -d ".tmp/github_repos/" ]]; then
 				ls .tmp/github_repos >.tmp/github_repos_folders.txt
 			else
-				printf "%b[!] No repositories cloned.%b\n" "$yellow" "$reset"
 				end_func "Results are saved in $domain/osint/github_company_secrets.json" "${FUNCNAME[0]}"
 				return 1
 			fi
@@ -360,7 +358,6 @@ function github_repos() {
 					return 1
 				fi
 			else
-				printf "%b[!] No repository folders found for gitleaks.%b\n" "$yellow" "$reset"
 				end_func "Results are saved in $domain/osint/github_company_secrets.json" "${FUNCNAME[0]}"
 				return 1
 			fi
@@ -378,7 +375,6 @@ function github_repos() {
 					return 1
 				fi
 			else
-				printf "%b[!] No secrets found to compile.%b\n" "$yellow" "$reset"
 				end_func "Results are saved in $domain/osint/github_company_secrets.json" "${FUNCNAME[0]}"
 				return 1
 			fi
@@ -1036,9 +1032,6 @@ function sub_tls() {
 			grep "\.$domain$\|^$domain$" .tmp/subdomains_tlsx.txt |
 				grep -E '^((http|https):\/\/)?([a-zA-Z0-9]([a-zA-Z0-9\-]*[a-zA-Z0-9])?\.)+[a-zA-Z]{1,}(\/.*)?$' |
 				sed "s/|__ //" | anew -q .tmp/subdomains_tlsx_clean.txt
-		else
-			printf "%b[!] No subdomains found in tlsx output.%b\n" "$yellow" "$reset"
-			return 0
 		fi
 
 		if [[ $AXIOM != true ]]; then
@@ -1052,9 +1045,6 @@ function sub_tls() {
 					-l "$PUREDNS_PUBLIC_LIMIT" --rate-limit-trusted "$PUREDNS_TRUSTED_LIMIT" \
 					--wildcard-tests "$PUREDNS_WILDCARDTEST_LIMIT" --wildcard-batch "$PUREDNS_WILDCARDBATCH_LIMIT" \
 					2>>"$LOGFILE" >/dev/null
-			else
-				printf "%b[!] No subdomains to resolve.%b\n" "$yellow" "$reset"
-				return 0
 			fi
 		else
 			if ! resolvers_update_quick_axiom; then
@@ -1067,9 +1057,6 @@ function sub_tls() {
 					--wildcard-tests "$PUREDNS_WILDCARDTEST_LIMIT" --wildcard-batch "$PUREDNS_WILDCARDBATCH_LIMIT" \
 					-o .tmp/subdomains_tlsx_resolved.txt $AXIOM_EXTRA_ARGS \
 					2>>"$LOGFILE" >/dev/null
-			else
-				printf "%b[!] No subdomains to resolve.%b\n" "$yellow" "$reset"
-				return 0
 			fi
 		fi
 
@@ -1288,9 +1275,6 @@ function sub_brute() {
 					-l "$PUREDNS_PUBLIC_LIMIT" --rate-limit-trusted "$PUREDNS_TRUSTED_LIMIT" \
 					--wildcard-tests "$PUREDNS_WILDCARDTEST_LIMIT" --wildcard-batch "$PUREDNS_WILDCARDBATCH_LIMIT" \
 					2>>"$LOGFILE" >/dev/null
-			else
-				printf "%b[!] No subdomains found during bruteforce.%b\n" "$yellow" "$reset"
-				return 0
 			fi
 
 		else
@@ -1314,9 +1298,6 @@ function sub_brute() {
 					--resolvers-trusted /home/op/lists/resolvers_trusted.txt \
 					--wildcard-tests "$PUREDNS_WILDCARDTEST_LIMIT" --wildcard-batch "$PUREDNS_WILDCARDBATCH_LIMIT" \
 					-o .tmp/subs_brute_valid.txt "$AXIOM_EXTRA_ARGS" 2>>"$LOGFILE" >/dev/null
-			else
-				printf "%b[!] No subdomains found during bruteforce.%b\n" "$yellow" "$reset"
-				return 0
 			fi
 		fi
 
@@ -1549,8 +1530,6 @@ function sub_scraping() {
 			else
 				end_subfunc "Skipping Subdomains Web Scraping: Too Many Subdomains" "${FUNCNAME[0]}"
 			fi
-		else
-			end_subfunc "No subdomains to search (code scraping)" "${FUNCNAME[0]}"
 		fi
 
 	else
@@ -1911,9 +1890,6 @@ function sub_recursive_passive() {
 		# Passive recursive
 		if [[ -s "subdomains/subdomains.txt" ]]; then
 			dsieve -if subdomains/subdomains.txt -f 3 -top "$DEEP_RECURSIVE_PASSIVE" >.tmp/subdomains_recurs_top.txt
-		else
-			printf "%b[!] No subdomains to process.%b\n" "$yellow" "$reset"
-			return 1
 		fi
 
 		if [[ $AXIOM != true ]]; then
@@ -1926,14 +1902,12 @@ function sub_recursive_passive() {
 				subfinder -all -dL .tmp/subdomains_recurs_top.txt -max-time "${SUBFINDER_ENUM_TIMEOUT}" \
 					-silent -o .tmp/passive_recursive_tmp.txt 2>>"$LOGFILE"
 			else
-				printf "%b[!] No top subdomains to process.%b\n" "$yellow" "$reset"
 				return 1
 			fi
 
 			if [[ -s ".tmp/passive_recursive_tmp.txt" ]]; then
 				cat .tmp/passive_recursive_tmp.txt| anew -q .tmp/passive_recursive.txt
 			else
-				printf "%b[!] No passive recursive subdomains found.%b\n" "$yellow" "$reset"
 			fi
 
 			if [[ -s ".tmp/passive_recursive.txt" ]]; then
@@ -1941,8 +1915,6 @@ function sub_recursive_passive() {
 					-l "$PUREDNS_PUBLIC_LIMIT" --rate-limit-trusted "$PUREDNS_TRUSTED_LIMIT" \
 					--wildcard-tests "$PUREDNS_WILDCARDTEST_LIMIT" --wildcard-batch "$PUREDNS_WILDCARDBATCH_LIMIT" \
 					2>>"$LOGFILE" >/dev/null
-			else
-				printf "%b[!] No subdomains to resolve.%b\n" "$yellow" "$reset"
 			fi
 
 		else
@@ -1954,14 +1926,11 @@ function sub_recursive_passive() {
 			if [[ -s ".tmp/subdomains_recurs_top.txt" ]]; then
 				axiom-scan .tmp/subdomains_recurs_top.txt -m subfinder -all -o .tmp/subfinder_prec.txt "$AXIOM_EXTRA_ARGS" 2>>"$LOGFILE" >/dev/null
 			else
-				printf "%b[!] No top subdomains to process.%b\n" "$yellow" "$reset"
 				return 1
 			fi
 
 			if [[ -s ".tmp/subfinder_prec.txt" ]]; then
 				cat .tmp/subfinder_prec.txt | anew -q .tmp/passive_recursive.txt
-			else
-				printf "%b[!] No passive recursive subdomains found.%b\n" "$yellow" "$reset"
 			fi
 
 			if [[ -s ".tmp/passive_recursive.txt" ]]; then
@@ -1969,8 +1938,6 @@ function sub_recursive_passive() {
 					-r /home/op/lists/resolvers.txt --resolvers-trusted /home/op/lists/resolvers_trusted.txt \
 					--wildcard-tests "$PUREDNS_WILDCARDTEST_LIMIT" --wildcard-batch "$PUREDNS_WILDCARDBATCH_LIMIT" \
 					-o .tmp/passive_recurs_tmp.txt "$AXIOM_EXTRA_ARGS" 2>>"$LOGFILE" >/dev/null
-			else
-				printf "%b[!] No subdomains to resolve.%b\n" "$yellow" "$reset"
 			fi
 		fi
 
@@ -2249,8 +2216,6 @@ function subtakeover() {
 				axiom-scan .tmp/webs_subs.txt -m nuclei --nuclei-templates "${NUCLEI_TEMPLATES_PATH}" \
 					-tags takeover -nh -severity info,low,medium,high,critical -retries 3 -rl "$NUCLEI_RATELIMIT" \
 					-t "${NUCLEI_TEMPLATES_PATH}" -o .tmp/tko.txt $AXIOM_EXTRA_ARGS 2>>"$LOGFILE" >/dev/null
-			else
-				printf "%b[!] No web subdomains to scan.%b\n" "$yellow" "$reset"
 			fi
 		fi
 
@@ -2261,8 +2226,6 @@ function subtakeover() {
 		if [[ -s ".tmp/subs_dns_tko.txt" ]]; then
 			cat .tmp/subs_dns_tko.txt 2>/dev/null | dnstake -c "$DNSTAKE_THREADS" -s 2>>"$LOGFILE" |
 				sed '/^$/d' | anew -q .tmp/tko.txt
-		else
-			printf "%b[!] No subdomains for DNS takeover scan.%b\n" "$yellow" "$reset"
 		fi
 
 		# Remove empty lines from tko.txt
@@ -2320,8 +2283,6 @@ function zonetransfer() {
 			if ! grep -q "Transfer failed" "subdomains/zonetransfer.txt"; then
 				notification "Zone transfer found on ${domain}!" "info"
 			fi
-		else
-			printf "%b[!] No zone transfer data collected.%b\n" "$yellow" "$reset"
 		fi
 
 		end_func "Results are saved in $domain/subdomains/zonetransfer.txt" "${FUNCNAME[0]}"
@@ -2368,8 +2329,6 @@ function s3buckets() {
 		if [[ $AXIOM != true ]]; then
 			if [[ -s "subdomains/subdomains.txt" ]]; then
 				s3scanner scan -f subdomains/subdomains.txt 2>>"$LOGFILE" | anew -q .tmp/s3buckets.txt
-			else
-				printf "%b[!] No subdomains to scan with s3scanner.%b\n" "$yellow" "$reset"
 			fi
 		else
 			axiom-scan subdomains/subdomains.txt -m s3scanner -o .tmp/s3buckets_tmp.txt "$AXIOM_EXTRA_ARGS" 2>>"$LOGFILE" >/dev/null
@@ -2473,7 +2432,6 @@ function s3buckets() {
 			fi
 		else
 			NUMOFLINES1=0
-			printf "%b[!] No cloudhunter_open_buckets.txt found or it is empty.%b\n" "$yellow" "$reset"
 		fi
 
 		# Process s3buckets results
@@ -2509,8 +2467,6 @@ function s3buckets() {
 					trufflehog gcs --bucket="$bucket_name" -j 2>/dev/null | jq -c | anew -q subdomains/cloudhunter_buckets_trufflehog.txt
 				fi
 			done <subdomains/cloudhunter_open_buckets.txt
-		else
-			printf "%b[!] No cloudhunter_open_buckets.txt found or it is empty.%b\n" "$yellow" "$reset"
 		fi
 
 		end_func "Results are saved in subdomains/s3buckets.txt, subdomains/cloud_assets.txt, subdomains/s3buckets_trufflehog.txt, and subdomains/cloudhunter_buckets_trufflehog.txt" "${FUNCNAME[0]}"
@@ -2559,8 +2515,6 @@ function geo_info() {
 						grep -aEiv "^(127|10|169\.254|172\.1[6-9]|172\.2[0-9]|172\.3[0-1]|192\.168)\." |
 						grep -oE "\b([0-9]{1,3}\.){3}[0-9]{1,3}\b" |
 						anew -q hosts/ips.txt
-				else
-					printf "%b[!] No valid IPs found in subs_ips_vhosts.txt.%b\n" "$yellow" "$reset"
 				fi
 			else
 				printf "%b\n" "$domain" |
@@ -2578,8 +2532,6 @@ function geo_info() {
 			while IFS= read -r ip; do
 				curl -s "https://ipinfo.io/widget/demo/$ip" >>"${dir}/hosts/ipinfo.txt"
 			done <"$ips_file"
-		else
-			printf "%b[!] No IPs to process in %s.%b\n" "$yellow" "$ips_file" "$reset"
 		fi
 
 		end_func "Results are saved in hosts/ipinfo.txt" "${FUNCNAME[0]}"
@@ -2752,8 +2704,6 @@ function webprobe_full() {
 			# Display new uncommon ports websites
 			if [[ -s "webs/webs_uncommon_ports.txt" ]]; then
 				cat "webs/webs_uncommon_ports.txt"
-			else
-				printf "%b[!] No new websites with uncommon ports found.%b\n" "$yellow" "$reset"
 			fi
 
 			# Update webs_all.txt
@@ -2838,8 +2788,6 @@ function screenshot() {
 			# Display new uncommon ports websites
 			if [[ -s "webs/webs_uncommon_ports.txt" ]]; then
 				cat "webs/webs_uncommon_ports.txt"
-			else
-				printf "%b[!] No new websites with uncommon ports found.%b\n" "$yellow" "$reset"
 			fi
 
 			# Update webs_all.txt
@@ -3022,15 +2970,11 @@ function portscan() {
 			if [[ -s ".tmp/subs_ips.txt" ]]; then
 				# Reorder fields and sort
 				awk '{ print $2 " " $1}' ".tmp/subs_ips.txt" | sort -k2 -n | anew -q hosts/subs_ips_vhosts.txt
-			else
-				printf "%b[!] No IPs found in subs_ips.txt.%b\n" "$yellow" "$reset"
 			fi
 
 			if [[ -s "hosts/subs_ips_vhosts.txt" ]]; then
 				# Extract IPs, filter out private ranges
 				awk '{print $1}' "hosts/subs_ips_vhosts.txt" | grep -aEiv "^(127|10|169\.254|172\.1[6-9]|172\.2[0-9]|172\.3[0-1]|192\.168)\." | grep -oE '\b([0-9]{1,3}\.){3}[0-9]{1,3}\b' | anew -q hosts/ips.txt
-			else
-				printf "%b[!] No data in subs_ips_vhosts.txt.%b\n" "$yellow" "$reset"
 			fi
 
 		else
@@ -3042,8 +2986,6 @@ function portscan() {
 		if [[ ! -s "hosts/cdn_providers.txt" ]]; then
 			if [[ -s "hosts/ips.txt" ]]; then
 				cdncheck -silent -resp -cdn -waf -nc <hosts/ips.txt 2>/dev/null >hosts/cdn_providers.txt
-			else
-				printf "%b[!] No IPs found in hosts/ips.txt.%b\n" "$yellow" "$reset"
 			fi
 		fi
 
@@ -3052,16 +2994,12 @@ function portscan() {
 			comm -23 <(sort -u hosts/ips.txt) <(cut -d'[' -f1 hosts/cdn_providers.txt | sed 's/[[:space:]]*$//' | sort -u) \
 				| grep -aEiv "^(127|10|169\.254|172\.1[6-9]|172\.2[0-9]|172\.3[0-1]|192\.168)\." | grep -oE '\b([0-9]{1,3}\.){3}[0-9]{1,3}\b' \
 				| sort -u | anew -q .tmp/ips_nocdn.txt
-		else
-			printf "%b[!] No IPs to process in hosts/ips.txt.%b\n" "$yellow" "$reset"
 		fi
 
 		# Display resolved IPs without CDN
 		printf "%b\n[%s] Resolved IP addresses (No CDN):%b\n\n" "$bblue" "$(date +'%Y-%m-%d %H:%M:%S')" "$reset"
 		if [[ -s ".tmp/ips_nocdn.txt" ]]; then
 			sort ".tmp/ips_nocdn.txt"
-		else
-			printf "%b[!] No IPs found after CDN filtering.%b\n" "$yellow" "$reset"
 		fi
 
 		printf "%b\n[%s] Scanning ports...%b\n\n" "$bblue" "$(date +'%Y-%m-%d %H:%M:%S')" "$reset"
@@ -3102,15 +3040,11 @@ function portscan() {
 			if [[ $AXIOM != true ]]; then
 				if [[ -s ".tmp/ips_nocdn.txt" ]]; then
 					"$SUDO" nmap $PORTSCAN_ACTIVE_OPTIONS -iL .tmp/ips_nocdn.txt -oA hosts/portscan_active 2>>"$LOGFILE" >/dev/null
-				else
-					printf "%b[!] No IPs to scan for active port scan.%b\n" "$yellow" "$reset"
 				fi
 			else
 				if [[ -s ".tmp/ips_nocdn.txt" ]]; then
 					axiom-scan .tmp/ips_nocdn.txt -m nmapx $PORTSCAN_ACTIVE_OPTIONS \
 					-oA hosts/portscan_active $AXIOM_EXTRA_ARGS 2>>"$LOGFILE" >/dev/null
-				else
-					printf "%b[!] No IPs to scan for active port scan.%b\n" "$yellow" "$reset"
 				fi
 			fi
 		fi
@@ -3170,8 +3104,6 @@ function cdnprovider() {
 		if [[ -s ".tmp/ips_cdn.txt" ]]; then
 			# Run cdncheck on the IPs and save to cdn_providers.txt
 			cdncheck -silent -resp -nc <.tmp/ips_cdn.txt | anew -q "$dir/hosts/cdn_providers.txt"
-		else
-			printf "%b[!] No IPs found for CDN provider check.%b\n" "$yellow" "$reset"
 		fi
 
 		end_func "Results are saved in hosts/cdn_providers.txt" "${FUNCNAME[0]}"
@@ -3382,7 +3314,7 @@ function fuzz() {
 					pushd "${tools}/ffufPostprocessing" >/dev/null || {
 						echo "Failed to cd directory in ${FUNCNAME[0]} @ line ${LINENO}"
 					}
-					./ffufPostprocessing -result-file $dir/.tmp/fuzzing/${sub_out}.json -overwrite-result-file
+					./ffufPostprocessing -result-file $dir/.tmp/fuzzing/${sub_out}.json -overwrite-result-file 2>>"$LOGFILE" >/dev/null
 					popd >/dev/null || {
 						echo "Failed to popd in ${FUNCNAME[0]} @ line ${LINENO}"
 					}
@@ -3398,7 +3330,7 @@ function fuzz() {
 				pushd "${tools}/ffufPostprocessing" >/dev/null || {
 						echo "Failed to cd directory in ${FUNCNAME[0]} @ line ${LINENO}"
 					}
-					[ -s "$dir/.tmp/ffuf-content.json" ] && ./ffufPostprocessing -result-file $dir/.tmp/ffuf-content.json -overwrite-result-file
+					[ -s "$dir/.tmp/ffuf-content.json" ] && ./ffufPostprocessing -result-file $dir/.tmp/ffuf-content.json -overwrite-result-file 2>>"$LOGFILE" >/dev/null
 				popd >/dev/null || {
 					echo "Failed to popd in ${FUNCNAME[0]} @ line ${LINENO}"
 				}
@@ -3659,8 +3591,6 @@ function urlchecks() {
 					notification "Sending URLs to proxy" "info"
 					ffuf -mc all -w webs/url_extract.txt -u FUZZ -replay-proxy "$proxy_url" 2>>"$LOGFILE" >/dev/null
 				fi
-			else
-				printf "%b[!] No URLs extracted.%b\n" "$yellow" "$reset"
 			fi
 		fi
 	else
@@ -3771,7 +3701,6 @@ function url_ext() {
 
 			# Iterate over extensions and extract matching URLs
 			for t in "${ext[@]}"; do
-				printf "${yellow}\n[$(date +'%Y-%m-%d %H:%M:%S')] Processing extension: $t${reset}\n"
 
 				# Extract unique matching URLs
 				matches=$(grep -aEi "\.(${t})($|/|\?)" ".tmp/url_extract_tmp.txt" | sort -u | sed '/^$/d')
@@ -3837,21 +3766,17 @@ function jschecks() {
 				grep -Eiv "\.(eot|jpg|jpeg|gif|css|tif|tiff|png|ttf|otf|woff|woff2|ico|pdf|svg|txt|js)$" .tmp/subjslinks.txt |
 					anew -q js/nojs_links.txt
 				grep -iE "\.js($|\?)" .tmp/subjslinks.txt | anew -q .tmp/url_extract_js.txt
-			else
-				printf "%b[!] No subjslinks found.%b\n" "$yellow" "$reset"
 			fi
 
 			python3 "${tools}/urless/urless/urless.py" <.tmp/url_extract_js.txt |
 				anew -q js/url_extract_js.txt 2>>"$LOGFILE" >/dev/null
 
-			printf "%b[%s] Running: Resolving JS URLs 2/6%b\n" "$yellow" "$(date +'%Y-%m-%d %H:%M:%S')" "$reset"
+			printf "%bRunning: Resolving JS URLs 2/6%b\n" "$yellow" "$reset"
 			if [[ $AXIOM != true ]]; then
 				if [[ -s "js/url_extract_js.txt" ]]; then
 					httpx -follow-redirects -random-agent -silent -timeout "$HTTPX_TIMEOUT" -threads "$HTTPX_THREADS" \
 						-rl "$HTTPX_RATELIMIT" -status-code -content-type -retries 2 -no-color <js/url_extract_js.txt |
 						grep "[200]" | grep "javascript" | cut -d ' ' -f1 | anew -q js/js_livelinks.txt
-				else
-					printf "%b[!] No JavaScript URLs to resolve.%b\n" "$yellow" "$reset"
 				fi
 			else
 				if [[ -s "js/url_extract_js.txt" ]]; then
@@ -3861,11 +3786,7 @@ function jschecks() {
 					if [[ -s ".tmp/js_livelinks.txt" ]]; then
 						cat .tmp/js_livelinks.txt | anew .tmp/web_full_info.txt |
 							grep "[200]" | grep "javascript" | cut -d ' ' -f1 | anew -q js/js_livelinks.txt
-					else
-						printf "%b[!] No live JavaScript links found.%b\n" "$yellow" "$reset"
 					fi
-				else
-					printf "%b[!] No JavaScript URLs to resolve.%b\n" "$yellow" "$reset"
 				fi
 			fi
 
@@ -3877,8 +3798,6 @@ function jschecks() {
 				interlace -tL js/js_livelinks.txt -threads "$INTERLACE_THREADS" \
 					-c "sourcemapper -jsurl '_target_' -output _output_/_cleantarget_" \
 					-o .tmp/sourcemapper 2>>"$LOGFILE" >/dev/null
-			else
-				printf "%b[!] No live JavaScript links for sourcemapping.%b\n" "$yellow" "$reset"
 			fi
 
 			if [[ -s ".tmp/url_extract_jsmap.txt" ]]; then
@@ -3887,12 +3806,10 @@ function jschecks() {
 					-o .tmp/sourcemapper 2>>"$LOGFILE" >/dev/null
 			fi
 
-			printf "%b[%s] Running: Gathering endpoints 4/6%b\n" "$yellow" "$(date +'%Y-%m-%d %H:%M:%S')" "$reset"
+			printf "%bRunning: Gathering endpoints 4/6%b\n" "$yellow" "$reset"
 			if [[ -s "js/js_livelinks.txt" ]]; then
 				xnLinkFinder -i js/js_livelinks.txt -sf subdomains/subdomains.txt -d "$XNLINKFINDER_DEPTH" \
 					-o .tmp/js_endpoints.txt 2>>"$LOGFILE" >/dev/null
-			else
-				printf "%b[!] No live JavaScript links for endpoint extraction.%b\n" "$yellow" "$reset"
 			fi
 
 			find .tmp/sourcemapper/ \( -name "*.js" -o -name "*.ts" \) -type f |
@@ -3901,11 +3818,8 @@ function jschecks() {
 			if [[ -s ".tmp/js_endpoints.txt" ]]; then
 				sed -i '/^\//!d' .tmp/js_endpoints.txt
 				cat .tmp/js_endpoints.txt | anew -q js/js_endpoints.txt
-			else
-				printf "%b[!] No JavaScript endpoints found.%b\n" "$yellow" "$reset"
 			fi
-
-			printf "%b[%s] Running: Gathering secrets 5/6%b\n" "$yellow" "$(date +'%Y-%m-%d %H:%M:%S')" "$reset"
+			printf "%bRunning: Gathering secrets 5/6%b\n" "$yellow" "$reset"
 			if [[ -s "js/js_livelinks.txt" ]]; then
 				axiom-scan js/js_livelinks.txt -m mantra -ua "$HEADER" -s -o js/js_secrets.txt "$AXIOM_EXTRA_ARGS" &>/dev/null
 				if [[ -s "js/js_secrets.txt" ]]; then
@@ -3914,24 +3828,14 @@ function jschecks() {
 					trufflehog filesystem .tmp/sourcemapper/ -j 2>/dev/null |
 						jq -c | anew -q js/js_secrets_trufflehog.txt
 					sed -r "s/\x1B\[([0-9]{1,3}(;[0-9]{1,2};?)?)?[mGK]//g" -i js/js_secrets.txt
-				else
-					printf "%b[!] No secrets found in JavaScript files.%b\n" "$yellow" "$reset"
 				fi
-			else
-				printf "%b[!] No live JavaScript links for secret gathering.%b\n" "$yellow" "$reset"
 			fi
-
-			printf "%b[%s] Running: Building wordlist 6/6%b\n" "$yellow" "$(date +'%Y-%m-%d %H:%M:%S')" "$reset"
+			printf "%bRunning: Building wordlist 6/6%b\n" "$yellow" "$reset"
 			if [[ -s "js/js_livelinks.txt" ]]; then
 				interlace -tL js/js_livelinks.txt -threads "$INTERLACE_THREADS" \
 					-c "python3 ${tools}/getjswords.py '_target_' | anew -q webs/dict_words.txt" 2>>"$LOGFILE" >/dev/null
-			else
-				printf "%b[!] No live JavaScript links for wordlist building.%b\n" "$yellow" "$reset"
 			fi
-
 			end_func "Results are saved in $domain/js folder" "${FUNCNAME[0]}"
-		else
-			end_func "No JS URLs found for $domain, function skipped" "${FUNCNAME[0]}"
 		fi
 	else
 		if [[ $JSCHECKS == false ]]; then
@@ -3948,7 +3852,7 @@ function jschecks() {
 function wordlist_gen() {
 
 	# Create necessary directories
-	if ! mkdir -p .tmp webs gf; then
+	if ! mkdir -p .tmp webs; then
 		printf "%b[!] Failed to create directories.%b\n" "$bred" "$reset"
 		return 1
 	fi
@@ -3980,28 +3884,8 @@ function wordlist_gen() {
 			tr "[:punct:]" "\n" <".tmp/url_extract_tmp.txt" | anew -q "webs/dict_words.txt"
 		fi
 
-		# Process js_endpoints.txt if it exists and is not empty
-		if [[ -s ".tmp/js_endpoints.txt" ]]; then
-			printf "${yellow}\n[$(date +'%Y-%m-%d %H:%M:%S')] Processing js_endpoints.txt...${reset}\n"
-			unfurl -u format '%s://%d%p' ".tmp/js_endpoints.txt" 2>>"$LOGFILE" |
-				anew -q "webs/all_paths.txt"
-		fi
-
-		# Process url_extract_tmp.txt if it exists and is not empty
-		if [[ -s ".tmp/url_extract_tmp.txt" ]]; then
-			printf "${yellow}\n[$(date +'%Y-%m-%d %H:%M:%S')] Processing url_extract_tmp.txt...${reset}\n"
-			unfurl -u format '%s://%d%p' ".tmp/url_extract_tmp.txt" 2>>"$LOGFILE" |
-				anew -q "webs/all_paths.txt"
-		fi
-
 		end_func "Results are saved in $domain/webs/dict_[words|paths].txt" "${FUNCNAME[0]}"
 
-		# Handle proxy if conditions are met
-		if [[ $PROXY == true ]] && [[ -n $proxy_url ]] && [[ "$(wc -l <webs/all_paths.txt)" -le $DEEP_LIMIT2 ]]; then
-			notification "Sending URLs to proxy" info
-			ffuf -mc all -w "webs/all_paths.txt" -u "FUZZ" -replay-proxy "$proxy_url" 2>>"$LOGFILE" >/dev/null
-		fi
-
 	else
 		# Handle cases where WORDLIST is false or function already processed
 		if [[ $WORDLIST == false ]]; then
@@ -4085,7 +3969,6 @@ function password_dict() {
 		word="${domain%%.*}"
 
 		# Run pydictor.py with specified parameters
-		printf "${yellow}\n[$(date +'%Y-%m-%d %H:%M:%S')] Running: pydictor.py for Password Dictionary Generation${reset}\n\n"
 		python3 "${tools}/pydictor/pydictor.py" -extend "$word" --leet 0 1 2 11 21 --len "$PASSWORD_MIN_LENGTH" "$PASSWORD_MAX_LENGTH" -o "$dir/webs/password_dict.txt" 2>>"$LOGFILE" >/dev/null
 		end_func "Results are saved in $domain/webs/password_dict.txt" "${FUNCNAME[0]}"
 
@@ -4834,8 +4717,6 @@ function 4xxbypass() {
 			# Append unique bypassed URLs to the vulns directory
 			if [[ -s "$dir/.tmp/4xxbypass.txt" ]]; then
 				cat "$dir/.tmp/4xxbypass.txt" | anew -q "vulns/4xxbypass.txt"
-			else
-				printf "%b[!] No bypassed URLs found in 4xxbypass.txt.%b\n" "$bred" "$reset"
 			fi
 
 			end_func "Results are saved in vulns/4xxbypass.txt" "${FUNCNAME[0]}"
@@ -4960,8 +4841,6 @@ function smuggling() {
 			# Append unique smuggling results to vulns directory
 			if [[ -s "$dir/.tmp/smuggling.txt" ]]; then
 				cat "$dir/.tmp/smuggling.txt" | grep "EXPL" | anew -q "vulns/prototype_pollution.txt"
-			else
-				printf "%b[!] No smuggling results found in smuggling.txt.%b\n" "$bred" "$reset"
 			fi
 
 			end_func "Results are saved in vulns/smuggling_log.txt and findings in vulns/smuggling/" "${FUNCNAME[0]}"
@@ -5030,8 +4909,6 @@ function webcache() {
 			# Append unique findings to vulns/webcache.txt
 			if [[ -s "$dir/.tmp/webcache.txt" ]]; then
 				cat "$dir/.tmp/webcache.txt" | anew -q "vulns/webcache.txt"
-			else
-				printf "%b[!] No findings found in webcache.txt.%b\n" "$bred" "$reset"
 			fi
 
 			end_func "Results are saved in vulns/webcache.txt" "${FUNCNAME[0]}"
@@ -5106,8 +4983,6 @@ function fuzzparams() {
 			# Append unique results to vulns/fuzzparams.txt
 			if [[ -s ".tmp/fuzzparams.txt" ]]; then
 				cat ".tmp/fuzzparams.txt" | anew -q "vulns/fuzzparams.txt"
-			else
-				printf "%b[!] No results found in fuzzparams.txt.%b\n" "$bred" "$reset"
 			fi
 
 			end_func "Results are saved in vulns/fuzzparams.txt" "${FUNCNAME[0]}"

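The pattern these deletions leave behind is uniform: each step keeps its [[ -s ... ]] guard and simply falls through on empty input, with any command errors still captured by the per-command redirections to $LOGFILE. The surviving shape, in isolation:

    # Run the step only when the input file exists and is non-empty; stay quiet otherwise
    if [[ -s ".tmp/ips_nocdn.txt" ]]; then
        sort ".tmp/ips_nocdn.txt"
    fi
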
From 32c90d1b9231cd2ef0c7ec2cd79a7731fdf36989 Mon Sep 17 00:00:00 2001
From: six2dez <alexis.fernandez@visma.com>
Date: Fri, 22 Nov 2024 10:54:19 +0100
Subject: [PATCH 30/34] Fix mantra and CloudHunter requirements

---
 reconftw.sh      | 47 +++++++++++++++++++----------------------------
 requirements.txt |  4 ++--
 2 files changed, 21 insertions(+), 30 deletions(-)

diff --git a/reconftw.sh b/reconftw.sh
index 10fcce75..c93ff179 100755
--- a/reconftw.sh
+++ b/reconftw.sh
@@ -3505,17 +3505,18 @@ function urlchecks() {
 		fi
 
 		if [[ -s "webs/webs_all.txt" ]]; then
-			if [[ $AXIOM != true ]]; then
-				if [[ $URL_CHECK_PASSIVE == true ]]; then
-					urlfinder -d $domain -o .tmp/url_extract_tmp.txt 2>>"$LOGFILE" >/dev/null
-					if [[ -s $GITHUB_TOKENS ]]; then
-						github-endpoints -q -k -d "$domain" -t "$GITHUB_TOKENS" -o .tmp/github-endpoints.txt 2>>"$LOGFILE" >/dev/null
-						if [[ -s ".tmp/github-endpoints.txt" ]]; then
-							cat .tmp/github-endpoints.txt | anew -q .tmp/url_extract_tmp.txt
-						fi
+
+			if [[ $URL_CHECK_PASSIVE == true ]]; then
+				urlfinder -d $domain -o .tmp/url_extract_tmp.txt 2>>"$LOGFILE" >/dev/null
+				if [[ -s $GITHUB_TOKENS ]]; then
+					github-endpoints -q -k -d "$domain" -t "$GITHUB_TOKENS" -o .tmp/github-endpoints.txt 2>>"$LOGFILE" >/dev/null
+					if [[ -s ".tmp/github-endpoints.txt" ]]; then
+						cat .tmp/github-endpoints.txt | anew -q .tmp/url_extract_tmp.txt
 					fi
 				fi
+			fi
 
+			if [[ $AXIOM != true ]]; then
 				diff_webs=$(diff <(sort -u .tmp/probed_tmp.txt 2>>"$LOGFILE") <(sort -u webs/webs_all.txt 2>>"$LOGFILE") | wc -l)
 				if [[ $diff_webs != "0" ]] || [[ ! -s ".tmp/katana.txt" ]]; then
 					if [[ $URL_CHECK_ACTIVE == true ]]; then
@@ -3527,22 +3528,6 @@ function urlchecks() {
 					fi
 				fi
 			else
-				if [[ $URL_CHECK_PASSIVE == true ]]; then
-					if [[ $DEEP == true ]]; then
-						unfurl -u domains <webs/webs_all.txt >.tmp/waymore_input.txt
-						axiom-scan .tmp/waymore_input.txt -m waymore -o .tmp/url_extract_tmp.txt "$AXIOM_EXTRA_ARGS" 2>>"$LOGFILE" >/dev/null
-					else
-						axiom-scan webs/webs_all.txt -m gau -o .tmp/url_extract_tmp.txt "$AXIOM_EXTRA_ARGS" 2>>"$LOGFILE" >/dev/null
-					fi
-
-					if [[ -s $GITHUB_TOKENS ]]; then
-						github-endpoints -q -k -d "$domain" -t "$GITHUB_TOKENS" -o .tmp/github-endpoints.txt 2>>"$LOGFILE" >/dev/null
-						if [[ -s ".tmp/github-endpoints.txt" ]]; then
-							cat .tmp/github-endpoints.txt | anew -q .tmp/url_extract_tmp.txt
-						fi
-					fi
-				fi
-
 				diff_webs=$(diff <(sort -u .tmp/probed_tmp.txt) <(sort -u webs/webs_all.txt) | wc -l)
 				if [[ $diff_webs != "0" ]] || [[ ! -s ".tmp/katana.txt" ]]; then
 					if [[ $URL_CHECK_ACTIVE == true ]]; then
@@ -3806,22 +3791,27 @@ function jschecks() {
 					-o .tmp/sourcemapper 2>>"$LOGFILE" >/dev/null
 			fi
 
+			find .tmp/sourcemapper/ \( -name "*.js" -o -name "*.ts" \) -type f |
+				jsluice urls | jq -r .url | anew -q .tmp/js_endpoints.txt
+
 			printf "%bRunning: Gathering endpoints 4/6%b\n" "$yellow" "$reset"
 			if [[ -s "js/js_livelinks.txt" ]]; then
 				xnLinkFinder -i js/js_livelinks.txt -sf subdomains/subdomains.txt -d "$XNLINKFINDER_DEPTH" \
 					-o .tmp/js_endpoints.txt 2>>"$LOGFILE" >/dev/null
 			fi
 
-			find .tmp/sourcemapper/ \( -name "*.js" -o -name "*.ts" \) -type f |
-				jsluice urls | jq -r .url | anew -q .tmp/js_endpoints.txt
-
 			if [[ -s ".tmp/js_endpoints.txt" ]]; then
 				sed -i '/^\//!d' .tmp/js_endpoints.txt
 				cat .tmp/js_endpoints.txt | anew -q js/js_endpoints.txt
 			fi
+
 			printf "%bRunning: Gathering secrets 5/6%b\n" "$yellow" "$reset"
 			if [[ -s "js/js_livelinks.txt" ]]; then
-				axiom-scan js/js_livelinks.txt -m mantra -ua "$HEADER" -s -o js/js_secrets.txt "$AXIOM_EXTRA_ARGS" &>/dev/null
+				if [[ $AXIOM != true ]]; then
+					cat js/js_livelinks.txt | mantra -ua "$HEADER" -s -o js/js_secrets.txt 2>>"$LOGFILE" >/dev/null
+				else
+					axiom-scan js/js_livelinks.txt -m mantra -ua "$HEADER" -s -o js/js_secrets.txt "$AXIOM_EXTRA_ARGS" &>/dev/null
+				fi
 				if [[ -s "js/js_secrets.txt" ]]; then
 					trufflehog filesystem js/js_secrets.txt -j 2>/dev/null |
 						jq -c | anew -q js/js_secrets_trufflehog.txt
@@ -3830,6 +3820,7 @@ function jschecks() {
 					sed -r "s/\x1B\[([0-9]{1,3}(;[0-9]{1,2};?)?)?[mGK]//g" -i js/js_secrets.txt
 				fi
 			fi
+
 			printf "%bRunning: Building wordlist 6/6%b\n" "$yellow" "$reset"
 			if [[ -s "js/js_livelinks.txt" ]]; then
 				interlace -tL js/js_livelinks.txt -threads "$INTERLACE_THREADS" \
diff --git a/requirements.txt b/requirements.txt
index 093bb24c..e237b29b 100644
--- a/requirements.txt
+++ b/requirements.txt
@@ -11,7 +11,7 @@ colorclass              # dnsvalidator
 dank                    # regulator
 datetime                # JSA
 datrie                  # regulator
-dnspython               # ip2provider
+dnspython               # multiple
 emailfinder             # Tool
 editdistance            # regulator
 fake-useragent          # fav-up
@@ -34,8 +34,8 @@ tldextract              # dorks_hunter
 tqdm                    # multiple
 ujson                   # multiple
 urllib3                 # multiple
+xmltodict               # CloudHunter
 porch-pirate            # Tool
 p1radup                 # Tool
 jsbeautifier            # Tool
-git+https://github.com/xnl-h4ck3r/waymore.git                 # Tool
 git+https://github.com/xnl-h4ck3r/xnLinkFinder.git            # Tool

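Context for the reordered jschecks hunk: jsluice's urls subcommand emits one JSON object per finding on stdout, which is why the pipeline needs jq -r .url before deduplicating with anew. A minimal sketch (the input file name is hypothetical):

    # Pull URL strings out of a saved JS bundle; anew keeps only previously unseen lines
    jsluice urls app.bundle.js | jq -r .url | anew -q .tmp/js_endpoints.txt
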
From 0a048f3aacd62d9ae132ebe96d4f127662cf46ea Mon Sep 17 00:00:00 2001
From: six2dez <alexis.fernandez@visma.com>
Date: Fri, 22 Nov 2024 11:06:31 +0100
Subject: [PATCH 31/34] fix typo tlsx

---
 reconftw.sh | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/reconftw.sh b/reconftw.sh
index c93ff179..a9da9601 100755
--- a/reconftw.sh
+++ b/reconftw.sh
@@ -1011,7 +1011,7 @@ function sub_tls() {
 		if [[ $DEEP == true ]]; then
 			if [[ $AXIOM != true ]]; then
 				tlsx -san -cn -silent -ro -c "$TLSX_THREADS" \
-					-p "$TLS_PORTS" -q .tmp/subdomains_tlsx.txt <subdomains/subdomains.txt \
+					-p "$TLS_PORTS" -o .tmp/subdomains_tlsx.txt <subdomains/subdomains.txt \
 					2>>"$LOGFILE" >/dev/null
 			else
 				axiom-scan subdomains/subdomains.txt -m tlsx \

From 26c93014f33cca822d9b9f14bfcba6e291b5425c Mon Sep 17 00:00:00 2001
From: six2dez <alexis.fernandez@visma.com>
Date: Fri, 22 Nov 2024 11:09:54 +0100
Subject: [PATCH 32/34] fix typo

---
 reconftw.sh | 1 -
 1 file changed, 1 deletion(-)

diff --git a/reconftw.sh b/reconftw.sh
index a9da9601..2b6c8ead 100755
--- a/reconftw.sh
+++ b/reconftw.sh
@@ -1907,7 +1907,6 @@ function sub_recursive_passive() {
 
 			if [[ -s ".tmp/passive_recursive_tmp.txt" ]]; then
 				cat .tmp/passive_recursive_tmp.txt| anew -q .tmp/passive_recursive.txt
-			else
 			fi
 
 			if [[ -s ".tmp/passive_recursive.txt" ]]; then

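The removed line was more than a typo: bash rejects an else with an empty body, so the leftover from the message cleanup in PATCH 29 was a hard parse error. Minimal reproduction:

    # bash -n fails here with: syntax error near unexpected token `fi'
    if [[ -s file ]]; then
        cat file
    else
    fi
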
From 695b29b4bb1f593870807a96e9ee0c720f676fe1 Mon Sep 17 00:00:00 2001
From: six2dez <alexis.fernandez@visma.com>
Date: Fri, 22 Nov 2024 12:38:10 +0100
Subject: [PATCH 33/34] fix msgs

---
 reconftw.sh | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/reconftw.sh b/reconftw.sh
index 2b6c8ead..86e2725d 100755
--- a/reconftw.sh
+++ b/reconftw.sh
@@ -1602,7 +1602,7 @@ function sub_analytics() {
 			fi
 		fi
 
-		if ! NUMOFLINES=$(anew subdomains/subdomains.txt <.tmp/analytics_subs_resolved.txt 2>/dev/null | sed '/^$/d' | wc -l); then
+		if ! NUMOFLINES=$(anew subdomains/subdomains.txt 2>/dev/null <.tmp/analytics_subs_resolved.txt 2>/dev/null | sed '/^$/d' | wc -l); then
 			printf "%b[!] Failed to count new subdomains.%b\n" "$bred" "$reset"
 			NUMOFLINES=0
 		fi

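For context, anew prints only the lines it actually adds to the target file, so piping its stdout through wc -l yields the count of newly discovered subdomains; the 2>/dev/null added here just keeps any anew error output off the terminal. The counting idiom in isolation:

    # stdout carries only lines not already in subdomains.txt; stderr is discarded
    NUMOFLINES=$(anew subdomains/subdomains.txt <.tmp/analytics_subs_resolved.txt 2>/dev/null | sed '/^$/d' | wc -l)
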
From 221a04950d0f4ead455383b2c1be441e7ac73a4d Mon Sep 17 00:00:00 2001
From: six2dez <alexis.fernandez@visma.com>
Date: Fri, 22 Nov 2024 12:41:42 +0100
Subject: [PATCH 34/34] format options

---
 reconftw.sh | 104 ++++++++++++++++++++++++++--------------------------
 1 file changed, 51 insertions(+), 53 deletions(-)

diff --git a/reconftw.sh b/reconftw.sh
index 86e2725d..7458b874 100755
--- a/reconftw.sh
+++ b/reconftw.sh
@@ -1906,7 +1906,7 @@ function sub_recursive_passive() {
 			fi
 
 			if [[ -s ".tmp/passive_recursive_tmp.txt" ]]; then
-				cat .tmp/passive_recursive_tmp.txt| anew -q .tmp/passive_recursive.txt
+				cat .tmp/passive_recursive_tmp.txt | anew -q .tmp/passive_recursive.txt
 			fi
 
 			if [[ -s ".tmp/passive_recursive.txt" ]]; then
@@ -2742,7 +2742,7 @@ function screenshot() {
 
 		# Combine webs.txt and webs_uncommon_ports.txt into webs_all.txt if it doesn't exist
 		if [[ ! -s "webs/webs_all.txt" ]]; then
-			 cat webs/webs.txt webs/webs_uncommon_ports.txt 2>/dev/null | anew -q webs/webs_all.txt
+			cat webs/webs.txt webs/webs_uncommon_ports.txt 2>/dev/null | anew -q webs/webs_all.txt
 		fi
 
 		# Run nuclei or axiom-scan based on AXIOM flag
@@ -2990,9 +2990,9 @@ function portscan() {
 
 		if [[ -s "hosts/ips.txt" ]]; then
 			# Remove CDN IPs
-			comm -23 <(sort -u hosts/ips.txt) <(cut -d'[' -f1 hosts/cdn_providers.txt | sed 's/[[:space:]]*$//' | sort -u) \
-				| grep -aEiv "^(127|10|169\.254|172\.1[6-9]|172\.2[0-9]|172\.3[0-1]|192\.168)\." | grep -oE '\b([0-9]{1,3}\.){3}[0-9]{1,3}\b' \
-				| sort -u | anew -q .tmp/ips_nocdn.txt
+			comm -23 <(sort -u hosts/ips.txt) <(cut -d'[' -f1 hosts/cdn_providers.txt | sed 's/[[:space:]]*$//' | sort -u) |
+				grep -aEiv "^(127|10|169\.254|172\.1[6-9]|172\.2[0-9]|172\.3[0-1]|192\.168)\." | grep -oE '\b([0-9]{1,3}\.){3}[0-9]{1,3}\b' |
+				sort -u | anew -q .tmp/ips_nocdn.txt
 		fi
 
 		# Display resolved IPs without CDN
@@ -3043,7 +3043,7 @@ function portscan() {
 			else
 				if [[ -s ".tmp/ips_nocdn.txt" ]]; then
 					axiom-scan .tmp/ips_nocdn.txt -m nmapx $PORTSCAN_ACTIVE_OPTIONS \
-					-oA hosts/portscan_active $AXIOM_EXTRA_ARGS 2>>"$LOGFILE" >/dev/null
+						-oA hosts/portscan_active $AXIOM_EXTRA_ARGS 2>>"$LOGFILE" >/dev/null
 				fi
 			fi
 		fi
@@ -3327,9 +3327,9 @@ function fuzz() {
 				axiom-exec "wget -q -O - ${fuzzing_remote_list} > /home/op/lists/seclists/Discovery/Web-Content/big.txt" &>/dev/null
 				axiom-scan webs/webs_all.txt -m ffuf_base -H "${HEADER}" $FFUF_FLAGS -s -maxtime $FFUF_MAXTIME -o $dir/.tmp/ffuf-content.json $AXIOM_EXTRA_ARGS 2>>"$LOGFILE" >/dev/null
 				pushd "${tools}/ffufPostprocessing" >/dev/null || {
-						echo "Failed to cd directory in ${FUNCNAME[0]} @ line ${LINENO}"
-					}
-					[ -s "$dir/.tmp/ffuf-content.json" ] && ./ffufPostprocessing -result-file $dir/.tmp/ffuf-content.json -overwrite-result-file 2>>"$LOGFILE" >/dev/null
+					echo "Failed to cd directory in ${FUNCNAME[0]} @ line ${LINENO}"
+				}
+				[ -s "$dir/.tmp/ffuf-content.json" ] && ./ffufPostprocessing -result-file $dir/.tmp/ffuf-content.json -overwrite-result-file 2>>"$LOGFILE" >/dev/null
 				popd >/dev/null || {
 					echo "Failed to popd in ${FUNCNAME[0]} @ line ${LINENO}"
 				}
@@ -4105,8 +4105,8 @@ function xss() {
 			# Run Dalfox with Katana output
 			if [[ -s ".tmp/xss_reflected.txt" ]]; then
 				printf "${yellow}\n[$(date +'%Y-%m-%d %H:%M:%S')] Running: Dalfox with Katana${reset}\n\n"
-				dalfox pipe --silence --no-color --no-spinner --only-poc r --ignore-return 302,404,403 --skip-bav $OPTIONS -d "$DEPTH" <".tmp/xss_reflected.txt" 2>>"$LOGFILE" \
-				| anew -q "vulns/xss.txt"
+				dalfox pipe --silence --no-color --no-spinner --only-poc r --ignore-return 302,404,403 --skip-bav $OPTIONS -d "$DEPTH" <".tmp/xss_reflected.txt" 2>>"$LOGFILE" |
+					anew -q "vulns/xss.txt"
 			fi
 		else
 			# Using Axiom
@@ -4253,7 +4253,6 @@ function ssrf_checks() {
 		# Handle COLLAB_SERVER configuration
 		if [[ -z $COLLAB_SERVER ]]; then
 			interactsh-client &>.tmp/ssrf_callback.txt &
-			INTERACTSH_PID=$!
 			sleep 2
 
 			# Extract FFUFHASH from interactsh_callback.txt
@@ -4502,7 +4501,7 @@ function sqli() {
 				if [[ $SQLMAP == true ]]; then
 					printf "${yellow}\n[$(date +'%Y-%m-%d %H:%M:%S')] Running: SQLMap for SQLi Checks${reset}\n\n"
 					python3 "${tools}/sqlmap/sqlmap.py" -m ".tmp/tmp_sqli.txt" -b -o --smart \
-					--batch --disable-coloring --random-agent --output-dir="vulns/sqlmap" 2>>"$LOGFILE" >/dev/null
+						--batch --disable-coloring --random-agent --output-dir="vulns/sqlmap" 2>>"$LOGFILE" >/dev/null
 				fi
 
 				# Check if GHAURI is enabled and run Ghauri
@@ -5061,45 +5060,44 @@ function remove_big_files() {
 }
 
 function notification() {
-    if [[ -n $1 ]] && [[ -n $2 ]]; then
-        if [[ $NOTIFICATION == true ]]; then
-            NOTIFY="notify -silent"
-        else
-            NOTIFY=""
-        fi
-        if [[ -z $3 ]]; then
-            current_date=$(date +'%Y-%m-%d %H:%M:%S')
-        else
-            current_date="$3"
-        fi
-
-        case $2 in
-        info)
-            text="\n${bblue}[$current_date] ${1} ${reset}"
-            ;;
-        warn)
-            text="\n${yellow}[$current_date] ${1} ${reset}"
-            ;;
-        error)
-            text="\n${bred}[$current_date] ${1} ${reset}"
-            ;;
-        good)
-            text="\n${bgreen}[$current_date] ${1} ${reset}"
-            ;;
-        esac
-
-        # Print to terminal
-        printf "${text}\n"
-
-        # Send to notify if notifications are enabled
-        if [[ -n $NOTIFY ]]; then
-            # Remove color codes for the notification
-            clean_text=$(echo -e "${text} - ${domain}" | sed 's/\x1B\[[0-9;]*[JKmsu]//g')
-            echo -e "${clean_text}" | $NOTIFY >/dev/null 2>&1
-        fi
-    fi
-}
+	if [[ -n $1 ]] && [[ -n $2 ]]; then
+		if [[ $NOTIFICATION == true ]]; then
+			NOTIFY="notify -silent"
+		else
+			NOTIFY=""
+		fi
+		if [[ -z $3 ]]; then
+			current_date=$(date +'%Y-%m-%d %H:%M:%S')
+		else
+			current_date="$3"
+		fi
 
+		case $2 in
+		info)
+			text="\n${bblue}[$current_date] ${1} ${reset}"
+			;;
+		warn)
+			text="\n${yellow}[$current_date] ${1} ${reset}"
+			;;
+		error)
+			text="\n${bred}[$current_date] ${1} ${reset}"
+			;;
+		good)
+			text="\n${bgreen}[$current_date] ${1} ${reset}"
+			;;
+		esac
+
+		# Print to terminal
+		printf "${text}\n"
+
+		# Send to notify if notifications are enabled
+		if [[ -n $NOTIFY ]]; then
+			# Remove color codes for the notification
+			clean_text=$(echo -e "${text} - ${domain}" | sed 's/\x1B\[[0-9;]*[JKmsu]//g')
+			echo -e "${clean_text}" | $NOTIFY >/dev/null 2>&1
+		fi
+	fi
+}
 
 function transfer {
 	if [[ $# -eq 0 ]]; then
@@ -5322,7 +5320,7 @@ function start() {
 	global_start=$(date +%s)
 
 	printf "\n${bgreen}#######################################################################${reset}"
-	notification "Recon succesfully started on ${domain}" good $(date +'%Y-%m-%d %H:%M:%S')
+	notification "Recon succesfully started on ${domain}" "good" "$(date +'%Y-%m-%d %H:%M:%S')"
 	[ "$SOFT_NOTIFICATION" = true ] && echo "$(date +'%Y-%m-%d %H:%M:%S') Recon succesfully started on ${domain}" | notify -silent
 	printf "${bgreen}#######################################################################${reset}\n"
 	if [[ $upgrade_before_running == true ]]; then
@@ -5414,7 +5412,7 @@ function end() {
 	global_end=$(date +%s)
 	getElapsedTime $global_start $global_end
 	printf "${bgreen}#######################################################################${reset}\n"
-	notification "Finished Recon on: ${domain} under ${finaldir} in: ${runtime}" good $(date +'%Y-%m-%d %H:%M:%S')
+	notification "Finished Recon on: ${domain} under ${finaldir} in: ${runtime}" good "$(date +'%Y-%m-%d %H:%M:%S')"
 	[ "$SOFT_NOTIFICATION" = true ] && echo "[$(date +'%Y-%m-%d %H:%M:%S')] Finished Recon on: ${domain} under ${finaldir} in: ${runtime}" | notify -silent
 	printf "${bgreen}#######################################################################${reset}\n"
 	# Separator for clearer messages in telegram_Bot