diff --git a/reconftw.sh b/reconftw.sh
index f1862db6..38fabf7d 100755
--- a/reconftw.sh
+++ b/reconftw.sh
@@ -691,13 +691,10 @@ function s3buckets(){
 	if { [ ! -f "$called_fn_dir/.${FUNCNAME[0]}" ] || [ "$DIFF" = true ]; } && [ "$S3BUCKETS" = true ] && ! [[ $domain =~ ^[0-9]+\.[0-9]+\.[0-9]+\.[0-9] ]]; then
 		start_func ${FUNCNAME[0]} "AWS S3 buckets search"
 		# S3Scanner
-		[ ! -s ".tmp/webs_all.txt" ] && cat webs/webs.txt webs/webs_uncommon_ports.txt 2>/dev/null | anew -q .tmp/webs_all.txt
 		if [ ! "$AXIOM" = true ]; then
 			[ -s "subdomains/subdomains.txt" ] && s3scanner scan -f subdomains/subdomains.txt | anew -q .tmp/s3buckets.txt
-			[ -s ".tmp/webs_all.txt" ] && s3scanner scan -f .tmp/webs_all.txt | anew -q .tmp/s3buckets.txt
 		else
 			axiom-scan subdomains/subdomains.txt -m s3scanner -o .tmp/s3buckets_tmp.txt $AXIOM_EXTRA_ARGS 2>>"$LOGFILE" &>/dev/null
-			axiom-scan .tmp/webs_all.txt -m s3scanner -o .tmp/s3buckets_tmp2.txt $AXIOM_EXTRA_ARGS 2>>"$LOGFILE" &>/dev/null
 			[ -s ".tmp/s3buckets_tmp.txt" ] && cat .tmp/s3buckets_tmp.txt .tmp/s3buckets_tmp2.txt 2>>"$LOGFILE" | anew -q .tmp/s3buckets.txt && sed -i '/^$/d' .tmp/s3buckets.txt
 		fi
 		# Cloudenum
@@ -708,7 +705,7 @@ function s3buckets(){
 		if [ "$NUMOFLINES1" -gt 0 ]; then
 			notification "${NUMOFLINES1} new cloud assets found" info
 		fi
-		NUMOFLINES2=$(cat .tmp/s3buckets.txt 2>>"$LOGFILE" | grep -iv "not_exist" | grep -iv "Warning:" | grep -iv "invalid_name" | grep -iv "^http" | awk 'NF' | anew subdomains/s3buckets.txt | sed '/^$/d' | wc -l)
+		NUMOFLINES2=$(cat .tmp/s3buckets.txt 2>>"$LOGFILE" | grep -aiv "not_exist" | grep -aiv "Warning:" | grep -aiv "invalid_name" | grep -aiv "^http" | awk 'NF' | anew subdomains/s3buckets.txt | sed '/^$/d' | wc -l)
 		if [ "$NUMOFLINES2" -gt 0 ]; then
 			notification "${NUMOFLINES2} new S3 buckets found" info
 		fi
@@ -1059,7 +1056,7 @@ function fuzz(){
 		else
 			axiom-exec 'wget -O /home/op/lists/fuzz_wordlist.txt https://raw.githubusercontent.com/six2dez/OneListForAll/main/onelistforallmicro.txt' &>/dev/null
 			axiom-scan .tmp/webs_all.txt -m ffuf -w /home/op/lists/fuzz_wordlist.txt -H \"${HEADER}\" $FFUF_FLAGS -maxtime $FFUF_MAXTIME -of json -o $dir/.tmp/fuzzing/ffuf-content.json $AXIOM_EXTRA_ARGS 2>>"$LOGFILE" &>/dev/null
-			[ -s "$dir/.tmp/fuzzing/ffuf-content.json" ] && $dir/.tmp/fuzzing/ffuf-content.json | jq -r 'try .results[] | "\(.status) \(.length) \(.url)"' | sort | $dir/.tmp/fuzzing/ffuf-content.tmp
+			[ -s "$dir/.tmp/fuzzing/ffuf-content.json" ] && cat $dir/.tmp/fuzzing/ffuf-content.json | jq -r 'try .results[] | "\(.status) \(.length) \(.url)"' | sort > $dir/.tmp/fuzzing/ffuf-content.tmp
 		fi
 		for sub in $(cat .tmp/webs_all.txt); do
 			sub_out=$(echo $sub | sed -e 's|^[^/]*//||' -e 's|/.*$||')
 			grep "$sub" $dir/.tmp/fuzzing/ffuf-content.tmp | anew -q $dir/fuzzing/${sub_out}.txt