
Commit

Merge pull request #633 from six2dez/dev
Fix params
six2dez authored Jan 8, 2023
2 parents 14645aa + dadc0d8 commit 1cf1736
Showing 1 changed file with 6 additions and 6 deletions.
12 changes: 6 additions & 6 deletions reconftw.cfg
@@ -63,7 +63,7 @@ SUBSCRAPING=true # Subdomains extraction from web crawling
SUBPERMUTE=true # DNS permutations
SUBREGEXPERMUTE=true # Permutations by regex analysis
PERMUTATIONS_OPTION=gotator # The alternative is "ripgen" (faster, not deeper)
GOTATOR_FLAGS="-depth 1 -numbers 3 -mindup -adv -md" # Flags for gotator
GOTATOR_FLAGS=" -depth 1 -numbers 3 -mindup -adv -md" # Flags for gotator
SUBTAKEOVER=false # Check subdomain takeovers, false by default because nuclei already checks this
SUB_RECURSIVE_PASSIVE=false # Uses a lot of API keys queries
DEEP_RECURSIVE_PASSIVE=10 # Number of top subdomains for recursion
@@ -95,8 +95,8 @@ CDN_IP=true # Check which IPs belong to CDN
WAF_DETECTION=true # Detect WAFs
NUCLEICHECK=true # Enable or disable nuclei
NUCLEI_SEVERITY="info,low,medium,high,critical" # Set template severity
NUCLEI_FLAGS="-silent -t ~/nuclei-templates/ -retries 2" # Additional nuclei extra flags, don't set the severity here but the exclusions like "-etags openssh"
NUCLEI_FLAGS_JS="-silent -tags exposure,token -severity info,low,medium,high,critical" # Additional nuclei extra flags for js secrets
NUCLEI_FLAGS=" -silent -t ~/nuclei-templates/ -retries 2" # Additional nuclei extra flags, don't set the severity here but the exclusions like " -etags openssh"
NUCLEI_FLAGS_JS=" -silent -tags exposure,token -severity info,low,medium,high,critical" # Additional nuclei extra flags for js secrets
URL_CHECK=true # Enable or disable URL collection
URL_CHECK_PASSIVE=true # Search for urls, passive methods from Archive, OTX, CommonCrawl, etc
URL_CHECK_ACTIVE=true # Search for urls by crawling the websites
@@ -141,8 +141,8 @@ REMOVELOG=false # Delete logs after execution
PROXY=false # Send to proxy the websites found
SENDZIPNOTIFY=false # Send to zip the results (over notify)
PRESERVE=true # set to true to avoid deleting the .called_fn files on really large scans
FFUF_FLAGS="-mc all -fc 404 -ac -sf" # Ffuf flags
HTTPX_FLAGS="-follow-redirects -random-agent -status-code -silent -title -web-server -tech-detect -location" # Httpx flags for simple web probing
FFUF_FLAGS=" -mc all -fc 404 -ac -sf" # Ffuf flags
HTTPX_FLAGS=" -follow-redirects -random-agent -status-code -silent -title -web-server -tech-detect -location" # Httpx flags for simple web probing

# HTTP options
HEADER="User-Agent: Mozilla/5.0 (X11; Linux x86_64; rv:72.0) Gecko/20100101 Firefox/72.0" # Default header
@@ -204,7 +204,7 @@ AXIOM_FLEET_SHUTDOWN=true # Enable or disable deleting the fleet after the execution
# This is a script on your reconftw host that might prep things your way...
#AXIOM_POST_START="~/Tools/axiom_config.sh" # Useful to send your config files to the fleet
AXIOM_EXTRA_ARGS="" # Leave empty if you don't want to add extra arguments
#AXIOM_EXTRA_ARGS="--rm-logs" # Example
#AXIOM_EXTRA_ARGS=" --rm-logs" # Example

# BBRF
BBRF_CONNECTION=false
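Every change in this commit follows the same pattern: a leading space is prepended to each flags string. reconftw interpolates these variables into command lines, so the leading space keeps the flags separated from the preceding token even when the variable is concatenated directly, with no space of its own. A minimal bash sketch of the failure mode this guards against (the command string and variable names here are illustrative, not reconftw's actual code):

#!/usr/bin/env bash
# Hypothetical illustration: concatenating a flags variable directly
# after another token fuses them unless the variable starts with a space.

EXTRA_ARGS="--rm-logs"                   # old style: no leading space
echo "axiom-scan targets.txt$EXTRA_ARGS" # prints: axiom-scan targets.txt--rm-logs

EXTRA_ARGS=" --rm-logs"                  # new style, as in this commit
echo "axiom-scan targets.txt$EXTRA_ARGS" # prints: axiom-scan targets.txt --rm-logs

Note that the empty default AXIOM_EXTRA_ARGS="" needs no change: concatenating an empty string leaves the command line as it was.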
