#############################################
# reconFTW config file #
#############################################
# General values
tools=$HOME/Tools # Path to installed tools
if [[ -z "${SCRIPTPATH:-}" ]]; then
	if [[ -n "${BASH_SOURCE[0]:-}" ]]; then
		SCRIPTPATH="$( cd "$(dirname "${BASH_SOURCE[0]}")" >/dev/null 2>&1 ; pwd -P )" # Get current script's path
	else
		SCRIPTPATH="$( cd "$(dirname "$0")" >/dev/null 2>&1 ; pwd -P )" # Get current script's path
	fi
fi
_detected_shell="${SHELL:-/bin/bash}"
profile_shell=".$(basename "${_detected_shell}")rc" # Get current shell profile
if git rev-parse --is-inside-work-tree >/dev/null 2>&1; then
	reconftw_version="$(git rev-parse --abbrev-ref HEAD)-$(git describe --tags 2>/dev/null || git rev-parse --short HEAD)"
else
	reconftw_version="standalone"
fi # Fetch current reconftw version
DATA_DIR="${SCRIPTPATH}/data"
WORDLISTS_DIR="${DATA_DIR}/wordlists"
PATTERNS_DIR="${DATA_DIR}/patterns"
generate_resolvers=false # Generate custom resolvers with dnsvalidator
update_resolvers=true # Fetch and rewrite resolvers from trickest/resolvers before DNS resolution
resolvers_url="https://raw.githubusercontent.com/trickest/resolvers/main/resolvers.txt"
resolvers_trusted_url="https://gist.githubusercontent.com/six2dez/ae9ed7e5c786461868abd3f2344401b6/raw/trusted_resolvers.txt"
RESOLVER_DOWNLOAD_CONNECT_TIMEOUT=10 # Seconds to wait for resolver download TCP connection
RESOLVER_DOWNLOAD_MAX_TIME=120 # Hard cap in seconds for resolver downloads
RESOLVER_DOWNLOAD_RETRY=2 # Retry count for resolver downloads
RESOLVER_DOWNLOAD_RETRY_DELAY=2 # Delay in seconds between resolver download retries
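# The four knobs above map naturally onto curl's timeout/retry flags; a hedged sketch
# of a resolver download (assumption: reconFTW's actual invocation may differ):
#   curl -fsSL --connect-timeout "$RESOLVER_DOWNLOAD_CONNECT_TIMEOUT" \
#        --max-time "$RESOLVER_DOWNLOAD_MAX_TIME" \
#        --retry "$RESOLVER_DOWNLOAD_RETRY" --retry-delay "$RESOLVER_DOWNLOAD_RETRY_DELAY" \
#        "$resolvers_url" -o "${tools}/resolvers.txt"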
fuzzing_remote_list="https://raw.githubusercontent.com/six2dez/OneListForAll/main/onelistforallmicro.txt" # Wordlist sent to axiom (if used) for fuzzing
proxy_url="http://127.0.0.1:8080/" # Proxy URL
install_golang=true # Set it to false if you already have Golang configured and ready
upgrade_tools=true
upgrade_before_running=false # Upgrade tools before running
#dir_output=/custom/output/path
SHOW_COMMANDS=false # Set true to log every executed command to the per-target log (verbose; may include sensitive data)
MIN_DISK_SPACE_GB=2 # Minimum required disk space in GB before starting reconnaissance (0 to disable check)
# Incremental mode configuration
INCREMENTAL_MODE=false # Only scan new findings since last run (use --incremental flag to enable)
MONITOR_MODE=false # Continuous monitor mode (enabled by --monitor)
MONITOR_INTERVAL_MIN=60 # Minutes between monitoring cycles
MONITOR_MAX_CYCLES=0 # 0 = run forever until interrupted
MONITOR_MIN_SEVERITY=high # Monitor nuclei deltas at/above: critical|high|medium|low|info
ALERT_SUPPRESSION=true # Suppress repeated monitor alerts using fingerprint history
ALERT_SEEN_FILE=".incremental/alerts_seen.hashes" # Fingerprint store for alert suppression
# Adaptive rate limiting configuration
ADAPTIVE_RATE_LIMIT=false # Automatically adjust rate limits when encountering 429/503 errors (use --adaptive-rate flag to enable)
MIN_RATE_LIMIT=10 # Minimum rate limit (requests per second)
MAX_RATE_LIMIT=500 # Maximum rate limit (requests per second)
RATE_LIMIT_BACKOFF_FACTOR=0.5 # Multiply rate by this when errors occur (0.5 = half speed)
RATE_LIMIT_INCREASE_FACTOR=1.2 # Multiply rate by this on success (1.2 = 20% faster)
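# Worked example with the defaults above: starting at 100 req/s, a 429/503 drops the
# rate to 100*0.5 = 50; each subsequent success raises it by 20% (50 -> 60 -> 72 ...),
# always clamped to the [MIN_RATE_LIMIT, MAX_RATE_LIMIT] range.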
# Cache configuration
CACHE_MAX_AGE_DAYS=30 # Maximum age in days for cached wordlists/resolvers (30 = 1 month)
CACHE_MAX_AGE_DAYS_RESOLVERS=7 # Resolver cache TTL
CACHE_MAX_AGE_DAYS_WORDLISTS=30 # Wordlist cache TTL
CACHE_MAX_AGE_DAYS_TOOLS=14 # Tool metadata cache TTL
CACHE_REFRESH=false # Force-refresh cached resources (can be enabled with --refresh-cache)
# Log rotation
MAX_LOG_FILES=10 # Maximum number of log files to keep per target
MAX_LOG_AGE_DAYS=30 # Delete log files older than this many days
# Structured logging configuration (JSON format)
STRUCTURED_LOGGING=false # Enable JSON structured logging for advanced log analysis
# Golang Vars (Comment or change on your own)
export GOROOT="${GOROOT:-/usr/local/go}"
export GOPATH="${GOPATH:-$HOME/go}"
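# Prepend the Go, GOROOT and ~/.local/bin directories to PATH idempotently (the case guards skip paths already present)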
case ":${PATH}:" in
*":$GOPATH/bin:"*) ;;
*) PATH="$GOPATH/bin:$PATH" ;;
esac
case ":${PATH}:" in
*":$GOROOT/bin:"*) ;;
*) PATH="$GOROOT/bin:$PATH" ;;
esac
case ":${PATH}:" in
*":$HOME/.local/bin:"*) ;;
*) PATH="$HOME/.local/bin:$PATH" ;;
esac
export PATH
# Rust Vars (Comment or change on your own)
export PATH="$HOME/.cargo/bin:$PATH"
# Tools config files
#NOTIFY_CONFIG=~/.config/notify/provider-config.yaml # No need to define
GITHUB_TOKENS=${tools}/.github_tokens
GITLAB_TOKENS=${tools}/.gitlab_tokens
#CUSTOM_CONFIG=custom_config_path.txt # If you use a custom config file, uncomment this line and set your file's path
# APIs/TOKENS - Set via environment variables (preferred) or uncomment and edit below.
# Environment variables take precedence if set.
SHODAN_API_KEY="${SHODAN_API_KEY:-}"
WHOISXML_API="${WHOISXML_API:-}"
PDCP_API_KEY="${PDCP_API_KEY:-}"
XSS_SERVER="${XSS_SERVER:-}"
COLLAB_SERVER="${COLLAB_SERVER:-}"
slack_channel="${slack_channel:-}"
slack_auth="${slack_auth:-}"
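# Example: export SHODAN_API_KEY=... in your shell before launching reconFTW; because
# of the ${VAR:-} defaults above, the environment value takes precedence over this file.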
# For additional secrets, create a secrets.cfg file (gitignored) and it will be auto-sourced
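# A minimal secrets.cfg sketch (placeholder values only; the file is auto-sourced as noted above):
#   SHODAN_API_KEY="your-shodan-key"
#   WHOISXML_API="your-whoisxml-key"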
# File descriptors
DEBUG_STD="&>/dev/null" # Suppresses stdout and stderr (used by the installer)
DEBUG_ERROR="2>/dev/null" # Suppresses stderr only (used by the installer)
# Osint
OSINT=true # Enable or disable the whole OSINT module
GOOGLE_DORKS=true
GITHUB_DORKS=true
GITHUB_REPOS=true
METADATA=true # Fetch metadata from indexed office documents
EMAILS=true # Fetch emails from different sites
DOMAIN_INFO=true # whois info
IP_INFO=true # Reverse IP search, geolocation and whois
API_LEAKS=true # Check for API leaks
API_LEAKS_POSTLEAKS=true # Enhance API leaks with postleaksNg
POSTLEAKS_THREADS=10 # Threads for postleaksNg
POSTLEAKS_INCLUDE="" # Optional include filter for postleaksNg URL results
POSTLEAKS_EXCLUDE="" # Optional exclude filter for postleaksNg URL results
THIRD_PARTIES=true # Check for 3rd parties misconfigs
SPOOF=true # Check spoofable domains
MAIL_HYGIENE=true # Check DMARC/SPF records
CLOUD_ENUM=true # Enumerate cloud storage across providers with cloud_enum
GITHUB_LEAKS=true # Search for leaked secrets across all of GitHub with ghleaks
GHLEAKS_THREADS=5 # Concurrent download threads for ghleaks
SECRETS_ENGINE="hybrid" # gitleaks|titus|noseyparker|hybrid
SECRETS_SCAN_GIT_HISTORY=true # Include git history scans when supported by selected secrets engine
SECRETS_VALIDATE=false # Validate secrets against provider APIs when supported (currently titus)
GITHUB_ACTIONS_AUDIT=true # Audit GitHub Actions artifacts/workflows with gato
GATO_INCLUDE_ALL_ARTIFACT_SECRETS=true # Include noisy artifact secret matches in gato results
# Subdomains
SUBDOMAINS_GENERAL=true # Enable or disable the whole Subdomains module
SUBPASSIVE=true # Passive subdomains search
SUBCRT=true # crtsh search
CTR_LIMIT=999999 # Limit the number of results
ASN_ENUM=true # ASN enumeration with asnmap for CIDR discovery
DNS_TIME_FENCE_DAYS=0 # Filter crt.sh results to last N days (0 = disabled, recommended: 90)
DEEP_WILDCARD_FILTER=false # Enable deep wildcard detection to filter wildcards at all subdomain levels
EXCLUDE_SENSITIVE=false # Exclude sensitive domains (gov, mil, edu, banks) - see config/sensitive_domains.txt
SUBNOERROR=false # Check DNS NOERROR responses and bruteforce them
SUBANALYTICS=true # Google Analytics search
SUBBRUTE=true # DNS bruteforcing
SUBSCRAPING=true # Subdomains extraction from web crawling
SUBPERMUTE=true # DNS permutations
SUBIAPERMUTE=true # Permutations by AI analysis
SUBREGEXPERMUTE=true # Permutations by regex analysis
GOTATOR_FLAGS=" -depth 1 -numbers 3 -mindup -adv -md" # Flags for gotator
PERMUTATIONS_WORDLIST_MODE=auto # auto|full|short (auto: short if subs > threshold, full if DEEP)
PERMUTATIONS_SHORT_THRESHOLD=100 # Use short wordlist when subdomain count exceeds this
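# Example: with PERMUTATIONS_WORDLIST_MODE=auto and 150 resolved subdomains (above the
# threshold of 100), the short permutations wordlist is used; DEEP mode forces the full list.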
SUBTAKEOVER=true # Check subdomain takeovers (nuclei also covers this, so it can be safely disabled)
SUB_RECURSIVE_PASSIVE=false # Consumes a lot of API-key queries
DEEP_RECURSIVE_PASSIVE=10 # Number of top subdomains for recursion
SUB_RECURSIVE_BRUTE=false # Needs big disk space and time to resolve
ZONETRANSFER=true # Check zone transfer
S3BUCKETS=true # Check S3 buckets misconfigs
REVERSE_IP=false # Check reverse IP subdomain search (set to true if your target is a CIDR/IP)
TLS_PORTS=$(cat "${SCRIPTPATH}/config/tls_ports.txt" 2>/dev/null | tr -d '\n') # TLS ports for certificate grabbing
PTR_SWEEP=false # Reverse PTR sweep over ASN CIDRs (can be slow on large ranges)
PTR_SWEEP_MAX_IPS=50000 # Safety limit: maximum IPs to expand from CIDRs for PTR sweep
SRV_ENUM=true # SRV record enumeration for service discovery (~25 DNS queries)
NS_DELEGATION=true # Check NS delegation and attempt AXFR on delegated subdomains
INSCOPE=false # Uses inscope tool to filter the scope, requires .scope file in reconftw folder
# Web detection
WEBPROBEFULL=true # Unified web probing over configured ports
WEBSCREENSHOT=true # Web screenshotting
VIRTUALHOSTS=false # Check virtualhosts by fuzzing HOST header
TLS_IP_PIVOTS=false # Harvest TLS certificates from discovered IPs for new subdomains
TLS_IP_SNI_BATCH_SIZE=1000 # Max IPs to SNI-probe in a single batch
TLS_IP_DELTA_PROBE=true # Run targeted httpx on newly discovered subs from TLS pivoting
UNCOMMON_PORTS_WEB=$(cat "${SCRIPTPATH}/config/uncommon_ports_web.txt" 2>/dev/null | tr -d '\n') # Uncommon web ports for full probing
WEBPROBE_PORTS="80,443,${UNCOMMON_PORTS_WEB}" # Ports used by webprobe_full
# Host
FAVIRECON=true # Favicon-based technology recon over discovered web targets
FAVIRECON_CONCURRENCY=50 # favirecon concurrency
FAVIRECON_TIMEOUT=10 # favirecon timeout seconds
FAVIRECON_RATE_LIMIT=0 # favirecon rate limit per second (0 = unlimited)
FAVIRECON_PROXY="" # Optional proxy URL for favirecon
PORTSCANNER=true # Enable or disable the whole Port scanner module
GEO_INFO=true # Fetch geolocation info
PORTSCAN_PASSIVE=true # Port scanner with Shodan
PORTSCAN_ACTIVE=true # Port scanner with nmap
PORTSCAN_ACTIVE_OPTIONS="--top-ports 200 -sV -n -Pn --open --max-retries 2"
PORTSCAN_DEEP_OPTIONS="--top-ports 1000 -sV -n -Pn --open --max-retries 2 --script vulners"
PORTSCAN_STRATEGY=legacy # legacy|naabu_nmap
NAABU_ENABLE=true # Use naabu pre-discovery when PORTSCAN_STRATEGY=naabu_nmap
NAABU_RATE=1000 # naabu requests per second
NAABU_PORTS="--top-ports 1000" # e.g. "--top-ports 1000" or "-p -"
SERVICE_FINGERPRINT=true # Fingerprint exposed services with nerva after active scan
SERVICE_FINGERPRINT_ENGINE="nerva" # nerva
SERVICE_FINGERPRINT_TIMEOUT_MS=2000 # nerva per-target timeout (ms)
PORTSCAN_UDP=false # Optional UDP scan (requires privileges)
PORTSCAN_UDP_OPTIONS="--top-ports 20 -sU -sV -n -Pn --open"
CDN_IP=true # Check which IPs belong to a CDN
CDN_BYPASS=true # Try origin IP discovery for CDN-fronted assets with hakoriginfinder
# Web analysis
WAF_DETECTION=true # Detect WAFs
NUCLEICHECK=true # Enable or disable nuclei
NUCLEI_TEMPLATES_PATH="$HOME/nuclei-templates" # Set nuclei templates path
NUCLEI_SEVERITY="info,low,medium,high,critical" # Set template severities
NUCLEI_EXTRA_ARGS="" # Additional nuclei extra flags, don't set the severity here but the exclusions like " -etags openssh"
#NUCLEI_EXTRA_ARGS="-etags openssh,ssl -eid node-express-dev-env,keycloak-xss,CVE-2023-24044,CVE-2021-20323,header-sql,header-reflection" # Additional nuclei extra flags, don't set the severity here but the exclusions like " -etags openssh"
URL_CHECK=true # Enable or disable URL collection
URL_CHECK_PASSIVE=true # Search for URLs using passive sources (Archive, OTX, CommonCrawl, etc.)
URL_CHECK_ACTIVE=true # Search for urls by crawling the websites
WAYMORE_TIMEOUT=30m # Timeout for waymore passive URL collection
WAYMORE_LIMIT=5000 # Optional URL collection limit for waymore (0 means tool default behavior)
URL_GF=true # URL pattern classification
URL_EXT=true # Returns a list of files divided by extension
JSCHECKS=true # JS analysis
JS_SUB_EXTRACT=true # Extract hostnames from JS/crawl outputs and resolve new subdomains
WELLKNOWN_PIVOTS=false # Check well-known metadata endpoints (security.txt, openid, oauth) for hostnames
WELLKNOWN_MAX_TARGETS=200 # Max web targets to check for well-known endpoints
# Python to run getjswords.py (set to venv python if pip is system-restricted)
# Example: GETJSWORDS_PYTHON="$HOME/Tools/reconftw_venv/bin/python3"
GETJSWORDS_PYTHON=python3
# Optional: venv to activate for getjswords (takes precedence over GETJSWORDS_PYTHON)
# Example: GETJSWORDS_VENV="$SCRIPTPATH/.venv"
GETJSWORDS_VENV="$SCRIPTPATH/.venv"
FUZZ=true # Web fuzzing
FUZZ_RECURSION_DEPTH=2 # ffuf recursion depth used in DEEP mode
IIS_SHORTNAME=true # Check for IIS short filename (8.3) disclosure
CMS_SCANNER=true # CMS scanner
WORDLIST=true # Wordlist generation
ROBOTSWORDLIST=true # Check historic disallow entries on the Wayback Machine (DEEP mode only)
PASSWORD_DICT=true # Generate password dictionary
PASSWORD_DICT_ENGINE=cewler # cewler|pydictor
PASSWORD_DICT_MAX_TARGETS=50 # Max web targets processed by cewler when not DEEP
PASSWORD_DICT_CEWLER_DEPTH=1 # cewler crawl depth
PASSWORD_DICT_CEWLER_TIMEOUT=45 # timeout (seconds) per target when cewler runs
PASSWORD_MIN_LENGTH=5 # Min password length
PASSWORD_MAX_LENGTH=14 # Max password length
KATANA_HEADLESS_PROFILE=off # off|smart|full
KATANA_HEADLESS_SMART_LIMIT=15 # smart mode enables headless only for small target sets
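# Example: with KATANA_HEADLESS_PROFILE=smart and 10 live webs (at or below the limit of
# 15), headless crawling is enabled; larger target sets fall back to non-headless crawling.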
CLOUD_ENUM_S3_PROFILE=optimized # optimized: quickscan (-qs plus safe -m/-b paths) | exhaustive: -m/-b ${tools}/cloud_enum/enum_tools/fuzz.txt (falls back to optimized if fuzz.txt is missing)
CLOUD_ENUM_S3_THREADS=20 # Threads used by cloud_enum in s3buckets/cloud enumeration
# Vulns
VULNS_GENERAL=false # Enable or disable the vulnerability module (very intrusive and slow)
XSS=true # Check for xss with dalfox
TEST_SSL=true # SSL misconfigs
SSRF_CHECKS=true # SSRF checks
SSRF_ALT_MATCH_REGEX="169\\.254\\.169\\.254|latest/meta-data|root:|127\\.0\\.0\\.1|localhost|gopher://|dict://|file://" # Body/content regex for alternate SSRF payload checks
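# A hedged sketch of how this regex could be matched against a captured response body
# (assumption: reconFTW's actual matching logic may differ):
#   grep -aEiq "$SSRF_ALT_MATCH_REGEX" response_body.txt && echo "possible SSRF indicator"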
CRLF_CHECKS=true # CRLF checks
LFI=true # LFI by fuzzing
SSTI=true # SSTI by fuzzing
SSTI_ENGINE="TInjA" # SSTI engine: TInjA|SSTImap
TInjA_RATELIMIT=0 # TInjA requests per second (0 = unlimited)
TInjA_TIMEOUT=15 # TInjA request timeout seconds
SSTIMAP_LEVEL=1 # SSTImap escaping level (1-5, higher=slower+deeper)
SSTIMAP_DELAY=0 # SSTImap delay between requests in seconds
SSTIMAP_LEGACY=false # SSTImap include legacy engine payloads
SSTIMAP_GENERIC=false # SSTImap try generic engine payloads
SQLI=true # Check SQLI
SQLMAP=true # Check SQLI with sqlmap
GHAURI=false # Check SQLI with ghauri
BROKENLINKS=true # Check for broken links
BROKENLINKS_ENGINE="second-order" # Broken links engine: second-order|legacy
SECOND_ORDER_CONFIG="${tools}/second-order/config/takeover.json" # second-order config file path
SECOND_ORDER_DEPTH=1 # second-order crawl depth
SECOND_ORDER_THREADS=10 # second-order thread count
SECOND_ORDER_INSECURE=false # second-order accept invalid TLS
SPRAY=true # Performs password spraying
SPRAY_ENGINE="brutespray" # brutespray|brutus
SPRAY_BRUTUS_ONLY_DEEP=true # Run brutus spraying only in DEEP mode (unless explicitly disabled)
BRUTUS_USERNAMES="" # Optional comma-separated usernames for brutus
BRUTUS_PASSWORDS="" # Optional comma-separated passwords for brutus
BRUTUS_KEY_FILE="" # Optional SSH private key path for brutus
COMM_INJ=true # Check for command injections with commix
SMUGGLING=true # Check for HTTP request smuggling flaws
WEBCACHE=true # Check for Web Cache issues
WEBCACHE_TOXICACHE=true # Complement webcache module with toxicache engine
TOXICACHE_THREADS=70 # toxicache worker threads
TOXICACHE_USER_AGENT="Mozilla/5.0 (X11; Linux x86_64; rv:128.0) Gecko/20100101 Firefox/128.0" # toxicache user-agent
BYPASSER4XX=true # Check for 4XX bypasses
FUZZPARAMS=true # Fuzz parameters values
NUCLEI_DAST=true # Run dedicated nuclei -dast module (forced on when VULNS_GENERAL=true, e.g. -a) over webs/urls/gf candidates
NUCLEI_DAST_TEMPLATE_PATH="${NUCLEI_TEMPLATES_PATH}/dast" # DAST templates path
NUCLEI_DAST_EXTRA_ARGS="" # Optional extra args only for nuclei_dast
# Parallelization
PARALLEL_MODE=true # Run independent functions in parallel (faster, uses more resources). Disable with --no-parallel or set false.
PERF_PROFILE="balanced" # Performance profile: low|balanced|max
CONTINUE_ON_TOOL_ERROR=true # Continue recon when a tool/module fails in parallel batches (set false for fail-fast).
PARALLEL_LOG_MODE="summary" # Parallel output mode: summary (compact) | tail (last N lines) | full (cat all)
PARALLEL_TAIL_LINES=20 # Number of tail lines shown per job in 'tail' mode (doubled on failure)
PARALLEL_UI_MODE="clean" # Parallel terminal UX: clean | balanced | trace
PARALLEL_PROGRESS_SHOW_ETA=true # Show ETA in live progress once estimate is stable
PARALLEL_PROGRESS_SHOW_ACTIVE=true # Show active tasks list in live parallel progress
PARALLEL_PROGRESS_COMPACT_ACTIVE_MAX=4 # Max active/done items shown before compacting with "+N more"
PARALLEL_TRACE_SLOW_SECONDS=30 # In balanced mode, show finished-line for jobs slower than this threshold
# Parallel batch sizing per module group (effective concurrency is still capped by PARALLEL_MAX_JOBS and backpressure)
PAR_OSINT_GROUP1_SIZE=5
PAR_OSINT_GROUP2_SIZE=5
PAR_SUB_PASSIVE_GROUP_SIZE=4
PAR_SUB_DEP_ACTIVE_GROUP_SIZE=3
PAR_SUB_POST_ACTIVE_GROUP_SIZE=2
PAR_SUB_BRUTE_GROUP_SIZE=2 # Legacy/no-op for brute phase (now forced sequential); kept for backward compatibility.
PAR_WEB_DETECT_GROUP_SIZE=3
PAR_VULNS_GROUP1_SIZE=4
PAR_VULNS_GROUP2_SIZE=4
PAR_VULNS_GROUP4_SIZE=3
# Terminal output verbosity
OUTPUT_VERBOSITY=1 # 0=quiet (errors+final summary only), 1=normal (default), 2=verbose (PIDs, full parallel output, timestamps)
# Extra features
NOTIFICATION=false # Notification for every function
SOFT_NOTIFICATION=false # Only for start/end
DEEP=false # DEEP mode, really slow; ignores the result-count limits below
DEEP_LIMIT=500 # First result-count limit; gated steps are skipped above it unless DEEP is enabled
DEEP_LIMIT2=1500 # Second, higher result-count limit; same behavior as DEEP_LIMIT
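# Example: a step gated by DEEP_LIMIT is skipped once its input list exceeds 500 entries
# (1500 for DEEP_LIMIT2-gated steps), unless DEEP=true, which ignores both limits.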
DIFF=false # Diff function, run every module over an already scanned target, printing only new findings (but save everything)
REMOVETMP=false # Delete temporary files after execution (to free up space)
REMOVELOG=false # Delete logs after execution
PROXY=false # Send to proxy the websites found
SENDZIPNOTIFY=false # Send to zip the results (over notify)
PRESERVE=true # set to true to avoid deleting the .called_fn files on really large scans
FFUF_FLAGS=" -mc all -fc 404 -sf -noninteractive -of json" # Ffuf flags
# HTTP options
HEADER="User-Agent: Mozilla/5.0 (X11; Linux x86_64; rv:128.0) Gecko/20100101 Firefox/128.0" # Default header
# Threads (auto-scaled based on CPU cores, override to set fixed values)
AVAILABLE_CORES=$(nproc 2>/dev/null || sysctl -n hw.ncpu 2>/dev/null || echo 4)
FFUF_THREADS=$((AVAILABLE_CORES * 10))
HTTPX_THREADS=$((AVAILABLE_CORES * 12))
HTTPX_UNCOMMONPORTS_THREADS=$((AVAILABLE_CORES * 25))
KATANA_THREADS=$((AVAILABLE_CORES * 5))
BRUTESPRAY_CONCURRENCE=$((AVAILABLE_CORES * 2))
DNSTAKE_THREADS=$((AVAILABLE_CORES * 10)) # Reduced from 25x to avoid rate limiting
DALFOX_THREADS=$((AVAILABLE_CORES * 15)) # Reduced from 50x to avoid triggering WAFs
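# Worked example: on an 8-core host, AVAILABLE_CORES=8 gives FFUF_THREADS=80,
# HTTPX_THREADS=96, KATANA_THREADS=40 and DALFOX_THREADS=120.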
DNS_RESOLVER=auto # auto|puredns|dnsx (auto: detects NAT/CGNAT → dnsx for home, puredns for VPS)
PUREDNS_PUBLIC_LIMIT=5000 # Safe default; set 0 for unlimited on VPS with dedicated resolver
PUREDNS_TRUSTED_LIMIT=400
PUREDNS_WILDCARDTEST_LIMIT=30
PUREDNS_WILDCARDBATCH_LIMIT=1500000
DNSX_THREADS=100 # Threads for dnsx when behind NAT
DNSX_RATE_LIMIT=500 # QPS for dnsx when behind NAT
DNSVALIDATOR_THREADS=200
INTERLACE_THREADS=10
TLSX_THREADS=1000
XNLINKFINDER_DEPTH=3
# Rate limits
HTTPX_RATELIMIT=150
NUCLEI_RATELIMIT=150
FFUF_RATELIMIT=0
# Timeouts
SUBFINDER_ENUM_TIMEOUT=180 # Minutes
CMSSCAN_TIMEOUT=3600 # Seconds
FFUF_MAXTIME=900 # Seconds
HTTPX_TIMEOUT=10 # Seconds
HTTPX_UNCOMMONPORTS_TIMEOUT=10 # Seconds
PERMUTATIONS_LIMIT=2147483648 # Bytes, default is 2 GB (prevents accidental disk exhaustion)
DNS_BRUTE_TIMEOUT=0 # timeout/gtimeout duration for DNS bruteforce (0 disables hard-timeout, e.g. 4h)
DNS_RESOLVE_TIMEOUT=0 # timeout/gtimeout duration for DNS resolve (0 disables hard-timeout, e.g. 6h)
DNS_HEARTBEAT_INTERVAL_SECONDS=20 # Progress heartbeat interval for long DNS jobs
# lists
fuzz_wordlist=${WORDLISTS_DIR}/fuzz_wordlist.txt
lfi_wordlist=${WORDLISTS_DIR}/lfi_wordlist.txt
ssti_wordlist=${WORDLISTS_DIR}/ssti_wordlist.txt
subs_wordlist=${WORDLISTS_DIR}/subdomains.txt
subs_wordlist_big=${tools}/subdomains_n0kovo_big.txt
headers_inject=${WORDLISTS_DIR}/headers_inject.txt
resolvers=${tools}/resolvers.txt
resolvers_trusted=${tools}/resolvers_trusted.txt
# Axiom Fleet
# Resolver paths on Axiom instances (change if your fleet uses a different home dir)
AXIOM_RESOLVERS_PATH="/home/op/lists/resolvers.txt"
AXIOM_RESOLVERS_TRUSTED_PATH="/home/op/lists/resolvers_trusted.txt"
# Will not start a new fleet if one exists with the same name and size (or larger)
# AXIOM=false Uncomment only to overwrite command line flags
AXIOM_FLEET_LAUNCH=true # Enable or disable spinning up a new fleet; if false, the existing fleet with the AXIOM_FLEET_NAME prefix is used
AXIOM_FLEET_NAME="reconFTW" # Fleet's prefix name
AXIOM_FLEET_COUNT=10 # Number of instances in the fleet
AXIOM_FLEET_REGIONS="eu-central" # Fleet's region
AXIOM_FLEET_SHUTDOWN=true # Enable or disable deleting the fleet after execution
AXIOM_AUTO_FIX_HOSTKEY=true # Auto-repair known_hosts entries on SSH host-key mismatch before fallback to local mode
# This is a script on your reconftw host that might prep things your way...
#AXIOM_POST_START="~/Tools/axiom_config.sh" # Useful to send your config files to the fleet
AXIOM_EXTRA_ARGS="" # Leave empty if you don't want to add extra arguments
#AXIOM_EXTRA_ARGS=" --rm-logs" # Example
# Faraday-Server
FARADAY=false # Enable or disable Faraday integration
FARADAY_WORKSPACE="reconftw" # Faraday workspace
# AI
AI_EXECUTABLE="python3" # Python executable fallback if reconftw_ai venv python is not available
AI_MODEL="llama3:8b" # Model to use
AI_REPORT_TYPE="md" # Report type to use (md, txt)
AI_REPORT_PROFILE="bughunter" # Report profile to use (executive, brief, or bughunter)
AI_PROMPTS_FILE="" # Optional custom prompts file for reconftw_ai (empty uses default prompts.json)
AI_MAX_CHARS_PER_FILE=50000 # Max chars loaded per file before truncation
AI_MAX_FILES_PER_CATEGORY=200 # Max files loaded per category for AI context
AI_REDACT=true # Redact sensitive indicators before AI analysis
AI_ALLOW_MODEL_PULL=false # Allow reconftw_ai to auto-pull missing model
AI_STRICT=false # Fail AI analysis if one or more categories have no data
# API & Advanced Web Checks
GRAPHQL_CHECK=true # Detect GraphQL endpoints and introspection
GQLSPECTION=false # Run GQLSpection deep introspection on detected GraphQL endpoints (heavier)
PARAM_DISCOVERY=true # Parameter discovery with arjun
GRPC_SCAN=false # Attempt basic gRPC reflection on common ports
LLM_PROBE=true # Probe discovered web/API endpoints for exposed LLM services using julius
LLM_PROBE_AUGUSTUS=false # Include augustus generator config in julius output
# IPv6
IPV6_SCAN=true # Attempt IPv6 discovery/portscan where addresses exist
# Wordlists / threads for new modules
ARJUN_THREADS=10
# Data & Automation
ASSET_STORE=true # Append assets/findings to assets.jsonl
EXPORT_FORMAT="" # Export artifacts at end of scan: json|html|csv|all (empty = disabled)
REPORT_ONLY=false # Rebuild reports from existing results without running scans (use --report-only)
QUICK_RESCAN=false # Skip heavy steps if no new subdomains/webs
CHUNK_LIMIT=2000 # Split very large lists into chunks (urls, webs)
HOTLIST_TOP=50 # Number of top risky assets to highlight
# Performance
RESOLVER_IQ=false # Prefer fast/healthy resolvers (experimental)
# Estimated durations for skipped heavy modules (seconds)
TIME_EST_NUCLEI=600
TIME_EST_FUZZ=900
TIME_EST_URLCHECKS=300
TIME_EST_JSCHECKS=300
TIME_EST_API=300
TIME_EST_GQL=180
TIME_EST_PARAM=240
TIME_EST_GRPC=120
TIME_EST_IIS=60
# TERM COLORS
bred='\033[1;31m'
bblue='\033[1;34m'
bgreen='\033[1;32m'
byellow='\033[1;33m'
red='\033[0;31m'
blue='\033[0;34m'
green='\033[0;32m'
cyan='\033[0;36m'
yellow='\033[0;33m'
reset='\033[0m'
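# Usage example (standard ANSI escape sequences):
#   printf "%b\n" "${bgreen}[+] Module finished${reset}"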