Skip to content

Commit

Permalink
feat(config): load wordlists from URLs and config values (#498)
Browse files Browse the repository at this point in the history
  • Loading branch information
ocervell authored Nov 29, 2024
1 parent e074428 commit d537952
Show file tree
Hide file tree
Showing 8 changed files with 118 additions and 63 deletions.
138 changes: 82 additions & 56 deletions secator/config.py
Original file line number Diff line number Diff line change
Expand Up @@ -84,6 +84,10 @@ class Runners(StrictModel):
remove_duplicates: bool = False


class Security(StrictModel):
	# Gates download_file()'s local-file branch: when False, wordlist/payload
	# values that resolve to an existing local path are rejected instead of
	# being symlinked into the secator data folders (prevents arbitrary local
	# file references from config values).
	allow_local_file_access: bool = True

class HTTP(StrictModel):
socks5_proxy: str = 'socks5://127.0.0.1:9050'
http_proxy: str = 'https://127.0.0.1:9080'
Expand Down Expand Up @@ -117,7 +121,8 @@ class Wordlists(StrictModel):
defaults: Dict[str, str] = {'http': 'bo0m_fuzz', 'dns': 'combined_subdomains'}
templates: Dict[str, str] = {
'bo0m_fuzz': 'https://raw.githubusercontent.com/Bo0oM/fuzz.txt/master/fuzz.txt',
'combined_subdomains': 'https://raw.githubusercontent.com/danielmiessler/SecLists/master/Discovery/DNS/combined_subdomains.txt' # noqa: E501
'combined_subdomains': 'https://raw.githubusercontent.com/danielmiessler/SecLists/master/Discovery/DNS/combined_subdomains.txt', # noqa: E501
'directory_list_small': 'https://raw.githubusercontent.com/danielmiessler/SecLists/refs/heads/master/Discovery/Web-Content/directory-list-2.3-small.txt', # noqa: E501
}
lists: Dict[str, List[str]] = {}

Expand Down Expand Up @@ -166,6 +171,7 @@ class SecatorConfig(StrictModel):
payloads: Payloads = Payloads()
wordlists: Wordlists = Wordlists()
addons: Addons = Addons()
security: Security = Security()
offline_mode: bool = False


Expand Down Expand Up @@ -496,56 +502,81 @@ def download_files(data: dict, target_folder: Path, offline_mode: bool, type: st
offline_mode (bool): Offline mode.
"""
for name, url_or_path in data.items():
if url_or_path.startswith('git+'):
# Clone Git repository
git_url = url_or_path[4:] # remove 'git+' prefix
repo_name = git_url.split('/')[-1]
if repo_name.endswith('.git'):
repo_name = repo_name[:-4]
target_path = target_folder / repo_name
if not target_path.exists():
console.print(f'[bold turquoise4]Cloning git {type} [bold magenta]{repo_name}[/] ...[/] ', end='')
target_path = download_file(url_or_path, target_folder, offline_mode, type, name=name)
if target_path:
data[name] = target_path


def download_file(url_or_path, target_folder: Path, offline_mode: bool, type: str, name: str = None):
	"""Download a remote file to target folder, clone a git repo, or symlink a local file.

	Args:
		url_or_path (str): URL, local path, or Git repo URL prefixed with 'git+'.
		target_folder (Path): Target folder for storing files or repos.
		offline_mode (bool): If True, skip any operation requiring network access.
		type (str): Type of object being fetched (used in log messages, e.g. 'wordlist').
		name (str, Optional): Name of object. Defaults to the last URL / path segment.

	Returns:
		Path | None: Resolved path to the downloaded file / folder, or None when the
			operation was skipped (offline mode, security policy) or the download failed.
	"""
	if url_or_path.startswith('git+'):
		# Clone Git repository
		git_url = url_or_path[4:]  # remove 'git+' prefix
		repo_name = git_url.split('/')[-1]
		if repo_name.endswith('.git'):
			repo_name = repo_name[:-4]
		target_path = target_folder / repo_name
		if not target_path.exists():
			console.print(f'[bold turquoise4]Cloning git {type} [bold magenta]{repo_name}[/] ...[/] ', end='')
			if offline_mode:
				console.print('[bold orange1]skipped [dim](offline)[/].[/]')
				return
			try:
				call(['git', 'clone', git_url, str(target_path)], stderr=DEVNULL, stdout=DEVNULL)
				console.print('[bold green]ok.[/]')
			except Exception as e:
				console.print(f'[bold red]failed ({str(e)}).[/]')
		return target_path.resolve()
	elif Path(url_or_path).exists():
		# Create a symbolic link for a local file
		local_path = Path(url_or_path)
		target_path = target_folder / local_path.name
		if not name:
			name = url_or_path.split('/')[-1]
		if not CONFIG.security.allow_local_file_access:
			# Local file references can be disabled to prevent config values from
			# exposing arbitrary files on the host.
			console.print(f'[bold red]Cannot reference local file {url_or_path} (disabled for security reasons)[/]')
			return
		if not target_path.exists():
			console.print(f'[bold turquoise4]Symlinking {type} [bold magenta]{name}[/] ...[/] ', end='')
			try:
				target_path.symlink_to(local_path)
				console.print('[bold green]ok.[/]')
			except Exception as e:
				console.print(f'[bold red]failed ({str(e)}).[/]')
		return target_path.resolve()
	else:
		# Download file from URL
		ext = url_or_path.split('.')[-1]
		if not name:
			name = url_or_path.split('/')[-1]
		# Avoid doubling the extension when the name already carries it.
		filename = f'{name}.{ext}' if not name.endswith(ext) else name
		target_path = target_folder / filename
		if not target_path.exists():
			try:
				console.print(f'[bold turquoise4]Downloading {type} [bold magenta]{name}[/] ...[/] ', end='')
				if offline_mode:
					console.print('[bold orange1]skipped [dim](offline)[/].[/]')
					return
				resp = requests.get(url_or_path, timeout=3)
				resp.raise_for_status()
				with open(target_path, 'wb') as f:
					f.write(resp.content)
				console.print('[bold green]ok.[/]')
			except requests.RequestException as e:
				console.print(f'[bold red]failed ({str(e)}).[/]')
				return
		return target_path.resolve()


# Load default_config
Expand Down Expand Up @@ -577,13 +608,8 @@ def download_files(data: dict, target_folder: Path, offline_mode: bool, type: st
dir.mkdir(parents=False)
console.print('[bold green]ok.[/]')

# Download wordlists and set defaults
# Download wordlists and payloads
download_files(CONFIG.wordlists.templates, CONFIG.dirs.wordlists, CONFIG.offline_mode, 'wordlist')
for category, name in CONFIG.wordlists.defaults.items():
if name in CONFIG.wordlists.templates.keys():
CONFIG.wordlists.defaults[category] = str(CONFIG.wordlists.templates[name])

# Download payloads
download_files(CONFIG.payloads.templates, CONFIG.dirs.payloads, CONFIG.offline_mode, 'payload')

# Print config
Expand Down
4 changes: 2 additions & 2 deletions secator/configs/workflows/subdomain_recon.yaml
Original file line number Diff line number Diff line change
Expand Up @@ -13,12 +13,12 @@ tasks:
# input: vhost
# domain_:
# - target.name
# wordlist: /usr/share/seclists/Discovery/DNS/combined_subdomains.txt
# wordlist: combined_subdomains
# gobuster:
# input: dns
# domain_:
# - target.name
# wordlist: /usr/share/seclists/Discovery/DNS/combined_subdomains.txt
# wordlist: combined_subdomains
_group:
nuclei:
description: Check for subdomain takeovers
Expand Down
2 changes: 1 addition & 1 deletion secator/configs/workflows/url_dirsearch.yaml
Original file line number Diff line number Diff line change
Expand Up @@ -8,7 +8,7 @@ input_types:
tasks:
ffuf:
description: Search for HTTP directories
wordlist: /usr/share/seclists/Discovery/Web-Content/directory-list-2.3-small.txt
wordlist: directory_list_small
targets_:
- type: target
field: '{name}/FUZZ'
Expand Down
1 change: 1 addition & 0 deletions secator/decorators.py
Original file line number Diff line number Diff line change
Expand Up @@ -228,6 +228,7 @@ def decorator(f):
conf.pop('shlex', None)
conf.pop('meta', None)
conf.pop('supported', None)
conf.pop('process', None)
reverse = conf.pop('reverse', False)
long = f'--{opt_name}'
short = f'-{short_opt}' if short_opt else f'-{opt_name}'
Expand Down
5 changes: 5 additions & 0 deletions secator/runners/command.py
Original file line number Diff line number Diff line change
Expand Up @@ -666,6 +666,11 @@ def _process_opts(
debug('skipped (falsy)', obj={'name': opt_name, 'value': opt_val}, obj_after=False, sub='command.options', verbose=True) # noqa: E501
continue

# Apply process function on opt value
if 'process' in opt_conf:
func = opt_conf['process']
opt_val = func(opt_val)

# Convert opt value to expected command opt value
mapped_opt_val = opt_value_map.get(opt_name)
if mapped_opt_val:
Expand Down
4 changes: 2 additions & 2 deletions secator/tasks/_categories.py
Original file line number Diff line number Diff line change
Expand Up @@ -16,7 +16,7 @@
from secator.output_types import Ip, Port, Subdomain, Tag, Url, UserAccount, Vulnerability
from secator.config import CONFIG
from secator.runners import Command
from secator.utils import debug
from secator.utils import debug, process_wordlist


OPTS = {
Expand All @@ -39,7 +39,7 @@
THREADS: {'type': int, 'help': 'Number of threads to run', 'default': 50},
TIMEOUT: {'type': int, 'help': 'Request timeout'},
USER_AGENT: {'type': str, 'short': 'ua', 'help': 'User agent, e.g "Mozilla Firefox 1.0"'},
WORDLIST: {'type': str, 'short': 'w', 'default': CONFIG.wordlists.defaults.http, 'help': 'Wordlist to use'}
WORDLIST: {'type': str, 'short': 'w', 'default': 'http', 'process': process_wordlist, 'help': 'Wordlist to use'}
}

OPTS_HTTP = [
Expand Down
3 changes: 2 additions & 1 deletion secator/tasks/dnsxbrute.py
Original file line number Diff line number Diff line change
Expand Up @@ -4,6 +4,7 @@
from secator.output_types import Subdomain
from secator.serializers import JSONSerializer
from secator.tasks._categories import ReconDns
from secator.utils import process_wordlist


@task()
Expand All @@ -19,7 +20,7 @@ class dnsxbrute(ReconDns):
THREADS: 'threads',
}
opts = {
WORDLIST: {'type': str, 'short': 'w', 'default': CONFIG.wordlists.defaults.dns, 'help': 'Wordlist'},
WORDLIST: {'type': str, 'short': 'w', 'default': CONFIG.wordlists.defaults.dns, 'process': process_wordlist, 'help': 'Wordlist to use'}, # noqa: E501
'trace': {'is_flag': True, 'default': False, 'help': 'Perform dns tracing'},
}
item_loaders = [JSONSerializer()]
Expand Down
24 changes: 23 additions & 1 deletion secator/utils.py
Original file line number Diff line number Diff line change
Expand Up @@ -26,7 +26,7 @@
import yaml

from secator.definitions import (DEBUG_COMPONENT, VERSION, DEV_PACKAGE)
from secator.config import CONFIG, ROOT_FOLDER, LIB_FOLDER
from secator.config import CONFIG, ROOT_FOLDER, LIB_FOLDER, download_file
from secator.rich import console

logger = logging.getLogger(__name__)
Expand Down Expand Up @@ -689,3 +689,25 @@ def merge_two_dicts(dict1, dict2):

# Use reduce to apply merge_two_dicts to all dictionaries in dicts
return reduce(merge_two_dicts, dicts, {})


def process_wordlist(val):
	"""Pre-process a wordlist option value into a usable wordlist path.

	Resolution order:
	1. If `val` is a default category (key of CONFIG.wordlists.defaults, e.g.
	   'http' or 'dns'), replace it with the template name it maps to.
	2. If `val` is a template name (key of CONFIG.wordlists.templates), return
	   the configured template value.
	3. Otherwise treat `val` as a URL or local path and fetch it with
	   download_file().

	Args:
		val (str): A key of CONFIG.wordlists.defaults or CONFIG.wordlists.templates,
			a local path, or a URL.

	Returns:
		Path | str | None: Resolved wordlist path / template value, or None when the
			wordlist could not be fetched (offline mode, download failure, ...).
	"""
	# Pass a None default: without it getattr() raises AttributeError whenever
	# `val` is a raw URL or local path instead of a config key.
	default_wordlist = getattr(CONFIG.wordlists.defaults, val, None)
	if default_wordlist:
		val = default_wordlist
	template_wordlist = getattr(CONFIG.wordlists.templates, val, None)
	if template_wordlist:
		return template_wordlist
	return download_file(
		val,
		target_folder=CONFIG.dirs.wordlists,
		offline_mode=CONFIG.offline_mode,
		type='wordlist'
	)

0 comments on commit d537952

Please sign in to comment.