# feroxbuster/ferox-config.toml.example
# Example configuration for feroxbuster
#
# If you wish to provide persistent settings to feroxbuster, rename this file to ferox-config.toml and make sure
# it resides in the same directory as the feroxbuster binary.
#
# After that, uncomment any line to override the default value provided by the binary itself.
#
# Any setting used here can be overridden by the corresponding command line option/argument
#
# wordlist = "/wordlists/seclists/Discovery/Web-Content/raft-medium-directories.txt"
# status_codes = [200, 500]
# filter_status = [301]
# threads = 1
# timeout = 5
# proxy = "http://127.0.0.1:8080"
# replay_proxy = "http://127.0.0.1:8081"
# replay_codes = [200, 302]
# verbosity = 1
# parallel = 8
# scan_limit = 6
# rate_limit = 250
# quiet = true
# silent = true
# auto_tune = true
# auto_bail = true
# json = true
# output = "/targets/ellingson_mineral_company/gibson.txt"
# debug_log = "/var/log/find-the-derp.log"
# user_agent = "Mozilla/5.0 (Windows NT 6.1; Win64; x64; rv:47.0) Gecko/20100101 Firefox/47.0"
# random_agent = false
# redirects = true
# insecure = true
# collect_words = true
# collect_backups = true
# collect_extensions = true
# extensions = ["php", "html"]
# dont_collect = ["png", "gif", "jpg", "jpeg"]
# methods = ["GET", "POST"]
# data = [11, 12, 13, 14, 15]
# url_denylist = ["http://dont-scan.me", "https://also-not.me"]
# regex_denylist = ["/deny.*"]
# no_recursion = true
# add_slash = true
# stdin = true
# dont_filter = true
# extract_links = true
# depth = 1
# force_recursion = true
# filter_size = [5174]
# filter_regex = ["^ignore me$"]
# filter_similar = ["https://somesite.com/soft404"]
# filter_word_count = [993]
# filter_line_count = [35, 36]
# queries = [["name","value"], ["rick", "astley"]]
# save_state = false
# time_limit = "10m"
# headers can be specified on multiple lines or as an inline table
#
# inline example
# headers = {"stuff" = "things"}
#
# multi-line example
# note: if multi-line is used, all key/value pairs under it belong to the headers table until the next table
# is found or the end of the file is reached
#
# [headers]
# stuff = "things"
# more = "headers"