ail-framework/configs/core.cfg.sample


[Directories]
bloomfilters = Blooms
dicofilters = Dicos
pastes = PASTES
hash = HASHS
crawled = crawled
har = CRAWLED_SCREENSHOT
screenshot = CRAWLED_SCREENSHOT/screenshot
wordtrending_csv = var/www/static/csv/wordstrendingdata
wordsfile = files/wordfile
protocolstrending_csv = var/www/static/csv/protocolstrendingdata
protocolsfile = files/protocolsfile
tldstrending_csv = var/www/static/csv/tldstrendingdata
tldsfile = faup/src/data/mozilla.tlds
domainstrending_csv = var/www/static/csv/domainstrendingdata
pystemonpath = /home/pystemon/pystemon/
sentiment_lexicon_file = sentiment/vader_lexicon.zip/vader_lexicon/vader_lexicon.txt
##### Notifications #####
[Notifications]
ail_domain = https://localhost:7000
sender = sender@example.com
sender_host = smtp.example.com
sender_port = 1337
sender_pw = None
# Only needed when the email server credentials require a username instead of an email address
#sender_user = sender
sender_user =
# optional for using with authenticated SMTP over SSL
# sender_pw = securepassword
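# Example of a full authenticated-SMTP setup (hypothetical values, shown only
# to illustrate how the keys above combine):
#   sender = alerts@example.com
#   sender_host = smtp.example.com
#   sender_port = 465
#   sender_user = alerts
#   sender_pw = securepassword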
##### Flask #####
[Flask]
#Proxying requests to the app
baseUrl = /
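# Hypothetical example (not from this file): when AIL is served behind a
# reverse proxy under a path prefix, baseUrl would carry that prefix, e.g.:
#baseUrl = /ail/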
#Host to bind to
host = 127.0.0.1
#Flask server port
port = 7000
#Number of logs to display in the dashboard
max_dashboard_logs = 15
#Maximum number of characters to display in the tooltip
max_preview_char = 250
#Maximum number of characters to display in the modal
max_preview_modal = 800
#Default number of headers to display in trending graphs
default_display = 10
#Time window, in minutes, displayed for the number of processed pastes
minute_processed_paste = 10
#Maximum line length allowed when computing a diff between duplicates
DiffMaxLineLength = 10000
#### Modules ####
[BankAccount]
max_execution_time = 60
[Categ]
#Minimum number of matches between the paste and the category file
matchingThreshold=1
[Credential]
#Minimum length that a credential must have to be considered as such
minimumLengthThreshold=3
#Pushed as an alert if the number of credentials is greater than this number
criticalNumberToAlert=8
#Considered a false positive if fewer than X matches come from the top password list
minTopPassList=5
[Curve]
max_execution_time = 90
[Onion]
max_execution_time = 180
[PgpDump]
max_execution_time = 60
[Base64]
path = Base64/
max_execution_time = 60
[Binary]
path = Base64/
max_execution_time = 60
[Hex]
path = Base64/
max_execution_time = 60
[Modules_Duplicates]
#Number of months to look back
maximum_month_range = 3
#The ssdeep score at which two pastes are considered duplicates
threshold_duplicate_ssdeep = 50
#The tlsh score at which two pastes are considered duplicates
threshold_duplicate_tlsh = 52
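# Hedged note (assumption, not stated in this file): ssdeep similarity scores
# range from 0 to 100 where higher means more similar, while a lower tlsh
# distance means more similar; see the Duplicates module source for how these
# thresholds are actually compared.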
#Minimum size of the paste considered
min_paste_size = 0.3
[Module_ModuleInformation]
#Threshold, in seconds, used to decide whether a module is stuck
threshold_stucked_module=600
[Module_Mixer]
#Define the configuration of the mixer, possible values: 1, 2 or 3
operation_mode = 3
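# Hedged note (assumption, check bin/Mixer.py for the authoritative semantics):
# mode 1 is typically deduplication across all feeders, mode 2 deduplication
# per feeder, and mode 3 no deduplication.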
#Time during which a paste is considered a duplicate, in seconds (1 day = 86400)
ttl_duplicate = 86400
default_unnamed_feed_name = unnamed_feeder
[TermTrackerMod]
max_execution_time = 120
[RegexTracker]
max_execution_time = 60
##### Redis #####
[Redis_Cache]
host = localhost
port = 6379
db = 0
[Redis_Log]
host = localhost
port = 6380
db = 0
[Redis_Log_submit]
host = localhost
port = 6380
db = 1
[Redis_Queues]
host = localhost
port = 6381
db = 0
[Redis_Mixer_Cache]
host = localhost
port = 6381
db = 1
##### ARDB #####
[ARDB_Curve]
host = localhost
port = 6382
db = 1
[ARDB_Sentiment]
host = localhost
port = 6382
db = 4
[ARDB_TermFreq]
host = localhost
port = 6382
db = 2
[ARDB_TermCred]
host = localhost
port = 6382
db = 5
[ARDB_DB]
host = localhost
port = 6382
db = 0
[ARDB_Trending]
host = localhost
port = 6382
db = 3
[ARDB_Tracker]
host = localhost
port = 6382
db = 3
[ARDB_Hashs]
host = localhost
db = 1
[ARDB_Tags]
host = localhost
port = 6382
db = 6
[ARDB_Metadata]
host = localhost
port = 6382
db = 7
[ARDB_Statistics]
host = localhost
port = 6382
db = 8
[ARDB_Onion]
host = localhost
port = 6382
db = 9
[ARDB_Objects]
host = localhost
port = 6382
db = 10
[Kvrocks_Meta]
host = localhost
port = 6383
db = 0
[Url]
cc_critical = DE
[DomClassifier]
cc = DE
cc_tld = r'\.de$'
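# Hypothetical example for another country (illustrative values only):
#cc = FR
#cc_tld = r'\.fr$'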
dns = 8.8.8.8
[Mail]
dns = 8.8.8.8
[Web]
dns = 149.13.33.69
# Indexer configuration
[Indexer]
type = whoosh
path = indexdir
register = indexdir/all_index.txt
#size in MB
index_max_size = 2000
[ailleakObject]
maxDuplicateToPushToMISP=10
###############################################################################
# For multiple feeds, add them separated by "," without spaces
# e.g.: tcp://127.0.0.1:5556,tcp://127.0.0.1:5557
[ZMQ_Global]
#address = tcp://crf.circl.lu:5556
address = tcp://127.0.0.1:5556,tcp://crf.circl.lu:5556
channel = 102
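# Hedged note (assumption): channel is the ZMQ PUB/SUB topic prefix that the
# feeder publishes on and AIL subscribes to.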
bind = tcp://127.0.0.1:5556
[ZMQ_Url]
address = tcp://127.0.0.1:5004
channel = urls
[ZMQ_FetchedOnion]
address = tcp://127.0.0.1:5005
channel = FetchedOnion
[RedisPubSub]
host = localhost
port = 6381
db = 0
[Crawler]
activate_crawler = False
crawler_depth_limit = 1
default_crawler_har = True
default_crawler_png = True
default_crawler_closespider_pagecount = 50
default_crawler_user_agent = Mozilla/5.0 (Windows NT 10.0; rv:78.0) Gecko/20100101 Firefox/78.0
splash_url = http://127.0.0.1
splash_port = 8050-8052
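# Hedged note (assumption): a port range such as 8050-8052 is expected to map
# to one Splash instance per port (three instances here).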
domain_proxy = onion.foundation
[IP]
# list of comma-separated CIDRs that you wish to be alerted for, e.g.:
#networks = 192.168.34.0/24,10.0.0.0/8,192.168.33.0/24
networks =
[SubmitPaste]
# Max text size for text submission: 1 MB
TEXT_MAX_SIZE = 1000000
# Max file size for file submission: 1 GB
FILE_MAX_SIZE = 1000000000
# Allowed file extensions for file submission, comma separated
# TODO add zip, gz and tar.gz
FILE_ALLOWED_EXTENSIONS = txt,sh,pdf