# core.cfg.sample - sample configuration for the AIL framework
# Copy to core.cfg and adjust the values for your deployment.
[Directories]
bloomfilters = Blooms
dicofilters = Dicos
pastes = PASTES
hash = HASHS
crawled = crawled
har = CRAWLED_SCREENSHOT
screenshot = CRAWLED_SCREENSHOT/screenshot
wordtrending_csv = var/www/static/csv/wordstrendingdata
wordsfile = files/wordfile
protocolstrending_csv = var/www/static/csv/protocolstrendingdata
protocolsfile = files/protocolsfile
tldstrending_csv = var/www/static/csv/tldstrendingdata
tldsfile = faup/src/data/mozilla.tlds
domainstrending_csv = var/www/static/csv/domainstrendingdata
pystemonpath = /home/pystemon/pystemon/
sentiment_lexicon_file = sentiment/vader_lexicon.zip/vader_lexicon/vader_lexicon.txt
##### Notifications ######
[Notifications]
ail_domain = https://localhost:7000
sender = sender@example.com
sender_host = smtp.example.com
sender_port = 1337
sender_pw = None
# Only needed when the credentials for the email server need a username instead of an email address
#sender_user = sender
sender_user =
# optional for using with authenticated SMTP over SSL
# sender_pw = securepassword
##### Flask #####
[Flask]
#Proxying requests to the app
baseUrl = /
#Host to bind to
host = 127.0.0.1
#Flask server port
port = 7000
#Number of logs to display in the dashboard
max_dashboard_logs = 15
#Maximum number of characters to display in the tooltip
max_preview_char = 250
#Maximum number of characters to display in the modal
max_preview_modal = 800
#Default number of headers to display in trending graphs
default_display = 10
#Number of minutes displayed for the number of processed pastes.
minute_processed_paste = 10
#Maximum line length authorized to make a diff between duplicates
DiffMaxLineLength = 10000
#### Modules ####
[BankAccount]
max_execution_time = 60
[Categ]
#Minimum number of matches between the paste and the category file
matchingThreshold=1
[Credential]
#Minimum length that a credential must have to be considered as such
minimumLengthThreshold=3
#Will be pushed as an alert if the number of credentials is greater than this number
criticalNumberToAlert=8
#Will be considered a false positive if fewer than X matches from the top password list
minTopPassList=5
[Curve]
max_execution_time = 90
[Onion]
save_i2p = False
max_execution_time = 180
[PgpDump]
max_execution_time = 60
[Base64]
path = Base64/
max_execution_time = 60
[Binary]
path = Base64/
max_execution_time = 60
[Hex]
path = Base64/
max_execution_time = 60
[Modules_Duplicates]
#Number of months to look back
maximum_month_range = 3
#The value above which two pastes are considered duplicates for ssdeep.
threshold_duplicate_ssdeep = 50
#The value above which two pastes are considered duplicates for tlsh.
threshold_duplicate_tlsh = 52
#Minimum size of the paste considered
min_paste_size = 0.3
[Module_ModuleInformation]
#Threshold to deduce if a module is stuck or not, in seconds.
threshold_stucked_module=600
[Module_Mixer]
#Define the configuration of the mixer, possible values: 1, 2 or 3
operation_mode = 3
#Define the time that a paste will be considered a duplicate, in seconds (1 day = 86400)
ttl_duplicate = 86400
default_unnamed_feed_name = unnamed_feeder
[Tracker_Term]
max_execution_time = 120
[Tracker_Regex]
max_execution_time = 60
##### Redis #####
[Redis_Cache]
host = localhost
port = 6379
db = 0
[Redis_Log]
host = localhost
port = 6380
db = 0
[Redis_Log_submit]
host = localhost
port = 6380
db = 1
[Redis_Queues]
host = localhost
port = 6381
db = 0
[Redis_Mixer_Cache]
host = localhost
port = 6381
db = 1
##### ARDB #####
[ARDB_Curve]
host = localhost
port = 6382
db = 1
[ARDB_Sentiment]
host = localhost
port = 6382
db = 4
[ARDB_TermFreq]
host = localhost
port = 6382
db = 2
[ARDB_TermCred]
host = localhost
port = 6382
db = 5
[ARDB_DB]
host = localhost
port = 6382
db = 0
[ARDB_Trending]
host = localhost
port = 6382
db = 3
[ARDB_Tracker]
host = localhost
port = 6382
db = 3
[ARDB_Hashs]
host = localhost
db = 1
[ARDB_Tags]
host = localhost
port = 6382
db = 6
[ARDB_Metadata]
host = localhost
port = 6382
db = 7
[ARDB_Statistics]
host = localhost
port = 6382
db = 8
[ARDB_Onion]
host = localhost
port = 6382
db = 9
[ARDB_Objects]
host = localhost
port = 6382
db = 10
[Kvrocks_Meta]
host = localhost
port = 6383
db = 0
[Url]
cc_critical = DE
[DomClassifier]
cc = DE
cc_tld = r'\.de$'
dns = 8.8.8.8
[Mail]
dns = 8.8.8.8
[Web]
dns = 149.13.33.69
# Indexer configuration
[Indexer]
type = whoosh
path = indexdir
register = indexdir/all_index.txt
#size in Mb
index_max_size = 2000
[ailleakObject]
maxDuplicateToPushToMISP=10
###############################################################################
# For multiple feed, add them with "," without space
# e.g.: tcp://127.0.0.1:5556,tcp://127.0.0.1:5557
[ZMQ_Global]
#address = tcp://crf.circl.lu:5556
address = tcp://127.0.0.1:5556,tcp://crf.circl.lu:5556
channel = 102
bind = tcp://127.0.0.1:5556
[ZMQ_FetchedOnion]
address = tcp://127.0.0.1:5005
channel = FetchedOnion
[RedisPubSub]
host = localhost
port = 6381
db = 0
[Crawler]
activate_crawler = False
crawler_depth_limit = 1
default_crawler_har = True
default_crawler_png = True
default_crawler_closespider_pagecount = 50
default_crawler_user_agent = Mozilla/5.0 (Windows NT 10.0; rv:78.0) Gecko/20100101 Firefox/78.0
splash_url = http://127.0.0.1
splash_port = 8050-8052
domain_proxy = onion.foundation
[IP]
# list of comma-separated CIDRs that you wish to be alerted for. e.g.:
#networks = 192.168.34.0/24,10.0.0.0/8,192.168.33.0/24
networks =
[SubmitPaste]
# 1 Mb Max text paste size for text submission
TEXT_MAX_SIZE = 1000000
# 1 Gb Max file size for file submission
FILE_MAX_SIZE = 1000000000
# Managed file extensions for file submission, comma separated
# TODO add zip, gz and tar.gz
FILE_ALLOWED_EXTENSIONS = txt,sh,pdf