# config.cfg.sample — sample configuration for the AIL framework
# (bin/packages/config.cfg.sample; read via Python ConfigParser)
[Directories]
bloomfilters = Blooms
dicofilters = Dicos
pastes = PASTES
wordtrending_csv = var/www/static/csv/wordstrendingdata
wordsfile = files/wordfile
protocolstrending_csv = var/www/static/csv/protocolstrendingdata
protocolsfile = files/protocolsfile
tldstrending_csv = var/www/static/csv/tldstrendingdata
tldsfile = faup/src/data/mozilla.tlds
domainstrending_csv = var/www/static/csv/domainstrendingdata
pystemonpath = /home/pystemon/pystemon/
sentiment_lexicon_file = sentiment/vader_lexicon.zip/vader_lexicon/vader_lexicon.txt
##### Flask #####
[Flask]
#Maximum number of characters to display in the tooltip
max_preview_char = 250
#Maximum number of characters to display in the modal
max_preview_modal = 800
#Default number of headers to display in trending graphs
default_display = 10
#Number of minutes displayed for the number of processed pastes.
minute_processed_paste = 10
#### Modules ####
[Modules_Duplicates]
#Number of month to look back
maximum_month_range = 3
#The value at which two pastes are considered duplicates for ssdeep.
threshold_duplicate_ssdeep = 50
#The value at which two pastes are considered duplicates for tlsh.
threshold_duplicate_tlsh = 100
#Minimum size of the paste considered
min_paste_size = 0.3
[Module_ModuleInformation]
#Threshold to deduce if a module is stuck or not, in seconds.
threshold_stucked_module = 600
[Module_Mixer]
#Define the configuration of the mixer, possible value: 1 or 2
operation_mode = 1
#Time window during which a paste is considered a duplicate, in seconds (1 day = 86400)
ttl_duplicate = 86400
##### Redis #####
[Redis_Cache]
host = localhost
port = 6379
db = 0
[Redis_Log]
host = localhost
port = 6380
db = 0
[Redis_Queues]
host = localhost
port = 6381
db = 0
[Redis_Data_Merging]
host = localhost
port = 6379
db = 1
[Redis_Paste_Name]
host = localhost
port = 6379
db = 2
[Redis_Mixer_Cache]
host = localhost
port = 6381
db = 1
##### LevelDB #####
[Redis_Level_DB_Curve]
host = localhost
port = 6382
db = 1
[Redis_Level_DB_Sentiment]
host = localhost
port = 6382
db = 4
[Redis_Level_DB_TermFreq]
host = localhost
port = 6382
db = 2
[Redis_Level_DB]
host = localhost
port = 2016
db = 0
[Redis_Level_DB_Trending]
host = localhost
port = 6382
db = 3
[Redis_Level_DB_Hashs]
host = localhost
port = 2016
db = 1
[Url]
cc_critical = DE
[DomClassifier]
cc = DE
cc_tld = r'\.de$'
# Indexer configuration
[Indexer]
type = whoosh
path = indexdir
register = indexdir/all_index.txt
#size in Mb
index_max_size = 2000
###############################################################################
# For multiple feed, add them with "," without space
# e.g.: tcp://127.0.0.1:5556,tcp://127.0.0.1:5557
[ZMQ_Global]
#address = tcp://crf.circl.lu:5556
address = tcp://127.0.0.1:5556
channel = 102
bind = tcp://127.0.0.1:5556
[ZMQ_Url]
address = tcp://127.0.0.1:5004
channel = urls
[ZMQ_FetchedOnion]
address = tcp://127.0.0.1:5005
channel = FetchedOnion
[RedisPubSub]
host = localhost
port = 6381
db = 0