[Directories]
bloomfilters = Blooms
dicofilters = Dicos
pastes = PASTES
hash = HASHS
crawled = crawled
crawled_screenshot = CRAWLED_SCREENSHOT

wordtrending_csv = var/www/static/csv/wordstrendingdata
wordsfile = files/wordfile

protocolstrending_csv = var/www/static/csv/protocolstrendingdata
protocolsfile = files/protocolsfile

tldstrending_csv = var/www/static/csv/tldstrendingdata
tldsfile = faup/src/data/mozilla.tlds

domainstrending_csv = var/www/static/csv/domainstrendingdata

pystemonpath = /home/pystemon/pystemon/

sentiment_lexicon_file = sentiment/vader_lexicon.zip/vader_lexicon/vader_lexicon.txt

##### Notifications #####
[Notifications]
ail_domain = https://localhost:7000
sender = sender@example.com
sender_host = smtp.example.com
sender_port = 1337
sender_pw = None
# optional, for use with authenticated SMTP over SSL
# sender_pw = securepassword

##### Flask #####
[Flask]
#Proxying requests to the app
baseUrl = /
#Number of logs to display in the dashboard
max_dashboard_logs = 15
#Maximum number of characters to display in the tooltip
max_preview_char = 250
#Maximum number of characters to display in the modal
max_preview_modal = 800
#Default number of headers to display in trending graphs
default_display = 10
#Number of minutes of history displayed for the number of processed pastes
minute_processed_paste = 10
#Maximum line length authorized to make a diff between duplicates
DiffMaxLineLength = 10000

#### Modules ####
[BankAccount]
max_execution_time = 60

[Categ]
#Minimum number of matches between the paste and the category file
matchingThreshold=1

[Credential]
#Minimum length that a credential must have to be considered as such
minimumLengthThreshold=3
#An alert is pushed if the number of credentials is greater than this number
criticalNumberToAlert=8
#Considered a false positive if there are fewer than X matches from the top password list
minTopPassList=5

[Curve]
max_execution_time = 90

[Onion]
max_execution_time = 180

[PgpDump]
max_execution_time = 60

[Base64]
path = Base64/
max_execution_time = 60

[Binary]
path = Base64/
max_execution_time = 60

[Hex]
path = Base64/
max_execution_time = 60

[Modules_Duplicates]
#Number of months to look back
maximum_month_range = 3
#The value at which two pastes are considered duplicates for ssdeep.
threshold_duplicate_ssdeep = 50
#The value at which two pastes are considered duplicates for tlsh.
threshold_duplicate_tlsh = 52
#Minimum size of the paste considered
min_paste_size = 0.3

[Module_ModuleInformation]
#Threshold to deduce whether a module is stuck or not, in seconds.
threshold_stucked_module=600

[Module_Mixer]
#Define the configuration of the mixer, possible values: 1, 2 or 3
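#The three modes, summarised here from bin/Mixer.py (check the module
#docstring for the authoritative wording):
#  1: avoid any duplicate - a given item is processed only once
#  2: keep duplicates coming from different feeders, drop the rest
#  3: don't deduplicate anything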
operation_mode = 3
#Define the time, in seconds, during which a paste is considered a duplicate (1 day = 86400)
ttl_duplicate = 86400
default_unnamed_feed_name = unnamed_feeder

[TermTrackerMod]
max_execution_time = 120

[RegexTracker]
max_execution_time = 60

##### Redis #####
[Redis_Cache]
host = localhost
port = 6379
db = 0

[Redis_Log]
host = localhost
port = 6380
db = 0

[Redis_Log_submit]
host = localhost
port = 6380
db = 1

[Redis_Queues]
host = localhost
port = 6381
db = 0

[Redis_Data_Merging]
host = localhost
port = 6379
db = 1

[Redis_Paste_Name]
host = localhost
port = 6379
db = 2

[Redis_Mixer_Cache]
host = localhost
port = 6381
db = 1

##### ARDB #####
[ARDB_Curve]
host = localhost
port = 6382
db = 1

[ARDB_Sentiment]
host = localhost
port = 6382
db = 4

[ARDB_TermFreq]
host = localhost
port = 6382
db = 2

[ARDB_TermCred]
host = localhost
port = 6382
db = 5

[ARDB_DB]
host = localhost
port = 6382
db = 0

[ARDB_Trending]
host = localhost
port = 6382
db = 3

[ARDB_Tracker]
host = localhost
port = 6382
db = 3

[ARDB_Hashs]
host = localhost
db = 1

[ARDB_Tags]
host = localhost
port = 6382
db = 6

[ARDB_Metadata]
host = localhost
port = 6382
db = 7

[ARDB_Statistics]
host = localhost
port = 6382
db = 8

[ARDB_Onion]
host = localhost
port = 6382
db = 9

[Url]
cc_critical = DE

[DomClassifier]
cc = DE
cc_tld = r'\.de$'
dns = 8.8.8.8

[Mail]
dns = 8.8.8.8

[Web]
dns = 149.13.33.69

# Indexer configuration
[Indexer]
type = whoosh
path = indexdir
register = indexdir/all_index.txt
#size in MB
index_max_size = 2000

[ailleakObject]
maxDuplicateToPushToMISP=10

###############################################################################

# For multiple feeds, add them separated by "," without spaces
# e.g.: tcp://127.0.0.1:5556,tcp://127.0.0.1:5557
[ZMQ_Global]
#address = tcp://crf.circl.lu:5556
address = tcp://127.0.0.1:5556,tcp://crf.circl.lu:5556
channel = 102
bind = tcp://127.0.0.1:5556

[ZMQ_Url]
address = tcp://127.0.0.1:5004
channel = urls

[ZMQ_FetchedOnion]
address = tcp://127.0.0.1:5005
channel = FetchedOnion

[RedisPubSub]
host = localhost
port = 6381
db = 0

[Crawler]
activate_crawler = False
crawler_depth_limit = 1
default_crawler_har = True
default_crawler_png = True
default_crawler_closespider_pagecount = 50
default_crawler_user_agent = Mozilla/5.0 (Windows NT 6.1; rv:60.0) Gecko/20100101 Firefox/60.0
splash_url = http://127.0.0.1
#port or port range of the Splash docker(s), e.g. 8050-8052
splash_port = 8050-8052

[IP]
# comma-separated list of CIDR networks that you wish to be alerted for, e.g.:
#networks = 192.168.34.0/24,10.0.0.0/8,192.168.33.0/24
networks =
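# Note: with networks left empty there is no range to match, so the IP module
# should raise no alerts; add at least one CIDR to enable alerting (see
# bin/IP.py for how the value is parsed).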