[Directories]
bloomfilters = Blooms
dicofilters = Dicos
pastes = PASTES
hash = HASHS
crawled = crawled
har = CRAWLED_SCREENSHOT
screenshot = CRAWLED_SCREENSHOT/screenshot
wordtrending_csv = var/www/static/csv/wordstrendingdata
wordsfile = files/wordfile
protocolstrending_csv = var/www/static/csv/protocolstrendingdata
protocolsfile = files/protocolsfile
tldstrending_csv = var/www/static/csv/tldstrendingdata
tldsfile = faup/src/data/mozilla.tlds
domainstrending_csv = var/www/static/csv/domainstrendingdata
pystemonpath = /home/pystemon/pystemon/
sentiment_lexicon_file = sentiment/vader_lexicon.zip/vader_lexicon/vader_lexicon.txt

##### Logs ######
[Logs]
# activate syslog
ail_logs_syslog = False
ail_logs_syslog_server =
# default=514
ail_logs_syslog_port =
# ['auth', 'authpriv', 'cron', 'daemon', 'ftp', 'kern', 'lpr', 'mail', 'news', 'syslog', 'user', 'uucp', 'local0', 'local1', 'local2', 'local3', 'local4', 'local5', 'local6', 'local7']
ail_logs_syslog_facility =
# ['DEBUG', 'INFO', 'NOTICE', 'WARNING', 'ERROR', 'CRITICAL']
ail_logs_syslog_level =
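# Illustrative example only (assumed values): with syslog enabled, the keys above
# could be filled in as follows, using the default port and one of the listed
# facility/level values; the server address is a placeholder.
# ail_logs_syslog = True
# ail_logs_syslog_server = 127.0.0.1
# ail_logs_syslog_port = 514
# ail_logs_syslog_facility = user
# ail_logs_syslog_level = INFO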
##### Notifications ######
[Notifications]
ail_domain = https://localhost:7000
sender = sender@example.com
sender_host = smtp.example.com
sender_port = 1337
sender_pw = None
# Only needed when the email server credentials require a username instead of an email address
#sender_user = sender
sender_user =
# optional, for use with authenticated SMTP over SSL
# sender_pw = securepassword

##### Flask #####
[Flask]
#Proxying requests to the app
baseUrl = /
#Host to bind to
host = 127.0.0.1
#Flask server port
port = 7000
#Number of logs to display in the dashboard
max_dashboard_logs = 15
#Maximum number of characters to display in the tooltip
max_preview_char = 250
#Maximum number of characters to display in the modal
max_preview_modal = 800
#Default number of headers to display in trending graphs
default_display = 10
#Number of minutes displayed for the count of processed pastes
minute_processed_paste = 10
#Maximum line length authorized to make a diff between duplicates
DiffMaxLineLength = 10000

[AIL_2_AIL]
server_host = 0.0.0.0
server_port = 4443
local_addr =

#### Modules ####
[BankAccount]
max_execution_time = 60

[Categ]
#Minimum number of matches between the paste and the category file
matchingThreshold=1

[Credential]
#Minimum length that a credential must have to be considered as such
minimumLengthThreshold=3
#Pushed as an alert if the number of credentials is greater than this number
criticalNumberToAlert=8
#Considered a false positive if there are fewer than X matches from the top password list
minTopPassList=5

[Onion]
save_i2p = False
max_execution_time = 180

[PgpDump]
max_execution_time = 60

[Base64]
path = Base64/
max_execution_time = 60

[Binary]
path = Base64/
max_execution_time = 60

[Hex]
path = Base64/
max_execution_time = 60

[Modules_Duplicates]
#Number of months to look back
maximum_month_range = 3
#Threshold at which two pastes are considered duplicates for ssdeep.
threshold_duplicate_ssdeep = 50
#Threshold at which two pastes are considered duplicates for tlsh.
threshold_duplicate_tlsh = 52
#Minimum size of the pastes considered
min_paste_size = 0.3

[Module_ModuleInformation]
#Threshold, in seconds, used to decide whether a module is stuck.
threshold_stucked_module=600

[Module_Mixer]
#Define the operation mode of the mixer; possible values: 1, 2 or 3
operation_mode = 3
#Define the time, in seconds, during which a paste is considered a duplicate (1 day = 86400)
ttl_duplicate = 86400
default_unnamed_feed_name = unnamed_feeder

[Tracker_Term]
max_execution_time = 120

[Tracker_Regex]
max_execution_time = 60

##### Redis #####
[Redis_Cache]
host = localhost
port = 6379
db = 0

[Redis_Log]
host = localhost
port = 6380
db = 0

[Redis_Log_submit]
host = localhost
port = 6380
db = 1

[Redis_Queues]
host = localhost
port = 6381
db = 0

[Redis_Mixer_Cache]
host = localhost
port = 6381
db = 1

##### ARDB #####
[ARDB_DB]
host = localhost
port = 6382
db = 0

[DB_Tracking]
host = localhost
port = 6382
db = 2

[ARDB_Sentiment]
host = localhost
port = 6382
db = 4

[ARDB_TermCred]
host = localhost
port = 6382
db = 5

[ARDB_Trending]
host = localhost
port = 6382
db = 3

[ARDB_Tracker]
host = localhost
port = 6382
db = 3

[ARDB_Hashs]
host = localhost
db = 1

[ARDB_Tags]
host = localhost
port = 6382
db = 6

[ARDB_Metadata]
host = localhost
port = 6382
db = 7

[ARDB_Statistics]
host = localhost
port = 6382
db = 8

[ARDB_Onion]
host = localhost
port = 6382
db = 9

[ARDB_Objects]
host = localhost
port = 6382
db = 10

[Kvrocks_DB]
host = localhost
port = 6383
db = 0

[Url]
cc_critical = DE

[DomClassifier]
cc = DE
cc_tld = r'\.de$'
dns = 8.8.8.8

[Mail]
dns = 8.8.8.8

[Web]
dns = 8.8.8.8

# Indexer configuration
[Indexer]
type = whoosh
path = indexdir
register = indexdir/all_index.txt
#size in MB
index_max_size = 2000

[ailleakObject]
maxDuplicateToPushToMISP=10

###############################################################################

# For multiple feeds, separate them with "," without spaces
# e.g.: tcp://127.0.0.1:5556,tcp://127.0.0.1:5557
[ZMQ_Global]
#address = tcp://crf.circl.lu:5556
# address = tcp://127.0.0.1:5556,tcp://crf.circl.lu:5556
address = tcp://127.0.0.1:5556
channel = 102
bind = tcp://127.0.0.1:5556

[ZMQ_FetchedOnion]
address = tcp://127.0.0.1:5005
channel = FetchedOnion

[RedisPubSub]
host = localhost
port = 6381
db = 0

[Crawler]
activate_crawler = False
default_depth_limit = 1
default_har = True
default_screenshot = True
onion_proxy = onion.foundation

[IP]
# list of comma-separated CIDRs that you wish to be alerted for, e.g.:
#networks = 192.168.34.0/24,10.0.0.0/8,192.168.33.0/24
networks =

[SubmitPaste]
# 1 MB max text size for text submission
TEXT_MAX_SIZE = 1000000
# 1 GB max file size for file submission
FILE_MAX_SIZE = 1000000000
# Allowed file extensions for file submission, comma-separated
# TODO add zip, gz and tar.gz
FILE_ALLOWED_EXTENSIONS = txt,sh,pdf
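# Illustrative example only (not currently supported; see the TODO above): once archive
# formats are handled, they would be appended to the same comma-separated list, e.g.:
# FILE_ALLOWED_EXTENSIONS = txt,sh,pdf,zip,gz,tar.gz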