chg: [DB] remove ARDB + temp disable MISP import and auto export

pull/594/head
Terrtia 2023-04-04 10:25:01 +02:00
parent 54a0bcb022
commit 80efc9cdbb
No known key found for this signature in database
GPG Key ID: 1E1B1F50D84613D0
11 changed files with 328 additions and 330 deletions

View File

@@ -215,6 +215,8 @@ function launching_scripts
     # sleep 0.1
     echo -e $GREEN"\t* Launching scripts"$DEFAULT
+    screen -S "Script_AIL" -X screen -t "Mixer" bash -c "cd ${AIL_BIN}/modules; ${ENV_PY} ./Mixer.py; read x"
+    sleep 0.1
     screen -S "Script_AIL" -X screen -t "Global" bash -c "cd ${AIL_BIN}/modules; ${ENV_PY} ./Global.py; read x"
     sleep 0.1
     screen -S "Script_AIL" -X screen -t "Categ" bash -c "cd ${AIL_BIN}/modules; ${ENV_PY} ./Categ.py; read x"
@@ -310,10 +312,8 @@ function launching_scripts
     ##################################
     screen -S "Script_AIL" -X screen -t "ModuleInformation" bash -c "cd ${AIL_BIN}; ${ENV_PY} ./ModulesInformationV2.py -k 0 -c 1; read x"
     sleep 0.1
-    screen -S "Script_AIL" -X screen -t "Mixer" bash -c "cd ${AIL_BIN}; ${ENV_PY} ./Mixer.py; read x"
-    sleep 0.1
-    screen -S "Script_AIL" -X screen -t "MISPtheHIVEfeeder" bash -c "cd ${AIL_BIN}; ${ENV_PY} ./MISP_The_Hive_feeder.py; read x"
-    sleep 0.1
+    # screen -S "Script_AIL" -X screen -t "MISPtheHIVEfeeder" bash -c "cd ${AIL_BIN}; ${ENV_PY} ./MISP_The_Hive_feeder.py; read x"
+    # sleep 0.1
     screen -S "Script_AIL" -X screen -t "IPAddress" bash -c "cd ${AIL_BIN}; ${ENV_PY} ./IPAddress.py; read x"
@@ -610,7 +610,6 @@ function launch_all
     checking_configuration;
     update;
     launch_redis;
-    launch_ardb;
    launch_kvrocks;
    launch_logs;
    launch_queues;

View File

@@ -1,24 +1,24 @@
 #!/usr/bin/env python3
 # -*-coding:UTF-8 -*
-import os
-import sys
-import uuid
-
-sys.path.append(os.environ['AIL_BIN'])
-##################################
-# Import Project packages
-##################################
-from lib.ConfigLoader import ConfigLoader
+# import os
+# import sys
+# import uuid
+#
+# sys.path.append(os.environ['AIL_BIN'])
+# ##################################
+# # Import Project packages
+# ##################################
+# from lib.ConfigLoader import ConfigLoader
 ## LOAD CONFIG ##
-config_loader = ConfigLoader()
-
-r_serv_metadata = config_loader.get_redis_conn("ARDB_Metadata") ######################################
-config_loader = None
+# config_loader = ConfigLoader()
+#
+# r_serv_metadata = config_loader.get_redis_conn("ARDB_Metadata") ######################################
+# config_loader = None
 ## -- ##
-sys.path.append('../../configs/keys')
+# sys.path.append('../../configs/keys')
 ##################################
 # Import Keys
 ##################################

View File

@@ -1,248 +1,248 @@
 #!/usr/bin/env python3
 # -*-coding:UTF-8 -*
-import os
-import sys
-import uuid
-
-from hashlib import sha1, sha256
-
-sys.path.append(os.environ['AIL_BIN'])
-from lib.objects import ail_objects
-
-
-from lib.objects import Items
-
-# MISP
-from pymisp import MISPEvent, MISPObject, PyMISP
-
-# # TODO: deplace me in another fil
-def get_global_id(obj_type, obj_id, obj_subtype=None):
-    if obj_subtype:
-        return '{}:{}:{}'.format(obj_type, obj_subtype, obj_id)
-    else:
-        return '{}:{}'.format(obj_type, obj_id)
-
-# sub type
-# obj type
-# obj value
-def get_global_id_from_id(global_id):
-    obj_meta = {}
-    global_id = global_id.split(':', 3)
-    if len(global_id) > 2:
-        obj_meta['type'] = global_id[0]
-        obj_meta['subtype'] = global_id[1]
-        obj_meta['id'] = global_id[2]
-    else:
-        obj_meta['type'] = global_id[0]
-        obj_meta['subtype'] = ''
-        obj_meta['id'] = global_id[1]
-    return obj_meta
-
-def get_import_dir():
-    return os.path.join(os.environ['AIL_HOME'], 'temp/import')
-
-def sanitize_import_file_path(filename):
-    IMPORT_FOLDER = get_import_dir()
-    filename = os.path.join(IMPORT_FOLDER, filename)
-    filename = os.path.realpath(filename)
-    # path traversal
-    if not os.path.commonprefix([filename, IMPORT_FOLDER]) == IMPORT_FOLDER:
-        return os.path.join(IMPORT_FOLDER, str(uuid.uuid4()) + '.json')
-    # check if file already exist
-    if os.path.isfile(filename):
-        return os.path.join(IMPORT_FOLDER, str(uuid.uuid4()) + '.json')
-    return filename
-
-def get_misp_obj_tag(misp_obj):
-    if misp_obj.attributes:
-        misp_tags = misp_obj.attributes[0].tags
-        tags = []
-        for misp_tag in misp_tags:
-            tags.append(misp_tag.name)
-        return tags
-    else:
-        return []
-
-def get_object_metadata(misp_obj):
-    obj_meta = {}
-    if 'first_seen' in misp_obj.keys():
-        obj_meta['first_seen'] = misp_obj.first_seen
-    if 'last_seen' in misp_obj.keys():
-        obj_meta['last_seen'] = misp_obj.last_seen
-    obj_meta['tags'] = get_misp_obj_tag(misp_obj)
-    return obj_meta
-
-def unpack_item_obj(map_uuid_global_id, misp_obj):
-    obj_meta = get_object_metadata(misp_obj)
-    obj_id = None
-    io_content = None
-
-    for attribute in misp_obj.attributes:
-        if attribute.object_relation == 'raw-data':
-            obj_id = attribute.value # # TODO: sanitize
-            io_content = attribute.data # # TODO: check if type == io
-
-    if obj_id and io_content:
-        res = Items.create_item(obj_id, obj_meta, io_content)
-
-        map_uuid_global_id[misp_obj.uuid] = get_global_id('item', obj_id)
-
-
-
-## TODO: handle multiple pgp in the same object
-def unpack_obj_pgp(map_uuid_global_id, misp_obj):
-    # TODO ail_objects import_misp_object(misp_obj)
-    pass
-    # # get obj sub type
-    # obj_attr = misp_obj.attributes[0]
-    # obj_id = obj_attr.value
-    # if obj_attr.object_relation == 'key-id':
-    #     obj_subtype = 'key'
-    # elif obj_attr.object_relation == 'user-id-name':
-    #     obj_subtype = 'name'
-    # elif obj_attr.object_relation == 'user-id-email':
-    #     obj_subtype = 'mail'
-    #
-    # if obj_id and obj_subtype:
-    #     obj_meta = get_object_metadata(misp_obj)
-    #     # res = Pgp.pgp.create_correlation(obj_subtype, obj_id, obj_meta)
-    #     # TODO ail_objects import_misp_object(misp_obj)
-    #
-    #     map_uuid_global_id[misp_obj.uuid] = get_global_id('pgp', obj_id, obj_subtype=obj_subtype)
-
-
-def unpack_obj_cryptocurrency(map_uuid_global_id, misp_obj):
-    # TODO ail_objects import_misp_object(misp_obj)
-    pass
-    # #
-    # obj_id = None
-    # obj_subtype = None
-    # for attribute in misp_obj.attributes:
-    #     if attribute.object_relation == 'address': # # TODO: handle xmr address field
-    #         obj_id = attribute.value
-    #     elif attribute.object_relation == 'symbol':
-    #         obj_subtype = Cryptocurrency.get_cryptocurrency_type(attribute.value)
-    # #
-    # # valid cryptocurrency type
-    # if obj_subtype and obj_id:
-    #     obj_meta = get_object_metadata(misp_obj)
-    #     # res = Cryptocurrency.cryptocurrency.create_correlation(obj_subtype, obj_id, obj_meta)
-    # #
-    # map_uuid_global_id[misp_obj.uuid] = get_global_id('cryptocurrency', obj_id, obj_subtype=obj_subtype)
-
-def get_obj_type_from_relationship(misp_obj):
-    obj_uuid = misp_obj.uuid
-    obj_type = None
-
-    for relation in misp_obj.ObjectReference:
-        if relation.object_uuid == obj_uuid:
-            if relation.relationship_type == "screenshot-of":
-                return 'screenshot'
-            if relation.relationship_type == "included-in":
-                obj_type = 'decoded'
-    return obj_type
-
-
-# # TODO: covert md5 and sha1 to expected
-def unpack_file(map_uuid_global_id, misp_obj):
-
-    obj_type = get_obj_type_from_relationship(misp_obj)
-    if obj_type:
-        obj_id = None
-        io_content = None
-        for attribute in misp_obj.attributes:
-            # get file content
-            if attribute.object_relation == 'attachment':
-                io_content = attribute.data
-            elif attribute.object_relation == 'malware-sample':
-                io_content = attribute.data
-
-            # # TODO: use/verify specified mimetype
-            elif attribute.object_relation == 'mimetype':
-                #print(attribute.value)
-                pass
-
-            # # TODO: support more
-            elif attribute.object_relation == 'sha1' and obj_type == 'decoded':
-                obj_id = attribute.value
-            elif attribute.object_relation == 'sha256' and obj_type == 'screenshot':
-                obj_id = attribute.value
-
-        # get SHA1/sha256
-        if io_content and not obj_id:
-            if obj_type=='screenshot':
-                obj_id = sha256(io_content.getvalue()).hexdigest()
-            else: # decoded file
-                obj_id = sha1(io_content.getvalue()).hexdigest()
-
-        if obj_id and io_content:
-            obj_meta = get_object_metadata(misp_obj)
-            if obj_type == 'screenshot':
-                # TODO MIGRATE + REFACTOR ME
-                # Screenshot.create_screenshot(obj_id, obj_meta, io_content)
-                map_uuid_global_id[misp_obj.uuid] = get_global_id('image', obj_id)
-            else: #decoded
-                # TODO MIGRATE + REFACTOR ME
-                # Decoded.create_decoded(obj_id, obj_meta, io_content)
-                map_uuid_global_id[misp_obj.uuid] = get_global_id('decoded', obj_id)
-
-
-def get_misp_import_fct(map_uuid_global_id, misp_obj):
-    if misp_obj.name == 'ail-leak':
-        unpack_item_obj(map_uuid_global_id, misp_obj)
-    elif misp_obj.name == 'domain-crawled':
-        pass
-    elif misp_obj.name == 'pgp-meta':
-        unpack_obj_pgp(map_uuid_global_id, misp_obj)
-    elif misp_obj.name == 'coin-address':
-        unpack_obj_cryptocurrency(map_uuid_global_id, misp_obj)
-    elif misp_obj.name == 'file':
-        unpack_file(map_uuid_global_id, misp_obj)
-
-# import relationship between objects
-def create_obj_relationships(map_uuid_global_id, misp_obj):
-    if misp_obj.uuid in map_uuid_global_id:
-        for relationship in misp_obj.ObjectReference:
-            if relationship.referenced_uuid in map_uuid_global_id:
-                obj_meta_src = get_global_id_from_id(map_uuid_global_id[relationship.object_uuid])
-                obj_meta_target = get_global_id_from_id(map_uuid_global_id[relationship.referenced_uuid])
-
-                if obj_meta_src == 'decoded' or obj_meta_src == 'item':
-                    print('000000')
-                    print(obj_meta_src)
-                    print(obj_meta_target)
-                    print('111111')
-
-                # TODO CREATE OBJ RELATIONSHIP
-
-def import_objs_from_file(filepath):
-    map_uuid_global_id = {}
-
-    event_to_import = MISPEvent()
-    try:
-        event_to_import.load_file(filepath)
-    except:
-        return map_uuid_global_id
-
-    for misp_obj in event_to_import.objects:
-        get_misp_import_fct(map_uuid_global_id, misp_obj)
-
-    for misp_obj in event_to_import.objects:
-        create_obj_relationships(map_uuid_global_id, misp_obj)
-
-    return map_uuid_global_id
-
-
-if __name__ == '__main__':
-
-    # misp = PyMISP('https://127.0.0.1:8443/', 'uXgcN42b7xuL88XqK5hubwD8Q8596VrrBvkHQzB0', False)
-
-    import_objs_from_file('ail_export_c777a4d1-5f63-4fa2-86c0-07da677bdac2.json')
-
-    #Screenshot.delete_screenshot('a92d459f70c4dea8a14688f585a5e2364be8b91fbf924290ead361d9b909dcf1')
-    #Decoded.delete_decoded('d59a110ab233fe87cefaa0cf5603b047b432ee07')
-    #Pgp.pgp.delete_correlation('key', '0xA4BB02A75E6AF448')
-
-    #Item.delete_item('submitted/2020/02/10/b2485894-4325-469b-bc8f-6ad1c2dbb202.gz')
-    #Item.delete_item('archive/pastebin.com_pro/2020/02/10/K2cerjP4.gz')
+# import os
+# import sys
+# import uuid
+#
+# from hashlib import sha1, sha256
+#
+# sys.path.append(os.environ['AIL_BIN'])
+# from lib.objects import ail_objects
+#
+#
+# from lib.objects import Items
+#
+# # MISP
+# from pymisp import MISPEvent, MISPObject, PyMISP
+#
+# # # TODO: deplace me in another fil
+# def get_global_id(obj_type, obj_id, obj_subtype=None):
+#     if obj_subtype:
+#         return '{}:{}:{}'.format(obj_type, obj_subtype, obj_id)
+#     else:
+#         return '{}:{}'.format(obj_type, obj_id)
+#
+# # sub type
+# # obj type
+# # obj value
+# def get_global_id_from_id(global_id):
+#     obj_meta = {}
+#     global_id = global_id.split(':', 3)
+#     if len(global_id) > 2:
+#         obj_meta['type'] = global_id[0]
+#         obj_meta['subtype'] = global_id[1]
+#         obj_meta['id'] = global_id[2]
+#     else:
+#         obj_meta['type'] = global_id[0]
+#         obj_meta['subtype'] = ''
+#         obj_meta['id'] = global_id[1]
+#     return obj_meta
+#
+# def get_import_dir():
+#     return os.path.join(os.environ['AIL_HOME'], 'temp/import')
+#
+# def sanitize_import_file_path(filename):
+#     IMPORT_FOLDER = get_import_dir()
+#     filename = os.path.join(IMPORT_FOLDER, filename)
+#     filename = os.path.realpath(filename)
+#     # path traversal
+#     if not os.path.commonprefix([filename, IMPORT_FOLDER]) == IMPORT_FOLDER:
+#         return os.path.join(IMPORT_FOLDER, str(uuid.uuid4()) + '.json')
+#     # check if file already exist
+#     if os.path.isfile(filename):
+#         return os.path.join(IMPORT_FOLDER, str(uuid.uuid4()) + '.json')
+#     return filename
+#
+# def get_misp_obj_tag(misp_obj):
+#     if misp_obj.attributes:
+#         misp_tags = misp_obj.attributes[0].tags
+#         tags = []
+#         for misp_tag in misp_tags:
+#             tags.append(misp_tag.name)
+#         return tags
+#     else:
+#         return []
+#
+# def get_object_metadata(misp_obj):
+#     obj_meta = {}
+#     if 'first_seen' in misp_obj.keys():
+#         obj_meta['first_seen'] = misp_obj.first_seen
+#     if 'last_seen' in misp_obj.keys():
+#         obj_meta['last_seen'] = misp_obj.last_seen
+#     obj_meta['tags'] = get_misp_obj_tag(misp_obj)
+#     return obj_meta
+#
+# def unpack_item_obj(map_uuid_global_id, misp_obj):
+#     obj_meta = get_object_metadata(misp_obj)
+#     obj_id = None
+#     io_content = None
+#
+#     for attribute in misp_obj.attributes:
+#         if attribute.object_relation == 'raw-data':
+#             obj_id = attribute.value # # TODO: sanitize
+#             io_content = attribute.data # # TODO: check if type == io
+#
+#     if obj_id and io_content:
+#         res = Items.create_item(obj_id, obj_meta, io_content)
+#
+#         map_uuid_global_id[misp_obj.uuid] = get_global_id('item', obj_id)
+#
+#
+#
+# ## TODO: handle multiple pgp in the same object
+# def unpack_obj_pgp(map_uuid_global_id, misp_obj):
+#     # TODO ail_objects import_misp_object(misp_obj)
+#     pass
+#     # # get obj sub type
+#     # obj_attr = misp_obj.attributes[0]
+#     # obj_id = obj_attr.value
+#     # if obj_attr.object_relation == 'key-id':
+#     #     obj_subtype = 'key'
+#     # elif obj_attr.object_relation == 'user-id-name':
+#     #     obj_subtype = 'name'
+#     # elif obj_attr.object_relation == 'user-id-email':
+#     #     obj_subtype = 'mail'
+#     #
+#     # if obj_id and obj_subtype:
+#     #     obj_meta = get_object_metadata(misp_obj)
+#     #     # res = Pgp.pgp.create_correlation(obj_subtype, obj_id, obj_meta)
+#     #     # TODO ail_objects import_misp_object(misp_obj)
+#     #
+#     #     map_uuid_global_id[misp_obj.uuid] = get_global_id('pgp', obj_id, obj_subtype=obj_subtype)
+#
+#
+# def unpack_obj_cryptocurrency(map_uuid_global_id, misp_obj):
+#     # TODO ail_objects import_misp_object(misp_obj)
+#     pass
+#     # #
+#     # obj_id = None
+#     # obj_subtype = None
+#     # for attribute in misp_obj.attributes:
+#     #     if attribute.object_relation == 'address': # # TODO: handle xmr address field
+#     #         obj_id = attribute.value
+#     #     elif attribute.object_relation == 'symbol':
+#     #         obj_subtype = Cryptocurrency.get_cryptocurrency_type(attribute.value)
+#     # #
+#     # # valid cryptocurrency type
+#     # if obj_subtype and obj_id:
+#     #     obj_meta = get_object_metadata(misp_obj)
+#     #     # res = Cryptocurrency.cryptocurrency.create_correlation(obj_subtype, obj_id, obj_meta)
+#     # #
+#     # map_uuid_global_id[misp_obj.uuid] = get_global_id('cryptocurrency', obj_id, obj_subtype=obj_subtype)
+#
+# def get_obj_type_from_relationship(misp_obj):
+#     obj_uuid = misp_obj.uuid
+#     obj_type = None
+#
+#     for relation in misp_obj.ObjectReference:
+#         if relation.object_uuid == obj_uuid:
+#             if relation.relationship_type == "screenshot-of":
+#                 return 'screenshot'
+#             if relation.relationship_type == "included-in":
+#                 obj_type = 'decoded'
+#     return obj_type
+#
+#
+# # # TODO: covert md5 and sha1 to expected
+# def unpack_file(map_uuid_global_id, misp_obj):
+#
+#     obj_type = get_obj_type_from_relationship(misp_obj)
+#     if obj_type:
+#         obj_id = None
+#         io_content = None
+#         for attribute in misp_obj.attributes:
+#             # get file content
+#             if attribute.object_relation == 'attachment':
+#                 io_content = attribute.data
+#             elif attribute.object_relation == 'malware-sample':
+#                 io_content = attribute.data
+#
+#             # # TODO: use/verify specified mimetype
+#             elif attribute.object_relation == 'mimetype':
+#                 #print(attribute.value)
+#                 pass
+#
+#             # # TODO: support more
+#             elif attribute.object_relation == 'sha1' and obj_type == 'decoded':
+#                 obj_id = attribute.value
+#             elif attribute.object_relation == 'sha256' and obj_type == 'screenshot':
+#                 obj_id = attribute.value
+#
+#         # get SHA1/sha256
+#         if io_content and not obj_id:
+#             if obj_type=='screenshot':
+#                 obj_id = sha256(io_content.getvalue()).hexdigest()
+#             else: # decoded file
+#                 obj_id = sha1(io_content.getvalue()).hexdigest()
+#
+#         if obj_id and io_content:
+#             obj_meta = get_object_metadata(misp_obj)
+#             if obj_type == 'screenshot':
+#                 # TODO MIGRATE + REFACTOR ME
+#                 # Screenshot.create_screenshot(obj_id, obj_meta, io_content)
+#                 map_uuid_global_id[misp_obj.uuid] = get_global_id('image', obj_id)
+#             else: #decoded
+#                 # TODO MIGRATE + REFACTOR ME
+#                 # Decoded.create_decoded(obj_id, obj_meta, io_content)
+#                 map_uuid_global_id[misp_obj.uuid] = get_global_id('decoded', obj_id)
+#
+#
+# def get_misp_import_fct(map_uuid_global_id, misp_obj):
+#     if misp_obj.name == 'ail-leak':
+#         unpack_item_obj(map_uuid_global_id, misp_obj)
+#     elif misp_obj.name == 'domain-crawled':
+#         pass
+#     elif misp_obj.name == 'pgp-meta':
+#         unpack_obj_pgp(map_uuid_global_id, misp_obj)
+#     elif misp_obj.name == 'coin-address':
+#         unpack_obj_cryptocurrency(map_uuid_global_id, misp_obj)
+#     elif misp_obj.name == 'file':
+#         unpack_file(map_uuid_global_id, misp_obj)
+#
+# # import relationship between objects
+# def create_obj_relationships(map_uuid_global_id, misp_obj):
+#     if misp_obj.uuid in map_uuid_global_id:
+#         for relationship in misp_obj.ObjectReference:
+#             if relationship.referenced_uuid in map_uuid_global_id:
+#                 obj_meta_src = get_global_id_from_id(map_uuid_global_id[relationship.object_uuid])
+#                 obj_meta_target = get_global_id_from_id(map_uuid_global_id[relationship.referenced_uuid])
+#
+#                 if obj_meta_src == 'decoded' or obj_meta_src == 'item':
+#                     print('000000')
+#                     print(obj_meta_src)
+#                     print(obj_meta_target)
+#                     print('111111')
+#
+#                 # TODO CREATE OBJ RELATIONSHIP
+#
+# def import_objs_from_file(filepath):
+#     map_uuid_global_id = {}
+#
+#     event_to_import = MISPEvent()
+#     try:
+#         event_to_import.load_file(filepath)
+#     except:
+#         return map_uuid_global_id
+#
+#     for misp_obj in event_to_import.objects:
+#         get_misp_import_fct(map_uuid_global_id, misp_obj)
+#
+#     for misp_obj in event_to_import.objects:
+#         create_obj_relationships(map_uuid_global_id, misp_obj)
+#
+#     return map_uuid_global_id
+#
+#
+# if __name__ == '__main__':
+#
+#     # misp = PyMISP('https://127.0.0.1:8443/', 'uXgcN42b7xuL88XqK5hubwD8Q8596VrrBvkHQzB0', False)
+#
+#     import_objs_from_file('ail_export_c777a4d1-5f63-4fa2-86c0-07da677bdac2.json')
+#
+#     #Screenshot.delete_screenshot('a92d459f70c4dea8a14688f585a5e2364be8b91fbf924290ead361d9b909dcf1')
+#     #Decoded.delete_decoded('d59a110ab233fe87cefaa0cf5603b047b432ee07')
+#     #Pgp.pgp.delete_correlation('key', '0xA4BB02A75E6AF448')
+#
+#     #Item.delete_item('submitted/2020/02/10/b2485894-4325-469b-bc8f-6ad1c2dbb202.gz')
+#     #Item.delete_item('archive/pastebin.com_pro/2020/02/10/K2cerjP4.gz')

View File

@@ -3,7 +3,7 @@
 import os
 import sys
-import redis
+from uuid import uuid4
 sys.path.append(os.environ['AIL_BIN'])
 ##################################
@@ -20,6 +20,9 @@ AIL_OBJECTS = sorted({'cve', 'cryptocurrency', 'decoded', 'domain', 'item', 'pgp
 def get_ail_uuid():
     return r_serv_db.get('ail:uuid')
+def generate_uuid():
+    return str(uuid4())
 #### AIL OBJECTS ####
 def get_all_objects():

View File

@@ -17,7 +17,6 @@ from lib import Tag
 config_loader = ConfigLoader.ConfigLoader()
 r_cache = config_loader.get_redis_conn("Redis_Cache")
-r_serv_metadata = config_loader.get_redis_conn("ARDB_Metadata")
 r_object = config_loader.get_db_conn("Kvrocks_Objects")
 config_loader = None
@@ -180,21 +179,21 @@ def get_all_domain_node_by_item_id(item_id, l_nodes=[]):
 # FIXME:
 #### UNKNOW SECTION ####
-def get_obj_id_item_id(parent_type, parent_id):
-    all_parents_type = ['twitter_id', 'jabber_id', 'telegram_id']
-    if parent_type in all_parents_type:
-        return r_serv_metadata.hget('map:{}:item_id'.format(parent_type), parent_id)
-    else:
-        return None
-# # TODO: # FIXME: TO MIGRATE ??????
-def add_map_obj_id_item_id(obj_id, item_id, obj_type):
-    if obj_type == 'twitter_id':
-        r_serv_metadata.hset('map:twitter_id:item_id', obj_id, item_id)
-    if obj_type == 'jabber_id':
-        r_serv_metadata.hset('map:jabber_id:item_id', obj_id, item_id)
-    if obj_type == 'telegram_id':
-        r_serv_metadata.hset('map:telegram_id:item_id', obj_id, item_id)
+# def get_obj_id_item_id(parent_type, parent_id):
+#     all_parents_type = ['twitter_id', 'jabber_id', 'telegram_id']
+#     if parent_type in all_parents_type:
+#         return r_serv_metadata.hget('map:{}:item_id'.format(parent_type), parent_id)
+#     else:
+#         return None
+# # # TODO: # FIXME: TO MIGRATE ??????
+# def add_map_obj_id_item_id(obj_id, item_id, obj_type):
+#     if obj_type == 'twitter_id':
+#         r_serv_metadata.hset('map:twitter_id:item_id', obj_id, item_id)
+#     if obj_type == 'jabber_id':
+#         r_serv_metadata.hset('map:jabber_id:item_id', obj_id, item_id)
+#     if obj_type == 'telegram_id':
+#         r_serv_metadata.hset('map:telegram_id:item_id', obj_id, item_id)
 # delete twitter id

View File

@@ -5,8 +5,6 @@
 The Submit paste module
 ================
-This module is taking paste in redis queue ARDB_DB and submit to global
 """
 ##################################
@@ -15,7 +13,6 @@ This module is taking paste in redis queue ARDB_DB and submit to global
 import os
 import sys
 import gzip
-import io
 import base64
 import datetime
 import time
@@ -51,11 +48,8 @@ class SubmitPaste(AbstractModule):
         super(SubmitPaste, self).__init__(queue_name='submit_paste')
         # TODO KVROCKS
-        self.r_serv_db = ConfigLoader.ConfigLoader().get_redis_conn("ARDB_DB")
+        self.r_serv_db = ConfigLoader.ConfigLoader().get_redis_conn("Kvrocks_DB")
         self.r_serv_log_submit = ConfigLoader.ConfigLoader().get_redis_conn("Redis_Log_submit")
-        self.r_serv_tags = ConfigLoader.ConfigLoader().get_redis_conn("ARDB_Tags")
-        self.r_serv_metadata = ConfigLoader.ConfigLoader().get_redis_conn("ARDB_Metadata")
-        self.serv_statistics = ConfigLoader.ConfigLoader().get_redis_conn("ARDB_Statistics")
         self.pending_seconds = 3
@@ -305,7 +299,6 @@ class SubmitPaste(AbstractModule):
             self.r_serv_log_submit.sadd(f'{uuid}:paste_submit_link', rel_item_path)
             curr_date = datetime.date.today()
-            self.serv_statistics.hincrby(curr_date.strftime("%Y%m%d"),'submit_paste', 1)
             self.redis_logger.debug("paste submitted")
         else:
             self.addError(uuid, f'File: {save_path} already exist in submitted pastes')
@@ -335,7 +328,6 @@ class SubmitPaste(AbstractModule):
         self.addError(uuid, errorMessage)
         self.r_serv_log_submit.set(f'{uuid}:end', 1)
         curr_date = datetime.date.today()
-        self.serv_statistics.hincrby(curr_date.strftime("%Y%m%d"), 'submit_abord', 1)
         self.remove_submit_uuid(uuid)
         # # TODO: use Item function

View File

@@ -16,7 +16,7 @@ from lib import ConfigLoader
 config_loader = ConfigLoader.ConfigLoader()
-r_serv_db = config_loader.get_redis_conn("ARDB_DB")
+r_serv_db = config_loader.get_redis_conn("Kvrocks_DB")
 r_serv_log_submit = config_loader.get_redis_conn("Redis_Log_submit")
 config_loader = None
@@ -62,11 +62,11 @@ def create_import_queue(tags, galaxy, paste_content, UUID, password=None, isfile
 def check_import_status(UUID):
     if not is_valid_uuid_v4(UUID):
-        return ({'status': 'error', 'reason': 'Invalid uuid'}, 400)
+        return {'status': 'error', 'reason': 'Invalid uuid'}, 400
     processing = r_serv_log_submit.get(UUID + ':processing')
     if not processing:
-        return ({'status': 'error', 'reason': 'Unknown uuid'}, 404)
+        return {'status': 'error', 'reason': 'Unknown uuid'}, 404
     # nb_total = r_serv_log_submit.get(UUID + ':nb_total')
     # nb_sucess = r_serv_log_submit.get(UUID + ':nb_sucess')
@@ -90,4 +90,4 @@ def check_import_status(UUID):
         status = 'imported'
     dict_import_status['status'] = status
-    return (dict_import_status, 200)
+    return dict_import_status, 200

View File

@@ -14,7 +14,7 @@ if [ -z "$VIRTUAL_ENV" ]; then
     echo export AIL_BIN=$(pwd)/bin/ >> ./AILENV/bin/activate
     echo export AIL_FLASK=$(pwd)/var/www/ >> ./AILENV/bin/activate
     echo export AIL_REDIS=$(pwd)/redis/src/ >> ./AILENV/bin/activate
-    echo export AIL_ARDB=$(pwd)/ardb/src/ >> ./AILENV/bin/activate
+    echo export AIL_KVROCKS=$(pwd)/kvrocks/src/ >> ./AILENV/bin/activate
 fi

View File

@@ -84,10 +84,10 @@ sudo make install
 popd
 # ARDB #
-test ! -d ardb/ && git clone https://github.com/ail-project/ardb.git
-pushd ardb/
-make
-popd
+#test ! -d ardb/ && git clone https://github.com/ail-project/ardb.git
+#pushd ardb/
+#make
+#popd
 DEFAULT_HOME=$(pwd)
@@ -148,8 +148,8 @@ echo "AIL current version:"
 git describe --abbrev=0 --tags
 popd
-# LAUNCH ARDB
-bash ${AIL_BIN}/LAUNCH.sh -lav &
+# LAUNCH Kvrocks
+bash ${AIL_BIN}/LAUNCH.sh -lkv &
 wait
 echo ""

View File

@@ -25,9 +25,7 @@ from flask_login import login_required
 ##################################
 # Import Project packages
 ##################################
-from export import Export
 from lib import Tag
-from lib.objects.Items import Item
 from packages import Import_helper
@@ -75,7 +73,7 @@ def limit_content_length():
 # ============ FUNCTIONS ============
 def allowed_file(filename):
-    if not '.' in filename:
+    if '.' not in filename:
         return True
     else:
         file_ext = filename.rsplit('.', 1)[1].lower()
@@ -86,7 +84,7 @@ def allowed_file(filename):
 def clean_filename(filename, whitelist=valid_filename_chars, replace=' '):
     # replace characters
     for r in replace:
-        filename = filename.replace(r,'_')
+        filename = filename.replace(r, '_')
     # keep only valid ascii chars
     cleaned_filename = unicodedata.normalize('NFKD', filename).encode('ASCII', 'ignore').decode()
@@ -116,8 +114,6 @@ def PasteSubmit_page():
 @login_analyst
 @limit_content_length()
 def submit():
-    #paste_name = request.form['paste_name']
     logger.debug('submit')
     password = request.form['archive_pass']
@@ -127,10 +123,10 @@ def submit():
     paste_source = request.form['paste_source']
     if paste_source:
         # limit source length
         paste_source = paste_source.replace('/', '')[:80]
         if paste_source in ['crawled', 'tests']:
-            content = f'Invalid source'
+            content = 'Invalid source'
             logger.info(paste_source)
             return content, 400
@@ -150,9 +146,9 @@ def submit():
     submitted_tag = 'infoleak:submission="manual"'
-    #active taxonomies
+    # active taxonomies
     active_taxonomies = Tag.get_active_taxonomies()
-    #active galaxies
+    # active galaxies
     active_galaxies = Tag.get_active_galaxies()
     if ltags or ltagsgalaxies:
@@ -179,16 +175,12 @@ def submit():
         # get UUID
         UUID = str(uuid.uuid4())
-        '''if paste_name:
-            # clean file name
-            UUID = clean_filename(paste_name)'''
         # create submitted dir
         if not os.path.exists(UPLOAD_FOLDER):
            logger.debug('create folder')
            os.makedirs(UPLOAD_FOLDER)
-        if not '.' in file_import.filename:
+        if '.' not in file_import.filename:
            logger.debug('add UUID to path')
            full_path = os.path.join(UPLOAD_FOLDER, UUID)
         else:
@@ -202,23 +194,22 @@ def submit():
            full_path = os.path.join(UPLOAD_FOLDER, name)
        logger.debug(f'full path {full_path}')
-       #Flask verify the file size
+       # Flask verify the file size
        file_import.save(full_path)
        logger.debug('file saved')
        Import_helper.create_import_queue(ltags, ltagsgalaxies, full_path, UUID, password, True)
        return render_template("submit_items.html",
-                              active_taxonomies = active_taxonomies,
-                              active_galaxies = active_galaxies,
-                              UUID = UUID)
+                              active_taxonomies=active_taxonomies,
+                              active_galaxies=active_galaxies,
+                              UUID=UUID)
    else:
        content = f'wrong file type, allowed_extensions: {allowed_extensions} or remove the extension'
        logger.info(content)
        return content, 400
 elif paste_content != '':
     logger.debug(f'entering text paste management')
     if sys.getsizeof(paste_content) < Flask_config.SUBMIT_PASTE_TEXT_MAX_SIZE:
@@ -242,7 +233,6 @@ def submit():
         logger.error(content)
         return content, 400
     return PasteSubmit_page()
 @PasteSubmit.route("/PasteSubmit/submit_status", methods=['GET'])
@@ -278,10 +268,7 @@ def submit_status():
     else:
         prog = 0
-    if error:
-        isError = True
-    else:
-        isError = False
+    isError = bool(error)
     if end == '0':
         end = False
@@ -327,6 +314,8 @@ def submit_status():
 @login_required
 @login_analyst
 def edit_tag_export():
+    return abort(404)
     misp_auto_events = r_serv_db.get('misp:auto-events')
     hive_auto_alerts = r_serv_db.get('hive:auto-alerts')
@@ -393,6 +382,9 @@ def edit_tag_export():
 @login_required
 @login_analyst
 def tag_export_edited():
+    return abort(404)
     tag_enabled_misp = request.form.getlist('tag_enabled_misp')
     tag_enabled_hive = request.form.getlist('tag_enabled_hive')
@@ -419,6 +411,8 @@ def tag_export_edited():
 @login_required
 @login_analyst
 def enable_misp_auto_event():
+    return abort(404)
     r_serv_db.set('misp:auto-events', 1)
     return edit_tag_export()
@@ -426,6 +420,8 @@ def enable_misp_auto_event():
 @login_required
 @login_analyst
 def disable_misp_auto_event():
+    return abort(404)
     r_serv_db.set('misp:auto-events', 0)
     return edit_tag_export()
@@ -433,6 +429,8 @@ def disable_misp_auto_event():
 @login_required
 @login_analyst
 def enable_hive_auto_alert():
+    return abort(404)
     r_serv_db.set('hive:auto-alerts', 1)
     return edit_tag_export()
@@ -440,6 +438,8 @@ def enable_hive_auto_alert():
 @login_required
 @login_analyst
 def disable_hive_auto_alert():
+    return abort(404)
     r_serv_db.set('hive:auto-alerts', 0)
     return edit_tag_export()
@@ -447,6 +447,8 @@ def disable_hive_auto_alert():
 @login_required
 @login_analyst
 def add_push_tag():
+    return abort(404)
     tag = request.args.get('tag')
     if tag is not None:
@@ -466,6 +468,9 @@ def add_push_tag():
 @login_required
 @login_analyst
 def delete_push_tag():
+    return abort(404)
     tag = request.args.get('tag')
     infoleak_tags = Taxonomies().get('infoleak').machinetags()

View File

@@ -69,11 +69,11 @@
           </span>
         </h5>
         <ul class="nav flex-md-column flex-row navbar-nav justify-content-between w-100">
-          <li class="nav-item">
-            <a class="nav-link" href="{{url_for('import_export.import_object')}}" id="nav_misp_import">
-              <b>Import</b>
-            </a>
-          </li>
+          {# <li class="nav-item">#}
+          {# <a class="nav-link" href="{{url_for('import_export.import_object')}}" id="nav_misp_import">#}
+          {# <b>Import</b>#}
+          {# </a>#}
+          {# </li>#}
           <li class="nav-item">
             <a class="nav-link" href="{{url_for('import_export.objects_misp_export')}}" id="nav_misp_export">
               <b>Export</b>