fix: [d4-server] worker launcher: don't add invalid HMAC or empty data stream to workers queue

pull/49/head
Terrtia 2021-04-20 15:43:03 +02:00
parent 39d593364d
commit adf0f6008b
No known key found for this signature in database
GPG Key ID: 1E1B1F50D84613D0
3 changed files with 37 additions and 23 deletions
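Summary of the change: HMAC verification moves out of process_d4_data() into a reusable check_hmac_key() helper that runs as soon as a connection's header is parsed, connections that fail the check are aborted before their session is ever advertised to workers, and the session_uuid:<type> worker entry point is now only registered once a first packet has actually been saved. The two worker scripts print a clearer error before exiting, and the tcpdump worker tolerates a missing capture directory. Below is a minimal, standalone sketch of the verification step itself; the 62-byte header layout, the field offsets and the names used here are illustrative assumptions, not this repository's API.

# Standalone sketch of the check done by check_hmac_key(): zero the HMAC field,
# recompute HMAC-SHA256 over the packet with the sender's key, compare digests.
# D4_HEADER_SIZE, HMAC_FIELD and verify_packet_hmac are assumed names; the
# offsets follow the usual D4 header order (version, type, uuid, timestamp,
# hmac, size) but are not taken from this commit.
import hmac

D4_HEADER_SIZE = 62
HMAC_FIELD = slice(26, 58)   # 32-byte HMAC-SHA256 field inside the header
HMAC_RESET = b'\x00' * 32    # the field is zeroed before the digest is recomputed

def verify_packet_hmac(packet: bytes, hmac_key: bytes) -> bool:
    if len(packet) < D4_HEADER_SIZE:
        return False
    received = packet[HMAC_FIELD]
    blanked = packet[:HMAC_FIELD.start] + HMAC_RESET + packet[HMAC_FIELD.stop:]
    expected = hmac.new(hmac_key, msg=blanked, digestmod='sha256').digest()
    # constant-time comparison; server.py compares hex digests instead
    return hmac.compare_digest(received, expected)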

View File

@@ -269,6 +269,20 @@ class D4_Server(Protocol, TimeoutMixin):
             data_header['size'] = struct.unpack('I', data[58:62])[0]
         return data_header
 
+    def check_hmac_key(self, hmac_header, data):
+        if self.hmac_key is None:
+            self.hmac_key = redis_server_metadata.hget('metadata_uuid:{}'.format(self.uuid), 'hmac_key')
+            if self.hmac_key is None:
+                self.hmac_key = redis_server_metadata.get('server:hmac_default_key')
+
+        # set hmac_header to 0
+        data = data.replace(hmac_header, hmac_reset, 1)
+        HMAC = hmac.new(self.hmac_key, msg=data, digestmod='sha256')
+        hmac_header = hmac_header.hex()
+
+        # hmac match
+        return hmac_header == HMAC.hexdigest()
+
     def check_connection_validity(self, data_header):
         # blacklist ip by uuid
         if redis_server_metadata.sismember('blacklist_ip_by_uuid', data_header['uuid_header']):
@@ -345,8 +359,14 @@ class D4_Server(Protocol, TimeoutMixin):
                 self.type = data_header['type']
                 self.uuid = data_header['uuid_header']
 
-                # worker entry point: map type:session_uuid
-                redis_server_stream.sadd('session_uuid:{}'.format(data_header['type']), self.session_uuid.encode())
+                # check HMAC
+                if not self.check_hmac_key(data_header['hmac_header'], data):
+                    print('hmac do not match')
+                    print(data)
+                    logger.debug("HMAC don't match, uuid={}, session_uuid={}".format(self.uuid, self.session_uuid))
+                    redis_server_metadata.hset('metadata_uuid:{}'.format(data_header['uuid_header']), 'Error', 'Error: HMAC don\'t match')
+                    self.transport.abortConnection()
+                    return 1
 
                 ## save active connection ##
                 #active Connection
@@ -473,15 +493,6 @@ class D4_Server(Protocol, TimeoutMixin):
     def process_d4_data(self, data, data_header, ip):
         # empty buffer
         self.buffer = b''
-        # set hmac_header to 0
-        data = data.replace(data_header['hmac_header'], hmac_reset, 1)
-
-        if self.hmac_key is None:
-            self.hmac_key = redis_server_metadata.hget('metadata_uuid:{}'.format(data_header['uuid_header']), 'hmac_key')
-            if self.hmac_key is None:
-                self.hmac_key = redis_server_metadata.get('server:hmac_default_key')
-        HMAC = hmac.new(self.hmac_key, msg=data, digestmod='sha256')
-        data_header['hmac_header'] = data_header['hmac_header'].hex()
 
         ### Debug ###
         #print('hexdigest: {}'.format( HMAC.hexdigest() ))
@@ -494,7 +505,7 @@ class D4_Server(Protocol, TimeoutMixin):
         ### ###
 
         # hmac match
-        if data_header['hmac_header'] == HMAC.hexdigest():
+        if self.check_hmac_key(data_header['hmac_header'], data):
             if not self.stream_max_size:
                 temp = redis_server_metadata.hget('stream_max_size_by_uuid', data_header['uuid_header'])
                 if temp is not None:
@@ -526,6 +537,9 @@ class D4_Server(Protocol, TimeoutMixin):
                 redis_server_metadata.hset('metadata_type_by_uuid:{}:{}'.format(data_header['uuid_header'], data_header['type']), 'last_seen', d4_packet_rcv_time)
 
                 if not self.data_saved:
+                    # worker entry point: map type:session_uuid
+                    redis_server_stream.sadd('session_uuid:{}'.format(data_header['type']), self.session_uuid.encode())
+
                     #UUID IP: ## TODO: use d4 timestamp ?
                     redis_server_metadata.lpush('list_uuid_ip:{}'.format(data_header['uuid_header']), '{}-{}'.format(ip, datetime.datetime.now().strftime("%Y%m%d%H%M%S")))
                     redis_server_metadata.ltrim('list_uuid_ip:{}'.format(data_header['uuid_header']), 0, 15)
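Net effect of the two hunks above: the sadd on session_uuid:<type>, which the worker launcher polls, now runs inside "if not self.data_saved:" in process_d4_data(), i.e. only after a first HMAC-valid packet has been stored, instead of at connection time. A hedged sketch of that ordering, with assumed Redis settings and a stub in place of the real stream-writing code:

# Sketch of the re-ordered worker hand-off. The Redis connection settings and
# save_payload() are illustrative stand-ins; only the 'session_uuid:{type}'
# key mirrors the diff above.
import redis

redis_server_stream = redis.Redis(host='localhost', port=6380, db=0)  # assumed settings

def save_payload(d4_type, session_uuid, payload):
    # stand-in for the server's real stream storage
    redis_server_stream.rpush('demo_stream:{}:{}'.format(d4_type, session_uuid), payload)

def on_valid_packet(d4_type, session_uuid, payload, data_saved):
    save_payload(d4_type, session_uuid, payload)
    if not data_saved:
        # only now is the session exposed to the worker launcher, so rejected
        # or empty sessions never reach the workers queue
        redis_server_stream.sadd('session_uuid:{}'.format(d4_type), session_uuid.encode())
        data_saved = True
    return data_saved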

View File

@@ -85,8 +85,8 @@ if __name__ == "__main__":
             os.makedirs(rel_path)
         print('---- worker launched, uuid={} session_uuid={} epoch={}'.format(uuid, session_uuid, time.time()))
     else:
+        print('Incorrect Stream, Closing worker: type={} session_uuid={}'.format(type, session_uuid))
         sys.exit(1)
-        print('Incorrect message')
 
     redis_server_stream.sadd('working_session_uuid:{}'.format(type), session_uuid)
     #LAUNCH a tcpdump
@@ -149,16 +149,16 @@ if __name__ == "__main__":
            except subprocess.TimeoutExpired:
                process_compressor.kill()
            ### compress all files ###
-           date = datetime.datetime.now().strftime("%Y%m%d")
            worker_data_directory = os.path.join(full_tcpdump_path, date[0:4], date[4:6], date[6:8])
-           all_files = os.listdir(worker_data_directory)
-           all_files.sort()
-           if all_files:
-               for file in all_files:
-                   if file.endswith('.cap'):
-                       full_path = os.path.join(worker_data_directory, file)
-                       if redis_server_stream.get('data_in_process:{}'.format(session_uuid)) != full_path:
-                           compress_file(full_path)
+           if os.path.isdir(worker_data_directory):
+               all_files = os.listdir(worker_data_directory)
+               all_files.sort()
+               if all_files:
+                   for file in all_files:
+                       if file.endswith('.cap'):
+                           full_path = os.path.join(worker_data_directory, file)
+                           if redis_server_stream.get('data_in_process:{}'.format(session_uuid)) != full_path:
+                               compress_file(full_path)
            ### ###
 
            #print(process.stderr.read())
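The compression pass above now checks that the day's capture directory exists before listing it, since a session that never produced data leaves nothing on disk. A rough standalone equivalent, with gzip standing in for the worker's compress_file():

# Rough equivalent of the guarded compression loop; the gzip-based compress
# step and the function name are illustrative, not the worker's exact code.
import gzip
import os
import shutil

def compress_finished_captures(worker_data_directory, in_process_path=None):
    if not os.path.isdir(worker_data_directory):
        return  # no capture directory means nothing was ever written
    for name in sorted(os.listdir(worker_data_directory)):
        if not name.endswith('.cap'):
            continue
        full_path = os.path.join(worker_data_directory, name)
        if full_path == in_process_path:
            continue  # skip the capture tcpdump is still writing
        with open(full_path, 'rb') as f_in, gzip.open(full_path + '.gz', 'wb') as f_out:
            shutil.copyfileobj(f_in, f_out)
        os.remove(full_path)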

View File

@@ -60,8 +60,8 @@ if __name__ == "__main__":
         rel_path = os.path.join(dir_path, filename)
         print('---- worker launched, uuid={} session_uuid={} epoch={}'.format(uuid, session_uuid, time.time()))
     else:
+        print('Incorrect Stream, Closing worker: type={} session_uuid={}'.format(type, session_uuid))
         sys.exit(1)
-        print('Incorrect message')
 
     time_file = time.time()
     rotate_file = False