2018-12-05 16:24:10 +01:00
|
|
|
#!/usr/bin/env python3
|
|
|
|
|
2018-12-13 10:55:27 +01:00
|
|
|
import os
|
2018-12-05 16:24:10 +01:00
|
|
|
import sys
|
2019-01-11 13:54:01 +01:00
|
|
|
import uuid
|
2018-12-12 15:27:00 +01:00
|
|
|
import hmac
|
2018-12-13 10:55:27 +01:00
|
|
|
import stat
|
2018-12-13 16:03:05 +01:00
|
|
|
import redis
|
2018-12-21 15:27:51 +01:00
|
|
|
import struct
|
2019-01-02 17:00:43 +01:00
|
|
|
import time
|
2019-01-07 16:11:04 +01:00
|
|
|
import datetime
|
2019-01-14 11:17:18 +01:00
|
|
|
import argparse
|
|
|
|
import logging
|
|
|
|
import logging.handlers
|
2018-12-05 16:24:10 +01:00
|
|
|
|
|
|
|
from twisted.internet import ssl, task, protocol, endpoints, defer
|
|
|
|
from twisted.python import log
|
|
|
|
from twisted.python.modules import getModule
|
|
|
|
|
|
|
|
from twisted.internet.protocol import Protocol
|
2019-01-03 15:23:06 +01:00
|
|
|
from twisted.protocols.policies import TimeoutMixin
|
2018-12-05 16:24:10 +01:00
|
|
|
|
2018-12-21 15:27:51 +01:00
|
|
|
# all-zero placeholder written over the hmac field before recomputing the digest
hmac_reset = bytearray(32)
# shared secret used for packet HMACs; placeholder value — must be changed in production
# NOTE(review): the trailing '\n' is part of the key — presumably read from a file; confirm
hmac_key = b'private key to change\n'

# seconds of inactivity before TimeoutMixin fires timeoutConnection()
timeout_time = 30

# fixed size (bytes) of a D4 packet header
header_size = 62

# maximum accepted payload size (bytes); larger packets abort the connection
data_default_size_limit = 100000

# Redis backend used for streams, statistics and blacklists
host_redis="localhost"
port_redis=6379

redis_server = redis.StrictRedis(
                    host=host_redis,
                    port=port_redis,
                    db=0)

# fail fast at startup when Redis is unreachable
try:
    redis_server.ping()
except redis.exceptions.ConnectionError:
    print('Error: Redis server {}:{}, ConnectionError'.format(host_redis, port_redis))
    sys.exit(1)
|
|
|
|
|
2019-01-03 15:23:06 +01:00
|
|
|
class Echo(Protocol, TimeoutMixin):
    """D4 protocol endpoint.

    Parses D4 packet headers from a TLS stream, validates the sender UUID
    and the packet HMAC, and forwards accepted payloads into Redis streams
    together with per-day statistics.
    """

    def __init__(self):
        # partial-packet accumulator: bytes kept until a full header/payload arrives
        self.buffer = b''
        self.setTimeout(timeout_time)
        # unique id for this TCP session, used as the Redis stream suffix
        self.session_uuid = str(uuid.uuid4())
        # True once the session has been registered in Redis (first valid packet)
        self.data_saved = False

    def dataReceived(self, data):
        self.resetTimeout()
        ip, source_port = self.transport.client
        # check blacklisted_ip
        if redis_server.sismember('blacklist_ip', ip):
            self.transport.abortConnection()
            # fix: previously the received data was still processed after the abort
            return
        self.process_header(data, ip, source_port)

    def timeoutConnection(self):
        # on inactivity, drop any half-received packet instead of closing the link
        self.resetTimeout()
        self.buffer = b''
        #self.transport.abortConnection()

    def connectionLost(self, reason):
        # let consumers know this session's streams will receive no more data
        redis_server.sadd('ended_session', self.session_uuid)

    def unpack_header(self, data):
        """Decode the 62-byte D4 header at the start of *data*.

        Returns a dict with version/type/uuid_header/timestamp/hmac_header/size,
        or an empty dict when *data* is too short or the sender is rejected.
        """
        data_header = {}
        if len(data) >= header_size:
            # NOTE(review): 'Q'/'I' use native byte order and alignment —
            # assumes the sender's encoding matches; confirm against the D4 spec
            data_header['version'] = struct.unpack('B', data[0:1])[0]
            data_header['type'] = struct.unpack('B', data[1:2])[0]
            data_header['uuid_header'] = data[2:18].hex()
            data_header['timestamp'] = struct.unpack('Q', data[18:26])[0]
            data_header['hmac_header'] = data[26:58]
            data_header['size'] = struct.unpack('I', data[58:62])[0]

            # uuid blacklist
            if redis_server.sismember('blacklist_uuid', data_header['uuid_header']):
                self.transport.abortConnection()
                # fix: return a falsy header so the caller stops processing
                return {}

            # check default size limit
            if data_header['size'] > data_default_size_limit:
                self.transport.abortConnection()
                # fix: return a falsy header so the caller stops processing
                return {}

        return data_header

    def is_valid_uuid_v4(self, header_uuid):
        """Return True when *header_uuid* is the hex form of a version-4 UUID."""
        try:
            uuid_test = uuid.UUID(hex=header_uuid, version=4)
            return uuid_test.hex == header_uuid
        except (ValueError, TypeError, AttributeError):
            # fix: narrowed from a bare except so unrelated errors are not swallowed
            return False

    # # TODO: check timestamp
    def is_valid_header(self, uuid_to_check):
        return self.is_valid_uuid_v4(uuid_to_check)

    def process_header(self, data, ip, source_port):
        """Consume one or more D4 packets from *data*, buffering partial ones."""
        if not self.buffer:
            data_header = self.unpack_header(data)
            if data_header:
                if self.is_valid_header(data_header['uuid_header']):
                    # check data size
                    if data_header['size'] == (len(data) - header_size):
                        self.process_d4_data(data, data_header, ip)
                    # multiple d4 headers
                    elif data_header['size'] < (len(data) - header_size):
                        next_data = data[data_header['size'] + header_size:]
                        data = data[:data_header['size'] + header_size]
                        self.process_d4_data(data, data_header, ip)
                        # process next d4 header
                        self.process_header(next_data, ip, source_port)
                    # data_header['size'] > (len(data) - header_size)
                    # buffer the data
                    else:
                        self.buffer += data
                else:
                    if len(data) < header_size:
                        self.buffer += data
                    else:
                        print('discard data')
                        print(data_header)
                        print(data)
            else:
                if len(data) < header_size:
                    self.buffer += data
                else:
                    print('error discard data')
                    print(data_header)
                    print(data)
        # not a header
        else:
            # add previous data
            if len(data) < header_size:
                self.buffer += data
            #todo check if valid header before adding ?
            else:
                data = self.buffer + data
                self.buffer = b''
                self.process_header(data, ip, source_port)

    def process_d4_data(self, data, data_header, ip):
        """Verify the packet HMAC and persist the payload + statistics in Redis."""
        # empty buffer
        self.buffer = b''
        # zero the hmac field (bytes 26:58) before recomputing the digest.
        # fix: slice-based reset; bytes.replace() could match an identical
        # 32-byte sequence earlier in the packet and corrupt the data
        data = data[:26] + hmac_reset + data[58:]
        HMAC = hmac.new(hmac_key, msg=data, digestmod='sha256')
        data_header['hmac_header'] = data_header['hmac_header'].hex()

        # hmac match — fix: timing-safe comparison instead of ==
        if hmac.compare_digest(data_header['hmac_header'], HMAC.hexdigest()):
            date = datetime.datetime.now().strftime("%Y%m%d")
            # payload stream consumed by workers, keyed by type + session
            redis_server.xadd('stream:{}:{}'.format(data_header['type'], self.session_uuid), {'message': data[header_size:], 'uuid': data_header['uuid_header'], 'timestamp': data_header['timestamp'], 'version': data_header['version']})
            # per-day packet counters, both directions of the (uuid, ip) mapping
            redis_server.zincrby('stat_uuid_ip:{}:{}'.format(date, data_header['uuid_header']), 1, ip)
            redis_server.zincrby('stat_ip_uuid:{}:{}'.format(date, ip), 1, data_header['uuid_header'])

            redis_server.sadd('daily_uuid:{}'.format(date), data_header['uuid_header'])
            redis_server.sadd('daily_ip:{}'.format(date), ip)

            # register the session once, on the first accepted packet
            if not self.data_saved:
                redis_server.sadd('session_uuid:{}'.format(data_header['type']), self.session_uuid.encode())
                redis_server.hset('map-type:session_uuid-uuid:{}'.format(data_header['type']), self.session_uuid, data_header['uuid_header'])
                self.data_saved = True
        else:
            print('hmac do not match')
            print(data)
|
2018-12-12 15:27:00 +01:00
|
|
|
|
2018-12-05 16:24:10 +01:00
|
|
|
|
2018-12-12 15:27:00 +01:00
|
|
|
|
2018-12-05 16:24:10 +01:00
|
|
|
def main(reactor):
    """Boot the D4 TLS listener on port 4443 and keep the reactor alive."""
    log.startLogging(sys.stdout)
    try:
        pem_content = getModule(__name__).filePath.sibling('server.pem').getContent()
    except FileNotFoundError as e:
        print('Error, pem file not found')
        print(e)
        sys.exit(1)
    server_cert = ssl.PrivateCertificate.loadPEM(pem_content)
    echo_factory = protocol.Factory.forProtocol(Echo)
    reactor.listenSSL(4443, echo_factory, server_cert.options())
    # deferred never fires: keeps the reactor running until interrupted
    return defer.Deferred()
|
|
|
|
|
|
|
|
|
|
|
|
if __name__ == "__main__":
    parser = argparse.ArgumentParser()
    # fix: replaced placeholder help text 'dddd' with a real description
    parser.add_argument('-v', '--verbose',
                        help='logging verbosity as a numeric level (10=DEBUG, 20=INFO, 30=WARNING, 40=ERROR, 50=CRITICAL)',
                        type=int, default=30)
    args = parser.parse_args()

    # daily-rotated log file for the server
    log_filename = 'd4-server-logs.log'
    logger = logging.getLogger()
    formatter = logging.Formatter('%(asctime)s - %(levelname)s - %(message)s')
    handler_log = logging.handlers.TimedRotatingFileHandler(log_filename, when="midnight", interval=1)
    handler_log.suffix = '%Y-%m-%d-{}'.format(log_filename)
    handler_log.setFormatter(formatter)
    logger.addHandler(handler_log)
    logger.setLevel(args.verbose)
    # fix: removed leftover debug output (print(args.verbose), logger.error('test'))

    task.react(main)
|