#!/usr/bin/env python3.5

import time, datetime
import copy
from collections import OrderedDict
from pprint import pprint

import zmq
import redis
import random
import configparser
import argparse
import os
import sys
import json
import geoip2.database
import geoip2.errors   # imported explicitly: AddressNotFoundError is caught below
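
# Overview: this subscriber listens to a MISP ZMQ feed, picks a handler for
# every message according to its topic (see dico_action at the bottom) and
# feeds the misp-dashboard through Redis: log lines are published on a
# pub/sub channel, geolocation data goes into per-day sorted sets and geo keys.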

configfile = os.path.join(os.environ['DASH_CONFIG'], 'config.cfg')

cfg = configparser.ConfigParser()
cfg.read(configfile)

ZMQ_URL = cfg.get('RedisLog', 'zmq_url')
CHANNEL = cfg.get('RedisLog', 'channel')
CHANNELDISP = cfg.get('RedisMap', 'channelDisp')
CHANNEL_PROC = cfg.get('RedisMap', 'channelProc')
PATH_TO_DB = cfg.get('RedisMap', 'pathMaxMindDB')
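
# The options read above and in handler_attribute() suggest a config.cfg along
# these lines (illustrative values only; the section and option names come
# straight from the cfg.get()/cfg.getint() calls in this file):
#
#   [RedisGlobal]
#   host = localhost
#   port = 6379
#
#   [RedisLog]
#   zmq_url = tcp://localhost:50000
#   channel = logChannel
#   db = 0
#
#   [RedisMap]
#   channelDisp = dispatcherChannel
#   channelProc = procChannel
#   pathMaxMindDB = /path/to/GeoLite2-City.mmdb
#   db = 1
#
#   [RedisDB]
#   db = 2
#
#   [Log]
#   fieldname_order = ["Attribute.category", ["Attribute.type", "Attribute.value"]]
#   char_separator = |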

serv_log = redis.StrictRedis(
        host=cfg.get('RedisGlobal', 'host'),
        port=cfg.getint('RedisGlobal', 'port'),
        db=cfg.getint('RedisLog', 'db'))
serv_coord = redis.StrictRedis(
        host=cfg.get('RedisGlobal', 'host'),
        port=cfg.getint('RedisGlobal', 'port'),
        db=cfg.getint('RedisMap', 'db'))
serv_redis_db = redis.StrictRedis(
        host=cfg.get('RedisGlobal', 'host'),
        port=cfg.getint('RedisGlobal', 'port'),
        db=cfg.getint('RedisDB', 'db'))

reader = geoip2.database.Reader(PATH_TO_DB)


def publish_log(zmq_name, name, content):
    to_send = { 'name': name, 'log': json.dumps(content), 'zmqName': zmq_name }
    serv_log.publish(CHANNEL, json.dumps(to_send))


def push_to_redis_zset(keyCateg, toAdd):
    now = datetime.datetime.now()
    today_str = str(now.year)+str(now.month)+str(now.day)
    keyname = "{}:{}".format(keyCateg, today_str)
    # NOTE: the positional (name, value) call assumes the redis-py 2.x zincrby
    # signature, where the increment defaults to 1; redis-py 3.x changed the
    # argument order to (name, amount, value).
    serv_redis_db.zincrby(keyname, toAdd)


def push_to_redis_geo(keyCateg, lon, lat, content):
    now = datetime.datetime.now()
    today_str = str(now.year)+str(now.month)+str(now.day)
    keyname = "{}:{}".format(keyCateg, today_str)
    serv_redis_db.geoadd(keyname, lon, lat, content)
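
# Both push helpers build one Redis key per category and per day, e.g.
# 'GEO_COUNTRY:20171027' on 2017-10-27. Month and day are not zero-padded
# (2017-03-05 gives '201735'), so anything reading these keys back has to
# build the date suffix the same way.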


def ip_to_coord(ip):
    resp = reader.city(ip)
    lat = float(resp.location.latitude)
    lon = float(resp.location.longitude)
    # 0.0001 corresponds to ~10m
    # Cast the float so that it has the correct float format
    lat_corrected = float("{:.4f}".format(lat))
    lon_corrected = float("{:.4f}".format(lon))
    return { 'coord': {'lat': lat_corrected, 'lon': lon_corrected}, 'full_rep': resp }


def getCoordAndPublish(zmq_name, supposed_ip, categ):
    try:
        rep = ip_to_coord(supposed_ip)
        coord = rep['coord']
        coord_dic = {'lat': coord['lat'], 'lon': coord['lon']}
        ordDic = OrderedDict()
        ordDic['lat'] = coord_dic['lat']
        ordDic['lon'] = coord_dic['lon']
        coord_list = [coord['lat'], coord['lon']]
        push_to_redis_zset('GEO_COORD', json.dumps(ordDic))
        push_to_redis_zset('GEO_COUNTRY', rep['full_rep'].country.iso_code)
        push_to_redis_geo('GEO_RAD', coord['lon'], coord['lat'], json.dumps({ 'categ': categ, 'value': supposed_ip }))
        to_send = {
                "coord": coord,
                "categ": categ,
                "value": supposed_ip,
                "country": rep['full_rep'].country.name,
                "specifName": rep['full_rep'].subdivisions.most_specific.name,
                "cityName": rep['full_rep'].city.name,
                "regionCode": rep['full_rep'].country.iso_code,
                }
        serv_coord.publish(CHANNELDISP, json.dumps(to_send))
    except ValueError:
        print("can't resolve ip")
    except geoip2.errors.AddressNotFoundError:
        print("Address not in Database")


def getFields(obj, fields):
    jsonWalker = fields.split('.')
    itemToExplore = obj
    lastName = ""
    try:
        for i in jsonWalker:
            itemToExplore = itemToExplore[i]
            lastName = i
        if type(itemToExplore) is list:
            return { 'name': lastName, 'data': itemToExplore }
        else:
            return itemToExplore
    except KeyError as e:
        return ""
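
# Example: getFields walks a dotted path through nested dicts, so with
# obj = {'Event': {'id': '42', 'Attribute': [...]}}
#   getFields(obj, 'Event.id')        -> '42'
#   getFields(obj, 'Event.Attribute') -> {'name': 'Attribute', 'data': [...]}
# and a missing key returns '' instead of raising.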


##############
## HANDLERS ##
##############

def handler_log(zmq_name, jsonevent):
    print('sending', 'log')
    return


def handler_dispatcher(zmq_name, jsonObj):
    if "Event" in jsonObj:
        handler_event(zmq_name, jsonObj)


def handler_keepalive(zmq_name, jsonevent):
    print('sending', 'keepalive')
    to_push = [ jsonevent['uptime'] ]
    publish_log(zmq_name, 'Keepalive', to_push)


def handler_sighting(zmq_name, jsonsight):
    print('sending', 'sighting')
    return


def handler_event(zmq_name, jsonobj):
    # fields: threat_level_id, id, info
    jsonevent = jsonobj['Event']

    # redirect to handler_attribute
    if 'Attribute' in jsonevent:
        attributes = jsonevent['Attribute']
        if type(attributes) is list:
            for attr in attributes:
                jsoncopy = copy.deepcopy(jsonobj)
                jsoncopy['Attribute'] = attr
                handler_attribute(zmq_name, jsoncopy)
        else:
            handler_attribute(zmq_name, attributes)


def handler_attribute(zmq_name, jsonobj):
    # check whether we got a wrapped {'Attribute': {...}} object or a bare attribute
    if 'Attribute' in jsonobj:
        jsonattr = jsonobj['Attribute']
    else:
        jsonattr = jsonobj

    to_push = []
    for field in json.loads(cfg.get('Log', 'fieldname_order')):
        if type(field) is list:
            to_join = []
            for subField in field:
                to_join.append(getFields(jsonobj, subField))
            to_add = cfg.get('Log', 'char_separator').join(to_join)
        else:
            to_add = getFields(jsonobj, field)
        to_push.append(to_add)

    # try to get coordinates from the IP
    if jsonattr['category'] == "Network activity":
        getCoordAndPublish(zmq_name, jsonattr['value'], jsonattr['category'])

    # Push to log
    publish_log(zmq_name, 'Attribute', to_push)


def process_log(zmq_name, event):
    event = event.decode('utf8')
    topic, eventdata = event.split(' ', maxsplit=1)
    jsonevent = json.loads(eventdata)
    dico_action[topic](zmq_name, jsonevent)
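
# Each ZMQ message is expected to look like "<topic> <json payload>", e.g.
#   misp_json_self {"uptime": 12345}
#   misp_json_attribute {"Attribute": {"category": "Network activity", ...}}
# (payload shapes are illustrative); the split above separates the topic used
# to pick a handler from the JSON payload.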


def main(zmqName):
    context = zmq.Context()
    socket = context.socket(zmq.SUB)
    socket.connect(ZMQ_URL)
    socket.setsockopt_string(zmq.SUBSCRIBE, '')

    while True:
        content = socket.recv()
        # bytes.replace returns a new object, so the result must be reassigned
        content = content.replace(b'\n', b'')
        zmq_name = zmqName
        process_log(zmq_name, content)


dico_action = {
        "misp_json": handler_dispatcher,
        "misp_json_event": handler_event,
        "misp_json_self": handler_keepalive,
        "misp_json_attribute": handler_attribute,
        "misp_json_sighting": handler_sighting,
        "misp_json_organisation": handler_log,
        "misp_json_user": handler_log,
        "misp_json_conversation": handler_log
        }
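
# These keys correspond to the "misp_json*" topics published by the MISP ZMQ
# plugin; a message whose topic is missing from dico_action makes process_log
# raise a KeyError.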


if __name__ == "__main__":
    parser = argparse.ArgumentParser(description='A ZMQ subscriber. It subscribes to a ZMQ feed and redispatches the messages to the misp-dashboard')
    parser.add_argument('-n', '--name', required=False, dest='zmqname', help='The ZMQ feed name', default="MISP Standard ZMQ")
    parser.add_argument('-u', '--url', required=False, dest='zmqurl', help='The URL to connect to', default=ZMQ_URL)
    args = parser.parse_args()

    # Note: only the feed name is forwarded; the socket still connects to the
    # ZMQ_URL taken from the config file, so args.zmqurl is currently unused.
    main(args.zmqname)
    reader.close()