Compare commits

...

23 Commits

Author SHA1 Message Date
Jean-Louis Huynen 4bc6a0b635
chg: [doc] typos 2020-06-26 15:53:15 +02:00
Jean-Louis Huynen 0d21913cd1
chg [doc] initial README material 2020-06-26 15:50:04 +02:00
Jean-Louis Huynen 025bfa89c8
chg [sshd] omitempty json fields 2020-06-26 15:20:13 +02:00
Jean-Louis Huynen 29f2ef718b
chg: [misp] correct timings 2020-06-22 15:45:45 +02:00
Jean-Louis Huynen ab4f9b967c
chg: [misp] MISP export 2020-06-19 11:59:48 +02:00
Jean-Louis Huynen 16d08d375d
chg: [mod] bump d4-golang-util - fix #13 2020-06-19 11:57:51 +02:00
Jean-Louis Huynen 9822f6a529
chg: [sshd] MISP export for top 100 usernames 2020-06-11 15:31:52 +02:00
Jean-Louis Huynen 2f919671fc
add: [sshd] periodic background MISP routine 2020-06-09 12:10:04 +02:00
Jean-Louis Huynen a158868857
add: [sshd] typos 2020-06-08 17:25:31 +02:00
Jean-Louis Huynen 5bb4df40e8 Merge branch 'nifi' of github.com:D4-project/analyzer-d4-log into nifi 2020-06-08 17:21:49 +02:00
Jean-Louis Huynen 34904b3796
add: [sshd] MISP export initial commit - python OK - Go untested 2020-06-08 17:21:22 +02:00
Jean-Louis Huynen 117a4d0b2f chg: [sshd] retry/sleep on lack of incoming data 2020-05-27 18:08:40 +02:00
Jean-Louis Huynen f69c9348da
chg: [sshd] retry on redisreader EOF 2020-05-27 17:16:11 +02:00
Jean-Louis Huynen 389d070f5a
add: [inputreader] push missing inputreader files 2020-04-06 10:08:15 +02:00
Jean-Louis Huynen fb558adab5
add: [sshd] csv export 2020-03-31 10:56:11 +02:00
Jean-Louis Huynen 7441d92eb5
chg: [signaling] merge OS signaling and error handling routines 2020-03-17 11:22:34 +01:00
Jean-Louis Huynen 31b491ba97
chg: [compiler] teardown function + error channel 2020-03-13 14:59:04 +01:00
Jean-Louis Huynen aef4b518c0
add: [grok] ingest from file 2020-03-10 16:31:53 +01:00
Jean-Louis Huynen 9a4d57ee0a
add: [grok] Stream d4 redis pulling 2020-03-10 16:02:24 +01:00
Jean-Louis Huynen 593c6425b5
chg: [grok] sshd groking test 2020-03-09 14:04:54 +01:00
Jean-Louis Huynen df32553050
chg: [grok] graceful compilation shutdown 2020-03-09 11:25:29 +01:00
Jean-Louis Huynen 547fdba5c8
add: [grok] moving to grokking support - logic refacto 2020-03-06 17:02:46 +01:00
Jean-Louis Huynen b3b3649503
chg: [main] no parsers anynore 2020-03-04 15:58:07 +01:00
28 changed files with 1106 additions and 407 deletions

View File

@ -0,0 +1,34 @@
#!/usr/bin/env python3
import time
from pymisp.tools.abstractgenerator import AbstractMISPObjectGenerator
class AuthFailureMISPObject(AbstractMISPObjectGenerator):
    """Maps a dict of grokked sshd authentication failures onto the
    'authentication-failure-report' MISP object template.

    Only keys that exist in the template's attribute definition are kept;
    everything else in `dico_val` is silently ignored.
    """
    def __init__(self, dico_val, **kargs):
        # Raw key/value source for generate_attributes()
        self._dico_val = dico_val
        # Enforce attribute date with timestamp
        super(AuthFailureMISPObject, self).__init__('authentication-failure-report',
                                                    default_attributes_parameters={'timestamp': int(time.time())},
                                                    **kargs)
        self.name = "authentication-failure-report"
        self.generate_attributes()

    def generate_attributes(self):
        """Turn self._dico_val entries into MISP attributes on this object."""
        valid_object_attributes = self._definition['attributes'].keys()
        for object_relation, value in self._dico_val.items():
            # Skip keys not defined by the object template
            if object_relation not in valid_object_attributes:
                continue
            if object_relation == 'timestamp':
                # Date already in ISO format, removing trailing Z
                value = value.rstrip('Z')
            if isinstance(value, dict):
                # Value carries full attribute parameters, expand them
                self.add_attribute(object_relation, **value)
            else:
                # uniformize value, sometimes empty array
                if isinstance(value, list) and len(value) == 0:
                    value = ''
                self.add_attribute(object_relation, value=value)

160
MISP_export/fromredis.py Executable file
View File

@ -0,0 +1,160 @@
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
import argparse
import datetime
import json
import sys
import time
import redis
import settings
from generator import FeedGenerator
def beautyful_sleep(sleep, additional):
    """Sleep for `sleep` seconds while animating a 20-step progress bar.

    `additional` is extra text (e.g. the last action) shown next to the bar.
    The line is rewritten in place via a carriage return.
    """
    steps = 20
    step_duration = float(sleep) / float(steps)
    for step in range(steps):
        bar = '|' * step + ' ' * (steps - step - 1)
        print('sleeping [{}]\t{}'.format(bar, additional), end='\r', sep='')
        sys.stdout.flush()
        time.sleep(step_duration)
class RedisToMISPFeed:
    """Pop items from redis lists and route them into the MISP feed.

    Key suffixes select how an item is handled: '_sighting', '_attribute',
    '_object', or no suffix (then settings.fallback_MISP_type decides).
    """

    SUFFIX_SIGH = '_sighting'
    SUFFIX_ATTR = '_attribute'
    SUFFIX_OBJ = '_object'
    SUFFIX_NO = ''
    SUFFIX_LIST = [SUFFIX_SIGH, SUFFIX_ATTR, SUFFIX_OBJ, SUFFIX_NO]

    def __init__(self):
        self.host = settings.host
        self.port = settings.port
        self.db = settings.db
        self.serv = redis.StrictRedis(self.host, self.port, self.db, decode_responses=True)
        self.generator = FeedGenerator()

        # Watch every suffixed variant of each configured keyname
        self.keynames = []
        for k in settings.keyname_pop:
            for s in self.SUFFIX_LIST:
                self.keynames.append(k + s)

        self.keynameError = settings.keyname_error
        self.update_last_action("Init system")

    def consume(self):
        """Main loop: drain every watched key, then sleep and repeat."""
        self.update_last_action("Started consuming redis")
        while True:
            for key in self.keynames:
                while True:
                    data = self.pop(key)
                    if data is None:
                        break
                    try:
                        self.perform_action(key, data)
                    except Exception as error:
                        self.save_error_to_redis(error, data)

            try:
                beautyful_sleep(5, self.format_last_action())
            except KeyboardInterrupt:
                sys.exit(130)

    def pop(self, key):
        """RPOP one item from `key` and JSON-decode it; None when empty.

        On a decoding error the raw payload is recorded in redis and
        returned as-is.
        """
        popped = self.serv.rpop(key)
        if popped is None:
            return None
        try:
            popped = json.loads(popped)
        except ValueError as error:
            # Fix: the original had a second, identical `except ValueError`
            # clause which was unreachable dead code.
            self.save_error_to_redis(error, popped)
        return popped

    def perform_action(self, key, data):
        """Dispatch `data` according to the suffix of `key`."""
        # sighting
        if key.endswith(self.SUFFIX_SIGH):
            # NOTE(review): FeedGenerator.add_sighting_on_attribute expects
            # (sight_type, attr_uuid); this no-arg call raises TypeError,
            # which consume() records as an error — confirm intended.
            if self.generator.add_sighting_on_attribute():
                self.update_last_action("Added sighting")
            else:
                self.update_last_action("Error while adding sighting")

        # attribute
        elif key.endswith(self.SUFFIX_ATTR):
            attr_type = data.pop('type')
            attr_value = data.pop('value')
            if self.generator.add_attribute_to_event(attr_type, attr_value, **data):
                self.update_last_action("Added attribute")
            else:
                self.update_last_action("Error while adding attribute")

        # object
        elif key.endswith(self.SUFFIX_OBJ):
            # create the MISP object
            obj_name = data.pop('name')
            if self.generator.add_object_to_event(obj_name, **data):
                self.update_last_action("Added object")
            else:
                self.update_last_action("Error while adding object")

        else:
            # Suffix not provided, try to add anyway
            if settings.fallback_MISP_type == 'attribute':
                new_key = key + self.SUFFIX_ATTR
                # Fix: only give up when no attribute type is available at
                # all; previously an item that already carried its own
                # 'type' was discarded. getattr guards a missing setting.
                if 'type' not in data:
                    if getattr(settings, 'fallback_attribute_type', None):
                        data['type'] = settings.fallback_attribute_type
                    else:
                        new_key = None
            elif settings.fallback_MISP_type == 'object':
                new_key = key + self.SUFFIX_OBJ
                # Add object template name from the config when missing
                if 'name' not in data:
                    if getattr(settings, 'fallback_object_template_name', None):
                        data['name'] = settings.fallback_object_template_name
                    else:
                        new_key = None
            elif settings.fallback_MISP_type == 'sighting':
                new_key = key + self.SUFFIX_SIGH
            else:
                new_key = None

            if new_key is None:
                self.update_last_action("Redis key suffix not supported and automatic not configured")
            else:
                self.perform_action(new_key, data)

    # OTHERS
    def update_last_action(self, action):
        """Remember the last action and when it happened (for display)."""
        self.last_action = action
        self.last_action_time = datetime.datetime.now()

    def format_last_action(self):
        """Render the last action and its timestamp for the progress line."""
        return "Last action: [{}] @ {}".format(
            self.last_action,
            self.last_action_time.isoformat().replace('T', ' '),
        )

    def save_error_to_redis(self, error, item):
        """Record a processing error and the offending item in redis."""
        to_push = {'error': str(error), 'item': str(item)}
        print('Error:', str(error), '\nOn adding:', item)
        # Fix: redis-py cannot LPUSH a dict directly (raises DataError);
        # serialize it to JSON first.
        self.serv.lpush(self.keynameError, json.dumps(to_push))
if __name__ == '__main__':
    # CLI entry point: no options besides --help; all behavior is driven
    # by settings.py. argparse is used only to provide a usage message.
    parser = argparse.ArgumentParser(description="Pop item fom redis and add "
        + "it to the MISP feed. By default, each action are pushed into a "
        + "daily named event. Configuration taken from the file settings.py.")
    args = parser.parse_args()

    # Blocks forever, polling redis and feeding the MISP feed generator.
    redisToMISP = RedisToMISPFeed()
    redisToMISP.consume()

294
MISP_export/generator.py Executable file
View File

@ -0,0 +1,294 @@
#!/usr/bin/env python3
import datetime
import hashlib
import json
import os
import sys
import time
import uuid
from pymisp import MISPEvent
import settings
def get_system_templates():
    """Fetch all MISP-Object template present on the local system.

    Templates ship with the installed pymisp package under
    data/misp-objects/objects/<name>/definition.json.

    Returns:
        dict: A dictionary mapping object template name to its definition
    """
    misp_objects_path = os.path.join(
        os.path.abspath(os.path.dirname(sys.modules['pymisp'].__file__)),
        'data', 'misp-objects', 'objects')

    templates = {}
    for root, dirs, files in os.walk(misp_objects_path, topdown=False):
        for def_file in files:
            # Fix: the template folder name is the object name; use
            # os.path.basename instead of root.split('/')[-1], which
            # breaks on platforms with a different path separator.
            obj_name = os.path.basename(root)
            template_path = os.path.join(root, def_file)
            with open(template_path, 'r') as f:
                definition = json.load(f)
                templates[obj_name] = definition
    return templates
def gen_uuid():
    """Generate a random UUID and returns its string representation"""
    random_id = uuid.uuid4()
    return '{}'.format(random_id)
class FeedGenerator:
    """Helper object to create MISP feed.
    Configuration taken from the file settings.py"""

    def __init__(self):
        """This object can be used to easily create a daily MISP-feed.
        It handles the event creation, manifest file and cache file
        (hashes.csv).
        """
        self.sys_templates = get_system_templates()
        self.constructor_dict = settings.constructor_dict

        # Flush the in-memory event to disk at most every flushing_interval seconds
        self.flushing_interval = settings.flushing_interval
        self.flushing_next = time.time() + self.flushing_interval

        # manifest.json content, keyed by event uuid
        self.manifest = {}
        # Pending [md5(attribute value), event uuid] rows for hashes.csv
        self.attributeHashes = []

        # The trailing '{}' placeholder receives the day's date
        self.daily_event_name = settings.daily_event_name + ' {}'
        event_date_str, self.current_event_uuid, self.event_name = self.get_last_event_from_manifest()
        temp = [int(x) for x in event_date_str.split('-')]
        self.current_event_date = datetime.date(temp[0], temp[1], temp[2])
        self.current_event = self._get_event_from_id(self.current_event_uuid)

    def add_sighting_on_attribute(self, sight_type, attr_uuid, **data):
        """Add a sighting on an attribute.
        Not supported for the moment."""
        self.update_daily_event_id()
        self._after_addition()
        return False

    def add_attribute_to_event(self, attr_type, attr_value, **attr_data):
        """Add an attribute to the daily event"""
        self.update_daily_event_id()
        self.current_event.add_attribute(attr_type, attr_value, **attr_data)
        self._add_hash(attr_type, attr_value)
        self._after_addition()
        return True

    def add_object_to_event(self, obj_name, **data):
        """Add an object to the daily event"""
        self.update_daily_event_id()
        if obj_name not in self.sys_templates:
            print('Unkown object template')
            return False

        # Get MISP object constructor
        obj_constr = self.constructor_dict.get(obj_name, None)
        # Constructor not known, using the generic one
        if obj_constr is None:
            obj_constr = self.constructor_dict.get('generic')
            misp_object = obj_constr(obj_name)
            # Fill generic object
            for k, v in data.items():
                # attribute is not in the object template definition
                if k not in self.sys_templates[obj_name]['attributes']:
                    # add it with type text
                    misp_object.add_attribute(k, **{'value': v, 'type': 'text'})
                else:
                    misp_object.add_attribute(k, **{'value': v})
        else:
            # Dedicated constructor (e.g. AuthFailureMISPObject) receives
            # the whole data dict
            misp_object = obj_constr(data)

        self.current_event.add_object(misp_object)
        for attr_type, attr_value in data.items():
            self._add_hash(attr_type, attr_value)
        self._after_addition()
        return True

    def _after_addition(self):
        """Write event on disk when the flushing interval has elapsed"""
        now = time.time()
        if self.flushing_next <= now:
            self.flush_event()
            self.flushing_next = now + self.flushing_interval

    # Cache
    def _add_hash(self, attr_type, attr_value):
        # Composite values ('a|b', e.g. filename|md5) get one hash per part
        if ('|' in attr_type or attr_type == 'malware-sample'):
            split = attr_value.split('|')
            self.attributeHashes.append([
                hashlib.md5(str(split[0]).encode("utf-8")).hexdigest(),
                self.current_event_uuid
            ])
            self.attributeHashes.append([
                hashlib.md5(str(split[1]).encode("utf-8")).hexdigest(),
                self.current_event_uuid
            ])
        else:
            self.attributeHashes.append([
                hashlib.md5(str(attr_value).encode("utf-8")).hexdigest(),
                self.current_event_uuid
            ])

    # Manifest
    def _init_manifest(self):
        """Create the output dir, an empty manifest and a first daily event"""
        # check if outputdir exists and try to create it if not
        if not os.path.exists(settings.outputdir):
            try:
                os.makedirs(settings.outputdir)
            except PermissionError as error:
                print(error)
                print("Please fix the above error and try again.")
                sys.exit(126)
        # create an empty manifest
        try:
            with open(os.path.join(settings.outputdir, 'manifest.json'), 'w'):
                pass
        except PermissionError as error:
            print(error)
            print("Please fix the above error and try again.")
            sys.exit(126)
        # create new event and save manifest
        self.create_daily_event()

    def flush_event(self, new_event=None):
        """Serialize the current (or given) event to <uuid>.json and flush hashes"""
        print('Writting event on disk'+' '*50)
        if new_event is not None:
            event_uuid = new_event['uuid']
            event = new_event
        else:
            event_uuid = self.current_event_uuid
            event = self.current_event
        eventFile = open(os.path.join(settings.outputdir, event_uuid+'.json'), 'w')
        # Wrap the event in the {"Event": ...} envelope used by MISP feeds
        eventSupport = "{{\"Event\": {}}}".format(event.to_json())
        eventFile.write(eventSupport)
        eventFile.close()
        self.save_hashes()

    def save_manifest(self):
        """Write manifest.json to the output directory; exit on failure"""
        try:
            manifestFile = open(os.path.join(settings.outputdir, 'manifest.json'), 'w')
            manifestFile.write(json.dumps(self.manifest))
            manifestFile.close()
            print('Manifest saved')
        except Exception as e:
            print(e)
            sys.exit('Could not create the manifest file.')

    def save_hashes(self):
        """Append pending attribute hashes to hashes.csv; exit on failure"""
        if len(self.attributeHashes) == 0:
            return False
        try:
            hashFile = open(os.path.join(settings.outputdir, 'hashes.csv'), 'a')
            for element in self.attributeHashes:
                hashFile.write('{},{}\n'.format(element[0], element[1]))
            hashFile.close()
            self.attributeHashes = []
            print('Hash saved' + ' '*30)
        except Exception as e:
            print(e)
            sys.exit('Could not create the quick hash lookup file.')

    def _addEventToManifest(self, event):
        """Build the manifest entry (dict) describing `event`"""
        event_dict = event.to_dict()
        tags = []
        for eventTag in event_dict.get('EventTag', []):
            tags.append({'name': eventTag['Tag']['name'],
                         'colour': eventTag['Tag']['colour']})
        return {
            'Orgc': event_dict.get('Orgc', []),
            'Tag': tags,
            'info': event_dict['info'],
            'date': event_dict['date'],
            'analysis': event_dict['analysis'],
            'threat_level_id': event_dict['threat_level_id'],
            'timestamp': event_dict.get('timestamp', int(time.time()))
        }

    def get_last_event_from_manifest(self):
        """Retrieve the last event from the manifest.
        If the manifest doesn't exist or if it is empty, initialize it.

        Returns:
            list: [date string, event uuid, event info] of the newest event
        """
        try:
            manifest_path = os.path.join(settings.outputdir, 'manifest.json')
            with open(manifest_path, 'r') as f:
                man = json.load(f)
                dated_events = []
                for event_uuid, event_json in man.items():
                    # add events to manifest
                    self.manifest[event_uuid] = event_json
                    dated_events.append([
                        event_json['date'],
                        event_uuid,
                        event_json['info']
                    ])
                # Sort by date then by event name
                dated_events.sort(key=lambda k: (k[0], k[2]), reverse=True)
                return dated_events[0]
        except FileNotFoundError as e:
            # No manifest yet: bootstrap one, then retry (recursion ends
            # because _init_manifest creates the file)
            print('Manifest not found, generating a fresh one')
            self._init_manifest()
            return self.get_last_event_from_manifest()

    # DAILY
    def update_daily_event_id(self):
        """Roll over to a fresh event when the date changed since last use"""
        if self.current_event_date != datetime.date.today():  # create new event
            # save current event on disk
            self.flush_event()
            self.current_event = self.create_daily_event()
            self.current_event_date = datetime.date.today()
            self.current_event_uuid = self.current_event.get('uuid')
            self.event_name = self.current_event.info

    def _get_event_from_id(self, event_uuid):
        """Load <uuid>.json from the output directory into a MISPEvent"""
        with open(os.path.join(settings.outputdir, '%s.json' % event_uuid), 'r') as f:
            event_dict = json.load(f)
            event = MISPEvent()
            event.from_dict(**event_dict)
            return event

    def create_daily_event(self):
        """Create, persist and register a brand new daily MISPEvent"""
        new_uuid = gen_uuid()
        today = str(datetime.date.today())
        event_dict = {
            'uuid': new_uuid,
            'id': len(self.manifest)+1,
            'Tag': settings.Tag,
            'info': self.daily_event_name.format(today),
            'analysis': settings.analysis,  # [0-2]
            'threat_level_id': settings.threat_level_id,  # [1-4]
            'published': settings.published,
            'date': today
        }
        event = MISPEvent()
        event.from_dict(**event_dict)

        # reference org
        org_dict = {}
        org_dict['name'] = settings.org_name
        org_dict['uuid'] = settings.org_uuid
        event['Orgc'] = org_dict

        # save event on disk
        self.flush_event(new_event=event)
        # add event to manifest
        self.manifest[event['uuid']] = self._addEventToManifest(event)
        self.save_manifest()
        return event

4
MISP_export/install.sh Normal file
View File

@ -0,0 +1,4 @@
#!/bin/bash
# Set up a dedicated python3 virtualenv for the MISP feed web server.
virtualenv -p python3 serv-env
# Activate it so the pip install below lands inside the virtualenv.
. ./serv-env/bin/activate
# Server dependencies: flask + autoindex for directory listing, redis client.
pip3 install -U flask Flask-AutoIndex redis

12
MISP_export/server.py Executable file
View File

@ -0,0 +1,12 @@
#!/usr/bin/env python3
# Minimal web server exposing the generated MISP feed directory over HTTP.
import os.path
from flask import Flask
from flask_autoindex import AutoIndex
from settings import outputdir

app = Flask(__name__)
# Serve a browsable index of the feed output directory (manifest, events, hashes)
AutoIndex(app, browse_root=os.path.join(os.path.curdir, outputdir))

if __name__ == '__main__':
    # Listen on all interfaces; Flask default port (5000)
    app.run(host='0.0.0.0')

65
MISP_export/settings.default.py Executable file
View File

@ -0,0 +1,65 @@
""" REDIS RELATED """
# Your redis server
host='127.0.0.1'
port=6385
db=3
## The keynames to POP element from
keyname_pop=['authf']
# OTHERS
## If key prefix not provided, data will be added as either object, attribute or sighting
fallback_MISP_type = 'object'
### How to handle the fallback
fallback_object_template_name = 'generic' # MISP-Object only
fallback_attribute_category = 'comment' # MISP-Attribute only
## How frequent the event should be written on disk
#flushing_interval=5*60
flushing_interval=15
## The redis list keyname in which to put items that generated an error
keyname_error='feed-generation-error'
""" FEED GENERATOR CONFIGURATION """
# The output dir for the feed. This will drop a lot of files, so make
# sure that you use a directory dedicated to the feed
outputdir = 'output'
# Event meta data
## Required
### The organisation id that generated this feed
org_name='myOrg'
### Your organisation UUID
org_uuid=''
### The daily event name to be used in MISP.
### (e.g. honeypot_1, will produce each day an event of the form honeypot_1 dd-mm-yyyy)
daily_event_name='PyMISP default event name'
## Optional
analysis=0
threat_level_id=3
published=False
Tag=[
{
"colour": "#ffffff",
"name": "tlp:white"
},
{
"colour": "#ff00ff",
"name": "my:custom:feed"
}
]
# MISP Object constructor
from ObjectConstructor.AuthFailureMISPObject import AuthFailureMISPObject
#from pymisp.pymisp.tools import GenericObjectGenerator
from pymisp.tools import GenericObjectGenerator
constructor_dict = {
'authentication-failure-report': AuthFailureMISPObject,
'generic': GenericObjectGenerator
}
# Others
## Redis pooling time
sleep=60

View File

@ -1,2 +1,30 @@
# analyzer-d4-log
Analyze logs collected through d4 to produce diverse statistics.
This analyzer processes loglines ingested by d4 (as type 3).
# Architecture
analyzer-d4-log relies on redis to consume grokked loglines.
To grok the loglines, analyzer-d4-log relies on an external tool: both logstash https://www.elastic.co/logstash
and nifi https://nifi.apache.org/ have been tested for this purpose (using this nifi template https://github.com/D4-project/d4-nifi-templates).
These tools poll directly d4 server's redis for loglines and push the results into a specific redis queue that the analyzer consumes.
![Grokking D4 loglines in nifi](assets/nifi.png)
analyzer-d4-log polls this queue periodically to produce counts and statistics of the data. At the moment, only sshd logs are supported but more will come in the future.
# SSHD log analysis
## Output generation
Every once in a while, analyzer-d4-log compiles the results into SVG images and CSV files. It also produces a minimalist webpage to navigate the data with a date-range picker.
![](assets/analyzer-d4-log.png)
## MISP export
In addition to this graphical view, the repository contains a MISP_export folder that allows for the publication of a MISP feed of daily events. It compiles the top 100 usernames and sources seen in SSH login failures by D4 sensors.
![](assets/dailyMISPevent.png)
![](assets/d4_auth_MISPobject.png)
Since MISP 2.4.128, MISP can conveniently display this data through specialized widgets.
![](assets/MISP_widgets.png)

BIN
assets/MISP_widgets.png Normal file

Binary file not shown.

After

Width:  |  Height:  |  Size: 72 KiB

Binary file not shown.

After

Width:  |  Height:  |  Size: 29 KiB

BIN
assets/dailyMISPevent.png Normal file

Binary file not shown.

After

Width:  |  Height:  |  Size: 12 KiB

BIN
assets/nifi.png Normal file

Binary file not shown.

After

Width:  |  Height:  |  Size: 43 KiB

View File

@ -0,0 +1 @@
localhost:6381/16

View File

@ -1 +0,0 @@
localhost:6380/2

1
conf.sample/redis_input Normal file
View File

@ -0,0 +1 @@
localhost:6385/3

View File

@ -1 +0,0 @@
localhost:6500/16

View File

@ -1 +0,0 @@
d42967c1-f7ad-464e-bbc7-4464c653d7a6

2
go.mod
View File

@ -3,7 +3,7 @@ module github.com/D4-project/analyzer-d4-log
go 1.13
require (
github.com/D4-project/d4-golang-utils v0.1.2
github.com/D4-project/d4-golang-utils v0.1.6
github.com/ajstarks/svgo v0.0.0-20200204031535-0cbcf57ea1d8 // indirect
github.com/gomodule/redigo v2.0.0+incompatible
github.com/jung-kurt/gofpdf v1.16.2 // indirect

6
go.sum
View File

@ -1,6 +1,6 @@
github.com/BurntSushi/xgb v0.0.0-20160522181843-27f122750802/go.mod h1:IVnqGOEym/WlBOVXweHU+Q+/VP0lqqI8lqeDx9IjBqo=
github.com/D4-project/d4-golang-utils v0.1.2 h1:aLdvwIR2CFvIn2FnqPjbHxzLeo3ZL7YEyhCXRL6a9kQ=
github.com/D4-project/d4-golang-utils v0.1.2/go.mod h1:2rq8KBQnNNDocwc/49cnpaqoQA/komoSHKom7ynvqJc=
github.com/D4-project/d4-golang-utils v0.1.6 h1:g8To+VLwa5ucH8hcmRZGx/hmrHmn6AD6A0vaiYKFAQc=
github.com/D4-project/d4-golang-utils v0.1.6/go.mod h1:GGR5KMhvABZtIfmS5jZkwQnBoP+9/V0ZEETSGiWLaM4=
github.com/ajstarks/svgo v0.0.0-20180226025133-644b8db467af h1:wVe6/Ea46ZMeNkQjjBW6xcqyQA/j5e0D6GytH95g0gQ=
github.com/ajstarks/svgo v0.0.0-20180226025133-644b8db467af/go.mod h1:K08gAheRH3/J6wwsYMMT4xOr94bZjxIelGM0+d/wbFw=
github.com/ajstarks/svgo v0.0.0-20200204031535-0cbcf57ea1d8 h1:LMjxfr9tcHP10YI+i4+cjHWSjPeUAUy5+sqw5FhFzwE=
@ -56,8 +56,6 @@ gonum.org/v1/netlib v0.0.0-20190313105609-8cb42192e0e0/go.mod h1:wa6Ws7BG/ESfp6d
gonum.org/v1/netlib v0.0.0-20191229114700-bbb4dff026f8 h1:kHY67jAKYewKUCz9YdNDa7iLAJ2WfNmoHzCCX4KnA8w=
gonum.org/v1/netlib v0.0.0-20191229114700-bbb4dff026f8/go.mod h1:2IgXn/sJaRbePPBA1wRj8OE+QLvVaH0q8SK6TSTKlnk=
gonum.org/v1/plot v0.0.0-20190515093506-e2840ee46a6b/go.mod h1:Wt8AAjI+ypCyYX3nZBvf6cAIx93T+c/OS2HFAYskSZc=
gonum.org/v1/plot v0.0.0-20200212202559-4d97eda4de95 h1:dmUNA1pi2nVfGj2i7kijrI7bupgm2GFbJLYYu6C5bBY=
gonum.org/v1/plot v0.0.0-20200212202559-4d97eda4de95/go.mod h1:+HbaZVpsa73UwN7kXGCECULRHovLRJjH+t5cFPgxErs=
gonum.org/v1/plot v0.7.0 h1:Otpxyvra6Ie07ft50OX5BrCfS/BWEMvhsCUHwPEJmLI=
gonum.org/v1/plot v0.7.0/go.mod h1:2wtU6YrrdQAhAF9+MTd5tOQjrov/zF70b1i99Npjvgo=
modernc.org/cc v1.0.0/go.mod h1:1Sk4//wdnYJiUIxnW8ddKpaOJCF37yAdqYnkxUpaYxw=

View File

@ -0,0 +1,60 @@
package inputreader
import (
"bytes"
"github.com/gomodule/redigo/redis"
"io"
"log"
)
// RedisLPOPReader is an abstraction of an LPOP-consumed redis list
// and behaves like a reader
type RedisLPOPReader struct {
	// D4 redis connection
	r *redis.Conn
	// D4 redis database
	d int
	// D4 Queue storing
	q string
	// Current buffer
	buf []byte
}
// NewLPOPReader creates a new RedisLPOPReader consuming queue `queue`
// from database `db` on connection `rc`. On SELECT failure the
// connection is closed and the process exits (log.Fatal).
func NewLPOPReader(rc *redis.Conn, db int, queue string) *RedisLPOPReader {
	rr := *rc
	if _, err := rr.Do("SELECT", db); err != nil {
		rr.Close()
		log.Fatal(err)
	}
	return &RedisLPOPReader{
		r: rc,
		d: db,
		q: queue,
	}
}
// Read LPOPs one element from the redis queue and copies the resulting
// bytes into p. An empty queue is reported as io.EOF so that callers
// may sleep and retry rather than stop.
func (rl *RedisLPOPReader) Read(p []byte) (n int, err error) {
	conn := *rl.r
	data, err := redis.Bytes(conn.Do("LPOP", rl.q))
	// If redis return empty: EOF (user should not stop)
	if err == redis.ErrNil {
		return 0, io.EOF
	}
	if err != nil {
		log.Println(err)
		return 0, err
	}
	return bytes.NewReader(data).Read(p)
}
// Teardown is called on error to close the redis connection
func (rl *RedisLPOPReader) Teardown() {
	conn := *rl.r
	conn.Close()
}

85
logcompiler/compiler.go Normal file
View File

@ -0,0 +1,85 @@
package logcompiler
import (
"io"
"sync"
"time"
"github.com/D4-project/analyzer-d4-log/inputreader"
"github.com/gomodule/redigo/redis"
)
type (
	// Compiler provides the interface for a Compiler
	// It should provide:
	// Set to assign a redis connection to it
	// Parse to parse a line of log
	// Flush recomputes statistics and recompile output
	Compiler interface {
		Set(*sync.WaitGroup, *redis.Conn, *redis.Conn, io.Reader, int, *sync.WaitGroup, *chan error, time.Duration)
		SetReader(io.Reader)
		Pull(chan error)
		Flush() error
		MISPexport() error
	}

	// CompilerStruct will implements Compiler, and should be embedded in
	// each type implementing compiler
	CompilerStruct struct {
		// Compiler redis Read
		r0 *redis.Conn
		// Compiler redis Write
		r1 *redis.Conn
		// Input Reader
		reader io.Reader
		// Number of line to process before triggering output
		compilationTrigger int
		// Current line processed
		nbLines int
		// Global WaitGroup to handle graceful exiting a compilation routines
		compilegr *sync.WaitGroup
		// Goroutines error channel
		pullreturn *chan error
		// Comutex embedding
		comutex
		// retry Period when applicable
		retryPeriod time.Duration
	}

	// comutex guards the `compiling` flag shared with the compile goroutine
	comutex struct {
		mu        sync.Mutex
		compiling bool
	}
)
// Set wires the compiler to its redis connections, input reader and
// compilation parameters, and resets the compiling flag.
func (s *CompilerStruct) Set(wg *sync.WaitGroup, rconn0 *redis.Conn, rconn1 *redis.Conn, reader io.Reader, ct int, compilegr *sync.WaitGroup, c *chan error, retry time.Duration) {
	s.r0 = rconn0
	s.r1 = rconn1
	s.reader = reader
	s.compilationTrigger = ct
	s.retryPeriod = retry
	s.pullreturn = c
	s.compilegr = compilegr
	s.compiling = false
}
// SetReader changes the compiler's input source.
func (s *CompilerStruct) SetReader(reader io.Reader) {
	s.reader = reader
}
// teardown is called on error: it reports err on the shared error
// channel and closes both redis connections. A RedisLPOPReader input
// owns its own connection, so it is torn down as well.
func (s *CompilerStruct) teardown(err error) {
	*s.pullreturn <- err
	(*s.r0).Close()
	(*s.r1).Close()
	// If the reader is a LPOPReader, we need to teardown the connection
	if lpop, ok := s.reader.(*inputreader.RedisLPOPReader); ok {
		(*lpop).Teardown()
	}
}

View File

@ -1,15 +1,16 @@
package logparser
package logcompiler
import (
"encoding/json"
"errors"
"fmt"
"html/template"
"io"
"io/ioutil"
"log"
"math"
"os"
"path/filepath"
"regexp"
"strconv"
"strings"
"time"
@ -21,135 +22,144 @@ import (
"gonum.org/v1/plot/vg"
)
// SshdParser Holds a struct that corresponds to a sshd log line
// and the redis connection
type SshdParser struct {
// Write
r1 *redis.Conn
// Read
r2 *redis.Conn
// SSHDCompiler Holds a struct that corresponds to a sshd groked line
// and the redis connections
type SSHDCompiler struct {
CompilerStruct
}
// Set set the redic connection to this parser
func (s *SshdParser) Set(rconn1 *redis.Conn, rconn2 *redis.Conn) {
s.r1 = rconn1
s.r2 = rconn2
// GrokedSSHD map JSON fields to Go struct
type GrokedSSHD struct {
SSHMessage string `json:"ssh_message"`
SyslogPid string `json:"syslog_pid"`
SyslogHostname string `json:"syslog_hostname"`
SyslogTimestamp string `json:"syslog_timestamp"`
SshdClientIP string `json:"sshd_client_ip"`
SyslogProgram string `json:"syslog_program"`
SshdInvalidUser string `json:"sshd_invalid_user"`
}
type MISP_auth_failure_sshd_username struct {
Name string `json:"name"`
Mtype string `json:"type"`
Username string `json:"username,omitempty"`
Destination string `json:"ip-dst,omitempty"`
Source string `json:"ip-src,omitempty"`
Total string `json:"total"`
}
// Flush recomputes statistics and recompile HTML output
func (s *SshdParser) Flush() error {
// TODO : review after refacto
func (s *SSHDCompiler) Flush() error {
log.Println("Flushing")
r1 := *s.r1
r0 := *s.r2
r0 := *s.r0
// writing in database 1
if _, err := r1.Do("SELECT", 1); err != nil {
r0.Close()
r1.Close()
return err
s.teardown(err)
}
// flush stats DB
if _, err := r1.Do("FLUSHDB"); err != nil {
r0.Close()
r1.Close()
return err
s.teardown(err)
}
log.Println("Statistics Database Flushed")
// reading from database 0
if _, err := r0.Do("SELECT", 0); err != nil {
r0.Close()
r1.Close()
return err
s.teardown(err)
}
// Compile statistics / html output for each line
keys, err := redis.Strings(r0.Do("KEYS", "*"))
if err != nil {
r0.Close()
r1.Close()
return err
s.teardown(err)
}
for _, v := range keys {
dateHost := strings.Split(v, ":")
kkeys, err := redis.StringMap(r0.Do("HGETALL", v))
if err != nil {
r0.Close()
r1.Close()
return err
s.teardown(err)
}
dateInt, err := strconv.ParseInt(dateHost[0], 10, 64)
if err != nil {
r0.Close()
r1.Close()
return err
s.teardown(err)
}
parsedTime := time.Unix(dateInt, 0)
err = compileStats(s, parsedTime, kkeys["src"], kkeys["username"], dateHost[1])
if err != nil {
r0.Close()
r1.Close()
return err
s.teardown(err)
}
}
return nil
}
// Parse parses a line of sshd log
func (s *SshdParser) Parse(logline string) error {
r := *s.r1
re := regexp.MustCompile(`^(?P<date>[[:alpha:]]{3} {1,2}\d{1,2}\s\d{2}:\d{2}:\d{2}) (?P<host>[^ ]+) sshd\[[[:alnum:]]+\]: Invalid user (?P<username>.*) from (?P<src>.*$)`)
n1 := re.SubexpNames()
res := re.FindAllStringSubmatch(logline, -1)
if res == nil {
return errors.New("[sshd]: no match")
// Pull pulls a line of groked sshd logline from redis
func (s *SSHDCompiler) Pull(c chan error) {
r1 := *s.r1
for {
jsoner := json.NewDecoder(s.reader)
DecodeLoop:
for jsoner.More() {
var m GrokedSSHD
err := jsoner.Decode(&m)
if err := jsoner.Decode(&m); err == io.EOF {
// On EOF we break this loop to go to a sleep
break DecodeLoop
} else if err != nil {
s.teardown(err)
}
fmt.Printf("time: %s, hostname: %s, client_ip: %s, user: %s\n", m.SyslogTimestamp, m.SyslogHostname, m.SshdClientIP, m.SshdInvalidUser)
// Assumes the system parses logs recorded during the current year
m.SyslogTimestamp = fmt.Sprintf("%v %v", m.SyslogTimestamp, time.Now().Year())
// TODO Make this automatic or a config parameter
loc, _ := time.LoadLocation("Europe/Luxembourg")
parsedTime, _ := time.ParseInLocation("Jan 2 15:04:05 2006", m.SyslogTimestamp, loc)
m.SyslogTimestamp = string(strconv.FormatInt(parsedTime.Unix(), 10))
// Pushing loglines in database 0
if _, err := r1.Do("SELECT", 0); err != nil {
s.teardown(err)
}
// Writing logs
_, err = redis.Bool(r1.Do("HSET", fmt.Sprintf("%v:%v", m.SyslogTimestamp, m.SyslogHostname), "username", m.SshdInvalidUser, "src", m.SshdClientIP))
if err != nil {
s.teardown(err)
}
err = compileStats(s, parsedTime, m.SshdClientIP, m.SshdInvalidUser, m.SyslogHostname)
if err != nil {
s.teardown(err)
}
// Compiler html / jsons
s.nbLines++
if s.nbLines > s.compilationTrigger {
s.nbLines = 0
//Non-blocking
if !s.compiling {
go s.compile()
}
}
}
// EOF, we wait for the reader to have
// new data available
time.Sleep(s.retryPeriod)
}
r2 := res[0]
// Build the group map for the line
md := map[string]string{}
for i, n := range r2 {
// fmt.Printf("%d. match='%s'\tname='%s'\n", i, n, n1[i])
md[n1[i]] = n
}
// Assumes the system parses logs recorded during the current year
md["date"] = fmt.Sprintf("%v %v", md["date"], time.Now().Year())
// TODO Make this automatic or a config parameter
loc, _ := time.LoadLocation("Europe/Luxembourg")
parsedTime, _ := time.ParseInLocation("Jan 2 15:04:05 2006", md["date"], loc)
md["date"] = string(strconv.FormatInt(parsedTime.Unix(), 10))
// Pushing loglines in database 0
if _, err := r.Do("SELECT", 0); err != nil {
r.Close()
return err
}
// Writing logs
_, err := redis.Bool(r.Do("HSET", fmt.Sprintf("%v:%v", md["date"], md["host"]), "username", md["username"], "src", md["src"]))
if err != nil {
r.Close()
return err
}
err = compileStats(s, parsedTime, md["src"], md["username"], md["host"])
if err != nil {
r.Close()
return err
}
return nil
}
func compileStats(s *SshdParser, parsedTime time.Time, src string, username string, host string) error {
func compileStats(s *SSHDCompiler, parsedTime time.Time, src string, username string, host string) error {
r := *s.r1
// Pushing statistics in database 1
if _, err := r.Do("SELECT", 1); err != nil {
r.Close()
return err
s.teardown(err)
}
// Daily
@ -159,8 +169,7 @@ func compileStats(s *SshdParser, parsedTime time.Time, src string, username stri
if oldest, err := redis.String(r.Do("GET", "oldest")); err == redis.ErrNil {
r.Do("SET", "oldest", dstr)
} else if err != nil {
r.Close()
return err
s.teardown(err)
} else {
// Check if dates are the same
if oldest != dstr {
@ -176,8 +185,7 @@ func compileStats(s *SshdParser, parsedTime time.Time, src string, username stri
if newest, err := redis.String(r.Do("GET", "newest")); err == redis.ErrNil {
r.Do("SET", "newest", dstr)
} else if err != nil {
r.Close()
return err
s.teardown(err)
} else {
// Check if dates are the same
if newest != dstr {
@ -191,80 +199,73 @@ func compileStats(s *SshdParser, parsedTime time.Time, src string, username stri
err := compileStat(s, dstr, "daily", src, username, host)
if err != nil {
r.Close()
return err
s.teardown(err)
}
// Monthly
mstr := fmt.Sprintf("%v%v", parsedTime.Year(), fmt.Sprintf("%02d", int(parsedTime.Month())))
err = compileStat(s, mstr, "daily", src, username, host)
if err != nil {
r.Close()
return err
s.teardown(err)
}
// Yearly
ystr := fmt.Sprintf("%v", parsedTime.Year())
err = compileStat(s, ystr, "daily", src, username, host)
if err != nil {
r.Close()
return err
s.teardown(err)
}
return nil
}
func compileStat(s *SshdParser, datestr string, mode string, src string, username string, host string) error {
func compileStat(s *SSHDCompiler, datestr string, mode string, src string, username string, host string) error {
r := *s.r1
_, err := redis.String(r.Do("ZINCRBY", fmt.Sprintf("%v:%v", datestr, "statssrc"), 1, src))
if err != nil {
r.Close()
return err
}
_, err = redis.String(r.Do("ZINCRBY", fmt.Sprintf("%v:%v", datestr, "statsusername"), 1, username))
if err != nil {
r.Close()
return err
}
_, err = redis.String(r.Do("ZINCRBY", fmt.Sprintf("%v:%v", datestr, "statshost"), 1, host))
if err != nil {
r.Close()
return err
}
_, err = redis.Int(r.Do("SADD", fmt.Sprintf("toupdate:%v", mode), fmt.Sprintf("%v:%v", datestr, "statssrc")))
if err != nil {
r.Close()
return err
}
_, err = redis.Int(r.Do("SADD", fmt.Sprintf("toupdate:%v", mode), fmt.Sprintf("%v:%v", datestr, "statsusername")))
if err != nil {
r.Close()
return err
}
_, err = redis.Int(r.Do("SADD", fmt.Sprintf("toupdate:%v", mode), fmt.Sprintf("%v:%v", datestr, "statshost")))
if err != nil {
r.Close()
return err
}
return nil
}
// Compile create graphs of the results
func (s *SshdParser) Compile() error {
r := *s.r2
// compile create json and graphical representation of the results
func (s *SSHDCompiler) compile() error {
s.mu.Lock()
s.compiling = true
s.compilegr.Add(1)
log.Println("[+] SSHD compiling")
r := *s.r0
// Pulling statistics from database 1
if _, err := r.Do("SELECT", 1); err != nil {
r.Close()
return err
}
// List days for which we need to update statistics
toupdateD, err := redis.Strings(r.Do("SMEMBERS", "toupdate:daily"))
if err != nil {
r.Close()
return err
}
@ -272,7 +273,10 @@ func (s *SshdParser) Compile() error {
for _, v := range toupdateD {
err = plotStats(s, v)
if err != nil {
r.Close()
return err
}
err = csvStats(s, v)
if err != nil {
return err
}
}
@ -280,7 +284,6 @@ func (s *SshdParser) Compile() error {
// List months for which we need to update statistics
toupdateM, err := redis.Strings(r.Do("SMEMBERS", "toupdate:monthly"))
if err != nil {
r.Close()
return err
}
@ -288,7 +291,6 @@ func (s *SshdParser) Compile() error {
for _, v := range toupdateM {
err = plotStats(s, v)
if err != nil {
r.Close()
return err
}
}
@ -296,7 +298,6 @@ func (s *SshdParser) Compile() error {
// List years for which we need to update statistics
toupdateY, err := redis.Strings(r.Do("SMEMBERS", "toupdate:yearly"))
if err != nil {
r.Close()
return err
}
@ -304,7 +305,6 @@ func (s *SshdParser) Compile() error {
for _, v := range toupdateY {
err = plotStats(s, v)
if err != nil {
r.Close()
return err
}
}
@ -313,11 +313,9 @@ func (s *SshdParser) Compile() error {
var newest string
var oldest string
if newest, err = redis.String(r.Do("GET", "newest")); err == redis.ErrNil {
r.Close()
return err
}
if oldest, err = redis.String(r.Do("GET", "oldest")); err == redis.ErrNil {
r.Close()
return err
}
parsedOldest, _ := time.Parse("20060102", oldest)
@ -328,14 +326,12 @@ func (s *SshdParser) Compile() error {
// Gettings list of years for which we have statistics
reply, err := redis.Values(r.Do("SCAN", "0", "MATCH", "????:*", "COUNT", 1000))
if err != nil {
r.Close()
return err
}
var cursor int64
var items []string
_, err = redis.Scan(reply, &cursor, &items)
if err != nil {
r.Close()
return err
}
@ -359,13 +355,11 @@ func (s *SshdParser) Compile() error {
var mraw []string
reply, err = redis.Values(r.Do("SCAN", "0", "MATCH", v+"??:*", "COUNT", 1000))
if err != nil {
r.Close()
return err
}
_, err = redis.Scan(reply, &cursor, &mraw)
if err != nil {
r.Close()
return err
}
for _, m := range mraw {
@ -384,9 +378,8 @@ func (s *SshdParser) Compile() error {
}
// Parse Template
t, err := template.ParseFiles(filepath.Join("logparser", "sshd", "statistics.gohtml"))
t, err := template.ParseFiles(filepath.Join("logcompiler", "sshd", "statistics.gohtml"))
if err != nil {
r.Close()
return err
}
@ -426,7 +419,6 @@ func (s *SshdParser) Compile() error {
if _, err := os.Stat("data"); os.IsNotExist(err) {
err := os.Mkdir("data", 0700)
if err != nil {
r.Close()
return err
}
}
@ -434,7 +426,6 @@ func (s *SshdParser) Compile() error {
if _, err := os.Stat(filepath.Join("data", "sshd")); os.IsNotExist(err) {
err := os.Mkdir(filepath.Join("data", "sshd"), 0700)
if err != nil {
r.Close()
return err
}
}
@ -450,7 +441,6 @@ func (s *SshdParser) Compile() error {
err = t.ExecuteTemplate(f, "dailytpl", daily)
err = t.ExecuteTemplate(f, "footertpl", daily)
if err != nil {
r.Close()
return err
}
@ -461,7 +451,6 @@ func (s *SshdParser) Compile() error {
err = t.ExecuteTemplate(f, "monthlytpl", monthly)
err = t.ExecuteTemplate(f, "footertpl", monthly)
if err != nil {
r.Close()
return err
}
@ -472,29 +461,153 @@ func (s *SshdParser) Compile() error {
err = t.ExecuteTemplate(f, "yearlytpl", yearly)
err = t.ExecuteTemplate(f, "footertpl", yearly)
if err != nil {
r.Close()
return err
}
// Copy js asset file
input, err := ioutil.ReadFile(filepath.Join("logparser", "sshd", "load.js"))
input, err := ioutil.ReadFile(filepath.Join("logcompiler", "sshd", "load.js"))
if err != nil {
log.Println(err)
return err
}
err = ioutil.WriteFile(filepath.Join("data", "sshd", "load.js"), input, 0644)
if err != nil {
log.Println(err)
return err
}
log.Println("[-] SSHD compiling finished.")
s.compiling = false
s.mu.Unlock()
// Tell main program we can exit if needed now
s.compilegr.Done()
return nil
}
// csvStats dumps the redis sorted set named v (e.g. "20200622:statssrc")
// into data/sshd/<period>/<v>.csv, one "member, score" pair per line.
func csvStats(s *SSHDCompiler, v string) error {
	r := *s.r0
	zrank, err := redis.Strings(r.Do("ZRANGEBYSCORE", v, "-inf", "+inf", "WITHSCORES"))
	if err != nil {
		return err
	}

	stype := strings.Split(v, ":")

	// Create the folder hierarchy that stores the data.
	// MkdirAll creates all missing parents and is a no-op when the
	// path already exists, replacing three separate Stat/Mkdir checks.
	if err := os.MkdirAll(filepath.Join("data", "sshd", stype[0]), 0700); err != nil {
		return err
	}

	file, err := os.Create(filepath.Join("data", "sshd", stype[0], fmt.Sprintf("%v.csv", v)))
	if err != nil {
		return err
	}
	defer file.Close()

	// WITHSCORES replies alternate member, score, member, score, ...
	for i, field := range zrank {
		if i%2 == 0 {
			// even index: set member
			fmt.Fprintf(file, "%s, ", field)
		} else {
			// odd index: its score, terminating the CSV line
			fmt.Fprintln(file, field)
		}
	}
	return nil
}
func plotStats(s *SshdParser, v string) error {
r := *s.r2
// MISPexport exports today's top 100 failed usernames and top 100 source IPs
// as MISP "authentication-failure-report" objects: it reads the daily
// statistics from redis database 1 and LPUSHes the JSON-encoded objects onto
// the "authf_object" list in database 3.
func (s *SSHDCompiler) MISPexport() error {
	today := time.Now()
	dstr := fmt.Sprintf("%v%02d%02d", today.Year(), int(today.Month()), int(today.Day()))

	r0 := *s.r0
	r1 := *s.r1

	// Statistics are read from database 1.
	if _, err := r0.Do("SELECT", 1); err != nil {
		s.teardown(err)
	}
	// MISP objects are written to database 3.
	if _, err := r1.Do("SELECT", 3); err != nil {
		s.teardown(err)
	}

	zrankUsername, err := redis.Strings(r0.Do("ZREVRANGEBYSCORE", fmt.Sprintf("%v:statsusername", dstr), "+inf", "-inf", "WITHSCORES", "LIMIT", 0, 100))
	if err != nil {
		// BUG FIX: this error was previously swallowed by an empty branch.
		return err
	}
	zrankSource, err := redis.Strings(r0.Do("ZREVRANGEBYSCORE", fmt.Sprintf("%v:statssrc", dstr), "+inf", "-inf", "WITHSCORES", "LIMIT", 0, 100))
	if err != nil {
		return err
	}

	mispobject := new(MISP_auth_failure_sshd_username)
	mispobject.Name = "authentication-failure-report"
	mispobject.Mtype = "sshd"

	// WITHSCORES replies alternate member, score: even index is the
	// username, odd index is its count, at which point the object is
	// complete and can be pushed.
	for k, v := range zrankUsername {
		if (k % 2) == 0 {
			mispobject.Username = v
		} else {
			mispobject.Total = v
			b, err := json.Marshal(mispobject)
			if err != nil {
				return err
			}
			// Skip fully-empty objects (all fields omitted by omitempty).
			if string(b) != "{}" {
				r1.Do("LPUSH", "authf_object", b)
			}
		}
	}

	// Reset the username so source-only objects don't carry a stale value.
	mispobject.Username = ""
	for k, v := range zrankSource {
		if (k % 2) == 0 {
			mispobject.Source = v
		} else {
			mispobject.Total = v
			b, err := json.Marshal(mispobject)
			if err != nil {
				return err
			}
			if string(b) != "{}" {
				r1.Do("LPUSH", "authf_object", b)
			}
		}
	}
	return nil
}
func plotStats(s *SSHDCompiler, v string) error {
r := *s.r0
zrank, err := redis.Strings(r.Do("ZRANGEBYSCORE", v, "-inf", "+inf", "WITHSCORES"))
if err != nil {
r.Close()
return err
}
@ -515,7 +628,6 @@ func plotStats(s *SshdParser, v string) error {
p, err := plot.New()
if err != nil {
r.Close()
return err
}
@ -529,7 +641,7 @@ func plotStats(s *SshdParser, v string) error {
p.Title.Text = "Host"
default:
p.Title.Text = ""
log.Println("We should not reach this point, open an issue.")
return errors.New("we should not reach this point, open an issue")
}
p.Y.Label.Text = "Count"
@ -549,7 +661,6 @@ func plotStats(s *SshdParser, v string) error {
if _, err := os.Stat("data"); os.IsNotExist(err) {
err := os.Mkdir("data", 0700)
if err != nil {
r.Close()
return err
}
}
@ -557,7 +668,6 @@ func plotStats(s *SshdParser, v string) error {
if _, err := os.Stat(filepath.Join("data", "sshd")); os.IsNotExist(err) {
err := os.Mkdir(filepath.Join("data", "sshd"), 0700)
if err != nil {
r.Close()
return err
}
}
@ -565,14 +675,12 @@ func plotStats(s *SshdParser, v string) error {
if _, err := os.Stat(filepath.Join("data", "sshd", stype[0])); os.IsNotExist(err) {
err := os.Mkdir(filepath.Join("data", "sshd", stype[0]), 0700)
if err != nil {
r.Close()
return err
}
}
xsize := 3 + vg.Length(math.Round(float64(len(keys)/2)))
if err := p.Save(15*vg.Centimeter, xsize*vg.Centimeter, filepath.Join("data", "sshd", stype[0], fmt.Sprintf("%v.svg", v))); err != nil {
r.Close()
return err
}

4
logcompiler/sshd.txt Normal file
View File

@ -0,0 +1,4 @@
{"ssh_message":"Invalid user misp-project from 119.42.175.200","syslog_pid":"28367","syslog_hostname":"sigmund","syslog_timestamp":"Feb 27 06:52:08","sshd_client_ip":"119.42.175.200","syslog_program":"sshd","sshd_invalid_user":"misp-project"}
{"ssh_message":"Invalid user oracle from 49.212.211.207","syslog_pid":"28372","syslog_hostname":"sigmund","syslog_timestamp":"Feb 27 06:53:15","sshd_client_ip":"49.212.211.207","syslog_program":"sshd","sshd_invalid_user":"oracle"}
{"ssh_message":"Invalid user 2019 from 112.78.1.247","syslog_pid":"28381","syslog_hostname":"sigmund","syslog_timestamp":"Feb 27 06:53:57","sshd_client_ip":"112.78.1.247","syslog_program":"sshd","sshd_invalid_user":"2019"}
{"ssh_message":"Invalid user postgres from 217.182.194.95","syslog_pid":"28435","syslog_hostname":"sigmund","syslog_timestamp":"Feb 27 06:58:11","sshd_client_ip":"217.182.194.95","syslog_program":"sshd","sshd_invalid_user":"postgres"}

View File

@ -1,17 +0,0 @@
package logparser
import "github.com/gomodule/redigo/redis"
type (
	// Parser is the interface a log parser must implement.
	// It should provide:
	//  Set to assign two redis connections to it
	//  Parse to parse a single line of log
	//  Flush to recompute statistics and recompile the output
	//  Compile to generate the HTML/graph output
	Parser interface {
		Set(*redis.Conn, *redis.Conn)
		Parse(string) error
		Flush() error
		Compile() error
	}
)

View File

@ -1,95 +0,0 @@
package logparser
import (
"bufio"
"fmt"
"log"
"os"
"regexp"
"testing"
)
// expected maps each test.log line number to the capture groups the sshd
// parser is expected to extract from it.
var expected = map[int]map[string]string{
	0: {
		"date":     "Jan 22 11:59:37",
		"host":     "sigmund",
		"username": "git",
		"src":      "106.12.14.144",
	},
	1: {
		"date":     "Jan 22 11:37:19",
		"host":     "si.mund",
		"username": "gestion",
		"src":      "159.89.153.54",
	},
	2: {
		"date":     "Jan 22 11:34:46",
		"host":     "sigmund",
		"username": "atpco",
		"src":      "177.152.124.21",
	},
	3: {
		"date":     "Jan 22 11:33:07",
		"host":     "sigmund",
		"username": "ki",
		"src":      "49.233.183.158",
	},
	4: {
		"date":     "Jan 22 11:29:16",
		"host":     "sigmund",
		"username": "a.min",
		"src":      "185.56.8.191",
	},
	5: {
		"date":     "Jan 22 11:29:16",
		"host":     "sigmund",
		"username": " ",
		"src":      "185.56.8.191",
	},
	6: {
		"date":     "Jan 22 11:29:16",
		"host":     "sigmund",
		"username": "",
		"src":      "185.56.8.191",
	},
	7: {
		"date":     "Feb 3 06:50:51",
		"host":     "sigmund",
		"username": "apples",
		"src":      "37.117.180.69",
	},
}
func TestSshdParser(t *testing.T) {
// Opening sshd test file
fmt.Println("[+] Testing the sshd log parser")
f, err := os.Open("./test.log")
if err != nil {
log.Fatalf("Error opening test file: %v", err)
}
defer f.Close()
scanner := bufio.NewScanner(f)
c := 0
for scanner.Scan() {
re := regexp.MustCompile(`^(?P<date>[[:alpha:]]{3} {1,2}\d{1,2}\s\d{2}:\d{2}:\d{2}) (?P<host>[^ ]+) sshd\[[[:alnum:]]+\]: Invalid user (?P<username>.*) from (?P<src>.*$)`)
n1 := re.SubexpNames()
r2 := re.FindAllStringSubmatch(scanner.Text(), -1)[0]
// Build the group map for the line
md := map[string]string{}
for i, n := range r2 {
// fmt.Printf("%d. match='%s'\tname='%s'\n", i, n, n1[i])
md[n1[i]] = n
}
// Check against the expected map
for _, n := range n1 {
if n != "" {
if md[n] != expected[c][n] {
t.Errorf("%v = '%v'; want '%v'", n, md[n], expected[c][n])
}
}
}
c++
}
}

View File

@ -1,8 +0,0 @@
Jan 22 11:59:37 sigmund sshd[26514]: Invalid user git from 106.12.14.144
Jan 22 11:37:19 si.mund sshd[26143]: Invalid user gestion from 159.89.153.54
Jan 22 11:34:46 sigmund sshd[26125]: Invalid user atpco from 177.152.124.21
Jan 22 11:33:07 sigmund sshd[26109]: Invalid user ki from 49.233.183.158
Jan 22 11:29:16 sigmund sshd[26091]: Invalid user a.min from 185.56.8.191
Jan 22 11:29:16 sigmund sshd[26091]: Invalid user from 185.56.8.191
Jan 22 11:29:16 sigmund sshd[26091]: Invalid user from 185.56.8.191
Feb 3 06:50:51 sigmund sshd[12611]: Invalid user apples from 37.117.180.69

254
main.go
View File

@ -1,7 +1,6 @@
package main
import (
"bufio"
"flag"
"fmt"
"log"
@ -12,19 +11,20 @@ import (
"sync"
"time"
"github.com/D4-project/analyzer-d4-log/logparser"
"github.com/D4-project/analyzer-d4-log/inputreader"
"github.com/D4-project/analyzer-d4-log/logcompiler"
config "github.com/D4-project/d4-golang-utils/config"
"github.com/gomodule/redigo/redis"
)
type (
redisconfD4 struct {
redisHost string
redisPort string
redisDB int
redisQueue string
// Input is a grok - NIFI or Logstash
redisconfInput struct {
redisHost string
redisPort string
redisDB int
}
redisconfParsers struct {
redisconfCompilers struct {
redisHost string
redisPort string
redisDBCount int
@ -33,39 +33,52 @@ type (
httpHost string
httpPort string
}
comutex struct {
mu sync.Mutex
compiling bool
}
)
// Setting up flags
var (
confdir = flag.String("c", "conf.sample", "configuration directory")
all = flag.Bool("a", true, "run all parsers when set. Set by default")
specific = flag.String("o", "", "run only a specific parser [sshd]")
debug = flag.Bool("d", false, "debug info in logs")
fromfile = flag.String("f", "", "parse from file on disk")
retry = flag.Int("r", 1, "time in minute before retry on empty d4 queue")
flush = flag.Bool("F", false, "Flush HTML output, recompile all statistic from redis logs, then quits")
redisD4 redis.Conn
redisParsers *redis.Pool
parsers = [1]string{"sshd"}
compilationTrigger = 20
wg sync.WaitGroup
compiling comutex
torun = []logparser.Parser{}
tmpretry, _ = time.ParseDuration("30s")
// Flags
confdir = flag.String("c", "conf.sample", "configuration directory")
all = flag.Bool("a", true, "run all compilers when set. Set by default")
specific = flag.String("o", "", "run only a specific parser [sshd]")
debug = flag.Bool("d", false, "debug info in logs")
fromfile = flag.String("f", "", "parse from file on disk")
retry = flag.Duration("r", tmpretry, "Time in human format before retrying to read an empty d4 queue")
flush = flag.Bool("F", false, "Flush HTML output, recompile all statistic from redis logs, then quits")
// Pools of redis connections
redisCompilers *redis.Pool
redisInput *redis.Pool
// Compilers
compilers = [1]string{"sshd"}
compilationTrigger = 2000
torun = []logcompiler.Compiler{}
// Routine handling
pullgr sync.WaitGroup
compilegr sync.WaitGroup
)
func main() {
// Create a chan to get the goroutines errors messages
pullreturn := make(chan error, 1)
// Create a chan to get os Signals
sortie := make(chan os.Signal, 1)
signal.Notify(sortie, os.Interrupt, os.Kill)
// Signal goroutine
// OS signaling and error handling goroutine
go func() {
<-sortie
fmt.Println("Exiting.")
log.Println("Exit")
os.Exit(0)
select {
case <-sortie:
fmt.Println("Exiting.")
compilegr.Wait()
log.Println("Exit")
os.Exit(0)
case err := <-pullreturn:
log.Println(err)
fmt.Println("Exiting.")
compilegr.Wait()
log.Println("Exit.")
os.Exit(1)
}
}()
// Setting up log file
@ -80,24 +93,23 @@ func main() {
// Usage and flags
flag.Usage = func() {
fmt.Printf("analyzer-d4-log:\n\n")
fmt.Printf(" Generate statistics about logs collected through d4 in\n")
fmt.Printf(" HTML format. Optionally serves the results over HTTP.\n")
fmt.Printf(" Generate statistics about logs collected through d4 in HTML format.\n")
fmt.Printf(" Logs should be groked and served as escaped JSON.\n")
fmt.Printf("\n")
flag.PrintDefaults()
fmt.Printf("\n")
fmt.Printf("The configuration directory should hold the following files\n")
fmt.Printf("to specify the settings to use:\n\n")
fmt.Printf(" mandatory: redis_d4 - host:port/db\n")
fmt.Printf(" mandatory: redis_queue - uuid\n")
fmt.Printf(" mandatory: redis_parsers - host:port/maxdb\n")
fmt.Printf(" optional: http_server - host:port\n\n")
fmt.Printf(" mandatory: redis_compilers - host:port/maxdb\n")
// fmt.Printf(" optional: http_server - host:port\n\n")
fmt.Printf("See conf.sample for an example.\n")
}
// Config
// c := conf{}
rd4 := redisconfD4{}
rp := redisconfParsers{}
ri := redisconfInput{}
rp := redisconfCompilers{}
flag.Parse()
if flag.NFlag() == 0 || *confdir == "" {
flag.Usage()
@ -112,37 +124,28 @@ func main() {
log.SetFlags(log.LstdFlags | log.Lshortfile)
}
// Dont't touch D4 server if Flushing
// Dont't touch input server if Flushing
if !*flush {
// Parse Redis D4 Config
tmp := config.ReadConfigFile(*confdir, "redis_d4")
// Parse Input Redis Config
tmp := config.ReadConfigFile(*confdir, "redis_input")
ss := strings.Split(string(tmp), "/")
if len(ss) <= 1 {
log.Fatal("Missing Database in Redis D4 config: should be host:port/database_name")
log.Fatal("Missing Database in Redis input config: should be host:port/database_name")
}
rd4.redisDB, _ = strconv.Atoi(ss[1])
ri.redisDB, _ = strconv.Atoi(ss[1])
var ret bool
ret, ss[0] = config.IsNet(ss[0])
if ret {
sss := strings.Split(string(ss[0]), ":")
rd4.redisHost = sss[0]
rd4.redisPort = sss[1]
ri.redisHost = sss[0]
ri.redisPort = sss[1]
} else {
log.Fatal("Redis config error.")
}
rd4.redisQueue = string(config.ReadConfigFile(*confdir, "redis_queue"))
// Connect to D4 Redis
// TODO use DialOptions to Dial with a timeout
redisD4, err = redis.Dial("tcp", rd4.redisHost+":"+rd4.redisPort, redis.DialDatabase(rd4.redisDB))
if err != nil {
log.Fatal(err)
}
defer redisD4.Close()
}
// Parse Redis Parsers Config
tmp := config.ReadConfigFile(*confdir, "redis_parsers")
// Parse Redis Compilers Config
tmp := config.ReadConfigFile(*confdir, "redis_compilers")
ss := strings.Split(string(tmp), "/")
if len(ss) <= 1 {
log.Fatal("Missing Database Count in Redis config: should be host:port/max number of DB")
@ -158,120 +161,85 @@ func main() {
log.Fatal("Redis config error.")
}
// Create a connection Pool
redisParsers = newPool(rp.redisHost+":"+rp.redisPort, rp.redisDBCount)
// Create a connection Pool for output Redis
redisCompilers = newPool(rp.redisHost+":"+rp.redisPort, rp.redisDBCount)
redisInput = newPool(ri.redisHost+":"+ri.redisPort, 16)
// Line counter to trigger HTML compilation
nblines := 0
// Init parser depending on the parser flags:
// Init compiler depending on the compiler flags:
if *all {
// Init all parsers
for _, v := range parsers {
// Init all compilers
for _, v := range compilers {
switch v {
case "sshd":
sshdrcon1, err := redisParsers.Dial()
sshdrcon0, err := redisCompilers.Dial()
if err != nil {
log.Fatal("Could not connect to Line one Redis")
log.Fatal("Could not connect to input line on Compiler Redis")
}
sshdrcon2, err := redisParsers.Dial()
defer sshdrcon0.Close()
sshdrcon1, err := redisCompilers.Dial()
if err != nil {
log.Fatal("Could not connect to Line two Redis")
log.Fatal("Could not connect to output line on Compiler Redis")
}
sshd := logparser.SshdParser{}
sshd.Set(&sshdrcon1, &sshdrcon2)
defer sshdrcon1.Close()
sshdrcon2, err := redisInput.Dial()
if err != nil {
log.Fatal("Could not connect to output line on Input Redis")
}
defer sshdrcon2.Close()
redisReader := inputreader.NewLPOPReader(&sshdrcon2, ri.redisDB, "sshd")
sshd := logcompiler.SSHDCompiler{}
sshd.Set(&pullgr, &sshdrcon0, &sshdrcon1, redisReader, compilationTrigger, &compilegr, &pullreturn, *retry)
torun = append(torun, &sshd)
}
}
} else if *specific != "" {
log.Println("TODO should run specific parser here")
log.Println("TODO should run specific compiler here")
}
// If we flush, we bypass the parsing loop
// If we flush, we bypass the compiling loop
if *flush {
for _, v := range torun {
err := v.Flush()
if err != nil {
log.Fatal(err)
}
compile()
}
// Parsing loop
} else if *fromfile != "" {
f, err = os.Open(*fromfile)
if err != nil {
log.Fatalf("Error opening seed file: %v", err)
}
defer f.Close()
scanner := bufio.NewScanner(f)
for scanner.Scan() {
logline := scanner.Text()
for _, v := range torun {
err := v.Parse(logline)
if err != nil {
log.Fatal(err)
}
}
nblines++
if nblines > compilationTrigger {
nblines = 0
// Non-blocking
if !compiling.compiling {
go compile()
}
}
}
} else {
// Pop D4 redis queue
for {
logline, err := redis.String(redisD4.Do("LPOP", "analyzer:3:"+rd4.redisQueue))
if err == redis.ErrNil {
// redis queue empty, let's sleep for a while
time.Sleep(time.Duration(*retry) * time.Minute)
} else if err != nil {
log.Fatal(err)
// let's parse
} else {
for _, v := range torun {
err := v.Parse(logline)
if err != nil {
log.Println(err)
}
}
nblines++
if nblines > compilationTrigger {
nblines = 0
// Non-blocking
if !compiling.compiling {
go compile()
}
}
}
log.Println("Exit")
os.Exit(0)
}
}
wg.Wait()
log.Println("Exit")
}
func compile() {
compiling.mu.Lock()
compiling.compiling = true
wg.Add(1)
log.Println("Compiling")
// Launching Pull routines
for _, v := range torun {
err := v.Compile()
if err != nil {
log.Fatal(err)
// If we read from a file, we set the reader to os.open
if *fromfile != "" {
f, err = os.Open(*fromfile)
if err != nil {
log.Fatalf("Error opening seed file: %v", err)
}
defer f.Close()
v.SetReader(f)
}
// we add pulling routines to a waitgroup,
// we wait for completion on exit
pullgr.Add(1)
go v.Pull(pullreturn)
}
log.Println("Done")
compiling.compiling = false
compiling.mu.Unlock()
wg.Done()
// Launching MISP export routines
// they can immediately die when exiting.
for _, v := range torun {
go func() {
ticker := time.NewTicker(24 * time.Hour)
for _ = range ticker.C {
v.MISPexport()
}
}()
}
pullgr.Wait()
log.Println("Exit")
}
func newPool(addr string, maxconn int) *redis.Pool {