Merge pull request #29 from mokaddem/multipleMerges

Addition of a timeline, recording of the time when orgs log in, and a draft of a testing framework
Alexandre Dulaunoy 2018-01-15 15:32:10 +01:00 committed by GitHub
commit 933e9e1907
15 changed files with 546 additions and 5 deletions


@@ -1,6 +1,7 @@
import math, random
import os
import json
import copy
import datetime, time
import logging
from collections import OrderedDict
@@ -158,3 +159,48 @@ class Trendings_helper:
tagSet.add(tag['name'])
to_ret[self.keyTag] = list(tagSet)
return to_ret
# Unlike getGenericTrending, this regroups items in the format {name: item, start: timestamp1, end: timestamp2}
# so that they can be displayed easily on the timeline.
def getGenericTrendingOvertime(self, dateS, dateE, choice=None, topNum=0):
if choice == 'categs':
trendingType = self.keyCateg
elif choice == 'tags':
trendingType = self.keyTag
else:
trendingType = self.keyEvent
dico_items = {}
to_format = []
prev_days = (dateE - dateS).days
# get data
for curDate in util.getXPrevDaysSpan(dateE, prev_days):
keyname = "{}:{}".format(trendingType, util.getDateStrFormat(curDate))
data = self.serv_redis_db.zrange(keyname, 0, topNum-1, desc=True, withscores=True)
data = [ [record[0].decode('utf8'), record[1]] for record in data ]
data = data if data is not None else []
to_format.append([util.getTimestamp(curDate), data])
for timestamp, array in to_format:
for item, _ in array:
if item not in dico_items:
dico_items[item] = []
dico_items[item].append(timestamp)
# sort timestamps in correct order
for item in dico_items.keys():
dico_items[item].sort()
# dico_items have the form: {item: [t1,t2,t4], ...}
to_ret = []
ONEDAY = 60*60*24
for item, timestamps in dico_items.items():
obj = {'name': item, 'start': timestamps[0], 'end': timestamps[0]+ONEDAY}
for t in timestamps:
if t-obj['end'] > ONEDAY: #new entry
to_ret.append(copy.deepcopy(obj))
obj['start'] = t
obj['end'] = t+ONEDAY
else: # continue entry
obj['end'] = t+ONEDAY
to_ret.append(obj)
return to_ret
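
To make the grouping above concrete, here is a small self-contained sketch (not part of the patch) of the same interval-merging logic with made-up timestamps; merge_daily_timestamps is a hypothetical helper name.

# Days on which an item appears are merged into continuous {name, start, end} blocks;
# a gap of more than one day between appearances starts a new block.
import copy

ONEDAY = 60 * 60 * 24

def merge_daily_timestamps(name, timestamps):
    timestamps = sorted(timestamps)
    blocks = []
    obj = {'name': name, 'start': timestamps[0], 'end': timestamps[0] + ONEDAY}
    for t in timestamps[1:]:
        if t - obj['end'] > ONEDAY:  # gap: close the current block and open a new one
            blocks.append(copy.deepcopy(obj))
            obj['start'] = t
            obj['end'] = t + ONEDAY
        else:  # consecutive day: extend the current block
            obj['end'] = t + ONEDAY
    blocks.append(obj)
    return blocks

# Example: appearances on day 0, day 1 and day 5 yield two blocks, [0, 2 days) and [5 days, 6 days)
print(merge_daily_timestamps('example-event', [0, ONEDAY, 5 * ONEDAY]))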


@@ -51,7 +51,7 @@ class Users_helper:
keyname = "{}:{}".format(self.keyTimestamp, org)
timestamps = self.serv_redis_db.zrange(keyname, 0, -1, desc=True, withscores=True)
if date is None:
to_return = [ t[1] for t in timestamps ]
to_return = [ datetime.datetime.fromtimestamp(float(t[1])) for t in timestamps ]
else:
to_return = []
for t in timestamps:
@@ -165,7 +165,7 @@ class Users_helper:
data.append(to_append)
except KeyError: # no data
data.append([0 for x in range(24)])
# swap: punchcard day starts on monday
# swap: punchcard day starts on sunday
data = [data[6]]+data[:6]
return data
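
A side note on the swap above (illustration, not part of the patch): datetime.weekday() numbers Monday as 0 and Sunday as 6, while the punchcard is drawn with Sunday as the first row, so the last row is moved to the front.

days = ['Mon', 'Tue', 'Wed', 'Thu', 'Fri', 'Sat', 'Sun']  # weekday() order
print([days[6]] + days[:6])  # ['Sun', 'Mon', 'Tue', 'Wed', 'Thu', 'Fri', 'Sat']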


@@ -108,4 +108,9 @@ mv temp/jquery-punchcard/src/punchcard.js ./static/js
mv temp/jquery-punchcard/src/punchcard.css ./static/css
wget https://momentjs.com/downloads/moment.js -O ./static/js/moment.js
# timeline
VISJS_VERSION="4.21.0"
wget https://cdnjs.cloudflare.com/ajax/libs/vis/${VISJS_VERSION}/vis.min.js -O ./static/js/vis.min.js
wget https://cdnjs.cloudflare.com/ajax/libs/vis/${VISJS_VERSION}/vis.min.css -O ./static/css/vis.min.css
rm -rf ./temp


@@ -539,5 +539,18 @@ def getTypeaheadData():
data = trendings_helper.getTypeaheadData(dateS, dateE)
return jsonify(data)
@app.route("/_getGenericTrendingOvertime")
def getGenericTrendingOvertime():
try:
dateS = datetime.datetime.fromtimestamp(float(request.args.get('dateS')))
dateE = datetime.datetime.fromtimestamp(float(request.args.get('dateE')))
except:
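# fall back to the last 7 days when dateS/dateE are missing or malformed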
dateS = datetime.datetime.now() - datetime.timedelta(days=7)
dateE = datetime.datetime.now()
choice = request.args.get('choice', 'events')
data = trendings_helper.getGenericTrendingOvertime(dateS, dateE, choice)
return jsonify(data)
if __name__ == '__main__':
app.run(host=server_host, port=server_port, threaded=True)
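
A quick way to exercise the new route (a hedged sketch, not part of the patch): the host and port below are assumptions, and dateS/dateE are Unix timestamps in seconds.

import time
import requests

params = {
    'dateS': int(time.time()) - 7 * 24 * 3600,  # one week ago
    'dateE': int(time.time()),
    'choice': 'tags',  # 'events', 'categs' or 'tags'
}
resp = requests.get('http://localhost:8001/_getGenericTrendingOvertime', params=params)
# each entry has the shape {'name': ..., 'start': <ts>, 'end': <ts>} consumed by the timeline widget
print(resp.json()[:3])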


@@ -9,6 +9,7 @@ var tagPie = ["#tagPie"];
var tagLine = ["#tagLine"];
var sightingLineWidget;
var discLine = ["#discussionLine"];
var timeline;
var allData;
var globalColorMapping = {};
@@ -103,6 +104,14 @@ var typeaheadOption_tag = {
updateLineForLabel(tagLine, tag, undefined, url_getTrendingTag);
}
}
var timeline_option = {
groupOrder: 'content',
maxHeight: '94vh',
verticalScroll: true,
horizontalScroll: true,
zoomKey: 'ctrlKey',
};
/* FUNCTIONS */
function getColor(label) {
@@ -134,7 +143,18 @@ function getTextColour(rgb) {
return 'black';
}
}
function legendFormatter(label, series) {
// If JSON (from a tag), only retrieve the name; otherwise return the supplied argument.
function getOnlyName(potentialJson) {
try {
jsonLabel = JSON.parse(potentialJson);
return jsonLabel.name;
} catch(err) {
return potentialJson;
}
}
function legendFormatter(label) {
try {
jsonLabel = JSON.parse(label);
var backgroundColor = jsonLabel.colour;
@@ -156,7 +176,7 @@ function legendFormatter(label, series) {
}
return '<div '
+ 'style="font-size:8pt;text-align:inherit;padding:2px;">'
+ '<a class="tagElem" style="background-color: white; color: black;"> ' + labelLimited
+ '<a class="tagElem" title="'+label+'" style="background-color: white; color: black;"> ' + labelLimited
+ '</a>'
+ '</div>';
}
@@ -396,6 +416,43 @@ function updateDisc() {
});
}
function updateTimeline() {
var selected = $( "#timeline_selector" ).val();
$.getJSON( url_getGenericTrendingOvertime+"?dateS="+parseInt(dateStart.getTime()/1000)+"&dateE="+parseInt(dateEnd.getTime()/1000)+"&choice="+selected, function( data ) {
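// The back-end returns {name, start, end} with timestamps in seconds; vis.js expects milliseconds,
// hence the *1000 below. Each distinct name becomes a vis group, each continuous period an item.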
var items = [];
var groups = new vis.DataSet();
var dico_groups = {};
var i = 1;
var g = 1;
for (var obj of data) {
var index = dico_groups[obj.name];
if (index == undefined) { // new group
index = groups.add({id: g, content: legendFormatter(obj.name)});
dico_groups[obj.name] = g;
g++;
}
items.push({
id: i,
content: getOnlyName(obj.name),
start: obj.start*1000,
end: obj.end*1000,
group: dico_groups[obj.name]
});
i++;
}
items = new vis.DataSet(items);
if (timeline === undefined) { // create timeline
timeline = new vis.Timeline(document.getElementById('timeline'));
}
var dateEndExtended = new Date(dateEnd).setDate(dateEnd.getDate()+1); // dateEnd+1
timeline_option.start = dateStart;
timeline_option.end = dateEndExtended;
timeline.setOptions(timeline_option);
timeline.setGroups(groups);
timeline.setItems(items);
});
}
function dateChanged() {
dateStart = datePickerWidgetStart.datepicker( "getDate" );
dateEnd = datePickerWidgetEnd.datepicker( "getDate" );
@@ -404,6 +461,7 @@ function dateChanged() {
updatePieLine(tagPie, tagLine, url_getTrendingTag);
updateSignthingsChart();
updateDisc();
updateTimeline();
}
$(document).ready(function () {
@@ -426,6 +484,7 @@ $(document).ready(function () {
updatePieLine(tagPie, tagLine, url_getTrendingTag)
updateSignthingsChart();
updateDisc();
updateTimeline();
$( "#num_selector" ).change(function() {
var sel = parseInt($( this ).val());
@@ -433,9 +492,12 @@ $(document).ready(function () {
window.location.href = url_currentPage+'?maxNum='+maxNum;
});
$( "#timeline_selector" ).change(function() {
updateTimeline();
});
$("<div id='tooltip'></div>").css({
position: "absolute",
display: "none",
}).appendTo("body");
});


@@ -38,6 +38,12 @@
</head>
<style>
.leaflet-top {
position: absolute;
z-index: 801;
pointer-events: none;
}
.panel-body {
padding: 0px;
}


@@ -30,6 +30,11 @@
</head>
<style>
.leaflet-top {
position: absolute;
z-index: 801;
pointer-events: none;
}
.tagElem {
display: inline-block;


@@ -36,6 +36,9 @@
<link href="{{ url_for('static', filename='css/jquery-ui.min.css') }}" rel="stylesheet" type="text/css" />
<script src="{{ url_for('static', filename='js/jquery-ui.min.js') }}"></script>
<link href="{{ url_for('static', filename='css/vis.min.css') }}" rel="stylesheet" type="text/css" />
<script src="{{ url_for('static', filename='js/vis.min.js') }}"></script>
</head>
<style>
@@ -206,6 +209,22 @@ small {
</div>
</div><!-- /.col-lg-12 -->
<div class="col-lg-12">
<div class="panel panel-default" style="">
<div class="panel-heading" style="font-weight: bold;">
<b>Timeline:</b>
<select id="timeline_selector">
<option value="events" selected="">Events</option>
<option value="categs">Categories</option>
<option value="tags">Tags</option>
</select>
</div>
<div class="panel-body" style="">
<div id="timeline" style="width:100%; height: 100%;"></div>
</div>
</div>
</div><!-- /.col-lg-12 -->
</div><!-- /.row -->
</div> <!-- /.container-fluid -->
@@ -226,6 +245,7 @@ small {
var url_getTrendingTag = "{{ url_for('getTrendingTags') }}";
var url_getTrendingSightings = "{{ url_for('getTrendingSightings') }}";
var url_getTrendingDisc = "{{ url_for('getTrendingDisc') }}";
var url_getGenericTrendingOvertime = "{{ url_for('getGenericTrendingOvertime') }}";
var url_getTypeaheadData = "{{ url_for('getTypeaheadData') }}";

tests/start_framework.sh (Executable file, 19 lines added)

@@ -0,0 +1,19 @@
#!/bin/bash
set -e
GREEN="\\033[1;32m"
DEFAULT="\\033[0;39m"
RED="\\033[1;31m"
[ -z "$DASH_HOME" ] && echo "Needs the env var DASHENV. Run the script from the virtual environment." && exit 1;
conf_dir="${DASH_HOME}/config/"
redis_dir="${DASH_HOME}/../redis/src/"
test_dir="${DASH_HOME}/tests/"
screenName="Misp-Dashboard-test"
screen -dmS "$screenName"
sleep 0.1
echo -e $GREEN"* Launching Redis servers"$DEFAULT
screen -S "$screenName" -X screen -t "redis-server" bash -c $redis_dir'redis-server --port 6260; read x'

tests/terminate_framework.sh (Executable file, 21 lines added)

@@ -0,0 +1,21 @@
#!/bin/bash
set -e
GREEN="\\033[1;32m"
DEFAULT="\\033[0;39m"
RED="\\033[1;31m"
[ -z "$DASH_HOME" ] && echo "Needs the env var DASHENV. Run the script from the virtual environment." && exit 1;
conf_dir="${DASH_HOME}/config/"
redis_dir="${DASH_HOME}/../redis/src/"
test_dir="${DASH_HOME}/tests/"
screenName="Misp-Dashboard-test"
bash -c $redis_dir'redis-cli -p 6260 shutdown'
screen -S $screenName -X quit
echo -e $GREEN"* Shutting down Redis servers"$DEFAULT

tests/test_config.cfg (Normal file, 58 lines added)

@@ -0,0 +1,58 @@
[Dashboard]
#hours
graph_log_refresh_rate = 1
#sec
rotation_wait_time = 30
max_img_rotation = 10
hours_spanned = 48
zoomlevel = 15
# [1->12]
size_dashboard_left_width = 5
size_openStreet_pannel_perc = 55
size_world_pannel_perc = 35
item_to_plot = Attribute.category
fieldname_order=["Event.id", "Attribute.Tag", "Attribute.category", "Attribute.type", ["Attribute.value", "Attribute.comment"]]
char_separator=||
[GEO]
#min
updateFrequency = 60
zoomlevel = 11
# ~meter
clusteringDistance = 10
[CONTRIB]
max_number_of_last_contributor = 10
min_between_reload = 5
additional_help_text = ["Sightings multiplies earned points by 2", "Editing an attribute earns you the same as creating one"]
[Log]
directory=logs
filename=logs.log
[RedisGlobal]
host=localhost
port=6260
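# must match the dedicated test Redis launched by start_framework.sh (redis-server --port 6260)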
#misp_web_url = http://192.168.56.50
misp_web_url = https://misppriv.circl.lu
#zmq_url=tcp://192.168.56.50:50000
zmq_url=tcp://localhost:9990
[RedisLIST]
db=3
listName=bufferList
[RedisLog]
db=0
channel=1
channelLastContributor = lastContributor
channelLastAwards = lastAwards
[RedisMap]
db=1
channelDisp=PicToDisplay
pathMaxMindDB=../data/GeoIP2-City_20171017/GeoIP2-City.mmdb
path_countrycode_to_coord_JSON=../data/country_code_lat_long.json
[RedisDB]
db=2

tests/test_geo.py (Executable file, 81 lines added)

@@ -0,0 +1,81 @@
#!/usr/bin/env python3.5
import configparser
import redis
import sys,os
import datetime
sys.path.append('..')
configfile = 'test_config.cfg'
cfg = configparser.ConfigParser()
cfg.read(configfile)
serv_redis_db = redis.StrictRedis(
host='localhost',
port=6260,
db=1)
from helpers import geo_helper
geo_helper = geo_helper.Geo_helper(serv_redis_db, cfg)
categ = 'Network Activity'
def wipeRedis():
serv_redis_db.flushall()
def errorExit():
sys.exit(1)
def test():
flag_error = False
today = datetime.datetime.now()
# IP -> Coord
supposed_ip = '8.8.8.8'
geo_helper.getCoordFromIpAndPublish(supposed_ip, categ)
rep = geo_helper.getTopCoord(today)
excpected_result = [['{"lat": 37.751, "lon": -97.822, "categ": "Network Activity", "value": "8.8.8.8"}', 1.0]]
if rep != excpected_result:
print('ip to coord result not matching')
flag_error = True
# gethitmap
rep = geo_helper.getHitMap(today)
excpected_result = [['US', 1.0]]
if rep != excpected_result:
print('getHitMap result not matching')
flag_error = True
# getCoordsByRadius
rep = geo_helper.getCoordsByRadius(today, today, 0.000, 0.000, '1')
excpected_result = []
if rep != excpected_result:
print('getCoordsByRadius result not matching')
flag_error = True
rep = geo_helper.getCoordsByRadius(today, today, 37.750, -97.821, '10')
excpected_result = [[['{"categ": "Network Activity", "value": "8.8.8.8"}'], [-97.82200008630753, 37.75100012475438]]]
if rep != excpected_result:
print('getCoordsByRadius result not matching')
flag_error = True
wipeRedis()
# Phone -> Coord
phoneNumber = '(+352) 247-82000'
geo_helper.getCoordFromPhoneAndPublish(phoneNumber, categ)
rep = geo_helper.getTopCoord(datetime.datetime.now())[0]
excpected_result = ['{"lat": "49.7500", "lon": "6.1667"}', 1.0]
if rep != excpected_result:
print('Phone to coord result not matching')
flag_error = True
return flag_error
wipeRedis()
if test():
wipeRedis()
errorExit()
else:
wipeRedis()
print('Geo tests succeeded')

tests/test_trendings.py (Executable file, 74 lines added)

@@ -0,0 +1,74 @@
#!/usr/bin/env python3.5
import configparser
import redis
import sys,os
import datetime, time
sys.path.append('..')
configfile = 'test_config.cfg'
cfg = configparser.ConfigParser()
cfg.read(configfile)
serv_redis_db = redis.StrictRedis(
host='localhost',
port=6260,
db=1)
from helpers import trendings_helper
trendings_helper = trendings_helper.Trendings_helper(serv_redis_db, cfg)
def wipeRedis():
serv_redis_db.flushall()
def errorExit():
sys.exit(1)
def test():
flag_error = False
today = datetime.datetime.now()
now = time.time
# Events
event1 = 'test_event_1'
event2 = 'test_event_2'
trendings_helper.addTrendingEvent(event1, now())
trendings_helper.addTrendingEvent(event1, now()+5)
trendings_helper.addTrendingEvent(event2, now()+10)
expected_result = [[int(now()), [[event1, 2.0], [event2, 1.0]]]]
rep = trendings_helper.getTrendingEvents(today, today)
if rep[0][1] != expected_result[0][1]: #ignore timestamps
print('getTrendingEvents result not matching')
flag_error = True
# Tags
tag1 = {'id': 'tag1', 'colour': 'blue', 'name': 'tag1Name'}
tag2 = {'id': 'tag2', 'colour': 'red', 'name': 'tag2Name'}
trendings_helper.addTrendingTags([tag1], now())
trendings_helper.addTrendingTags([tag1], now()+5)
trendings_helper.addTrendingTags([tag2], now()+10)
expected_result = [[int(now()), [[tag1, 2.0], [tag2, 1.0]]]]
rep = trendings_helper.getTrendingTags(today, today)
if rep[0][1] != expected_result[0][1]: #ignore timestamps
print('getTrendingTags result not matching')
flag_error = True
# Sightings
trendings_helper.addSightings(now())
trendings_helper.addSightings(now())
trendings_helper.addFalsePositive(now())
expected_result = [[1512636256, {'sightings': 2, 'false_positive': 1}]]
rep = trendings_helper.getTrendingSightings(today, today)
if rep[0][1] != expected_result[0][1]: #ignore timestamps
print('getTrendingSightings result not matching')
flag_error = True
return flag_error
wipeRedis()
if test():
wipeRedis()
errorExit()
else:
wipeRedis()
print('Trendings tests succeeded')

tests/test_users.py (Executable file, 120 lines added)

@@ -0,0 +1,120 @@
#!/usr/bin/env python3.5
import configparser
import redis
import sys,os
import datetime, time
sys.path.append('..')
configfile = 'test_config.cfg'
cfg = configparser.ConfigParser()
cfg.read(configfile)
serv_redis_db = redis.StrictRedis(
host='localhost',
port=6260,
db=1)
from helpers import users_helper
users_helper = users_helper.Users_helper(serv_redis_db, cfg)
def wipeRedis():
serv_redis_db.flushall()
def errorExit():
sys.exit(1)
# return True if the arrays contain the same elements, even if they are unordered
def checkArrayEquality(arr1, arr2):
temp = arr2[:]
for i in arr1:
if i in temp:
temp.remove(i)
else:
return False
return len(temp) == 0 # arr2 must not contain extra elements either
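# Illustration (not part of the patch): order is ignored but duplicates are counted, e.g.
#   checkArrayEquality([1, 2, 2], [2, 1, 2])  -> True
#   checkArrayEquality([1, 2], [1, 2, 3])     -> False (arr2 contains an extra element)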
def test():
flag_error = False
now = int(time.time())
today = datetime.datetime.fromtimestamp(now)
twoDayAgo = today - datetime.timedelta(days=2)
DAY = 60*60*24
org = 'TEST_ORG'
org2 = 'TEST_ORG2'
# logged in dates
users_helper.add_user_login(now, org)
users_helper.add_user_login(now+5, org)
users_helper.add_user_login(now-DAY*2, org)
expected_result = [datetime.datetime.fromtimestamp(now-DAY*2), datetime.datetime.fromtimestamp(now+5), datetime.datetime.fromtimestamp(now)]
rep = users_helper.getDates(org)
if not checkArrayEquality(rep, expected_result):
print('getDates result not matching for all dates')
flag_error = True
expected_result = [datetime.datetime.fromtimestamp(now+5), datetime.datetime.fromtimestamp(now)]
rep = users_helper.getDates(org, datetime.datetime.now())
if not checkArrayEquality(rep, expected_result):
print('getDates result not matching for query 1')
flag_error = True
expected_result = []
rep = users_helper.getDates(org, datetime.datetime.now()-datetime.timedelta(days=7))
if not checkArrayEquality(rep, expected_result):
print('getDates result not matching for query 2')
flag_error = True
# all logged orgs
users_helper.add_user_login(now, org2)
expected_result = [datetime.datetime.fromtimestamp(now+5), datetime.datetime.fromtimestamp(now), datetime.datetime.fromtimestamp(now)]
rep = users_helper.getUserLogins(datetime.datetime.now())
if not checkArrayEquality(rep, expected_result):
print('getUserLogins result not matching')
flag_error = True
# all logged in org
expected_result = [org, org2]
rep = users_helper.getAllLoggedInOrgs(datetime.datetime.fromtimestamp(now+5), prev_days=7)
if not checkArrayEquality(rep, expected_result):
print('getAllLoggedInOrgs result not matching')
flag_error = True
# punchcard
expected_result = [ [0 for x in range(24)] for y in range(7)]
# set correct values
day = today.weekday()
hour = today.hour
expected_result[day][hour] = 3
day = twoDayAgo.weekday()
hour = twoDayAgo.hour
expected_result[day][hour] = 1
# swap: punchcard day starts on sunday
expected_result = [expected_result[6]]+expected_result[:6]
rep = users_helper.getUserLoginsForPunchCard(datetime.datetime.fromtimestamp(now), org=None, prev_days=6)
if not checkArrayEquality(rep, expected_result):
print('getUserLoginsForPunchCard result not matching')
flag_error = True
# overtime
rep = users_helper.getUserLoginsAndContribOvertime(datetime.datetime.fromtimestamp(now), org=None, prev_days=6)
t1 = all([tab[1]==0 for tab in rep['contrib']]) # no contribution
t2 = [True for tab in rep['login'] if tab[1] == 3]
t2 = len(t2) == 1 # exactly one bucket with 3 logins, the others at 0
if not (t1 and t2):
print('getUserLoginsAndContribOvertime result not matching')
flag_error = True
return flag_error
wipeRedis()
if test():
wipeRedis()
errorExit()
else:
wipeRedis()
print('Users tests succeeded')

tests/testall.sh (Executable file, 11 lines added)

@@ -0,0 +1,11 @@
#!/bin/bash
set -e
[ -z "$DASH_HOME" ] && echo "Needs the env var DASHENV. Run the script from the virtual environment." && exit 1;
./start_framework.sh
# Wait a bit for Redis to start
sleep 1
python test_geo.py
python test_users.py
python test_trendings.py
./terminate_framework.sh